From 5c5d5aa7afe98886d5ad876568deca6d53570e65 Mon Sep 17 00:00:00 2001
From: Christoph Zwerschke
Date: Mon, 2 Sep 2024 21:50:37 +0200
Subject: [PATCH 01/50] Update GitHub actions

---
 .github/workflows/test.yml | 4 ++--
 pyproject.toml             | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index e99059b8..01668f57 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -11,10 +11,10 @@ jobs:
         python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9', 'pypy3.10']

     steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v4
+        uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}

diff --git a/pyproject.toml b/pyproject.toml
index e149de23..3b9342b1 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -53,11 +53,11 @@ optional = true
 [tool.poetry.group.test.dependencies]
 pytest = [
     { version = "^8.3", python = ">=3.8" },
-    { version = "^7.4", python = "<3.8"}
+    { version = "^7.4", python = "<3.8" }
 ]
 pytest-asyncio = [
     { version = "^0.23.8", python = ">=3.8" },
-    { version = "~0.21.1", python = "<3.8"}
+    { version = "~0.21.1", python = "<3.8" }
 ]
 pytest-benchmark = "^4.0"
 pytest-cov = [

From 7d722667c7aa6e1df8137d92dba6a911e155e0d7 Mon Sep 17 00:00:00 2001
From: Christoph Zwerschke
Date: Mon, 2 Sep 2024 22:33:23 +0200
Subject: [PATCH 02/50] Fix docstring

---
 src/graphql/utilities/ast_to_dict.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/src/graphql/utilities/ast_to_dict.py b/src/graphql/utilities/ast_to_dict.py
index fea70b32..3a2b3504 100644
--- a/src/graphql/utilities/ast_to_dict.py
+++ b/src/graphql/utilities/ast_to_dict.py
@@ -37,9 +37,8 @@ def ast_to_dict(
 ) -> Any:
     """Convert a language AST to a nested Python dictionary.

-    Set `location` to True in order to get the locations as well.
+    Set `locations` to True in order to get the locations as well.
""" - """Convert a node to a nested Python dictionary.""" if isinstance(node, Node): if cache is None: cache = {} From bc9fa5e1029e2be870879699c29351d2f5d948ac Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 21:24:43 +0200 Subject: [PATCH 03/50] Fix and simplify tox.ini --- tox.ini | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tox.ini b/tox.ini index f32bcfff..c261c70e 100644 --- a/tox.ini +++ b/tox.ini @@ -11,7 +11,7 @@ python = 3.10: py310 3.11: py311 3.12: py312 - pypy3: pypy9 + pypy3: pypy39 pypy3.9: pypy39 pypy3.10: pypy310 @@ -46,9 +46,9 @@ deps = pytest-cov>=4.1,<6 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 - py37,py38,py39,pypy39: typing-extensions>=4.7.1,<5 + py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 commands = # to also run the time-consuming tests: tox -e py311 -- --run-slow # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable - py37,py38,py39,py310,py311,pypy39,pypy310: pytest tests {posargs} + py3{7,8,9,10,11}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From eb7c86bbcf78f8030e4951bf76d4d925f1a13881 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 21:52:39 +0200 Subject: [PATCH 04/50] Fix test_description --- tests/pyutils/test_description.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 57edff39..8a19396d 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -42,7 +42,7 @@ def registered(base: type): try: yield None finally: - unregister_description(LazyString) + unregister_description(base) def describe_description(): From ea8402198cf891716a18da6a5d9090f0246272d6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 3 Sep 2024 22:38:59 +0200 Subject: [PATCH 05/50] Fix coverage --- src/graphql/pyutils/description.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/graphql/pyutils/description.py b/src/graphql/pyutils/description.py index 812d61fe..9d43a86d 100644 --- a/src/graphql/pyutils/description.py +++ b/src/graphql/pyutils/description.py @@ -51,7 +51,7 @@ def unregister(cls, base: type) -> None: msg = "Only types can be unregistered." 
raise TypeError(msg) if isinstance(cls.bases, tuple): - if base in cls.bases: + if base in cls.bases: # pragma: no branch cls.bases = tuple(b for b in cls.bases if b is not base) if not cls.bases: cls.bases = object From 02bf4a3ead05f16acbe8d14ebcb67244522bd8b0 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Sep 2024 20:28:59 +0200 Subject: [PATCH 06/50] Update Sphinx requirements --- docs/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index f52741c8..9652132e 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ -sphinx>=7.3.7,<8 -sphinx_rtd_theme>=2.0.0,<3 +sphinx>=7,<8 +sphinx_rtd_theme>=2,<3 From 6e6d5be7516324c4e2a2ac0e352fc339f52de82e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Wed, 4 Sep 2024 21:06:01 +0200 Subject: [PATCH 07/50] Update the README file --- README.md | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 7a0a1e7a..913f81e5 100644 --- a/README.md +++ b/README.md @@ -14,8 +14,8 @@ An extensive test suite with over 2300 unit tests and 100% coverage comprises a replication of the complete test suite of GraphQL.js, making sure this port is reliable and compatible with GraphQL.js. -The current stable version 3.2.3 of GraphQL-core is up-to-date with GraphQL.js -version 16.6.0 and supports Python version 3.7 and newer. +The current stable version 3.2.4 of GraphQL-core is up-to-date with GraphQL.js +version 16.8.2 and supports Python version 3.6 to 3.12. You can also try out the latest alpha version 3.3.0a6 of GraphQL-core which is up-to-date with GraphQL.js version 17.0.0a2. @@ -208,6 +208,10 @@ Some restrictions (mostly in line with the design goals): * supports asynchronous operations only via async.io (does not support the additional executors in GraphQL-core) +Note that meanwhile we are using the amazing [ruff](https://docs.astral.sh/ruff/) tool +to both format and check the code of GraphQL-core 3, +in addition to using [mypy](https://mypy-lang.org/) as type checker. + ## Integration with other libraries and roadmap @@ -217,14 +221,12 @@ Some restrictions (mostly in line with the design goals): also been created by Syrus Akbary, who meanwhile has handed over the maintenance and future development to members of the GraphQL-Python community. - The current version 2 of Graphene is using Graphql-core 2 as core library for much of - the heavy lifting. Note that Graphene 2 is not compatible with GraphQL-core 3. - The new version 3 of Graphene will use GraphQL-core 3 instead of GraphQL-core 2. + Graphene 3 is now using Graphql-core 3 as core library for much of the heavy lifting. * [Ariadne](https://github.com/mirumee/ariadne) is a Python library for implementing GraphQL servers using schema-first approach created by Mirumee Software. - Ariadne is already using GraphQL-core 3 as its GraphQL implementation. + Ariadne is also using GraphQL-core 3 as its GraphQL implementation. 
* [Strawberry](https://github.com/strawberry-graphql/strawberry), created by Patrick Arminio, is a new GraphQL library for Python 3, inspired by dataclasses, @@ -240,6 +242,7 @@ Changes are tracked as ## Credits and history The GraphQL-core 3 library + * has been created and is maintained by Christoph Zwerschke * uses ideas and code from GraphQL-core 2, a prior work by Syrus Akbary * is a Python port of GraphQL.js which has been developed by Lee Byron and others From b7a18ed48b7d97a79c2d0db5a8b53d820c67b8d2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:20:10 +0200 Subject: [PATCH 08/50] Implement OneOf Input Objects via @oneOf directive Replicates graphql/graphql-js@8cfa3de8a12822efdaa52e43ecd07dea57b4f926 --- src/graphql/__init__.py | 2 + src/graphql/execution/values.py | 15 +- src/graphql/type/__init__.py | 2 + src/graphql/type/definition.py | 5 + src/graphql/type/directives.py | 9 ++ src/graphql/type/introspection.py | 5 + src/graphql/type/validate.py | 24 ++- src/graphql/utilities/coerce_input_value.py | 24 +++ src/graphql/utilities/extend_schema.py | 9 ++ src/graphql/utilities/value_from_ast.py | 8 + .../rules/values_of_correct_type.py | 72 ++++++++- tests/execution/test_oneof.py | 151 ++++++++++++++++++ tests/fixtures/schema_kitchen_sink.graphql | 6 + tests/language/test_schema_printer.py | 6 + tests/type/test_introspection.py | 120 ++++++++++++++ tests/type/test_validation.py | 43 +++++ tests/utilities/test_build_ast_schema.py | 16 +- tests/utilities/test_coerce_input_value.py | 93 +++++++++++ tests/utilities/test_find_breaking_changes.py | 2 + tests/utilities/test_print_schema.py | 4 + tests/utilities/test_value_from_ast.py | 17 ++ tests/validation/harness.py | 6 + .../validation/test_values_of_correct_type.py | 94 +++++++++++ 23 files changed, 720 insertions(+), 13 deletions(-) create mode 100644 tests/execution/test_oneof.py diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index e85c51ee..f70e77b0 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -259,6 +259,7 @@ GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, # "Enum" of Type Kinds TypeKind, # Constant Deprecation Reason @@ -504,6 +505,7 @@ "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", + "GraphQLOneOfDirective", "TypeKind", "DEFAULT_DEPRECATION_REASON", "introspection_types", diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 4810a8bd..1c223b60 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -128,16 +128,20 @@ def coerce_variable_values( continue def on_input_value_error( - path: list[str | int], invalid_value: Any, error: GraphQLError + path: list[str | int], + invalid_value: Any, + error: GraphQLError, + var_name: str = var_name, + var_def_node: VariableDefinitionNode = var_def_node, ) -> None: invalid_str = inspect(invalid_value) - prefix = f"Variable '${var_name}' got invalid value {invalid_str}" # noqa: B023 + prefix = f"Variable '${var_name}' got invalid value {invalid_str}" if path: - prefix += f" at '{var_name}{print_path_list(path)}'" # noqa: B023 + prefix += f" at '{var_name}{print_path_list(path)}'" on_error( GraphQLError( prefix + "; " + error.message, - var_def_node, # noqa: B023 + var_def_node, original_error=error, ) ) @@ -193,7 +197,8 @@ def get_argument_values( ) raise GraphQLError(msg, value_node) continue # pragma: no cover - is_null = variable_values[variable_name] is None + 
variable_value = variable_values[variable_name] + is_null = variable_value is None or variable_value is Undefined if is_null and is_non_null_type(arg_type): msg = f"Argument '{name}' of non-null type '{arg_type}' must not be null." diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index 4db6516d..b95e0e55 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -137,6 +137,7 @@ GraphQLStreamDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, # Keyword Args GraphQLDirectiveKwargs, # Constant Deprecation Reason @@ -286,6 +287,7 @@ "GraphQLStreamDirective", "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", + "GraphQLOneOfDirective", "GraphQLDirectiveKwargs", "DEFAULT_DEPRECATION_REASON", "is_specified_scalar_type", diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index dbca4e66..312a41b2 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1272,6 +1272,7 @@ class GraphQLInputObjectTypeKwargs(GraphQLNamedTypeKwargs, total=False): fields: GraphQLInputFieldMap out_type: GraphQLInputFieldOutType | None + is_one_of: bool class GraphQLInputObjectType(GraphQLNamedType): @@ -1301,6 +1302,7 @@ class GeoPoint(GraphQLInputObjectType): ast_node: InputObjectTypeDefinitionNode | None extension_ast_nodes: tuple[InputObjectTypeExtensionNode, ...] + is_one_of: bool def __init__( self, @@ -1311,6 +1313,7 @@ def __init__( extensions: dict[str, Any] | None = None, ast_node: InputObjectTypeDefinitionNode | None = None, extension_ast_nodes: Collection[InputObjectTypeExtensionNode] | None = None, + is_one_of: bool = False, ) -> None: super().__init__( name=name, @@ -1322,6 +1325,7 @@ def __init__( self._fields = fields if out_type is not None: self.out_type = out_type # type: ignore + self.is_one_of = is_one_of @staticmethod def out_type(value: dict[str, Any]) -> Any: @@ -1340,6 +1344,7 @@ def to_kwargs(self) -> GraphQLInputObjectTypeKwargs: out_type=None if self.out_type is GraphQLInputObjectType.out_type else self.out_type, + is_one_of=self.is_one_of, ) def __copy__(self) -> GraphQLInputObjectType: # pragma: no cover diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 17e8083c..46201d38 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -261,11 +261,20 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behaviour of this scalar.", ) +# Used to declare an Input Object as a OneOf Input Objects. +GraphQLOneOfDirective = GraphQLDirective( + name="oneOf", + locations=[DirectiveLocation.INPUT_OBJECT], + args={}, + description="Indicates an Input Object is a OneOf Input Object.", +) + specified_directives: tuple[GraphQLDirective, ...] 
= ( GraphQLIncludeDirective, GraphQLSkipDirective, GraphQLDeprecatedDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ) """A tuple with all directives from the GraphQL specification""" diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index 866a0499..e59386a4 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -305,6 +305,7 @@ def __new__(cls): resolve=cls.input_fields, ), "ofType": GraphQLField(_Type, resolve=cls.of_type), + "isOneOf": GraphQLField(GraphQLBoolean, resolve=cls.is_one_of), } @staticmethod @@ -396,6 +397,10 @@ def input_fields(type_, _info, includeDeprecated=False): def of_type(type_, _info): return getattr(type_, "of_type", None) + @staticmethod + def is_one_of(type_, _info): + return type_.is_one_of if is_input_object_type(type_) else None + _Type: GraphQLObjectType = GraphQLObjectType( name="__Type", diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 8a6b7257..c1e806c1 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -16,7 +16,7 @@ SchemaDefinitionNode, SchemaExtensionNode, ) -from ..pyutils import and_list, inspect +from ..pyutils import Undefined, and_list, inspect from ..utilities.type_comparators import is_equal_type, is_type_sub_type_of from .definition import ( GraphQLEnumType, @@ -482,6 +482,28 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: ], ) + if input_obj.is_one_of: + self.validate_one_of_input_object_field(input_obj, field_name, field) + + def validate_one_of_input_object_field( + self, + type_: GraphQLInputObjectType, + field_name: str, + field: GraphQLInputField, + ) -> None: + if is_non_null_type(field.type): + self.report_error( + f"OneOf input field {type_.name}.{field_name} must be nullable.", + field.ast_node and field.ast_node.type, + ) + + if field.default_value is not Undefined: + self.report_error( + f"OneOf input field {type_.name}.{field_name}" + " cannot have a default value.", + field.ast_node, + ) + def get_operation_type_node( schema: GraphQLSchema, operation: OperationType diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index db74d272..ab06caf1 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -130,6 +130,30 @@ def coerce_input_value( + did_you_mean(suggestions) ), ) + + if type_.is_one_of: + keys = list(coerced_dict) + if len(keys) != 1: + on_error( + path.as_list() if path else [], + input_value, + GraphQLError( + "Exactly one key must be specified" + f" for OneOf type '{type_.name}'.", + ), + ) + else: + key = keys[0] + value = coerced_dict[key] + if value is None: + on_error( + (path.as_list() if path else []) + [key], + value, + GraphQLError( + f"Field '{key}' must be non-null.", + ), + ) + return type_.out_type(coerced_dict) if is_leaf_type(type_): diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index c5af8669..fc6cee77 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -65,6 +65,7 @@ GraphQLNullableType, GraphQLObjectType, GraphQLObjectTypeKwargs, + GraphQLOneOfDirective, GraphQLOutputType, GraphQLScalarType, GraphQLSchema, @@ -777,6 +778,7 @@ def build_input_object_type( fields=partial(self.build_input_field_map, all_nodes), ast_node=ast_node, extension_ast_nodes=extension_nodes, + is_one_of=is_one_of(ast_node), ) def build_type(self, ast_node: 
TypeDefinitionNode) -> GraphQLNamedType: @@ -822,3 +824,10 @@ def get_specified_by_url( specified_by_url = get_directive_values(GraphQLSpecifiedByDirective, node) return specified_by_url["url"] if specified_by_url else None + + +def is_one_of(node: InputObjectTypeDefinitionNode) -> bool: + """Given an input object node, returns if the node should be OneOf.""" + from ..execution import get_directive_values + + return get_directive_values(GraphQLOneOfDirective, node) is not None diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index 67ed11dc..dfefb723 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -118,6 +118,14 @@ def value_from_ast( return Undefined coerced_obj[field.out_name or field_name] = field_value + if type_.is_one_of: + keys = list(coerced_obj) + if len(keys) != 1: + return Undefined + + if coerced_obj[keys[0]] is None: + return Undefined + return type_.out_type(coerced_obj) if is_leaf_type(type_): diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 8951a2d9..7df72c6e 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any, Mapping, cast from ...error import GraphQLError from ...language import ( @@ -12,16 +12,20 @@ FloatValueNode, IntValueNode, ListValueNode, + NonNullTypeNode, NullValueNode, ObjectFieldNode, ObjectValueNode, StringValueNode, ValueNode, + VariableDefinitionNode, + VariableNode, VisitorAction, print_ast, ) from ...pyutils import Undefined, did_you_mean, suggestion_list from ...type import ( + GraphQLInputObjectType, GraphQLScalarType, get_named_type, get_nullable_type, @@ -31,7 +35,7 @@ is_non_null_type, is_required_input_field, ) -from . import ValidationRule +from . import ValidationContext, ValidationRule __all__ = ["ValuesOfCorrectTypeRule"] @@ -45,6 +49,18 @@ class ValuesOfCorrectTypeRule(ValidationRule): See https://spec.graphql.org/draft/#sec-Values-of-Correct-Type """ + def __init__(self, context: ValidationContext) -> None: + super().__init__(context) + self.variable_definitions: dict[str, VariableDefinitionNode] = {} + + def enter_operation_definition(self, *_args: Any) -> None: + self.variable_definitions.clear() + + def enter_variable_definition( + self, definition: VariableDefinitionNode, *_args: Any + ) -> None: + self.variable_definitions[definition.variable.name.value] = definition + def enter_list_value(self, node: ListValueNode, *_args: Any) -> VisitorAction: # Note: TypeInfo will traverse into a list's item type, so look to the parent # input type to check if it is a list. 
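For context, the effect of the validation changes in this file can be seen end
to end. A minimal sketch (the schema and query below are made up for
illustration, and it assumes the schema-building and directive changes from
this patch are applied):

    from graphql import build_schema, parse, validate

    schema = build_schema("""
        type Query {
            test(arg: TestInput): String
        }

        input TestInput @oneOf {
            a: String
            b: Int
        }
    """)

    # Supplying more than one key violates the OneOf constraint:
    errors = validate(schema, parse('{ test(arg: {a: "x", b: 1}) }'))
    print(errors[0].message)
    # OneOf Input Object 'TestInput' must specify exactly one key.
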
@@ -72,6 +88,10 @@ def enter_object_value(self, node: ObjectValueNode, *_args: Any) -> VisitorActio node, ) ) + if type_.is_one_of: + validate_one_of_input_object( + self.context, node, type_, field_node_map, self.variable_definitions + ) return None def enter_object_field(self, node: ObjectFieldNode, *_args: Any) -> None: @@ -162,3 +182,51 @@ def is_valid_value_node(self, node: ValueNode) -> None: ) return + + +def validate_one_of_input_object( + context: ValidationContext, + node: ObjectValueNode, + type_: GraphQLInputObjectType, + field_node_map: Mapping[str, ObjectFieldNode], + variable_definitions: dict[str, VariableDefinitionNode], +) -> None: + keys = list(field_node_map) + is_not_exactly_one_filed = len(keys) != 1 + + if is_not_exactly_one_filed: + context.report_error( + GraphQLError( + f"OneOf Input Object '{type_.name}' must specify exactly one key.", + node, + ) + ) + return + + object_field_node = field_node_map.get(keys[0]) + value = object_field_node.value if object_field_node else None + is_null_literal = not value or isinstance(value, NullValueNode) + + if is_null_literal: + context.report_error( + GraphQLError( + f"Field '{type_.name}.{keys[0]}' must be non-null.", + node, + ) + ) + return + + is_variable = value and isinstance(value, VariableNode) + if is_variable: + variable_name = cast(VariableNode, value).name.value + definition = variable_definitions[variable_name] + is_nullable_variable = not isinstance(definition.type, NonNullTypeNode) + + if is_nullable_variable: + context.report_error( + GraphQLError( + f"Variable '{variable_name}' must be non-nullable" + f" to be used for OneOf Input Object '{type_.name}'.", + node, + ) + ) diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py new file mode 100644 index 00000000..2df1000d --- /dev/null +++ b/tests/execution/test_oneof.py @@ -0,0 +1,151 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Any + +from graphql.execution import ExecutionResult, execute +from graphql.language import parse +from graphql.utilities import build_schema + +if TYPE_CHECKING: + from graphql.pyutils import AwaitableOrValue + +schema = build_schema(""" + type Query { + test(input: TestInputObject!): TestObject + } + + input TestInputObject @oneOf { + a: String + b: Int + } + + type TestObject { + a: String + b: Int + } + """) + + +def execute_query( + query: str, root_value: Any, variable_values: dict[str, Any] | None = None +) -> AwaitableOrValue[ExecutionResult]: + return execute(schema, parse(query), root_value, variable_values=variable_values) + + +def describe_execute_handles_one_of_input_objects(): + def describe_one_of_input_objects(): + root_value = { + "test": lambda _info, input: input, # noqa: A002 + } + + def accepts_a_good_default_value(): + query = """ + query ($input: TestInputObject! = {a: "abc"}) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def rejects_a_bad_default_value(): + query = """ + query ($input: TestInputObject! = {a: "abc", b: 123}) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value) + + assert result == ( + {"test": None}, + [ + { + # This type of error would be caught at validation-time + # hence the vague error message here. + "message": "Argument 'input' of non-null type" + " 'TestInputObject!' 
must not be null.", + "locations": [(3, 31)], + "path": ["test"], + } + ], + ) + + def accepts_a_good_variable(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc"}}) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def accepts_a_good_variable_with_an_undefined_key(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc"}}) + + assert result == ({"test": {"a": "abc", "b": None}}, None) + + def rejects_a_variable_with_multiple_non_null_keys(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query(query, root_value, {"input": {"a": "abc", "b": 123}}) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " {'a': 'abc', 'b': 123}; Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + "locations": [(2, 24)], + } + ], + ) + + def rejects_a_variable_with_multiple_nullable_keys(): + query = """ + query ($input: TestInputObject!) { + test(input: $input) { + a + b + } + } + """ + result = execute_query( + query, root_value, {"input": {"a": "abc", "b": None}} + ) + + assert result == ( + None, + [ + { + "message": "Variable '$input' got invalid value" + " {'a': 'abc', 'b': None}; Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + "locations": [(2, 24)], + } + ], + ) diff --git a/tests/fixtures/schema_kitchen_sink.graphql b/tests/fixtures/schema_kitchen_sink.graphql index 8ec1f2d8..c1d9d06e 100644 --- a/tests/fixtures/schema_kitchen_sink.graphql +++ b/tests/fixtures/schema_kitchen_sink.graphql @@ -26,6 +26,7 @@ type Foo implements Bar & Baz & Two { five(argument: [String] = ["string", "string"]): String six(argument: InputType = {key: "value"}): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -115,6 +116,11 @@ input InputType { answer: Int = 42 } +input OneOfInputType @oneOf { + string: String + int: Int +} + input AnnotatedInput @onInputObject { annotatedField: Type @onInputFieldDefinition } diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 35da0b06..95fcac97 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -57,6 +57,7 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 five(argument: [String] = ["string", "string"]): String six(argument: InputType = { key: "value" }): Type seven(argument: Int = null): Type + eight(argument: OneOfInputType): Type } type AnnotatedObject @onObject(arg: "value") { @@ -139,6 +140,11 @@ def prints_kitchen_sink_without_altering_ast(kitchen_sink_sdl): # noqa: F811 answer: Int = 42 } + input OneOfInputType @oneOf { + string: String + int: Int + } + input AnnotatedInput @onInputObject { annotatedField: Type @onInputFieldDefinition } diff --git a/tests/type/test_introspection.py b/tests/type/test_introspection.py index 09a21c31..1a52f7a2 100644 --- a/tests/type/test_introspection.py +++ b/tests/type/test_introspection.py @@ -364,6 +364,17 @@ def executes_an_introspection_query(): "isDeprecated": False, "deprecationReason": None, }, + { + "name": "isOneOf", + "args": [], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": None, + }, + "isDeprecated": False, + 
"deprecationReason": None, + }, ], "inputFields": None, "interfaces": [], @@ -981,6 +992,12 @@ def executes_an_introspection_query(): } ], }, + { + "name": "oneOf", + "isRepeatable": False, + "locations": ["INPUT_OBJECT"], + "args": [], + }, ], } } @@ -1433,6 +1450,109 @@ def respects_the_include_deprecated_parameter_for_enum_values(): None, ) + def identifies_one_of_for_input_objects(): + schema = build_schema( + """ + input SomeInputObject @oneOf { + a: String + } + + input AnotherInputObject { + a: String + b: String + } + + type Query { + someField(someArg: SomeInputObject): String + anotherField(anotherArg: AnotherInputObject): String + } + """ + ) + + source = """ + { + oneOfInputObject: __type(name: "SomeInputObject") { + isOneOf + } + inputObject: __type(name: "AnotherInputObject") { + isOneOf + } + } + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "oneOfInputObject": { + "isOneOf": True, + }, + "inputObject": { + "isOneOf": False, + }, + }, + None, + ) + + def returns_null_for_one_of_for_other_types(): + schema = build_schema( + """ + type SomeObject implements SomeInterface { + fieldA: String + } + enum SomeEnum { + SomeObject + } + interface SomeInterface { + fieldA: String + } + union SomeUnion = SomeObject + type Query { + someField(enum: SomeEnum): SomeUnion + anotherField(enum: SomeEnum): SomeInterface + } + """ + ) + + source = """ + { + object: __type(name: "SomeObject") { + isOneOf + } + enum: __type(name: "SomeEnum") { + isOneOf + } + interface: __type(name: "SomeInterface") { + isOneOf + } + scalar: __type(name: "String") { + isOneOf + } + union: __type(name: "SomeUnion") { + isOneOf + } + } + """ + + assert graphql_sync(schema=schema, source=source) == ( + { + "object": { + "isOneOf": None, + }, + "enum": { + "isOneOf": None, + }, + "interface": { + "isOneOf": None, + }, + "scalar": { + "isOneOf": None, + }, + "union": { + "isOneOf": None, + }, + }, + None, + ) + def fails_as_expected_on_the_type_root_field_without_an_arg(): schema = build_schema( """ diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index eb4e2ab7..ab364e9f 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -1593,6 +1593,49 @@ def rejects_with_relevant_locations_for_a_non_input_type(): ] +def describe_type_system_one_of_input_object_fields_must_be_nullable(): + def rejects_non_nullable_fields(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String! 
+ } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b must be nullable.", + "locations": [(8, 18)], + } + ] + + def rejects_fields_with_default_values(): + schema = build_schema( + """ + type Query { + test(arg: SomeInputObject): String + } + + input SomeInputObject @oneOf { + a: String + b: String = "foo" + } + """ + ) + assert validate_schema(schema) == [ + { + "message": "OneOf input field SomeInputObject.b" + " cannot have a default value.", + "locations": [(8, 15)], + } + ] + + def describe_objects_must_adhere_to_interfaces_they_implement(): def accepts_an_object_which_implements_an_interface(): schema = build_schema( diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index a0aefb1a..b236025c 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -22,6 +22,7 @@ GraphQLInputField, GraphQLInt, GraphQLNamedType, + GraphQLOneOfDirective, GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, @@ -237,14 +238,15 @@ def supports_descriptions(): ) assert cycle_sdl(sdl) == sdl - def maintains_include_skip_and_specified_by_url_directives(): + def maintains_include_skip_and_three_other_directives(): schema = build_schema("type Query") - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective def overriding_directives_excludes_specified(): schema = build_schema( @@ -253,10 +255,11 @@ def overriding_directives_excludes_specified(): directive @include on FIELD directive @deprecated on FIELD_DEFINITION directive @specifiedBy on FIELD_DEFINITION + directive @oneOf on OBJECT """ ) - assert len(schema.directives) == 4 + assert len(schema.directives) == 5 get_directive = schema.get_directive assert get_directive("skip") is not GraphQLSkipDirective assert get_directive("skip") is not None @@ -266,19 +269,22 @@ def overriding_directives_excludes_specified(): assert get_directive("deprecated") is not None assert get_directive("specifiedBy") is not GraphQLSpecifiedByDirective assert get_directive("specifiedBy") is not None + assert get_directive("oneOf") is not GraphQLOneOfDirective + assert get_directive("oneOf") is not None - def adding_directives_maintains_include_skip_and_specified_by_directives(): + def adding_directives_maintains_include_skip_and_three_other_directives(): schema = build_schema( """ directive @foo(arg: Int) on FIELD """ ) - assert len(schema.directives) == 5 + assert len(schema.directives) == 6 assert schema.get_directive("skip") is GraphQLSkipDirective assert schema.get_directive("include") is GraphQLIncludeDirective assert schema.get_directive("deprecated") is GraphQLDeprecatedDirective assert schema.get_directive("specifiedBy") is GraphQLSpecifiedByDirective + assert schema.get_directive("oneOf") is GraphQLOneOfDirective assert schema.get_directive("foo") is not None def type_modifiers(): diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index 61b1feab..c18b5098 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -250,6 +250,99 @@ def transforms_values_with_out_type(): result = 
_coerce_value({"real": 1, "imag": 2}, ComplexInputObject) assert expect_value(result) == 1 + 2j + def describe_for_graphql_input_object_that_is_one_of(): + TestInputObject = GraphQLInputObjectType( + "TestInputObject", + { + "foo": GraphQLInputField(GraphQLInt), + "bar": GraphQLInputField(GraphQLInt), + }, + is_one_of=True, + ) + + def returns_no_error_for_a_valid_input(): + result = _coerce_value({"foo": 123}, TestInputObject) + assert expect_value(result) == {"foo": 123} + + def returns_an_error_if_more_than_one_field_is_specified(): + result = _coerce_value({"foo": 123, "bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": 123, "bar": None}, + ) + ] + + def returns_an_error_if_the_one_field_is_null(): + result = _coerce_value({"bar": None}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bar' must be non-null.", + ["bar"], + None, + ) + ] + + def returns_an_error_for_an_invalid_field(): + result = _coerce_value({"foo": nan}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: nan", + ["foo"], + nan, + ) + ] + + def returns_multiple_errors_for_multiple_invalid_fields(): + result = _coerce_value({"foo": "abc", "bar": "def"}, TestInputObject) + assert expect_errors(result) == [ + ( + "Int cannot represent non-integer value: 'abc'", + ["foo"], + "abc", + ), + ( + "Int cannot represent non-integer value: 'def'", + ["bar"], + "def", + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"foo": "abc", "bar": "def"}, + ), + ] + + def returns_an_error_for_an_unknown_field(): + result = _coerce_value({"foo": 123, "unknownField": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'unknownField' is not defined by type 'TestInputObject'.", + [], + {"foo": 123, "unknownField": 123}, + ) + ] + + def returns_an_error_for_a_misspelled_field(): + result = _coerce_value({"bart": 123}, TestInputObject) + assert expect_errors(result) == [ + ( + "Field 'bart' is not defined by type 'TestInputObject'." + " Did you mean 'bar'?", + [], + {"bart": 123}, + ), + ( + "Exactly one key must be specified" + " for OneOf type 'TestInputObject'.", + [], + {"bart": 123}, + ), + ] + def describe_for_graphql_input_object_with_default_value(): def _get_test_input_object(default_value): return GraphQLInputObjectType( diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index c9003a6c..24d03704 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -1,6 +1,7 @@ from graphql.type import ( GraphQLDeprecatedDirective, GraphQLIncludeDirective, + GraphQLOneOfDirective, GraphQLSchema, GraphQLSkipDirective, GraphQLSpecifiedByDirective, @@ -817,6 +818,7 @@ def should_detect_if_a_directive_was_implicitly_removed(): GraphQLSkipDirective, GraphQLIncludeDirective, GraphQLSpecifiedByDirective, + GraphQLOneOfDirective, ] ) diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 1939ed59..878d0770 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -771,6 +771,9 @@ def prints_introspection_schema(): url: String! ) on SCALAR + """Indicates an Input Object is a OneOf Input Object.""" + directive @oneOf on INPUT_OBJECT + """ A GraphQL Schema defines the capabilities of a GraphQL server. 
It exposes all available types and directives on the server, as well as the entry points for query, mutation, and subscription operations. """ @@ -813,6 +816,7 @@ def prints_introspection_schema(): enumValues(includeDeprecated: Boolean = false): [__EnumValue!] inputFields(includeDeprecated: Boolean = false): [__InputValue!] ofType: __Type + isOneOf: Boolean } """An enum describing what kind of type a given `__Type` is.""" diff --git a/tests/utilities/test_value_from_ast.py b/tests/utilities/test_value_from_ast.py index f21abcc2..6622b4dc 100644 --- a/tests/utilities/test_value_from_ast.py +++ b/tests/utilities/test_value_from_ast.py @@ -174,6 +174,15 @@ def coerces_non_null_lists_of_non_null_values(): }, ) + test_one_of_input_obj = GraphQLInputObjectType( + "TestOneOfInput", + { + "a": GraphQLInputField(GraphQLString), + "b": GraphQLInputField(GraphQLString), + }, + is_one_of=True, + ) + def coerces_input_objects_according_to_input_coercion_rules(): assert _value_from("null", test_input_obj) is None assert _value_from("[]", test_input_obj) is Undefined @@ -193,6 +202,14 @@ def coerces_input_objects_according_to_input_coercion_rules(): ) assert _value_from("{ requiredBool: null }", test_input_obj) is Undefined assert _value_from("{ bool: true }", test_input_obj) is Undefined + assert _value_from('{ a: "abc" }', test_one_of_input_obj) == {"a": "abc"} + assert _value_from('{ b: "def" }', test_one_of_input_obj) == {"b": "def"} + assert _value_from('{ a: "abc", b: None }', test_one_of_input_obj) is Undefined + assert _value_from("{ a: null }", test_one_of_input_obj) is Undefined + assert _value_from("{ a: 1 }", test_one_of_input_obj) is Undefined + assert _value_from('{ a: "abc", b: "def" }', test_one_of_input_obj) is Undefined + assert _value_from("{}", test_one_of_input_obj) is Undefined + assert _value_from('{ c: "abc" }', test_one_of_input_obj) is Undefined def accepts_variable_values_assuming_already_coerced(): assert _value_from("$var", GraphQLBoolean, {}) is Undefined diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 1189e922..9a6912f4 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -86,6 +86,11 @@ stringListField: [String] } + input OneOfInput @oneOf { + stringField: String + intField: Int + } + type ComplicatedArgs { # TODO List # TODO Coercion @@ -100,6 +105,7 @@ stringListArgField(stringListArg: [String]): String stringListNonNullArgField(stringListNonNullArg: [String!]): String complexArgField(complexArg: ComplexInput): String + oneOfArgField(oneOfArg: OneOfInput): String multipleReqs(req1: Int!, req2: Int!): String nonNullFieldWithDefault(arg: Int! = 0): String multipleOpts(opt1: Int = 0, opt2: Int = 0): String diff --git a/tests/validation/test_values_of_correct_type.py b/tests/validation/test_values_of_correct_type.py index e19228aa..7cf20648 100644 --- a/tests/validation/test_values_of_correct_type.py +++ b/tests/validation/test_values_of_correct_type.py @@ -931,6 +931,29 @@ def full_object_with_fields_in_different_order(): """ ) + def describe_valid_one_of_input_object_value(): + def exactly_one_field(): + assert_valid( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc" }) + } + } + """ + ) + + def exactly_one_non_nullable_variable(): + assert_valid( + """ + query ($string: String!) 
{ + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """ + ) + def describe_invalid_input_object_value(): def partial_object_missing_required(): assert_errors( @@ -1097,6 +1120,77 @@ def allows_custom_scalar_to_accept_complex_literals(): schema=schema, ) + def describe_invalid_one_of_input_object_value(): + def invalid_field_type(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: 2 }) + } + } + """, + [ + { + "message": "String cannot represent a non string value: 2", + "locations": [(4, 60)], + }, + ], + ) + + def exactly_one_null_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: null }) + } + } + """, + [ + { + "message": "Field 'OneOfInput.stringField' must be non-null.", + "locations": [(4, 45)], + }, + ], + ) + + def exactly_one_nullable_variable(): + assert_errors( + """ + query ($string: String) { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: $string }) + } + } + """, + [ + { + "message": "Variable 'string' must be non-nullable to be used" + " for OneOf Input Object 'OneOfInput'.", + "locations": [(4, 45)], + }, + ], + ) + + def more_than_one_field(): + assert_errors( + """ + { + complicatedArgs { + oneOfArgField(oneOfArg: { stringField: "abc", intField: 123 }) + } + } + """, + [ + { + "message": "OneOf Input Object 'OneOfInput'" + " must specify exactly one key.", + "locations": [(4, 45)], + }, + ], + ) + def describe_directive_arguments(): def with_directives_of_valid_types(): assert_valid( From 4933704a0f9af3bcce5f4b6178efd68dc33c092c Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:29:06 +0200 Subject: [PATCH 09/50] Use American English Replicates graphql/graphql-js@82ff6539a5b961b00367ed7d6ac57a7297af2a9a --- src/graphql/type/directives.py | 6 +++--- tests/utilities/test_build_ast_schema.py | 5 +++-- tests/utilities/test_print_schema.py | 4 ++-- 3 files changed, 8 insertions(+), 7 deletions(-) diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 46201d38..d4160300 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -248,17 +248,17 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Marks an element of a GraphQL schema as no longer supported.", ) -# Used to provide a URL for specifying the behaviour of custom scalar definitions: +# Used to provide a URL for specifying the behavior of custom scalar definitions: GraphQLSpecifiedByDirective = GraphQLDirective( name="specifiedBy", locations=[DirectiveLocation.SCALAR], args={ "url": GraphQLArgument( GraphQLNonNull(GraphQLString), - description="The URL that specifies the behaviour of this scalar.", + description="The URL that specifies the behavior of this scalar.", ) }, - description="Exposes a URL that specifies the behaviour of this scalar.", + description="Exposes a URL that specifies the behavior of this scalar.", ) # Used to declare an Input Object as a OneOf Input Objects. 
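For reference, the `oneOf` directive introduced in the previous patch (its
comment line closes the hunk above) is rendered in SDL exactly as the
test_print_schema expectations added there show:

    """Indicates an Input Object is a OneOf Input Object."""
    directive @oneOf on INPUT_OBJECT
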
diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index b236025c..d4c2dff9 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -7,6 +7,7 @@ from typing import Union import pytest + from graphql import graphql_sync from graphql.language import DocumentNode, InterfaceTypeDefinitionNode, parse, print_ast from graphql.type import ( @@ -1139,7 +1140,7 @@ def can_build_invalid_schema(): assert errors def do_not_override_standard_types(): - # Note: not sure it's desired behaviour to just silently ignore override + # Note: not sure it's desired behavior to just silently ignore override # attempts so just documenting it here. schema = build_schema( @@ -1252,7 +1253,7 @@ def can_deep_copy_pickled_schema(): # check that printing the copied schema gives the same SDL assert print_schema(copied) == sdl - @pytest.mark.slow() + @pytest.mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 878d0770..0e96bbbc 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -765,9 +765,9 @@ def prints_introspection_schema(): reason: String = "No longer supported" ) on FIELD_DEFINITION | ARGUMENT_DEFINITION | INPUT_FIELD_DEFINITION | ENUM_VALUE - """Exposes a URL that specifies the behaviour of this scalar.""" + """Exposes a URL that specifies the behavior of this scalar.""" directive @specifiedBy( - """The URL that specifies the behaviour of this scalar.""" + """The URL that specifies the behavior of this scalar.""" url: String! ) on SCALAR From 448d0455e26441e54405413729e501324c20de4a Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 7 Sep 2024 19:39:39 +0200 Subject: [PATCH 10/50] Update ruff and adapt code style --- pyproject.toml | 2 +- src/graphql/execution/execute.py | 2 +- src/graphql/pyutils/suggestion_list.py | 3 +- tests/execution/test_abstract.py | 1 + tests/execution/test_customize.py | 7 +- tests/execution/test_defer.py | 39 +++++---- tests/execution/test_execution_result.py | 1 + tests/execution/test_executor.py | 9 +- tests/execution/test_lists.py | 29 ++++--- tests/execution/test_map_async_iterable.py | 31 +++---- tests/execution/test_middleware.py | 7 +- tests/execution/test_mutations.py | 11 +-- tests/execution/test_nonnull.py | 21 ++--- tests/execution/test_parallel.py | 11 +-- tests/execution/test_stream.py | 87 ++++++++++--------- tests/execution/test_subscribe.py | 55 ++++++------ tests/execution/test_sync.py | 15 ++-- tests/language/test_block_string_fuzz.py | 3 +- tests/language/test_lexer.py | 1 + tests/language/test_parser.py | 1 + tests/language/test_printer.py | 1 + tests/language/test_schema_parser.py | 1 + tests/language/test_schema_printer.py | 1 + tests/language/test_source.py | 1 + tests/language/test_visitor.py | 1 + tests/pyutils/test_async_reduce.py | 9 +- tests/pyutils/test_description.py | 1 + tests/pyutils/test_format_list.py | 1 + tests/pyutils/test_inspect.py | 3 +- tests/pyutils/test_is_awaitable.py | 7 +- tests/pyutils/test_simple_pub_sub.py | 9 +- tests/pyutils/test_undefined.py | 1 + tests/star_wars_schema.py | 1 - tests/test_star_wars_query.py | 37 ++++---- tests/test_user_registry.py | 13 +-- tests/type/test_assert_name.py | 1 + tests/type/test_definition.py | 1 + tests/type/test_directives.py | 1 + tests/type/test_extensions.py | 1 
+ tests/type/test_predicate.py | 1 + tests/type/test_scalars.py | 1 + tests/type/test_schema.py | 1 + tests/type/test_validation.py | 1 + tests/utilities/test_ast_from_value.py | 1 + tests/utilities/test_build_client_schema.py | 1 + tests/utilities/test_coerce_input_value.py | 1 + tests/utilities/test_extend_schema.py | 1 + .../test_introspection_from_schema.py | 3 +- .../test_strip_ignored_characters.py | 1 + .../test_strip_ignored_characters_fuzz.py | 21 ++--- tests/utilities/test_type_from_ast.py | 1 + .../test_assert_equal_awaitables_or_values.py | 6 +- tests/validation/test_validation.py | 1 + tox.ini | 2 +- 54 files changed, 258 insertions(+), 212 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 3b9342b1..c3c2367c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,7 +75,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.5.7,<0.6" +ruff = ">=0.6.4,<0.7" mypy = [ { version = "^1.11", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index e370bcc1..ae56c9b9 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -96,7 +96,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator: AsyncIterator) -> Any: # noqa: A001 + async def anext(iterator: AsyncIterator) -> Any: """Return the next item from an async iterator.""" return await iterator.__anext__() diff --git a/src/graphql/pyutils/suggestion_list.py b/src/graphql/pyutils/suggestion_list.py index 6abeefed..35240c77 100644 --- a/src/graphql/pyutils/suggestion_list.py +++ b/src/graphql/pyutils/suggestion_list.py @@ -99,8 +99,7 @@ def measure(self, option: str, threshold: int) -> int | None: double_diagonal_cell = rows[(i - 2) % 3][j - 2] current_cell = min(current_cell, double_diagonal_cell + 1) - if current_cell < smallest_cell: - smallest_cell = current_cell + smallest_cell = min(current_cell, smallest_cell) current_row[j] = current_cell diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index b5ebc45b..d7d12b7a 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -3,6 +3,7 @@ from typing import Any, NamedTuple import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import is_awaitable diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 23740237..85462147 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -1,6 +1,7 @@ from inspect import isasyncgen import pytest + from graphql.execution import ExecutionContext, execute, subscribe from graphql.language import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -9,7 +10,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -62,7 +63,7 @@ def execute_field( def describe_customize_subscription(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def uses_a_custom_subscribe_field_resolver(): schema = GraphQLSchema( query=GraphQLObjectType("Query", {"foo": GraphQLField(GraphQLString)}), @@ -91,7 +92,7 @@ async def custom_foo(): await subscription.aclose() - 
@pytest.mark.asyncio() + @pytest.mark.asyncio async def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): def build_resolve_info(self, *args, **kwargs): diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 6b39f74e..312a2a0b 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -4,6 +4,7 @@ from typing import Any, AsyncGenerator, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.execution import ( ExecutionResult, @@ -333,7 +334,7 @@ def can_print_deferred_fragment_record(): "path=['bar'], label='foo', parent_context, data)" ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_containing_scalar_types(): document = parse( """ @@ -358,7 +359,7 @@ async def can_defer_fragments_containing_scalar_types(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_disable_defer_using_if_argument(): document = parse( """ @@ -384,7 +385,7 @@ async def can_disable_defer_using_if_argument(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_disable_defer_with_null_if_argument(): document = parse( """ @@ -409,7 +410,7 @@ async def does_not_disable_defer_with_null_if_argument(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_an_error_for_defer_directive_with_non_string_label(): document = parse( """ @@ -430,7 +431,7 @@ async def throws_an_error_for_defer_directive_with_non_string_label(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_on_the_top_level_query_field(): document = parse( """ @@ -456,7 +457,7 @@ async def can_defer_fragments_on_the_top_level_query_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fragments_with_errors_on_the_top_level_query_field(): document = parse( """ @@ -493,7 +494,7 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_within_an_already_deferred_fragment(): document = parse( """ @@ -540,7 +541,7 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): document = parse( """ @@ -571,7 +572,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first(): document = parse( """ @@ -602,7 +603,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_an_inline_fragment(): document = parse( """ @@ -632,7 +633,7 @@ async def can_defer_an_inline_fragment(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -669,7 +670,7 @@ async def handles_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -709,7 +710,7 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_non_nullable_errors_thrown_outside_deferred_fragments(): document = parse( """ @@ -740,7 +741,7 @@ async def 
handles_non_nullable_errors_thrown_outside_deferred_fragments(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): document = parse( """ @@ -780,7 +781,7 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_in_correct_order(): document = parse( """ @@ -833,7 +834,7 @@ async def returns_payloads_in_correct_order(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_from_synchronous_data_in_correct_order(): document = parse( """ @@ -886,7 +887,7 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): document = parse( """ @@ -920,7 +921,7 @@ async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): document = parse( """ @@ -938,7 +939,7 @@ async def original_execute_function_throws_error_if_deferred_and_all_is_sync(): " multiple payloads (due to @defer or @stream directive)" ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def original_execute_function_throws_error_if_deferred_and_not_all_is_sync(): document = parse( """ diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 28ba17af..162bd00d 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.execution import ExecutionResult diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 5ea1f25b..792066f1 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable, cast import pytest + from graphql.error import GraphQLError from graphql.execution import execute, execute_sync from graphql.language import FieldNode, OperationDefinitionNode, parse @@ -41,7 +42,7 @@ def accepts_positional_arguments(): assert result == ({"a": "rootValue"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def executes_arbitrary_code(): # noinspection PyMethodMayBeStatic,PyMethodMayBeStatic class Data: @@ -375,7 +376,7 @@ def resolve(_obj, _info, **args): assert len(resolved_args) == 1 assert resolved_args[0] == {"numArg": 123, "stringArg": "foo"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def nulls_out_error_subtrees(): document = parse( """ @@ -868,7 +869,7 @@ def resolves_to_an_error_if_schema_does_not_support_operation(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correct_field_ordering_despite_execution_order(): schema = GraphQLSchema( GraphQLObjectType( @@ -984,7 +985,7 @@ def does_not_include_arguments_that_were_not_set(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def fails_when_is_type_of_check_is_not_met(): class Special: value: str diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 3d2bb8fa..5dc4b5f0 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -1,6 +1,7 @@ from typing import Any, AsyncGenerator import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils 
import is_awaitable @@ -171,7 +172,7 @@ async def _list_field( assert is_awaitable(result) return await result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_an_async_generator_as_a_list_value(): async def list_field(): yield "two" @@ -183,7 +184,7 @@ async def list_field(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_a_custom_async_iterable_as_a_list_value(): class ListField: def __aiter__(self): @@ -202,7 +203,7 @@ async def __anext__(self): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_an_async_generator_that_throws(): async def list_field(): yield "two" @@ -214,7 +215,7 @@ async def list_field(): [{"message": "bad", "locations": [(1, 3)], "path": ["listField"]}], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_an_async_generator_where_intermediate_value_triggers_an_error(): async def list_field(): yield "two" @@ -232,7 +233,7 @@ async def list_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_errors_from_complete_value_in_async_iterables(): async def list_field(): yield "two" @@ -249,7 +250,7 @@ async def list_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_functions_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index @@ -259,7 +260,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_single_async_functions_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: return data.index @@ -269,7 +270,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_errors_from_complete_value_in_async_iterables(): async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: index = data.index @@ -288,7 +289,7 @@ async def resolve(data: _IndexData, _info: GraphQLResolveInfo) -> int: ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_nulls_yielded_by_async_generator(): async def list_field(): yield 1 @@ -322,7 +323,7 @@ def execute_query(list_value: Any) -> Any: return result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_values(): list_field = [1, 2] assert await _complete(list_field, "[Int]") == ({"listField": [1, 2]}, None) @@ -330,7 +331,7 @@ async def contains_values(): assert await _complete(list_field, "[Int!]") == ({"listField": [1, 2]}, None) assert await _complete(list_field, "[Int!]!") == ({"listField": [1, 2]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_null(): list_field = [1, None, 2] errors = [ @@ -351,7 +352,7 @@ async def contains_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, errors) assert await _complete(list_field, "[Int!]!") == (None, errors) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): list_field = None errors = [ @@ -366,7 +367,7 @@ async def returns_null(): assert await _complete(list_field, "[Int!]") == ({"listField": None}, None) assert await _complete(list_field, "[Int!]!") == (None, errors) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def contains_error(): list_field = [1, RuntimeError("bad"), 2] errors = [ @@ -393,7 +394,7 @@ async def contains_error(): errors, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def results_in_errors(): 
list_field = RuntimeError("bad") errors = [ diff --git a/tests/execution/test_map_async_iterable.py b/tests/execution/test_map_async_iterable.py index 055a61bc..eb3cddb8 100644 --- a/tests/execution/test_map_async_iterable.py +++ b/tests/execution/test_map_async_iterable.py @@ -1,11 +1,12 @@ import pytest + from graphql.execution import map_async_iterable try: # pragma: no cover anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -21,7 +22,7 @@ async def throw(_x: int) -> int: def describe_map_async_iterable(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def maps_over_async_generator(): async def source(): yield 1 @@ -36,7 +37,7 @@ async def source(): with pytest.raises(StopAsyncIteration): assert await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def maps_over_async_iterable(): items = [1, 2, 3] @@ -57,7 +58,7 @@ async def __anext__(self): assert not items assert values == [2, 4, 6] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def compatible_with_async_for(): async def source(): yield 1 @@ -70,7 +71,7 @@ async def source(): assert values == [2, 4, 6] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_returning_early_from_mapped_async_generator(): async def source(): yield 1 @@ -91,7 +92,7 @@ async def source(): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_returning_early_from_mapped_async_iterable(): items = [1, 2, 3] @@ -119,7 +120,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_throwing_errors_through_async_iterable(): items = [1, 2, 3] @@ -150,7 +151,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_throwing_errors_with_traceback_through_async_iterables(): class Iterable: def __aiter__(self): @@ -177,7 +178,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(one) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_map_over_thrown_errors(): async def source(): yield 1 @@ -192,7 +193,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_map_over_externally_thrown_errors(): async def source(): yield 1 @@ -206,7 +207,7 @@ async def source(): assert str(exc_info.value) == "Goodbye" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_is_closed_when_mapped_iterable_is_closed(): class Iterable: def __init__(self): @@ -230,7 +231,7 @@ async def aclose(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_is_closed_on_callback_error(): class Iterable: def __init__(self): @@ -253,7 +254,7 @@ async def aclose(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterable_exits_on_callback_error(): exited = False @@ -272,7 +273,7 @@ async def iterable(): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mapped_iterable_is_closed_when_iterable_cannot_be_closed(): class Iterable: def 
__aiter__(self): @@ -287,7 +288,7 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(doubles) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def ignores_that_iterable_cannot_be_closed_on_callback_error(): class Iterable: def __aiter__(self): diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index d4abba95..291f218c 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -2,6 +2,7 @@ from typing import Awaitable, cast import pytest + from graphql.execution import Middleware, MiddlewareManager, execute, subscribe from graphql.language.parser import parse from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString @@ -90,7 +91,7 @@ def capitalize_middleware(next_, *args, **kwargs): assert result.data == {"first": "Eno", "second": "Owt"} # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def single_async_function(): doc = parse("{ first second }") @@ -200,7 +201,7 @@ def resolve(self, next_, *args, **kwargs): ) assert result.data == {"field": "devloseR"} # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def with_async_function_and_object(): doc = parse("{ field }") @@ -237,7 +238,7 @@ async def resolve(self, next_, *args, **kwargs): result = await awaitable_result assert result.data == {"field": "devloseR"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscription_simple(): async def bar_resolve(_obj, _info): yield "bar" diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 20ee1c97..3737bb6a 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable import pytest + from graphql.execution import ( ExperimentalIncrementalExecutionResults, execute, @@ -106,7 +107,7 @@ async def promise_to_get_the_number(holder: NumberHolder, _info) -> int: def describe_execute_handles_mutation_execution_ordering(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def evaluates_mutations_serially(): document = parse( """ @@ -154,7 +155,7 @@ def does_not_include_illegal_mutation_fields_in_output(): result = execute_sync(schema=schema, document=document) assert result == ({}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): document = parse( """ @@ -211,7 +212,7 @@ async def evaluates_mutations_correctly_in_presence_of_a_failed_mutation(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_fields_with_defer_do_not_block_next_mutation(): document = parse( """ @@ -256,7 +257,7 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_inside_of_a_fragment(): document = parse( """ @@ -282,7 +283,7 @@ async def mutation_inside_of_a_fragment(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def mutation_with_defer_is_not_executed_serially(): document = parse( """ diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index 053009a9..99810ed9 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -3,6 +3,7 @@ from typing import Any, Awaitable, cast import pytest + from graphql.execution import ExecutionResult, execute, execute_sync from graphql.language import parse from graphql.pyutils import AwaitableOrValue @@ -125,12 +126,12 @@ def describe_nulls_a_nullable_field(): } 
""" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ({"sync": None}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -153,7 +154,7 @@ def describe_nulls_a_returned_object_that_contains_a_non_null_field(): } """ - @pytest.mark.asyncio() + @pytest.mark.asyncio async def that_returns_null(): result = await execute_sync_and_async(query, NullingData()) assert result == ( @@ -168,7 +169,7 @@ async def that_returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def that_throws(): result = await execute_sync_and_async(query, ThrowingData()) assert result == ( @@ -214,14 +215,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) ) assert result == (data, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -348,7 +349,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): "anotherPromiseNest": None, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await cast( Awaitable[ExecutionResult], execute_query(query, NullingData()) @@ -411,7 +412,7 @@ async def returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await cast( Awaitable[ExecutionResult], execute_query(query, ThrowingData()) @@ -477,7 +478,7 @@ def describe_nulls_the_top_level_if_non_nullable_field(): } """ - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_null(): result = await execute_sync_and_async(query, NullingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 @@ -493,7 +494,7 @@ async def returns_null(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws(): result = await execute_sync_and_async(query, ThrowingData()) await asyncio.sleep(0) # strangely needed to get coverage on Python 3.11 diff --git a/tests/execution/test_parallel.py b/tests/execution/test_parallel.py index faacd0c4..f4dc86b1 100644 --- a/tests/execution/test_parallel.py +++ b/tests/execution/test_parallel.py @@ -2,6 +2,7 @@ from typing import Awaitable import pytest + from graphql.execution import execute from graphql.language import parse from graphql.type import ( @@ -31,7 +32,7 @@ async def wait(self) -> bool: def describe_parallel_execution(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_single_field(): # make sure that the special case of resolving a single field works async def resolve(*_args): @@ -52,7 +53,7 @@ async def resolve(*_args): assert result == ({"foo": True}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_fields_in_parallel(): barrier = Barrier(2) @@ -78,7 +79,7 @@ async def resolve(*_args): assert result == ({"foo": True, "bar": True}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_single_element_list(): # make sure that the special case of resolving a single element list works async def resolve(*_args): @@ -97,7 +98,7 @@ async def resolve(*_args): assert result == ({"foo": [True]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_list_in_parallel(): barrier = Barrier(2) @@ -127,7 
+128,7 @@ async def resolve_list(*args): assert result == ({"foo": [True, True]}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolve_is_type_of_in_parallel(): FooType = GraphQLInterfaceType("Foo", {"foo": GraphQLField(GraphQLString)}) diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 46a53b56..8a1ca605 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -4,6 +4,7 @@ from typing import Any, Awaitable, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.execution import ( ExecutionResult, @@ -28,7 +29,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -217,7 +218,7 @@ def can_compare_incremental_stream_result(): assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) assert result != {**args, "label": "baz"} - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_list_field(): document = parse("{ scalarList @stream(initialCount: 1) }") result = await complete( @@ -240,7 +241,7 @@ async def can_stream_a_list_field(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_use_default_value_of_initial_count(): document = parse("{ scalarList @stream }") result = await complete( @@ -267,7 +268,7 @@ async def can_use_default_value_of_initial_count(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def negative_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: -2) }") result = await complete( @@ -286,7 +287,7 @@ async def negative_values_of_initial_count_throw_field_errors(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def non_integer_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: 1.5) }") result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) @@ -303,7 +304,7 @@ async def non_integer_values_of_initial_count_throw_field_errors(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_label_from_stream_directive(): document = parse( '{ scalarList @stream(initialCount: 1, label: "scalar-stream") }' @@ -340,7 +341,7 @@ async def returns_label_from_stream_directive(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_an_error_for_stream_directive_with_non_string_label(): document = parse("{ scalarList @stream(initialCount: 1, label: 42) }") result = await complete(document, {"scalarList": ["some apples"]}) @@ -360,7 +361,7 @@ async def throws_an_error_for_stream_directive_with_non_string_label(): ], } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_disable_stream_using_if_argument(): document = parse("{ scalarList @stream(initialCount: 0, if: false) }") result = await complete( @@ -372,7 +373,7 @@ async def can_disable_stream_using_if_argument(): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_disable_stream_with_null_if_argument(): document = parse( @@ -400,7 +401,7 @@ async def does_not_disable_stream_with_null_if_argument(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_multi_dimensional_lists(): document = parse("{ scalarListList @stream(initialCount: 1) }") result = await complete( @@ -440,7 +441,7 @@ async 
def can_stream_multi_dimensional_lists(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_a_list_of_awaitables(): document = parse( """ @@ -482,7 +483,7 @@ async def await_friend(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_in_correct_order_with_list_of_awaitables(): document = parse( """ @@ -537,7 +538,7 @@ async def await_friend(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_a_list_with_nested_async_fields(): document = parse( """ @@ -585,7 +586,7 @@ async def get_id(f): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_list_of_awaitables_before_initial_count_reached(): document = parse( """ @@ -635,7 +636,7 @@ async def await_friend(f, i): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_list_of_awaitables_after_initial_count_reached(): document = parse( """ @@ -694,7 +695,7 @@ async def await_friend(f, i): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_an_async_iterable(): document = parse( """ @@ -750,7 +751,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count(): document = parse( """ @@ -793,7 +794,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def negative_initial_count_throw_error_on_field_returning_async_iterable(): document = parse( """ @@ -821,7 +822,7 @@ async def friend_list(_info): "data": {"friendList": None}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_handle_concurrent_calls_to_next_without_waiting(): document = parse( """ @@ -869,7 +870,7 @@ async def friend_list(_info): {"done": True, "value": None}, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_async_iterable_before_initial_count_is_reached(): document = parse( """ @@ -900,7 +901,7 @@ async def friend_list(_info): "data": {"friendList": None}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_in_async_iterable_after_initial_count_is_reached(): document = parse( """ @@ -945,7 +946,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): document = parse( """ @@ -986,7 +987,7 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_null_for_non_null_async_items_after_initial_count_is_reached(): document = parse( """ @@ -1034,7 +1035,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_error_thrown_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1073,7 +1074,7 @@ async def scalar_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1135,7 +1136,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_nested_async_error_in_complete_value_after_initial_count(): document = parse( """ @@ -1196,7 +1197,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_count_non_null(): document = parse( """ @@ -1249,7 +1250,7 @@ def 
get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_nested_async_error_in_complete_value_after_initial_non_null(): document = parse( """ @@ -1301,7 +1302,7 @@ def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_after_initial_from_async_iterable(): document = parse( """ @@ -1367,7 +1368,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_async_error_in_complete_value_from_async_iterable_non_null(): document = parse( """ @@ -1421,7 +1422,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_payloads_that_are_nulled(): document = parse( """ @@ -1472,7 +1473,7 @@ async def friend_list(_info): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): document = parse( """ @@ -1515,7 +1516,7 @@ async def friend_list(_info): }, } - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): document = parse( @@ -1584,7 +1585,7 @@ async def friend_list(_info): {"hasNext": False}, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): document = parse( @@ -1655,7 +1656,7 @@ async def friend_list(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): document = parse( """ @@ -1716,7 +1717,7 @@ async def friend_list(_info): ] @pytest.mark.timeout(1) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered(): finished = False @@ -1795,7 +1796,7 @@ async def iterable(_info): assert not finished # running iterator cannot be canceled - @pytest.mark.asyncio() + @pytest.mark.asyncio async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): document = parse( """ @@ -1858,7 +1859,7 @@ async def get_friends(_info): }, ] - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): resolve_slow_field = Event() @@ -1944,7 +1945,7 @@ async def get_friends(_info): await anext(iterator) @pytest.mark.timeout(1) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fields_that_are_resolved_after_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -2022,7 +2023,7 @@ async def get_friends(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def can_defer_fields_that_are_resolved_before_async_iterable_is_complete(): resolve_slow_field = Event() resolve_iterable = Event() @@ -2106,7 +2107,7 @@ async def get_friends(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_returned_generator_is_closed(): finished = False @@ -2146,7 +2147,7 @@ async def iterable(_info): await sleep(0) assert finished - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_underlying_iterator_has_no_close_method(): class Iterable: def __init__(self): @@ -2197,7 +2198,7 @@ async def __anext__(self): 
await sleep(0) assert iterable.index == 4 - @pytest.mark.asyncio() + @pytest.mark.asyncio async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): finished = False diff --git a/tests/execution/test_subscribe.py b/tests/execution/test_subscribe.py index fcbd13ef..8a6b4c38 100644 --- a/tests/execution/test_subscribe.py +++ b/tests/execution/test_subscribe.py @@ -13,6 +13,7 @@ ) import pytest + from graphql.execution import ( ExecutionResult, create_source_event_stream, @@ -44,7 +45,7 @@ anext # noqa: B018 except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins - async def anext(iterator): # noqa: A001 + async def anext(iterator): """Return the next item from an async iterator.""" return await iterator.__anext__() @@ -197,7 +198,7 @@ def subscribe_with_bad_args( # Check all error cases when initializing the subscription. def describe_subscription_initialization_phase(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_positional_arguments(): document = parse( """ @@ -217,7 +218,7 @@ async def empty_async_iterable(_info): await anext(ai) await ai.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_multiple_subscription_fields_defined_in_schema(): schema = GraphQLSchema( query=DummyQueryType, @@ -242,7 +243,7 @@ async def foo_generator(_info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_type_definition_with_sync_subscribe_function(): async def foo_generator(_obj, _info): yield {"foo": "FooValue"} @@ -262,7 +263,7 @@ async def foo_generator(_obj, _info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_type_definition_with_async_subscribe_function(): async def foo_generator(_obj, _info): await asyncio.sleep(0) @@ -290,7 +291,7 @@ async def subscribe_fn(obj, info): await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_only_resolve_the_first_field_of_invalid_multi_field(): did_resolve = {"foo": False, "bar": False} @@ -325,7 +326,7 @@ async def subscribe_bar(_obj, _info): # pragma: no cover await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): schema = GraphQLSchema(query=DummyQueryType) document = parse("subscription { unknownField }") @@ -343,7 +344,7 @@ async def resolves_to_an_error_if_schema_does_not_support_subscriptions(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_for_unknown_subscription_field(): schema = GraphQLSchema( query=DummyQueryType, @@ -364,7 +365,7 @@ async def resolves_to_an_error_for_unknown_subscription_field(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_pass_through_unexpected_errors_thrown_in_subscribe(): schema = GraphQLSchema( query=DummyQueryType, @@ -375,7 +376,7 @@ async def should_pass_through_unexpected_errors_thrown_in_subscribe(): with pytest.raises(AttributeError): subscribe_with_bad_args(schema=schema, document={}) # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_an_error_if_subscribe_does_not_return_an_iterator(): expected_result = ( @@ -405,7 +406,7 @@ async def async_fn(obj, info): del result cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio async def 
resolves_to_an_error_for_subscription_resolver_errors(): expected_result = ( None, @@ -447,7 +448,7 @@ async def reject_with_error(*args): assert is_awaitable(result) assert await result == expected_result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def resolves_to_an_error_if_variables_were_wrong_type(): schema = GraphQLSchema( query=DummyQueryType, @@ -492,7 +493,7 @@ async def resolves_to_an_error_if_variables_were_wrong_type(): # Once a subscription returns a valid AsyncIterator, it can still yield errors. def describe_subscription_publish_phase(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): pubsub = SimplePubSub() @@ -527,7 +528,7 @@ async def produces_a_payload_for_multiple_subscribe_in_same_subscription(): assert await payload1 == (expected_payload, None) assert await payload2 == (expected_payload, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_when_queried_fields_are_async(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"asyncResolver": True}) @@ -564,7 +565,7 @@ async def produces_a_payload_when_queried_fields_are_async(): with pytest.raises(StopAsyncIteration): await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_per_subscription_event(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -643,7 +644,7 @@ async def produces_a_payload_per_subscription_event(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_function_returns_errors_with_defer(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldDefer": True}) @@ -707,7 +708,7 @@ async def subscribe_function_returns_errors_with_defer(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_function_returns_errors_with_stream(): pubsub = SimplePubSub() subscription = create_subscription(pubsub, {"shouldStream": True}) @@ -788,7 +789,7 @@ async def subscribe_function_returns_errors_with_stream(): with pytest.raises(StopAsyncIteration): assert await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def produces_a_payload_when_there_are_multiple_events(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -844,7 +845,7 @@ async def produces_a_payload_when_there_are_multiple_events(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_already_done(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -895,7 +896,7 @@ async def should_not_trigger_when_subscription_is_already_done(): with pytest.raises(StopAsyncIteration): await payload - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_not_trigger_when_subscription_is_thrown(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -936,7 +937,7 @@ async def should_not_trigger_when_subscription_is_thrown(): with pytest.raises(StopAsyncIteration): await payload - @pytest.mark.asyncio() + @pytest.mark.asyncio async def event_order_is_correct_for_multiple_publishes(): pubsub = SimplePubSub() subscription = create_subscription(pubsub) @@ -992,7 +993,7 @@ async def event_order_is_correct_for_multiple_publishes(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_handle_error_during_execution_of_source_event(): 
async def generate_messages(_obj, _info): yield "Hello" @@ -1040,7 +1041,7 @@ def resolve_message(message, _info): # Subsequent events are still executed. assert await anext(subscription) == ({"newMessage": "Bonjour"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_pass_through_error_thrown_in_source_event_stream(): async def generate_messages(_obj, _info): yield "Hello" @@ -1077,7 +1078,7 @@ def resolve_message(message, _info): with pytest.raises(StopAsyncIteration): await anext(subscription) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_sync_resolve_function(): async def generate_messages(_obj, _info): yield "Hello" @@ -1105,7 +1106,7 @@ def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_async_resolve_function(): async def generate_messages(_obj, _info): await asyncio.sleep(0) @@ -1135,7 +1136,7 @@ async def resolve_message(message, _info): assert await anext(subscription) == ({"newMessage": "Hello"}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_work_with_custom_async_iterator(): class MessageGenerator: resolved: List[str] = [] @@ -1185,7 +1186,7 @@ async def resolve(cls, message, _info) -> str: await subscription.aclose() # type: ignore - @pytest.mark.asyncio() + @pytest.mark.asyncio async def should_close_custom_async_iterator(): class MessageGenerator: closed: bool = False diff --git a/tests/execution/test_sync.py b/tests/execution/test_sync.py index 36f8c9a5..d5e9504f 100644 --- a/tests/execution/test_sync.py +++ b/tests/execution/test_sync.py @@ -1,4 +1,5 @@ import pytest + from graphql import graphql_sync from graphql.execution import execute, execute_sync from graphql.language import parse @@ -51,7 +52,7 @@ def does_not_return_an_awaitable_if_mutation_fields_are_all_synchronous(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def returns_an_awaitable_if_any_field_is_asynchronous(): doc = "query Example { syncField, asyncField }" result = execute(schema, parse(doc), "rootValue") @@ -80,7 +81,7 @@ def does_not_throw_if_not_encountering_async_execution_with_check_sync(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_execution_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -93,7 +94,7 @@ async def throws_if_encountering_async_execution_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" @@ -112,7 +113,7 @@ async def throws_if_encountering_async_operation_without_check_sync(): del result cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_with_check_sync(): doc = """ @@ -132,7 +133,7 @@ async def throws_if_encountering_async_iterable_execution_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_iterable_execution_without_check_sync(): doc = """ @@ -188,7 +189,7 @@ def 
does_not_throw_if_not_encountering_async_operation_with_check_sync(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_with_check_sync(): doc = "query Example { syncField, asyncField }" @@ -199,7 +200,7 @@ async def throws_if_encountering_async_operation_with_check_sync(): del exc_info cleanup() - @pytest.mark.asyncio() + @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def throws_if_encountering_async_operation_without_check_sync(): doc = "query Example { syncField, asyncField }" diff --git a/tests/language/test_block_string_fuzz.py b/tests/language/test_block_string_fuzz.py index feb7ca2b..0e17b4d4 100644 --- a/tests/language/test_block_string_fuzz.py +++ b/tests/language/test_block_string_fuzz.py @@ -1,4 +1,5 @@ import pytest + from graphql.language import Lexer, Source, TokenKind from graphql.language.block_string import ( is_printable_as_block_string, @@ -40,7 +41,7 @@ def assert_non_printable_block_string(test_value: str) -> None: def describe_print_block_string(): - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(80) def correctly_print_random_strings(): # Testing with length >7 is taking exponentially more time. However, it is diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index 0bc9a398..d2d24931 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -3,6 +3,7 @@ from typing import Optional, Tuple import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind from graphql.language.lexer import is_punctuator_token_kind diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index b671e444..e6d33064 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -3,6 +3,7 @@ from typing import Optional, Tuple, cast import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index 6117c69d..b6ac41e0 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import FieldNode, NameNode, parse, print_ast from ..fixtures import kitchen_sink_query # noqa: F401 diff --git a/tests/language/test_schema_parser.py b/tests/language/test_schema_parser.py index a5005a06..df64381a 100644 --- a/tests/language/test_schema_parser.py +++ b/tests/language/test_schema_parser.py @@ -6,6 +6,7 @@ from typing import Optional, Tuple import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import ( ArgumentNode, diff --git a/tests/language/test_schema_printer.py b/tests/language/test_schema_printer.py index 95fcac97..083dcd0f 100644 --- a/tests/language/test_schema_printer.py +++ b/tests/language/test_schema_printer.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import NameNode, ScalarTypeDefinitionNode, parse, print_ast from ..fixtures import kitchen_sink_sdl # noqa: F401 diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 02014445..24008605 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -4,6 +4,7 @@ from typing import cast import pytest + from graphql.language import Source, SourceLocation from ..utils import 
dedent diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 1e74c6ff..00283fe1 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -5,6 +5,7 @@ from typing import Any, cast import pytest + from graphql.language import ( BREAK, REMOVE, diff --git a/tests/pyutils/test_async_reduce.py b/tests/pyutils/test_async_reduce.py index cbcef554..0ac606c8 100644 --- a/tests/pyutils/test_async_reduce.py +++ b/tests/pyutils/test_async_reduce.py @@ -1,6 +1,7 @@ from functools import reduce import pytest + from graphql.pyutils import async_reduce, is_awaitable @@ -16,7 +17,7 @@ def callback(accumulator, current_value): assert result == 42 assert result == reduce(callback, values, initial_value) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_sync_values_and_sync_initial_value(): def callback(accumulator, current_value): return accumulator + "-" + current_value @@ -26,7 +27,7 @@ def callback(accumulator, current_value): assert not is_awaitable(result) assert result == "foo-bar-baz" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_async_initial_value(): async def async_initial_value(): return "foo" @@ -39,7 +40,7 @@ def callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_async_callback(): async def async_callback(accumulator, current_value): return accumulator + "-" + current_value @@ -49,7 +50,7 @@ async def async_callback(accumulator, current_value): assert is_awaitable(result) assert await result == "foo-bar-baz" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def works_with_async_callback_and_async_initial_value(): async def async_initial_value(): return 1 / 8 diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 8a19396d..3148520b 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -2,6 +2,7 @@ from typing import cast import pytest + from graphql import graphql_sync from graphql.pyutils import ( Description, diff --git a/tests/pyutils/test_format_list.py b/tests/pyutils/test_format_list.py index ee425eca..09567645 100644 --- a/tests/pyutils/test_format_list.py +++ b/tests/pyutils/test_format_list.py @@ -1,4 +1,5 @@ import pytest + from graphql.pyutils import and_list, or_list diff --git a/tests/pyutils/test_inspect.py b/tests/pyutils/test_inspect.py index 3721d018..94c62b48 100644 --- a/tests/pyutils/test_inspect.py +++ b/tests/pyutils/test_inspect.py @@ -6,6 +6,7 @@ from typing import Any import pytest + from graphql.pyutils import Undefined, inspect from graphql.type import ( GraphQLDirective, @@ -138,7 +139,7 @@ def test_generator(): assert inspect(test_generator) == "" assert inspect(test_generator()) == "" - @pytest.mark.asyncio() + @pytest.mark.asyncio async def inspect_coroutine(): async def test_coroutine(): pass diff --git a/tests/pyutils/test_is_awaitable.py b/tests/pyutils/test_is_awaitable.py index dcee07d9..b05f01af 100644 --- a/tests/pyutils/test_is_awaitable.py +++ b/tests/pyutils/test_is_awaitable.py @@ -3,6 +3,7 @@ from sys import version_info as python_version import pytest + from graphql.pyutils import is_awaitable @@ -66,7 +67,7 @@ async def some_async_function(): assert not isawaitable(some_async_function) assert not is_awaitable(some_async_function) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def recognizes_a_coroutine_object(): async def some_async_function(): return True @@ 
-92,7 +93,7 @@ def some_function(): assert is_awaitable(some_old_style_coroutine) assert is_awaitable(some_old_style_coroutine) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def recognizes_a_future_object(): async def some_async_function(): return True @@ -105,7 +106,7 @@ async def some_async_function(): assert await some_future is True - @pytest.mark.asyncio() + @pytest.mark.asyncio async def declines_an_async_generator(): async def some_async_generator_function(): yield True diff --git a/tests/pyutils/test_simple_pub_sub.py b/tests/pyutils/test_simple_pub_sub.py index 2f30a8e2..f0a88dcb 100644 --- a/tests/pyutils/test_simple_pub_sub.py +++ b/tests/pyutils/test_simple_pub_sub.py @@ -1,11 +1,12 @@ from asyncio import sleep import pytest + from graphql.pyutils import SimplePubSub, is_awaitable def describe_simple_pub_sub(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_async_iterator_mock(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() @@ -49,7 +50,7 @@ async def subscribe_async_iterator_mock(): with pytest.raises(StopAsyncIteration): await iterator.__anext__() - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_empties_push_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -67,7 +68,7 @@ async def iterator_aclose_empties_push_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_empties_pull_queue(): pubsub = SimplePubSub() assert not pubsub.subscribers @@ -84,7 +85,7 @@ async def iterator_aclose_empties_pull_queue(): assert iterator.pull_queue.qsize() == 0 assert not iterator.listening - @pytest.mark.asyncio() + @pytest.mark.asyncio async def iterator_aclose_is_idempotent(): pubsub = SimplePubSub() iterator = pubsub.get_subscriber() diff --git a/tests/pyutils/test_undefined.py b/tests/pyutils/test_undefined.py index b6f62eea..b34611e3 100644 --- a/tests/pyutils/test_undefined.py +++ b/tests/pyutils/test_undefined.py @@ -1,6 +1,7 @@ import pickle import pytest + from graphql.pyutils import Undefined, UndefinedType diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 3f8713ab..575bf482 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -54,7 +54,6 @@ GraphQLSchema, GraphQLString, ) - from tests.star_wars_data import ( get_droid, get_friends, diff --git a/tests/test_star_wars_query.py b/tests/test_star_wars_query.py index 6e5bbf59..bb1008b8 100644 --- a/tests/test_star_wars_query.py +++ b/tests/test_star_wars_query.py @@ -1,4 +1,5 @@ import pytest + from graphql import graphql, graphql_sync from .star_wars_schema import star_wars_schema as schema @@ -6,7 +7,7 @@ def describe_star_wars_query_tests(): def describe_basic_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): source = """ query HeroNameQuery { @@ -18,7 +19,7 @@ async def correctly_identifies_r2_d2_as_hero_of_the_star_wars_saga(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def accepts_positional_arguments_to_graphql(): source = """ query HeroNameQuery { @@ -33,7 +34,7 @@ async def accepts_positional_arguments_to_graphql(): sync_result = graphql_sync(schema, source) assert sync_result == result - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): source = """ query 
HeroNameAndFriendsQuery { @@ -63,7 +64,7 @@ async def allows_us_to_query_for_the_id_and_friends_of_r2_d2(): ) def describe_nested_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): source = """ query NestedQuery { @@ -121,7 +122,7 @@ async def allows_us_to_query_for_the_friends_of_friends_of_r2_d2(): ) def describe_using_ids_and_query_parameters_to_refetch_objects(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_r2_d2_directly_using_his_id(): source = """ query { @@ -133,7 +134,7 @@ async def allows_us_to_query_for_r2_d2_directly_using_his_id(): result = await graphql(schema=schema, source=source) assert result == ({"droid": {"name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_characters_directly_using_their_id(): source = """ query FetchLukeAndC3POQuery { @@ -151,7 +152,7 @@ async def allows_us_to_query_characters_directly_using_their_id(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -166,7 +167,7 @@ async def allows_creating_a_generic_query_to_fetch_luke_using_his_id(): ) assert result == ({"human": {"name": "Luke Skywalker"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): source = """ query FetchSomeIDQuery($someId: String!) { @@ -181,7 +182,7 @@ async def allows_creating_a_generic_query_to_fetch_han_using_his_id(): ) assert result == ({"human": {"name": "Han Solo"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def generic_query_that_gets_null_back_when_passed_invalid_id(): source = """ query humanQuery($id: String!) 
{ @@ -197,7 +198,7 @@ async def generic_query_that_gets_null_back_when_passed_invalid_id(): assert result == ({"human": None}, None) def describe_using_aliases_to_change_the_key_in_the_response(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): source = """ query FetchLukeAliased { @@ -209,7 +210,7 @@ async def allows_us_to_query_for_luke_changing_his_key_with_an_alias(): result = await graphql(schema=schema, source=source) assert result == ({"luke": {"name": "Luke Skywalker"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): source = """ query FetchLukeAndLeiaAliased { @@ -228,7 +229,7 @@ async def query_for_luke_and_leia_using_two_root_fields_and_an_alias(): ) def describe_uses_fragments_to_express_more_complex_queries(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_query_using_duplicated_content(): source = """ query DuplicateFields { @@ -251,7 +252,7 @@ async def allows_us_to_query_using_duplicated_content(): None, ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): source = """ query UseFragment { @@ -277,7 +278,7 @@ async def allows_us_to_use_a_fragment_to_avoid_duplicating_content(): ) def describe_using_typename_to_find_the_type_of_an_object(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_verify_that_r2_d2_is_a_droid(): source = """ query CheckTypeOfR2 { @@ -290,7 +291,7 @@ async def allows_us_to_verify_that_r2_d2_is_a_droid(): result = await graphql(schema=schema, source=source) assert result == ({"hero": {"__typename": "Droid", "name": "R2-D2"}}, None) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def allows_us_to_verify_that_luke_is_a_human(): source = """ query CheckTypeOfLuke { @@ -307,7 +308,7 @@ async def allows_us_to_verify_that_luke_is_a_human(): ) def describe_reporting_errors_raised_in_resolvers(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_secret_backstory(): source = """ query HeroNameQuery { @@ -329,7 +330,7 @@ async def correctly_reports_error_on_accessing_secret_backstory(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_backstory_in_a_list(): source = """ query HeroNameQuery { @@ -373,7 +374,7 @@ async def correctly_reports_error_on_accessing_backstory_in_a_list(): ], ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def correctly_reports_error_on_accessing_through_an_alias(): source = """ query HeroNameQuery { diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 7d134a52..147e01bd 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -12,6 +12,7 @@ from typing import Any, AsyncIterable, NamedTuple import pytest + from graphql import ( GraphQLArgument, GraphQLBoolean, @@ -212,13 +213,13 @@ async def resolve_subscription_user(event, info, id): # noqa: ARG001, A002 ) -@pytest.fixture() +@pytest.fixture def context(): return {"registry": UserRegistry()} def describe_query(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def query_user(context): user = await context["registry"].create( firstName="John", lastName="Doe", tweets=42, verified=True @@ -250,7 +251,7 @@ async def query_user(context): def describe_mutation(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def create_user(context): received = {} @@ -302,7 +303,7 @@ 
def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.CREATED.value}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def update_user(context): received = {} @@ -358,7 +359,7 @@ def receive(msg): "User 0": {"user": user, "mutation": MutationEnum.UPDATED.value}, } - @pytest.mark.asyncio() + @pytest.mark.asyncio async def delete_user(context): received = {} @@ -400,7 +401,7 @@ def receive(msg): def describe_subscription(): - @pytest.mark.asyncio() + @pytest.mark.asyncio async def subscribe_to_user_mutations(context): query = """ subscription ($userId: ID!) { diff --git a/tests/type/test_assert_name.py b/tests/type/test_assert_name.py index 55ef75c7..24ffc55d 100644 --- a/tests/type/test_assert_name.py +++ b/tests/type/test_assert_name.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.type import assert_enum_value_name, assert_name diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index 88ce94f7..a8b7c24b 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -12,6 +12,7 @@ from typing_extensions import TypedDict import pytest + from graphql.error import GraphQLError from graphql.language import ( EnumTypeDefinitionNode, diff --git a/tests/type/test_directives.py b/tests/type/test_directives.py index 3f29a947..4257d81f 100644 --- a/tests/type/test_directives.py +++ b/tests/type/test_directives.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.language import DirectiveDefinitionNode, DirectiveLocation from graphql.type import GraphQLArgument, GraphQLDirective, GraphQLInt, GraphQLString diff --git a/tests/type/test_extensions.py b/tests/type/test_extensions.py index 5aa087e2..d28b9482 100644 --- a/tests/type/test_extensions.py +++ b/tests/type/test_extensions.py @@ -1,4 +1,5 @@ import pytest + from graphql.type import ( GraphQLArgument, GraphQLDirective, diff --git a/tests/type/test_predicate.py b/tests/type/test_predicate.py index bd006e74..c741eca3 100644 --- a/tests/type/test_predicate.py +++ b/tests/type/test_predicate.py @@ -1,6 +1,7 @@ from typing import Any import pytest + from graphql.language import DirectiveLocation from graphql.type import ( GraphQLArgument, diff --git a/tests/type/test_scalars.py b/tests/type/test_scalars.py index 27255388..0ef5e548 100644 --- a/tests/type/test_scalars.py +++ b/tests/type/test_scalars.py @@ -3,6 +3,7 @@ from typing import Any import pytest + from graphql.error import GraphQLError from graphql.language import parse_value as parse_value_to_ast from graphql.pyutils import Undefined diff --git a/tests/type/test_schema.py b/tests/type/test_schema.py index f589302b..e678de35 100644 --- a/tests/type/test_schema.py +++ b/tests/type/test_schema.py @@ -1,6 +1,7 @@ from copy import deepcopy import pytest + from graphql.language import ( DirectiveLocation, SchemaDefinitionNode, diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index ab364e9f..087832ba 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -3,6 +3,7 @@ from operator import attrgetter import pytest + from graphql.language import DirectiveLocation, parse from graphql.pyutils import inspect from graphql.type import ( diff --git a/tests/utilities/test_ast_from_value.py b/tests/utilities/test_ast_from_value.py index 1432d7a4..947f2b18 100644 --- a/tests/utilities/test_ast_from_value.py +++ b/tests/utilities/test_ast_from_value.py @@ -1,6 +1,7 @@ from math import inf, nan import pytest + from 
graphql.error import GraphQLError from graphql.language import ( BooleanValueNode, diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 518fb5bf..4b861003 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,6 +1,7 @@ from typing import cast import pytest + from graphql import graphql_sync from graphql.type import ( GraphQLArgument, diff --git a/tests/utilities/test_coerce_input_value.py b/tests/utilities/test_coerce_input_value.py index c18b5098..90af6cb9 100644 --- a/tests/utilities/test_coerce_input_value.py +++ b/tests/utilities/test_coerce_input_value.py @@ -4,6 +4,7 @@ from typing import Any, NamedTuple import pytest + from graphql.error import GraphQLError from graphql.pyutils import Undefined from graphql.type import ( diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 75c70efd..28ac0be4 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -3,6 +3,7 @@ from typing import Union import pytest + from graphql import graphql_sync from graphql.language import parse, print_ast from graphql.type import ( diff --git a/tests/utilities/test_introspection_from_schema.py b/tests/utilities/test_introspection_from_schema.py index 895ade9a..1c9dbd52 100644 --- a/tests/utilities/test_introspection_from_schema.py +++ b/tests/utilities/test_introspection_from_schema.py @@ -3,6 +3,7 @@ from copy import deepcopy import pytest + from graphql.type import GraphQLField, GraphQLObjectType, GraphQLSchema, GraphQLString from graphql.utilities import ( IntrospectionQuery, @@ -105,7 +106,7 @@ def can_deep_copy_pickled_schema(): # check that introspecting the copied schema gives the same result assert introspection_from_schema(copied) == introspected_schema - @pytest.mark.slow() + @pytest.mark.slow def describe_deepcopy_and_pickle_big(): # pragma: no cover @pytest.mark.timeout(20) def can_deep_copy_big_schema(big_schema_sdl): # noqa: F811 diff --git a/tests/utilities/test_strip_ignored_characters.py b/tests/utilities/test_strip_ignored_characters.py index d708bfdb..cdc6062d 100644 --- a/tests/utilities/test_strip_ignored_characters.py +++ b/tests/utilities/test_strip_ignored_characters.py @@ -1,6 +1,7 @@ from __future__ import annotations import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind, parse from graphql.utilities import strip_ignored_characters diff --git a/tests/utilities/test_strip_ignored_characters_fuzz.py b/tests/utilities/test_strip_ignored_characters_fuzz.py index 85c43aec..4c276e07 100644 --- a/tests/utilities/test_strip_ignored_characters_fuzz.py +++ b/tests/utilities/test_strip_ignored_characters_fuzz.py @@ -3,6 +3,7 @@ from json import dumps import pytest + from graphql.error import GraphQLSyntaxError from graphql.language import Lexer, Source, TokenKind from graphql.utilities import strip_ignored_characters @@ -74,7 +75,7 @@ def lex_value(s: str) -> str | None: def describe_strip_ignored_characters(): - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_documents_with_random_combination_of_ignored_characters(): for ignored in ignored_tokens: @@ -85,7 +86,7 @@ def strips_documents_with_random_combination_of_ignored_characters(): ExpectStripped("".join(ignored_tokens)).to_equal("") - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_leading_and_trailing_ignored_tokens(): for 
token in punctuator_tokens + non_punctuator_tokens: @@ -100,7 +101,7 @@ def strips_random_leading_and_trailing_ignored_tokens(): ExpectStripped("".join(ignored_tokens) + token).to_equal(token) ExpectStripped(token + "".join(ignored_tokens)).to_equal(token) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_punctuator_tokens(): for left in punctuator_tokens: @@ -117,7 +118,7 @@ def strips_random_ignored_tokens_between_punctuator_tokens(): left + right ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: @@ -136,7 +137,7 @@ def strips_random_ignored_tokens_between_punctuator_and_non_punctuator_tokens(): punctuator + "".join(ignored_tokens) + non_punctuator ).to_equal(punctuator + non_punctuator) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): for non_punctuator in non_punctuator_tokens: @@ -159,7 +160,7 @@ def strips_random_ignored_tokens_between_non_punctuator_and_punctuator_tokens(): non_punctuator + "".join(ignored_tokens) + punctuator ).to_equal(non_punctuator + punctuator) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space(): for non_punctuator in non_punctuator_tokens: @@ -177,7 +178,7 @@ def replace_random_ignored_tokens_between_non_punctuator_and_spread_with_space() non_punctuator + " ..." ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): for left in non_punctuator_tokens: @@ -194,7 +195,7 @@ def replace_random_ignored_tokens_between_non_punctuator_tokens_with_space(): left + " " + right ) - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_string(): for ignored in ignored_tokens: @@ -205,7 +206,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_string(): ExpectStripped(dumps("".join(ignored_tokens))).to_stay_the_same() - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(10) def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): ignored_tokens_without_formatting = [ @@ -226,7 +227,7 @@ def does_not_strip_random_ignored_tokens_embedded_in_the_block_string(): '"""|' + "".join(ignored_tokens_without_formatting) + '|"""' ).to_stay_the_same() - @pytest.mark.slow() + @pytest.mark.slow @pytest.mark.timeout(80) def strips_ignored_characters_inside_random_block_strings(): # Testing with length >7 is taking exponentially more time. 
However it is diff --git a/tests/utilities/test_type_from_ast.py b/tests/utilities/test_type_from_ast.py index 282c8f50..fa75a9f9 100644 --- a/tests/utilities/test_type_from_ast.py +++ b/tests/utilities/test_type_from_ast.py @@ -1,4 +1,5 @@ import pytest + from graphql.language import TypeNode, parse_type from graphql.type import GraphQLList, GraphQLNonNull, GraphQLObjectType from graphql.utilities import type_from_ast diff --git a/tests/utils/test_assert_equal_awaitables_or_values.py b/tests/utils/test_assert_equal_awaitables_or_values.py index 214acfea..3e60fbcb 100644 --- a/tests/utils/test_assert_equal_awaitables_or_values.py +++ b/tests/utils/test_assert_equal_awaitables_or_values.py @@ -15,7 +15,7 @@ def does_not_throw_when_given_equal_values(): == test_value ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def does_not_throw_when_given_equal_awaitables(): async def test_value(): return {"test": "test"} @@ -27,7 +27,7 @@ async def test_value(): == await test_value() ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_when_given_unequal_awaitables(): async def test_value(value): return value @@ -37,7 +37,7 @@ async def test_value(value): test_value({}), test_value({}), test_value({"test": "test"}) ) - @pytest.mark.asyncio() + @pytest.mark.asyncio async def throws_when_given_mixture_of_equal_values_and_awaitables(): async def test_value(): return {"test": "test"} diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index 37d57e9b..e8f08fe1 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -1,4 +1,5 @@ import pytest + from graphql.error import GraphQLError from graphql.language import parse from graphql.utilities import TypeInfo, build_schema diff --git a/tox.ini b/tox.ini index c261c70e..e5953a48 100644 --- a/tox.ini +++ b/tox.ini @@ -17,7 +17,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.5.7,<0.6 +deps = ruff>=0.6.4,<0.7 commands = ruff check src tests ruff format --check src tests From eb9edd583c7ccb2865a4dcde83298748cb7bbefe Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:05:40 +0200 Subject: [PATCH 11/50] incremental: subsequent result records should not store parent references Replicates graphql/graphql-js@fae5da500bad94c39a7ecd77a4c4361b58d6d2da --- docs/conf.py | 3 + src/graphql/execution/execute.py | 92 ++++++------ .../execution/incremental_publisher.py | 134 ++++++++---------- tests/execution/test_defer.py | 12 +- tests/execution/test_stream.py | 14 +- 5 files changed, 122 insertions(+), 133 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index bd53efa0..43766c1b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -161,7 +161,9 @@ GraphQLTypeResolver GroupedFieldSet IncrementalDataRecord +InitialResultRecord Middleware +SubsequentDataRecord asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches graphql.execution.map_async_iterable.map_async_iterable @@ -169,6 +171,7 @@ graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments graphql.execution.incremental_publisher.IncrementalPublisher +graphql.execution.incremental_publisher.InitialResultRecord graphql.execution.incremental_publisher.StreamItemsRecord graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.language.lexer.EscapeSequence diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ae56c9b9..d61909a9 100644 --- 
a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -86,7 +86,9 @@ IncrementalDataRecord, IncrementalPublisher, IncrementalResult, + InitialResultRecord, StreamItemsRecord, + SubsequentDataRecord, SubsequentIncrementalExecutionResult, ) from .middleware import MiddlewareManager @@ -352,7 +354,6 @@ class ExecutionContext: field_resolver: GraphQLFieldResolver type_resolver: GraphQLTypeResolver subscribe_field_resolver: GraphQLFieldResolver - errors: list[GraphQLError] incremental_publisher: IncrementalPublisher middleware_manager: MiddlewareManager | None @@ -371,7 +372,6 @@ def __init__( field_resolver: GraphQLFieldResolver, type_resolver: GraphQLTypeResolver, subscribe_field_resolver: GraphQLFieldResolver, - errors: list[GraphQLError], incremental_publisher: IncrementalPublisher, middleware_manager: MiddlewareManager | None, is_awaitable: Callable[[Any], bool] | None, @@ -385,7 +385,6 @@ def __init__( self.field_resolver = field_resolver self.type_resolver = type_resolver self.subscribe_field_resolver = subscribe_field_resolver - self.errors = errors self.incremental_publisher = incremental_publisher self.middleware_manager = middleware_manager if is_awaitable: @@ -478,7 +477,6 @@ def build( field_resolver or default_field_resolver, type_resolver or default_type_resolver, subscribe_field_resolver or default_field_resolver, - [], IncrementalPublisher(), middleware_manager, is_awaitable, @@ -514,15 +512,14 @@ def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: self.field_resolver, self.type_resolver, self.subscribe_field_resolver, - [], - # no need to update incrementalPublisher, - # incremental delivery is not supported for subscriptions self.incremental_publisher, self.middleware_manager, self.is_awaitable, ) - def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: + def execute_operation( + self, initial_result_record: InitialResultRecord + ) -> AwaitableOrValue[dict[str, Any]]: """Execute an operation. Implements the "Executing operations" section of the spec. @@ -551,12 +548,17 @@ def execute_operation(self) -> AwaitableOrValue[dict[str, Any]]: self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, None, grouped_field_set) # type: ignore + )(root_type, root_value, None, grouped_field_set, initial_result_record) for patch in patches: label, patch_grouped_filed_set = patch self.execute_deferred_fragment( - root_type, root_value, patch_grouped_filed_set, label, None + root_type, + root_value, + patch_grouped_filed_set, + initial_result_record, + label, + None, ) return result @@ -567,6 +569,7 @@ def execute_fields_serially( source_value: Any, path: Path | None, grouped_field_set: GroupedFieldSet, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. 
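
The pattern in the hunks above: a result record is threaded through every execution entry point, so field errors get appended to the record that produced them instead of to a single `context.errors` list shared by the whole operation. A minimal, runnable sketch of that routing idea — `ResultRecord` and `Publisher` here are hypothetical stand-ins, not graphql-core's actual classes:

    from __future__ import annotations

    from dataclasses import dataclass, field


    @dataclass
    class ResultRecord:
        """Collects the errors raised while executing one part of the result."""

        errors: list[str] = field(default_factory=list)


    class Publisher:
        """The single place through which field errors are routed."""

        def add_field_error(self, record: ResultRecord, error: str) -> None:
            record.errors.append(error)


    publisher = Publisher()
    initial_record = ResultRecord()
    deferred_record = ResultRecord()
    publisher.add_field_error(initial_record, "error in the initial result")
    publisher.add_field_error(deferred_record, "error in a deferred fragment")
    assert initial_record.errors == ["error in the initial result"]
    assert deferred_record.errors == ["error in a deferred fragment"]

Keeping errors on the owning record is what lets a deferred or streamed payload report its own errors without mutating the already-sent initial response.
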
@@ -581,7 +584,11 @@ def reducer( response_name, field_group = field_item field_path = Path(path, response_name, parent_type.name) result = self.execute_field( - parent_type, source_value, field_group, field_path + parent_type, + source_value, + field_group, + field_path, + incremental_data_record, ) if result is Undefined: return results @@ -607,7 +614,7 @@ def execute_fields( source_value: Any, path: Path | None, grouped_field_set: GroupedFieldSet, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -662,7 +669,7 @@ def execute_field( source: Any, field_group: FieldGroup, path: Path, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. @@ -774,7 +781,7 @@ def handle_field_error( return_type: GraphQLOutputType, field_group: FieldGroup, path: Path, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> None: """Handle error properly according to the field type.""" error = located_error(raw_error, field_group, path.as_list()) @@ -784,13 +791,9 @@ def handle_field_error( if is_non_null_type(return_type): raise error - errors = ( - incremental_data_record.errors if incremental_data_record else self.errors - ) - # Otherwise, error protection is applied, logging the error and resolving a # null value for this field if one is encountered. - errors.append(error) + self.incremental_publisher.add_field_error(incremental_data_record, error) def complete_value( self, @@ -799,7 +802,7 @@ def complete_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Complete a value. @@ -888,7 +891,7 @@ async def complete_awaitable_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None = None, + incremental_data_record: IncrementalDataRecord, ) -> Any: """Complete an awaitable value.""" try: @@ -955,7 +958,7 @@ async def complete_async_iterator_value( info: GraphQLResolveInfo, path: Path, async_iterator: AsyncIterator[Any], - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> list[Any]: """Complete an async iterator. @@ -984,8 +987,8 @@ async def complete_async_iterator_value( info, item_type, path, - stream.label, incremental_data_record, + stream.label, ) ), timeout=ASYNC_DELAY, @@ -1039,7 +1042,7 @@ def complete_list_value( info: GraphQLResolveInfo, path: Path, result: AsyncIterable[Any] | Iterable[Any], - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[list[Any]]: """Complete a list value. @@ -1093,8 +1096,8 @@ def complete_list_value( field_group, info, item_type, - stream.label, previous_incremental_data_record, + stream.label, ) continue @@ -1138,7 +1141,7 @@ def complete_list_item_value( field_group: FieldGroup, info: GraphQLResolveInfo, item_path: Path, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> bool: """Complete a list item value by adding it to the completed results. 
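
The signature changes in the surrounding hunks all follow one shape: `incremental_data_record` stops being optional (`IncrementalDataRecord | None = None`) and becomes a required parameter. A hedged sketch of why that matters — the function and variable names below are illustrative, not the library's API. An optional parameter with a shared fallback lets a caller forget the record and silently misroute errors; a required parameter surfaces the omission immediately:

    from __future__ import annotations

    shared_errors: list[str] = []


    def complete_optional(value: str, errors: list[str] | None = None) -> None:
        # The old shape: omitting `errors` silently falls back to the shared list.
        (errors if errors is not None else shared_errors).append(f"failed: {value}")


    def complete_required(value: str, errors: list[str]) -> None:
        # The new shape: the caller must name the record the error belongs to.
        errors.append(f"failed: {value}")


    deferred_errors: list[str] = []
    complete_optional("a")  # lands in shared_errors, perhaps unintentionally
    complete_required("b", deferred_errors)
    assert shared_errors == ["failed: a"]
    assert deferred_errors == ["failed: b"]

With the required form, a forgotten record is an immediate TypeError (and a type-checker error) rather than a silent fallback to operation-wide state.
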
@@ -1229,7 +1232,7 @@ def complete_abstract_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[Any]: """Complete an abstract value. @@ -1344,7 +1347,7 @@ def complete_object_value( info: GraphQLResolveInfo, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current @@ -1379,7 +1382,7 @@ def collect_and_execute_subfields( field_group: FieldGroup, path: Path, result: Any, - incremental_data_record: IncrementalDataRecord | None, + incremental_data_record: IncrementalDataRecord, ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" sub_grouped_field_set, sub_patches = self.collect_subfields( @@ -1396,9 +1399,9 @@ def collect_and_execute_subfields( return_type, result, sub_patch_grouped_field_set, + incremental_data_record, label, path, - incremental_data_record, ) return sub_fields @@ -1474,9 +1477,9 @@ def execute_deferred_fragment( parent_type: GraphQLObjectType, source_value: Any, fields: GroupedFieldSet, + parent_context: IncrementalDataRecord, label: str | None = None, path: Path | None = None, - parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute deferred fragment.""" incremental_publisher = self.incremental_publisher @@ -1529,9 +1532,9 @@ def execute_stream_field( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, + parent_context: IncrementalDataRecord, label: str | None = None, - parent_context: IncrementalDataRecord | None = None, - ) -> IncrementalDataRecord: + ) -> SubsequentDataRecord: """Execute stream field.""" is_awaitable = self.is_awaitable incremental_publisher = self.incremental_publisher @@ -1678,8 +1681,8 @@ async def execute_stream_async_iterator( info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, + parent_context: IncrementalDataRecord, label: str | None = None, - parent_context: IncrementalDataRecord | None = None, ) -> None: """Execute stream iterator.""" incremental_publisher = self.incremental_publisher @@ -1877,21 +1880,24 @@ def execute_impl( # Errors from sub-fields of a NonNull type may propagate to the top level, # at which point we still log the error and null the parent field, which # in this case is the entire response. 
- errors = context.errors incremental_publisher = context.incremental_publisher + initial_result_record = incremental_publisher.prepare_initial_result_record() build_response = context.build_response try: - result = context.execute_operation() + result = context.execute_operation(initial_result_record) if context.is_awaitable(result): # noinspection PyShadowingNames async def await_result() -> Any: try: + errors = incremental_publisher.get_initial_errors( + initial_result_record + ) initial_result = build_response( await result, # type: ignore errors, ) - incremental_publisher.publish_initial() + incremental_publisher.publish_initial(initial_result_record) if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( @@ -1902,14 +1908,17 @@ async def await_result() -> Any: subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: - errors.append(error) + incremental_publisher.add_field_error(initial_result_record, error) + errors = incremental_publisher.get_initial_errors( + initial_result_record + ) return build_response(None, errors) return initial_result return await_result() - initial_result = build_response(result, errors) # type: ignore - incremental_publisher.publish_initial() + initial_result = build_response(result, initial_result_record.errors) # type: ignore + incremental_publisher.publish_initial(initial_result_record) if incremental_publisher.has_next(): return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( @@ -1920,7 +1929,8 @@ async def await_result() -> Any: subsequent_results=incremental_publisher.subscribe(), ) except GraphQLError as error: - errors.append(error) + incremental_publisher.add_field_error(initial_result_record, error) + errors = incremental_publisher.get_initial_errors(initial_result_record) return build_response(None, errors) return initial_result diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index fb660e85..bf145da3 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -33,6 +33,7 @@ "FormattedIncrementalResult", "FormattedIncrementalStreamResult", "FormattedSubsequentIncrementalExecutionResult", + "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", "IncrementalPublisher", @@ -340,34 +341,23 @@ class IncrementalPublisher: The internal publishing state is managed as follows: - ``_released``: the set of Incremental Data records that are ready to be sent to the + ``_released``: the set of Subsequent Data records that are ready to be sent to the client, i.e. their parents have completed and they have also completed. - ``_pending``: the set of Incremental Data records that are definitely pending, i.e. + ``_pending``: the set of Subsequent Data records that are definitely pending, i.e. their parents have completed so that they can no longer be filtered. This includes - all Incremental Data records in `released`, as well as Incremental Data records that + all Subsequent Data records in `released`, as well as Subsequent Data records that have not yet completed. - ``_initial_result``: a record containing the state of the initial result, - as follows: - ``is_completed``: indicates whether the initial result has completed. - ``children``: the set of Incremental Data records that can be be published when the - initial result is completed. 
- - Each Incremental Data record also contains similar metadata, i.e. these records also - contain similar ``is_completed`` and ``children`` properties. - Note: Instead of sets we use dicts (with values set to None) which preserve order and thereby achieve more deterministic results. """ - _initial_result: InitialResult - _released: dict[IncrementalDataRecord, None] - _pending: dict[IncrementalDataRecord, None] + _released: dict[SubsequentDataRecord, None] + _pending: dict[SubsequentDataRecord, None] _resolve: Event | None def __init__(self) -> None: - self._initial_result = InitialResult({}, False) self._released = {} self._pending = {} self._resolve = None # lazy initialization @@ -420,33 +410,33 @@ async def subscribe( close_async_iterators.append(close_async_iterator) await gather(*close_async_iterators) + def prepare_initial_result_record(self) -> InitialResultRecord: + """Prepare a new initial result record.""" + return InitialResultRecord(errors=[], children={}) + def prepare_new_deferred_fragment_record( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, + parent_context: IncrementalDataRecord, ) -> DeferredFragmentRecord: """Prepare a new deferred fragment record.""" - deferred_fragment_record = DeferredFragmentRecord(label, path, parent_context) + deferred_fragment_record = DeferredFragmentRecord(label, path) - context = parent_context or self._initial_result - context.children[deferred_fragment_record] = None + parent_context.children[deferred_fragment_record] = None return deferred_fragment_record def prepare_new_stream_items_record( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, + parent_context: IncrementalDataRecord, async_iterator: AsyncIterator[Any] | None = None, ) -> StreamItemsRecord: """Prepare a new stream items record.""" - stream_items_record = StreamItemsRecord( - label, path, parent_context, async_iterator - ) + stream_items_record = StreamItemsRecord(label, path, async_iterator) - context = parent_context or self._initial_result - context.children[stream_items_record] = None + parent_context.children[stream_items_record] = None return stream_items_record def complete_deferred_fragment_record( @@ -481,29 +471,34 @@ def add_field_error( """Add a field error to the given incremental data record.""" incremental_data_record.errors.append(error) - def publish_initial(self) -> None: + def publish_initial(self, initial_result: InitialResultRecord) -> None: """Publish the initial result.""" - for child in self._initial_result.children: + for child in initial_result.children: + if child.filtered: + continue self._publish(child) + def get_initial_errors( + self, initial_result: InitialResultRecord + ) -> list[GraphQLError]: + """Get the errors from the given initial result.""" + return initial_result.errors + def filter( self, null_path: Path, - erroring_incremental_data_record: IncrementalDataRecord | None, + erroring_incremental_data_record: IncrementalDataRecord, ) -> None: """Filter out the given erroring incremental data record.""" null_path_list = null_path.as_list() - children = (erroring_incremental_data_record or self._initial_result).children + descendants = self._get_descendants(erroring_incremental_data_record.children) - for child in self._get_descendants(children): + for child in descendants: if not self._matches_path(child.path, null_path_list): continue - self._delete(child) - parent = child.parent_context or self._initial_result - with suppress_key_error: - del 
parent.children[child]
+            child.filtered = True
 
             if isinstance(child, StreamItemsRecord):
                 async_iterator = child.async_iterator
@@ -522,32 +517,24 @@ def _trigger(self) -> None:
             resolve.set()
         self._resolve = Event()
 
-    def _introduce(self, item: IncrementalDataRecord) -> None:
+    def _introduce(self, item: SubsequentDataRecord) -> None:
         """Introduce a new IncrementalDataRecord."""
         self._pending[item] = None
 
-    def _release(self, item: IncrementalDataRecord) -> None:
+    def _release(self, item: SubsequentDataRecord) -> None:
         """Release the given IncrementalDataRecord."""
         if item in self._pending:
             self._released[item] = None
             self._trigger()
 
-    def _push(self, item: IncrementalDataRecord) -> None:
+    def _push(self, item: SubsequentDataRecord) -> None:
         """Push the given IncrementalDataRecord."""
         self._released[item] = None
         self._pending[item] = None
         self._trigger()
 
-    def _delete(self, item: IncrementalDataRecord) -> None:
-        """Delete the given IncrementalDataRecord."""
-        with suppress_key_error:
-            del self._released[item]
-        with suppress_key_error:
-            del self._pending[item]
-        self._trigger()
-
     def _get_incremental_result(
-        self, completed_records: Collection[IncrementalDataRecord]
+        self, completed_records: Collection[SubsequentDataRecord]
     ) -> SubsequentIncrementalExecutionResult | None:
         """Get the incremental result with the completed records."""
         incremental_results: list[IncrementalResult] = []
@@ -556,6 +543,8 @@ def _get_incremental_result(
         for incremental_data_record in completed_records:
             incremental_result: IncrementalResult
             for child in incremental_data_record.children:
+                if child.filtered:
+                    continue
                 self._publish(child)
             if isinstance(incremental_data_record, StreamItemsRecord):
                 items = incremental_data_record.items
@@ -591,18 +580,18 @@ def _get_incremental_result(
             return SubsequentIncrementalExecutionResult(has_next=False)
         return None
 
-    def _publish(self, incremental_data_record: IncrementalDataRecord) -> None:
+    def _publish(self, subsequent_result_record: SubsequentDataRecord) -> None:
         """Publish the given incremental data record."""
-        if incremental_data_record.is_completed:
-            self._push(incremental_data_record)
+        if subsequent_result_record.is_completed:
+            self._push(subsequent_result_record)
         else:
-            self._introduce(incremental_data_record)
+            self._introduce(subsequent_result_record)
 
     def _get_descendants(
         self,
-        children: dict[IncrementalDataRecord, None],
-        descendants: dict[IncrementalDataRecord, None] | None = None,
-    ) -> dict[IncrementalDataRecord, None]:
+        children: dict[SubsequentDataRecord, None],
+        descendants: dict[SubsequentDataRecord, None] | None = None,
+    ) -> dict[SubsequentDataRecord, None]:
         """Get the descendants of the given children."""
         if descendants is None:
             descendants = {}
@@ -625,6 +614,13 @@ def _add_task(self, awaitable: Awaitable[Any]) -> None:
         task.add_done_callback(tasks.discard)
 
 
+class InitialResultRecord(NamedTuple):
+    """A record collecting the errors and children of the initial result"""
+
+    errors: list[GraphQLError]
+    children: dict[SubsequentDataRecord, None]
+
+
 class DeferredFragmentRecord:
     """A record collecting data marked with the defer directive"""
 
@@ -632,22 +628,16 @@ class DeferredFragmentRecord:
     label: str | None
     path: list[str | int]
     data: dict[str, Any] | None
-    parent_context: IncrementalDataRecord | None
-    children: dict[IncrementalDataRecord, None]
+    children: dict[SubsequentDataRecord, None]
     is_completed: bool
+    filtered: bool
 
-    def __init__(
-        self,
-        label: str | None,
-        path: Path | None,
-        parent_context: IncrementalDataRecord | None,
-    ) ->
None: + def __init__(self, label: str | None, path: Path | None) -> None: self.label = label self.path = path.as_list() if path else [] - self.parent_context = parent_context self.errors = [] self.children = {} - self.is_completed = False + self.is_completed = self.filtered = False self.data = None def __repr__(self) -> str: @@ -655,8 +645,6 @@ def __repr__(self) -> str: args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") if self.data is not None: args.append("data") return f"{name}({', '.join(args)})" @@ -669,26 +657,24 @@ class StreamItemsRecord: label: str | None path: list[str | int] items: list[str] | None - parent_context: IncrementalDataRecord | None - children: dict[IncrementalDataRecord, None] + children: dict[SubsequentDataRecord, None] async_iterator: AsyncIterator[Any] | None is_completed_async_iterator: bool is_completed: bool + filtered: bool def __init__( self, label: str | None, path: Path | None, - parent_context: IncrementalDataRecord | None, async_iterator: AsyncIterator[Any] | None = None, ) -> None: self.label = label self.path = path.as_list() if path else [] - self.parent_context = parent_context self.async_iterator = async_iterator self.errors = [] self.children = {} - self.is_completed_async_iterator = self.is_completed = False + self.is_completed_async_iterator = self.is_completed = self.filtered = False self.items = None def __repr__(self) -> str: @@ -696,11 +682,11 @@ def __repr__(self) -> str: args: list[str] = [f"path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") - if self.parent_context: - args.append("parent_context") if self.items is not None: args.append("items") return f"{name}({', '.join(args)})" -IncrementalDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] +SubsequentDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord] + +IncrementalDataRecord = Union[InitialResultRecord, SubsequentDataRecord] diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 312a2a0b..41161248 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -321,17 +321,13 @@ def can_compare_subsequent_incremental_execution_result(): } def can_print_deferred_fragment_record(): - record = DeferredFragmentRecord(None, None, None) + record = DeferredFragmentRecord(None, None) assert str(record) == "DeferredFragmentRecord(path=[])" - record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar"), record) - assert ( - str(record) == "DeferredFragmentRecord(" - "path=['bar'], label='foo', parent_context)" - ) + record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar")) + assert str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo')" record.data = {"hello": "world"} assert ( - str(record) == "DeferredFragmentRecord(" - "path=['bar'], label='foo', parent_context, data)" + str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo', data)" ) @pytest.mark.asyncio diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 8a1ca605..42188517 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -173,18 +173,12 @@ def can_format_and_print_incremental_stream_result(): ) def can_print_stream_record(): - record = StreamItemsRecord(None, None, None, None) + record = StreamItemsRecord(None, None, None) assert str(record) == "StreamItemsRecord(path=[])" - record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), record, None) - 
assert ( - str(record) == "StreamItemsRecord(" - "path=['bar'], label='foo', parent_context)" - ) + record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), None) + assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo')" record.items = ["hello", "world"] - assert ( - str(record) == "StreamItemsRecord(" - "path=['bar'], label='foo', parent_context, items)" - ) + assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo', items)" # noinspection PyTypeChecker def can_compare_incremental_stream_result(): From 7134b68ebc2eff0dfc7ab2988851854c9aba964b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:26:04 +0200 Subject: [PATCH 12/50] Speedup sorting and building/extending schema Replicates graphql/graphql-js@361078603d0d67fee2dce8214f7213fa14b393f0 --- src/graphql/utilities/extend_schema.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index fc6cee77..72283269 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -230,8 +230,12 @@ def extend_schema_args( return schema_kwargs self = cls(type_extensions) - for existing_type in schema_kwargs["types"] or (): - self.type_map[existing_type.name] = self.extend_named_type(existing_type) + + self.type_map = { + type_.name: self.extend_named_type(type_) + for type_ in schema_kwargs["types"] or () + } + for type_node in type_defs: name = type_node.name.value self.type_map[name] = std_type_map.get(name) or self.build_type(type_node) From 2a3799fec77d5e71d7ebe61d0e257a13cc3b2f4b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 14 Sep 2024 21:32:28 +0200 Subject: [PATCH 13/50] Add support for fourfold nested lists in introspection Replicates graphql/graphql-js@826ae7f952dcccd8bb8a7ade3d9f9c7540edcc06 --- src/graphql/utilities/get_introspection_query.py | 8 ++++++++ tests/utilities/test_build_client_schema.py | 14 +++++++------- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index cffaa12d..4babfaec 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -149,6 +149,14 @@ def input_deprecation(string: str) -> str | None: ofType {{ kind name + ofType {{ + kind + name + ofType {{ + kind + name + }} + }} }} }} }} diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 4b861003..8a4cecba 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -991,11 +991,11 @@ def throws_when_missing_directive_args(): build_client_schema(introspection) def describe_very_deep_decorators_are_not_supported(): - def fails_on_very_deep_lists_more_than_7_levels(): + def fails_on_very_deep_lists_more_than_8_levels(): schema = build_schema( """ type Query { - foo: [[[[[[[[String]]]]]]]] + foo: [[[[[[[[[[String]]]]]]]]]] } """ ) @@ -1010,11 +1010,11 @@ def fails_on_very_deep_lists_more_than_7_levels(): " Decorated type deeper than introspection query." ) - def fails_on_a_very_deep_non_null_more_than_7_levels(): + def fails_on_a_very_deep_more_than_8_levels_non_null(): schema = build_schema( """ type Query { - foo: [[[[String!]!]!]!] + foo: [[[[[String!]!]!]!]!] 
} """ ) @@ -1029,12 +1029,12 @@ def fails_on_a_very_deep_non_null_more_than_7_levels(): " Decorated type deeper than introspection query." ) - def succeeds_on_deep_types_less_or_equal_7_levels(): - # e.g., fully non-null 3D matrix + def succeeds_on_deep_less_or_equal_8_levels_types(): + # e.g., fully non-null 4D matrix sdl = dedent( """ type Query { - foo: [[[String!]!]!]! + foo: [[[[String!]!]!]!]! } """ ) From 69a30bbc76c2e163a205e27c377b0c863440721e Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 15 Sep 2024 17:58:39 +0200 Subject: [PATCH 14/50] Configure default test path --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index c3c2367c..28c7707f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -308,12 +308,14 @@ minversion = "7.4" addopts = "--benchmark-disable" # Deactivate default name pattern for test classes (we use pytest_describe). python_classes = "PyTest*" -# Handle all async fixtures and tests automatically by asyncio +# Handle all async fixtures and tests automatically by asyncio, asyncio_mode = "auto" # Set a timeout in seconds for aborting tests that run too long. timeout = "100" # Ignore config options not (yet) available in older Python versions. filterwarnings = "ignore::pytest.PytestConfigWarning" +# All tests can be found in the tests directory. +testpaths = ["tests"] [build-system] requires = ["poetry_core>=1.6.1,<2"] From 26701397d84338a42c7acbce78368ae8f9d97271 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 15 Sep 2024 18:15:50 +0200 Subject: [PATCH 15/50] incremental publisher should handle all response building Replicates graphql/graphql-js@1f30b54edc3f7b8443f4aedc48fc56c0d2be9705 --- docs/conf.py | 9 +- src/graphql/execution/__init__.py | 10 +- src/graphql/execution/execute.py | 278 +-------------- .../execution/incremental_publisher.py | 331 +++++++++++++++--- 4 files changed, 301 insertions(+), 327 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 43766c1b..4655434b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -150,6 +150,7 @@ EnterLeaveVisitor ExperimentalIncrementalExecutionResults FieldGroup +FormattedIncrementalResult FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType @@ -161,19 +162,19 @@ GraphQLTypeResolver GroupedFieldSet IncrementalDataRecord +IncrementalResult InitialResultRecord Middleware SubsequentDataRecord asyncio.events.AbstractEventLoop graphql.execution.collect_fields.FieldsAndPatches -graphql.execution.map_async_iterable.map_async_iterable -graphql.execution.Middleware -graphql.execution.execute.ExperimentalIncrementalExecutionResults graphql.execution.execute.StreamArguments +graphql.execution.map_async_iterable.map_async_iterable +graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord graphql.execution.incremental_publisher.StreamItemsRecord -graphql.execution.incremental_publisher.DeferredFragmentRecord +graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor graphql.type.definition.GT_co diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index aec85be1..2d5225be 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -14,21 +14,21 @@ default_type_resolver, subscribe, ExecutionContext, - ExecutionResult, - ExperimentalIncrementalExecutionResults, - 
InitialIncrementalExecutionResult, - FormattedExecutionResult, - FormattedInitialIncrementalExecutionResult, Middleware, ) from .incremental_publisher import ( + ExecutionResult, + ExperimentalIncrementalExecutionResults, FormattedSubsequentIncrementalExecutionResult, FormattedIncrementalDeferResult, FormattedIncrementalResult, FormattedIncrementalStreamResult, + FormattedExecutionResult, + FormattedInitialIncrementalExecutionResult, IncrementalDeferResult, IncrementalResult, IncrementalStreamResult, + InitialIncrementalExecutionResult, SubsequentIncrementalExecutionResult, ) from .async_iterables import map_async_iterable diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d61909a9..ca4df8ff 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -13,20 +13,14 @@ Awaitable, Callable, Iterable, - Iterator, List, NamedTuple, Optional, - Sequence, Tuple, Union, cast, ) -try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict try: from typing import TypeAlias, TypeGuard except ImportError: # Python < 3.10 @@ -37,7 +31,7 @@ except ImportError: # Python < 3.7 from concurrent.futures import TimeoutError -from ..error import GraphQLError, GraphQLFormattedError, located_error +from ..error import GraphQLError, located_error from ..language import ( DocumentNode, FragmentDefinitionNode, @@ -82,14 +76,13 @@ ) from .incremental_publisher import ( ASYNC_DELAY, - FormattedIncrementalResult, + ExecutionResult, + ExperimentalIncrementalExecutionResults, IncrementalDataRecord, IncrementalPublisher, - IncrementalResult, InitialResultRecord, StreamItemsRecord, SubsequentDataRecord, - SubsequentIncrementalExecutionResult, ) from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values @@ -112,12 +105,7 @@ async def anext(iterator: AsyncIterator) -> Any: "execute_sync", "experimental_execute_incrementally", "subscribe", - "ExecutionResult", "ExecutionContext", - "ExperimentalIncrementalExecutionResults", - "FormattedExecutionResult", - "FormattedInitialIncrementalExecutionResult", - "InitialIncrementalExecutionResult", "Middleware", ] @@ -144,181 +132,7 @@ async def anext(iterator: AsyncIterator) -> Any: # 3) inline fragment "spreads" e.g. "...on Type { a }" -class FormattedExecutionResult(TypedDict, total=False): - """Formatted execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] - extensions: dict[str, Any] - - -class ExecutionResult: - """The result of GraphQL execution. - - - ``data`` is the result of a successful execution of the query. - - ``errors`` is included when any errors occurred as a non-empty list. - - ``extensions`` is reserved for adding non-standard properties. 
- """ - - __slots__ = "data", "errors", "extensions" - - data: dict[str, Any] | None - errors: list[GraphQLError] | None - extensions: dict[str, Any] | None - - def __init__( - self, - data: dict[str, Any] | None = None, - errors: list[GraphQLError] | None = None, - extensions: dict[str, Any] | None = None, - ) -> None: - self.data = data - self.errors = errors - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - ext = "" if self.extensions is None else f", extensions={self.extensions}" - return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" - - def __iter__(self) -> Iterator[Any]: - return iter((self.data, self.errors)) - - @property - def formatted(self) -> FormattedExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedExecutionResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - if "extensions" not in other: - return other == {"data": self.data, "errors": self.errors} - return other == { - "data": self.data, - "errors": self.errors, - "extensions": self.extensions, - } - if isinstance(other, tuple): - if len(other) == 2: - return other == (self.data, self.errors) - return other == (self.data, self.errors, self.extensions) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other - - -class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): - """Formatted initial incremental execution result""" - - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] - hasNext: bool - incremental: list[FormattedIncrementalResult] - extensions: dict[str, Any] - - -class InitialIncrementalExecutionResult: - """Initial incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. 
- """ - - data: dict[str, Any] | None - errors: list[GraphQLError] | None - incremental: Sequence[IncrementalResult] | None - has_next: bool - extensions: dict[str, Any] | None - - __slots__ = "data", "errors", "has_next", "incremental", "extensions" - - def __init__( - self, - data: dict[str, Any] | None = None, - errors: list[GraphQLError] | None = None, - incremental: Sequence[IncrementalResult] | None = None, - has_next: bool = False, - extensions: dict[str, Any] | None = None, - ) -> None: - self.data = data - self.errors = errors - self.incremental = incremental - self.has_next = has_next - self.extensions = extensions - - def __repr__(self) -> str: - name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") - if self.has_next: - args.append("has_next") - if self.extensions: - args.append(f"extensions={self.extensions}") - return f"{name}({', '.join(args)})" - - @property - def formatted(self) -> FormattedInitialIncrementalExecutionResult: - """Get execution result formatted according to the specification.""" - formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} - if self.errors is not None: - formatted["errors"] = [error.formatted for error in self.errors] - if self.incremental: - formatted["incremental"] = [result.formatted for result in self.incremental] - formatted["hasNext"] = self.has_next - if self.extensions is not None: - formatted["extensions"] = self.extensions - return formatted - - def __eq__(self, other: object) -> bool: - if isinstance(other, dict): - return ( - other.get("data") == self.data - and other.get("errors") == self.errors - and ( - "incremental" not in other - or other["incremental"] == self.incremental - ) - and ("hasNext" not in other or other["hasNext"] == self.has_next) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) - ) - if isinstance(other, tuple): - size = len(other) - return ( - 1 < size < 6 - and ( - self.data, - self.errors, - self.incremental, - self.has_next, - self.extensions, - )[:size] - == other - ) - return ( - isinstance(other, self.__class__) - and other.data == self.data - and other.errors == self.errors - and other.incremental == self.incremental - and other.has_next == self.has_next - and other.extensions == self.extensions - ) - - def __ne__(self, other: object) -> bool: - return not self == other +Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] class StreamArguments(NamedTuple): @@ -328,16 +142,6 @@ class StreamArguments(NamedTuple): label: str | None -class ExperimentalIncrementalExecutionResults(NamedTuple): - """Execution results when retrieved incrementally.""" - - initial_result: InitialIncrementalExecutionResult - subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] - - -Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] - - class ExecutionContext: """Data that must be available at all points during query execution. @@ -482,24 +286,6 @@ def build( is_awaitable, ) - @staticmethod - def build_response( - data: dict[str, Any] | None, errors: list[GraphQLError] - ) -> ExecutionResult: - """Build response. - - Given a completed execution context and data, build the (data, errors) response - defined by the "Response" section of the GraphQL spec. 
- """ - if not errors: - return ExecutionResult(data, None) - # Sort the error list in order to make it deterministic, since we might have - # been using parallel execution. - errors.sort( - key=lambda error: (error.locations or [], error.path or [], error.message) - ) - return ExecutionResult(data, errors) - def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: """Create a copy of the execution context for usage with subscribe events.""" return self.__class__( @@ -1882,57 +1668,29 @@ def execute_impl( # in this case is the entire response. incremental_publisher = context.incremental_publisher initial_result_record = incremental_publisher.prepare_initial_result_record() - build_response = context.build_response try: - result = context.execute_operation(initial_result_record) + data = context.execute_operation(initial_result_record) + if context.is_awaitable(data): - if context.is_awaitable(result): - # noinspection PyShadowingNames - async def await_result() -> Any: + async def await_response() -> ( + ExecutionResult | ExperimentalIncrementalExecutionResults + ): try: - errors = incremental_publisher.get_initial_errors( - initial_result_record - ) - initial_result = build_response( - await result, # type: ignore - errors, + return incremental_publisher.build_data_response( + initial_result_record, + await data, # type: ignore ) - incremental_publisher.publish_initial(initial_result_record) - if incremental_publisher.has_next(): - return ExperimentalIncrementalExecutionResults( - initial_result=InitialIncrementalExecutionResult( - initial_result.data, - initial_result.errors, - has_next=True, - ), - subsequent_results=incremental_publisher.subscribe(), - ) except GraphQLError as error: - incremental_publisher.add_field_error(initial_result_record, error) - errors = incremental_publisher.get_initial_errors( - initial_result_record + return incremental_publisher.build_error_response( + initial_result_record, error ) - return build_response(None, errors) - return initial_result - return await_result() + return await_response() + + return incremental_publisher.build_data_response(initial_result_record, data) # type: ignore - initial_result = build_response(result, initial_result_record.errors) # type: ignore - incremental_publisher.publish_initial(initial_result_record) - if incremental_publisher.has_next(): - return ExperimentalIncrementalExecutionResults( - initial_result=InitialIncrementalExecutionResult( - initial_result.data, - initial_result.errors, - has_next=True, - ), - subsequent_results=incremental_publisher.subscribe(), - ) except GraphQLError as error: - incremental_publisher.add_field_error(initial_result_record, error) - errors = incremental_publisher.get_initial_errors(initial_result_record) - return build_response(None, errors) - return initial_result + return incremental_publisher.build_error_response(initial_result_record, error) def assume_not_awaitable(_value: Any) -> bool: diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index bf145da3..fdc35fff 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -11,6 +11,7 @@ AsyncIterator, Awaitable, Collection, + Iterator, NamedTuple, Sequence, Union, @@ -21,7 +22,6 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict - if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from ..pyutils import Path @@ -29,10 +29,15 @@ __all__ = [ "ASYNC_DELAY", 
"DeferredFragmentRecord", + "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "FormattedExecutionResult", "FormattedIncrementalDeferResult", "FormattedIncrementalResult", "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", "FormattedSubsequentIncrementalExecutionResult", + "InitialIncrementalExecutionResult", "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", @@ -49,6 +54,190 @@ suppress_key_error = suppress(KeyError) +class FormattedExecutionResult(TypedDict, total=False): + """Formatted execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + extensions: dict[str, Any] + + +class ExecutionResult: + """The result of GraphQL execution. + + - ``data`` is the result of a successful execution of the query. + - ``errors`` is included when any errors occurred as a non-empty list. + - ``extensions`` is reserved for adding non-standard properties. + """ + + __slots__ = "data", "errors", "extensions" + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + extensions: dict[str, Any] | None + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + ext = "" if self.extensions is None else f", extensions={self.extensions}" + return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" + + def __iter__(self) -> Iterator[Any]: + return iter((self.data, self.errors)) + + @property + def formatted(self) -> FormattedExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + if "extensions" not in other: + return other == {"data": self.data, "errors": self.errors} + return other == { + "data": self.data, + "errors": self.errors, + "extensions": self.extensions, + } + if isinstance(other, tuple): + if len(other) == 2: + return other == (self.data, self.errors) + return other == (self.data, self.errors, self.extensions) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): + """Formatted initial incremental execution result""" + + data: dict[str, Any] | None + errors: list[GraphQLFormattedError] + hasNext: bool + incremental: list[FormattedIncrementalResult] + extensions: dict[str, Any] + + +class InitialIncrementalExecutionResult: + """Initial incremental execution result. + + - ``has_next`` is True if a future payload is expected. + - ``incremental`` is a list of the results from defer/stream directives. 
+ """ + + data: dict[str, Any] | None + errors: list[GraphQLError] | None + incremental: Sequence[IncrementalResult] | None + has_next: bool + extensions: dict[str, Any] | None + + __slots__ = "data", "errors", "has_next", "incremental", "extensions" + + def __init__( + self, + data: dict[str, Any] | None = None, + errors: list[GraphQLError] | None = None, + incremental: Sequence[IncrementalResult] | None = None, + has_next: bool = False, + extensions: dict[str, Any] | None = None, + ) -> None: + self.data = data + self.errors = errors + self.incremental = incremental + self.has_next = has_next + self.extensions = extensions + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.has_next: + args.append("has_next") + if self.extensions: + args.append(f"extensions={self.extensions}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedInitialIncrementalExecutionResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + if self.incremental: + formatted["incremental"] = [result.formatted for result in self.incremental] + formatted["hasNext"] = self.has_next + if self.extensions is not None: + formatted["extensions"] = self.extensions + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("data") == self.data + and other.get("errors") == self.errors + and ( + "incremental" not in other + or other["incremental"] == self.incremental + ) + and ("hasNext" not in other or other["hasNext"] == self.has_next) + and ( + "extensions" not in other or other["extensions"] == self.extensions + ) + ) + if isinstance(other, tuple): + size = len(other) + return ( + 1 < size < 6 + and ( + self.data, + self.errors, + self.incremental, + self.has_next, + self.extensions, + )[:size] + == other + ) + return ( + isinstance(other, self.__class__) + and other.data == self.data + and other.errors == self.errors + and other.incremental == self.incremental + and other.has_next == self.has_next + and other.extensions == self.extensions + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class ExperimentalIncrementalExecutionResults(NamedTuple): + """Execution results when retrieved incrementally.""" + + initial_result: InitialIncrementalExecutionResult + subsequent_results: AsyncGenerator[SubsequentIncrementalExecutionResult, None] + + class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" @@ -363,53 +552,6 @@ def __init__(self) -> None: self._resolve = None # lazy initialization self._tasks: set[Awaitable] = set() - def has_next(self) -> bool: - """Check whether there is a next incremental result.""" - return bool(self._pending) - - async def subscribe( - self, - ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: - """Subscribe to the incremental results.""" - is_done = False - pending = self._pending - - try: - while not is_done: - released = self._released - for item in released: - with suppress_key_error: - del pending[item] - self._released = {} - - result = self._get_incremental_result(released) - - if not self.has_next(): - is_done = True - - if result is not 
None: - yield result - else: - resolve = self._resolve - if resolve is None: - self._resolve = resolve = Event() - await resolve.wait() - finally: - close_async_iterators = [] - for incremental_data_record in pending: - if isinstance( - incremental_data_record, StreamItemsRecord - ): # pragma: no cover - async_iterator = incremental_data_record.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type: ignore - except AttributeError: - pass - else: - close_async_iterators.append(close_async_iterator) - await gather(*close_async_iterators) - def prepare_initial_result_record(self) -> InitialResultRecord: """Prepare a new initial result record.""" return InitialResultRecord(errors=[], children={}) @@ -471,18 +613,47 @@ def add_field_error( """Add a field error to the given incremental data record.""" incremental_data_record.errors.append(error) - def publish_initial(self, initial_result: InitialResultRecord) -> None: - """Publish the initial result.""" - for child in initial_result.children: + def build_data_response( + self, initial_result_record: InitialResultRecord, data: dict[str, Any] | None + ) -> ExecutionResult | ExperimentalIncrementalExecutionResults: + """Build response for the given data.""" + for child in initial_result_record.children: if child.filtered: continue self._publish(child) - def get_initial_errors( - self, initial_result: InitialResultRecord - ) -> list[GraphQLError]: - """Get the errors from the given initial result.""" - return initial_result.errors + errors = initial_result_record.errors or None + if errors: + errors.sort( + key=lambda error: ( + error.locations or [], + error.path or [], + error.message, + ) + ) + if self._pending: + return ExperimentalIncrementalExecutionResults( + initial_result=InitialIncrementalExecutionResult( + data, + errors, + has_next=True, + ), + subsequent_results=self._subscribe(), + ) + return ExecutionResult(data, errors) + + def build_error_response( + self, initial_result_record: InitialResultRecord, error: GraphQLError + ) -> ExecutionResult: + """Build response for the given error.""" + errors = initial_result_record.errors + errors.append(error) + # Sort the error list in order to make it deterministic, since we might have + # been using parallel execution. 
+ errors.sort( + key=lambda error: (error.locations or [], error.path or [], error.message) + ) + return ExecutionResult(None, errors) def filter( self, @@ -510,6 +681,49 @@ def filter( else: self._add_task(close_async_iterator) + async def _subscribe( + self, + ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: + """Subscribe to the incremental results.""" + is_done = False + pending = self._pending + + try: + while not is_done: + released = self._released + for item in released: + with suppress_key_error: + del pending[item] + self._released = {} + + result = self._get_incremental_result(released) + + if not self._pending: + is_done = True + + if result is not None: + yield result + else: + resolve = self._resolve + if resolve is None: + self._resolve = resolve = Event() + await resolve.wait() + finally: + close_async_iterators = [] + for incremental_data_record in pending: + if isinstance( + incremental_data_record, StreamItemsRecord + ): # pragma: no cover + async_iterator = incremental_data_record.async_iterator + if async_iterator: + try: + close_async_iterator = async_iterator.aclose() # type: ignore + except AttributeError: + pass + else: + close_async_iterators.append(close_async_iterator) + await gather(*close_async_iterators) + def _trigger(self) -> None: """Trigger the resolve event.""" resolve = self._resolve @@ -572,11 +786,12 @@ def _get_incremental_result( ) append_result(incremental_result) + has_next = bool(self._pending) if incremental_results: return SubsequentIncrementalExecutionResult( - incremental=incremental_results, has_next=self.has_next() + incremental=incremental_results, has_next=has_next ) - if encountered_completed_async_iterator and not self.has_next(): + if encountered_completed_async_iterator and not has_next: return SubsequentIncrementalExecutionResult(has_next=False) return None From 841c3d2ff52661d71270a8911c769683b7142de4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Thu, 19 Sep 2024 22:27:25 +0200 Subject: [PATCH 16/50] add tests with regard to duplication Replicates graphql/graphql-js@75d419d7c6935745f99f7b14ff4b3901d813e6e9 --- tests/execution/test_defer.py | 1166 +++++++++++++++++++++++++++++++- tests/execution/test_stream.py | 175 ++++- 2 files changed, 1329 insertions(+), 12 deletions(-) diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 41161248..83201377 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -37,6 +37,79 @@ }, ) + +class Friend(NamedTuple): + id: int + name: str + + +friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] + +deeper_object = GraphQLObjectType( + "DeeperObject", + { + "foo": GraphQLField(GraphQLString), + "bar": GraphQLField(GraphQLString), + "baz": GraphQLField(GraphQLString), + "bak": GraphQLField(GraphQLString), + }, +) + +nested_object = GraphQLObjectType( + "NestedObject", + {"deeperObject": GraphQLField(deeper_object), "name": GraphQLField(GraphQLString)}, +) + +another_nested_object = GraphQLObjectType( + "AnotherNestedObject", {"deeperObject": GraphQLField(deeper_object)} +) + +hero = { + "name": "Luke", + "id": 1, + "friends": friends, + "nestedObject": nested_object, + "AnotherNestedObject": another_nested_object, +} + +c = GraphQLObjectType( + "c", + { + "d": GraphQLField(GraphQLString), + "nonNullErrorField": GraphQLField(GraphQLNonNull(GraphQLString)), + }, +) + +e = GraphQLObjectType( + "e", + { + "f": GraphQLField(GraphQLString), + }, +) + +b = GraphQLObjectType( + "b", + { + "c": GraphQLField(c), + "e": 
GraphQLField(e), + }, +) + +a = GraphQLObjectType( + "a", + { + "b": GraphQLField(b), + "someField": GraphQLField(GraphQLString), + }, +) + +g = GraphQLObjectType( + "g", + { + "h": GraphQLField(GraphQLString), + }, +) + hero_type = GraphQLObjectType( "Hero", { @@ -44,24 +117,19 @@ "name": GraphQLField(GraphQLString), "nonNullName": GraphQLField(GraphQLNonNull(GraphQLString)), "friends": GraphQLField(GraphQLList(friend_type)), + "nestedObject": GraphQLField(nested_object), + "anotherNestedObject": GraphQLField(another_nested_object), }, ) -query = GraphQLObjectType("Query", {"hero": GraphQLField(hero_type)}) +query = GraphQLObjectType( + "Query", + {"hero": GraphQLField(hero_type), "a": GraphQLField(a), "g": GraphQLField(g)}, +) schema = GraphQLSchema(query) -class Friend(NamedTuple): - id: int - name: str - - -friends = [Friend(2, "Han"), Friend(3, "Leia"), Friend(4, "C-3PO")] - -hero = {"id": 1, "name": "Luke", "friends": friends} - - class Resolvers: """Various resolver functions for testing.""" @@ -629,6 +697,1082 @@ async def can_defer_an_inline_fragment(): }, ] + @pytest.mark.asyncio + async def emits_empty_defer_fragments(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer { + name @skip(if: true) + } + } + } + fragment TopFragment on Hero { + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {}}, "hasNext": True}, + { + "incremental": [ + { + "data": {}, + "path": ["hero"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def can_separately_emit_defer_fragments_different_labels_varying_fields(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... @defer(label: "DeferName") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {}}, "hasNext": True}, + { + "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + "label": "DeferID", + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + "label": "DeferName", + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_multiple_defers_on_the_same_object(): + document = parse( + """ + query { + hero { + friends { + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... @defer { + ...FriendFrag + ... 
@defer { + ...FriendFrag + } + } + } + } + } + } + } + + fragment FriendFrag on Friend { + id + name + } + """ + ) + result = await complete(document) + + assert result == [ + {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, + { + "incremental": [ + {"data": {}, "path": ["hero", "friends", 0]}, + {"data": {}, "path": ["hero", "friends", 0]}, + {"data": {}, "path": ["hero", "friends", 0]}, + { + "data": {"id": "2", "name": "Han"}, + "path": ["hero", "friends", 0], + }, + {"data": {}, "path": ["hero", "friends", 1]}, + {"data": {}, "path": ["hero", "friends", 1]}, + {"data": {}, "path": ["hero", "friends", 1]}, + { + "data": {"id": "3", "name": "Leia"}, + "path": ["hero", "friends", 1], + }, + {"data": {}, "path": ["hero", "friends", 2]}, + {"data": {}, "path": ["hero", "friends", 2]}, + {"data": {}, "path": ["hero", "friends", 2]}, + { + "data": {"id": "4", "name": "C-3PO"}, + "path": ["hero", "friends", 2], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_present_in_the_initial_payload(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + anotherNestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + bar + } + } + anotherNestedObject { + deeperObject { + foo + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + ) + + assert result == [ + { + "data": { + "hero": { + "nestedObject": {"deeperObject": {"foo": "foo"}}, + "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "bar": "bar", + }, + }, + "anotherNestedObject": { + "deeperObject": { + "foo": "foo", + }, + }, + }, + "path": ["hero"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): + document = parse( + """ + query { + hero { + ... @defer { + nestedObject { + deeperObject { + foo + ... @defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + }, + } + }, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + foo + } + } + ... @defer { + nestedObject { + deeperObject { + foo + bar + } + ... @defer { + deeperObject { + foo + bar + baz + ... 
@defer { + foo + bar + baz + bak + } + } + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "bak": "bak", + } + } + } + }, + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"deeperObject": {"foo": "foo"}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + }, + } + }, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "deeperObject": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + } + }, + "path": ["hero", "nestedObject"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + "baz": "baz", + "bak": "bak", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): + document = parse( + """ + query { + hero { + nestedObject { + deeperObject { + ... @defer { + foo + } + } + } + ... @defer { + nestedObject { + deeperObject { + ... @defer { + foo + bar + } + } + } + } + } + } + """ + ) + result = await complete( + document, + {"hero": {"nestedObject": {"deeperObject": {"foo": "foo", "bar": "bar"}}}}, + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + { + "data": {"nestedObject": {"deeperObject": {}}}, + "path": ["hero"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "foo": "foo", + "bar": "bar", + }, + "path": ["hero", "nestedObject", "deeperObject"], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels(): + document = parse( + """ + query { + a { + b { + c { + d + } + ... @defer { + e { + f + } + } + } + } + ... @defer { + a { + b { + e { + f + } + } + } + g { + h + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}, "e": {"f": "f"}}}, "g": {"h": "h"}}, + ) + + assert result == [ + { + "data": {"a": {"b": {"c": {"d": "d"}}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"e": {"f": "f"}}, + "path": ["a", "b"], + }, + { + "data": {"a": {"b": {"e": {"f": "f"}}}, "g": {"h": "h"}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def preserves_error_boundaries_null_first(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + """ + ) + result = await complete( + document, + {"a": {"b": {"c": {"d": "d"}}, "someField": "someField"}}, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": {"d": "d"}}}, + "path": ["a"], + }, + { + "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def preserves_error_boundaries_value_first(): + document = parse( + """ + query { + ... 
@defer { + a { + b { + c { + d + } + } + } + } + a { + ... @defer { + someField + b { + c { + nonNullErrorField + } + } + } + } + } + """ + ) + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d"}, "nonNullErrorFIeld": None}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": None}, "someField": "someField"}, + "path": ["a"], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 17, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + { + "data": {"a": {"b": {"c": {"d": "d"}}}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + async def correctly_handle_a_slow_null(): + document = parse( + """ + query { + ... @defer { + a { + someField + b { + c { + nonNullErrorField + } + } + } + } + a { + ... @defer { + b { + c { + d + } + } + } + } + } + """ + ) + + async def slow_null(_info) -> None: + await sleep(0) + + result = await complete( + document, + { + "a": { + "b": {"c": {"d": "d", "nonNullErrorField": slow_null}}, + "someField": "someField", + } + }, + ) + + assert result == [ + { + "data": {"a": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"b": {"c": {"d": "d"}}}, + "path": ["a"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field c.nonNullErrorField.", + "locations": [{"line": 8, "column": 23}], + "path": ["a", "b", "c", "nonNullErrorField"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): + document = parse( + """ + query { + hero { + nonNullName + } + ... @defer { + hero { + name + } + } + } + """ + ) + result = await complete( + document, + { + "hero": {**hero, "nonNullName": lambda _info: None}, + }, + ) + + assert result == [ + { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": {"name": "Luke"}}, + "path": [], + }, + ], + "hasNext": False, + }, + ] + + async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): + document = parse( + """ + query { + ... @defer { + hero { + nonNullName + name + } + } + } + """ + ) + result = await complete( + document, + { + "hero": {**hero, "nonNullName": lambda _info: None}, + }, + ) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": None}, + "path": [], + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 5, "column": 19}], + "path": ["hero", "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... 
@defer { + friends { + name + } + } + } + } + """ + ) + + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + }, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + async def resolve_friends(_info): + await sleep(0) + yield friends[0] + + result = await complete( + document, + { + "hero": {**hero, "friends": resolve_friends}, + }, + ) + + assert result == [ + { + "data": {"hero": {"friends": [{"name": "Han"}]}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": [{"name": "Han"}]}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_empty_async_iterable_list_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + + async def resolve_friends(_info): + await sleep(0) + for friend in []: # type: ignore + yield friend # pragma: no cover + + result = await complete( + document, + { + "hero": {**hero, "friends": resolve_friends}, + }, + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": []}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": { + "hero": { + "friends": [ + {"name": "Han"}, + {"name": "Leia"}, + {"name": "C-3PO"}, + ] + } + }, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": [{"id": "2"}, {"id": "3"}, {"id": "4"}]}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_list_fields_that_return_empty_lists(): + document = parse( + """ + query { + hero { + friends { + name + } + ... @defer { + friends { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "friends": lambda _info: []}} + ) + + assert result == [ + { + "data": {"hero": {"friends": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"friends": []}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_null_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ... @defer { + nestedObject { + name + } + } + } + } + """ + ) + result = await complete( + document, {"hero": {**hero, "nestedObject": lambda _info: None}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": None}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"nestedObject": None}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + + async def does_not_deduplicate_async_object_fields(): + document = parse( + """ + query { + hero { + nestedObject { + name + } + ... 
@defer { + nestedObject { + name + } + } + } + } + """ + ) + + async def resolve_nested_object(_info): + return {"name": "foo"} + + result = await complete( + document, {"hero": {"nestedObject": resolve_nested_object}} + ) + + assert result == [ + { + "data": {"hero": {"nestedObject": {"name": "foo"}}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"nestedObject": {"name": "foo"}}, + "path": ["hero"], + } + ], + "hasNext": False, + }, + ] + @pytest.mark.asyncio async def handles_errors_thrown_in_deferred_fragments(): document = parse( diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 42188517..d611f7a9 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -1363,7 +1363,7 @@ async def get_friends(_info): ] @pytest.mark.asyncio - async def handles_async_error_in_complete_value_from_async_iterable_non_null(): + async def handles_async_error_in_complete_value_from_async_generator_non_null(): document = parse( """ query { @@ -1853,6 +1853,179 @@ async def get_friends(_info): }, ] + @pytest.mark.asyncio + async def handles_overlapping_deferred_and_non_deferred_streams(): + document = parse( + """ + query { + nestedObject { + nestedFriendList @stream(initialCount: 0) { + id + } + } + nestedObject { + ... @defer { + nestedFriendList @stream(initialCount: 0) { + id + name + } + } + } + } + """ + ) + + async def get_nested_friend_list(_info): + for i in range(2): + await sleep(0) + yield friends[i] + + result = await complete( + document, + { + "nestedObject": { + "nestedFriendList": get_nested_friend_list, + } + }, + ) + + assert result in ( + # exact order of results depends on timing and Python version + [ + { + "data": {"nestedObject": {"nestedFriendList": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + ], + "hasNext": True, + }, + { + "hasNext": False, + }, + ], + [ + { + "data": {"nestedObject": {"nestedFriendList": []}}, + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + }, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + }, + ], + "hasNext": True, + }, + { + "hasNext": False, + }, + ], + [ + {"data": {"nestedObject": {"nestedFriendList": []}}, "hasNext": True}, + { + "incremental": [ + { + "items": [{"id": "1"}], + "path": ["nestedObject", "nestedFriendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2"}], + "path": ["nestedObject", "nestedFriendList", 1], + } + ], + "hasNext": True, + }, + { + 
"incremental": [ + {"data": {"nestedFriendList": []}, "path": ["nestedObject"]} + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList", 0], + } + ], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList", 1], + } + ], + "hasNext": True, + }, + {"hasNext": False}, + ], + ) + @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): resolve_slow_field = Event() From 4e83d4201a2be88832f24c5733c0a1b8568c3708 Mon Sep 17 00:00:00 2001 From: Trim21 Date: Mon, 23 Sep 2024 05:58:04 +0800 Subject: [PATCH 17/50] Export ValidationAbortedError (#227) --- src/graphql/validation/validate.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 1439f7e4..08c83780 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -14,7 +14,13 @@ if TYPE_CHECKING: from .rules import ASTValidationRule -__all__ = ["assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl"] +__all__ = [ + "assert_valid_sdl", + "assert_valid_sdl_extension", + "validate", + "validate_sdl", + "ValidationAbortedError", +] class ValidationAbortedError(GraphQLError): From a25b40b9de260f59a3d1b8c93722a2e80e32a3c7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 19 Oct 2024 17:01:52 +0200 Subject: [PATCH 18/50] Support Python 3.13 and update dependencies --- .github/workflows/test.yml | 2 +- poetry.lock | 463 ++++++++++++++++------------ pyproject.toml | 5 +- src/graphql/language/parser.py | 5 +- src/graphql/language/visitor.py | 2 +- src/graphql/pyutils/async_reduce.py | 6 +- tests/execution/test_abstract.py | 2 +- tox.ini | 9 +- 8 files changed, 279 insertions(+), 215 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 01668f57..8959d0de 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -8,7 +8,7 @@ jobs: strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', 'pypy3.9', 'pypy3.10'] + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] steps: - uses: actions/checkout@v4 diff --git a/poetry.lock b/poetry.lock index 1d4f8e60..c402516a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,24 +58,24 @@ files = [ [[package]] name = "cachetools" -version = "5.4.0" +version = "5.5.0" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.4.0-py3-none-any.whl", hash = "sha256:3ae3b49a3d5e28a77a0be2b37dbcb89005058959cb2323858c2657c4a8cab474"}, - {file = "cachetools-5.4.0.tar.gz", hash = "sha256:b8adc2e7c07f105ced7bc56dbb6dfbe7c4a00acce20e2227b3f355be89bc6827"}, + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, ] [[package]] name = "certifi" -version = "2024.7.4" +version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, + {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] @@ -91,101 +91,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -363,13 +378,13 @@ toml = ["tomli"] [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] @@ -425,31 +440,34 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.15.4" +version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.15.4-py3-none-any.whl", hash = "sha256:6ca1fffae96225dab4c6eaf1c4f4f28cd2568d3ec2a44e15a08520504de468e7"}, - {file = "filelock-3.15.4.tar.gz", hash = "sha256:2207938cbc1844345cb01a5a95524dae30f0ce089eba5b00378295a17e3e90cb"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.3.2)", "diff-cover (>=8.0.1)", "pytest (>=7.4.3)", "pytest-asyncio (>=0.21)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)", "pytest-timeout (>=2.2)", "virtualenv (>=20.26.2)"] -typing = ["typing-extensions (>=4.8)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" -version = "3.7" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, - {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "imagesize" version = "1.4.1" @@ -483,22 +501,26 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs [[package]] name = "importlib-metadata" -version = "8.2.0" +version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "importlib_metadata-8.2.0-py3-none-any.whl", hash = "sha256:11901fa0c2f97919b288679932bb64febaeacf289d18ac84dd68cb2e74213369"}, - {file = "importlib_metadata-8.2.0.tar.gz", hash = "sha256:72e8d4399996132204f9a16dcc751af254a48f8d1b20b9ff0f98d4a8f901e73d"}, + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-perf (>=0.9.2)", "pytest-ruff (>=0.2.1)"] +test = 
["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] name = "iniconfig" @@ -646,38 +668,43 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.11.1" +version = "1.12.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a32fc80b63de4b5b3e65f4be82b4cfa362a46702672aa6a0f443b4689af7008c"}, - {file = "mypy-1.11.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c1952f5ea8a5a959b05ed5f16452fddadbaae48b5d39235ab4c3fc444d5fd411"}, - {file = "mypy-1.11.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e1e30dc3bfa4e157e53c1d17a0dad20f89dc433393e7702b813c10e200843b03"}, - {file = "mypy-1.11.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c63350af88f43a66d3dfeeeb8d77af34a4f07d760b9eb3a8697f0386c7590b4"}, - {file = "mypy-1.11.1-cp310-cp310-win_amd64.whl", hash = "sha256:a831671bad47186603872a3abc19634f3011d7f83b083762c942442d51c58d58"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7b6343d338390bb946d449677726edf60102a1c96079b4f002dedff375953fc5"}, - {file = "mypy-1.11.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e4fe9f4e5e521b458d8feb52547f4bade7ef8c93238dfb5bbc790d9ff2d770ca"}, - {file = "mypy-1.11.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:886c9dbecc87b9516eff294541bf7f3655722bf22bb898ee06985cd7269898de"}, - {file = "mypy-1.11.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fca4a60e1dd9fd0193ae0067eaeeb962f2d79e0d9f0f66223a0682f26ffcc809"}, - {file = "mypy-1.11.1-cp311-cp311-win_amd64.whl", hash = "sha256:0bd53faf56de9643336aeea1c925012837432b5faf1701ccca7fde70166ccf72"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f39918a50f74dc5969807dcfaecafa804fa7f90c9d60506835036cc1bc891dc8"}, - {file = "mypy-1.11.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bc71d1fb27a428139dd78621953effe0d208aed9857cb08d002280b0422003a"}, - {file = "mypy-1.11.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b868d3bcff720dd7217c383474008ddabaf048fad8d78ed948bb4b624870a417"}, - {file = "mypy-1.11.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a707ec1527ffcdd1c784d0924bf5cb15cd7f22683b919668a04d2b9c34549d2e"}, - {file = "mypy-1.11.1-cp312-cp312-win_amd64.whl", hash = "sha256:64f4a90e3ea07f590c5bcf9029035cf0efeae5ba8be511a8caada1a4893f5525"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:749fd3213916f1751fff995fccf20c6195cae941dc968f3aaadf9bb4e430e5a2"}, - {file = "mypy-1.11.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b639dce63a0b19085213ec5fdd8cffd1d81988f47a2dec7100e93564f3e8fb3b"}, - {file = "mypy-1.11.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c956b49c5d865394d62941b109728c5c596a415e9c5b2be663dd26a1ff07bc0"}, - {file = "mypy-1.11.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:45df906e8b6804ef4b666af29a87ad9f5921aad091c79cc38e12198e220beabd"}, - {file = "mypy-1.11.1-cp38-cp38-win_amd64.whl", hash = "sha256:d44be7551689d9d47b7abc27c71257adfdb53f03880841a5db15ddb22dc63edb"}, - {file = "mypy-1.11.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2684d3f693073ab89d76da8e3921883019ea8a3ec20fa5d8ecca6a2db4c54bbe"}, - 
{file = "mypy-1.11.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:79c07eb282cb457473add5052b63925e5cc97dfab9812ee65a7c7ab5e3cb551c"}, - {file = "mypy-1.11.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11965c2f571ded6239977b14deebd3f4c3abd9a92398712d6da3a772974fad69"}, - {file = "mypy-1.11.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a2b43895a0f8154df6519706d9bca8280cda52d3d9d1514b2d9c3e26792a0b74"}, - {file = "mypy-1.11.1-cp39-cp39-win_amd64.whl", hash = "sha256:1a81cf05975fd61aec5ae16501a091cfb9f605dc3e3c878c0da32f250b74760b"}, - {file = "mypy-1.11.1-py3-none-any.whl", hash = "sha256:0624bdb940255d2dd24e829d99a13cfeb72e4e9031f9492148f410ed30bcab54"}, - {file = "mypy-1.11.1.tar.gz", hash = "sha256:f404a0b069709f18bbdb702eb3dcfe51910602995de00bd39cea3050b5772d08"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, + {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, + {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, + {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, + {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, + {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, + {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, + {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, + {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, + {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, + {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, + {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, + {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, + {file = "mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, + {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, + {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, + {file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, + {file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, + {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, + {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, + {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, + {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, + {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, ] [package.dependencies] @@ -744,19 +771,19 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "platformdirs" -version = "4.2.2" +version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
optional = false python-versions = ">=3.8" files = [ - {file = "platformdirs-4.2.2-py3-none-any.whl", hash = "sha256:2d7a1657e36a80ea911db832a8a6ece5ee53d8de21edd5cc5879af6530b1bfee"}, - {file = "platformdirs-4.2.2.tar.gz", hash = "sha256:38b7b51f512eed9e84a22788b4bce1de17c0adb134d6becb09836e37d8654cd3"}, + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, ] [package.extras] -docs = ["furo (>=2023.9.10)", "proselint (>=0.13)", "sphinx (>=7.2.6)", "sphinx-autodoc-typehints (>=1.25.2)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest-cov (>=4.1)", "pytest-mock (>=3.12)"] -type = ["mypy (>=1.8)"] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" @@ -844,13 +871,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pyproject-api" -version = "1.7.1" +version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" optional = false python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.7.1-py3-none-any.whl", hash = "sha256:2dc1654062c2b27733d8fd4cdda672b22fe8741ef1dde8e3a998a9547b071eeb"}, - {file = "pyproject_api-1.7.1.tar.gz", hash = "sha256:7ebc6cd10710f89f4cf2a2731710a98abce37ebff19427116ff2174c9236a827"}, + {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, + {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, ] [package.dependencies] @@ -858,8 +885,8 @@ packaging = ">=24.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2024.5.6)", "sphinx-autodoc-typehints (>=2.2.1)"] -testing = ["covdefaults (>=2.3)", "pytest (>=8.2.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=70.1)"] +docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"] [[package]] name = "pytest" @@ -886,13 +913,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.2" +version = "8.3.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.2-py3-none-any.whl", hash = "sha256:4ba08f9ae7dcf84ded419494d229b48d0903ea6407b030eaec46df5e6a73bba5"}, - {file = "pytest-8.3.2.tar.gz", hash = "sha256:c132345d12ce551242c87269de812483f5bcc87cdbb4722e48487ba194f9fdce"}, + {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, + {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, ] [package.dependencies] @@ -1029,13 +1056,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2024.1" +version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.1-py2.py3-none-any.whl", hash = 
"sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, - {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, + {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, + {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] @@ -1082,29 +1109,29 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "ruff" -version = "0.5.7" +version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a"}, - {file = "ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be"}, - {file = "ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb"}, - {file = "ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5"}, - {file = "ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e"}, - {file = "ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a"}, - {file = "ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3"}, - {file = "ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4"}, - {file = "ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5"}, + {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, + {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = 
"sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, + {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, + {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, + {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, + {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, + {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] [[package]] @@ -1362,6 +1389,17 @@ files = [ {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] +[[package]] +name = "tomli" +version = "2.0.2" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +files = [ + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, +] + [[package]] name = "tox" version = "3.28.0" @@ -1390,30 +1428,27 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.17.1" +version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.17.1-py3-none-any.whl", hash = "sha256:2974597c0353577126ab014f52d1a399fb761049e165ff34427f84e8cfe6c990"}, - {file = "tox-4.17.1.tar.gz", hash = "sha256:2c41565a571e34480bd401d668a4899806169a4633e972ac296c54406d2ded8a"}, + 
{file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, + {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, ] [package.dependencies] -cachetools = ">=5.4" +cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.15.4" +filelock = ">=3.16.1" packaging = ">=24.1" -platformdirs = ">=4.2.2" +platformdirs = ">=4.3.6" pluggy = ">=1.5" -pyproject-api = ">=1.7.1" +pyproject-api = ">=1.8" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.3" - -[package.extras] -docs = ["furo (>=2024.7.18)", "sphinx (>=7.4.7)", "sphinx-argparse-cli (>=1.16)", "sphinx-autodoc-typehints (>=2.2.3)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.11)"] -testing = ["build[virtualenv] (>=1.2.1)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=70.3)", "time-machine (>=2.14.2)", "wheel (>=0.43)"] +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.26.6" [[package]] name = "typed-ast" @@ -1506,13 +1541,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "urllib3" -version = "2.2.2" +version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ - {file = "urllib3-2.2.2-py3-none-any.whl", hash = "sha256:a448b2f64d686155468037e1ace9f2d2199776e17f0a46610480d311f73e3472"}, - {file = "urllib3-2.2.2.tar.gz", hash = "sha256:dd505485549a7a552833da5e6063639d0d177c04f23bc3864e41e5dc5f612168"}, + {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, + {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] @@ -1523,13 +1558,13 @@ zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.26.3" +version = "20.26.6" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.26.3-py3-none-any.whl", hash = "sha256:8cc4a31139e796e9a7de2cd5cf2489de1217193116a8fd42328f1bd65f434589"}, - {file = "virtualenv-20.26.3.tar.gz", hash = "sha256:4c43a2a236279d9ea36a0d76f98d84bd6ca94ac4e0f4a3b9d46d05e10fea542a"}, + {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, + {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, ] [package.dependencies] @@ -1542,6 +1577,26 @@ platformdirs = ">=3.9.1,<5" docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] +[[package]] 
+name = "virtualenv" +version = "20.27.0" +description = "Virtual Python Environment builder" +optional = false +python-versions = ">=3.8" +files = [ + {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, + {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, +] + +[package.dependencies] +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" + +[package.extras] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] + [[package]] name = "zipp" version = "3.15.0" @@ -1559,20 +1614,24 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [[package]] name = "zipp" -version = "3.20.0" +version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ - {file = "zipp-3.20.0-py3-none-any.whl", hash = "sha256:58da6168be89f0be59beb194da1250516fdaa062ccebd30127ac65d30045e10d"}, - {file = "zipp-3.20.0.tar.gz", hash = "sha256:0145e43d89664cfe1a2e533adc75adafed82fe2da404b4bbb6b026c0157bdb31"}, + {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, + {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "de9ad44d919a23237212508ca6da20b929c8c6cc8aa0da01406ef2f731debe10" +content-hash = "450b262692d7c4cd0e88d628604e32375eef04d37d6f352cf9a93a34ed189506" diff --git a/pyproject.toml b/pyproject.toml index 28c7707f..668dd7ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ classifiers = [ "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13" ] packages = [ { include = "graphql", from = "src" }, @@ -75,9 +76,9 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.6.4,<0.7" +ruff = ">=0.7,<0.8" mypy = [ - { version = "^1.11", python = ">=3.8" }, + { version = "^1.12", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1.0,<2" diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 
78d308d0..95c69ccb 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -471,8 +471,9 @@ def parse_nullability_assertion(self) -> NullabilityAssertionNode | None: def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument - item = cast(Callable[[], ArgumentNode], item) - return self.optional_many(TokenKind.PAREN_L, item, TokenKind.PAREN_R) + return self.optional_many( + TokenKind.PAREN_L, cast(Callable[[], ArgumentNode], item), TokenKind.PAREN_R + ) def parse_argument(self, is_const: bool = False) -> ArgumentNode: """Argument[Const]: Name : Value[?Const]""" diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index be410466..450996d8 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -289,7 +289,7 @@ def visit( else: stack = Stack(in_array, idx, keys, edits, stack) in_array = isinstance(node, tuple) - keys = node if in_array else visitor_keys.get(node.kind, ()) + keys = node if in_array else visitor_keys.get(node.kind, ()) # type: ignore idx = -1 edits = [] if parent: diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 33d97f9c..02fbf648 100644 --- a/src/graphql/pyutils/async_reduce.py +++ b/src/graphql/pyutils/async_reduce.py @@ -36,8 +36,10 @@ def async_reduce( async def async_callback( current_accumulator: Awaitable[U], current_value: T ) -> U: - result = callback(await current_accumulator, current_value) - return await cast(Awaitable, result) if is_awaitable(result) else result + result: AwaitableOrValue[U] = callback( + await current_accumulator, current_value + ) + return await result if is_awaitable(result) else result # type: ignore accumulator = async_callback(cast(Awaitable[U], accumulator), value) else: diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index d7d12b7a..75a1e875 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -42,7 +42,7 @@ async def execute_query( assert isinstance(schema, GraphQLSchema) assert isinstance(query, str) document = parse(query) - result = (execute_sync if sync else execute)(schema, document, root_value) # type: ignore + result = (execute_sync if sync else execute)(schema, document, root_value) if not sync and is_awaitable(result): result = await result assert isinstance(result, ExecutionResult) diff --git a/tox.ini b/tox.ini index e5953a48..fcd4f015 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py3{7,8,9,10,11,12}, pypy3{9,10}, ruff, mypy, docs +envlist = py3{7,8,9,10,11,12,13}, pypy3{9,10}, ruff, mypy, docs isolated_build = true [gh-actions] @@ -11,13 +11,14 @@ python = 3.10: py310 3.11: py311 3.12: py312 + 3.13: py313 pypy3: pypy39 pypy3.9: pypy39 pypy3.10: pypy310 [testenv:ruff] basepython = python3.12 -deps = ruff>=0.6.4,<0.7 +deps = ruff>=0.7,<0.8 commands = ruff check src tests ruff format --check src tests @@ -25,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.11,<2 + mypy>=1.12,<2 pytest>=8.3,<9 commands = mypy src tests @@ -50,5 +51,5 @@ deps = commands = # to also run the time-consuming tests: tox -e py311 -- --run-slow # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable - py3{7,8,9,10,11}, pypy3{9,10}: pytest tests {posargs} + py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing 
--cov=graphql --cov=tests --cov-fail-under=100} From 0d573dab0cf5d2c3bf3e133275d1fe70bdd2731f Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 21 Oct 2024 20:20:33 +0200 Subject: [PATCH 19/50] feat: add codspeed for continuous benchmarking (#230) --- .github/workflows/benchmarks.yml | 31 +++++ poetry.lock | 199 ++++++++++++++++++++++++++++++- pyproject.toml | 1 + tests/benchmarks/test_visit.py | 2 +- 4 files changed, 231 insertions(+), 2 deletions(-) create mode 100644 .github/workflows/benchmarks.yml diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml new file mode 100644 index 00000000..35867e43 --- /dev/null +++ b/.github/workflows/benchmarks.yml @@ -0,0 +1,31 @@ +name: CodSpeed + +on: + push: + branches: + - "main" + pull_request: + workflow_dispatch: + +jobs: + benchmarks: + name: 📈 Benchmarks + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + id: setup-python + with: + python-version: "3.12" + architecture: x64 + + - run: pipx install poetry + + - run: poetry env use 3.12 + - run: poetry install --with test + + - name: Run benchmarks + uses: CodSpeedHQ/action@v3 + with: + token: ${{ secrets.CODSPEED_TOKEN }} + run: poetry run pytest tests --benchmark-enable --codspeed diff --git a/poetry.lock b/poetry.lock index c402516a..dfb6a622 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -15,6 +16,7 @@ files = [ name = "babel" version = "2.14.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -32,6 +34,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "babel" version = "2.16.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -49,6 +52,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -60,6 +64,7 @@ files = [ name = "cachetools" version = "5.5.0" description = "Extensible memoizing collections and decorators" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -71,6 +76,7 @@ files = [ name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -78,10 +84,88 @@ files = [ {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] +[[package]] +name = "cffi" +version = "1.15.1" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, + {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, + {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, + {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, + {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, + {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, + {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, + {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, + {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, + {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, + {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, + {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, + {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, + {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, + {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, + {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, + {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, + {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, + {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, + {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, + {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, + {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, + {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, + {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, + {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, + {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, + {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, + {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, + {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, + {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, + {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, + {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, + {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -93,6 +177,7 @@ files = [ name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -207,6 +292,7 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." 
+category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -218,6 +304,7 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -293,6 +380,7 @@ toml = ["tomli"] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -380,6 +468,7 @@ toml = ["tomli"] name = "distlib" version = "0.3.9" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -391,6 +480,7 @@ files = [ name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -402,6 +492,7 @@ files = [ name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -413,6 +504,7 @@ files = [ name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -427,6 +519,7 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.2" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -442,6 +535,7 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p name = "filelock" version = "3.16.1" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -458,6 +552,7 @@ typing = ["typing-extensions (>=4.12.2)"] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -472,6 +567,7 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -483,6 +579,7 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -503,6 +600,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -526,6 +624,7 @@ type = ["pytest-mypy"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -537,6 +636,7 @@ files = [ name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -554,6 +654,7 @@ i18n = ["Babel (>=2.7)"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -623,6 +724,7 @@ files = [ name = "mypy" version = "1.4.1" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -670,6 +772,7 @@ reports = ["lxml"] name = "mypy" version = "1.12.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -722,6 +825,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -733,6 +837,7 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -744,6 +849,7 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -755,6 +861,7 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -773,6 +880,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -789,6 +897,7 @@ type = ["mypy (>=1.11.2)"] name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -807,6 +916,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -822,6 +932,7 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -833,6 +944,7 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -840,10 +952,23 @@ files = [ {file = "py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5"}, ] +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] + [[package]] name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -859,6 +984,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -873,6 +999,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyproject-api" version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -892,6 +1019,7 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytes name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -915,6 +1043,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest" version = "8.3.3" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -937,6 +1066,7 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-asyncio" version = "0.21.2" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -956,6 +1086,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-asyncio" version = "0.23.8" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -974,6 +1105,7 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -990,10 +1122,34 @@ aspect = ["aspectlib"] elasticsearch = ["elasticsearch"] histogram = ["pygal", "pygaljs"] +[[package]] +name = "pytest-codspeed" +version = "2.2.1" +description = "Pytest plugin to create CodSpeed benchmarks" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest_codspeed-2.2.1-py3-none-any.whl", hash = "sha256:aad08033015f3e6c8c14c8bf0eca475921a9b088e92c98b626bf8af8f516471e"}, + {file = "pytest_codspeed-2.2.1.tar.gz", hash = "sha256:0adc24baf01c64a6ca0a0b83b3cd704351708997e09ec086b7776c32227d4e0a"}, +] + +[package.dependencies] +cffi = ">=1.15.1" +filelock = ">=3.12.2" +pytest = ">=3.8" +setuptools = {version = "*", markers = "python_full_version >= \"3.12.0\""} + +[package.extras] +compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"] +lint = ["mypy (>=1.3.0,<1.4.0)", "ruff (>=0.3.3,<0.4.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1012,6 +1168,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-cov" version = "5.0.0" description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1030,6 +1187,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] name = "pytest-describe" version = "2.2.0" description = "Describe-style plugin for pytest" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1044,6 +1202,7 @@ pytest = ">=4.6,<9" name = "pytest-timeout" version = "2.3.1" description = "pytest plugin to abort hanging tests" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1058,6 +1217,7 @@ pytest = ">=7.0.0" name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" +category = "dev" optional = false python-versions = "*" files = [ @@ -1069,6 +1229,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1090,6 +1251,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1111,6 +1273,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1134,10 +1297,20 @@ files = [ {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] +core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.11.0,<1.12.0)", "pytest-mypy"] + [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1149,6 +1322,7 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -1160,6 +1334,7 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1195,6 +1370,7 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx" version = "7.1.2" description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1230,6 +1406,7 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-rtd-theme" version = "2.0.0" description = "Read the Docs theme for Sphinx" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1249,6 +1426,7 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1264,6 +1442,7 @@ test = ["pytest"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1279,6 +1458,7 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1294,6 +1474,7 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1309,6 +1490,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1324,6 +1506,7 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" +category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1338,6 +1521,7 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1352,6 +1536,7 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1367,6 +1552,7 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
+category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1382,6 +1568,7 @@ test = ["pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1404,6 +1591,7 @@ files = [ name = "tox" version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1430,6 +1618,7 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu name = "tox" version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1454,6 +1643,7 @@ virtualenv = ">=20.26.6" name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1504,6 +1694,7 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1515,6 +1706,7 @@ files = [ name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1526,6 +1718,7 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1543,6 +1736,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1560,6 +1754,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.26.6" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1601,6 +1796,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1616,6 +1812,7 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.8" files = [ diff --git a/pyproject.toml b/pyproject.toml index 668dd7ff..70f39c61 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,7 @@ tox = [ { version = "^4.16", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] +pytest-codspeed = "^2.2.1" [tool.poetry.group.lint] optional = true diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 53bfc98e..583075bf 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -23,5 +23,5 @@ def test_visit_all_ast_nodes(benchmark, big_schema_sdl): # noqa: F811 def test_visit_all_ast_nodes_in_parallel(benchmark, big_schema_sdl): # noqa: F811 document_ast = parse(big_schema_sdl) visitor = DummyVisitor() - parallel_visitor = ParallelVisitor([visitor] * 50) + parallel_visitor = ParallelVisitor([visitor] * 20) benchmark(lambda: visit(document_ast, parallel_visitor)) From 46244446c66ca3033f5295f6c796108e4c1f7365 Mon Sep 17 00:00:00 2001 From: Erik Wrede Date: Mon, 21 Oct 2024 21:10:06 +0200 Subject: [PATCH 20/50] fix: recreate poetry lock (#231) --- poetry.lock | 125 +++++++++++++--------------------------------------- 1 file changed, 30 insertions(+), 95 deletions(-) diff --git a/poetry.lock b/poetry.lock index dfb6a622..2d534289 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,10 +1,9 @@ -# This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -16,7 +15,6 @@ files = [ name = "babel" version = "2.14.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -34,7 +32,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "babel" version = "2.16.0" description = "Internationalization utilities" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -52,7 +49,6 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "bump2version" version = "1.0.1" description = "Version-bump your software with a single command!" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -64,7 +60,6 @@ files = [ name = "cachetools" version = "5.5.0" description = "Extensible memoizing collections and decorators" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -76,7 +71,6 @@ files = [ name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." 
-category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -88,7 +82,6 @@ files = [ name = "cffi" version = "1.15.1" description = "Foreign Function Interface for Python calling C code." -category = "dev" optional = false python-versions = "*" files = [ @@ -165,7 +158,6 @@ pycparser = "*" name = "chardet" version = "5.2.0" description = "Universal encoding detector for Python 3" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -177,7 +169,6 @@ files = [ name = "charset-normalizer" version = "3.4.0" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -292,7 +283,6 @@ files = [ name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -304,7 +294,6 @@ files = [ name = "coverage" version = "7.2.7" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -380,7 +369,6 @@ toml = ["tomli"] name = "coverage" version = "7.6.1" description = "Code coverage measurement for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -468,7 +456,6 @@ toml = ["tomli"] name = "distlib" version = "0.3.9" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" files = [ @@ -480,7 +467,6 @@ files = [ name = "docutils" version = "0.19" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -492,7 +478,6 @@ files = [ name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -504,7 +489,6 @@ files = [ name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -519,7 +503,6 @@ test = ["pytest (>=6)"] name = "filelock" version = "3.12.2" description = "A platform independent file lock." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -533,26 +516,24 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.16.1" +version = "3.16.0" description = "A platform independent file lock." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, + {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -567,7 +548,6 @@ all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2 name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -579,7 +559,6 @@ files = [ name = "importlib-metadata" version = "6.7.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -600,7 +579,6 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -624,7 +602,6 @@ type = ["pytest-mypy"] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -636,7 +613,6 @@ files = [ name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -654,7 +630,6 @@ i18n = ["Babel (>=2.7)"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -724,7 +699,6 @@ files = [ name = "mypy" version = "1.4.1" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -772,7 +746,6 @@ reports = ["lxml"] name = "mypy" version = "1.12.0" description = "Optional static typing for Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -825,7 +798,6 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -837,7 +809,6 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -849,7 +820,6 @@ files = [ name = "packaging" version = "24.1" description = "Core utilities for Python packages" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -861,7 +831,6 @@ files = [ name = "platformdirs" version = "4.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -880,7 +849,6 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co name = "platformdirs" version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -897,7 +865,6 @@ type = ["mypy (>=1.11.2)"] name = "pluggy" version = "1.2.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -916,7 +883,6 @@ testing = ["pytest", "pytest-benchmark"] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -932,7 +898,6 @@ testing = ["pytest", "pytest-benchmark"] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ @@ -944,7 +909,6 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" -category = "dev" optional = false python-versions = "*" files = [ @@ -956,7 +920,6 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -968,7 +931,6 @@ files = [ name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -984,7 +946,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -999,7 +960,6 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pyproject-api" version = "1.8.0" description = "API to interact with the python pyproject.toml based projects" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1019,7 +979,6 @@ testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytes name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1043,7 +1002,6 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest" version = "8.3.3" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1066,7 +1024,6 @@ dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments name = "pytest-asyncio" version = "0.21.2" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1086,7 +1043,6 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-asyncio" version = "0.23.8" description = "Pytest support for asyncio" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1105,7 +1061,6 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1126,7 +1081,6 @@ histogram = ["pygal", "pygaljs"] name = "pytest-codspeed" version = "2.2.1" description = "Pytest plugin to create CodSpeed benchmarks" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1149,7 +1103,6 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1168,7 +1121,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "pytest-cov" version = "5.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1187,7 +1139,6 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] name = "pytest-describe" version = "2.2.0" description = "Describe-style plugin for pytest" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1202,7 +1153,6 @@ pytest = ">=4.6,<9" name = "pytest-timeout" version = "2.3.1" description = "pytest plugin to abort hanging tests" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1217,7 +1167,6 @@ pytest = ">=7.0.0" name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" -category = "dev" optional = false python-versions = "*" files = [ @@ -1229,7 +1178,6 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1251,7 +1199,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1273,7 +1220,6 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1297,6 +1243,17 @@ files = [ {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] +[[package]] +name = "setuptools" +version = "75.2.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, + {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, +] + [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] @@ -1304,13 +1261,12 @@ cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] enabler = ["pytest-enabler (>=2.2)"] test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.11.0,<1.12.0)", "pytest-mypy"] +type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -1322,7 +1278,6 @@ files = [ name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
-category = "dev" optional = false python-versions = "*" files = [ @@ -1334,7 +1289,6 @@ files = [ name = "sphinx" version = "5.3.0" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1370,7 +1324,6 @@ test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] name = "sphinx" version = "7.1.2" description = "Python documentation generator" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1406,7 +1359,6 @@ test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] name = "sphinx-rtd-theme" version = "2.0.0" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1426,7 +1378,6 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] name = "sphinxcontrib-applehelp" version = "1.0.2" description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1442,7 +1393,6 @@ test = ["pytest"] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1458,7 +1408,6 @@ test = ["pytest"] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1474,7 +1423,6 @@ test = ["pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1490,7 +1438,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1506,7 +1453,6 @@ test = ["html5lib", "pytest"] name = "sphinxcontrib-jquery" version = "4.1" description = "Extension to include jQuery on newer Sphinx releases" -category = "dev" optional = false python-versions = ">=2.7" files = [ @@ -1521,7 +1467,6 @@ Sphinx = ">=1.8" name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1536,7 +1481,6 @@ test = ["flake8", "mypy", "pytest"] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1552,7 +1496,6 @@ test = ["pytest"] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1568,7 +1511,6 @@ test = ["pytest"] name = "tomli" version = "2.0.1" description = "A lil' TOML parser" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1591,7 +1533,6 @@ files = [ name = "tox" version = "3.28.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1616,34 +1557,35 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.23.0" +version = "4.20.0" description = "tox is a generic virtualenv management and test command line tool" -category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, - {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, + {file = "tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34"}, + {file = "tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5"}, ] [package.dependencies] cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.16.1" +filelock = ">=3.15.4" packaging = ">=24.1" -platformdirs = ">=4.3.6" +platformdirs = ">=4.2.2" pluggy = ">=1.5" -pyproject-api = ">=1.8" +pyproject-api = ">=1.7.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.6" +virtualenv = ">=20.26.3" + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] +testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] [[package]] name = "typed-ast" version = "1.5.5" description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1694,7 +1636,6 @@ files = [ name = "typing-extensions" version = "4.7.1" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1706,7 +1647,6 @@ files = [ name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" -category = "main" optional = false python-versions = ">=3.8" files = [ @@ -1718,7 +1658,6 @@ files = [ name = "urllib3" version = "2.0.7" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1736,7 +1675,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." 
-category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1754,7 +1692,6 @@ zstd = ["zstandard (>=0.18.0)"] name = "virtualenv" version = "20.26.6" description = "Virtual Python Environment builder" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1796,7 +1733,6 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "zipp" version = "3.15.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1812,7 +1748,6 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1831,4 +1766,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "450b262692d7c4cd0e88d628604e32375eef04d37d6f352cf9a93a34ed189506" +content-hash = "5d8998e59f0991b7dea9d5302fadc9c06be82722d75d9f00271efa1cd81555dd" From fc4ce56515a196a67c4d26a32107c1a9d3bca342 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 20:36:14 +0100 Subject: [PATCH 21/50] Unify action titles --- .github/workflows/benchmarks.yml | 18 +++++++++++------- .github/workflows/lint.yml | 1 + .github/workflows/publish.yml | 1 + .github/workflows/test.yml | 1 + tests/benchmarks/test_visit.py | 2 +- 5 files changed, 15 insertions(+), 8 deletions(-) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmarks.yml index 35867e43..fce1037f 100644 --- a/.github/workflows/benchmarks.yml +++ b/.github/workflows/benchmarks.yml @@ -1,4 +1,4 @@ -name: CodSpeed +name: Performance on: push: @@ -11,20 +11,24 @@ jobs: benchmarks: name: 📈 Benchmarks runs-on: ubuntu-latest + steps: - uses: actions/checkout@v4 - - uses: actions/setup-python@v5 + + - name: Set up Python 3.12 + uses: actions/setup-python@v5 id: setup-python with: python-version: "3.12" architecture: x64 - - run: pipx install poetry - - - run: poetry env use 3.12 - - run: poetry install --with test + - name: Install with poetry + run: | + pipx install poetry + poetry env use 3.12 + poetry install --with test - - name: Run benchmarks + - name: Run benchmarks with CodSpeed uses: CodSpeedHQ/action@v3 with: token: ${{ secrets.CODSPEED_TOKEN }} diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 74f14604..703a56aa 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -4,6 +4,7 @@ on: [push, pull_request] jobs: lint: + name: 🧹 Lint runs-on: ubuntu-latest steps: diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 561b3028..8bd8c296 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -7,6 +7,7 @@ on: jobs: build: + name: 🏗️ Build runs-on: ubuntu-latest steps: diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 8959d0de..298d3dd0 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -4,6 +4,7 @@ on: [push, pull_request] jobs: tests: + name: 🧪 Tests runs-on: ubuntu-latest strategy: diff --git a/tests/benchmarks/test_visit.py b/tests/benchmarks/test_visit.py index 583075bf..4e7a85a2 100644 --- a/tests/benchmarks/test_visit.py +++ b/tests/benchmarks/test_visit.py @@ -23,5 +23,5 @@ def test_visit_all_ast_nodes(benchmark, big_schema_sdl): # noqa: F811 def test_visit_all_ast_nodes_in_parallel(benchmark, 
big_schema_sdl): # noqa: F811 document_ast = parse(big_schema_sdl) visitor = DummyVisitor() - parallel_visitor = ParallelVisitor([visitor] * 20) + parallel_visitor = ParallelVisitor([visitor] * 25) benchmark(lambda: visit(document_ast, parallel_visitor)) From e4a15d6abfe38344f3dc46f49ea78a079c2e93de Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 21:43:56 +0100 Subject: [PATCH 22/50] Fix dependencies --- .../{benchmarks.yml => benchmark.yml} | 0 poetry.lock | 420 +++++++++++++----- pyproject.toml | 7 +- tests/execution/test_oneof.py | 2 +- tox.ini | 2 +- 5 files changed, 319 insertions(+), 112 deletions(-) rename .github/workflows/{benchmarks.yml => benchmark.yml} (100%) diff --git a/.github/workflows/benchmarks.yml b/.github/workflows/benchmark.yml similarity index 100% rename from .github/workflows/benchmarks.yml rename to .github/workflows/benchmark.yml diff --git a/poetry.lock b/poetry.lock index 2d534289..abd0077f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.5 and should not be changed by hand. [[package]] name = "alabaster" @@ -69,13 +69,13 @@ files = [ [[package]] name = "certifi" -version = "2024.8.30" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, ] [[package]] @@ -154,6 +154,85 @@ files = [ [package.dependencies] pycparser = "*" +[[package]] +name = "cffi" +version = "1.17.1" +description = "Foreign Function Interface for Python calling C code." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, + {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, + {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, + {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, + {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, + {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, + {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, + {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, + {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, + {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash 
= "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, + {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, + {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, + {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, + {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, + {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, + {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, + {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, + {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, + {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, + {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, + {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", 
hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, + {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, + {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, + {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, + {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, + {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, + {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, + {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, + {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, + {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, + {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, +] + +[package.dependencies] +pycparser = "*" + [[package]] name = "chardet" version = "5.2.0" @@ -516,18 +595,18 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "p [[package]] name = "filelock" -version = "3.16.0" +version = 
"3.16.1" description = "A platform independent file lock." optional = false python-versions = ">=3.8" files = [ - {file = "filelock-3.16.0-py3-none-any.whl", hash = "sha256:f6ed4c963184f4c84dd5557ce8fece759a3724b37b80c6c4f20a2f63a4dc6609"}, - {file = "filelock-3.16.0.tar.gz", hash = "sha256:81de9eb8453c769b63369f87f11131a7ab04e367f8d97ad39dc230daa07e3bec"}, + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.1.1)", "pytest (>=8.3.2)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.3)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] @@ -626,6 +705,30 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "markdown-it-py" +version = "3.0.0" +description = "Python port of markdown-it. Markdown parsing, done right!" +optional = false +python-versions = ">=3.8" +files = [ + {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, + {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, +] + +[package.dependencies] +mdurl = ">=0.1,<1.0" + +[package.extras] +benchmarking = ["psutil", "pytest", "pytest-benchmark"] +code-style = ["pre-commit (>=3.0,<4.0)"] +compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] +linkify = ["linkify-it-py (>=1,<3)"] +plugins = ["mdit-py-plugins"] +profiling = ["gprof2dot"] +rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] +testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] + [[package]] name = "markupsafe" version = "2.1.5" @@ -695,6 +798,17 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "mdurl" +version = "0.1.2" +description = "Markdown URL utilities" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, + {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, +] + [[package]] name = "mypy" version = "1.4.1" @@ -744,43 +858,43 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.12.0" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, - {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, - {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, - {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, - {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, - {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, - {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, - {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, - {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, - {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, - {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, - {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, - {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, - {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, - {file = "mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, - {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, - {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, - 
{file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, - {file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, - {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, - {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, - {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, - {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, - {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = 
"mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] @@ -790,6 +904,7 @@ typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] @@ -818,13 +933,13 @@ files = [ [[package]] name = "packaging" -version = "24.1" +version = "24.2" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, - {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] @@ -927,6 +1042,17 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pycparser" +version = "2.22" +description = "C parser in Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, + {file = 
"pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, +] + [[package]] name = "pygments" version = "2.17.2" @@ -1000,13 +1126,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.3" +version = "8.3.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.3-py3-none-any.whl", hash = "sha256:a6853c7375b2663155079443d2e45de913a911a11d669df02a50814944db57b2"}, - {file = "pytest-8.3.3.tar.gz", hash = "sha256:70b98107bd648308a7952b06e6ca9a50bc660be218d53c257cc1fc94fda10181"}, + {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, + {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, ] [package.dependencies] @@ -1092,13 +1218,43 @@ files = [ cffi = ">=1.15.1" filelock = ">=3.12.2" pytest = ">=3.8" -setuptools = {version = "*", markers = "python_full_version >= \"3.12.0\""} [package.extras] compat = ["pytest-benchmark (>=4.0.0,<4.1.0)", "pytest-xdist (>=2.0.0,<2.1.0)"] lint = ["mypy (>=1.3.0,<1.4.0)", "ruff (>=0.3.3,<0.4.0)"] test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] +[[package]] +name = "pytest-codspeed" +version = "3.1.0" +description = "Pytest plugin to create CodSpeed benchmarks" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, + {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, + {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, + {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, + {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, + {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, + {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, + {file = "pytest_codspeed-3.1.0.tar.gz", hash = 
"sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, +] + +[package.dependencies] +cffi = ">=1.17.1" +importlib-metadata = {version = ">=8.5.0", markers = "python_version < \"3.10\""} +pytest = ">=3.8" +rich = ">=13.8.1" + +[package.extras] +compat = ["pytest-benchmark (>=5.0.0,<5.1.0)", "pytest-xdist (>=3.6.1,<3.7.0)"] +lint = ["mypy (>=1.11.2,<1.12.0)", "ruff (>=0.6.5,<0.7.0)"] +test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] + [[package]] name = "pytest-cov" version = "4.1.0" @@ -1216,62 +1372,61 @@ urllib3 = ">=1.21.1,<3" socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] +[[package]] +name = "rich" +version = "13.9.4" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +optional = false +python-versions = ">=3.8.0" +files = [ + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, +] + +[package.dependencies] +markdown-it-py = ">=2.2.0" +pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<9)"] + [[package]] name = "ruff" -version = "0.7.0" +version = "0.8.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, - {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, - {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, - {file = 
"ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, - {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, - {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, - {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, - {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, -] - -[[package]] -name = "setuptools" -version = "75.2.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-75.2.0-py3-none-any.whl", hash = "sha256:a7fcb66f68b4d9e8e66b42f9876150a3371558f98fa32222ffaa5bced76406f8"}, - {file = "setuptools-75.2.0.tar.gz", hash = "sha256:753bb6ebf1f465a1912e19ed1d41f403a79173a9acf66a42e7e6aec45c3c16ec"}, + {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, + {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, + {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, + {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, + {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, + {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, + {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, + {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = 
"sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, + {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, ] -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.5.2)"] -core = ["importlib-metadata (>=6)", "importlib-resources (>=5.10.2)", "jaraco.collections", "jaraco.functools", "jaraco.text (>=3.7)", "more-itertools", "more-itertools (>=8.8)", "packaging", "packaging (>=24)", "platformdirs (>=2.6.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib-metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.11.*)", "pytest-mypy"] - [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -1520,13 +1675,43 @@ files = [ [[package]] name = "tomli" -version = "2.0.2" +version = "2.2.1" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ - {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, - {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = 
"tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = 
"sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, ] [[package]] @@ -1557,30 +1742,30 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.20.0" +version = "4.23.2" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.20.0-py3-none-any.whl", hash = "sha256:21a8005e3d3fe5658a8e36b8ca3ed13a4230429063c5cc2a2fdac6ee5aa0de34"}, - {file = "tox-4.20.0.tar.gz", hash = "sha256:5b78a49b6eaaeab3ae4186415e7c97d524f762ae967c63562687c3e5f0ec23d5"}, + {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, + {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, ] [package.dependencies] cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.15.4" +filelock = ">=3.16.1" packaging = ">=24.1" -platformdirs = ">=4.2.2" +platformdirs = ">=4.3.6" pluggy = ">=1.5" -pyproject-api = ">=1.7.1" +pyproject-api = ">=1.8" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.3" +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.26.6" [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.17)", "sphinx-autodoc-typehints (>=2.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] -testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1)", "diff-cover (>=9.1.1)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=74.1.2)", "time-machine (>=2.15)", "wheel (>=0.44)"] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] [[package]] name = "typed-ast" @@ -1711,13 +1896,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.27.0" +version = "20.28.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.27.0-py3-none-any.whl", hash = "sha256:44a72c29cceb0ee08f300b314848c86e57bf8d1f13107a5e671fb9274138d655"}, - {file = "virtualenv-20.27.0.tar.gz", hash = "sha256:2ca56a68ed615b8fe4326d11a0dca5dfbe8fd68510fb6c6349163bed3c15f2b2"}, + {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, + {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, ] [package.dependencies] @@ -1763,7 +1948,26 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", 
"pytest-ignore-flaky"] type = ["pytest-mypy"] +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "5d8998e59f0991b7dea9d5302fadc9c06be82722d75d9f00271efa1cd81555dd" +content-hash = "2f41e2d562a00d6905a8b02cd7ccf5dbcc2fb0218476addd64faff18ee8b46bf" diff --git a/pyproject.toml b/pyproject.toml index 70f39c61..30026bc5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,17 +67,20 @@ pytest-cov = [ ] pytest-describe = "^2.2" pytest-timeout = "^2.3" +pytest-codspeed = [ + { version = "^3.1.0", python = ">=3.9" }, + { version = "^2.2.1", python = "<3.8" } +] tox = [ { version = "^4.16", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] -pytest-codspeed = "^2.2.1" [tool.poetry.group.lint] optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.7,<0.8" +ruff = ">=0.8,<0.9" mypy = [ { version = "^1.12", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py index 2df1000d..81f3d224 100644 --- a/tests/execution/test_oneof.py +++ b/tests/execution/test_oneof.py @@ -35,7 +35,7 @@ def execute_query( def describe_execute_handles_one_of_input_objects(): def describe_one_of_input_objects(): root_value = { - "test": lambda _info, input: input, # noqa: A002 + "test": lambda _info, input: input, } def accepts_a_good_default_value(): diff --git a/tox.ini b/tox.ini index fcd4f015..c998afd8 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.7,<0.8 +deps = ruff>=0.8,<0.9 commands = ruff check src tests ruff format --check src tests From 59d478af061af9721a9a04ebc6def17c6fc30c55 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Tue, 17 Dec 2024 22:10:22 +0100 Subject: [PATCH 23/50] Fix sorting of exported names --- pyproject.toml | 1 - src/graphql/__init__.py | 576 +++++++++--------- src/graphql/error/graphql_error.py | 10 +- src/graphql/execution/__init__.py | 32 +- src/graphql/execution/collect_fields.py | 4 +- src/graphql/execution/execute.py | 8 +- .../execution/incremental_publisher.py | 12 +- src/graphql/execution/middleware.py | 2 +- src/graphql/language/__init__.py | 152 ++--- src/graphql/language/ast.py | 140 ++--- src/graphql/language/block_string.py | 3 +- src/graphql/language/character_classes.py | 2 +- src/graphql/language/location.py | 2 +- src/graphql/language/parser.py | 2 +- src/graphql/language/predicates.py | 8 +- src/graphql/language/source.py | 2 +- src/graphql/language/visitor.py | 10 +- src/graphql/pyutils/__init__.py | 32 +- src/graphql/pyutils/format_list.py | 2 +- src/graphql/pyutils/is_awaitable.py | 6 +- 
src/graphql/type/__init__.py | 216 +++---- src/graphql/type/assert_name.py | 2 +- src/graphql/type/definition.py | 96 +-- src/graphql/type/directives.py | 16 +- src/graphql/type/scalars.py | 14 +- src/graphql/type/schema.py | 2 +- src/graphql/type/validate.py | 2 +- src/graphql/utilities/__init__.py | 6 +- src/graphql/utilities/extend_schema.py | 2 +- .../utilities/find_breaking_changes.py | 7 +- .../utilities/get_introspection_query.py | 2 +- src/graphql/utilities/print_schema.py | 4 +- src/graphql/utilities/type_comparators.py | 2 +- src/graphql/validation/__init__.py | 32 +- ...ream_directive_on_valid_operations_rule.py | 3 +- .../validation/rules/known_argument_names.py | 2 +- .../rules/provided_required_arguments.py | 2 +- src/graphql/validation/validate.py | 2 +- src/graphql/version.py | 2 +- tests/execution/test_schema.py | 2 +- tests/fixtures/__init__.py | 4 +- tests/utils/__init__.py | 2 +- tests/validation/harness.py | 4 +- 43 files changed, 715 insertions(+), 717 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 30026bc5..7cdedaa9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -149,7 +149,6 @@ select = [ "YTT", # flake8-2020 ] ignore = [ - "ANN101", "ANN102", # no type annotation for self and cls needed "ANN401", # allow explicit Any "COM812", # allow trailing commas for auto-formatting "D105", "D107", # no docstring needed for magic methods diff --git a/src/graphql/__init__.py b/src/graphql/__init__.py index f70e77b0..6938435a 100644 --- a/src/graphql/__init__.py +++ b/src/graphql/__init__.py @@ -474,345 +474,345 @@ __all__ = [ - "version", - "version_info", - "version_js", - "version_info_js", - "graphql", - "graphql_sync", - "GraphQLSchema", - "GraphQLDirective", - "GraphQLScalarType", - "GraphQLObjectType", - "GraphQLInterfaceType", - "GraphQLUnionType", - "GraphQLEnumType", - "GraphQLInputObjectType", - "GraphQLList", - "GraphQLNonNull", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", - "GraphQLString", - "GraphQLBoolean", - "GraphQLID", + "BREAK", + "DEFAULT_DEPRECATION_REASON", "GRAPHQL_MAX_INT", "GRAPHQL_MIN_INT", - "specified_directives", - "GraphQLIncludeDirective", - "GraphQLSkipDirective", - "GraphQLDeferDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", - "GraphQLSpecifiedByDirective", - "GraphQLOneOfDirective", - "TypeKind", - "DEFAULT_DEPRECATION_REASON", - "introspection_types", - "SchemaMetaFieldDef", - "TypeMetaFieldDef", - "TypeNameMetaFieldDef", - "is_schema", - "is_directive", - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "is_specified_scalar_type", - "is_introspection_type", - "is_specified_directive", - "assert_schema", - "assert_directive", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", - "validate_schema", - "assert_valid_schema", - 
"assert_name", - "assert_enum_value_name", - "GraphQLType", - "GraphQLInputType", - "GraphQLOutputType", - "GraphQLLeafType", - "GraphQLCompositeType", + "IDLE", + "REMOVE", + "SKIP", + "ASTValidationRule", + "ArgumentNode", + "BooleanValueNode", + "BreakingChange", + "BreakingChangeType", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", + "DangerousChange", + "DangerousChangeType", + "DefinitionNode", + "DirectiveDefinitionNode", + "DirectiveLocation", + "DirectiveNode", + "DocumentNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", + "ExecutableDefinitionNode", + "ExecutableDefinitionsRule", + "ExecutionContext", + "ExecutionResult", + "ExperimentalIncrementalExecutionResults", + "FieldDefinitionNode", + "FieldNode", + "FieldsOnCorrectTypeRule", + "FloatValueNode", + "FormattedExecutionResult", + "FormattedIncrementalDeferResult", + "FormattedIncrementalResult", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "FragmentDefinitionNode", + "FragmentSpreadNode", + "FragmentsOnCompositeTypesRule", "GraphQLAbstractType", - "GraphQLWrappingType", - "GraphQLNullableType", - "GraphQLNullableInputType", - "GraphQLNullableOutputType", - "GraphQLNamedType", - "GraphQLNamedInputType", - "GraphQLNamedOutputType", - "Thunk", - "ThunkCollection", - "ThunkMapping", "GraphQLArgument", + "GraphQLArgumentKwargs", "GraphQLArgumentMap", + "GraphQLBoolean", + "GraphQLCompositeType", + "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", + "GraphQLDirective", + "GraphQLDirectiveKwargs", + "GraphQLEnumType", + "GraphQLEnumTypeKwargs", "GraphQLEnumValue", + "GraphQLEnumValueKwargs", "GraphQLEnumValueMap", + "GraphQLError", + "GraphQLErrorExtensions", "GraphQLField", + "GraphQLFieldKwargs", "GraphQLFieldMap", "GraphQLFieldResolver", + "GraphQLFloat", + "GraphQLFormattedError", + "GraphQLID", + "GraphQLIncludeDirective", "GraphQLInputField", + "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", "GraphQLInputFieldOutType", - "GraphQLScalarSerializer", - "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLIsTypeOfFn", - "GraphQLResolveInfo", - "ResponsePath", - "GraphQLTypeResolver", - "GraphQLArgumentKwargs", - "GraphQLDirectiveKwargs", - "GraphQLEnumTypeKwargs", - "GraphQLEnumValueKwargs", - "GraphQLFieldKwargs", - "GraphQLInputFieldKwargs", + "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", + "GraphQLInputType", + "GraphQLInt", + "GraphQLInterfaceType", "GraphQLInterfaceTypeKwargs", + "GraphQLIsTypeOfFn", + "GraphQLLeafType", + "GraphQLList", + "GraphQLNamedInputType", + "GraphQLNamedOutputType", + "GraphQLNamedType", "GraphQLNamedTypeKwargs", + "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", "GraphQLObjectTypeKwargs", + "GraphQLOneOfDirective", + "GraphQLOutputType", + "GraphQLResolveInfo", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", + "GraphQLScalarType", "GraphQLScalarTypeKwargs", + "GraphQLScalarValueParser", + "GraphQLSchema", "GraphQLSchemaKwargs", - "GraphQLUnionTypeKwargs", - "Source", - "get_location", - "print_location", - "print_source_location", - "Lexer", - "TokenKind", - "parse", - "parse_value", - "parse_const_value", - "parse_type", - "print_ast", - "visit", - "ParallelVisitor", - "TypeInfoVisitor", - "Visitor", - 
"VisitorAction", - "VisitorKeyMap", - "BREAK", - "SKIP", - "REMOVE", - "IDLE", - "DirectiveLocation", - "is_definition_node", - "is_executable_definition_node", - "is_nullability_assertion_node", - "is_selection_node", - "is_value_node", - "is_const_value_node", - "is_type_node", - "is_type_system_definition_node", - "is_type_definition_node", - "is_type_system_extension_node", - "is_type_extension_node", - "SourceLocation", - "Location", - "Token", - "Node", - "NameNode", - "DocumentNode", - "DefinitionNode", - "ExecutableDefinitionNode", - "OperationDefinitionNode", - "OperationType", - "VariableDefinitionNode", - "VariableNode", - "SelectionSetNode", - "SelectionNode", - "FieldNode", - "ArgumentNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ConstArgumentNode", - "FragmentSpreadNode", - "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", - "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", - "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", - "NamedTypeNode", - "ListTypeNode", - "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", - "OperationTypeDefinitionNode", - "TypeDefinitionNode", - "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", - "TypeSystemExtensionNode", - "SchemaExtensionNode", - "TypeExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", - "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", - "execute", - "execute_sync", - "default_field_resolver", - "default_type_resolver", - "get_argument_values", - "get_directive_values", - "get_variable_values", - "ExecutionContext", - "ExecutionResult", - "ExperimentalIncrementalExecutionResults", - "InitialIncrementalExecutionResult", - "SubsequentIncrementalExecutionResult", + "GraphQLSkipDirective", + "GraphQLSpecifiedByDirective", + "GraphQLStreamDirective", + "GraphQLString", + "GraphQLSyntaxError", + "GraphQLType", + "GraphQLTypeResolver", + "GraphQLUnionType", + "GraphQLUnionTypeKwargs", + "GraphQLWrappingType", "IncrementalDeferResult", - "IncrementalStreamResult", "IncrementalResult", - "FormattedExecutionResult", - "FormattedInitialIncrementalExecutionResult", - "FormattedSubsequentIncrementalExecutionResult", - "FormattedIncrementalDeferResult", - "FormattedIncrementalStreamResult", - "FormattedIncrementalResult", - "Middleware", - "MiddlewareManager", - "subscribe", - "create_source_event_stream", - "map_async_iterable", - "validate", - "ValidationContext", - "ValidationRule", - "ASTValidationRule", - "SDLValidationRule", - "specified_rules", - "ExecutableDefinitionsRule", - "FieldsOnCorrectTypeRule", - "FragmentsOnCompositeTypesRule", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", + "InlineFragmentNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", + "IntValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "IntrospectionQuery", "KnownArgumentNamesRule", 
"KnownDirectivesRule", "KnownFragmentNamesRule", "KnownTypeNamesRule", + "Lexer", + "ListNullabilityOperatorNode", + "ListTypeNode", + "ListValueNode", + "Location", "LoneAnonymousOperationRule", + "LoneSchemaDefinitionRule", + "Middleware", + "MiddlewareManager", + "NameNode", + "NamedTypeNode", + "NoDeprecatedCustomRule", "NoFragmentCyclesRule", + "NoSchemaIntrospectionCustomRule", "NoUndefinedVariablesRule", "NoUnusedFragmentsRule", "NoUnusedVariablesRule", + "Node", + "NonNullAssertionNode", + "NonNullTypeNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", + "OperationType", + "OperationTypeDefinitionNode", "OverlappingFieldsCanBeMergedRule", + "ParallelVisitor", "PossibleFragmentSpreadsRule", + "PossibleTypeExtensionsRule", "ProvidedRequiredArgumentsRule", + "ResponsePath", + "SDLValidationRule", "ScalarLeafsRule", + "ScalarTypeDefinitionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", + "SchemaExtensionNode", + "SchemaMetaFieldDef", + "SelectionNode", + "SelectionSetNode", "SingleFieldSubscriptionsRule", + "Source", + "SourceLocation", + "StringValueNode", + "SubsequentIncrementalExecutionResult", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "Token", + "TokenKind", + "TypeDefinitionNode", + "TypeExtensionNode", + "TypeInfo", + "TypeInfoVisitor", + "TypeKind", + "TypeMetaFieldDef", + "TypeNameMetaFieldDef", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "Undefined", + "UndefinedType", + "UnionTypeDefinitionNode", + "UnionTypeExtensionNode", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", + "ValueNode", "ValuesOfCorrectTypeRule", + "VariableDefinitionNode", + "VariableNode", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", - "GraphQLError", - "GraphQLErrorExtensions", - "GraphQLFormattedError", - "GraphQLSyntaxError", - "located_error", - "get_introspection_query", - "IntrospectionQuery", - "get_operation_ast", - "introspection_from_schema", - "build_client_schema", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", + "assert_valid_schema", + "assert_wrapping_type", + "ast_from_value", + "ast_to_dict", "build_ast_schema", + "build_client_schema", "build_schema", + "coerce_input_value", + "concat_ast", + "create_source_event_stream", + "default_field_resolver", + 
"default_type_resolver", + "do_types_overlap", + "execute", + "execute_sync", "extend_schema", + "find_breaking_changes", + "find_dangerous_changes", + "get_argument_values", + "get_directive_values", + "get_introspection_query", + "get_location", + "get_named_type", + "get_nullable_type", + "get_operation_ast", + "get_variable_values", + "graphql", + "graphql_sync", + "introspection_from_schema", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_const_value_node", + "is_definition_node", + "is_directive", + "is_enum_type", + "is_equal_type", + "is_executable_definition_node", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullability_assertion_node", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_selection_node", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_type_definition_node", + "is_type_extension_node", + "is_type_node", + "is_type_sub_type_of", + "is_type_system_definition_node", + "is_type_system_extension_node", + "is_union_type", + "is_value_node", + "is_wrapping_type", "lexicographic_sort_schema", - "print_schema", - "print_type", + "located_error", + "map_async_iterable", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", "print_directive", "print_introspection_schema", + "print_location", + "print_schema", + "print_source_location", + "print_type", + "resolve_thunk", + "separate_operations", + "specified_directives", + "specified_rules", + "specified_scalar_types", + "strip_ignored_characters", + "subscribe", "type_from_ast", + "validate", + "validate_schema", "value_from_ast", "value_from_ast_untyped", - "ast_from_value", - "ast_to_dict", - "TypeInfo", - "coerce_input_value", - "concat_ast", - "separate_operations", - "strip_ignored_characters", - "is_equal_type", - "is_type_sub_type_of", - "do_types_overlap", - "find_breaking_changes", - "find_dangerous_changes", - "BreakingChange", - "BreakingChangeType", - "DangerousChange", - "DangerousChangeType", - "Undefined", - "UndefinedType", + "version", + "version_info", + "version_info_js", + "version_js", + "visit", ] diff --git a/src/graphql/error/graphql_error.py b/src/graphql/error/graphql_error.py index ff128748..8123a713 100644 --- a/src/graphql/error/graphql_error.py +++ b/src/graphql/error/graphql_error.py @@ -108,14 +108,14 @@ class GraphQLError(Exception): """Extension fields to add to the formatted error""" __slots__ = ( + "extensions", + "locations", "message", "nodes", - "source", - "positions", - "locations", - "path", "original_error", - "extensions", + "path", + "positions", + "source", ) __hash__ = Exception.__hash__ diff --git a/src/graphql/execution/__init__.py b/src/graphql/execution/__init__.py index 2d5225be..375ec400 100644 --- a/src/graphql/execution/__init__.py +++ b/src/graphql/execution/__init__.py @@ -37,31 +37,31 @@ __all__ = [ "ASYNC_DELAY", - "create_source_event_stream", - "execute", - "experimental_execute_incrementally", - "execute_sync", - "default_field_resolver", - "default_type_resolver", - "subscribe", "ExecutionContext", "ExecutionResult", "ExperimentalIncrementalExecutionResults", - "InitialIncrementalExecutionResult", - "SubsequentIncrementalExecutionResult", - "IncrementalDeferResult", - "IncrementalStreamResult", - "IncrementalResult", "FormattedExecutionResult", 
- "FormattedInitialIncrementalExecutionResult", - "FormattedSubsequentIncrementalExecutionResult", "FormattedIncrementalDeferResult", - "FormattedIncrementalStreamResult", "FormattedIncrementalResult", - "map_async_iterable", + "FormattedIncrementalStreamResult", + "FormattedInitialIncrementalExecutionResult", + "FormattedSubsequentIncrementalExecutionResult", + "IncrementalDeferResult", + "IncrementalResult", + "IncrementalStreamResult", + "InitialIncrementalExecutionResult", "Middleware", "MiddlewareManager", + "SubsequentIncrementalExecutionResult", + "create_source_event_stream", + "default_field_resolver", + "default_type_resolver", + "execute", + "execute_sync", + "experimental_execute_incrementally", "get_argument_values", "get_directive_values", "get_variable_values", + "map_async_iterable", + "subscribe", ] diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 5cb5a723..4f581252 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -33,11 +33,11 @@ __all__ = [ - "collect_fields", - "collect_subfields", "FieldGroup", "FieldsAndPatches", "GroupedFieldSet", + "collect_fields", + "collect_subfields", ] if sys.version_info < (3, 9): diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ca4df8ff..30a6234d 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -27,9 +27,9 @@ from typing_extensions import TypeAlias, TypeGuard try: # only needed for Python < 3.11 # noinspection PyCompatibility - from asyncio.exceptions import TimeoutError + from asyncio.exceptions import TimeoutError # noqa: A004 except ImportError: # Python < 3.7 - from concurrent.futures import TimeoutError + from concurrent.futures import TimeoutError # noqa: A004 from ..error import GraphQLError, located_error from ..language import ( @@ -98,6 +98,8 @@ async def anext(iterator: AsyncIterator) -> Any: __all__ = [ "ASYNC_DELAY", + "ExecutionContext", + "Middleware", "create_source_event_stream", "default_field_resolver", "default_type_resolver", @@ -105,8 +107,6 @@ async def anext(iterator: AsyncIterator) -> Any: "execute_sync", "experimental_execute_incrementally", "subscribe", - "ExecutionContext", - "Middleware", ] suppress_exceptions = suppress(Exception) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index fdc35fff..a1b8c507 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -37,13 +37,13 @@ "FormattedIncrementalStreamResult", "FormattedInitialIncrementalExecutionResult", "FormattedSubsequentIncrementalExecutionResult", - "InitialIncrementalExecutionResult", - "InitialResultRecord", "IncrementalDataRecord", "IncrementalDeferResult", "IncrementalPublisher", "IncrementalResult", "IncrementalStreamResult", + "InitialIncrementalExecutionResult", + "InitialResultRecord", "StreamItemsRecord", "SubsequentIncrementalExecutionResult", ] @@ -151,7 +151,7 @@ class InitialIncrementalExecutionResult: has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "has_next", "incremental", "extensions" + __slots__ = "data", "errors", "extensions", "has_next", "incremental" def __init__( self, @@ -257,7 +257,7 @@ class IncrementalDeferResult: label: str | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "path", "label", "extensions" + __slots__ = "data", "errors", "extensions", "label", "path" def __init__( 
self, @@ -350,7 +350,7 @@ class IncrementalStreamResult: label: str | None extensions: dict[str, Any] | None - __slots__ = "items", "errors", "path", "label", "extensions" + __slots__ = "errors", "extensions", "items", "label", "path" def __init__( self, @@ -446,7 +446,7 @@ class SubsequentIncrementalExecutionResult: - ``incremental`` is a list of the results from defer/stream directives. """ - __slots__ = "has_next", "incremental", "extensions" + __slots__ = "extensions", "has_next", "incremental" incremental: Sequence[IncrementalResult] | None has_next: bool diff --git a/src/graphql/execution/middleware.py b/src/graphql/execution/middleware.py index de99e12b..6d999171 100644 --- a/src/graphql/execution/middleware.py +++ b/src/graphql/execution/middleware.py @@ -30,7 +30,7 @@ class MiddlewareManager: """ # allow custom attributes (not used internally) - __slots__ = "__dict__", "middlewares", "_middleware_resolvers", "_cached_resolvers" + __slots__ = "__dict__", "_cached_resolvers", "_middleware_resolvers", "middlewares" _cached_resolvers: dict[GraphQLFieldResolver, GraphQLFieldResolver] _middleware_resolvers: list[Callable] | None diff --git a/src/graphql/language/__init__.py b/src/graphql/language/__init__.py index 2f105a98..bd5e7be1 100644 --- a/src/graphql/language/__init__.py +++ b/src/graphql/language/__init__.py @@ -115,104 +115,104 @@ from .directive_locations import DirectiveLocation __all__ = [ - "get_location", - "SourceLocation", - "FormattedSourceLocation", - "print_location", - "print_source_location", - "TokenKind", - "Lexer", - "parse", - "parse_value", - "parse_const_value", - "parse_type", - "print_ast", - "Source", - "visit", - "Visitor", - "ParallelVisitor", - "VisitorAction", - "VisitorKeyMap", "BREAK", - "SKIP", - "REMOVE", "IDLE", - "Location", - "Token", + "REMOVE", + "SKIP", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", + "DefinitionNode", + "DirectiveDefinitionNode", "DirectiveLocation", - "Node", - "NameNode", + "DirectiveNode", "DocumentNode", - "DefinitionNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", "ExecutableDefinitionNode", - "OperationDefinitionNode", - "OperationType", - "VariableDefinitionNode", - "VariableNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + "FormattedSourceLocation", + "FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "Lexer", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + "NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", 
+ "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", + "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", + "ParallelVisitor", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", - "TypeSystemExtensionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "Source", + "SourceLocation", + "StringValueNode", + "Token", + "TokenKind", + "TypeDefinitionNode", "TypeExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", + "TypeSystemExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", + "Visitor", + "VisitorAction", + "VisitorKeyMap", + "get_location", + "is_const_value_node", "is_definition_node", "is_executable_definition_node", "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", + "parse", + "parse_const_value", + "parse_type", + "parse_value", + "print_ast", + "print_location", + "print_source_location", + "visit", ] diff --git a/src/graphql/language/ast.py b/src/graphql/language/ast.py index 5b61767d..a67ee1ea 100644 --- a/src/graphql/language/ast.py +++ b/src/graphql/language/ast.py @@ -19,73 +19,73 @@ __all__ = [ - "Location", - "Token", - "Node", - "NameNode", - "DocumentNode", + "QUERY_DOCUMENT_KEYS", + "ArgumentNode", + "BooleanValueNode", + "ConstArgumentNode", + "ConstDirectiveNode", + "ConstListValueNode", + "ConstObjectFieldNode", + "ConstObjectValueNode", + "ConstValueNode", "DefinitionNode", + "DirectiveDefinitionNode", + "DirectiveNode", + "DocumentNode", + "EnumTypeDefinitionNode", + "EnumTypeExtensionNode", + "EnumValueDefinitionNode", + "EnumValueNode", + "ErrorBoundaryNode", "ExecutableDefinitionNode", - "OperationDefinitionNode", - "VariableDefinitionNode", - "SelectionSetNode", - "SelectionNode", + "FieldDefinitionNode", "FieldNode", - "NullabilityAssertionNode", - "NonNullAssertionNode", - "ErrorBoundaryNode", - "ListNullabilityOperatorNode", - "ArgumentNode", - "ConstArgumentNode", + "FloatValueNode", + "FragmentDefinitionNode", "FragmentSpreadNode", "InlineFragmentNode", - "FragmentDefinitionNode", - "ValueNode", - "ConstValueNode", - "VariableNode", + "InputObjectTypeDefinitionNode", + "InputObjectTypeExtensionNode", + "InputValueDefinitionNode", "IntValueNode", - "FloatValueNode", - "StringValueNode", - "BooleanValueNode", - "NullValueNode", - "EnumValueNode", + "InterfaceTypeDefinitionNode", + "InterfaceTypeExtensionNode", + "ListNullabilityOperatorNode", + "ListTypeNode", "ListValueNode", - "ConstListValueNode", - "ObjectValueNode", - "ConstObjectValueNode", - "ObjectFieldNode", - "ConstObjectFieldNode", - "DirectiveNode", - "ConstDirectiveNode", - "TypeNode", + "Location", + "NameNode", "NamedTypeNode", - "ListTypeNode", + "Node", + 
"NonNullAssertionNode", "NonNullTypeNode", - "TypeSystemDefinitionNode", - "SchemaDefinitionNode", + "NullValueNode", + "NullabilityAssertionNode", + "ObjectFieldNode", + "ObjectTypeDefinitionNode", + "ObjectTypeExtensionNode", + "ObjectValueNode", + "OperationDefinitionNode", "OperationType", "OperationTypeDefinitionNode", - "TypeDefinitionNode", "ScalarTypeDefinitionNode", - "ObjectTypeDefinitionNode", - "FieldDefinitionNode", - "InputValueDefinitionNode", - "InterfaceTypeDefinitionNode", - "UnionTypeDefinitionNode", - "EnumTypeDefinitionNode", - "EnumValueDefinitionNode", - "InputObjectTypeDefinitionNode", - "DirectiveDefinitionNode", + "ScalarTypeExtensionNode", + "SchemaDefinitionNode", "SchemaExtensionNode", + "SelectionNode", + "SelectionSetNode", + "StringValueNode", + "Token", + "TypeDefinitionNode", "TypeExtensionNode", + "TypeNode", + "TypeSystemDefinitionNode", "TypeSystemExtensionNode", - "ScalarTypeExtensionNode", - "ObjectTypeExtensionNode", - "InterfaceTypeExtensionNode", + "UnionTypeDefinitionNode", "UnionTypeExtensionNode", - "EnumTypeExtensionNode", - "InputObjectTypeExtensionNode", - "QUERY_DOCUMENT_KEYS", + "ValueNode", + "VariableDefinitionNode", + "VariableNode", ] @@ -95,7 +95,7 @@ class Token: Represents a range of characters represented by a lexical token within a Source. """ - __slots__ = "kind", "start", "end", "line", "column", "prev", "next", "value" + __slots__ = "column", "end", "kind", "line", "next", "prev", "start", "value" kind: TokenKind # the kind of token start: int # the character offset at which this Node begins @@ -202,11 +202,11 @@ class Location: """ __slots__ = ( - "start", "end", - "start_token", "end_token", "source", + "start", + "start_token", ) start: int # character offset at which this Node begins @@ -345,7 +345,7 @@ class Node: """AST nodes""" # allow custom attributes and weak references (not used internally) - __slots__ = "__dict__", "__weakref__", "loc", "_hash" + __slots__ = "__dict__", "__weakref__", "_hash", "loc" loc: Location | None @@ -457,7 +457,7 @@ class DefinitionNode(Node): class ExecutableDefinitionNode(DefinitionNode): - __slots__ = "name", "directives", "variable_definitions", "selection_set" + __slots__ = "directives", "name", "selection_set", "variable_definitions" name: NameNode | None directives: tuple[DirectiveNode, ...] @@ -472,7 +472,7 @@ class OperationDefinitionNode(ExecutableDefinitionNode): class VariableDefinitionNode(Node): - __slots__ = "variable", "type", "default_value", "directives" + __slots__ = "default_value", "directives", "type", "variable" variable: VariableNode type: TypeNode @@ -493,7 +493,7 @@ class SelectionNode(Node): class FieldNode(SelectionNode): - __slots__ = "alias", "name", "arguments", "nullability_assertion", "selection_set" + __slots__ = "alias", "arguments", "name", "nullability_assertion", "selection_set" alias: NameNode | None name: NameNode @@ -542,7 +542,7 @@ class FragmentSpreadNode(SelectionNode): class InlineFragmentNode(SelectionNode): - __slots__ = "type_condition", "selection_set" + __slots__ = "selection_set", "type_condition" type_condition: NamedTypeNode selection_set: SelectionSetNode @@ -581,7 +581,7 @@ class FloatValueNode(ValueNode): class StringValueNode(ValueNode): - __slots__ = "value", "block" + __slots__ = "block", "value" value: str block: bool | None @@ -650,7 +650,7 @@ class ConstObjectFieldNode(ObjectFieldNode): class DirectiveNode(Node): - __slots__ = "name", "arguments" + __slots__ = "arguments", "name" name: NameNode arguments: tuple[ArgumentNode, ...] 
@@ -711,7 +711,7 @@ class OperationTypeDefinitionNode(Node): class TypeDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" description: StringValueNode | None name: NameNode @@ -725,7 +725,7 @@ class ScalarTypeDefinitionNode(TypeDefinitionNode): class ObjectTypeDefinitionNode(TypeDefinitionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] directives: tuple[ConstDirectiveNode, ...] @@ -733,7 +733,7 @@ class ObjectTypeDefinitionNode(TypeDefinitionNode): class FieldDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "arguments", "type" + __slots__ = "arguments", "description", "directives", "name", "type" description: StringValueNode | None name: NameNode @@ -743,7 +743,7 @@ class FieldDefinitionNode(DefinitionNode): class InputValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives", "type", "default_value" + __slots__ = "default_value", "description", "directives", "name", "type" description: StringValueNode | None name: NameNode @@ -775,7 +775,7 @@ class EnumTypeDefinitionNode(TypeDefinitionNode): class EnumValueDefinitionNode(DefinitionNode): - __slots__ = "description", "name", "directives" + __slots__ = "description", "directives", "name" description: StringValueNode | None name: NameNode @@ -793,7 +793,7 @@ class InputObjectTypeDefinitionNode(TypeDefinitionNode): class DirectiveDefinitionNode(TypeSystemDefinitionNode): - __slots__ = "description", "name", "arguments", "repeatable", "locations" + __slots__ = "arguments", "description", "locations", "name", "repeatable" description: StringValueNode | None name: NameNode @@ -816,7 +816,7 @@ class SchemaExtensionNode(Node): class TypeExtensionNode(TypeSystemDefinitionNode): - __slots__ = "name", "directives" + __slots__ = "directives", "name" name: NameNode directives: tuple[ConstDirectiveNode, ...] @@ -830,14 +830,14 @@ class ScalarTypeExtensionNode(TypeExtensionNode): class ObjectTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] fields: tuple[FieldDefinitionNode, ...] class InterfaceTypeExtensionNode(TypeExtensionNode): - __slots__ = "interfaces", "fields" + __slots__ = "fields", "interfaces" interfaces: tuple[NamedTypeNode, ...] fields: tuple[FieldDefinitionNode, ...] 
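Note: the `__all__` and `__slots__` rewrites in this patch (here and in the files below) are purely alphabetical re-sorts; no name is added or removed, so nothing changes at runtime. Presumably this satisfies a lint rule such as Ruff's RUF022/RUF023 for unsorted `__all__`/`__slots__` (an assumption; the patch does not name the tool). A minimal sketch, not part of the patch, showing that slot order has no behavioral effect:

    # hypothetical demonstration classes, not from this repository
    class Before:
        __slots__ = "start", "end"

    class After:  # same slots, re-sorted
        __slots__ = "end", "start"

    for cls in (Before, After):
        obj = cls()
        obj.start, obj.end = 0, 10
        assert (obj.start, obj.end) == (0, 10)  # attribute access is identical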
diff --git a/src/graphql/language/block_string.py b/src/graphql/language/block_string.py index d784c236..248927b4 100644 --- a/src/graphql/language/block_string.py +++ b/src/graphql/language/block_string.py @@ -149,8 +149,7 @@ def print_block_string(value: str, minimize: bool = False) -> str: skip_leading_new_line = is_single_line and value and value[0] in " \t" before = ( "\n" - if print_as_multiple_lines - and not skip_leading_new_line + if (print_as_multiple_lines and not skip_leading_new_line) or force_leading_new_line else "" ) diff --git a/src/graphql/language/character_classes.py b/src/graphql/language/character_classes.py index 628bd60f..5d870576 100644 --- a/src/graphql/language/character_classes.py +++ b/src/graphql/language/character_classes.py @@ -1,6 +1,6 @@ """Character classes""" -__all__ = ["is_digit", "is_letter", "is_name_start", "is_name_continue"] +__all__ = ["is_digit", "is_letter", "is_name_continue", "is_name_start"] def is_digit(char: str) -> bool: diff --git a/src/graphql/language/location.py b/src/graphql/language/location.py index 8b1ee38d..7af55082 100644 --- a/src/graphql/language/location.py +++ b/src/graphql/language/location.py @@ -12,7 +12,7 @@ if TYPE_CHECKING: from .source import Source -__all__ = ["get_location", "SourceLocation", "FormattedSourceLocation"] +__all__ = ["FormattedSourceLocation", "SourceLocation", "get_location"] class FormattedSourceLocation(TypedDict): diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 95c69ccb..55c249ba 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -77,7 +77,7 @@ from typing_extensions import TypeAlias -__all__ = ["parse", "parse_type", "parse_value", "parse_const_value"] +__all__ = ["parse", "parse_const_value", "parse_type", "parse_value"] T = TypeVar("T") diff --git a/src/graphql/language/predicates.py b/src/graphql/language/predicates.py index b65b1982..280662f8 100644 --- a/src/graphql/language/predicates.py +++ b/src/graphql/language/predicates.py @@ -26,17 +26,17 @@ __all__ = [ + "is_const_value_node", "is_definition_node", "is_executable_definition_node", "is_nullability_assertion_node", "is_selection_node", - "is_value_node", - "is_const_value_node", + "is_type_definition_node", + "is_type_extension_node", "is_type_node", "is_type_system_definition_node", - "is_type_definition_node", "is_type_system_extension_node", - "is_type_extension_node", + "is_value_node", ] diff --git a/src/graphql/language/source.py b/src/graphql/language/source.py index 01bb013f..d54bf969 100644 --- a/src/graphql/language/source.py +++ b/src/graphql/language/source.py @@ -21,7 +21,7 @@ class Source: """A representation of source input to GraphQL.""" # allow custom attributes and weak references (not used internally) - __slots__ = "__weakref__", "__dict__", "body", "name", "location_offset" + __slots__ = "__dict__", "__weakref__", "body", "location_offset", "name" def __init__( self, diff --git a/src/graphql/language/visitor.py b/src/graphql/language/visitor.py index 450996d8..c9901230 100644 --- a/src/graphql/language/visitor.py +++ b/src/graphql/language/visitor.py @@ -25,15 +25,15 @@ __all__ = [ - "Visitor", + "BREAK", + "IDLE", + "REMOVE", + "SKIP", "ParallelVisitor", + "Visitor", "VisitorAction", "VisitorKeyMap", "visit", - "BREAK", - "SKIP", - "REMOVE", - "IDLE", ] diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py index e1aefd6a..10faca9e 100644 --- a/src/graphql/pyutils/__init__.py +++ b/src/graphql/pyutils/__init__.py @@ -35,32 
+35,32 @@ from .undefined import Undefined, UndefinedType __all__ = [ + "AwaitableOrValue", + "Description", + "FrozenError", + "Path", + "SimplePubSub", + "SimplePubSubIterator", + "Undefined", + "UndefinedType", + "and_list", "async_reduce", - "camel_to_snake", - "snake_to_camel", "cached_property", + "camel_to_snake", "did_you_mean", - "or_list", - "and_list", - "Description", "group_by", - "is_description", - "register_description", - "unregister_description", "identity_func", "inspect", "is_awaitable", "is_collection", + "is_description", "is_iterable", "merge_kwargs", "natural_comparison_key", - "AwaitableOrValue", - "suggestion_list", - "FrozenError", - "Path", + "or_list", "print_path_list", - "SimplePubSub", - "SimplePubSubIterator", - "Undefined", - "UndefinedType", + "register_description", + "snake_to_camel", + "suggestion_list", + "unregister_description", ] diff --git a/src/graphql/pyutils/format_list.py b/src/graphql/pyutils/format_list.py index 87184728..368e7ae0 100644 --- a/src/graphql/pyutils/format_list.py +++ b/src/graphql/pyutils/format_list.py @@ -4,7 +4,7 @@ from typing import Sequence -__all__ = ["or_list", "and_list"] +__all__ = ["and_list", "or_list"] def or_list(items: Sequence[str]) -> str: diff --git a/src/graphql/pyutils/is_awaitable.py b/src/graphql/pyutils/is_awaitable.py index ce8c93c0..158bcd40 100644 --- a/src/graphql/pyutils/is_awaitable.py +++ b/src/graphql/pyutils/is_awaitable.py @@ -27,8 +27,10 @@ def is_awaitable(value: Any) -> TypeGuard[Awaitable]: # check for coroutine objects isinstance(value, CoroutineType) # check for old-style generator based coroutine objects - or isinstance(value, GeneratorType) # for Python < 3.11 - and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + or ( + isinstance(value, GeneratorType) # for Python < 3.11 + and bool(value.gi_code.co_flags & CO_ITERABLE_COROUTINE) + ) # check for other awaitables (e.g. 
futures) or hasattr(value, "__await__") ) diff --git a/src/graphql/type/__init__.py b/src/graphql/type/__init__.py index b95e0e55..8c41bd28 100644 --- a/src/graphql/type/__init__.py +++ b/src/graphql/type/__init__.py @@ -177,134 +177,134 @@ from .validate import validate_schema, assert_valid_schema __all__ = [ - "is_schema", - "assert_schema", - "assert_name", - "assert_enum_value_name", - "GraphQLSchema", - "GraphQLSchemaKwargs", - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - "assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", - "GraphQLScalarType", - "GraphQLObjectType", - "GraphQLInterfaceType", - "GraphQLUnionType", - "GraphQLEnumType", - "GraphQLInputObjectType", - "GraphQLInputType", - "GraphQLArgument", - "GraphQLList", - "GraphQLNonNull", - "GraphQLType", - "GraphQLInputType", - "GraphQLOutputType", - "GraphQLLeafType", - "GraphQLCompositeType", + "DEFAULT_DEPRECATION_REASON", + "GRAPHQL_MAX_INT", + "GRAPHQL_MIN_INT", "GraphQLAbstractType", - "GraphQLWrappingType", - "GraphQLNullableType", - "GraphQLNullableInputType", - "GraphQLNullableOutputType", - "GraphQLNamedType", - "GraphQLNamedInputType", - "GraphQLNamedOutputType", - "Thunk", - "ThunkCollection", - "ThunkMapping", "GraphQLArgument", + "GraphQLArgument", + "GraphQLArgumentKwargs", "GraphQLArgumentMap", + "GraphQLBoolean", + "GraphQLCompositeType", + "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", + "GraphQLDirective", + "GraphQLDirectiveKwargs", + "GraphQLEnumType", + "GraphQLEnumTypeKwargs", "GraphQLEnumValue", + "GraphQLEnumValueKwargs", "GraphQLEnumValueMap", "GraphQLField", + "GraphQLFieldKwargs", "GraphQLFieldMap", + "GraphQLFieldResolver", + "GraphQLFloat", + "GraphQLID", + "GraphQLIncludeDirective", "GraphQLInputField", + "GraphQLInputFieldKwargs", "GraphQLInputFieldMap", "GraphQLInputFieldOutType", - "GraphQLScalarSerializer", - "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLArgumentKwargs", - "GraphQLEnumTypeKwargs", - "GraphQLEnumValueKwargs", - "GraphQLFieldKwargs", - "GraphQLInputFieldKwargs", + "GraphQLInputObjectType", "GraphQLInputObjectTypeKwargs", + "GraphQLInputType", + "GraphQLInputType", + "GraphQLInt", + "GraphQLInterfaceType", "GraphQLInterfaceTypeKwargs", + "GraphQLIsTypeOfFn", + "GraphQLLeafType", + "GraphQLList", + "GraphQLNamedInputType", + "GraphQLNamedOutputType", + "GraphQLNamedType", "GraphQLNamedTypeKwargs", + "GraphQLNonNull", + "GraphQLNullableInputType", + "GraphQLNullableOutputType", + "GraphQLNullableType", + "GraphQLObjectType", "GraphQLObjectTypeKwargs", - "GraphQLScalarTypeKwargs", - "GraphQLUnionTypeKwargs", - "GraphQLFieldResolver", - "GraphQLTypeResolver", - "GraphQLIsTypeOfFn", + "GraphQLOneOfDirective", + "GraphQLOutputType", "GraphQLResolveInfo", - "ResponsePath", - "is_directive", - 
"assert_directive", - "is_specified_directive", - "specified_directives", - "GraphQLDirective", - "GraphQLIncludeDirective", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", + "GraphQLScalarType", + "GraphQLScalarTypeKwargs", + "GraphQLScalarValueParser", + "GraphQLSchema", + "GraphQLSchemaKwargs", "GraphQLSkipDirective", - "GraphQLDeferDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", - "GraphQLOneOfDirective", - "GraphQLDirectiveKwargs", - "DEFAULT_DEPRECATION_REASON", - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", + "GraphQLStreamDirective", "GraphQLString", - "GraphQLBoolean", - "GraphQLID", - "GRAPHQL_MAX_INT", - "GRAPHQL_MIN_INT", - "is_introspection_type", - "introspection_types", - "TypeKind", + "GraphQLType", + "GraphQLTypeResolver", + "GraphQLUnionType", + "GraphQLUnionTypeKwargs", + "GraphQLWrappingType", + "ResponsePath", "SchemaMetaFieldDef", + "Thunk", + "ThunkCollection", + "ThunkMapping", + "TypeKind", "TypeMetaFieldDef", "TypeNameMetaFieldDef", - "validate_schema", + "assert_abstract_type", + "assert_composite_type", + "assert_directive", + "assert_enum_type", + "assert_enum_value_name", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_name", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_schema", + "assert_type", + "assert_union_type", "assert_valid_schema", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "introspection_types", + "is_abstract_type", + "is_composite_type", + "is_directive", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_introspection_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_schema", + "is_specified_directive", + "is_specified_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", + "specified_directives", + "specified_scalar_types", + "validate_schema", ] diff --git a/src/graphql/type/assert_name.py b/src/graphql/type/assert_name.py index b7e94e2d..1a8f7689 100644 --- a/src/graphql/type/assert_name.py +++ b/src/graphql/type/assert_name.py @@ -3,7 +3,7 @@ from ..error import GraphQLError from ..language.character_classes import is_name_continue, is_name_start -__all__ = ["assert_name", "assert_enum_value_name"] +__all__ = ["assert_enum_value_name", "assert_name"] def assert_name(name: str) -> str: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 312a41b2..f49691e7 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -70,45 +70,6 @@ from .schema import GraphQLSchema __all__ = [ - "is_type", - "is_scalar_type", - "is_object_type", - "is_interface_type", - "is_union_type", - "is_enum_type", - "is_input_object_type", - "is_list_type", - "is_non_null_type", - "is_input_type", - "is_output_type", - "is_leaf_type", - "is_composite_type", - "is_abstract_type", - "is_wrapping_type", - "is_nullable_type", - "is_named_type", - "is_required_argument", - "is_required_input_field", - "assert_type", - "assert_scalar_type", - "assert_object_type", - "assert_interface_type", - "assert_union_type", - "assert_enum_type", - 
"assert_input_object_type", - "assert_list_type", - "assert_non_null_type", - "assert_input_type", - "assert_output_type", - "assert_leaf_type", - "assert_composite_type", - "assert_abstract_type", - "assert_wrapping_type", - "assert_nullable_type", - "assert_named_type", - "get_nullable_type", - "get_named_type", - "resolve_thunk", "GraphQLAbstractType", "GraphQLArgument", "GraphQLArgumentKwargs", @@ -135,23 +96,23 @@ "GraphQLIsTypeOfFn", "GraphQLLeafType", "GraphQLList", - "GraphQLNamedType", - "GraphQLNamedTypeKwargs", "GraphQLNamedInputType", "GraphQLNamedOutputType", - "GraphQLNullableType", + "GraphQLNamedType", + "GraphQLNamedTypeKwargs", + "GraphQLNonNull", "GraphQLNullableInputType", "GraphQLNullableOutputType", - "GraphQLNonNull", + "GraphQLNullableType", + "GraphQLObjectType", + "GraphQLObjectTypeKwargs", + "GraphQLOutputType", "GraphQLResolveInfo", + "GraphQLScalarLiteralParser", + "GraphQLScalarSerializer", "GraphQLScalarType", "GraphQLScalarTypeKwargs", - "GraphQLScalarSerializer", "GraphQLScalarValueParser", - "GraphQLScalarLiteralParser", - "GraphQLObjectType", - "GraphQLObjectTypeKwargs", - "GraphQLOutputType", "GraphQLType", "GraphQLTypeResolver", "GraphQLUnionType", @@ -160,6 +121,45 @@ "Thunk", "ThunkCollection", "ThunkMapping", + "assert_abstract_type", + "assert_composite_type", + "assert_enum_type", + "assert_input_object_type", + "assert_input_type", + "assert_interface_type", + "assert_leaf_type", + "assert_list_type", + "assert_named_type", + "assert_non_null_type", + "assert_nullable_type", + "assert_object_type", + "assert_output_type", + "assert_scalar_type", + "assert_type", + "assert_union_type", + "assert_wrapping_type", + "get_named_type", + "get_nullable_type", + "is_abstract_type", + "is_composite_type", + "is_enum_type", + "is_input_object_type", + "is_input_type", + "is_interface_type", + "is_leaf_type", + "is_list_type", + "is_named_type", + "is_non_null_type", + "is_nullable_type", + "is_object_type", + "is_output_type", + "is_required_argument", + "is_required_input_field", + "is_scalar_type", + "is_type", + "is_union_type", + "is_wrapping_type", + "resolve_thunk", ] diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index d4160300..5fe48b94 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -20,20 +20,20 @@ from typing_extensions import TypeGuard __all__ = [ - "is_directive", - "assert_directive", - "is_specified_directive", - "specified_directives", + "DEFAULT_DEPRECATION_REASON", + "DirectiveLocation", "GraphQLDeferDirective", + "GraphQLDeprecatedDirective", "GraphQLDirective", "GraphQLDirectiveKwargs", "GraphQLIncludeDirective", "GraphQLSkipDirective", - "GraphQLStreamDirective", - "GraphQLDeprecatedDirective", "GraphQLSpecifiedByDirective", - "DirectiveLocation", - "DEFAULT_DEPRECATION_REASON", + "GraphQLStreamDirective", + "assert_directive", + "is_directive", + "is_specified_directive", + "specified_directives", ] diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 22669c80..1bc98c21 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -23,15 +23,15 @@ from typing_extensions import TypeGuard __all__ = [ - "is_specified_scalar_type", - "specified_scalar_types", - "GraphQLInt", - "GraphQLFloat", - "GraphQLString", - "GraphQLBoolean", - "GraphQLID", "GRAPHQL_MAX_INT", "GRAPHQL_MIN_INT", + "GraphQLBoolean", + "GraphQLFloat", + "GraphQLID", + "GraphQLInt", + "GraphQLString", + "is_specified_scalar_type", + "specified_scalar_types", ] # As 
per the GraphQL Spec, Integers are only treated as valid diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 5e546298..3099991d 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -49,7 +49,7 @@ except ImportError: # Python < 3.10 from typing_extensions import TypeAlias, TypeGuard -__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "is_schema", "assert_schema"] +__all__ = ["GraphQLSchema", "GraphQLSchemaKwargs", "assert_schema", "is_schema"] TypeMap: TypeAlias = Dict[str, GraphQLNamedType] diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index c1e806c1..109667f1 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -41,7 +41,7 @@ from .introspection import is_introspection_type from .schema import GraphQLSchema, assert_schema -__all__ = ["validate_schema", "assert_valid_schema"] +__all__ = ["assert_valid_schema", "validate_schema"] def validate_schema(schema: GraphQLSchema) -> list[GraphQLError]: diff --git a/src/graphql/utilities/__init__.py b/src/graphql/utilities/__init__.py index f528bdcc..5aadcc31 100644 --- a/src/graphql/utilities/__init__.py +++ b/src/graphql/utilities/__init__.py @@ -100,14 +100,14 @@ "find_dangerous_changes", "get_introspection_query", "get_operation_ast", + "introspection_from_schema", "is_equal_type", "is_type_sub_type_of", - "introspection_from_schema", "lexicographic_sort_schema", - "print_schema", - "print_type", "print_directive", "print_introspection_schema", + "print_schema", + "print_type", "print_value", "separate_operations", "strip_ignored_characters", diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 72283269..14adc661 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -92,8 +92,8 @@ from .value_from_ast import value_from_ast __all__ = [ - "extend_schema", "ExtendSchemaImpl", + "extend_schema", ] diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index c88c1265..d436f1d4 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -216,11 +216,8 @@ def find_type_changes( schema_changes.extend(find_union_type_changes(old_type, new_type)) elif is_input_object_type(old_type) and is_input_object_type(new_type): schema_changes.extend(find_input_object_type_changes(old_type, new_type)) - elif ( - is_object_type(old_type) - and is_object_type(new_type) - or is_interface_type(old_type) - and is_interface_type(new_type) + elif (is_object_type(old_type) and is_object_type(new_type)) or ( + is_interface_type(old_type) and is_interface_type(new_type) ): schema_changes.extend(find_field_changes(old_type, new_type)) schema_changes.extend( diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 4babfaec..c23a1533 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -19,7 +19,6 @@ __all__ = [ - "get_introspection_query", "IntrospectionDirective", "IntrospectionEnumType", "IntrospectionField", @@ -35,6 +34,7 @@ "IntrospectionType", "IntrospectionTypeRef", "IntrospectionUnionType", + "get_introspection_query", ] diff --git a/src/graphql/utilities/print_schema.py b/src/graphql/utilities/print_schema.py index 44c876dc..dd68e54e 100644 --- a/src/graphql/utilities/print_schema.py +++ b/src/graphql/utilities/print_schema.py @@ -33,10 +33,10 @@ 
from .ast_from_value import ast_from_value __all__ = [ - "print_schema", - "print_type", "print_directive", "print_introspection_schema", + "print_schema", + "print_type", "print_value", ] diff --git a/src/graphql/utilities/type_comparators.py b/src/graphql/utilities/type_comparators.py index 3ab50dc5..609c19b6 100644 --- a/src/graphql/utilities/type_comparators.py +++ b/src/graphql/utilities/type_comparators.py @@ -11,7 +11,7 @@ is_object_type, ) -__all__ = ["is_equal_type", "is_type_sub_type_of", "do_types_overlap"] +__all__ = ["do_types_overlap", "is_equal_type", "is_type_sub_type_of"] def is_equal_type(type_a: GraphQLType, type_b: GraphQLType) -> bool: diff --git a/src/graphql/validation/__init__.py b/src/graphql/validation/__init__.py index 8f67f9b7..ed6ca6c8 100644 --- a/src/graphql/validation/__init__.py +++ b/src/graphql/validation/__init__.py @@ -124,14 +124,8 @@ from .rules.custom.no_schema_introspection import NoSchemaIntrospectionCustomRule __all__ = [ - "validate", "ASTValidationContext", "ASTValidationRule", - "SDLValidationContext", - "SDLValidationRule", - "ValidationContext", - "ValidationRule", - "specified_rules", "DeferStreamDirectiveLabel", "DeferStreamDirectiveOnRootField", "DeferStreamDirectiveOnValidOperationsRule", @@ -143,33 +137,39 @@ "KnownFragmentNamesRule", "KnownTypeNamesRule", "LoneAnonymousOperationRule", + "LoneSchemaDefinitionRule", + "NoDeprecatedCustomRule", "NoFragmentCyclesRule", + "NoSchemaIntrospectionCustomRule", "NoUndefinedVariablesRule", "NoUnusedFragmentsRule", "NoUnusedVariablesRule", "OverlappingFieldsCanBeMergedRule", "PossibleFragmentSpreadsRule", + "PossibleTypeExtensionsRule", "ProvidedRequiredArgumentsRule", + "SDLValidationContext", + "SDLValidationRule", "ScalarLeafsRule", "SingleFieldSubscriptionsRule", "StreamDirectiveOnListField", + "UniqueArgumentDefinitionNamesRule", "UniqueArgumentNamesRule", + "UniqueDirectiveNamesRule", "UniqueDirectivesPerLocationRule", + "UniqueEnumValueNamesRule", + "UniqueFieldDefinitionNamesRule", "UniqueFragmentNamesRule", "UniqueInputFieldNamesRule", "UniqueOperationNamesRule", + "UniqueOperationTypesRule", + "UniqueTypeNamesRule", "UniqueVariableNamesRule", + "ValidationContext", + "ValidationRule", "ValuesOfCorrectTypeRule", "VariablesAreInputTypesRule", "VariablesInAllowedPositionRule", - "LoneSchemaDefinitionRule", - "UniqueOperationTypesRule", - "UniqueTypeNamesRule", - "UniqueEnumValueNamesRule", - "UniqueFieldDefinitionNamesRule", - "UniqueArgumentDefinitionNamesRule", - "UniqueDirectiveNamesRule", - "PossibleTypeExtensionsRule", - "NoDeprecatedCustomRule", - "NoSchemaIntrospectionCustomRule", + "specified_rules", + "validate", ] diff --git a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py index c412b89e..0159715d 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_valid_operations_rule.py @@ -66,7 +66,8 @@ def enter_directive( if ( isinstance(definition_node, FragmentDefinitionNode) and definition_node.name.value in self.fragments_used_on_subscriptions - or isinstance(definition_node, OperationDefinitionNode) + ) or ( + isinstance(definition_node, OperationDefinitionNode) and definition_node.operation == OperationType.SUBSCRIPTION ): if node.name.value == GraphQLDeferDirective.name: diff --git a/src/graphql/validation/rules/known_argument_names.py 
b/src/graphql/validation/rules/known_argument_names.py index dadfd34a..46f9ef42 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -16,7 +16,7 @@ from ...type import specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["KnownArgumentNamesRule", "KnownArgumentNamesOnDirectivesRule"] +__all__ = ["KnownArgumentNamesOnDirectivesRule", "KnownArgumentNamesRule"] class KnownArgumentNamesOnDirectivesRule(ASTValidationRule): diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index a9313273..f94515fe 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -19,7 +19,7 @@ from ...type import GraphQLArgument, is_required_argument, is_type, specified_directives from . import ASTValidationRule, SDLValidationContext, ValidationContext -__all__ = ["ProvidedRequiredArgumentsRule", "ProvidedRequiredArgumentsOnDirectivesRule"] +__all__ = ["ProvidedRequiredArgumentsOnDirectivesRule", "ProvidedRequiredArgumentsRule"] class ProvidedRequiredArgumentsOnDirectivesRule(ASTValidationRule): diff --git a/src/graphql/validation/validate.py b/src/graphql/validation/validate.py index 08c83780..8e59821c 100644 --- a/src/graphql/validation/validate.py +++ b/src/graphql/validation/validate.py @@ -15,11 +15,11 @@ from .rules import ASTValidationRule __all__ = [ + "ValidationAbortedError", "assert_valid_sdl", "assert_valid_sdl_extension", "validate", "validate_sdl", - "ValidationAbortedError", ] diff --git a/src/graphql/version.py b/src/graphql/version.py index 29166e49..7b08ac67 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -5,7 +5,7 @@ import re from typing import NamedTuple -__all__ = ["version", "version_info", "version_js", "version_info_js"] +__all__ = ["version", "version_info", "version_info_js", "version_js"] version = "3.3.0a6" diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index a3448d89..593c1cf6 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -78,7 +78,7 @@ def __init__(self, id: int): # noqa: A002 "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, id: Article(id), # noqa: A002 + resolve=lambda _obj, _info, id: Article(id), ), "feed": GraphQLField( GraphQLList(BlogArticle), diff --git a/tests/fixtures/__init__.py b/tests/fixtures/__init__.py index 3df1c2f0..5e4058f9 100644 --- a/tests/fixtures/__init__.py +++ b/tests/fixtures/__init__.py @@ -7,11 +7,11 @@ import pytest __all__ = [ + "big_schema_introspection_result", + "big_schema_sdl", "cleanup", "kitchen_sink_query", "kitchen_sink_sdl", - "big_schema_sdl", - "big_schema_introspection_result", ] diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py index 6ae4a6e5..ea374993 100644 --- a/tests/utils/__init__.py +++ b/tests/utils/__init__.py @@ -8,8 +8,8 @@ from .viral_sdl import viral_sdl __all__ = [ - "assert_matching_values", "assert_equal_awaitables_or_values", + "assert_matching_values", "dedent", "gen_fuzz_strings", "viral_schema", diff --git a/tests/validation/harness.py b/tests/validation/harness.py index 9a6912f4..737fb2df 100644 --- a/tests/validation/harness.py +++ b/tests/validation/harness.py @@ -12,9 +12,9 @@ from graphql.validation import ASTValidationRule __all__ = [ - "test_schema", - 
"assert_validation_errors", "assert_sdl_validation_errors", + "assert_validation_errors", + "test_schema", ] test_schema = build_schema( From 233df173133a0044e8e88ac93c556a3aeabb430d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 19:39:10 +0100 Subject: [PATCH 24/50] Introduces new incremental response format Replicates graphql/graphql-js@00e2b50fc453b5a4c00e65ab5c902963cca26d3f --- docs/conf.py | 17 +- docs/modules/pyutils.rst | 4 + pyproject.toml | 2 + src/graphql/execution/async_iterables.py | 7 +- src/graphql/execution/collect_fields.py | 363 ++++++--- src/graphql/execution/execute.py | 615 ++++++++++----- .../execution/incremental_publisher.py | 644 +++++++++++----- src/graphql/pyutils/__init__.py | 4 + src/graphql/pyutils/ref_map.py | 79 ++ src/graphql/pyutils/ref_set.py | 67 ++ .../rules/single_field_subscriptions.py | 26 +- tests/execution/test_customize.py | 10 +- tests/execution/test_defer.py | 702 ++++++++++++------ tests/execution/test_lists.py | 1 + tests/execution/test_mutations.py | 4 +- tests/execution/test_stream.py | 545 +++++++------- tests/pyutils/test_ref_map.py | 124 ++++ tests/pyutils/test_ref_set.py | 89 +++ 18 files changed, 2290 insertions(+), 1013 deletions(-) create mode 100644 src/graphql/pyutils/ref_map.py create mode 100644 src/graphql/pyutils/ref_set.py create mode 100644 tests/pyutils/test_ref_map.py create mode 100644 tests/pyutils/test_ref_set.py diff --git a/docs/conf.py b/docs/conf.py index 4655434b..d3de91ea 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -147,6 +147,8 @@ types.TracebackType TypeMap AwaitableOrValue +DeferredFragmentRecord +DeferUsage EnterLeaveVisitor ExperimentalIncrementalExecutionResults FieldGroup @@ -165,18 +167,31 @@ IncrementalResult InitialResultRecord Middleware +StreamItemsRecord +StreamRecord SubsequentDataRecord asyncio.events.AbstractEventLoop -graphql.execution.collect_fields.FieldsAndPatches +collections.abc.MutableMapping +collections.abc.MutableSet +graphql.execution.collect_fields.DeferUsage +graphql.execution.collect_fields.CollectFieldsResult +graphql.execution.collect_fields.FieldGroup graphql.execution.execute.StreamArguments +graphql.execution.execute.StreamUsage graphql.execution.map_async_iterable.map_async_iterable +graphql.execution.incremental_publisher.CompletedResult graphql.execution.incremental_publisher.DeferredFragmentRecord +graphql.execution.incremental_publisher.DeferredGroupedFieldSetRecord +graphql.execution.incremental_publisher.FormattedCompletedResult graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord graphql.execution.incremental_publisher.StreamItemsRecord +graphql.execution.incremental_publisher.StreamRecord graphql.execution.Middleware graphql.language.lexer.EscapeSequence graphql.language.visitor.EnterLeaveVisitor +graphql.pyutils.ref_map.K +graphql.pyutils.ref_map.V graphql.type.definition.GT_co graphql.type.definition.GNT_co graphql.type.definition.TContext diff --git a/docs/modules/pyutils.rst b/docs/modules/pyutils.rst index cd178d65..e33b5d1f 100644 --- a/docs/modules/pyutils.rst +++ b/docs/modules/pyutils.rst @@ -30,3 +30,7 @@ PyUtils .. autoclass:: SimplePubSub .. autoclass:: SimplePubSubIterator .. autodata:: Undefined +.. autoclass:: RefMap + :no-inherited-members: +.. 
autoclass:: RefSet
+    :no-inherited-members:
diff --git a/pyproject.toml b/pyproject.toml
index 7cdedaa9..4d366945 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -320,6 +320,8 @@ timeout = "100"
 filterwarnings = "ignore::pytest.PytestConfigWarning"
 # All tests can be found in the tests directory.
 testpaths = ["tests"]
+# Use the function scope as the default for asynchronous tests.
+asyncio_default_fixture_loop_scope = "function"
 
 [build-system]
 requires = ["poetry_core>=1.6.1,<2"]
diff --git a/src/graphql/execution/async_iterables.py b/src/graphql/execution/async_iterables.py
index 747a515d..b8faad88 100644
--- a/src/graphql/execution/async_iterables.py
+++ b/src/graphql/execution/async_iterables.py
@@ -2,7 +2,7 @@
 
 from __future__ import annotations
 
-from contextlib import AbstractAsyncContextManager
+from contextlib import AbstractAsyncContextManager, suppress
 from typing import (
     AsyncGenerator,
     AsyncIterable,
@@ -20,6 +20,8 @@
 
 AsyncIterableOrGenerator = Union[AsyncGenerator[T, None], AsyncIterable[T]]
 
+suppress_exceptions = suppress(Exception)
+
 
 class aclosing(AbstractAsyncContextManager, Generic[T]):  # noqa: N801
     """Async context manager for safely finalizing an async iterator or generator.
@@ -40,7 +42,8 @@ async def __aexit__(self, *_exc_info: object) -> None:
         except AttributeError:
             pass  # do not complain if the iterator has no aclose() method
         else:
-            await aclose()
+            with suppress_exceptions:  # or if the aclose() method fails
+                await aclose()
 
 
 async def map_async_iterable(
diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py
index 4f581252..613a55c2 100644
--- a/src/graphql/execution/collect_fields.py
+++ b/src/graphql/execution/collect_fields.py
@@ -3,8 +3,7 @@
 from __future__ import annotations
 
 import sys
-from collections import defaultdict
-from typing import Any, Dict, List, NamedTuple
+from typing import Any, Dict, NamedTuple, Union, cast
 
 from ..language import (
     FieldNode,
@@ -15,6 +14,7 @@
     OperationType,
     SelectionSetNode,
 )
+from ..pyutils import RefMap, RefSet
 from ..type import (
     GraphQLDeferDirective,
     GraphQLIncludeDirective,
@@ -33,33 +33,88 @@
 
 
 __all__ = [
+    "NON_DEFERRED_TARGET_SET",
+    "CollectFieldsContext",
+    "CollectFieldsResult",
+    "DeferUsage",
+    "DeferUsageSet",
+    "FieldDetails",
     "FieldGroup",
-    "FieldsAndPatches",
-    "GroupedFieldSet",
+    "GroupedFieldSetDetails",
+    "Target",
+    "TargetSet",
     "collect_fields",
     "collect_subfields",
 ]
 
+
+class DeferUsage(NamedTuple):
+    """An optionally labelled list of ancestor targets."""
+
+    label: str | None
+    ancestors: list[Target]
+
+
+Target: TypeAlias = Union[DeferUsage, None]
+
+TargetSet: TypeAlias = RefSet[Target]
+DeferUsageSet: TypeAlias = RefSet[DeferUsage]
+
+
+NON_DEFERRED_TARGET_SET: TargetSet = RefSet([None])
+
+
+class FieldDetails(NamedTuple):
+    """A field node and its target."""
+
+    node: FieldNode
+    target: Target
+
+
+class FieldGroup(NamedTuple):
+    """A group of fields that share the same target set."""
+
+    fields: list[FieldDetails]
+    targets: TargetSet
+
+    def to_nodes(self) -> list[FieldNode]:
+        """Return the field nodes in this group."""
+        return [field_details.node for field_details in self.fields]
+
+
 if sys.version_info < (3, 9):
-    FieldGroup: TypeAlias = List[FieldNode]
-    GroupedFieldSet = Dict[str, FieldGroup]
+    GroupedFieldSet: TypeAlias = Dict[str, FieldGroup]
 else:  # Python >= 3.9
-    FieldGroup: TypeAlias = list[FieldNode]
-    GroupedFieldSet = dict[str, FieldGroup]
+    GroupedFieldSet: TypeAlias = dict[str, FieldGroup]
 
 
-class PatchFields(NamedTuple):
-    
"""Optionally labelled set of fields to be used as a patch.""" +class GroupedFieldSetDetails(NamedTuple): + """A grouped field set with defer info.""" - label: str | None grouped_field_set: GroupedFieldSet + should_initiate_defer: bool -class FieldsAndPatches(NamedTuple): - """Tuple of collected fields and patches to be applied.""" +class CollectFieldsResult(NamedTuple): + """Collected fields and deferred usages.""" grouped_field_set: GroupedFieldSet - patches: list[PatchFields] + new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] + new_defer_usages: list[DeferUsage] + + +class CollectFieldsContext(NamedTuple): + """Context for collecting fields.""" + + schema: GraphQLSchema + fragments: dict[str, FragmentDefinitionNode] + variable_values: dict[str, Any] + operation: OperationDefinitionNode + runtime_type: GraphQLObjectType + targets_by_key: dict[str, TargetSet] + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]] + new_defer_usages: list[DeferUsage] + visited_fragment_names: set[str] def collect_fields( @@ -68,7 +123,7 @@ def collect_fields( variable_values: dict[str, Any], runtime_type: GraphQLObjectType, operation: OperationDefinitionNode, -) -> FieldsAndPatches: +) -> CollectFieldsResult: """Collect fields. Given a selection_set, collects all the fields and returns them. @@ -79,20 +134,23 @@ def collect_fields( For internal use only. """ - grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) - patches: list[PatchFields] = [] - collect_fields_impl( + context = CollectFieldsContext( schema, fragments, variable_values, operation, runtime_type, - operation.selection_set, - grouped_field_set, - patches, + {}, + RefMap(), + [], set(), ) - return FieldsAndPatches(grouped_field_set, patches) + collect_fields_impl(context, operation.selection_set) + + return CollectFieldsResult( + *build_grouped_field_sets(context.targets_by_key, context.fields_by_target), + context.new_defer_usages, + ) def collect_subfields( @@ -102,7 +160,7 @@ def collect_subfields( operation: OperationDefinitionNode, return_type: GraphQLObjectType, field_group: FieldGroup, -) -> FieldsAndPatches: +) -> CollectFieldsResult: """Collect subfields. Given a list of field nodes, collects all the subfields of the passed in fields, @@ -114,47 +172,73 @@ def collect_subfields( For internal use only. 
""" - sub_grouped_field_set: dict[str, list[FieldNode]] = defaultdict(list) - visited_fragment_names: set[str] = set() - - sub_patches: list[PatchFields] = [] - sub_fields_and_patches = FieldsAndPatches(sub_grouped_field_set, sub_patches) + context = CollectFieldsContext( + schema, + fragments, + variable_values, + operation, + return_type, + {}, + RefMap(), + [], + set(), + ) - for node in field_group: + for field_details in field_group.fields: + node = field_details.node if node.selection_set: - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - return_type, - node.selection_set, - sub_grouped_field_set, - sub_patches, - visited_fragment_names, - ) - return sub_fields_and_patches + collect_fields_impl(context, node.selection_set, field_details.target) + + return CollectFieldsResult( + *build_grouped_field_sets( + context.targets_by_key, context.fields_by_target, field_group.targets + ), + context.new_defer_usages, + ) def collect_fields_impl( - schema: GraphQLSchema, - fragments: dict[str, FragmentDefinitionNode], - variable_values: dict[str, Any], - operation: OperationDefinitionNode, - runtime_type: GraphQLObjectType, + context: CollectFieldsContext, selection_set: SelectionSetNode, - grouped_field_set: dict[str, list[FieldNode]], - patches: list[PatchFields], - visited_fragment_names: set[str], + parent_target: Target | None = None, + new_target: Target | None = None, ) -> None: """Collect fields (internal implementation).""" - patch_fields: dict[str, list[FieldNode]] + ( + schema, + fragments, + variable_values, + operation, + runtime_type, + targets_by_key, + fields_by_target, + new_defer_usages, + visited_fragment_names, + ) = context + + ancestors: list[Target] for selection in selection_set.selections: if isinstance(selection, FieldNode): if not should_include_node(variable_values, selection): continue - grouped_field_set[get_field_entry_key(selection)].append(selection) + key = get_field_entry_key(selection) + target = new_target or parent_target + key_targets = targets_by_key.get(key) + if key_targets is None: + key_targets = RefSet([target]) + targets_by_key[key] = key_targets + else: + key_targets.add(target) + target_fields = fields_by_target.get(target) + if target_fields is None: + fields_by_target[target] = {key: [selection]} + else: + field_nodes = target_fields.get(key) + if field_nodes is None: + target_fields[key] = [selection] + else: + field_nodes.append(selection) elif isinstance(selection, InlineFragmentNode): if not should_include_node( variable_values, selection @@ -162,32 +246,19 @@ def collect_fields_impl( continue defer = get_defer_values(operation, variable_values, selection) + if defer: - patch_fields = defaultdict(list) - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - selection.selection_set, - patch_fields, - patches, - visited_fragment_names, + ancestors = ( + [None] + if parent_target is None + else [parent_target, *parent_target.ancestors] ) - patches.append(PatchFields(defer.label, patch_fields)) + target = DeferUsage(defer.label, ancestors) + new_defer_usages.append(target) else: - collect_fields_impl( - schema, - fragments, - variable_values, - operation, - runtime_type, - selection.selection_set, - grouped_field_set, - patches, - visited_fragment_names, - ) + target = new_target + + collect_fields_impl(context, selection.selection_set, parent_target, target) elif isinstance(selection, FragmentSpreadNode): # pragma: no cover else frag_name = selection.name.value @@ -204,35 
+275,19 @@ def collect_fields_impl(
     ):
         continue
 
-            if not defer:
-                visited_fragment_names.add(frag_name)
-
             if defer:
-                patch_fields = defaultdict(list)
-                collect_fields_impl(
-                    schema,
-                    fragments,
-                    variable_values,
-                    operation,
-                    runtime_type,
-                    fragment.selection_set,
-                    patch_fields,
-                    patches,
-                    visited_fragment_names,
+                ancestors = (
+                    [None]
+                    if parent_target is None
+                    else [parent_target, *parent_target.ancestors]
                 )
-                patches.append(PatchFields(defer.label, patch_fields))
+                target = DeferUsage(defer.label, ancestors)
+                new_defer_usages.append(target)
             else:
-                collect_fields_impl(
-                    schema,
-                    fragments,
-                    variable_values,
-                    operation,
-                    runtime_type,
-                    fragment.selection_set,
-                    grouped_field_set,
-                    patches,
-                    visited_fragment_names,
-                )
+                visited_fragment_names.add(frag_name)
+                target = new_target
+
+            collect_fields_impl(context, fragment.selection_set, parent_target, target)
 
 
 class DeferValues(NamedTuple):
@@ -305,3 +360,111 @@ def does_fragment_condition_match(
 def get_field_entry_key(node: FieldNode) -> str:
     """Implement the logic to compute the key of a given field's entry"""
     return node.alias.value if node.alias else node.name.value
+
+
+def build_grouped_field_sets(
+    targets_by_key: dict[str, TargetSet],
+    fields_by_target: RefMap[Target, dict[str, list[FieldNode]]],
+    parent_targets: TargetSet = NON_DEFERRED_TARGET_SET,
+) -> tuple[GroupedFieldSet, RefMap[DeferUsageSet, GroupedFieldSetDetails]]:
+    """Build grouped field sets."""
+    parent_target_keys, target_set_details_map = get_target_set_details(
+        targets_by_key, parent_targets
+    )
+
+    grouped_field_set = (
+        get_ordered_grouped_field_set(
+            parent_target_keys, parent_targets, targets_by_key, fields_by_target
+        )
+        if parent_target_keys
+        else {}
+    )
+
+    new_grouped_field_set_details: RefMap[DeferUsageSet, GroupedFieldSetDetails] = (
+        RefMap()
+    )
+
+    for masking_targets, target_set_details in target_set_details_map.items():
+        keys, should_initiate_defer = target_set_details
+
+        new_grouped_field_set = get_ordered_grouped_field_set(
+            keys, masking_targets, targets_by_key, fields_by_target
+        )
+
+        # All TargetSets that cause new grouped field sets consist only of DeferUsages
+        # and have should_initiate_defer defined
+
+        new_grouped_field_set_details[cast(DeferUsageSet, masking_targets)] = (
+            GroupedFieldSetDetails(new_grouped_field_set, should_initiate_defer)
+        )
+
+    return grouped_field_set, new_grouped_field_set_details
+
+
+class TargetSetDetails(NamedTuple):
+    """A set of target keys with defer info."""
+
+    keys: set[str]
+    should_initiate_defer: bool
+
+
+def get_target_set_details(
+    targets_by_key: dict[str, TargetSet], parent_targets: TargetSet
+) -> tuple[set[str], RefMap[TargetSet, TargetSetDetails]]:
+    """Get target set details."""
+    parent_target_keys: set[str] = set()
+    target_set_details_map: RefMap[TargetSet, TargetSetDetails] = RefMap()
+
+    for response_key, targets in targets_by_key.items():
+        masking_target_list: list[Target] = []
+        for target in targets:
+            if not target or all(
+                ancestor not in targets for ancestor in target.ancestors
+            ):
+                masking_target_list.append(target)
+
+        masking_targets: TargetSet = RefSet(masking_target_list)
+        if masking_targets == parent_targets:
+            parent_target_keys.add(response_key)
+            continue
+
+        for target_set, target_set_details in target_set_details_map.items():
+            if target_set == masking_targets:
+                target_set_details.keys.add(response_key)
+                break
+        else:
+            target_set_details = TargetSetDetails(
+                {response_key},
+                any(
+                    defer_usage not in parent_targets for
defer_usage in masking_targets + ), + ) + target_set_details_map[masking_targets] = target_set_details + + return parent_target_keys, target_set_details_map + + +def get_ordered_grouped_field_set( + keys: set[str], + masking_targets: TargetSet, + targets_by_key: dict[str, TargetSet], + fields_by_target: RefMap[Target, dict[str, list[FieldNode]]], +) -> GroupedFieldSet: + """Get ordered grouped field set.""" + grouped_field_set: GroupedFieldSet = {} + + first_target = next(iter(masking_targets)) + first_fields = fields_by_target[first_target] + for key in list(first_fields): + if key in keys: + field_group = grouped_field_set.get(key) + if field_group is None: # pragma: no cover else + field_group = FieldGroup([], masking_targets) + grouped_field_set[key] = field_group + for target in targets_by_key[key]: + fields_for_target = fields_by_target[target] + nodes = fields_for_target[key] + del fields_for_target[key] + field_group.fields.extend(FieldDetails(node, target) for node in nodes) + + return grouped_field_set diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 30a6234d..ac041392 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -3,7 +3,6 @@ from __future__ import annotations from asyncio import ensure_future, gather, shield, wait_for -from collections.abc import Mapping from contextlib import suppress from typing import ( Any, @@ -14,19 +13,20 @@ Callable, Iterable, List, + Mapping, NamedTuple, Optional, + Sequence, Tuple, Union, cast, ) try: - from typing import TypeAlias, TypeGuard + from typing import TypeAlias, TypeGuard # noqa: F401 except ImportError: # Python < 3.10 - from typing_extensions import TypeAlias, TypeGuard + from typing_extensions import TypeAlias try: # only needed for Python < 3.11 - # noinspection PyCompatibility from asyncio.exceptions import TimeoutError # noqa: A004 except ImportError: # Python < 3.7 from concurrent.futures import TimeoutError # noqa: A004 @@ -41,6 +41,7 @@ from ..pyutils import ( AwaitableOrValue, Path, + RefMap, Undefined, async_reduce, inspect, @@ -68,27 +69,34 @@ ) from .async_iterables import map_async_iterable from .collect_fields import ( + NON_DEFERRED_TARGET_SET, + CollectFieldsResult, + DeferUsage, + DeferUsageSet, + FieldDetails, FieldGroup, - FieldsAndPatches, GroupedFieldSet, + GroupedFieldSetDetails, collect_fields, collect_subfields, ) from .incremental_publisher import ( ASYNC_DELAY, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, ExecutionResult, ExperimentalIncrementalExecutionResults, IncrementalDataRecord, IncrementalPublisher, InitialResultRecord, StreamItemsRecord, - SubsequentDataRecord, + StreamRecord, ) from .middleware import MiddlewareManager from .values import get_argument_values, get_directive_values, get_variable_values try: # pragma: no cover - anext # noqa: B018 + anext # noqa: B018 # pyright: ignore except NameError: # pragma: no cover (Python < 3.10) # noinspection PyShadowingBuiltins async def anext(iterator: AsyncIterator) -> Any: @@ -135,11 +143,12 @@ async def anext(iterator: AsyncIterator) -> Any: Middleware: TypeAlias = Optional[Union[Tuple, List, MiddlewareManager]] -class StreamArguments(NamedTuple): - """Arguments of the stream directive""" +class StreamUsage(NamedTuple): + """Stream directive usage information""" - initial_count: int label: str | None + initial_count: int + field_group: FieldGroup class ExecutionContext: @@ -161,9 +170,7 @@ class ExecutionContext: incremental_publisher: IncrementalPublisher 
middleware_manager: MiddlewareManager | None - is_awaitable: Callable[[Any], TypeGuard[Awaitable]] = staticmethod( - default_is_awaitable - ) + is_awaitable: Callable[[Any], bool] = staticmethod(default_is_awaitable) def __init__( self, @@ -194,8 +201,9 @@ def __init__( if is_awaitable: self.is_awaitable = is_awaitable self._canceled_iterators: set[AsyncIterator] = set() - self._subfields_cache: dict[tuple, FieldsAndPatches] = {} + self._subfields_cache: dict[tuple, CollectFieldsResult] = {} self._tasks: set[Awaitable] = set() + self._stream_usages: RefMap[FieldGroup, StreamUsage] = RefMap() @classmethod def build( @@ -310,8 +318,8 @@ def execute_operation( Implements the "Executing operations" section of the spec. """ - schema = self.schema operation = self.operation + schema = self.schema root_type = schema.get_root_type(operation.operation) if root_type is None: msg = ( @@ -320,12 +328,24 @@ def execute_operation( ) raise GraphQLError(msg, operation) - grouped_field_set, patches = collect_fields( - schema, - self.fragments, - self.variable_values, - root_type, - operation, + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + collect_fields( + schema, self.fragments, self.variable_values, root_type, operation + ) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, new_defer_usages, initial_result_record + ) + + path: Path | None = None + + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, + new_grouped_field_set_details, + new_defer_map, + path, ) root_value = self.root_value @@ -334,18 +354,22 @@ def execute_operation( self.execute_fields_serially if operation.operation == OperationType.MUTATION else self.execute_fields - )(root_type, root_value, None, grouped_field_set, initial_result_record) - - for patch in patches: - label, patch_grouped_filed_set = patch - self.execute_deferred_fragment( - root_type, - root_value, - patch_grouped_filed_set, - initial_result_record, - label, - None, - ) + )( + root_type, + root_value, + path, + grouped_field_set, + initial_result_record, + new_defer_map, + ) + + self.execute_deferred_grouped_field_sets( + root_type, + root_value, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) return result @@ -356,6 +380,7 @@ def execute_fields_serially( path: Path | None, grouped_field_set: GroupedFieldSet, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields serially. @@ -375,6 +400,7 @@ def reducer( field_group, field_path, incremental_data_record, + defer_map, ) if result is Undefined: return results @@ -401,6 +427,7 @@ def execute_fields( path: Path | None, grouped_field_set: GroupedFieldSet, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Execute the given fields concurrently. @@ -419,6 +446,7 @@ def execute_fields( field_group, field_path, incremental_data_record, + defer_map, ) if result is not Undefined: results[response_name] = result @@ -456,6 +484,7 @@ def execute_field( field_group: FieldGroup, path: Path, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Resolve the field on the given source object. 
@@ -465,7 +494,7 @@ def execute_field( calling its resolve function, then calls complete_value to await coroutine objects, serialize scalars, or execute the sub-selection-set for objects. """ - field_name = field_group[0].name.value + field_name = field_group.fields[0].node.name.value field_def = self.schema.get_field(parent_type, field_name) if not field_def: return Undefined @@ -483,7 +512,9 @@ def execute_field( try: # Build a dictionary of arguments from the field.arguments AST, using the # variables scope to fulfill any variable references. - args = get_argument_values(field_def, field_group[0], self.variable_values) + args = get_argument_values( + field_def, field_group.fields[0].node, self.variable_values + ) # Note that contrary to the JavaScript implementation, we pass the context # value as part of the resolve info. @@ -497,10 +528,17 @@ def execute_field( path, result, incremental_data_record, + defer_map, ) completed = self.complete_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) if self.is_awaitable(completed): # noinspection PyShadowingNames @@ -547,8 +585,8 @@ def build_resolve_info( # The resolve function's first argument is a collection of information about # the current execution state. return GraphQLResolveInfo( - field_group[0].name.value, - field_group, + field_group.fields[0].node.name.value, + field_group.to_nodes(), field_def.type, parent_type, path, @@ -570,7 +608,7 @@ def handle_field_error( incremental_data_record: IncrementalDataRecord, ) -> None: """Handle error properly according to the field type.""" - error = located_error(raw_error, field_group, path.as_list()) + error = located_error(raw_error, field_group.to_nodes(), path.as_list()) # If the field type is non-nullable, then it is resolved without any protection # from errors, however it still properly locates the error. @@ -589,6 +627,7 @@ def complete_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete a value. @@ -626,6 +665,7 @@ def complete_value( path, result, incremental_data_record, + defer_map, ) if completed is None: msg = ( @@ -642,7 +682,13 @@ def complete_value( # If field type is List, complete each item in the list with inner type if is_list_type(return_type): return self.complete_list_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is a leaf type, Scalar or Enum, serialize to a valid value, @@ -654,13 +700,25 @@ def complete_value( # Object type and complete for that type. if is_abstract_type(return_type): return self.complete_abstract_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # If field type is Object, execute and complete all sub-selections. if is_object_type(return_type): return self.complete_object_value( - return_type, field_group, info, path, result, incremental_data_record + return_type, + field_group, + info, + path, + result, + incremental_data_record, + defer_map, ) # Not reachable. All possible output types have been considered. 
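Note: the `defer_map` argument threaded through the completion methods above is a `RefMap` keyed by `DeferUsage` objects; `RefMap`, added by this patch under src/graphql/pyutils/ref_map.py, compares keys by identity rather than by equality, so two equal-looking usages stay distinct entries (and keys need not be hashable). A minimal sketch of that semantics, assuming a simplified stand-in rather than the shipped implementation:

    # hypothetical identity-keyed map, not the actual RefMap code
    class IdentityMap:
        def __init__(self, items=None):
            # id(key) -> (key, value); storing the key keeps it alive,
            # so its id() cannot be recycled for another object
            self._map = {}
            for key, value in items or ():
                self._map[id(key)] = (key, value)

        def __setitem__(self, key, value):
            self._map[id(key)] = (key, value)

        def get(self, key, default=None):
            entry = self._map.get(id(key))
            return entry[1] if entry is not None else default

    a = ["spread-1"]
    b = ["spread-1"]  # equal to a, but a distinct (and unhashable) object
    m = IdentityMap([(a, "record-a")])
    assert a == b
    assert m.get(a) == "record-a"
    assert m.get(b) is None  # equality is not enough; identity is required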
@@ -678,6 +736,7 @@ async def complete_awaitable_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> Any: """Complete an awaitable value.""" try: @@ -689,6 +748,7 @@ async def complete_awaitable_value( path, resolved, incremental_data_record, + defer_map, ) if self.is_awaitable(completed): completed = await completed @@ -700,12 +760,12 @@ async def complete_awaitable_value( completed = None return completed - def get_stream_values( + def get_stream_usage( self, field_group: FieldGroup, path: Path - ) -> StreamArguments | None: - """Get stream values. + ) -> StreamUsage | None: + """Get stream usage. - Returns an object containing the `@stream` arguments if a field should be + Returns an object containing info for streaming if a field should be streamed based on the experimental flag, stream directive present and not disabled by the "if" argument. """ @@ -713,10 +773,14 @@ def get_stream_values( if isinstance(path.key, int): return None + stream_usage = self._stream_usages.get(field_group) + if stream_usage is not None: + return stream_usage # pragma: no cover + # validation only allows equivalent streams on multiple fields, so it is # safe to only check the first field_node for the stream directive stream = get_directive_values( - GraphQLStreamDirective, field_group[0], self.variable_values + GraphQLStreamDirective, field_group.fields[0].node, self.variable_values ) if not stream or stream.get("if") is False: @@ -734,8 +798,21 @@ def get_stream_values( ) raise TypeError(msg) - label = stream.get("label") - return StreamArguments(initial_count=initial_count, label=label) + streamed_field_group = FieldGroup( + [ + FieldDetails(field_details.node, None) + for field_details in field_group.fields + ], + NON_DEFERRED_TARGET_SET, + ) + + stream_usage = StreamUsage( + stream.get("label"), stream["initialCount"], streamed_field_group + ) + + self._stream_usages[field_group] = stream_usage + + return stream_usage async def complete_async_iterator_value( self, @@ -745,36 +822,39 @@ async def complete_async_iterator_value( path: Path, async_iterator: AsyncIterator[Any], incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> list[Any]: """Complete an async iterator. Complete an async iterator value by completing the result and calling recursively until all the results are completed. 
""" - stream = self.get_stream_values(field_group, path) + stream_usage = self.get_stream_usage(field_group, path) complete_list_item_value = self.complete_list_item_value awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append completed_results: list[Any] = [] index = 0 while True: - if ( - stream - and isinstance(stream.initial_count, int) - and index >= stream.initial_count - ): + if stream_usage and index >= stream_usage.initial_count: + try: + early_return = async_iterator.aclose # type: ignore + except AttributeError: + early_return = None + stream_record = StreamRecord(path, stream_usage.label, early_return) + with suppress_timeout_error: await wait_for( shield( self.execute_stream_async_iterator( index, async_iterator, - field_group, + stream_usage.field_group, info, item_type, path, incremental_data_record, - stream.label, + stream_record, ) ), timeout=ASYNC_DELAY, @@ -789,7 +869,7 @@ async def complete_async_iterator_value( break except Exception as raw_error: raise located_error( - raw_error, field_group, path.as_list() + raw_error, field_group.to_nodes(), path.as_list() ) from raw_error if complete_list_item_value( value, @@ -799,6 +879,7 @@ async def complete_async_iterator_value( info, item_path, incremental_data_record, + defer_map, ): append_awaitable(index) @@ -829,6 +910,7 @@ def complete_list_value( path: Path, result: AsyncIterable[Any] | Iterable[Any], incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[list[Any]]: """Complete a list value. @@ -846,6 +928,7 @@ def complete_list_value( path, async_iterator, incremental_data_record, + defer_map, ) if not is_iterable(result): @@ -855,35 +938,34 @@ def complete_list_value( ) raise GraphQLError(msg) - stream = self.get_stream_values(field_group, path) + stream_usage = self.get_stream_usage(field_group, path) # This is specified as a simple map, however we're optimizing the path where # the list contains no coroutine objects by avoiding creating another coroutine # object. complete_list_item_value = self.complete_list_item_value + current_parents = incremental_data_record awaitable_indices: list[int] = [] append_awaitable = awaitable_indices.append - previous_incremental_data_record = incremental_data_record completed_results: list[Any] = [] + stream_record: StreamRecord | None = None for index, item in enumerate(result): # No need to modify the info object containing the path, since from here on # it is not ever accessed by resolver functions. 
item_path = path.add_key(index, None) - if ( - stream - and isinstance(stream.initial_count, int) - and index >= stream.initial_count - ): - previous_incremental_data_record = self.execute_stream_field( + if stream_usage and index >= stream_usage.initial_count: + if stream_record is None: + stream_record = StreamRecord(path, stream_usage.label) + current_parents = self.execute_stream_field( path, item_path, item, - field_group, + stream_usage.field_group, info, item_type, - previous_incremental_data_record, - stream.label, + current_parents, + stream_record, ) continue @@ -895,9 +977,15 @@ def complete_list_value( info, item_path, incremental_data_record, + defer_map, ): append_awaitable(index) + if stream_record is not None: + self.incremental_publisher.set_is_final_record( + cast(StreamItemsRecord, current_parents) + ) + if not awaitable_indices: return completed_results @@ -928,6 +1016,7 @@ def complete_list_item_value( info: GraphQLResolveInfo, item_path: Path, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> bool: """Complete a list item value by adding it to the completed results. @@ -944,6 +1033,7 @@ def complete_list_item_value( item_path, item, incremental_data_record, + defer_map, ) ) return True @@ -956,6 +1046,7 @@ def complete_list_item_value( item_path, item, incremental_data_record, + defer_map, ) if is_awaitable(completed_item): @@ -1019,6 +1110,7 @@ def complete_abstract_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[Any]: """Complete an abstract value. @@ -1045,6 +1137,7 @@ async def await_complete_object_value() -> Any: path, result, incremental_data_record, + defer_map, ) if self.is_awaitable(value): return await value # type: ignore @@ -1062,6 +1155,7 @@ async def await_complete_object_value() -> Any: path, result, incremental_data_record, + defer_map, ) def ensure_valid_runtime_type( @@ -1082,7 +1176,7 @@ def ensure_valid_runtime_type( " a 'resolve_type' function or each possible type should provide" " an 'is_type_of' function." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if is_object_type(runtime_type_name): # pragma: no cover msg = ( @@ -1098,7 +1192,7 @@ def ensure_valid_runtime_type( f" for field '{info.parent_type.name}.{info.field_name}' with value" f" {inspect(result)}, received '{inspect(runtime_type_name)}'." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) runtime_type = self.schema.get_type(runtime_type_name) @@ -1107,21 +1201,21 @@ def ensure_valid_runtime_type( f"Abstract type '{return_type.name}' was resolved to a type" f" '{runtime_type_name}' that does not exist inside the schema." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if not is_object_type(runtime_type): msg = ( f"Abstract type '{return_type.name}' was resolved" f" to a non-object type '{runtime_type_name}'." ) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) if not self.schema.is_sub_type(return_type, runtime_type): msg = ( f"Runtime Object type '{runtime_type.name}' is not a possible" f" type for '{return_type.name}'." 
) - raise GraphQLError(msg, field_group) + raise GraphQLError(msg, field_group.to_nodes()) # noinspection PyTypeChecker return runtime_type @@ -1134,6 +1228,7 @@ def complete_object_value( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Complete an Object value by executing all sub-selections.""" # If there is an `is_type_of()` predicate function, call it with the current @@ -1150,7 +1245,12 @@ async def execute_subfields_async() -> dict[str, Any]: return_type, result, field_group ) return self.collect_and_execute_subfields( - return_type, field_group, path, result, incremental_data_record + return_type, + field_group, + path, + result, + incremental_data_record, + defer_map, ) # type: ignore return execute_subfields_async() @@ -1159,7 +1259,7 @@ async def execute_subfields_async() -> dict[str, Any]: raise invalid_return_type_error(return_type, result, field_group) return self.collect_and_execute_subfields( - return_type, field_group, path, result, incremental_data_record + return_type, field_group, path, result, incremental_data_record, defer_map ) def collect_and_execute_subfields( @@ -1169,32 +1269,47 @@ def collect_and_execute_subfields( path: Path, result: Any, incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> AwaitableOrValue[dict[str, Any]]: """Collect sub-fields to execute to complete this value.""" - sub_grouped_field_set, sub_patches = self.collect_subfields( - return_type, field_group + grouped_field_set, new_grouped_field_set_details, new_defer_usages = ( + self.collect_subfields(return_type, field_group) + ) + + incremental_publisher = self.incremental_publisher + new_defer_map = add_new_deferred_fragments( + incremental_publisher, + new_defer_usages, + incremental_data_record, + defer_map, + path, + ) + new_deferred_grouped_field_set_records = add_new_deferred_grouped_field_sets( + incremental_publisher, new_grouped_field_set_details, new_defer_map, path ) sub_fields = self.execute_fields( - return_type, result, path, sub_grouped_field_set, incremental_data_record + return_type, + result, + path, + grouped_field_set, + incremental_data_record, + new_defer_map, ) - for sub_patch in sub_patches: - label, sub_patch_grouped_field_set = sub_patch - self.execute_deferred_fragment( - return_type, - result, - sub_patch_grouped_field_set, - incremental_data_record, - label, - path, - ) + self.execute_deferred_grouped_field_sets( + return_type, + result, + path, + new_deferred_grouped_field_set_records, + new_defer_map, + ) return sub_fields def collect_subfields( self, return_type: GraphQLObjectType, field_group: FieldGroup - ) -> FieldsAndPatches: + ) -> CollectFieldsResult: """Collect subfields. 
A cached collection of relevant subfields with regard to the return type is @@ -1258,57 +1373,91 @@ async def callback(payload: Any) -> ExecutionResult: return map_async_iterable(result_or_stream, callback) - def execute_deferred_fragment( + def execute_deferred_grouped_field_sets( + self, + parent_type: GraphQLObjectType, + source_value: Any, + path: Path | None, + new_deferred_grouped_field_set_records: Sequence[DeferredGroupedFieldSetRecord], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + ) -> None: + """Execute deferred grouped field sets.""" + for deferred_grouped_field_set_record in new_deferred_grouped_field_set_records: + if deferred_grouped_field_set_record.should_initiate_defer: + + async def execute_deferred_grouped_field_set( + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + ) -> None: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + self.add_task( + execute_deferred_grouped_field_set( + deferred_grouped_field_set_record + ) + ) + + else: + self.execute_deferred_grouped_field_set( + parent_type, + source_value, + path, + deferred_grouped_field_set_record, + defer_map, + ) + + def execute_deferred_grouped_field_set( self, parent_type: GraphQLObjectType, source_value: Any, - fields: GroupedFieldSet, - parent_context: IncrementalDataRecord, - label: str | None = None, - path: Path | None = None, + path: Path | None, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], ) -> None: - """Execute deferred fragment.""" + """Execute deferred grouped field set.""" incremental_publisher = self.incremental_publisher - incremental_data_record = ( - incremental_publisher.prepare_new_deferred_fragment_record( - label, path, parent_context - ) - ) try: - awaitable_or_data = self.execute_fields( - parent_type, source_value, path, fields, incremental_data_record + incremental_result = self.execute_fields( + parent_type, + source_value, + path, + deferred_grouped_field_set_record.grouped_field_set, + deferred_grouped_field_set_record, + defer_map, ) - if self.is_awaitable(awaitable_or_data): + if self.is_awaitable(incremental_result): + incremental_result = cast(Awaitable, incremental_result) - async def await_data() -> None: + async def await_incremental_result() -> None: try: - data = await awaitable_or_data # type: ignore + result = await incremental_result except GraphQLError as error: - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, None + incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error ) else: - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, data + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, result ) - self.add_task(await_data()) + self.add_task(await_incremental_result()) else: - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, - awaitable_or_data, # type: ignore + incremental_publisher.complete_deferred_grouped_field_set( + deferred_grouped_field_set_record, + incremental_result, # type: ignore ) + except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.complete_deferred_fragment_record( - incremental_data_record, None + 
incremental_publisher.mark_errored_deferred_grouped_field_set( + deferred_grouped_field_set_record, error ) - awaitable_or_data = None def execute_stream_field( self, @@ -1318,14 +1467,15 @@ def execute_stream_field( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - parent_context: IncrementalDataRecord, - label: str | None = None, - ) -> SubsequentDataRecord: + incremental_data_record: IncrementalDataRecord, + stream_record: StreamRecord, + ) -> StreamItemsRecord: """Execute stream field.""" is_awaitable = self.is_awaitable incremental_publisher = self.incremental_publisher - incremental_data_record = incremental_publisher.prepare_new_stream_items_record( - label, item_path, parent_context + stream_items_record = StreamItemsRecord(stream_record, item_path) + incremental_publisher.report_new_stream_items_record( + stream_items_record, incremental_data_record ) completed_item: Any @@ -1339,23 +1489,21 @@ async def await_completed_awaitable_item() -> None: info, item_path, item, - incremental_data_record, + stream_items_record, + RefMap(), ) except GraphQLError as error: - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) else: incremental_publisher.complete_stream_items_record( - incremental_data_record, [value] + stream_items_record, [value] ) self.add_task(await_completed_awaitable_item()) - return incremental_data_record + return stream_items_record try: try: @@ -1365,7 +1513,8 @@ async def await_completed_awaitable_item() -> None: info, item_path, item, - incremental_data_record, + stream_items_record, + RefMap(), ) except Exception as raw_error: self.handle_field_error( @@ -1373,17 +1522,16 @@ async def await_completed_awaitable_item() -> None: item_type, field_group, item_path, - incremental_data_record, + stream_items_record, ) completed_item = None - incremental_publisher.filter(item_path, incremental_data_record) + incremental_publisher.filter(item_path, stream_items_record) except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) - return incremental_data_record + return stream_items_record if is_awaitable(completed_item): @@ -1397,30 +1545,27 @@ async def await_completed_item() -> None: item_type, field_group, item_path, - incremental_data_record, + stream_items_record, ) - incremental_publisher.filter(item_path, incremental_data_record) + incremental_publisher.filter(item_path, stream_items_record) value = None except GraphQLError as error: # pragma: no cover - incremental_publisher.add_field_error( - incremental_data_record, error - ) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) else: incremental_publisher.complete_stream_items_record( - 
incremental_data_record, [value] + stream_items_record, [value] ) self.add_task(await_completed_item()) - return incremental_data_record + return stream_items_record incremental_publisher.complete_stream_items_record( - incremental_data_record, [completed_item] + stream_items_record, [completed_item] ) - return incremental_data_record + return stream_items_record async def execute_stream_async_iterator_item( self, @@ -1428,8 +1573,7 @@ async def execute_stream_async_iterator_item( field_group: FieldGroup, info: GraphQLResolveInfo, item_type: GraphQLOutputType, - incremental_data_record: StreamItemsRecord, - path: Path, + stream_items_record: StreamItemsRecord, item_path: Path, ) -> Any: """Execute stream iterator item.""" @@ -1439,14 +1583,27 @@ async def execute_stream_async_iterator_item( item = await anext(async_iterator) except StopAsyncIteration as raw_error: self.incremental_publisher.set_is_completed_async_iterator( - incremental_data_record + stream_items_record ) raise StopAsyncIteration from raw_error except Exception as raw_error: - raise located_error(raw_error, field_group, path.as_list()) from raw_error + raise located_error( + raw_error, + field_group.to_nodes(), + stream_items_record.stream_record.path, + ) from raw_error + else: + if stream_items_record.stream_record.errors: + raise StopAsyncIteration # pragma: no cover try: completed_item = self.complete_value( - item_type, field_group, info, item_path, item, incremental_data_record + item_type, + field_group, + info, + item_path, + item, + stream_items_record, + RefMap(), ) return ( await completed_item @@ -1455,9 +1612,9 @@ async def execute_stream_async_iterator_item( ) except Exception as raw_error: self.handle_field_error( - raw_error, item_type, field_group, item_path, incremental_data_record + raw_error, item_type, field_group, item_path, stream_items_record ) - self.incremental_publisher.filter(item_path, incremental_data_record) + self.incremental_publisher.filter(item_path, stream_items_record) async def execute_stream_async_iterator( self, @@ -1467,21 +1624,19 @@ async def execute_stream_async_iterator( info: GraphQLResolveInfo, item_type: GraphQLOutputType, path: Path, - parent_context: IncrementalDataRecord, - label: str | None = None, + incremental_data_record: IncrementalDataRecord, + stream_record: StreamRecord, ) -> None: """Execute stream iterator.""" incremental_publisher = self.incremental_publisher index = initial_index - previous_incremental_data_record = parent_context + current_incremental_data_record = incremental_data_record - done = False while True: item_path = Path(path, index, None) - incremental_data_record = ( - incremental_publisher.prepare_new_stream_items_record( - label, item_path, previous_incremental_data_record, async_iterator - ) + stream_items_record = StreamItemsRecord(stream_record, item_path) + incremental_publisher.report_new_stream_items_record( + stream_items_record, current_incremental_data_record ) try: @@ -1490,15 +1645,13 @@ async def execute_stream_async_iterator( field_group, info, item_type, - incremental_data_record, - path, + stream_items_record, item_path, ) except GraphQLError as error: - incremental_publisher.add_field_error(incremental_data_record, error) - incremental_publisher.filter(path, incremental_data_record) - incremental_publisher.complete_stream_items_record( - incremental_data_record, None + incremental_publisher.filter(path, stream_items_record) + incremental_publisher.mark_errored_stream_items_record( + stream_items_record, error ) if async_iterator: 
# pragma: no cover else with suppress_exceptions: @@ -1506,18 +1659,20 @@ async def execute_stream_async_iterator( # running generators cannot be closed since Python 3.8, # so we need to remember that this iterator is already canceled self._canceled_iterators.add(async_iterator) - break + return except StopAsyncIteration: done = True + completed_item = None + else: + done = False incremental_publisher.complete_stream_items_record( - incremental_data_record, - [completed_item], + stream_items_record, [completed_item] ) if done: break - previous_incremental_data_record = incremental_data_record + current_incremental_data_record = stream_items_record index += 1 def add_task(self, awaitable: Awaitable[Any]) -> None: @@ -1667,7 +1822,7 @@ def execute_impl( # at which point we still log the error and null the parent field, which # in this case is the entire response. incremental_publisher = context.incremental_publisher - initial_result_record = incremental_publisher.prepare_initial_result_record() + initial_result_record = InitialResultRecord() try: data = context.execute_operation(initial_result_record) if context.is_awaitable(data): @@ -1759,10 +1914,92 @@ def invalid_return_type_error( """Create a GraphQLError for an invalid return type.""" return GraphQLError( f"Expected value of type '{return_type.name}' but got: {inspect(result)}.", - field_group, + field_group.to_nodes(), ) +def add_new_deferred_fragments( + incremental_publisher: IncrementalPublisher, + new_defer_usages: Sequence[DeferUsage], + incremental_data_record: IncrementalDataRecord, + defer_map: RefMap[DeferUsage, DeferredFragmentRecord] | None = None, + path: Path | None = None, +) -> RefMap[DeferUsage, DeferredFragmentRecord]: + """Add new deferred fragments to the defer map.""" + new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] + if not new_defer_usages: + return RefMap() if defer_map is None else defer_map + new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + for defer_usage in new_defer_usages: + ancestors = defer_usage.ancestors + parent_defer_usage = ancestors[0] if ancestors else None + + parent = ( + cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) + if parent_defer_usage is None + else deferred_fragment_record_from_defer_usage( + parent_defer_usage, new_defer_map + ) + ) + + deferred_fragment_record = DeferredFragmentRecord(path, defer_usage.label) + + incremental_publisher.report_new_defer_fragment_record( + deferred_fragment_record, parent + ) + + new_defer_map[defer_usage] = deferred_fragment_record + + return new_defer_map + + +def deferred_fragment_record_from_defer_usage( + defer_usage: DeferUsage, defer_map: RefMap[DeferUsage, DeferredFragmentRecord] +) -> DeferredFragmentRecord: + """Get the deferred fragment record mapped to the given defer usage.""" + return defer_map[defer_usage] + + +def add_new_deferred_grouped_field_sets( + incremental_publisher: IncrementalPublisher, + new_grouped_field_set_details: Mapping[DeferUsageSet, GroupedFieldSetDetails], + defer_map: RefMap[DeferUsage, DeferredFragmentRecord], + path: Path | None = None, +) -> list[DeferredGroupedFieldSetRecord]: + """Add new deferred grouped field sets to the defer map.""" + new_deferred_grouped_field_set_records: list[DeferredGroupedFieldSetRecord] = [] + + for ( + new_grouped_field_set_defer_usages, + grouped_field_set_details, + ) in new_grouped_field_set_details.items(): + deferred_fragment_records = get_deferred_fragment_records( + new_grouped_field_set_defer_usages, 
defer_map
+        )
+        deferred_grouped_field_set_record = DeferredGroupedFieldSetRecord(
+            deferred_fragment_records,
+            grouped_field_set_details.grouped_field_set,
+            grouped_field_set_details.should_initiate_defer,
+            path,
+        )
+        incremental_publisher.report_new_deferred_grouped_field_set_record(
+            deferred_grouped_field_set_record
+        )
+        new_deferred_grouped_field_set_records.append(deferred_grouped_field_set_record)
+
+    return new_deferred_grouped_field_set_records
+
+
+def get_deferred_fragment_records(
+    defer_usages: DeferUsageSet, defer_map: RefMap[DeferUsage, DeferredFragmentRecord]
+) -> list[DeferredFragmentRecord]:
+    """Get the deferred fragment records for the given defer usages."""
+    return [
+        deferred_fragment_record_from_defer_usage(defer_usage, defer_map)
+        for defer_usage in defer_usages
+    ]
+
+
 def get_typename(value: Any) -> str | None:
     """Get the ``__typename`` property of the given value."""
     if isinstance(value, Mapping):
@@ -2025,12 +2262,12 @@ def execute_subscription(
     ).grouped_field_set
     first_root_field = next(iter(grouped_field_set.items()))
     response_name, field_group = first_root_field
-    field_name = field_group[0].name.value
+    field_name = field_group.fields[0].node.name.value
     field_def = schema.get_field(root_type, field_name)
 
     if not field_def:
         msg = f"The subscription field '{field_name}' is not defined."
-        raise GraphQLError(msg, field_group)
+        raise GraphQLError(msg, field_group.to_nodes())
 
     path = Path(None, response_name, root_type.name)
     info = context.build_resolve_info(field_def, field_group, root_type, path)
@@ -2041,7 +2278,9 @@ def execute_subscription(
     try:
         # Build a dictionary of arguments from the field.arguments AST, using the
        # variables scope to fulfill any variable references.
-        args = get_argument_values(field_def, field_group[0], context.variable_values)
+        args = get_argument_values(
+            field_def, field_group.fields[0].node, context.variable_values
+        )
 
         # Call the `subscribe()` resolver or the default resolver to produce an
         # AsyncIterable yielding raw payloads.
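For readers following the recurring `field_group[0]` to `field_group.fields[0].node` edits throughout this series: a field group is no longer a bare list of field nodes but a structure whose `fields` attribute holds `FieldDetails` entries wrapping each node, with `to_nodes()` recovering the plain node list (the same helper is added to `single_field_subscriptions.py` at the end of this section). A toy sketch of the shape; the real classes live in `collect_fields`, and the second member names below are assumed for illustration:

    # Toy stand-ins; the real FieldGroup/FieldDetails are defined in collect_fields.
    from typing import Any, NamedTuple

    class FieldDetails(NamedTuple):
        node: Any    # the FieldNode from the document AST
        target: Any  # hypothetical name for the defer target (None if not deferred)

    class FieldGroup(NamedTuple):
        fields: list[FieldDetails]
        targets: Any  # hypothetical name for the defer target set

        def to_nodes(self) -> list[Any]:
            return [details.node for details in self.fields]

    group = FieldGroup([FieldDetails("<FieldNode>", None)], None)
    assert group.fields[0].node == "<FieldNode>"  # the new access pattern
    assert group.to_nodes() == ["<FieldNode>"]    # what the error paths now pass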
@@ -2054,14 +2293,16 @@ async def await_result() -> AsyncIterable[Any]: try: return assert_event_stream(await result) except Exception as error: - raise located_error(error, field_group, path.as_list()) from error + raise located_error( + error, field_group.to_nodes(), path.as_list() + ) from error return await_result() return assert_event_stream(result) except Exception as error: - raise located_error(error, field_group, path.as_list()) from error + raise located_error(error, field_group.to_nodes(), path.as_list()) from error def assert_event_stream(result: Any) -> AsyncIterable: diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index a1b8c507..18890fb3 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -2,14 +2,14 @@ from __future__ import annotations -from asyncio import Event, ensure_future, gather +from asyncio import Event, ensure_future, gather, sleep from contextlib import suppress from typing import ( TYPE_CHECKING, Any, AsyncGenerator, - AsyncIterator, Awaitable, + Callable, Collection, Iterator, NamedTuple, @@ -25,6 +25,7 @@ if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from ..pyutils import Path + from .collect_fields import GroupedFieldSet __all__ = [ "ASYNC_DELAY", @@ -54,6 +55,80 @@ suppress_key_error = suppress(KeyError) +class FormattedCompletedResult(TypedDict, total=False): + """Formatted completed execution result""" + + path: list[str | int] + label: str + errors: list[GraphQLFormattedError] + + +class CompletedResult: + """Completed execution result""" + + path: list[str | int] + label: str | None + errors: list[GraphQLError] | None + + __slots__ = "errors", "label", "path" + + def __init__( + self, + path: list[str | int], + label: str | None = None, + errors: list[GraphQLError] | None = None, + ) -> None: + self.path = path + self.label = label + self.errors = errors + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + if self.errors: + args.append(f"errors={self.errors!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedCompletedResult: + """Get execution result formatted according to the specification.""" + formatted: FormattedCompletedResult = {"path": self.path} + if self.label is not None: + formatted["label"] = self.label + if self.errors is not None: + formatted["errors"] = [error.formatted for error in self.errors] + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return ( + other.get("path") == self.path + and ("label" not in other or other["label"] == self.label) + and ("errors" not in other or other["errors"] == self.errors) + ) + if isinstance(other, tuple): + size = len(other) + return 1 < size < 4 and (self.path, self.label, self.errors)[:size] == other + return ( + isinstance(other, self.__class__) + and other.path == self.path + and other.label == self.label + and other.errors == self.errors + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + +class IncrementalUpdate(NamedTuple): + """Incremental update""" + + incremental: list[IncrementalResult] + completed: list[CompletedResult] + + class FormattedExecutionResult(TypedDict, total=False): """Formatted execution result""" @@ -147,31 +222,26 @@ class InitialIncrementalExecutionResult: data: dict[str, Any] | None errors: 
list[GraphQLError] | None - incremental: Sequence[IncrementalResult] | None has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "has_next", "incremental" + __slots__ = "data", "errors", "extensions", "has_next" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, - incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors - self.incremental = incremental self.has_next = has_next self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") if self.has_next: args.append("has_next") if self.extensions: @@ -184,8 +254,6 @@ def formatted(self) -> FormattedInitialIncrementalExecutionResult: formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] - if self.incremental: - formatted["incremental"] = [result.formatted for result in self.incremental] formatted["hasNext"] = self.has_next if self.extensions is not None: formatted["extensions"] = self.extensions @@ -196,10 +264,6 @@ def __eq__(self, other: object) -> bool: return ( other.get("data") == self.data and other.get("errors") == self.errors - and ( - "incremental" not in other - or other["incremental"] == self.incremental - ) and ("hasNext" not in other or other["hasNext"] == self.has_next) and ( "extensions" not in other or other["extensions"] == self.extensions @@ -208,11 +272,10 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 6 + 1 < size < 5 and ( self.data, self.errors, - self.incremental, self.has_next, self.extensions, )[:size] @@ -222,7 +285,6 @@ def __eq__(self, other: object) -> bool: isinstance(other, self.__class__) and other.data == self.data and other.errors == self.errors - and other.incremental == self.incremental and other.has_next == self.has_next and other.extensions == self.extensions ) @@ -244,7 +306,6 @@ class FormattedIncrementalDeferResult(TypedDict, total=False): data: dict[str, Any] | None errors: list[GraphQLFormattedError] path: list[str | int] - label: str extensions: dict[str, Any] @@ -254,23 +315,20 @@ class IncrementalDeferResult: data: dict[str, Any] | None errors: list[GraphQLError] | None path: list[str | int] | None - label: str | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "label", "path" + __slots__ = "data", "errors", "extensions", "path" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, path: list[str | int] | None = None, - label: str | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors self.path = path - self.label = label self.extensions = extensions def __repr__(self) -> str: @@ -278,8 +336,6 @@ def __repr__(self) -> str: args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] if self.path: args.append(f"path={self.path!r}") - if self.label: - args.append(f"label={self.label!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -292,8 +348,6 @@ def formatted(self) -> FormattedIncrementalDeferResult: formatted["errors"] = [error.formatted for error in 
self.errors]
         if self.path is not None:
             formatted["path"] = self.path
-        if self.label is not None:
-            formatted["label"] = self.label
         if self.extensions is not None:
             formatted["extensions"] = self.extensions
         return formatted
@@ -304,7 +358,6 @@ def __eq__(self, other: object) -> bool:
                 other.get("data") == self.data
                 and other.get("errors") == self.errors
                 and ("path" not in other or other["path"] == self.path)
-                and ("label" not in other or other["label"] == self.label)
                 and (
                     "extensions" not in other or other["extensions"] == self.extensions
                 )
@@ -312,18 +365,14 @@ def __eq__(self, other: object) -> bool:
         if isinstance(other, tuple):
             size = len(other)
             return (
-                1 < size < 6
-                and (self.data, self.errors, self.path, self.label, self.extensions)[
-                    :size
-                ]
-                == other
+                1 < size < 5
+                and (self.data, self.errors, self.path, self.extensions)[:size] == other
             )
         return (
             isinstance(other, self.__class__)
             and other.data == self.data
             and other.errors == self.errors
             and other.path == self.path
-            and other.label == self.label
             and other.extensions == self.extensions
         )
 
@@ -337,7 +386,6 @@ class FormattedIncrementalStreamResult(TypedDict, total=False):
     items: list[Any] | None
     errors: list[GraphQLFormattedError]
     path: list[str | int]
-    label: str
     extensions: dict[str, Any]
 
 
@@ -347,7 +395,6 @@ class IncrementalStreamResult:
     items: list[Any] | None
     errors: list[GraphQLError] | None
     path: list[str | int] | None
-    label: str | None
     extensions: dict[str, Any] | None
 
-    __slots__ = "errors", "extensions", "items", "label", "path"
+    __slots__ = "errors", "extensions", "items", "path"
@@ -357,13 +404,11 @@ def __init__(
         items: list[Any] | None = None,
         errors: list[GraphQLError] | None = None,
         path: list[str | int] | None = None,
-        label: str | None = None,
         extensions: dict[str, Any] | None = None,
     ) -> None:
         self.items = items
         self.errors = errors
         self.path = path
-        self.label = label
         self.extensions = extensions
 
     def __repr__(self) -> str:
@@ -371,8 +416,6 @@ def __repr__(self) -> str:
         args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"]
         if self.path:
             args.append(f"path={self.path!r}")
-        if self.label:
-            args.append(f"label={self.label!r}")
         if self.extensions:
             args.append(f"extensions={self.extensions}")
         return f"{name}({', '.join(args)})"
@@ -385,8 +428,6 @@ def formatted(self) -> FormattedIncrementalStreamResult:
             formatted["errors"] = [error.formatted for error in self.errors]
         if self.path is not None:
             formatted["path"] = self.path
-        if self.label is not None:
-            formatted["label"] = self.label
         if self.extensions is not None:
             formatted["extensions"] = self.extensions
         return formatted
@@ -397,7 +438,6 @@ def __eq__(self, other: object) -> bool:
             other.get("items") == self.items
             and other.get("errors") == self.errors
             and ("path" not in other or other["path"] == self.path)
-            and ("label" not in other or other["label"] == self.label)
             and (
                 "extensions" not in other or other["extensions"] == self.extensions
             )
@@ -405,10 +445,8 @@ def __eq__(self, other: object) -> bool:
         if isinstance(other, tuple):
             size = len(other)
             return (
-                1 < size < 6
-                and (self.items, self.errors, self.path, self.label, self.extensions)[
-                    :size
-                ]
+                1 < size < 5
+                and (self.items, self.errors, self.path, self.extensions)[:size]
                 == other
             )
         return (
@@ -416,7 +454,6 @@ def __eq__(self, other: object) -> bool:
             and other.items == self.items
             and other.errors == self.errors
             and other.path == self.path
-            and other.label == self.label
             and other.extensions == self.extensions
         )
 
@@ -434,8 +471,9 @@ class 
FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): """Formatted subsequent incremental execution result""" - incremental: list[FormattedIncrementalResult] hasNext: bool + incremental: list[FormattedIncrementalResult] + completed: list[FormattedCompletedResult] extensions: dict[str, Any] @@ -446,29 +484,34 @@ class SubsequentIncrementalExecutionResult: - ``incremental`` is a list of the results from defer/stream directives. """ - __slots__ = "extensions", "has_next", "incremental" + __slots__ = "completed", "extensions", "has_next", "incremental" - incremental: Sequence[IncrementalResult] | None has_next: bool + incremental: Sequence[IncrementalResult] | None + completed: Sequence[CompletedResult] | None extensions: dict[str, Any] | None def __init__( self, - incremental: Sequence[IncrementalResult] | None = None, has_next: bool = False, + incremental: Sequence[IncrementalResult] | None = None, + completed: Sequence[CompletedResult] | None = None, extensions: dict[str, Any] | None = None, ) -> None: - self.incremental = incremental self.has_next = has_next + self.incremental = incremental + self.completed = completed self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [] - if self.incremental: - args.append(f"incremental[{len(self.incremental)}]") if self.has_next: args.append("has_next") + if self.incremental: + args.append(f"incremental[{len(self.incremental)}]") + if self.completed: + args.append(f"completed[{len(self.completed)}]") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -477,9 +520,11 @@ def __repr__(self) -> str: def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: """Get execution result formatted according to the specification.""" formatted: FormattedSubsequentIncrementalExecutionResult = {} + formatted["hasNext"] = self.has_next if self.incremental: formatted["incremental"] = [result.formatted for result in self.incremental] - formatted["hasNext"] = self.has_next + if self.completed: + formatted["completed"] = [result.formatted for result in self.completed] if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -487,8 +532,12 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - ("incremental" not in other or other["incremental"] == self.incremental) - and ("hasNext" in other and other["hasNext"] == self.has_next) + ("hasNext" in other and other["hasNext"] == self.has_next) + and ( + "incremental" not in other + or other["incremental"] == self.incremental + ) + and ("completed" not in other or other["completed"] == self.completed) and ( "extensions" not in other or other["extensions"] == self.extensions ) @@ -496,18 +545,20 @@ def __eq__(self, other: object) -> bool: if isinstance(other, tuple): size = len(other) return ( - 1 < size < 4 + 1 < size < 5 and ( - self.incremental, self.has_next, + self.incremental, + self.completed, self.extensions, )[:size] == other ) return ( isinstance(other, self.__class__) - and other.incremental == self.incremental and other.has_next == self.has_next + and other.incremental == self.incremental + and other.completed == self.completed and other.extensions == self.extensions ) @@ -530,20 +581,20 @@ class IncrementalPublisher: The internal publishing state is managed as follows: - ``_released``: the set of Subsequent Data records that are ready to be sent to the + 
``_released``: the set of Subsequent Result records that are ready to be sent to the
     client, i.e. their parents have completed and they have also completed.
 
-    ``_pending``: the set of Subsequent Data records that are definitely pending, i.e.
+    ``_pending``: the set of Subsequent Result records that are definitely pending, i.e.
     their parents have completed so that they can no longer be filtered. This includes
-    all Subsequent Data records in `released`, as well as Subsequent Data records that
-    have not yet completed.
+    all Subsequent Result records in `released`, as well as the records that have not
+    yet completed.
 
     Note: Instead of sets we use dicts (with values set to None) which preserve order
     and thereby achieve more deterministic results.
     """
 
-    _released: dict[SubsequentDataRecord, None]
-    _pending: dict[SubsequentDataRecord, None]
+    _released: dict[SubsequentResultRecord, None]
+    _pending: dict[SubsequentResultRecord, None]
     _resolve: Event | None
 
     def __init__(self) -> None:
@@ -552,60 +603,107 @@ def __init__(self) -> None:
         self._resolve = None  # lazy initialization
         self._tasks: set[Awaitable] = set()
 
-    def prepare_initial_result_record(self) -> InitialResultRecord:
-        """Prepare a new initial result record."""
-        return InitialResultRecord(errors=[], children={})
-
-    def prepare_new_deferred_fragment_record(
-        self,
-        label: str | None,
-        path: Path | None,
-        parent_context: IncrementalDataRecord,
-    ) -> DeferredFragmentRecord:
-        """Prepare a new deferred fragment record."""
-        deferred_fragment_record = DeferredFragmentRecord(label, path)
+    @staticmethod
+    def report_new_defer_fragment_record(
+        deferred_fragment_record: DeferredFragmentRecord,
+        parent_incremental_result_record: InitialResultRecord
+        | DeferredFragmentRecord
+        | StreamItemsRecord,
+    ) -> None:
+        """Report a new deferred fragment record."""
+        parent_incremental_result_record.children[deferred_fragment_record] = None
 
-        parent_context.children[deferred_fragment_record] = None
-        return deferred_fragment_record
+    @staticmethod
+    def report_new_deferred_grouped_field_set_record(
+        deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord,
+    ) -> None:
+        """Report a new deferred grouped field set record."""
+        for (
+            deferred_fragment_record
+        ) in deferred_grouped_field_set_record.deferred_fragment_records:
+            deferred_fragment_record._pending[deferred_grouped_field_set_record] = None  # noqa: SLF001
+            deferred_fragment_record.deferred_grouped_field_set_records[
+                deferred_grouped_field_set_record
+            ] = None
+
+    @staticmethod
+    def report_new_stream_items_record(
+        stream_items_record: StreamItemsRecord,
+        parent_incremental_data_record: IncrementalDataRecord,
+    ) -> None:
+        """Report a new stream items record."""
+        if isinstance(parent_incremental_data_record, DeferredGroupedFieldSetRecord):
+            for parent in parent_incremental_data_record.deferred_fragment_records:
+                parent.children[stream_items_record] = None
+        else:
+            parent_incremental_data_record.children[stream_items_record] = None
 
-    def prepare_new_stream_items_record(
+    def complete_deferred_grouped_field_set(
         self,
-        label: str | None,
-        path: Path | None,
-        parent_context: IncrementalDataRecord,
-        async_iterator: AsyncIterator[Any] | None = None,
-    ) -> StreamItemsRecord:
-        """Prepare a new stream items record."""
-        stream_items_record = StreamItemsRecord(label, path, async_iterator)
-
-        parent_context.children[stream_items_record] = None
-        return stream_items_record
+        deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord,
+        data: dict[str, Any],
+    
) -> None: + """Complete the given deferred grouped field set record with the given data.""" + deferred_grouped_field_set_record.data = data + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + pending = deferred_fragment_record._pending # noqa: SLF001 + del pending[deferred_grouped_field_set_record] + if not pending: + self.complete_deferred_fragment_record(deferred_fragment_record) + + def mark_errored_deferred_grouped_field_set( + self, + deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord, + error: GraphQLError, + ) -> None: + """Mark the given deferred grouped field set record as errored.""" + for ( + deferred_fragment_record + ) in deferred_grouped_field_set_record.deferred_fragment_records: + deferred_fragment_record.errors.append(error) + self.complete_deferred_fragment_record(deferred_fragment_record) def complete_deferred_fragment_record( - self, - deferred_fragment_record: DeferredFragmentRecord, - data: dict[str, Any] | None, + self, deferred_fragment_record: DeferredFragmentRecord ) -> None: """Complete the given deferred fragment record.""" - deferred_fragment_record.data = data - deferred_fragment_record.is_completed = True self._release(deferred_fragment_record) def complete_stream_items_record( self, stream_items_record: StreamItemsRecord, - items: list[str] | None, + items: list[Any], ) -> None: """Complete the given stream items record.""" stream_items_record.items = items stream_items_record.is_completed = True self._release(stream_items_record) + def mark_errored_stream_items_record( + self, stream_items_record: StreamItemsRecord, error: GraphQLError + ) -> None: + """Mark the given stream items record as errored.""" + stream_items_record.stream_record.errors.append(error) + self.set_is_final_record(stream_items_record) + stream_items_record.is_completed = True + early_return = stream_items_record.stream_record.early_return + if early_return: + self._add_task(early_return()) + self._release(stream_items_record) + + @staticmethod + def set_is_final_record(stream_items_record: StreamItemsRecord) -> None: + """Mark stream items record as final.""" + stream_items_record.is_final_record = True + def set_is_completed_async_iterator( self, stream_items_record: StreamItemsRecord ) -> None: """Mark async iterator for stream items as completed.""" stream_items_record.is_completed_async_iterator = True + self.set_is_final_record(stream_items_record) def add_field_error( self, incremental_data_record: IncrementalDataRecord, error: GraphQLError @@ -657,29 +755,33 @@ def build_error_response( def filter( self, - null_path: Path, + null_path: Path | None, erroring_incremental_data_record: IncrementalDataRecord, ) -> None: """Filter out the given erroring incremental data record.""" - null_path_list = null_path.as_list() + null_path_list = null_path.as_list() if null_path else [] + + streams: list[StreamRecord] = [] - descendants = self._get_descendants(erroring_incremental_data_record.children) + children = self._get_children(erroring_incremental_data_record) + descendants = self._get_descendants(children) for child in descendants: - if not self._matches_path(child.path, null_path_list): + if not self._nulls_child_subsequent_result_record(child, null_path_list): continue child.filtered = True if isinstance(child, StreamItemsRecord): - async_iterator = child.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type:ignore - except AttributeError: # pragma: no cover - pass - else: 
- self._add_task(close_async_iterator) + streams.append(child.stream_record) + + early_returns = [] + for stream in streams: + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: + self._add_task(gather(*early_returns)) async def _subscribe( self, @@ -688,6 +790,8 @@ async def _subscribe( is_done = False pending = self._pending + await sleep(0) # execute pending tasks + try: while not is_done: released = self._released @@ -709,20 +813,18 @@ async def _subscribe( self._resolve = resolve = Event() await resolve.wait() finally: - close_async_iterators = [] - for incremental_data_record in pending: - if isinstance( - incremental_data_record, StreamItemsRecord - ): # pragma: no cover - async_iterator = incremental_data_record.async_iterator - if async_iterator: - try: - close_async_iterator = async_iterator.aclose() # type: ignore - except AttributeError: - pass - else: - close_async_iterators.append(close_async_iterator) - await gather(*close_async_iterators) + streams: list[StreamRecord] = [] + descendants = self._get_descendants(pending) + for subsequent_result_record in descendants: # pragma: no cover + if isinstance(subsequent_result_record, StreamItemsRecord): + streams.append(subsequent_result_record.stream_record) + early_returns = [] + for stream in streams: # pragma: no cover + early_return = stream.early_return + if early_return: + early_returns.append(early_return()) + if early_returns: # pragma: no cover + await gather(*early_returns) def _trigger(self) -> None: """Trigger the resolve event.""" @@ -731,82 +833,129 @@ def _trigger(self) -> None: resolve.set() self._resolve = Event() - def _introduce(self, item: SubsequentDataRecord) -> None: + def _introduce(self, item: SubsequentResultRecord) -> None: """Introduce a new IncrementalDataRecord.""" self._pending[item] = None - def _release(self, item: SubsequentDataRecord) -> None: + def _release(self, item: SubsequentResultRecord) -> None: """Release the given IncrementalDataRecord.""" if item in self._pending: self._released[item] = None self._trigger() - def _push(self, item: SubsequentDataRecord) -> None: + def _push(self, item: SubsequentResultRecord) -> None: """Push the given IncrementalDataRecord.""" self._released[item] = None self._pending[item] = None self._trigger() def _get_incremental_result( - self, completed_records: Collection[SubsequentDataRecord] + self, completed_records: Collection[SubsequentResultRecord] ) -> SubsequentIncrementalExecutionResult | None: """Get the incremental result with the completed records.""" + update = self._process_pending(completed_records) + incremental, completed = update.incremental, update.completed + + has_next = bool(self._pending) + if not incremental and not completed and has_next: + return None + + return SubsequentIncrementalExecutionResult( + has_next, incremental or None, completed or None + ) + + def _process_pending( + self, + completed_records: Collection[SubsequentResultRecord], + ) -> IncrementalUpdate: + """Process the pending records.""" incremental_results: list[IncrementalResult] = [] - encountered_completed_async_iterator = False - append_result = incremental_results.append - for incremental_data_record in completed_records: - incremental_result: IncrementalResult - for child in incremental_data_record.children: + completed_results: list[CompletedResult] = [] + to_result = self._completed_record_to_result + for subsequent_result_record in completed_records: + for child in subsequent_result_record.children: 
if child.filtered: continue self._publish(child) - if isinstance(incremental_data_record, StreamItemsRecord): - items = incremental_data_record.items - if incremental_data_record.is_completed_async_iterator: + incremental_result: IncrementalResult + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_final_record: + completed_results.append( + to_result(subsequent_result_record.stream_record) + ) + if subsequent_result_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload - encountered_completed_async_iterator = True - continue # pragma: no cover + continue + if subsequent_result_record.stream_record.errors: + continue incremental_result = IncrementalStreamResult( - items, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, + subsequent_result_record.items, + subsequent_result_record.errors or None, + subsequent_result_record.stream_record.path, ) + incremental_results.append(incremental_result) else: - data = incremental_data_record.data - incremental_result = IncrementalDeferResult( - data, - incremental_data_record.errors - if incremental_data_record.errors - else None, - incremental_data_record.path, - incremental_data_record.label, - ) - append_result(incremental_result) - - has_next = bool(self._pending) - if incremental_results: - return SubsequentIncrementalExecutionResult( - incremental=incremental_results, has_next=has_next - ) - if encountered_completed_async_iterator and not has_next: - return SubsequentIncrementalExecutionResult(has_next=False) - return None + completed_results.append(to_result(subsequent_result_record)) + if subsequent_result_record.errors: + continue + for ( + deferred_grouped_field_set_record + ) in subsequent_result_record.deferred_grouped_field_set_records: + if not deferred_grouped_field_set_record.sent: + deferred_grouped_field_set_record.sent = True + incremental_result = IncrementalDeferResult( + deferred_grouped_field_set_record.data, + deferred_grouped_field_set_record.errors or None, + deferred_grouped_field_set_record.path, + ) + incremental_results.append(incremental_result) + return IncrementalUpdate(incremental_results, completed_results) + + @staticmethod + def _completed_record_to_result( + completed_record: DeferredFragmentRecord | StreamRecord, + ) -> CompletedResult: + """Convert the completed record to a result.""" + return CompletedResult( + completed_record.path, + completed_record.label or None, + completed_record.errors or None, + ) - def _publish(self, subsequent_result_record: SubsequentDataRecord) -> None: + def _publish(self, subsequent_result_record: SubsequentResultRecord) -> None: """Publish the given incremental data record.""" - if subsequent_result_record.is_completed: + if isinstance(subsequent_result_record, StreamItemsRecord): + if subsequent_result_record.is_completed: + self._push(subsequent_result_record) + else: + self._introduce(subsequent_result_record) + elif subsequent_result_record._pending: # noqa: SLF001 + self._introduce(subsequent_result_record) + else: self._push(subsequent_result_record) + + @staticmethod + def _get_children( + erroring_incremental_data_record: IncrementalDataRecord, + ) -> dict[SubsequentResultRecord, None]: + """Get the children of the given erroring incremental data record.""" + children: dict[SubsequentResultRecord, None] = {} + if isinstance(erroring_incremental_data_record, 
DeferredGroupedFieldSetRecord): + for ( + erroring_incremental_result_record + ) in erroring_incremental_data_record.deferred_fragment_records: + for child in erroring_incremental_result_record.children: + children[child] = None else: - self._introduce(subsequent_result_record) + for child in erroring_incremental_data_record.children: + children[child] = None + return children def _get_descendants( self, - children: dict[SubsequentDataRecord, None], - descendants: dict[SubsequentDataRecord, None] | None = None, - ) -> dict[SubsequentDataRecord, None]: + children: dict[SubsequentResultRecord, None], + descendants: dict[SubsequentResultRecord, None] | None = None, + ) -> dict[SubsequentResultRecord, None]: """Get the descendants of the given children.""" if descendants is None: descendants = {} @@ -815,6 +964,24 @@ def _get_descendants( self._get_descendants(child.children, descendants) return descendants + def _nulls_child_subsequent_result_record( + self, + subsequent_result_record: SubsequentResultRecord, + null_path: list[str | int], + ) -> bool: + """Check whether the given subsequent result record is nulled.""" + incremental_data_records: ( + list[SubsequentResultRecord] | dict[DeferredGroupedFieldSetRecord, None] + ) = ( + [subsequent_result_record] + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record.deferred_grouped_field_set_records + ) + return any( + self._matches_path(incremental_data_record.path, null_path) + for incremental_data_record in incremental_data_records + ) + def _matches_path( self, test_path: list[str | int], base_path: list[str | int] ) -> bool: @@ -829,79 +996,148 @@ def _add_task(self, awaitable: Awaitable[Any]) -> None: task.add_done_callback(tasks.discard) -class InitialResultRecord(NamedTuple): - """Formatted subsequent incremental execution result""" +class InitialResultRecord: + """Initial result record""" errors: list[GraphQLError] - children: dict[SubsequentDataRecord, None] + children: dict[SubsequentResultRecord, None] + + def __init__(self) -> None: + self.errors = [] + self.children = {} + + +class DeferredGroupedFieldSetRecord: + """Deferred grouped field set record""" + + path: list[str | int] + deferred_fragment_records: list[DeferredFragmentRecord] + grouped_field_set: GroupedFieldSet + should_initiate_defer: bool + errors: list[GraphQLError] + data: dict[str, Any] | None + sent: bool + + def __init__( + self, + deferred_fragment_records: list[DeferredFragmentRecord], + grouped_field_set: GroupedFieldSet, + should_initiate_defer: bool, + path: Path | None = None, + ) -> None: + self.path = path.as_list() if path else [] + self.deferred_fragment_records = deferred_fragment_records + self.grouped_field_set = grouped_field_set + self.should_initiate_defer = should_initiate_defer + self.errors = [] + self.sent = False + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [ + f"deferred_fragment_records={self.deferred_fragment_records!r}", + f"grouped_field_set={self.grouped_field_set!r}", + ] + if self.path: + args.append(f"path={self.path!r}") + return f"{name}({', '.join(args)})" class DeferredFragmentRecord: - """A record collecting data marked with the defer directive""" + """Deferred fragment record""" + path: list[str | int] + label: str | None + children: dict[SubsequentResultRecord, None] + deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] errors: list[GraphQLError] + filtered: bool + _pending: dict[DeferredGroupedFieldSetRecord, None] + + def 
__init__(self, path: Path | None = None, label: str | None = None) -> None:
+        self.path = path.as_list() if path else []
+        self.label = label
+        self.children = {}
+        self.filtered = False
+        self.deferred_grouped_field_set_records = {}
+        self.errors = []
+        self._pending = {}
+
+    def __repr__(self) -> str:
+        name = self.__class__.__name__
+        args: list[str] = []
+        if self.path:
+            args.append(f"path={self.path!r}")
+        if self.label:
+            args.append(f"label={self.label!r}")
+        return f"{name}({', '.join(args)})"
+
+
+class StreamRecord:
+    """Stream record"""
 
+    label: str | None
     path: list[str | int]
-    data: dict[str, Any] | None
-    children: dict[SubsequentDataRecord, None]
-    is_completed: bool
-    filtered: bool
+    errors: list[GraphQLError]
+    early_return: Callable[[], Awaitable[Any]] | None
 
-    def __init__(self, label: str | None, path: Path | None) -> None:
+    def __init__(
+        self,
+        path: Path,
+        label: str | None = None,
+        early_return: Callable[[], Awaitable[Any]] | None = None,
+    ) -> None:
+        self.path = path.as_list()
         self.label = label
-        self.path = path.as_list() if path else []
         self.errors = []
-        self.children = {}
-        self.is_completed = self.filtered = False
-        self.data = None
+        self.early_return = early_return
 
     def __repr__(self) -> str:
         name = self.__class__.__name__
-        args: list[str] = [f"path={self.path!r}"]
+        args: list[str] = []
+        if self.path:
+            args.append(f"path={self.path!r}")
         if self.label:
             args.append(f"label={self.label!r}")
-        if self.data is not None:
-            args.append("data")
         return f"{name}({', '.join(args)})"
 
 
 class StreamItemsRecord:
-    """A record collecting items marked with the stream directive"""
+    """Stream items record"""
 
     errors: list[GraphQLError]
-    label: str | None
+    stream_record: StreamRecord
     path: list[str | int]
-    items: list[str] | None
-    children: dict[SubsequentDataRecord, None]
-    async_iterator: AsyncIterator[Any] | None
+    items: list[Any]
+    children: dict[SubsequentResultRecord, None]
+    is_final_record: bool
     is_completed_async_iterator: bool
     is_completed: bool
     filtered: bool
 
     def __init__(
         self,
-        label: str | None,
-        path: Path | None,
-        async_iterator: AsyncIterator[Any] | None = None,
+        stream_record: StreamRecord,
+        path: Path | None = None,
     ) -> None:
-        self.label = label
+        self.stream_record = stream_record
         self.path = path.as_list() if path else []
-        self.async_iterator = async_iterator
-        self.errors = []
         self.children = {}
-        self.is_completed_async_iterator = self.is_completed = self.filtered = False
-        self.items = None
+        self.errors = []
+        self.is_completed_async_iterator = self.is_completed = False
+        self.is_final_record = self.filtered = False
+        self.items = []
 
     def __repr__(self) -> str:
         name = self.__class__.__name__
-        args: list[str] = [f"path={self.path!r}"]
-        if self.label:
-            args.append(f"label={self.label!r}")
-        if self.items is not None:
-            args.append("items")
+        args: list[str] = [f"stream_record={self.stream_record!r}"]
+        if self.path:
+            args.append(f"path={self.path!r}")
         return f"{name}({', '.join(args)})"
 
 
-SubsequentDataRecord = Union[DeferredFragmentRecord, StreamItemsRecord]
+IncrementalDataRecord = Union[
+    InitialResultRecord, DeferredGroupedFieldSetRecord, StreamItemsRecord
+]
 
-IncrementalDataRecord = Union[InitialResultRecord, SubsequentDataRecord]
+SubsequentResultRecord = Union[DeferredFragmentRecord, StreamItemsRecord]
diff --git a/src/graphql/pyutils/__init__.py b/src/graphql/pyutils/__init__.py
index 10faca9e..28ad1a92 100644
--- a/src/graphql/pyutils/__init__.py
+++ b/src/graphql/pyutils/__init__.py
@@ -33,12 +33,16 @@ from 
.print_path_list import print_path_list
 from .simple_pub_sub import SimplePubSub, SimplePubSubIterator
 from .undefined import Undefined, UndefinedType
+from .ref_map import RefMap
+from .ref_set import RefSet
 
 __all__ = [
     "AwaitableOrValue",
     "Description",
     "FrozenError",
     "Path",
+    "RefMap",
+    "RefSet",
     "SimplePubSub",
     "SimplePubSubIterator",
     "Undefined",
diff --git a/src/graphql/pyutils/ref_map.py b/src/graphql/pyutils/ref_map.py
new file mode 100644
index 00000000..0cffd533
--- /dev/null
+++ b/src/graphql/pyutils/ref_map.py
@@ -0,0 +1,79 @@
+"""A Map class that works similarly to the JavaScript Map class."""
+
+from __future__ import annotations
+
+from collections.abc import MutableMapping
+
+try:
+    MutableMapping[str, int]
+except TypeError:  # Python < 3.9
+    from typing import MutableMapping
+from typing import Any, Iterable, Iterator, TypeVar
+
+__all__ = ["RefMap"]
+
+K = TypeVar("K")
+V = TypeVar("V")
+
+
+class RefMap(MutableMapping[K, V]):
+    """A dictionary-like object that allows mutable objects as keys.
+
+    This class keeps the insertion order, like a normal dictionary.
+
+    Note that the implementation is limited to what is needed internally.
+    """
+
+    _map: dict[int, tuple[K, V]]
+
+    def __init__(self, items: Iterable[tuple[K, V]] | None = None) -> None:
+        super().__init__()
+        self._map = {}
+        if items:
+            self.update(items)
+
+    def __setitem__(self, key: K, value: V) -> None:
+        self._map[id(key)] = (key, value)
+
+    def __getitem__(self, key: K) -> Any:
+        return self._map[id(key)][1]
+
+    def __delitem__(self, key: K) -> None:
+        del self._map[id(key)]
+
+    def __contains__(self, key: Any) -> bool:
+        return id(key) in self._map
+
+    def __len__(self) -> int:
+        return len(self._map)
+
+    def __repr__(self) -> str:
+        return f"{self.__class__.__name__}({list(self.items())!r})"
+
+    def get(self, key: Any, default: Any = None) -> Any:
+        """Get the mapped value for the given key."""
+        try:
+            return self._map[id(key)][1]
+        except KeyError:
+            return default
+
+    def __iter__(self) -> Iterator[K]:
+        return self.keys()
+
+    def keys(self) -> Iterator[K]:  # type: ignore
+        """Return an iterator over the keys of the map."""
+        return (item[0] for item in self._map.values())
+
+    def values(self) -> Iterator[V]:  # type: ignore
+        """Return an iterator over the values of the map."""
+        return (item[1] for item in self._map.values())
+
+    def items(self) -> Iterator[tuple[K, V]]:  # type: ignore
+        """Return an iterator over the key/value pairs of the map."""
+        return self._map.values()  # type: ignore
+
+    def update(self, items: Iterable[tuple[K, V]] | None = None) -> None:  # type: ignore
+        """Update the map with the given key/value pairs."""
+        if items:
+            for key, value in items:
+                self[key] = value
diff --git a/src/graphql/pyutils/ref_set.py b/src/graphql/pyutils/ref_set.py
new file mode 100644
index 00000000..731c021d
--- /dev/null
+++ b/src/graphql/pyutils/ref_set.py
@@ -0,0 +1,67 @@
+"""A Set class that works similarly to the JavaScript Set class."""
+
+from __future__ import annotations
+
+from collections.abc import MutableSet
+
+try:
+    MutableSet[int]
+except TypeError:  # Python < 3.9
+    from typing import MutableSet
+from contextlib import suppress
+from typing import Any, Iterable, Iterator, TypeVar
+
+from .ref_map import RefMap
+
+__all__ = ["RefSet"]
+
+
+T = TypeVar("T")
+
+
+class RefSet(MutableSet[T]):
+    """A set-like object that allows mutable objects as elements.
+
+    This class keeps the insertion order, unlike a normal set.
+
+    Note that the implementation is limited to what is needed internally.
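+
+    A minimal usage sketch (illustrative only, not taken from the library
+    itself; any mutable objects could serve as elements)::
+
+        a, b = [], []  # equal, but distinct objects
+        s = RefSet([a])
+        assert a in s  # membership is decided by identity, not equality
+        assert b not in s  # an equal but distinct object is not a member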
+ """ + + _map: RefMap[T, None] + + def __init__(self, values: Iterable[T] | None = None) -> None: + super().__init__() + self._map = RefMap() + if values: + self.update(values) + + def __contains__(self, key: Any) -> bool: + return key in self._map + + def __iter__(self) -> Iterator[T]: + return iter(self._map) + + def __len__(self) -> int: + return len(self._map) + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({list(self)!r})" + + def add(self, value: T) -> None: + """Add the given item to the set.""" + self._map[value] = None + + def remove(self, value: T) -> None: + """Remove the given item from the set.""" + del self._map[value] + + def discard(self, value: T) -> None: + """Remove the given item from the set if it exists.""" + with suppress(KeyError): + self.remove(value) + + def update(self, values: Iterable[T] | None = None) -> None: + """Update the set with the given items.""" + if values: + for item in values: + self.add(item) diff --git a/src/graphql/validation/rules/single_field_subscriptions.py b/src/graphql/validation/rules/single_field_subscriptions.py index 9a689809..89235856 100644 --- a/src/graphql/validation/rules/single_field_subscriptions.py +++ b/src/graphql/validation/rules/single_field_subscriptions.py @@ -2,10 +2,10 @@ from __future__ import annotations -from typing import Any, cast +from typing import Any from ...error import GraphQLError -from ...execution.collect_fields import collect_fields +from ...execution.collect_fields import FieldGroup, collect_fields from ...language import ( FieldNode, FragmentDefinitionNode, @@ -17,6 +17,10 @@ __all__ = ["SingleFieldSubscriptionsRule"] +def to_nodes(field_group: FieldGroup) -> list[FieldNode]: + return [field_details.node for field_details in field_group.fields] + + class SingleFieldSubscriptionsRule(ValidationRule): """Subscriptions must only include a single non-introspection field. 
@@ -50,16 +54,12 @@ def enter_operation_definition( node, ).grouped_field_set if len(grouped_field_set) > 1: - field_selection_lists = list(grouped_field_set.values()) - extra_field_selection_lists = field_selection_lists[1:] + field_groups = list(grouped_field_set.values()) + extra_field_groups = field_groups[1:] extra_field_selection = [ - field - for fields in extra_field_selection_lists - for field in ( - fields - if isinstance(fields, list) - else [cast(FieldNode, fields)] - ) + node + for field_group in extra_field_groups + for node in to_nodes(field_group) ] self.report_error( GraphQLError( @@ -73,7 +73,7 @@ def enter_operation_definition( ) ) for field_group in grouped_field_set.values(): - field_name = field_group[0].name.value + field_name = to_nodes(field_group)[0].name.value if field_name.startswith("__"): self.report_error( GraphQLError( @@ -83,6 +83,6 @@ def enter_operation_definition( else f"Subscription '{operation_name}'" ) + " must not select an introspection top level field.", - field_group, + to_nodes(field_group), ) ) diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index 85462147..ac8b9ae1 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -49,10 +49,16 @@ def execute_field( source, field_group, path, - incremental_data_record=None, + incremental_data_record, + defer_map, ): result = super().execute_field( - parent_type, source, field_group, path, incremental_data_record + parent_type, + source, + field_group, + path, + incremental_data_record, + defer_map, ) return result * 2 # type: ignore diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 83201377..d6d17105 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -15,7 +15,13 @@ execute, experimental_execute_incrementally, ) -from graphql.execution.incremental_publisher import DeferredFragmentRecord +from graphql.execution.incremental_publisher import ( + CompletedResult, + DeferredFragmentRecord, + DeferredGroupedFieldSetRecord, + StreamItemsRecord, + StreamRecord, +) from graphql.language import DocumentNode, parse from graphql.pyutils import Path, is_awaitable from graphql.type import ( @@ -145,19 +151,23 @@ async def null_async(_info) -> None: @staticmethod async def slow(_info) -> str: - """Simulate a slow async resolver returning a value.""" + """Simulate a slow async resolver returning a non-null value.""" await sleep(0) return "slow" + @staticmethod + async def slow_null(_info) -> None: + """Simulate a slow async resolver returning a null value.""" + await sleep(0) + @staticmethod def bad(_info) -> str: """Simulate a bad resolver raising an error.""" raise RuntimeError("bad") @staticmethod - async def friends(_info) -> AsyncGenerator[Friend, None]: - """A slow async generator yielding the first friend.""" - await sleep(0) + async def first_friend(_info) -> AsyncGenerator[Friend, None]: + """An async generator yielding the first friend.""" yield friends[0] @@ -183,6 +193,42 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_defer_directive(): + def can_format_and_print_completed_result(): + result = CompletedResult([]) + assert result.formatted == {"path": []} + assert str(result) == "CompletedResult(path=[])" + + result = CompletedResult( + path=["foo", 1], label="bar", errors=[GraphQLError("oops")] + ) + assert result.formatted == { + "path": ["foo", 1], + "label": "bar", + "errors": [{"message": "oops"}], + } + assert ( + 
str(result) == "CompletedResult(path=['foo', 1], label='bar'," + " errors=[GraphQLError('oops')])" + ) + + def can_compare_completed_result(): + args: dict[str, Any] = {"path": ["foo", 1], "label": "bar", "errors": []} + result = CompletedResult(**args) + assert result == CompletedResult(**args) + assert result != CompletedResult(**modified_args(args, path=["foo", 2])) + assert result != CompletedResult(**modified_args(args, label="baz")) + assert result != CompletedResult( + **modified_args(args, errors=[GraphQLError("oops")]) + ) + assert result == tuple(args.values()) + assert result == tuple(args.values())[:2] + assert result != tuple(args.values())[:1] + assert result == args + assert result == dict(list(args.items())[:2]) + assert result != dict( + list(args.items())[:1] + [("errors", [GraphQLError("oops")])] + ) + def can_format_and_print_incremental_defer_result(): result = IncrementalDeferResult() assert result.formatted == {"data": None} @@ -192,20 +238,17 @@ def can_format_and_print_incremental_defer_result(): data={"hello": "world"}, errors=[GraphQLError("msg")], path=["foo", 1], - label="bar", extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [{"message": "msg"}], "extensions": {"baz": 2}, - "label": "bar", "path": ["foo", 1], } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," - " extensions={'baz': 2})" + " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" ) # noinspection PyTypeChecker @@ -214,7 +257,6 @@ def can_compare_incremental_defer_result(): "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "path": ["foo", 1], - "label": "bar", "extensions": {"baz": 2}, } result = IncrementalDeferResult(**args) @@ -224,7 +266,6 @@ def can_compare_incremental_defer_result(): ) assert result != IncrementalDeferResult(**modified_args(args, errors=[])) assert result != IncrementalDeferResult(**modified_args(args, path=["foo", 2])) - assert result != IncrementalDeferResult(**modified_args(args, label="baz")) assert result != IncrementalDeferResult( **modified_args(args, extensions={"baz": 1}) ) @@ -238,7 +279,7 @@ def can_compare_incremental_defer_result(): assert result == dict(list(args.items())[:2]) assert result == dict(list(args.items())[:3]) assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) - assert result != {**args, "label": "baz"} + assert result != {**args, "extensions": {"baz": 3}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() @@ -254,33 +295,28 @@ def can_format_and_print_initial_incremental_execution_result(): == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" ) - incremental = [IncrementalDeferResult(label="foo")] result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], - incremental=incremental, has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": [{"data": None, "label": "foo"}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == "InitialIncrementalExecutionResult(" - "data={'hello': 'world'}, errors=[GraphQLError('msg')], incremental[1]," - " has_next, extensions={'baz': 2})" + "data={'hello': 'world'}, errors=[GraphQLError('msg')], has_next," + " extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): - incremental = 
[IncrementalDeferResult(label="foo")] args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "has_next": True, "extensions": {"baz": 2}, } @@ -292,9 +328,6 @@ def can_compare_initial_incremental_execution_result(): assert result != InitialIncrementalExecutionResult( **modified_args(args, errors=[]) ) - assert result != InitialIncrementalExecutionResult( - **modified_args(args, incremental=[]) - ) assert result != InitialIncrementalExecutionResult( **modified_args(args, has_next=False) ) @@ -311,20 +344,17 @@ def can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": True, "extensions": {"baz": 2}, } assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": True, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "incremental": incremental, "hasNext": False, "extensions": {"baz": 2}, } @@ -338,27 +368,32 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - incremental = [IncrementalDeferResult(label="foo")] + incremental = [IncrementalDeferResult()] + completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( - incremental=incremental, has_next=True, + incremental=incremental, + completed=completed, extensions={"baz": 2}, ) assert result.formatted == { - "incremental": [{"data": None, "label": "foo"}], "hasNext": True, + "incremental": [{"data": None}], + "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } assert ( - str(result) == "SubsequentIncrementalExecutionResult(incremental[1]," - " has_next, extensions={'baz': 2})" + str(result) == "SubsequentIncrementalExecutionResult(has_next," + " incremental[1], completed[1], extensions={'baz': 2})" ) def can_compare_subsequent_incremental_execution_result(): - incremental = [IncrementalDeferResult(label="foo")] + incremental = [IncrementalDeferResult()] + completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { - "incremental": incremental, "has_next": True, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } result = SubsequentIncrementalExecutionResult(**args) @@ -377,25 +412,57 @@ def can_compare_subsequent_incremental_execution_result(): assert result != tuple(args.values())[:1] assert result != (incremental, False) assert result == { - "incremental": incremental, "hasNext": True, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } assert result == {"incremental": incremental, "hasNext": True} assert result != { - "incremental": incremental, "hasNext": False, + "incremental": incremental, + "completed": completed, "extensions": {"baz": 2}, } + def can_print_deferred_grouped_field_set_record(): + record = DeferredGroupedFieldSetRecord([], {}, False) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={})" + ) + record = DeferredGroupedFieldSetRecord([], {}, True, Path(None, "foo", "Foo")) + assert ( + str(record) == "DeferredGroupedFieldSetRecord(" + "deferred_fragment_records=[], grouped_field_set={}, path=['foo'])" + ) + def can_print_deferred_fragment_record(): record = DeferredFragmentRecord(None, None) - assert str(record) == 
"DeferredFragmentRecord(path=[])" - record = DeferredFragmentRecord("foo", Path(None, "bar", "Bar")) - assert str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo')" - record.data = {"hello": "world"} + assert str(record) == "DeferredFragmentRecord()" + record = DeferredFragmentRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "DeferredFragmentRecord(path=['bar'], label='foo')" + + def can_print_stream_record(): + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + record.path = [] + assert str(record) == "StreamRecord(label='foo')" + record.label = None + assert str(record) == "StreamRecord()" + + def can_print_stream_items_record(): + record = StreamItemsRecord( + StreamRecord(Path(None, "bar", "Bar"), "foo"), + Path(None, "baz", "Baz"), + ) + assert ( + str(record) == "StreamItemsRecord(stream_record=StreamRecord(" + "path=['bar'], label='foo'), path=['baz'])" + ) + record = StreamItemsRecord(StreamRecord(Path(None, "bar", "Bar"))) assert ( - str(record) == "DeferredFragmentRecord(" "path=['bar'], label='foo', data)" + str(record) == "StreamItemsRecord(stream_record=StreamRecord(path=['bar']))" ) @pytest.mark.asyncio @@ -419,6 +486,7 @@ async def can_defer_fragments_containing_scalar_types(): {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -470,6 +538,7 @@ async def does_not_disable_defer_with_null_if_argument(): {"data": {"hero": {"id": "1"}}, "hasNext": True}, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -514,9 +583,8 @@ async def can_defer_fragments_on_the_top_level_query_field(): assert result == [ {"data": {}, "hasNext": True}, { - "incremental": [ - {"data": {"hero": {"id": "1"}}, "path": [], "label": "DeferQuery"} - ], + "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], + "completed": [{"path": [], "label": "DeferQuery"}], "hasNext": False, }, ] @@ -551,9 +619,9 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): } ], "path": [], - "label": "DeferQuery", } ], + "completed": [{"path": [], "label": "DeferQuery"}], "hasNext": False, }, ] @@ -584,6 +652,10 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): {"data": {"hero": {}}, "hasNext": True}, { "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + }, { "data": { "friends": [ @@ -593,14 +665,12 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): ] }, "path": ["hero"], - "label": "DeferNested", - }, - { - "data": {"id": "1"}, - "path": ["hero"], - "label": "DeferTop", }, ], + "completed": [ + {"path": ["hero"], "label": "DeferTop"}, + {"path": ["hero"], "label": "DeferNested"}, + ], "hasNext": False, }, ] @@ -625,13 +695,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): assert result == [ {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "DeferTop", - }, - ], + "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, }, ] @@ -656,13 +720,7 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first assert result == [ {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "DeferTop", - }, - 
], + "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, }, ] @@ -686,19 +744,14 @@ async def can_defer_an_inline_fragment(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["hero"], - "label": "InlineDeferred", - }, - ], + "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], + "completed": [{"path": ["hero"], "label": "InlineDeferred"}], "hasNext": False, }, ] @pytest.mark.asyncio - async def emits_empty_defer_fragments(): + async def does_not_emit_empty_defer_fragments(): document = parse( """ query HeroNameQuery { @@ -717,19 +770,164 @@ async def emits_empty_defer_fragments(): assert result == [ {"data": {"hero": {}}, "hasNext": True}, + { + "completed": [{"path": ["hero"]}], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_fields(): + document = parse( + """ + query HeroNameQuery { + hero { + ... @defer(label: "DeferID") { + id + } + ... @defer(label: "DeferName") { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {"hero": {}}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + }, + ], + "completed": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": ["hero"], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_varying_subfields(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": {"hero": {}}, + "path": [], + }, + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, + "path": ["hero"], + }, + ], + "completed": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_defer_fragments_different_labels_var_subfields_async(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferID") { + hero { + id + } + } + ... @defer(label: "DeferName") { + hero { + name + } + } + } + """ + ) + + async def resolve(value): + return value + + result = await complete( + document, + { + "hero": { + "id": lambda _info: resolve(1), + "name": lambda _info: resolve("Luke"), + } + }, + ) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, { "incremental": [ { - "data": {}, + "data": {"hero": {}}, + "path": [], + }, + { + "data": {"id": "1"}, + "path": ["hero"], + }, + { + "data": {"name": "Luke"}, "path": ["hero"], }, ], + "completed": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": False, }, ] @pytest.mark.asyncio - async def can_separately_emit_defer_fragments_different_labels_varying_fields(): + async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): document = parse( """ query HeroNameQuery { @@ -737,7 +935,9 @@ async def can_separately_emit_defer_fragments_different_labels_varying_fields(): ... @defer(label: "DeferID") { id } - ... @defer(label: "DeferName") { + } + ... 
@defer(label: "DeferName") { + hero { name } } @@ -747,26 +947,84 @@ async def can_separately_emit_defer_fragments_different_labels_varying_fields(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + { + "data": {"hero": {}}, + "hasNext": True, + }, { "incremental": [ { "data": {"id": "1"}, "path": ["hero"], - "label": "DeferID", }, { "data": {"name": "Luke"}, "path": ["hero"], - "label": "DeferName", }, ], + "completed": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level(): + document = parse( + """ + query HeroNameQuery { + ... @defer(label: "DeferName") { + hero { + name + ... @defer(label: "DeferID") { + id + } + } + } + } + """ + ) + result = await complete(document) + + assert result == [ + { + "data": {}, + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "hero": { + "name": "Luke", + }, + }, + "path": [], + }, + ], + "completed": [ + {"path": [], "label": "DeferName"}, + ], + "hasNext": True, + }, + { + "incremental": [ + { + "data": { + "id": "1", + }, + "path": ["hero"], + }, + ], + "completed": [{"path": ["hero"], "label": "DeferID"}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_multiple_defers_on_the_same_object(): + async def can_deduplicate_multiple_defers_on_the_same_object(): document = parse( """ query { @@ -800,34 +1058,39 @@ async def does_not_deduplicate_multiple_defers_on_the_same_object(): {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, { "incremental": [ - {"data": {}, "path": ["hero", "friends", 0]}, - {"data": {}, "path": ["hero", "friends", 0]}, - {"data": {}, "path": ["hero", "friends", 0]}, { "data": {"id": "2", "name": "Han"}, "path": ["hero", "friends", 0], }, - {"data": {}, "path": ["hero", "friends", 1]}, - {"data": {}, "path": ["hero", "friends", 1]}, - {"data": {}, "path": ["hero", "friends", 1]}, { "data": {"id": "3", "name": "Leia"}, "path": ["hero", "friends", 1], }, - {"data": {}, "path": ["hero", "friends", 2]}, - {"data": {}, "path": ["hero", "friends", 2]}, - {"data": {}, "path": ["hero", "friends", 2]}, { "data": {"id": "4", "name": "C-3PO"}, "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_present_in_the_initial_payload(): + async def deduplicates_fields_present_in_the_initial_payload(): document = parse( """ query { @@ -881,27 +1144,17 @@ async def does_not_deduplicate_fields_present_in_the_initial_payload(): { "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "bar": "bar", - }, - }, - "anotherNestedObject": { - "deeperObject": { - "foo": "foo", - }, - }, - }, - "path": ["hero"], + "data": {"bar": "bar"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): + async def 
deduplicates_fields_present_in_a_parent_defer_payload(): document = parse( """ query { @@ -944,24 +1197,25 @@ async def does_not_deduplicate_fields_present_in_a_parent_defer_payload(): "path": ["hero"], }, ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": { - "foo": "foo", "bar": "bar", }, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels(): + async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): document = parse( """ query { @@ -1014,58 +1268,51 @@ async def does_not_deduplicate_fields_with_deferred_fragments_at_multiple_levels assert result == [ { - "data": {"hero": {"nestedObject": {"deeperObject": {"foo": "foo"}}}}, + "data": { + "hero": { + "nestedObject": { + "deeperObject": { + "foo": "foo", + }, + }, + }, + }, "hasNext": True, }, { "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "foo": "foo", - "bar": "bar", - }, - } - }, - "path": ["hero"], + "data": {"bar": "bar"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { "incremental": [ { - "data": { - "deeperObject": { - "foo": "foo", - "bar": "bar", - "baz": "baz", - } - }, - "path": ["hero", "nestedObject"], + "data": {"baz": "baz"}, + "path": ["hero", "nestedObject", "deeperObject"], }, ], "hasNext": True, + "completed": [{"path": ["hero", "nestedObject"]}], }, { "incremental": [ { - "data": { - "foo": "foo", - "bar": "bar", - "baz": "baz", - "bak": "bak", - }, + "data": {"bak": "bak"}, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): + async def deduplicates_fields_from_deferred_fragments_branches_same_level(): document = parse( """ query { @@ -1109,10 +1356,10 @@ async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): }, "path": ["hero", "nestedObject", "deeperObject"], }, - { - "data": {"nestedObject": {"deeperObject": {}}}, - "path": ["hero"], - }, + ], + "completed": [ + {"path": ["hero"]}, + {"path": ["hero", "nestedObject", "deeperObject"]}, ], "hasNext": True, }, @@ -1120,18 +1367,18 @@ async def does_not_combine_fields_from_deferred_fragments_branches_same_level(): "incremental": [ { "data": { - "foo": "foo", "bar": "bar", }, "path": ["hero", "nestedObject", "deeperObject"], }, ], + "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio - async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels(): + async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): document = parse( """ query { @@ -1179,16 +1426,17 @@ async def does_not_combine_fields_from_deferred_fragments_branches_multi_levels( "path": ["a", "b"], }, { - "data": {"a": {"b": {"e": {"f": "f"}}}, "g": {"h": "h"}}, + "data": {"g": {"h": "h"}}, "path": [], }, ], + "completed": [{"path": ["a", "b"]}, {"path": []}], "hasNext": False, }, ] @pytest.mark.asyncio - async def preserves_error_boundaries_null_first(): + async def nulls_cross_defer_boundaries_null_first(): document = parse( """ query { @@ -1227,11 +1475,16 @@ async def preserves_error_boundaries_null_first(): { "incremental": [ { - "data": 
{"b": {"c": {"d": "d"}}}, + "data": {"b": {"c": {}}}, "path": ["a"], }, { - "data": {"a": {"b": {"c": None}, "someField": "someField"}}, + "data": {"d": "d"}, + "path": ["a", "b", "c"], + }, + ], + "completed": [ + { "path": [], "errors": [ { @@ -1242,12 +1495,13 @@ async def preserves_error_boundaries_null_first(): }, ], }, + {"path": ["a"]}, ], "hasNext": False, }, ] - async def preserves_error_boundaries_value_first(): + async def nulls_cross_defer_boundaries_value_first(): document = parse( """ query { @@ -1291,7 +1545,16 @@ async def preserves_error_boundaries_value_first(): { "incremental": [ { - "data": {"b": {"c": None}, "someField": "someField"}, + "data": {"b": {"c": {}}}, + "path": ["a"], + }, + { + "data": {"d": "d"}, + "path": ["a", "b", "c"], + }, + ], + "completed": [ + { "path": ["a"], "errors": [ { @@ -1303,7 +1566,6 @@ async def preserves_error_boundaries_value_first(): ], }, { - "data": {"a": {"b": {"c": {"d": "d"}}}}, "path": [], }, ], @@ -1311,7 +1573,7 @@ async def preserves_error_boundaries_value_first(): }, ] - async def correctly_handle_a_slow_null(): + async def filters_a_payload_with_a_null_that_cannot_be_merged(): document = parse( """ query { @@ -1338,14 +1600,11 @@ async def correctly_handle_a_slow_null(): """ ) - async def slow_null(_info) -> None: - await sleep(0) - result = await complete( document, { "a": { - "b": {"c": {"d": "d", "nonNullErrorField": slow_null}}, + "b": {"c": {"d": "d", "nonNullErrorField": Resolvers.slow_null}}, "someField": "someField", } }, @@ -1359,16 +1618,20 @@ async def slow_null(_info) -> None: { "incremental": [ { - "data": {"b": {"c": {"d": "d"}}}, + "data": {"b": {"c": {}}}, "path": ["a"], }, + { + "data": {"d": "d"}, + "path": ["a", "b", "c"], + }, ], + "completed": [{"path": ["a"]}], "hasNext": True, }, { - "incremental": [ + "completed": [ { - "data": {"a": {"b": {"c": None}, "someField": "someField"}}, "path": [], "errors": [ { @@ -1406,29 +1669,17 @@ async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): }, ) - assert result == [ - { - "data": {"hero": None}, - "errors": [ - { - "message": "Cannot return null" - " for non-nullable field Hero.nonNullName.", - "locations": [{"line": 4, "column": 17}], - "path": ["hero", "nonNullName"], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "data": {"hero": {"name": "Luke"}}, - "path": [], - }, - ], - "hasNext": False, - }, - ] + assert result == { + "data": {"hero": None}, + "errors": [ + { + "message": "Cannot return null" + " for non-nullable field Hero.nonNullName.", + "locations": [{"line": 4, "column": 17}], + "path": ["hero", "nonNullName"], + }, + ], + } async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): document = parse( @@ -1470,11 +1721,12 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): ], }, ], + "completed": [{"path": []}], "hasNext": False, }, ] - async def does_not_deduplicate_list_fields(): + async def deduplicates_list_fields(): document = parse( """ query { @@ -1508,23 +1760,12 @@ async def does_not_deduplicate_list_fields(): "hasNext": True, }, { - "incremental": [ - { - "data": { - "friends": [ - {"name": "Han"}, - {"name": "Leia"}, - {"name": "C-3PO"}, - ] - }, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_async_iterable_list_fields(): + async def deduplicates_async_iterable_list_fields(): document = parse( """ query { @@ -1542,14 +1783,10 @@ async def 
does_not_deduplicate_async_iterable_list_fields(): """ ) - async def resolve_friends(_info): - await sleep(0) - yield friends[0] - result = await complete( document, { - "hero": {**hero, "friends": resolve_friends}, + "hero": {**hero, "friends": Resolvers.first_friend}, }, ) @@ -1559,17 +1796,12 @@ async def resolve_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": [{"name": "Han"}]}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_empty_async_iterable_list_fields(): + async def deduplicates_empty_async_iterable_list_fields(): document = parse( """ query { @@ -1605,12 +1837,7 @@ async def resolve_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": []}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1650,15 +1877,24 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): { "incremental": [ { - "data": {"friends": [{"id": "2"}, {"id": "3"}, {"id": "4"}]}, - "path": ["hero"], - } + "data": {"id": "2"}, + "path": ["hero", "friends", 0], + }, + { + "data": {"id": "3"}, + "path": ["hero", "friends", 1], + }, + { + "data": {"id": "4"}, + "path": ["hero", "friends", 2], + }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_list_fields_that_return_empty_lists(): + async def deduplicates_list_fields_that_return_empty_lists(): document = parse( """ query { @@ -1685,17 +1921,12 @@ async def does_not_deduplicate_list_fields_that_return_empty_lists(): "hasNext": True, }, { - "incremental": [ - { - "data": {"friends": []}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_null_object_fields(): + async def deduplicates_null_object_fields(): document = parse( """ query { @@ -1722,17 +1953,12 @@ async def does_not_deduplicate_null_object_fields(): "hasNext": True, }, { - "incremental": [ - { - "data": {"nestedObject": None}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] - async def does_not_deduplicate_async_object_fields(): + async def deduplicates_async_object_fields(): document = parse( """ query { @@ -1763,12 +1989,7 @@ async def resolve_nested_object(_info): "hasNext": True, }, { - "incremental": [ - { - "data": {"nestedObject": {"name": "foo"}}, - "path": ["hero"], - } - ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1806,6 +2027,7 @@ async def handles_errors_thrown_in_deferred_fragments(): ], }, ], + "completed": [{"path": ["hero"]}], "hasNext": False, }, ] @@ -1832,9 +2054,8 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ + "completed": [ { - "data": None, "path": ["hero"], "errors": [ { @@ -1903,9 +2124,8 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ {"data": {"hero": {"id": "1"}}, "hasNext": True}, { - "incremental": [ + "completed": [ { - "data": None, "path": ["hero"], "errors": [ { @@ -1953,6 +2173,7 @@ async def returns_payloads_in_correct_order(): "path": ["hero"], } ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1970,6 +2191,11 @@ async def returns_payloads_in_correct_order(): "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", 
"friends", 2]}, + ], "hasNext": False, }, ] @@ -2004,8 +2230,9 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): { "data": {"name": "Luke", "friends": [{}, {}, {}]}, "path": ["hero"], - }, + } ], + "completed": [{"path": ["hero"]}], "hasNext": True, }, { @@ -2023,6 +2250,11 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): "path": ["hero", "friends", 2], }, ], + "completed": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "hasNext": False, }, ] @@ -2046,7 +2278,7 @@ async def filters_deferred_payloads_when_list_item_from_async_iterable_nulled(): ) result = await complete( - document, {"hero": {**hero, "friends": Resolvers.friends}} + document, {"hero": {**hero, "friends": Resolvers.first_friend}} ) assert result == { diff --git a/tests/execution/test_lists.py b/tests/execution/test_lists.py index 5dc4b5f0..a7f747fb 100644 --- a/tests/execution/test_lists.py +++ b/tests/execution/test_lists.py @@ -50,6 +50,7 @@ def accepts_a_tuple_as_a_list_value(): result = _complete(list_field) assert result == ({"listField": list(list_field)}, None) + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") def accepts_a_set_as_a_list_value(): # Note that sets are not ordered in Python. list_field = {"apple", "banana", "coconut"} diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 3737bb6a..f5030c88 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -246,13 +246,13 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): { "incremental": [ { - "label": "defer-label", "path": ["first"], "data": { "promiseToGetTheNumber": 2, }, }, ], + "completed": [{"path": ["first"], "label": "defer-label"}], "hasNext": False, }, ] @@ -317,13 +317,13 @@ async def mutation_with_defer_is_not_executed_serially(): { "incremental": [ { - "label": "defer-label", "path": [], "data": { "first": {"theNumber": 1}, }, }, ], + "completed": [{"path": [], "label": "defer-label"}], "hasNext": False, }, ] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index d611f7a9..5454e826 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -12,7 +12,7 @@ IncrementalStreamResult, experimental_execute_incrementally, ) -from graphql.execution.incremental_publisher import StreamItemsRecord +from graphql.execution.incremental_publisher import StreamRecord from graphql.language import DocumentNode, parse from graphql.pyutils import Path from graphql.type import ( @@ -156,29 +156,24 @@ def can_format_and_print_incremental_stream_result(): items=["hello", "world"], errors=[GraphQLError("msg")], path=["foo", 1], - label="bar", extensions={"baz": 2}, ) assert result.formatted == { "items": ["hello", "world"], "errors": [{"message": "msg"}], "extensions": {"baz": 2}, - "label": "bar", "path": ["foo", 1], } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " errors=[GraphQLError('msg')], path=['foo', 1], label='bar'," - " extensions={'baz': 2})" + " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" ) def can_print_stream_record(): - record = StreamItemsRecord(None, None, None) - assert str(record) == "StreamItemsRecord(path=[])" - record = StreamItemsRecord("foo", Path(None, "bar", "Bar"), None) - assert str(record) == "StreamItemsRecord(" "path=['bar'], label='foo')" - record.items = ["hello", "world"] - assert str(record) == 
"StreamItemsRecord(" "path=['bar'], label='foo', items)" + record = StreamRecord(Path(None, 0, None)) + assert str(record) == "StreamRecord(path=[0])" + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" # noinspection PyTypeChecker def can_compare_incremental_stream_result(): @@ -186,7 +181,6 @@ def can_compare_incremental_stream_result(): "items": ["hello", "world"], "errors": [GraphQLError("msg")], "path": ["foo", 1], - "label": "bar", "extensions": {"baz": 2}, } result = IncrementalStreamResult(**args) @@ -196,12 +190,10 @@ def can_compare_incremental_stream_result(): ) assert result != IncrementalStreamResult(**modified_args(args, errors=[])) assert result != IncrementalStreamResult(**modified_args(args, path=["foo", 2])) - assert result != IncrementalStreamResult(**modified_args(args, label="baz")) assert result != IncrementalStreamResult( **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) - assert result == tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] @@ -210,7 +202,7 @@ def can_compare_incremental_stream_result(): assert result == dict(list(args.items())[:2]) assert result == dict(list(args.items())[:3]) assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) - assert result != {**args, "label": "baz"} + assert result != {**args, "extensions": {"baz": 1}} @pytest.mark.asyncio async def can_stream_a_list_field(): @@ -226,11 +218,12 @@ async def can_stream_a_list_field(): "hasNext": True, }, { - "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "incremental": [{"items": ["banana"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -249,15 +242,16 @@ async def can_use_default_value_of_initial_count(): "hasNext": True, }, { - "incremental": [{"items": ["apple"], "path": ["scalarList", 0]}], + "incremental": [{"items": ["apple"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["banana"], "path": ["scalarList", 1]}], + "incremental": [{"items": ["banana"], "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [{"items": ["coconut"], "path": ["scalarList", 2]}], + "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -317,8 +311,7 @@ async def returns_label_from_stream_directive(): "incremental": [ { "items": ["banana"], - "path": ["scalarList", 1], - "label": "scalar-stream", + "path": ["scalarList"], } ], "hasNext": True, @@ -327,10 +320,10 @@ async def returns_label_from_stream_directive(): "incremental": [ { "items": ["coconut"], - "path": ["scalarList", 2], - "label": "scalar-stream", + "path": ["scalarList"], } ], + "completed": [{"path": ["scalarList"], "label": "scalar-stream"}], "hasNext": False, }, ] @@ -388,9 +381,10 @@ async def does_not_disable_stream_with_null_if_argument(): "incremental": [ { "items": ["coconut"], - "path": ["scalarList", 2], + "path": ["scalarList"], } ], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -419,7 +413,7 @@ async def can_stream_multi_dimensional_lists(): "incremental": [ { "items": [["banana", "banana", "banana"]], - "path": ["scalarListList", 1], + "path": 
["scalarListList"], } ], "hasNext": True, @@ -428,9 +422,10 @@ async def can_stream_multi_dimensional_lists(): "incremental": [ { "items": [["coconut", "coconut", "coconut"]], - "path": ["scalarListList", 2], + "path": ["scalarListList"], } ], + "completed": [{"path": ["scalarListList"]}], "hasNext": False, }, ] @@ -449,7 +444,6 @@ async def can_stream_a_field_that_returns_a_list_of_awaitables(): ) async def await_friend(f): - await sleep(0) return f result = await complete( @@ -470,9 +464,10 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -491,7 +486,6 @@ async def can_stream_in_correct_order_with_list_of_awaitables(): ) async def await_friend(f): - await sleep(0) return f result = await complete( @@ -507,7 +501,7 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList", 0], + "path": ["friendList"], } ], "hasNext": True, @@ -516,7 +510,7 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Han", "id": "2"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -525,9 +519,10 @@ async def await_friend(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -573,9 +568,10 @@ async def get_id(f): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -594,7 +590,6 @@ async def handles_error_in_list_of_awaitables_before_initial_count_reached(): ) async def await_friend(f, i): - await sleep(0) if i == 1: raise RuntimeError("bad") return f @@ -623,9 +618,10 @@ async def await_friend(f, i): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -644,7 +640,6 @@ async def handles_error_in_list_of_awaitables_after_initial_count_reached(): ) async def await_friend(f, i): - await sleep(0) if i == 1: raise RuntimeError("bad") return f @@ -666,7 +661,7 @@ async def await_friend(f, i): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "bad", @@ -682,9 +677,10 @@ async def await_friend(f, i): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -704,7 +700,6 @@ async def can_stream_a_field_that_returns_an_async_iterable(): async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete(document, {"friendList": friend_list}) @@ -717,7 +712,7 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList", 0], + "path": ["friendList"], } ], "hasNext": True, @@ -726,7 +721,7 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Han", "id": "2"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -735,12 +730,13 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": 
["friendList"]}], "hasNext": False, }, ] @@ -760,7 +756,6 @@ async def can_stream_a_field_that_returns_an_async_iterable_with_initial_count() async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete(document, {"friendList": friend_list}) @@ -778,12 +773,13 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -831,7 +827,6 @@ async def can_handle_concurrent_calls_to_next_without_waiting(): async def friend_list(_info): for i in range(3): - await sleep(0) yield friends[i] result = await complete_async(document, 3, {"friendList": friend_list}) @@ -854,13 +849,16 @@ async def friend_list(_info): "incremental": [ { "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, }, - {"done": False, "value": {"hasNext": False}}, + { + "done": False, + "value": {"completed": [{"path": ["friendList"]}], "hasNext": False}, + }, {"done": True, "value": None}, ] @@ -878,9 +876,7 @@ async def handles_error_in_async_iterable_before_initial_count_is_reached(): ) async def friend_list(_info): - await sleep(0) yield friends[0] - await sleep(0) raise RuntimeError("bad") result = await complete(document, {"friendList": friend_list}) @@ -909,9 +905,7 @@ async def handles_error_in_async_iterable_after_initial_count_is_reached(): ) async def friend_list(_info): - await sleep(0) yield friends[0] - await sleep(0) raise RuntimeError("bad") result = await complete(document, {"friendList": friend_list}) @@ -923,10 +917,9 @@ async def friend_list(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "bad", @@ -963,10 +956,9 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -995,9 +987,7 @@ async def handles_null_for_non_null_async_items_after_initial_count_is_reached() async def friend_list(_info): try: - await sleep(0) yield friends[0] - await sleep(0) yield None finally: raise RuntimeError("Oops") @@ -1011,10 +1001,9 @@ async def friend_list(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1054,7 +1043,7 @@ async def scalar_list(_info): "incremental": [ { "items": [None], - "path": ["scalarList", 1], + "path": ["scalarList"], "errors": [ { "message": "String cannot represent value: {}", @@ -1064,6 +1053,7 @@ async def scalar_list(_info): ], }, ], + "completed": [{"path": ["scalarList"]}], "hasNext": False, }, ] @@ -1084,7 +1074,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): @@ -1107,7 +1096,7 @@ def get_friends(_info): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1123,9 +1112,10 @@ def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 
2], + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1143,7 +1133,6 @@ async def handles_nested_async_error_in_complete_value_after_initial_count(): ) async def get_friend_name(i): - await sleep(0) if i < 0: raise RuntimeError("Oops") return friends[i].name @@ -1168,7 +1157,7 @@ def get_friends(_info): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1184,9 +1173,10 @@ def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1207,7 +1197,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} def get_friends(_info): @@ -1227,10 +1216,9 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1257,7 +1245,6 @@ async def handles_nested_async_error_in_complete_value_after_initial_non_null(): ) async def get_friend_name(i): - await sleep(0) if i < 0: raise RuntimeError("Oops") return friends[i].name @@ -1279,10 +1266,9 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1312,7 +1298,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} async def get_friends(_info): @@ -1336,7 +1321,7 @@ async def get_friends(_info): "incremental": [ { "items": [None], - "path": ["friendList", 1], + "path": ["friendList"], "errors": [ { "message": "Oops", @@ -1352,12 +1337,13 @@ async def get_friends(_info): "incremental": [ { "items": [{"nonNullName": "Han"}], - "path": ["friendList", 2], + "path": ["friendList"], }, ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1378,7 +1364,6 @@ async def throw(): raise RuntimeError("Oops") async def get_friend(i): - await sleep(0) return {"nonNullName": throw() if i < 0 else friends[i].name} async def get_friends(_info): @@ -1399,10 +1384,9 @@ async def get_friends(_info): "hasNext": True, }, { - "incremental": [ + "completed": [ { - "items": None, - "path": ["nonNullFriendList", 1], + "path": ["nonNullFriendList"], "errors": [ { "message": "Oops", @@ -1416,6 +1400,138 @@ async def get_friends(_info): }, ] + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_no_aclose(): + # Handles async errors thrown by complete_value after initialCount is reached + # from async iterable for a non-nullable list when the async iterable does + # not provide an aclose method. 
+ document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithoutAclose: + def __init__(self): + self.count = 0 + + def __aiter__(self): + return self + + async def __anext__(self): + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async_iterable = AsyncIterableWithoutAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "completed": [ + { + "path": ["nonNullFriendList"], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + + @pytest.mark.asyncio + async def handles_async_errors_in_complete_value_after_initial_count_slow_aclose(): + # Handles async errors thrown by completeValue after initialCount is reached + # from async iterable for a non-nullable list when the async iterable provides + # concurrent next/return methods and has a slow aclose() + document = parse( + """ + query { + nonNullFriendList @stream(initialCount: 1) { + nonNullName + } + } + """ + ) + + async def throw(): + raise RuntimeError("Oops") + + class AsyncIterableWithSlowAclose: + def __init__(self): + self.count = 0 + self.finished = False + + def __aiter__(self): + return self + + async def __anext__(self): + if self.finished: + raise StopAsyncIteration # pragma: no cover + count = self.count + self.count += 1 + if count == 1: + name = throw() + else: + if count: + count -= 1 # pragma: no cover + name = friends[count].name + return {"nonNullName": name} + + async def aclose(self): + await sleep(0) + self.finished = True + + async_iterable = AsyncIterableWithSlowAclose() + result = await complete(document, {"nonNullFriendList": async_iterable}) + assert result == [ + { + "data": { + "nonNullFriendList": [{"nonNullName": "Luke"}], + }, + "hasNext": True, + }, + { + "completed": [ + { + "path": ["nonNullFriendList"], + "errors": [ + { + "message": "Oops", + "locations": [{"line": 4, "column": 17}], + "path": ["nonNullFriendList", 1, "nonNullName"], + }, + ], + }, + ], + "hasNext": False, + }, + ] + assert async_iterable.finished + @pytest.mark.asyncio async def filters_payloads_that_are_nulled(): document = parse( @@ -1432,10 +1548,9 @@ async def filters_payloads_that_are_nulled(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1483,7 +1598,6 @@ async def filters_payloads_that_are_nulled_by_a_later_synchronous_error(): ) async def friend_list(_info): - await sleep(0) # pragma: no cover yield friends[0] # pragma: no cover result = await complete( @@ -1531,11 +1645,9 @@ async def does_not_filter_payloads_when_null_error_is_in_a_different_path(): ) async def error_field(_info): - await sleep(0) raise RuntimeError("Oops") async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1571,12 +1683,16 @@ async def friend_list(_info): }, { "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], + "path": ["nestedObject", "nestedFriendList"], }, ], + "completed": [{"path": ["otherNestedObject"]}], "hasNext": True, }, - 
{"hasNext": False}, + { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], + "hasNext": False, + }, ] @pytest.mark.asyncio @@ -1600,10 +1716,9 @@ async def filters_stream_payloads_that_are_nulled_in_a_deferred_payload(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend_list(_info): - await sleep(0) yield friends[0] result = await complete( @@ -1646,11 +1761,13 @@ async def friend_list(_info): ], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": False, }, ] @pytest.mark.asyncio + @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): document = parse( """ @@ -1666,17 +1783,15 @@ async def filters_defer_payloads_that_are_nulled_in_a_stream_response(): ) async def resolve_null(_info): - await sleep(0) + return None async def friend(): - await sleep(0) return { "name": friends[0].name, "nonNullName": resolve_null, } async def friend_list(_info): - await sleep(0) yield await friend() result = await complete(document, {"friendList": friend_list}) @@ -1692,7 +1807,7 @@ async def friend_list(_info): "incremental": [ { "items": [None], - "path": ["friendList", 0], + "path": ["friendList"], "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1706,6 +1821,7 @@ async def friend_list(_info): "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1716,15 +1832,14 @@ async def returns_iterator_and_ignores_error_when_stream_payloads_are_filtered() finished = False async def resolve_null(_info): - await sleep(0) + return None async def iterable(_info): nonlocal finished for i in range(3): - await sleep(0) friend = friends[i] yield {"name": friend.name, "nonNullName": None} - finished = True # pragma: no cover + finished = True document = parse( """ @@ -1762,6 +1877,8 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert not finished + result2 = await anext(iterator) assert result2.formatted == { "incremental": [ @@ -1782,13 +1899,14 @@ async def iterable(_info): ], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": False, } with pytest.raises(StopAsyncIteration): await anext(iterator) - assert not finished # running iterator cannot be canceled + assert finished @pytest.mark.asyncio async def handles_awaitables_from_complete_value_after_initial_count_is_reached(): @@ -1804,11 +1922,9 @@ async def handles_awaitables_from_complete_value_after_initial_count_is_reached( ) async def get_friend_name(i): - await sleep(0) return friends[i].name async def get_friend(i): - await sleep(0) if i < 2: return friends[i] return {"id": friends[2].id, "name": get_friend_name(i)} @@ -1834,7 +1950,7 @@ async def get_friends(_info): "incremental": [ { "items": [{"id": "2", "name": "Han"}], - "path": ["friendList", 1], + "path": ["friendList"], } ], "hasNext": True, @@ -1843,12 +1959,13 @@ async def get_friends(_info): "incremental": [ { "items": [{"id": "3", "name": "Leia"}], - "path": ["friendList", 2], + "path": ["friendList"], } ], "hasNext": True, }, { + "completed": [{"path": ["friendList"]}], "hasNext": False, }, ] @@ -1877,7 +1994,6 @@ async def handles_overlapping_deferred_and_non_deferred_streams(): async def get_nested_friend_list(_info): for i in range(2): - await sleep(0) yield friends[i] result = await complete( @@ -1889,142 +2005,39 @@ async def get_nested_friend_list(_info): }, ) - assert result in ( - # 
exact order of results depends on timing and Python version - [ - { - "data": {"nestedObject": {"nestedFriendList": []}}, - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - ], - "hasNext": True, - }, - { - "hasNext": False, - }, - ], - [ - { - "data": {"nestedObject": {"nestedFriendList": []}}, - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]}, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - }, - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - }, - ], - "hasNext": True, - }, - { - "hasNext": False, - }, - ], - [ - {"data": {"nestedObject": {"nestedFriendList": []}}, "hasNext": True}, - { - "incremental": [ - { - "items": [{"id": "1"}], - "path": ["nestedObject", "nestedFriendList", 0], - } - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2"}], - "path": ["nestedObject", "nestedFriendList", 1], - } - ], - "hasNext": True, - }, - { - "incremental": [ - {"data": {"nestedFriendList": []}, "path": ["nestedObject"]} - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], - } - ], - "hasNext": True, - }, - { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], - } - ], - "hasNext": True, + assert result == [ + { + "data": { + "nestedObject": { + "nestedFriendList": [], + }, }, - {"hasNext": False}, - ], - ) + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "1", "name": "Luke"}], + "path": ["nestedObject", "nestedFriendList"], + }, + ], + "completed": [{"path": ["nestedObject"]}], + "hasNext": True, + }, + { + "incremental": [ + { + "items": [{"id": "2", "name": "Han"}], + "path": ["nestedObject", "nestedFriendList"], + }, + ], + "hasNext": True, + }, + { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], + "hasNext": False, + }, + ] @pytest.mark.asyncio async def returns_payloads_properly_when_parent_deferred_slower_than_stream(): @@ -2052,7 +2065,6 @@ async def slow_field(_info): async def get_friends(_info): for i in range(2): - await sleep(0) yield friends[i] execute_result = experimental_execute_incrementally( @@ -2081,6 +2093,7 @@ async def get_friends(_info): "path": ["nestedObject"], }, ], + "completed": [{"path": ["nestedObject"]}], "hasNext": True, } result3 = await anext(iterator) @@ -2088,7 +2101,7 @@ async def get_friends(_info): "incremental": [ { "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList", 0], + "path": ["nestedObject", 
"nestedFriendList"], }, ], "hasNext": True, @@ -2098,13 +2111,14 @@ async def get_friends(_info): "incremental": [ { "items": [{"name": "Han"}], - "path": ["nestedObject", "nestedFriendList", 1], + "path": ["nestedObject", "nestedFriendList"], }, ], "hasNext": True, } result5 = await anext(iterator) assert result5.formatted == { + "completed": [{"path": ["nestedObject", "nestedFriendList"]}], "hasNext": False, } @@ -2136,9 +2150,7 @@ async def slow_field(_info): ) async def get_friends(_info): - await sleep(0) yield friends[0] - await sleep(0) yield {"id": friends[1].id, "name": slow_field} await resolve_iterable.wait() @@ -2163,27 +2175,36 @@ async def get_friends(_info): { "data": {"name": "Luke"}, "path": ["friendList", 0], - "label": "DeferName", }, { "items": [{"id": "2"}], - "path": ["friendList", 1], - "label": "stream-label", + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList", 0], "label": "DeferName"}], "hasNext": True, } resolve_slow_field.set() result3 = await anext(iterator) assert result3.formatted == { + "completed": [ + { + "path": ["friendList"], + "label": "stream-label", + }, + ], + "hasNext": True, + } + result4 = await anext(iterator) + assert result4.formatted == { "incremental": [ { "data": {"name": "Han"}, "path": ["friendList", 1], - "label": "DeferName", }, ], + "completed": [{"path": ["friendList", 1], "label": "DeferName"}], "hasNext": False, } @@ -2214,11 +2235,8 @@ async def slow_field(_info): ) async def get_friends(_info): - await sleep(0) yield friends[0] - await sleep(0) yield {"id": friends[1].id, "name": slow_field} - await sleep(0) await resolve_iterable.wait() execute_result = await experimental_execute_incrementally( # type: ignore @@ -2242,14 +2260,13 @@ async def get_friends(_info): { "data": {"name": "Luke"}, "path": ["friendList", 0], - "label": "DeferName", }, { "items": [{"id": "2"}], - "path": ["friendList", 1], - "label": "stream-label", + "path": ["friendList"], }, ], + "completed": [{"path": ["friendList", 0], "label": "DeferName"}], "hasNext": True, } @@ -2259,15 +2276,16 @@ async def get_friends(_info): { "data": {"name": "Han"}, "path": ["friendList", 1], - "label": "DeferName", }, ], + "completed": [{"path": ["friendList", 1], "label": "DeferName"}], "hasNext": True, } resolve_iterable.set() result4 = await anext(iterator) assert result4.formatted == { + "completed": [{"path": ["friendList"], "label": "stream-label"}], "hasNext": False, } @@ -2275,13 +2293,12 @@ async def get_friends(_info): await anext(iterator) @pytest.mark.asyncio - async def finishes_async_iterable_when_returned_generator_is_closed(): + async def finishes_async_iterable_when_finished_generator_is_closed(): finished = False async def iterable(_info): nonlocal finished for i in range(3): - await sleep(0) yield friends[i] finished = True @@ -2311,7 +2328,6 @@ async def iterable(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) assert finished @pytest.mark.asyncio @@ -2324,7 +2340,6 @@ def __aiter__(self): return self async def __anext__(self): - await sleep(0) index = self.index self.index = index + 1 try: @@ -2361,18 +2376,15 @@ async def __anext__(self): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) - await sleep(0) assert iterable.index == 4 @pytest.mark.asyncio - async def finishes_async_iterable_when_error_is_raised_in_returned_generator(): + async def finishes_async_iterable_when_error_is_raised_in_finished_generator(): finished = False async def iterable(_info): nonlocal 
finished for i in range(3): - await sleep(0) yield friends[i] finished = True @@ -2404,5 +2416,4 @@ async def iterable(_info): with pytest.raises(StopAsyncIteration): await anext(iterator) - await sleep(0) assert finished diff --git a/tests/pyutils/test_ref_map.py b/tests/pyutils/test_ref_map.py new file mode 100644 index 00000000..96e15c58 --- /dev/null +++ b/tests/pyutils/test_ref_map.py @@ -0,0 +1,124 @@ +import pytest + +from graphql.pyutils import RefMap + +obj1 = {"a": 1, "b": 2, "c": 3} +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_map(): + def can_create_an_empty_map(): + m = RefMap[str, int]() + assert not m + assert len(m) == 0 + assert list(m) == [] + assert list(m.keys()) == [] + assert list(m.values()) == [] + assert list(m.items()) == [] + + def can_create_a_map_with_scalar_keys_and_values(): + m = RefMap[str, int](list(obj1.items())) + assert m + assert len(m) == 3 + assert list(m) == ["a", "b", "c"] + assert list(m.keys()) == ["a", "b", "c"] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [("a", 1), ("b", 2), ("c", 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + + def can_create_a_map_with_one_object_as_key(): + m = RefMap[dict, int]([(obj1, 1)]) + assert m + assert len(m) == 1 + assert list(m) == [obj1] + assert list(m.keys()) == [obj1] + assert list(m.values()) == [1] + assert list(m.items()) == [(obj1, 1)] + assert obj1 in m + assert 1 not in m + assert obj2 not in m + assert m[obj1] == 1 + assert m.get(obj1) == 1 + with pytest.raises(KeyError): + m[1] # type: ignore + assert m.get(1) is None + with pytest.raises(KeyError): + m[obj2] + assert m.get(obj2) is None + + def can_create_a_map_with_three_objects_as_keys(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + assert m + assert len(m) == 3 + assert list(m) == [obj1, obj2, obj3] + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 2, 3] + assert list(m.items()) == [(obj1, 1), (obj2, 2), (obj3, 3)] + for k, v in m.items(): + assert k in m + assert m[k] == v + assert m.get(k) == v + assert v not in m + with pytest.raises(KeyError): + m[v] # type: ignore + assert m.get(v) is None + assert obj4 not in m + with pytest.raises(KeyError): + m[obj4] + assert m.get(obj4) is None + + def can_set_a_key_that_is_an_object(): + m = RefMap[dict, int]() + m[obj1] = 1 + assert m[obj1] == 1 + assert list(m) == [obj1] + with pytest.raises(KeyError): + m[obj2] + m[obj2] = 2 + assert m[obj1] == 1 + assert m[obj2] == 2 + assert list(m) == [obj1, obj2] + m[obj2] = 3 + assert m[obj1] == 1 + assert m[obj2] == 3 + assert list(m) == [obj1, obj2] + assert len(m) == 2 + + def can_delete_a_key_that_is_an_object(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2), (obj3, 3)]) + del m[obj2] + assert obj2 not in m + assert list(m) == [obj1, obj3] + with pytest.raises(KeyError): + del m[obj2] + assert list(m) == [obj1, obj3] + assert len(m) == 2 + + def can_update_a_map(): + m = RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + m.update([]) + assert list(m.keys()) == [obj1, obj2] + assert len(m) == 2 + m.update([(obj2, 3), (obj3, 4)]) + assert list(m.keys()) == [obj1, obj2, obj3] + assert list(m.values()) == [1, 3, 4] + assert list(m.items()) == [(obj1, 1), (obj2, 3), (obj3, 4)] + assert obj3 in m + assert m[obj2] == 3 + assert m[obj3] == 4 + assert len(m) == 3 + + def can_get_the_representation_of_a_ref_map(): + m = 
RefMap[dict, int]([(obj1, 1), (obj2, 2)]) + assert repr(m) == ( + "RefMap([({'a': 1, 'b': 2, 'c': 3}, 1), ({'a': 1, 'b': 2, 'c': 3}, 2)])" + ) diff --git a/tests/pyutils/test_ref_set.py b/tests/pyutils/test_ref_set.py new file mode 100644 index 00000000..fead877b --- /dev/null +++ b/tests/pyutils/test_ref_set.py @@ -0,0 +1,89 @@ +import pytest + +from graphql.pyutils import RefSet + +obj1 = ["a", "b", "c"] +obj2 = obj1.copy() +obj3 = obj1.copy() +obj4 = obj1.copy() + + +def describe_object_set(): + def can_create_an_empty_set(): + s = RefSet[int]() + assert not s + assert len(s) == 0 + assert list(s) == [] + + def can_create_a_set_with_scalar_values(): + s = RefSet[str](obj1) + assert s + assert len(s) == 3 + assert list(s) == ["a", "b", "c"] + for v in s: + assert v in s + + def can_create_a_set_with_one_object_as_value(): + s = RefSet[list]([obj1]) + assert s + assert len(s) == 1 + assert obj1 in s + assert obj2 not in s + + def can_create_a_set_with_three_objects_as_keys(): + s = RefSet[list]([obj1, obj2, obj3]) + assert s + assert len(s) == 3 + assert list(s) == [obj1, obj2, obj3] + for v in s: + assert v in s + assert obj4 not in s + + def can_add_a_value_that_is_an_object(): + s = RefSet[list]() + s.add(obj1) + assert obj1 in s + assert list(s) == [obj1] + assert obj2 not in s + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + s.add(obj2) + assert obj1 in s + assert obj2 in s + assert list(s) == [obj1, obj2] + assert len(s) == 2 + + def can_remove_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.remove(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + with pytest.raises(KeyError): + s.remove(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_discard_a_value_that_is_an_object(): + s = RefSet[list]([obj1, obj2, obj3]) + s.discard(obj2) + assert obj2 not in s + assert list(s) == [obj1, obj3] + s.discard(obj2) + assert list(s) == [obj1, obj3] + assert len(s) == 2 + + def can_update_a_set(): + s = RefSet[list]([obj1, obj2]) + s.update([]) + assert list(s) == [obj1, obj2] + assert len(s) == 2 + s.update([obj2, obj3]) + assert list(s) == [obj1, obj2, obj3] + assert obj3 in s + assert len(s) == 3 + + def can_get_the_representation_of_a_ref_set(): + s = RefSet[list]([obj1, obj2]) + assert repr(s) == ("RefSet([['a', 'b', 'c'], ['a', 'b', 'c']])") From 9d915b247a4d55d8a8a8211b9816f62bfcac0de2 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 19:52:45 +0100 Subject: [PATCH 25/50] Fix GitHub action for older Python versions --- .github/workflows/test.yml | 26 +++++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 298d3dd0..77f15bf1 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,7 +9,31 @@ jobs: strategy: matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13', 'pypy3.9', 'pypy3.10'] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" + + - name: Run unit tests with tox + run: tox + + tests-old: + name: 🧪 Tests (older Python versions) + runs-on: ubuntu-22.04 + + strategy: + matrix: + 
python-version: ['3.7', '3.8'] steps: - uses: actions/checkout@v4 From a4e5778f6d9cdef5d8e564ecd3b66cd44e205315 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 20:18:59 +0100 Subject: [PATCH 26/50] Improve README file --- README.md | 43 +++++++++++++++++++++---------------------- tox.ini | 4 ++-- 2 files changed, 23 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 913f81e5..fa10c81c 100644 --- a/README.md +++ b/README.md @@ -10,14 +10,14 @@ a query language for APIs created by Facebook. ![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) [![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) -An extensive test suite with over 2300 unit tests and 100% coverage comprises a -replication of the complete test suite of GraphQL.js, making sure this port is -reliable and compatible with GraphQL.js. +An extensive test suite with over 2200 unit tests and 100% coverage replicates the +complete test suite of GraphQL.js, ensuring that this port is reliable and compatible +with GraphQL.js. -The current stable version 3.2.4 of GraphQL-core is up-to-date with GraphQL.js -version 16.8.2 and supports Python version 3.6 to 3.12. +The current stable version 3.2.5 of GraphQL-core is up-to-date with GraphQL.js +version 16.8.2 and supports Python versions 3.6 to 3.13. -You can also try out the latest alpha version 3.3.0a6 of GraphQL-core +You can also try out the latest alpha version 3.3.0a6 of GraphQL-core, which is up-to-date with GraphQL.js version 17.0.0a2. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. @@ -26,13 +26,12 @@ Note that for various reasons, GraphQL-core does not use SemVer like GraphQL.js. Changes in the major version of GraphQL.js are reflected in the minor version of GraphQL-core instead. This means there can be breaking changes in the API when the minor version changes, and only patch releases are fully backward compatible. -Therefore, we recommend something like `=~ 3.2.0` as version specifier +Therefore, we recommend using something like `~= 3.2.0` as the version specifier when including GraphQL-core as a dependency. - ## Documentation -A more detailed documentation for GraphQL-core 3 can be found at +More detailed documentation for GraphQL-core 3 can be found at [graphql-core-3.readthedocs.io](https://graphql-core-3.readthedocs.io/). The documentation for GraphQL.js can be found at [graphql.org/graphql-js/](https://graphql.org/graphql-js/). @@ -47,10 +46,10 @@ examples. A general overview of GraphQL is available in the [README](https://github.com/graphql/graphql-spec/blob/main/README.md) for the -[Specification for GraphQL](https://github.com/graphql/graphql-spec). That overview -describes a simple set of GraphQL examples that exist as [tests](tests) in this -repository. A good way to get started with this repository is to walk through that -README and the corresponding tests in parallel. +[Specification for GraphQL](https://github.com/graphql/graphql-spec). This overview +includes a simple set of GraphQL examples that are also available as [tests](tests) +in this repository. A good way to get started with this repository is to walk through +that README and the corresponding tests in parallel. 
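As a quick illustration of the `~= 3.2.0` version specifier recommended above,
a dependency pin could look like this (a hypothetical `requirements.txt`
entry, shown only as a sketch):

```
graphql-core ~= 3.2.0
```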
## Installation @@ -174,17 +173,17 @@ asyncio.run(main()) ## Goals and restrictions -GraphQL-core tries to reproduce the code of the reference implementation GraphQL.js -in Python as closely as possible and to stay up-to-date with the latest development of -GraphQL.js. +GraphQL-core aims to reproduce the code of the reference implementation GraphQL.js +in Python as closely as possible while staying up-to-date with the latest development +of GraphQL.js. -GraphQL-core 3 (formerly known as GraphQL-core-next) has been created as a modern +GraphQL-core 3 (formerly known as GraphQL-core-next) was created as a modern alternative to [GraphQL-core 2](https://github.com/graphql-python/graphql-core-legacy), -a prior work by Syrus Akbary, based on an older version of GraphQL.js and also -targeting older Python versions. Some parts of GraphQL-core 3 have been inspired by -GraphQL-core 2 or directly taken over with only slight modifications, but most of the -code has been re-implemented from scratch, replicating the latest code in GraphQL.js -very closely and adding type hints for Python. +a prior work by Syrus Akbary based on an older version of GraphQL.js that still +supported legacy Python versions. While some parts of GraphQL-core 3 were inspired by +GraphQL-core 2 or directly taken over with slight modifications, most of the code has +been re-implemented from scratch. This re-implementation closely replicates the latest +code in GraphQL.js and adds type hints for Python. Design goals for the GraphQL-core 3 library were: diff --git a/tox.ini b/tox.ini index c998afd8..7f6b4dcb 100644 --- a/tox.ini +++ b/tox.ini @@ -49,7 +49,7 @@ deps = pytest-timeout>=2.3,<3 py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 commands = - # to also run the time-consuming tests: tox -e py311 -- --run-slow - # to run the benchmarks: tox -e py311 -- -k benchmarks --benchmark-enable + # to also run the time-consuming tests: tox -e py312 -- --run-slow + # to run the benchmarks: tox -e py312 -- -k benchmarks --benchmark-enable py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 666ecdc870ba7419ef2a5171f31c39552d1981de Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 18 Jan 2025 21:33:09 +0100 Subject: [PATCH 27/50] Update dependencies --- poetry.lock | 503 +++++++++++------- pyproject.toml | 28 +- src/graphql/execution/values.py | 3 +- src/graphql/language/lexer.py | 6 +- src/graphql/type/definition.py | 3 +- src/graphql/type/introspection.py | 3 +- src/graphql/type/scalars.py | 2 +- src/graphql/type/validate.py | 3 +- .../utilities/find_breaking_changes.py | 4 +- .../rules/overlapping_fields_can_be_merged.py | 3 +- .../rules/unique_field_definition_names.py | 3 +- tests/error/test_graphql_error.py | 2 +- tests/execution/test_abstract.py | 4 +- tests/execution/test_oneof.py | 2 +- tests/execution/test_schema.py | 2 +- tests/language/test_lexer.py | 3 +- tests/language/test_printer.py | 3 +- tests/star_wars_schema.py | 10 +- tests/type/test_definition.py | 3 +- tests/type/test_validation.py | 3 +- tests/utilities/test_extend_schema.py | 3 +- tests/utilities/test_find_breaking_changes.py | 3 +- tests/utilities/test_type_info.py | 3 +- tests/validation/test_validation.py | 3 +- tox.ini | 12 +- 25 files changed, 367 insertions(+), 250 deletions(-) diff --git a/poetry.lock b/poetry.lock index abd0077f..2208d903 100644 --- a/poetry.lock +++ b/poetry.lock @@ -246,116 +246,103 @@ files = [ [[package]] name = 
"charset-normalizer" -version = "3.4.0" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, 
- {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = 
"sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, +python-versions = ">=3.7" +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = 
"charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file 
= "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = 
"charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] @@ -531,6 +518,83 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "coverage" +version = "7.6.10" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + [[package]] name = "distlib" version = "0.3.9" @@ -690,13 +754,13 @@ files = [ [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.5" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, ] [package.dependencies] @@ -858,49 +922,55 @@ reports = ["lxml"] [[package]] name = "mypy" -version = "1.13.0" +version = "1.14.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - 
{file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = 
"mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, ] [package.dependencies] -mypy-extensions = ">=1.0.0" +mypy_extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] @@ -1070,13 +1140,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
optional = false python-versions = ">=3.8" files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -1167,22 +1237,40 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy [[package]] name = "pytest-asyncio" -version = "0.23.8" +version = "0.24.0" description = "Pytest support for asyncio" optional = false python-versions = ">=3.8" files = [ - {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, - {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, + {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, + {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, ] [package.dependencies] -pytest = ">=7.0.0,<9" +pytest = ">=8.2,<9" [package.extras] docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] +[[package]] +name = "pytest-asyncio" +version = "0.25.2" +description = "Pytest support for asyncio" +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, + {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, +] + +[package.dependencies] +pytest = ">=8.2,<9" + +[package.extras] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] + [[package]] name = "pytest-benchmark" version = "4.0.0" @@ -1203,6 +1291,26 @@ aspect = ["aspectlib"] elasticsearch = ["elasticsearch"] histogram = ["pygal", "pygaljs"] +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105"}, + {file = "pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89"}, +] + +[package.dependencies] +py-cpuinfo = "*" +pytest = ">=8.1" + +[package.extras] +aspect = ["aspectlib"] +elasticsearch = ["elasticsearch"] +histogram = ["pygal", "pygaljs", "setuptools"] + [[package]] name = "pytest-codspeed" version = "2.2.1" @@ -1226,22 +1334,23 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-codspeed" -version = "3.1.0" +version = "3.1.2" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1cb7c16e5a64cb30bad30f5204c7690f3cbc9ae5b9839ce187ef1727aa5d2d9c"}, - {file = "pytest_codspeed-3.1.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d23910893c22ceef6efbdf85d80e803b7fb4a231c9e7676ab08f5ddfc228438"}, - {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fb1495a633a33e15268a1f97d91a4809c868de06319db50cf97b4e9fa426372c"}, - {file = "pytest_codspeed-3.1.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dbd8a54b99207bd25a4c3f64d9a83ac0f3def91cdd87204ca70a49f822ba919c"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4d1ac896ebaea5b365e69b41319b4d09b57dab85ec6234f6ff26116b3795f03"}, - {file = "pytest_codspeed-3.1.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5f0c1857a0a6cce6a23c49f98c588c2eef66db353c76ecbb2fb65c1a2b33a8d5"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4731a7cf1d8d38f58140d51faa69b7c1401234c59d9759a2507df570c805b11"}, - {file = "pytest_codspeed-3.1.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c5f2e4b63260f65493b8d42c8167f831b8ed90788f81eb4eb95a103ee6aa4294"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db44099b3f1ec1c9c41f0267c4d57d94e31667f4cb3fb4b71901561e8ab8bc98"}, - {file = "pytest_codspeed-3.1.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a533c1ad3cc60f07be432864c83d1769ce2877753ac778e1bfc5a9821f5c6ddf"}, - {file = "pytest_codspeed-3.1.0.tar.gz", hash = "sha256:f29641d27b4ded133b1058a4c859e510a2612ad4217ef9a839ba61750abd2f8a"}, + {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, + {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, + {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, + {file 
= "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, + {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, + {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, + {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, + {file = "pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, + {file = "pytest_codspeed-3.1.2.tar.gz", hash = "sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, ] [package.dependencies] @@ -1291,6 +1400,24 @@ pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] +[[package]] +name = "pytest-cov" +version = "6.0.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + [[package]] name = "pytest-describe" version = "2.2.0" @@ -1393,29 +1520,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.8.3" +version = "0.9.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.8.3-py3-none-linux_armv6l.whl", hash = "sha256:8d5d273ffffff0acd3db5bf626d4b131aa5a5ada1276126231c4174543ce20d6"}, - {file = "ruff-0.8.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:e4d66a21de39f15c9757d00c50c8cdd20ac84f55684ca56def7891a025d7e939"}, - {file = "ruff-0.8.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c356e770811858bd20832af696ff6c7e884701115094f427b64b25093d6d932d"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c0a60a825e3e177116c84009d5ebaa90cf40dfab56e1358d1df4e29a9a14b13"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:75fb782f4db39501210ac093c79c3de581d306624575eddd7e4e13747e61ba18"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f26bc76a133ecb09a38b7868737eded6941b70a6d34ef53a4027e83913b6502"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:01b14b2f72a37390c1b13477c1c02d53184f728be2f3ffc3ace5b44e9e87b90d"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:53babd6e63e31f4e96ec95ea0d962298f9f0d9cc5990a1bbb023a6baf2503a82"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ae441ce4cf925b7f363d33cd6570c51435972d697e3e58928973994e56e1452"}, - {file = "ruff-0.8.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7c65bc0cadce32255e93c57d57ecc2cca23149edd52714c0c5d6fa11ec328cd"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5be450bb18f23f0edc5a4e5585c17a56ba88920d598f04a06bd9fd76d324cb20"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8faeae3827eaa77f5721f09b9472a18c749139c891dbc17f45e72d8f2ca1f8fc"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:db503486e1cf074b9808403991663e4277f5c664d3fe237ee0d994d1305bb060"}, - {file = "ruff-0.8.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6567be9fb62fbd7a099209257fef4ad2c3153b60579818b31a23c886ed4147ea"}, - {file = "ruff-0.8.3-py3-none-win32.whl", hash = "sha256:19048f2f878f3ee4583fc6cb23fb636e48c2635e30fb2022b3a1cd293402f964"}, - {file = "ruff-0.8.3-py3-none-win_amd64.whl", hash = "sha256:f7df94f57d7418fa7c3ffb650757e0c2b96cf2501a0b192c18e4fb5571dfada9"}, - {file = "ruff-0.8.3-py3-none-win_arm64.whl", hash = "sha256:fe2756edf68ea79707c8d68b78ca9a58ed9af22e430430491ee03e718b5e4936"}, - {file = "ruff-0.8.3.tar.gz", hash = "sha256:5e7558304353b84279042fc584a4f4cb8a07ae79b2bf3da1a7551d960b5626d3"}, + {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, + {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, + {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"}, + {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"}, + {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"}, + {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"}, + {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"}, + {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"}, + {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"}, ] [[package]] @@ -1896,13 +2023,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.28.0" +version = "20.29.1" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"}, - {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"}, + {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, + {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, ] [package.dependencies] @@ -1970,4 +2097,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "2f41e2d562a00d6905a8b02cd7ccf5dbcc2fb0218476addd64faff18ee8b46bf" +content-hash = "37c41caf594570c2c84273ca5abc41ab2ec53d4e05a7bf6440b3e10e6de122d7" diff --git a/pyproject.toml b/pyproject.toml index 4d366945..bc191f97 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,7 +44,7 @@ Changelog = "https://github.com/graphql-python/graphql-core/releases" [tool.poetry.dependencies] python = "^3.7" typing-extensions = [ - { version = "^4.12", python = ">=3.8,<3.10" }, + { version = "^4.12.2", python = ">=3.8,<3.10" }, { version = "^4.7.1", python = "<3.8" }, ] @@ -57,18 +57,23 @@ pytest = [ { version = "^7.4", python = "<3.8" } ] pytest-asyncio = [ - { version = "^0.23.8", python = ">=3.8" }, + { version = "^0.25.2", python = ">=3.9" }, + { version = "~0.24.0", python = ">=3.8,<3.9" }, { version = "~0.21.1", 
python = "<3.8" } ] -pytest-benchmark = "^4.0" +pytest-benchmark = [ + { version = "^5.1", python = ">=3.9" }, + { version = "^4.0", python = "<3.9" } +] pytest-cov = [ - { version = "^5.0", python = ">=3.8" }, + { version = "^6.0", python = ">=3.9" }, + { version = "^5.0", python = ">=3.8,<3.9" }, { version = "^4.1", python = "<3.8" }, ] pytest-describe = "^2.2" pytest-timeout = "^2.3" pytest-codspeed = [ - { version = "^3.1.0", python = ">=3.9" }, + { version = "^3.1.2", python = ">=3.9" }, { version = "^2.2.1", python = "<3.8" } ] tox = [ @@ -80,22 +85,22 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.8,<0.9" +ruff = ">=0.9,<0.10" mypy = [ - { version = "^1.12", python = ">=3.8" }, + { version = "^1.14", python = ">=3.8" }, { version = "~1.4", python = "<3.8" } ] -bump2version = ">=1.0,<2" +bump2version = ">=1,<2" [tool.poetry.group.doc] optional = true [tool.poetry.group.doc.dependencies] sphinx = [ - { version = ">=7,<8", python = ">=3.8" }, + { version = ">=7,<9", python = ">=3.8" }, { version = ">=4,<6", python = "<3.8" } ] -sphinx_rtd_theme = "^2.0" +sphinx_rtd_theme = ">=2,<4" [tool.ruff] line-length = 88 @@ -149,6 +154,7 @@ select = [ "YTT", # flake8-2020 ] ignore = [ + "A005", # allow using standard-lib module names "ANN401", # allow explicit Any "COM812", # allow trailing commas for auto-formatting "D105", "D107", # no docstring needed for magic methods @@ -324,5 +330,5 @@ testpaths = ["tests"] asyncio_default_fixture_loop_scope = "function" [build-system] -requires = ["poetry_core>=1.6.1,<2"] +requires = ["poetry_core>=1.6.1,<3"] build-backend = "poetry.core.masonry.api" diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index 1c223b60..fda472de 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -175,8 +175,7 @@ def get_argument_values( coerced_values[arg_def.out_name or name] = arg_def.default_value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( - f"Argument '{name}' of required type '{arg_type}'" - " was not provided." + f"Argument '{name}' of required type '{arg_type}' was not provided." 
) raise GraphQLError(msg, node) continue # pragma: no cover diff --git a/src/graphql/language/lexer.py b/src/graphql/language/lexer.py index f93bd3b7..9ec37427 100644 --- a/src/graphql/language/lexer.py +++ b/src/graphql/language/lexer.py @@ -342,7 +342,7 @@ def read_escaped_unicode_variable_width(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + size]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + size]}'.", ) def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: @@ -368,7 +368,7 @@ def read_escaped_unicode_fixed_width(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid Unicode escape sequence: '{body[position: position + 6]}'.", + f"Invalid Unicode escape sequence: '{body[position : position + 6]}'.", ) def read_escaped_character(self, position: int) -> EscapeSequence: @@ -380,7 +380,7 @@ def read_escaped_character(self, position: int) -> EscapeSequence: raise GraphQLSyntaxError( self.source, position, - f"Invalid character escape sequence: '{body[position: position + 2]}'.", + f"Invalid character escape sequence: '{body[position : position + 2]}'.", ) def read_block_string(self, start: int) -> Token: diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index f49691e7..480c1879 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -386,8 +386,7 @@ def __init__( self.parse_literal = parse_literal # type: ignore if parse_literal is not None and parse_value is None: msg = ( - f"{name} must provide" - " both 'parse_value' and 'parse_literal' functions." + f"{name} must provide both 'parse_value' and 'parse_literal' functions." ) raise TypeError(msg) self.specified_by_url = specified_by_url diff --git a/src/graphql/type/introspection.py b/src/graphql/type/introspection.py index e59386a4..313c3679 100644 --- a/src/graphql/type/introspection.py +++ b/src/graphql/type/introspection.py @@ -639,8 +639,7 @@ class TypeKind(Enum): ), "NON_NULL": GraphQLEnumValue( TypeKind.NON_NULL, - description="Indicates this type is a non-null." - " `ofType` is a valid field.", + description="Indicates this type is a non-null. 
`ofType` is a valid field.", ), }, ) diff --git a/src/graphql/type/scalars.py b/src/graphql/type/scalars.py index 1bc98c21..d35e6e26 100644 --- a/src/graphql/type/scalars.py +++ b/src/graphql/type/scalars.py @@ -315,7 +315,7 @@ def parse_id_literal(value_node: ValueNode, _variables: Any = None) -> str: GraphQLBoolean, GraphQLID, ) -} +} # pyright: ignore def is_specified_scalar_type(type_: GraphQLNamedType) -> TypeGuard[GraphQLScalarType]: diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index 109667f1..d5f8f8ce 100644 --- a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -454,8 +454,7 @@ def validate_input_fields(self, input_obj: GraphQLInputObjectType) -> None: if not fields: self.report_error( - f"Input Object type {input_obj.name}" - " must define one or more fields.", + f"Input Object type {input_obj.name} must define one or more fields.", [input_obj.ast_node, *input_obj.extension_ast_nodes], ) diff --git a/src/graphql/utilities/find_breaking_changes.py b/src/graphql/utilities/find_breaking_changes.py index d436f1d4..d2a03ad2 100644 --- a/src/graphql/utilities/find_breaking_changes.py +++ b/src/graphql/utilities/find_breaking_changes.py @@ -294,7 +294,7 @@ def find_union_type_changes( schema_changes.append( DangerousChange( DangerousChangeType.TYPE_ADDED_TO_UNION, - f"{possible_type.name} was added" f" to union type {old_type.name}.", + f"{possible_type.name} was added to union type {old_type.name}.", ) ) @@ -407,7 +407,7 @@ def find_arg_changes( schema_changes.append( BreakingChange( BreakingChangeType.ARG_REMOVED, - f"{old_type.name}.{field_name} arg" f" {arg_name} was removed.", + f"{old_type.name}.{field_name} arg {arg_name} was removed.", ) ) diff --git a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index b077958b..58a7a3b7 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -44,8 +44,7 @@ def reason_message(reason: ConflictReasonMessage) -> str: if isinstance(reason, list): return " and ".join( - f"subfields '{response_name}' conflict" - f" because {reason_message(sub_reason)}" + f"subfields '{response_name}' conflict because {reason_message(sub_reason)}" for response_name, sub_reason in reason ) return reason diff --git a/src/graphql/validation/rules/unique_field_definition_names.py b/src/graphql/validation/rules/unique_field_definition_names.py index 8451bc27..39df7203 100644 --- a/src/graphql/validation/rules/unique_field_definition_names.py +++ b/src/graphql/validation/rules/unique_field_definition_names.py @@ -47,8 +47,7 @@ def check_field_uniqueness( elif field_name in field_names: self.report_error( GraphQLError( - f"Field '{type_name}.{field_name}'" - " can only be defined once.", + f"Field '{type_name}.{field_name}' can only be defined once.", [field_names[field_name], field_def.name], ) ) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index d01e1e8a..fbc8602e 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -224,7 +224,7 @@ def serializes_to_include_all_standard_fields(): extensions = {"foo": "bar "} e_full = GraphQLError("msg", field_node, None, None, path, None, extensions) assert str(e_full) == ( - "msg\n\nGraphQL request:2:3\n" "1 | {\n2 | field\n | ^\n3 | }" + "msg\n\nGraphQL request:2:3\n1 | {\n2 | field\n | ^\n3 | }" ) assert repr(e_full) == ( 
"GraphQLError('msg', locations=[SourceLocation(line=2, column=3)]," diff --git a/tests/execution/test_abstract.py b/tests/execution/test_abstract.py index 75a1e875..ddb01345 100644 --- a/tests/execution/test_abstract.py +++ b/tests/execution/test_abstract.py @@ -23,14 +23,14 @@ def sync_and_async(spec): """Decorator for running a test synchronously and asynchronously.""" return pytest.mark.asyncio( - pytest.mark.parametrize("sync", (True, False), ids=("sync", "async"))(spec) + pytest.mark.parametrize("sync", [True, False], ids=("sync", "async"))(spec) ) def access_variants(spec): """Decorator for tests with dict and object access, including inheritance.""" return pytest.mark.asyncio( - pytest.mark.parametrize("access", ("dict", "object", "inheritance"))(spec) + pytest.mark.parametrize("access", ["dict", "object", "inheritance"])(spec) ) diff --git a/tests/execution/test_oneof.py b/tests/execution/test_oneof.py index 81f3d224..2040b1a7 100644 --- a/tests/execution/test_oneof.py +++ b/tests/execution/test_oneof.py @@ -35,7 +35,7 @@ def execute_query( def describe_execute_handles_one_of_input_objects(): def describe_one_of_input_objects(): root_value = { - "test": lambda _info, input: input, + "test": lambda _info, input: input, # noqa: A006 } def accepts_a_good_default_value(): diff --git a/tests/execution/test_schema.py b/tests/execution/test_schema.py index 593c1cf6..7096c5fb 100644 --- a/tests/execution/test_schema.py +++ b/tests/execution/test_schema.py @@ -78,7 +78,7 @@ def __init__(self, id: int): # noqa: A002 "article": GraphQLField( BlogArticle, args={"id": GraphQLArgument(GraphQLID)}, - resolve=lambda _obj, _info, id: Article(id), + resolve=lambda _obj, _info, id: Article(id), # noqa: A006 ), "feed": GraphQLField( GraphQLList(BlogArticle), diff --git a/tests/language/test_lexer.py b/tests/language/test_lexer.py index d2d24931..a44e859d 100644 --- a/tests/language/test_lexer.py +++ b/tests/language/test_lexer.py @@ -394,8 +394,7 @@ def lexes_block_strings(): TokenKind.BLOCK_STRING, 0, 19, 1, 1, "slashes \\\\ \\/" ) assert lex_one( - '"""\n\n spans\n multiple\n' - ' lines\n\n """' + '"""\n\n spans\n multiple\n lines\n\n """' ) == Token(TokenKind.BLOCK_STRING, 0, 68, 1, 1, "spans\n multiple\n lines") def advance_line_after_lexing_multiline_block_string(): diff --git a/tests/language/test_printer.py b/tests/language/test_printer.py index b6ac41e0..42531096 100644 --- a/tests/language/test_printer.py +++ b/tests/language/test_printer.py @@ -60,8 +60,7 @@ def correctly_prints_mutation_operation_with_artifacts(): def prints_query_with_variable_directives(): query_ast_with_variable_directive = parse( - "query ($foo: TestType = { a: 123 }" - " @testDirective(if: true) @test) { id }" + "query ($foo: TestType = { a: 123 } @testDirective(if: true) @test) { id }" ) assert print_ast(query_ast_with_variable_directive) == dedent( """ diff --git a/tests/star_wars_schema.py b/tests/star_wars_schema.py index 575bf482..5f4c0809 100644 --- a/tests/star_wars_schema.py +++ b/tests/star_wars_schema.py @@ -140,8 +140,7 @@ "name": GraphQLField(GraphQLString, description="The name of the human."), "friends": GraphQLField( GraphQLList(character_interface), - description="The friends of the human," - " or an empty list if they have none.", + description="The friends of the human, or an empty list if they have none.", resolve=lambda human, _info: get_friends(human), ), "appearsIn": GraphQLField( @@ -182,8 +181,7 @@ "name": GraphQLField(GraphQLString, description="The name of the droid."), "friends": 
GraphQLField( GraphQLList(character_interface), - description="The friends of the droid," - " or an empty list if they have none.", + description="The friends of the droid, or an empty list if they have none.", resolve=lambda droid, _info: get_friends(droid), ), "appearsIn": GraphQLField( @@ -238,7 +236,7 @@ GraphQLNonNull(GraphQLString), description="id of the human" ) }, - resolve=lambda _source, _info, id: get_human(id), + resolve=lambda _source, _info, id: get_human(id), # noqa: A006 ), "droid": GraphQLField( droid_type, @@ -247,7 +245,7 @@ GraphQLNonNull(GraphQLString), description="id of the droid" ) }, - resolve=lambda _source, _info, id: get_droid(id), + resolve=lambda _source, _info, id: get_droid(id), # noqa: A006 ), }, ) diff --git a/tests/type/test_definition.py b/tests/type/test_definition.py index a8b7c24b..ac7830ef 100644 --- a/tests/type/test_definition.py +++ b/tests/type/test_definition.py @@ -198,8 +198,7 @@ def parse_literal(_node: ValueNode, _vars=None): with pytest.raises(TypeError) as exc_info: GraphQLScalarType("SomeScalar", parse_literal=parse_literal) assert str(exc_info.value) == ( - "SomeScalar must provide both" - " 'parse_value' and 'parse_literal' functions." + "SomeScalar must provide both 'parse_value' and 'parse_literal' functions." ) def pickles_a_custom_scalar_type(): diff --git a/tests/type/test_validation.py b/tests/type/test_validation.py index 087832ba..a4efe041 100644 --- a/tests/type/test_validation.py +++ b/tests/type/test_validation.py @@ -242,8 +242,7 @@ def rejects_a_schema_whose_query_root_type_is_not_an_object_type(): ) assert validate_schema(schema) == [ { - "message": "Query root type must be Object type," - " it cannot be Query.", + "message": "Query root type must be Object type, it cannot be Query.", "locations": [(2, 13)], } ] diff --git a/tests/utilities/test_extend_schema.py b/tests/utilities/test_extend_schema.py index 28ac0be4..1eb98d38 100644 --- a/tests/utilities/test_extend_schema.py +++ b/tests/utilities/test_extend_schema.py @@ -1363,8 +1363,7 @@ def does_not_allow_replacing_a_default_directive(): with pytest.raises(TypeError) as exc_info: extend_schema(schema, extend_ast) assert str(exc_info.value).startswith( - "Directive '@include' already exists in the schema." - " It cannot be redefined." + "Directive '@include' already exists in the schema. It cannot be redefined." 
) def does_not_allow_replacing_an_existing_enum_value(): diff --git a/tests/utilities/test_find_breaking_changes.py b/tests/utilities/test_find_breaking_changes.py index 24d03704..bfcc7e72 100644 --- a/tests/utilities/test_find_breaking_changes.py +++ b/tests/utilities/test_find_breaking_changes.py @@ -755,8 +755,7 @@ def should_detect_all_breaking_changes(): ), ( BreakingChangeType.TYPE_CHANGED_KIND, - "TypeThatChangesType changed from an Object type to an" - " Interface type.", + "TypeThatChangesType changed from an Object type to an Interface type.", ), ( BreakingChangeType.FIELD_REMOVED, diff --git a/tests/utilities/test_type_info.py b/tests/utilities/test_type_info.py index d23b878b..01f7e464 100644 --- a/tests/utilities/test_type_info.py +++ b/tests/utilities/test_type_info.py @@ -375,8 +375,7 @@ def leave(*args): assert print_ast(edited_ast) == print_ast( parse( - "{ human(id: 4) { name, pets { __typename } }," - " alien { __typename } }" + "{ human(id: 4) { name, pets { __typename } }, alien { __typename } }" ) ) diff --git a/tests/validation/test_validation.py b/tests/validation/test_validation.py index e8f08fe1..78efbce9 100644 --- a/tests/validation/test_validation.py +++ b/tests/validation/test_validation.py @@ -71,8 +71,7 @@ def deprecated_validates_using_a_custom_type_info(): "Cannot query field 'human' on type 'QueryRoot'. Did you mean 'human'?", "Cannot query field 'meowsVolume' on type 'Cat'." " Did you mean 'meowsVolume'?", - "Cannot query field 'barkVolume' on type 'Dog'." - " Did you mean 'barkVolume'?", + "Cannot query field 'barkVolume' on type 'Dog'. Did you mean 'barkVolume'?", ] def validates_using_a_custom_rule(): diff --git a/tox.ini b/tox.ini index 7f6b4dcb..7f2e07d4 100644 --- a/tox.ini +++ b/tox.ini @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.8,<0.9 +deps = ruff>=0.9,<0.10 commands = ruff check src tests ruff format --check src tests @@ -26,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.12,<2 + mypy>=1.14,<2 pytest>=8.3,<9 commands = mypy src tests @@ -34,8 +34,8 @@ commands = [testenv:docs] basepython = python3.12 deps = - sphinx>=7,<8 - sphinx_rtd_theme>=2.0,<3 + sphinx>=8,<9 + sphinx_rtd_theme>=3,<4 commands = sphinx-build -b html -nEW docs docs/_build/html @@ -43,8 +43,8 @@ commands = deps = pytest>=7.4,<9 pytest-asyncio>=0.21.1,<1 - pytest-benchmark>=4,<5 - pytest-cov>=4.1,<6 + pytest-benchmark>=4,<6 + pytest-cov>=4.1,<7 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 From d9c5e1dedb682264788c7c681529268422c987d1 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 19 Jan 2025 16:50:36 +0100 Subject: [PATCH 28/50] incremental delivery: add pending notifications Replicates graphql/graphql-js@fe65bc8be6813d03870ba0d7faffa733c1ba7351 --- docs/conf.py | 4 + .../execution/incremental_publisher.py | 225 ++++++++++---- tests/execution/test_defer.py | 281 +++++++++++++++--- tests/execution/test_execution_result.py | 12 +- tests/execution/test_mutations.py | 12 +- tests/execution/test_stream.py | 88 +++++- 6 files changed, 507 insertions(+), 115 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index d3de91ea..1d7afde0 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -153,6 +153,7 @@ ExperimentalIncrementalExecutionResults FieldGroup FormattedIncrementalResult +FormattedPendingResult FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType @@ -167,6 +168,7 @@ IncrementalResult InitialResultRecord Middleware +PendingResult 
StreamItemsRecord StreamRecord SubsequentDataRecord @@ -183,8 +185,10 @@ graphql.execution.incremental_publisher.DeferredFragmentRecord graphql.execution.incremental_publisher.DeferredGroupedFieldSetRecord graphql.execution.incremental_publisher.FormattedCompletedResult +graphql.execution.incremental_publisher.FormattedPendingResult graphql.execution.incremental_publisher.IncrementalPublisher graphql.execution.incremental_publisher.InitialResultRecord +graphql.execution.incremental_publisher.PendingResult graphql.execution.incremental_publisher.StreamItemsRecord graphql.execution.incremental_publisher.StreamRecord graphql.execution.Middleware diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index 18890fb3..4ba1d553 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -13,7 +13,6 @@ Collection, Iterator, NamedTuple, - Sequence, Union, ) @@ -22,6 +21,8 @@ except ImportError: # Python < 3.8 from typing_extensions import TypedDict +from ..pyutils import RefSet + if TYPE_CHECKING: from ..error import GraphQLError, GraphQLFormattedError from ..pyutils import Path @@ -55,6 +56,63 @@ suppress_key_error = suppress(KeyError) +class FormattedPendingResult(TypedDict, total=False): + """Formatted pending execution result""" + + path: list[str | int] + label: str + + +class PendingResult: + """Pending execution result""" + + path: list[str | int] + label: str | None + + __slots__ = "label", "path" + + def __init__( + self, + path: list[str | int], + label: str | None = None, + ) -> None: + self.path = path + self.label = label + + def __repr__(self) -> str: + name = self.__class__.__name__ + args: list[str] = [f"path={self.path!r}"] + if self.label: + args.append(f"label={self.label!r}") + return f"{name}({', '.join(args)})" + + @property + def formatted(self) -> FormattedPendingResult: + """Get pending result formatted according to the specification.""" + formatted: FormattedPendingResult = {"path": self.path} + if self.label is not None: + formatted["label"] = self.label + return formatted + + def __eq__(self, other: object) -> bool: + if isinstance(other, dict): + return (other.get("path") or None) == (self.path or None) and ( + other.get("label") or None + ) == (self.label or None) + + if isinstance(other, tuple): + size = len(other) + return 1 < size < 3 and (self.path, self.label)[:size] == other + return ( + isinstance(other, self.__class__) + and other.path == self.path + and other.label == self.label + ) + + def __ne__(self, other: object) -> bool: + return not self == other + + class FormattedCompletedResult(TypedDict, total=False): """Formatted completed execution result""" @@ -93,7 +151,7 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedCompletedResult: - """Get execution result formatted according to the specification.""" + """Get completed result formatted according to the specification.""" formatted: FormattedCompletedResult = {"path": self.path} if self.label is not None: formatted["label"] = self.label @@ -104,9 +162,9 @@ def formatted(self) -> FormattedCompletedResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - other.get("path") == self.path - and ("label" not in other or other["label"] == self.label) - and ("errors" not in other or other["errors"] == self.errors) + (other.get("path") or None) == (self.path or None) + and (other.get("label") or None) == (self.label or None) + and (other.get("errors") or 
None) == (self.errors or None) ) if isinstance(other, tuple): size = len(other) @@ -125,6 +183,7 @@ def __ne__(self, other: object) -> bool: class IncrementalUpdate(NamedTuple): """Incremental update""" + pending: list[PendingResult] incremental: list[IncrementalResult] completed: list[CompletedResult] @@ -181,13 +240,11 @@ def formatted(self) -> FormattedExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): - if "extensions" not in other: - return other == {"data": self.data, "errors": self.errors} - return other == { - "data": self.data, - "errors": self.errors, - "extensions": self.extensions, - } + return ( + (other.get("data") == self.data) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("extensions") or None) == (self.extensions or None) + ) if isinstance(other, tuple): if len(other) == 2: return other == (self.data, self.errors) @@ -208,40 +265,42 @@ class FormattedInitialIncrementalExecutionResult(TypedDict, total=False): data: dict[str, Any] | None errors: list[GraphQLFormattedError] + pending: list[FormattedPendingResult] hasNext: bool incremental: list[FormattedIncrementalResult] extensions: dict[str, Any] class InitialIncrementalExecutionResult: - """Initial incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. - """ + """Initial incremental execution result.""" data: dict[str, Any] | None errors: list[GraphQLError] | None + pending: list[PendingResult] has_next: bool extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "has_next" + __slots__ = "data", "errors", "extensions", "has_next", "pending" def __init__( self, data: dict[str, Any] | None = None, errors: list[GraphQLError] | None = None, + pending: list[PendingResult] | None = None, has_next: bool = False, extensions: dict[str, Any] | None = None, ) -> None: self.data = data self.errors = errors + self.pending = pending or [] self.has_next = has_next self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + if self.pending: + args.append(f"pending={self.pending!r}") if self.has_next: args.append("has_next") if self.extensions: @@ -254,6 +313,7 @@ def formatted(self) -> FormattedInitialIncrementalExecutionResult: formatted: FormattedInitialIncrementalExecutionResult = {"data": self.data} if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] + formatted["pending"] = [pending.formatted for pending in self.pending] formatted["hasNext"] = self.has_next if self.extensions is not None: formatted["extensions"] = self.extensions @@ -263,19 +323,19 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data - and other.get("errors") == self.errors - and ("hasNext" not in other or other["hasNext"] == self.has_next) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("hasNext") or None) == (self.has_next or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 + 1 < size < 6 and ( self.data, self.errors, + self.pending, self.has_next, self.extensions, )[:size] @@ -285,6 +345,7 @@ def 
__eq__(self, other: object) -> bool: isinstance(other, self.__class__) and other.data == self.data and other.errors == self.errors + and other.pending == self.pending and other.has_next == self.has_next and other.extensions == self.extensions ) @@ -356,11 +417,9 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("path") or None) == (self.path or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) @@ -435,12 +494,10 @@ def formatted(self) -> FormattedIncrementalStreamResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - other.get("items") == self.items - and other.get("errors") == self.errors - and ("path" not in other or other["path"] == self.path) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + (other.get("items") or None) == (self.items or None) + and (other.get("errors") or None) == (self.errors or None) + and (other.get("path", None) == (self.path or None)) + and (other.get("extensions", None) == (self.extensions or None)) ) if isinstance(other, tuple): size = len(other) @@ -472,33 +529,33 @@ class FormattedSubsequentIncrementalExecutionResult(TypedDict, total=False): """Formatted subsequent incremental execution result""" hasNext: bool + pending: list[FormattedPendingResult] incremental: list[FormattedIncrementalResult] completed: list[FormattedCompletedResult] extensions: dict[str, Any] class SubsequentIncrementalExecutionResult: - """Subsequent incremental execution result. - - - ``has_next`` is True if a future payload is expected. - - ``incremental`` is a list of the results from defer/stream directives. 
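An aside on the ``or None`` normalization introduced in the comparisons above: absent keys, ``None`` and empty values now all compare equal. A minimal sketch of the resulting semantics, using illustrative values with the classes defined in this module:

    result = IncrementalDeferResult(data={"name": "Luke"}, path=["hero"])
    assert result == {"data": {"name": "Luke"}, "path": ["hero"]}
    # an explicitly empty "errors" list also compares equal to errors=None
    assert result == {"data": {"name": "Luke"}, "path": ["hero"], "errors": []}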
- """ + """Subsequent incremental execution result.""" - __slots__ = "completed", "extensions", "has_next", "incremental" + __slots__ = "completed", "extensions", "has_next", "incremental", "pending" has_next: bool - incremental: Sequence[IncrementalResult] | None - completed: Sequence[CompletedResult] | None + pending: list[PendingResult] | None + incremental: list[IncrementalResult] | None + completed: list[CompletedResult] | None extensions: dict[str, Any] | None def __init__( self, has_next: bool = False, - incremental: Sequence[IncrementalResult] | None = None, - completed: Sequence[CompletedResult] | None = None, + pending: list[PendingResult] | None = None, + incremental: list[IncrementalResult] | None = None, + completed: list[CompletedResult] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.has_next = has_next + self.pending = pending or [] self.incremental = incremental self.completed = completed self.extensions = extensions @@ -508,6 +565,8 @@ def __repr__(self) -> str: args: list[str] = [] if self.has_next: args.append("has_next") + if self.pending: + args.append(f"pending[{len(self.pending)}]") if self.incremental: args.append(f"incremental[{len(self.incremental)}]") if self.completed: @@ -521,6 +580,8 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: """Get execution result formatted according to the specification.""" formatted: FormattedSubsequentIncrementalExecutionResult = {} formatted["hasNext"] = self.has_next + if self.pending: + formatted["pending"] = [result.formatted for result in self.pending] if self.incremental: formatted["incremental"] = [result.formatted for result in self.incremental] if self.completed: @@ -532,22 +593,19 @@ def formatted(self) -> FormattedSubsequentIncrementalExecutionResult: def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( - ("hasNext" in other and other["hasNext"] == self.has_next) - and ( - "incremental" not in other - or other["incremental"] == self.incremental - ) - and ("completed" not in other or other["completed"] == self.completed) - and ( - "extensions" not in other or other["extensions"] == self.extensions - ) + (other.get("hasNext") or None) == (self.has_next or None) + and (other.get("pending") or None) == (self.pending or None) + and (other.get("incremental") or None) == (self.incremental or None) + and (other.get("completed") or None) == (self.completed or None) + and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 + 1 < size < 6 and ( self.has_next, + self.pending, self.incremental, self.completed, self.extensions, @@ -557,6 +615,7 @@ def __eq__(self, other: object) -> bool: return ( isinstance(other, self.__class__) and other.has_next == self.has_next + and self.pending == other.pending and other.incremental == self.incremental and other.completed == self.completed and other.extensions == self.extensions @@ -729,11 +788,19 @@ def build_data_response( error.message, ) ) - if self._pending: + pending = self._pending + if pending: + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet( + subsequent_result_record.stream_record + if isinstance(subsequent_result_record, StreamItemsRecord) + else subsequent_result_record + for subsequent_result_record in pending + ) return ExperimentalIncrementalExecutionResults( initial_result=InitialIncrementalExecutionResult( data, errors, + pending=self._pending_sources_to_results(pending_sources), has_next=True, ), 
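For orientation, this is roughly what such an initial payload looks like once formatted; the constructor values are illustrative only, the classes are the ones defined above:

    initial = InitialIncrementalExecutionResult(
        data={"hero": {"id": "1"}},
        pending=[PendingResult(["hero"], "DeferName")],
        has_next=True,
    )
    assert initial.formatted == {
        "data": {"hero": {"id": "1"}},
        "pending": [{"path": ["hero"], "label": "DeferName"}],
        "hasNext": True,
    }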
subsequent_results=self._subscribe(), @@ -783,6 +850,19 @@ def filter( if early_returns: self._add_task(gather(*early_returns)) + def _pending_sources_to_results( + self, + pending_sources: RefSet[DeferredFragmentRecord | StreamRecord], + ) -> list[PendingResult]: + """Convert pending sources to pending results.""" + pending_results: list[PendingResult] = [] + for pending_source in pending_sources: + pending_source.pending_sent = True + pending_results.append( + PendingResult(pending_source.path, pending_source.label) + ) + return pending_results + async def _subscribe( self, ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]: @@ -854,14 +934,18 @@ def _get_incremental_result( ) -> SubsequentIncrementalExecutionResult | None: """Get the incremental result with the completed records.""" update = self._process_pending(completed_records) - incremental, completed = update.incremental, update.completed + pending, incremental, completed = ( + update.pending, + update.incremental, + update.completed, + ) has_next = bool(self._pending) if not incremental and not completed and has_next: return None return SubsequentIncrementalExecutionResult( - has_next, incremental or None, completed or None + has_next, pending or None, incremental or None, completed or None ) def _process_pending( @@ -869,6 +953,7 @@ def _process_pending( completed_records: Collection[SubsequentResultRecord], ) -> IncrementalUpdate: """Process the pending records.""" + new_pending_sources: RefSet[DeferredFragmentRecord | StreamRecord] = RefSet() incremental_results: list[IncrementalResult] = [] completed_results: list[CompletedResult] = [] to_result = self._completed_record_to_result @@ -876,13 +961,20 @@ def _process_pending( for child in subsequent_result_record.children: if child.filtered: continue + pending_source: DeferredFragmentRecord | StreamRecord = ( + child.stream_record + if isinstance(child, StreamItemsRecord) + else child + ) + if not pending_source.pending_sent: + new_pending_sources.add(pending_source) self._publish(child) incremental_result: IncrementalResult if isinstance(subsequent_result_record, StreamItemsRecord): if subsequent_result_record.is_final_record: - completed_results.append( - to_result(subsequent_result_record.stream_record) - ) + stream_record = subsequent_result_record.stream_record + new_pending_sources.discard(stream_record) + completed_results.append(to_result(stream_record)) if subsequent_result_record.is_completed_async_iterator: # async iterable resolver finished but there may be pending payload continue @@ -895,6 +987,7 @@ def _process_pending( ) incremental_results.append(incremental_result) else: + new_pending_sources.discard(subsequent_result_record) completed_results.append(to_result(subsequent_result_record)) if subsequent_result_record.errors: continue @@ -909,7 +1002,11 @@ def _process_pending( deferred_grouped_field_set_record.path, ) incremental_results.append(incremental_result) - return IncrementalUpdate(incremental_results, completed_results) + return IncrementalUpdate( + self._pending_sources_to_results(new_pending_sources), + incremental_results, + completed_results, + ) @staticmethod def _completed_record_to_result( @@ -1052,6 +1149,7 @@ class DeferredFragmentRecord: deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None] errors: list[GraphQLError] filtered: bool + pending_sent: bool _pending: dict[DeferredGroupedFieldSetRecord, None] def __init__(self, path: Path | None = None, label: str | None = None) -> None: @@ -1059,6 +1157,7 @@ def 
__init__(self, path: Path | None = None, label: str | None = None) -> None:
         self.label = label
         self.children = {}
         self.filtered = False
+        self.pending_sent = False
         self.deferred_grouped_field_set_records = {}
         self.errors = []
         self._pending = {}
@@ -1080,6 +1179,7 @@ class StreamRecord:
     path: list[str | int]
     errors: list[GraphQLError]
     early_return: Callable[[], Awaitable[Any]] | None
+    pending_sent: bool
 
     def __init__(
         self,
@@ -1091,6 +1191,7 @@ def __init__(
         self.label = label
         self.errors = []
         self.early_return = early_return
+        self.pending_sent = False
 
     def __repr__(self) -> str:
         name = self.__class__.__name__
diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py
index d6d17105..2de10173 100644
--- a/tests/execution/test_defer.py
+++ b/tests/execution/test_defer.py
@@ -1,7 +1,7 @@
 from __future__ import annotations
 
 from asyncio import sleep
-from typing import Any, AsyncGenerator, NamedTuple
+from typing import Any, AsyncGenerator, NamedTuple, cast
 
 import pytest
 
@@ -10,6 +10,7 @@
     ExecutionResult,
     ExperimentalIncrementalExecutionResults,
     IncrementalDeferResult,
+    IncrementalResult,
     InitialIncrementalExecutionResult,
     SubsequentIncrementalExecutionResult,
     execute,
@@ -19,6 +20,7 @@
     CompletedResult,
     DeferredFragmentRecord,
     DeferredGroupedFieldSetRecord,
+    PendingResult,
     StreamItemsRecord,
     StreamRecord,
 )
@@ -193,6 +195,31 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]:
 
 
 def describe_execute_defer_directive():
+    def can_format_and_print_pending_result():
+        result = PendingResult([])
+        assert result.formatted == {"path": []}
+        assert str(result) == "PendingResult(path=[])"
+
+        result = PendingResult(path=["foo", 1], label="bar")
+        assert result.formatted == {
+            "path": ["foo", 1],
+            "label": "bar",
+        }
+        assert str(result) == "PendingResult(path=['foo', 1], label='bar')"
+
+    def can_compare_pending_result():
+        args: dict[str, Any] = {"path": ["foo", 1], "label": "bar"}
+        result = PendingResult(**args)
+        assert result == PendingResult(**args)
+        assert result != PendingResult(**modified_args(args, path=["foo", 2]))
+        assert result != PendingResult(**modified_args(args, label="baz"))
+        assert result == tuple(args.values())
+        assert result != tuple(args.values())[:1]
+        assert result != tuple(args.values())[:1] + ("baz",)
+        assert result == args
+        assert result != {**args, "path": ["foo", 2]}
+        assert result != {**args, "label": "baz"}
+
     def can_format_and_print_completed_result():
         result = CompletedResult([])
         assert result.formatted == {"path": []}
@@ -224,10 +251,9 @@ def can_compare_completed_result():
         assert result == tuple(args.values())[:2]
         assert result != tuple(args.values())[:1]
         assert result == args
-        assert result == dict(list(args.items())[:2])
-        assert result != dict(
-            list(args.items())[:1] + [("errors", [GraphQLError("oops")])]
-        )
+        assert result != {**args, "path": ["foo", 2]}
+        assert result != {**args, "label": "baz"}
+        assert result != {**args, "errors": [{"message": "oops"}]}
 
     def can_format_and_print_incremental_defer_result():
         result = IncrementalDeferResult()
@@ -276,20 +302,20 @@ def can_compare_incremental_defer_result():
         assert result != tuple(args.values())[:1]
         assert result != ({"hello": "world"}, [])
         assert result == args
-        assert result == dict(list(args.items())[:2])
-        assert result == dict(list(args.items())[:3])
-        assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])])
-        assert result != {**args, "extensions": {"baz": 3}}
+        assert result != {**args, "data": {"hello": "foo"}}
+        assert result != 
{**args, "errors": []} + assert result != {**args, "path": ["foo", 2]} + assert result != {**args, "extensions": {"baz": 1}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() - assert result.formatted == {"data": None, "hasNext": False} + assert result.formatted == {"data": None, "hasNext": False, "pending": []} assert ( str(result) == "InitialIncrementalExecutionResult(data=None, errors=None)" ) result = InitialIncrementalExecutionResult(has_next=True) - assert result.formatted == {"data": None, "hasNext": True} + assert result.formatted == {"data": None, "hasNext": True, "pending": []} assert ( str(result) == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" @@ -298,25 +324,28 @@ def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], + pending=[PendingResult(["bar"])], has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, - "errors": [GraphQLError("msg")], + "errors": [{"message": "msg"}], + "pending": [{"path": ["bar"]}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == "InitialIncrementalExecutionResult(" - "data={'hello': 'world'}, errors=[GraphQLError('msg')], has_next," - " extensions={'baz': 2})" + "data={'hello': 'world'}, errors=[GraphQLError('msg')]," + " pending=[PendingResult(path=['bar'])], has_next, extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], "has_next": True, "extensions": {"baz": 2}, } @@ -328,6 +357,9 @@ def can_compare_initial_incremental_execution_result(): assert result != InitialIncrementalExecutionResult( **modified_args(args, errors=[]) ) + assert result != InitialIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) assert result != InitialIncrementalExecutionResult( **modified_args(args, has_next=False) ) @@ -335,6 +367,7 @@ def can_compare_initial_incremental_execution_result(): **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:5] assert result == tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] @@ -344,20 +377,40 @@ def can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], "hasNext": True, "extensions": {"baz": 2}, } - assert result == { + assert result != { + "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { + "data": {"hello": "world"}, + "pending": [PendingResult(["bar"])], + "hasNext": True, + "extensions": {"baz": 2}, + } + assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], "hasNext": True, + "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "hasNext": False, + "pending": [PendingResult(["bar"])], "extensions": {"baz": 2}, } + assert result != { + "data": {"hello": "world"}, + "errors": [GraphQLError("msg")], + "pending": [PendingResult(["bar"])], + "hasNext": True, + } def can_format_and_print_subsequent_incremental_execution_result(): result = SubsequentIncrementalExecutionResult() @@ -368,36 +421,44 @@ def 
can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - incremental = [IncrementalDeferResult()] + pending = [PendingResult(["bar"])] + incremental = [cast(IncrementalResult, IncrementalDeferResult())] completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( has_next=True, + pending=pending, incremental=incremental, completed=completed, extensions={"baz": 2}, ) assert result.formatted == { "hasNext": True, + "pending": [{"path": ["bar"]}], "incremental": [{"data": None}], "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } assert ( str(result) == "SubsequentIncrementalExecutionResult(has_next," - " incremental[1], completed[1], extensions={'baz': 2})" + " pending[1], incremental[1], completed[1], extensions={'baz': 2})" ) def can_compare_subsequent_incremental_execution_result(): - incremental = [IncrementalDeferResult()] + pending = [PendingResult(["bar"])] + incremental = [cast(IncrementalResult, IncrementalDeferResult())] completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { "has_next": True, + "pending": pending, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } result = SubsequentIncrementalExecutionResult(**args) assert result == SubsequentIncrementalExecutionResult(**args) + assert result != SubsequentIncrementalExecutionResult( + **modified_args(args, pending=[]) + ) assert result != SubsequentIncrementalExecutionResult( **modified_args(args, incremental=[]) ) @@ -408,22 +469,47 @@ def can_compare_subsequent_incremental_execution_result(): **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] assert result != (incremental, False) assert result == { "hasNext": True, + "pending": pending, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } - assert result == {"incremental": incremental, "hasNext": True} assert result != { - "hasNext": False, + "pending": pending, + "incremental": incremental, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, "incremental": incremental, "completed": completed, "extensions": {"baz": 2}, } + assert result != { + "hasNext": True, + "pending": pending, + "completed": completed, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "extensions": {"baz": 2}, + } + assert result != { + "hasNext": True, + "pending": pending, + "incremental": incremental, + "completed": completed, + } def can_print_deferred_grouped_field_set_record(): record = DeferredGroupedFieldSetRecord([], {}, False) @@ -483,7 +569,11 @@ async def can_defer_fragments_containing_scalar_types(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": ["hero"]}], @@ -535,7 +625,11 @@ async def does_not_disable_defer_with_null_if_argument(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { 
"incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": ["hero"]}], @@ -581,7 +675,11 @@ async def can_defer_fragments_on_the_top_level_query_field(): result = await complete(document) assert result == [ - {"data": {}, "hasNext": True}, + { + "data": {}, + "pending": [{"path": [], "label": "DeferQuery"}], + "hasNext": True, + }, { "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], "completed": [{"path": [], "label": "DeferQuery"}], @@ -606,7 +704,11 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ - {"data": {}, "hasNext": True}, + { + "data": {}, + "pending": [{"path": [], "label": "DeferQuery"}], + "hasNext": True, + }, { "incremental": [ { @@ -649,7 +751,14 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + { + "data": {"hero": {}}, + "pending": [ + {"path": ["hero"], "label": "DeferTop"}, + {"path": ["hero"], "label": "DeferNested"}, + ], + "hasNext": True, + }, { "incremental": [ { @@ -693,7 +802,11 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): result = await complete(document) assert result == [ - {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, { "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, @@ -718,7 +831,11 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first result = await complete(document) assert result == [ - {"data": {"hero": {"name": "Luke"}}, "hasNext": True}, + { + "data": {"hero": {"name": "Luke"}}, + "pending": [{"path": ["hero"], "label": "DeferTop"}], + "hasNext": True, + }, { "completed": [{"path": ["hero"], "label": "DeferTop"}], "hasNext": False, @@ -742,7 +859,11 @@ async def can_defer_an_inline_fragment(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"], "label": "InlineDeferred"}], + "hasNext": True, + }, { "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], "completed": [{"path": ["hero"], "label": "InlineDeferred"}], @@ -769,7 +890,7 @@ async def does_not_emit_empty_defer_fragments(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "hasNext": True}, + {"data": {"hero": {}}, "pending": [{"path": ["hero"]}], "hasNext": True}, { "completed": [{"path": ["hero"]}], "hasNext": False, @@ -797,6 +918,10 @@ async def separately_emits_defer_fragments_different_labels_varying_fields(): assert result == [ { "data": {"hero": {}}, + "pending": [ + {"path": ["hero"], "label": "DeferID"}, + {"path": ["hero"], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -841,6 +966,10 @@ async def separately_emits_defer_fragments_different_labels_varying_subfields(): assert result == [ { "data": {}, + "pending": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -901,6 +1030,10 @@ async def resolve(value): assert result == [ { "data": {}, + "pending": [ + {"path": [], "label": "DeferID"}, + {"path": [], "label": "DeferName"}, + ], "hasNext": True, }, { @@ -949,6 +1082,10 @@ async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): assert result 
== [ { "data": {"hero": {}}, + "pending": [ + {"path": [], "label": "DeferName"}, + {"path": ["hero"], "label": "DeferID"}, + ], "hasNext": True, }, { @@ -991,9 +1128,11 @@ async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level assert result == [ { "data": {}, + "pending": [{"path": [], "label": "DeferName"}], "hasNext": True, }, { + "pending": [{"path": ["hero"], "label": "DeferID"}], "incremental": [ { "data": { @@ -1055,7 +1194,24 @@ async def can_deduplicate_multiple_defers_on_the_same_object(): result = await complete(document) assert result == [ - {"data": {"hero": {"friends": [{}, {}, {}]}}, "hasNext": True}, + { + "data": {"hero": {"friends": [{}, {}, {}]}}, + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + {"path": ["hero", "friends", 2]}, + ], + "hasNext": True, + }, { "incremental": [ { @@ -1139,6 +1295,7 @@ async def deduplicates_fields_present_in_the_initial_payload(): "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1182,9 +1339,11 @@ async def deduplicates_fields_present_in_a_parent_defer_payload(): assert result == [ { "data": {"hero": {}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": { @@ -1277,9 +1436,11 @@ async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): }, }, }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject"]}], "incremental": [ { "data": {"bar": "bar"}, @@ -1290,6 +1451,7 @@ async def deduplicates_fields_with_deferred_fragments_at_multiple_levels(): "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": {"baz": "baz"}, @@ -1346,9 +1508,14 @@ async def deduplicates_fields_from_deferred_fragments_branches_same_level(): assert result == [ { "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, + "pending": [ + {"path": ["hero"]}, + {"path": ["hero", "nestedObject", "deeperObject"]}, + ], "hasNext": True, }, { + "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], "incremental": [ { "data": { @@ -1417,6 +1584,7 @@ async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): assert result == [ { "data": {"a": {"b": {"c": {"d": "d"}}}}, + "pending": [{"path": []}, {"path": ["a", "b"]}], "hasNext": True, }, { @@ -1470,6 +1638,7 @@ async def nulls_cross_defer_boundaries_null_first(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1540,6 +1709,7 @@ async def nulls_cross_defer_boundaries_value_first(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1613,6 +1783,7 @@ async def filters_a_payload_with_a_null_that_cannot_be_merged(): assert result == [ { "data": {"a": {}}, + "pending": [{"path": []}, {"path": ["a"]}], "hasNext": True, }, { @@ -1704,6 +1875,7 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): assert result == [ { "data": {}, + "pending": [{"path": []}], "hasNext": True, }, 
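The expected payload lists in these tests are collected by the ``complete`` helper, which drives ``experimental_execute_incrementally`` and gathers each formatted result. A self-contained sketch of the same flow; the schema, field and query here are made up for illustration:

    import asyncio

    from graphql import (
        GraphQLField,
        GraphQLObjectType,
        GraphQLSchema,
        GraphQLString,
        parse,
    )
    from graphql.execution import (
        ExperimentalIncrementalExecutionResults,
        experimental_execute_incrementally,
    )

    schema = GraphQLSchema(
        GraphQLObjectType(
            "Query",
            {"hello": GraphQLField(GraphQLString, resolve=lambda *_args: "world")},
        )
    )
    document = parse("{ ... @defer { hello } }")

    async def main() -> None:
        result = experimental_execute_incrementally(schema, document)
        # with async resolvers this may come back as an awaitable instead
        assert isinstance(result, ExperimentalIncrementalExecutionResults)
        print(result.initial_result.formatted)  # includes "pending" and "hasNext"
        async for patch in result.subsequent_results:
            print(patch.formatted)  # "incremental"/"completed" payloads follow

    asyncio.run(main())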
{ @@ -1757,6 +1929,7 @@ async def deduplicates_list_fields(): ] } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1793,6 +1966,7 @@ async def deduplicates_async_iterable_list_fields(): assert result == [ { "data": {"hero": {"friends": [{"name": "Han"}]}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1834,6 +2008,7 @@ async def resolve_friends(_info): assert result == [ { "data": {"hero": {"friends": []}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1872,6 +2047,7 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): ] } }, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1918,6 +2094,7 @@ async def deduplicates_list_fields_that_return_empty_lists(): assert result == [ { "data": {"hero": {"friends": []}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1950,6 +2127,7 @@ async def deduplicates_null_object_fields(): assert result == [ { "data": {"hero": {"nestedObject": None}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -1986,6 +2164,7 @@ async def resolve_nested_object(_info): assert result == [ { "data": {"hero": {"nestedObject": {"name": "foo"}}}, + "pending": [{"path": ["hero"]}], "hasNext": True, }, { @@ -2012,7 +2191,11 @@ async def handles_errors_thrown_in_deferred_fragments(): result = await complete(document, {"hero": {**hero, "name": Resolvers.bad}}) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "incremental": [ { @@ -2052,7 +2235,11 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): ) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "completed": [ { @@ -2122,7 +2309,11 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): ) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, + { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, { "completed": [ { @@ -2165,8 +2356,17 @@ async def returns_payloads_in_correct_order(): result = await complete(document, {"hero": {**hero, "name": Resolvers.slow}}) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "incremental": [ { "data": {"name": "slow", "friends": [{}, {}, {}]}, @@ -2224,8 +2424,17 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): result = await complete(document) assert result == [ - {"data": {"hero": {"id": "1"}}, "hasNext": True}, { + "data": {"hero": {"id": "1"}}, + "pending": [{"path": ["hero"]}], + "hasNext": True, + }, + { + "pending": [ + {"path": ["hero", "friends", 0]}, + {"path": ["hero", "friends", 1]}, + {"path": ["hero", "friends", 2]}, + ], "incremental": [ { "data": {"name": "Luke", "friends": [{}, {}, {}]}, diff --git a/tests/execution/test_execution_result.py b/tests/execution/test_execution_result.py index 162bd00d..96935d99 100644 --- a/tests/execution/test_execution_result.py +++ b/tests/execution/test_execution_result.py @@ -55,15 +55,15 @@ def compares_to_dict(): res = ExecutionResult(data, errors) assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, 
"extensions": None} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} + assert res == {"data": data, "errors": errors, "extensions": {}} + assert res != {"errors": errors} + assert res != {"data": data} assert res != {"data": data, "errors": errors, "extensions": extensions} res = ExecutionResult(data, errors, extensions) - assert res == {"data": data, "errors": errors} assert res == {"data": data, "errors": errors, "extensions": extensions} - assert res != {"data": data, "errors": None} - assert res != {"data": None, "errors": errors} - assert res != {"data": data, "errors": errors, "extensions": None} + assert res != {"errors": errors, "extensions": extensions} + assert res != {"data": data, "extensions": extensions} + assert res != {"data": data, "errors": errors} def compares_to_tuple(): res = ExecutionResult(data, errors) diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index f5030c88..987eba45 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -242,7 +242,11 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): patches.append(patch.formatted) assert patches == [ - {"data": {"first": {}, "second": {"theNumber": 2}}, "hasNext": True}, + { + "data": {"first": {}, "second": {"theNumber": 2}}, + "pending": [{"path": ["first"], "label": "defer-label"}], + "hasNext": True, + }, { "incremental": [ { @@ -313,7 +317,11 @@ async def mutation_with_defer_is_not_executed_serially(): patches.append(patch.formatted) assert patches == [ - {"data": {"second": {"theNumber": 2}}, "hasNext": True}, + { + "data": {"second": {"theNumber": 2}}, + "pending": [{"path": [], "label": "defer-label"}], + "hasNext": True, + }, { "incremental": [ { diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 5454e826..4331eaa4 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -199,9 +199,9 @@ def can_compare_incremental_stream_result(): assert result != tuple(args.values())[:1] assert result != (["hello", "world"], []) assert result == args - assert result == dict(list(args.items())[:2]) - assert result == dict(list(args.items())[:3]) - assert result != dict(list(args.items())[:2] + [("path", ["foo", 2])]) + assert result != {**args, "items": ["hello", "foo"]} + assert result != {**args, "errors": []} + assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": {"baz": 1}} @pytest.mark.asyncio @@ -215,6 +215,7 @@ async def can_stream_a_list_field(): "data": { "scalarList": ["apple"], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -239,6 +240,7 @@ async def can_use_default_value_of_initial_count(): "data": { "scalarList": [], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -305,6 +307,7 @@ async def returns_label_from_stream_directive(): "data": { "scalarList": ["apple"], }, + "pending": [{"path": ["scalarList"], "label": "scalar-stream"}], "hasNext": True, }, { @@ -375,6 +378,7 @@ async def does_not_disable_stream_with_null_if_argument(): "data": { "scalarList": ["apple", "banana"], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -407,6 +411,7 @@ async def can_stream_multi_dimensional_lists(): "data": { "scalarListList": [["apple", "apple", "apple"]], }, + "pending": [{"path": ["scalarListList"]}], "hasNext": True, }, { @@ -458,6 +463,7 @@ async def await_friend(f): {"name": "Han", "id": "2"}, ], }, + "pending": [{"path": 
["friendList"]}], "hasNext": True, }, { @@ -495,6 +501,7 @@ async def await_friend(f): assert result == [ { "data": {"friendList": []}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -562,6 +569,7 @@ async def get_id(f): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -612,6 +620,7 @@ async def await_friend(f, i): "path": ["friendList", 1], } ], + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -655,6 +664,7 @@ async def await_friend(f, i): assert result == [ { "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -706,6 +716,7 @@ async def friend_list(_info): assert result == [ { "data": {"friendList": []}, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -767,6 +778,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -840,6 +852,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, }, @@ -914,6 +927,7 @@ async def friend_list(_info): "data": { "friendList": [{"name": "Luke", "id": "1"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -953,6 +967,7 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): "data": { "nonNullFriendList": [{"name": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -998,6 +1013,7 @@ async def friend_list(_info): "data": { "nonNullFriendList": [{"name": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1037,6 +1053,7 @@ async def scalar_list(_info): "data": { "scalarList": ["Luke"], }, + "pending": [{"path": ["scalarList"]}], "hasNext": True, }, { @@ -1090,6 +1107,7 @@ def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1151,6 +1169,7 @@ def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1213,6 +1232,7 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1263,6 +1283,7 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1315,6 +1336,7 @@ async def get_friends(_info): "data": { "friendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1381,6 +1403,7 @@ async def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1443,6 +1466,7 @@ async def __anext__(self): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1512,6 +1536,7 @@ async def aclose(self): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, + "pending": [{"path": ["nonNullFriendList"]}], "hasNext": True, }, { @@ -1666,6 +1691,10 @@ async def friend_list(_info): "otherNestedObject": {}, "nestedObject": {"nestedFriendList": []}, }, + "pending": [ + {"path": ["otherNestedObject"]}, + {"path": ["nestedObject", "nestedFriendList"]}, + ], "hasNext": True, }, { @@ -1738,6 +1767,7 @@ async def friend_list(_info): "data": { 
"nestedObject": {}, }, + "pending": [{"path": ["nestedObject"]}], "hasNext": True, }, { @@ -1801,6 +1831,7 @@ async def friend_list(_info): "data": { "friendList": [], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -1875,7 +1906,11 @@ async def iterable(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"path": ["nestedObject"]}], + "hasNext": True, + } assert not finished @@ -1944,6 +1979,7 @@ async def get_friends(_info): "data": { "friendList": [{"id": "1", "name": "Luke"}], }, + "pending": [{"path": ["friendList"]}], "hasNext": True, }, { @@ -2012,6 +2048,10 @@ async def get_nested_friend_list(_info): "nestedFriendList": [], }, }, + "pending": [ + {"path": ["nestedObject"]}, + {"path": ["nestedObject", "nestedFriendList"]}, + ], "hasNext": True, }, { @@ -2082,11 +2122,16 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"nestedObject": {}}, "hasNext": True} + assert result1 == { + "data": {"nestedObject": {}}, + "pending": [{"path": ["nestedObject"]}], + "hasNext": True, + } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["nestedObject", "nestedFriendList"]}], "incremental": [ { "data": {"scalarField": "slow", "nestedFriendList": []}, @@ -2166,11 +2211,19 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"path": ["friendList", 0], "label": "DeferName"}, + {"path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } resolve_iterable.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["friendList", 1], "label": "DeferName"}], "incremental": [ { "data": {"name": "Luke"}, @@ -2251,11 +2304,19 @@ async def get_friends(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [ + {"path": ["friendList", 0], "label": "DeferName"}, + {"path": ["friendList"], "label": "stream-label"}, + ], + "hasNext": True, + } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { + "pending": [{"path": ["friendList", 1], "label": "DeferName"}], "incremental": [ { "data": {"name": "Luke"}, @@ -2322,7 +2383,11 @@ async def iterable(_info): iterator = execute_result.subsequent_results result1 = execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "hasNext": True, + } await iterator.aclose() with pytest.raises(StopAsyncIteration): @@ -2369,6 +2434,7 @@ async def __anext__(self): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + "pending": [{"path": ["friendList"]}], "hasNext": True, } @@ -2408,7 +2474,11 @@ async def iterable(_info): iterator = execute_result.subsequent_results result1 = 
execute_result.initial_result - assert result1 == {"data": {"friendList": [{"id": "1"}]}, "hasNext": True} + assert result1 == { + "data": {"friendList": [{"id": "1"}]}, + "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "hasNext": True, + } with pytest.raises(RuntimeError, match="bad"): await iterator.athrow(RuntimeError("bad")) From 9445c0b8f1bc585e88cf3c221b435cf779007e09 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 14:18:32 +0100 Subject: [PATCH 29/50] fix(types): path is required within incremental results Replicates graphql/graphql-js@d1d66a34b697c24efd549150c3a5df9bc01be5af --- .../execution/incremental_publisher.py | 62 ++++++++++--------- tests/execution/test_defer.py | 14 ++--- tests/execution/test_stream.py | 8 +-- 3 files changed, 43 insertions(+), 41 deletions(-) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index 4ba1d553..dba04461 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -364,39 +364,39 @@ class ExperimentalIncrementalExecutionResults(NamedTuple): class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" - data: dict[str, Any] | None - errors: list[GraphQLFormattedError] + data: dict[str, Any] path: list[str | int] + errors: list[GraphQLFormattedError] extensions: dict[str, Any] class IncrementalDeferResult: """Incremental deferred execution result""" - data: dict[str, Any] | None + data: dict[str, Any] + path: list[str | int] errors: list[GraphQLError] | None - path: list[str | int] | None extensions: dict[str, Any] | None __slots__ = "data", "errors", "extensions", "path" def __init__( self, - data: dict[str, Any] | None = None, + data: dict[str, Any], + path: list[str | int], errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data - self.errors = errors self.path = path + self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") + args: list[str] = [f"data={self.data!r}, path={self.path!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -404,11 +404,12 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedIncrementalDeferResult: """Get execution result formatted according to the specification.""" - formatted: FormattedIncrementalDeferResult = {"data": self.data} + formatted: FormattedIncrementalDeferResult = { + "data": self.data, + "path": self.path, + } if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -442,39 +443,39 @@ def __ne__(self, other: object) -> bool: class FormattedIncrementalStreamResult(TypedDict, total=False): """Formatted incremental stream execution result""" - items: list[Any] | None - errors: list[GraphQLFormattedError] + items: list[Any] path: list[str | int] + errors: list[GraphQLFormattedError] extensions: dict[str, Any] class IncrementalStreamResult: """Incremental streamed execution result""" - items: list[Any] | None 
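With ``data``/``items`` and ``path`` now required, every incremental result spells out up front where its payload belongs. A small sketch of the stricter constructors, values illustrative:

    deferred = IncrementalDeferResult({"name": "Luke"}, ["hero"])
    assert deferred.formatted == {"data": {"name": "Luke"}, "path": ["hero"]}

    streamed = IncrementalStreamResult(["apple"], ["scalarList"])
    assert streamed.formatted == {"items": ["apple"], "path": ["scalarList"]}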
+ items: list[Any] + path: list[str | int] errors: list[GraphQLError] | None - path: list[str | int] | None extensions: dict[str, Any] | None __slots__ = "errors", "extensions", "items", "label", "path" def __init__( self, - items: list[Any] | None = None, + items: list[Any], + path: list[str | int], errors: list[GraphQLError] | None = None, - path: list[str | int] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.items = items - self.errors = errors self.path = path + self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"items={self.items!r}, errors={self.errors!r}"] - if self.path: - args.append(f"path={self.path!r}") + args: list[str] = [f"items={self.items!r}, path={self.path!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") if self.extensions: args.append(f"extensions={self.extensions}") return f"{name}({', '.join(args)})" @@ -482,11 +483,12 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedIncrementalStreamResult: """Get execution result formatted according to the specification.""" - formatted: FormattedIncrementalStreamResult = {"items": self.items} - if self.errors is not None: + formatted: FormattedIncrementalStreamResult = { + "items": self.items, + "path": self.path, + } + if self.errors: formatted["errors"] = [error.formatted for error in self.errors] - if self.path is not None: - formatted["path"] = self.path if self.extensions is not None: formatted["extensions"] = self.extensions return formatted @@ -982,8 +984,8 @@ def _process_pending( continue incremental_result = IncrementalStreamResult( subsequent_result_record.items, - subsequent_result_record.errors or None, subsequent_result_record.stream_record.path, + subsequent_result_record.errors or None, ) incremental_results.append(incremental_result) else: @@ -997,9 +999,9 @@ def _process_pending( if not deferred_grouped_field_set_record.sent: deferred_grouped_field_set_record.sent = True incremental_result = IncrementalDeferResult( - deferred_grouped_field_set_record.data, - deferred_grouped_field_set_record.errors or None, + deferred_grouped_field_set_record.data, # type: ignore deferred_grouped_field_set_record.path, + deferred_grouped_field_set_record.errors or None, ) incremental_results.append(incremental_result) return IncrementalUpdate( diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 2de10173..7f7c0c01 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -256,9 +256,9 @@ def can_compare_completed_result(): assert result != {**args, "errors": [{"message": "oops"}]} def can_format_and_print_incremental_defer_result(): - result = IncrementalDeferResult() - assert result.formatted == {"data": None} - assert str(result) == "IncrementalDeferResult(data=None, errors=None)" + result = IncrementalDeferResult(data={}, path=[]) + assert result.formatted == {"data": {}, "path": []} + assert str(result) == "IncrementalDeferResult(data={}, path=[])" result = IncrementalDeferResult( data={"hello": "world"}, @@ -274,7 +274,7 @@ def can_format_and_print_incremental_defer_result(): } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" + " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" ) # noinspection PyTypeChecker @@ -422,7 +422,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert str(result) == 
"SubsequentIncrementalExecutionResult(has_next)" pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult())] + incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] completed = [CompletedResult(["foo", 1])] result = SubsequentIncrementalExecutionResult( has_next=True, @@ -434,7 +434,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): assert result.formatted == { "hasNext": True, "pending": [{"path": ["bar"]}], - "incremental": [{"data": None}], + "incremental": [{"data": {"one": 1}, "path": [1]}], "completed": [{"path": ["foo", 1]}], "extensions": {"baz": 2}, } @@ -445,7 +445,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): def can_compare_subsequent_incremental_execution_result(): pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult())] + incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] completed = [CompletedResult(path=["foo", 1])] args: dict[str, Any] = { "has_next": True, diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 4331eaa4..487817b4 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -148,9 +148,9 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_stream_directive(): def can_format_and_print_incremental_stream_result(): - result = IncrementalStreamResult() - assert result.formatted == {"items": None} - assert str(result) == "IncrementalStreamResult(items=None, errors=None)" + result = IncrementalStreamResult(items=[], path=[]) + assert result.formatted == {"items": [], "path": []} + assert str(result) == "IncrementalStreamResult(items=[], path=[])" result = IncrementalStreamResult( items=["hello", "world"], @@ -166,7 +166,7 @@ def can_format_and_print_incremental_stream_result(): } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " errors=[GraphQLError('msg')], path=['foo', 1], extensions={'baz': 2})" + " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" ) def can_print_stream_record(): From f9b19b088615cd0531830883447f207b3b222c51 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:32:12 +0100 Subject: [PATCH 30/50] incremental: utilize id and subPath rather than path and label Replicates graphql/graphql-js@d2e280ac3eaa90adf2b6118cf35687a21bef8e15 --- .../execution/incremental_publisher.py | 215 +++-- tests/execution/test_defer.py | 734 ++++++------------ tests/execution/test_mutations.py | 26 +- tests/execution/test_stream.py | 641 +++++---------- 4 files changed, 607 insertions(+), 1009 deletions(-) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index dba04461..d112651e 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -59,6 +59,7 @@ class FormattedPendingResult(TypedDict, total=False): """Formatted pending execution result""" + id: str path: list[str | int] label: str @@ -66,22 +67,25 @@ class FormattedPendingResult(TypedDict, total=False): class PendingResult: """Pending execution result""" + id: str path: list[str | int] label: str | None - __slots__ = "label", "path" + __slots__ = "id", "label", "path" def __init__( self, + id: str, # noqa: A002 path: list[str | int], label: str | None = None, ) -> None: + self.id = id self.path = path self.label = label def 
__repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] + args: list[str] = [f"id={self.id!r}, path={self.path!r}"] if self.label: args.append(f"label={self.label!r}") return f"{name}({', '.join(args)})" @@ -89,22 +93,25 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedPendingResult: """Get pending result formatted according to the specification.""" - formatted: FormattedPendingResult = {"path": self.path} + formatted: FormattedPendingResult = {"id": self.id, "path": self.path} if self.label is not None: formatted["label"] = self.label return formatted def __eq__(self, other: object) -> bool: if isinstance(other, dict): - return (other.get("path") or None) == (self.path or None) and ( - other.get("label") or None - ) == (self.label or None) + return ( + other.get("id") == self.id + and (other.get("path") or None) == (self.path or None) + and (other.get("label") or None) == (self.label or None) + ) if isinstance(other, tuple): size = len(other) - return 1 < size < 3 and (self.path, self.label)[:size] == other + return 1 < size < 4 and (self.id, self.path, self.label)[:size] == other return ( isinstance(other, self.__class__) + and other.id == self.id and other.path == self.path and other.label == self.label ) @@ -116,35 +123,29 @@ def __ne__(self, other: object) -> bool: class FormattedCompletedResult(TypedDict, total=False): """Formatted completed execution result""" - path: list[str | int] - label: str + id: str errors: list[GraphQLFormattedError] class CompletedResult: """Completed execution result""" - path: list[str | int] - label: str | None + id: str errors: list[GraphQLError] | None - __slots__ = "errors", "label", "path" + __slots__ = "errors", "id" def __init__( self, - path: list[str | int], - label: str | None = None, + id: str, # noqa: A002 errors: list[GraphQLError] | None = None, ) -> None: - self.path = path - self.label = label + self.id = id self.errors = errors def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"path={self.path!r}"] - if self.label: - args.append(f"label={self.label!r}") + args: list[str] = [f"id={self.id!r}"] if self.errors: args.append(f"errors={self.errors!r}") return f"{name}({', '.join(args)})" @@ -152,27 +153,22 @@ def __repr__(self) -> str: @property def formatted(self) -> FormattedCompletedResult: """Get completed result formatted according to the specification.""" - formatted: FormattedCompletedResult = {"path": self.path} - if self.label is not None: - formatted["label"] = self.label + formatted: FormattedCompletedResult = {"id": self.id} if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] return formatted def __eq__(self, other: object) -> bool: if isinstance(other, dict): - return ( - (other.get("path") or None) == (self.path or None) - and (other.get("label") or None) == (self.label or None) - and (other.get("errors") or None) == (self.errors or None) + return other.get("id") == self.id and (other.get("errors") or None) == ( + self.errors or None ) if isinstance(other, tuple): size = len(other) - return 1 < size < 4 and (self.path, self.label, self.errors)[:size] == other + return 1 < size < 3 and (self.id, self.errors)[:size] == other return ( isinstance(other, self.__class__) - and other.path == self.path - and other.label == self.label + and other.id == self.id and other.errors == self.errors ) @@ -222,7 +218,7 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - ext = "" if 
self.extensions is None else f", extensions={self.extensions}" + ext = "" if self.extensions is None else f", extensions={self.extensions!r}" return f"{name}(data={self.data!r}, errors={self.errors!r}{ext})" def __iter__(self) -> Iterator[Any]: @@ -298,13 +294,15 @@ def __init__( def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, errors={self.errors!r}"] + args: list[str] = [f"data={self.data!r}"] + if self.errors: + args.append(f"errors={self.errors!r}") if self.pending: args.append(f"pending={self.pending!r}") if self.has_next: args.append("has_next") if self.extensions: - args.append(f"extensions={self.extensions}") + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -365,7 +363,8 @@ class FormattedIncrementalDeferResult(TypedDict, total=False): """Formatted incremental deferred execution result""" data: dict[str, Any] - path: list[str | int] + id: str + subPath: list[str | int] errors: list[GraphQLFormattedError] extensions: dict[str, Any] @@ -374,31 +373,36 @@ class IncrementalDeferResult: """Incremental deferred execution result""" data: dict[str, Any] - path: list[str | int] + id: str + sub_path: list[str | int] | None errors: list[GraphQLError] | None extensions: dict[str, Any] | None - __slots__ = "data", "errors", "extensions", "path" + __slots__ = "data", "errors", "extensions", "id", "sub_path" def __init__( self, data: dict[str, Any], - path: list[str | int], + id: str, # noqa: A002 + sub_path: list[str | int] | None = None, errors: list[GraphQLError] | None = None, extensions: dict[str, Any] | None = None, ) -> None: self.data = data - self.path = path + self.id = id + self.sub_path = sub_path self.errors = errors self.extensions = extensions def __repr__(self) -> str: name = self.__class__.__name__ - args: list[str] = [f"data={self.data!r}, path={self.path!r}"] - if self.errors: + args: list[str] = [f"data={self.data!r}, id={self.id!r}"] + if self.sub_path is not None: + args.append(f"sub_path={self.sub_path!r}") + if self.errors is not None: args.append(f"errors={self.errors!r}") - if self.extensions: - args.append(f"extensions={self.extensions}") + if self.extensions is not None: + args.append(f"extensions={self.extensions!r}") return f"{name}({', '.join(args)})" @property @@ -406,8 +410,10 @@ def formatted(self) -> FormattedIncrementalDeferResult: """Get execution result formatted according to the specification.""" formatted: FormattedIncrementalDeferResult = { "data": self.data, - "path": self.path, + "id": self.id, } + if self.sub_path is not None: + formatted["subPath"] = self.sub_path if self.errors is not None: formatted["errors"] = [error.formatted for error in self.errors] if self.extensions is not None: @@ -418,21 +424,26 @@ def __eq__(self, other: object) -> bool: if isinstance(other, dict): return ( other.get("data") == self.data + and other.get("id") == self.id + and (other.get("subPath") or None) == (self.sub_path or None) and (other.get("errors") or None) == (self.errors or None) - and (other.get("path") or None) == (self.path or None) and (other.get("extensions") or None) == (self.extensions or None) ) if isinstance(other, tuple): size = len(other) return ( - 1 < size < 5 - and (self.data, self.errors, self.path, self.extensions)[:size] == other + 1 < size < 6 + and (self.data, self.id, self.sub_path, self.errors, self.extensions)[ + :size + ] + == other ) return ( isinstance(other, self.__class__) and other.data == self.data + and other.id == self.id + and 
other.sub_path == self.sub_path
             and other.errors == self.errors
-            and other.path == self.path
             and other.extensions == self.extensions
         )
 
@@ -444,7 +455,8 @@ class FormattedIncrementalStreamResult(TypedDict, total=False):
     """Formatted incremental stream execution result"""
 
     items: list[Any]
-    path: list[str | int]
+    id: str
+    subPath: list[str | int]
     errors: list[GraphQLFormattedError]
     extensions: dict[str, Any]
 
@@ -453,31 +465,36 @@ class IncrementalStreamResult:
     """Incremental streamed execution result"""
 
     items: list[Any]
-    path: list[str | int]
+    id: str
+    sub_path: list[str | int] | None
     errors: list[GraphQLError] | None
     extensions: dict[str, Any] | None
 
-    __slots__ = "errors", "extensions", "items", "label", "path"
+    __slots__ = "errors", "extensions", "id", "items", "label", "sub_path"
 
     def __init__(
         self,
         items: list[Any],
-        path: list[str | int],
+        id: str,  # noqa: A002
+        sub_path: list[str | int] | None = None,
         errors: list[GraphQLError] | None = None,
         extensions: dict[str, Any] | None = None,
     ) -> None:
         self.items = items
-        self.path = path
+        self.id = id
+        self.sub_path = sub_path
         self.errors = errors
         self.extensions = extensions
 
     def __repr__(self) -> str:
         name = self.__class__.__name__
-        args: list[str] = [f"items={self.items!r}, path={self.path!r}"]
-        if self.errors:
+        args: list[str] = [f"items={self.items!r}, id={self.id!r}"]
+        if self.sub_path is not None:
+            args.append(f"sub_path={self.sub_path!r}")
+        if self.errors is not None:
             args.append(f"errors={self.errors!r}")
-        if self.extensions:
-            args.append(f"extensions={self.extensions}")
+        if self.extensions is not None:
+            args.append(f"extensions={self.extensions!r}")
         return f"{name}({', '.join(args)})"
 
     @property
@@ -485,9 +502,11 @@ def formatted(self) -> FormattedIncrementalStreamResult:
         """Get execution result formatted according to the specification."""
         formatted: FormattedIncrementalStreamResult = {
             "items": self.items,
-            "path": self.path,
+            "id": self.id,
         }
-        if self.errors:
+        if self.sub_path is not None:
+            formatted["subPath"] = self.sub_path
+        if self.errors is not None:
             formatted["errors"] = [error.formatted for error in self.errors]
         if self.extensions is not None:
             formatted["extensions"] = self.extensions
@@ -496,23 +515,27 @@ def __eq__(self, other: object) -> bool:
         if isinstance(other, dict):
             return (
-                (other.get("items") or None) == (self.items or None)
+                other.get("items") == self.items
+                and other.get("id") == self.id
+                and (other.get("subPath", None) == (self.sub_path or None))
                 and (other.get("errors") or None) == (self.errors or None)
-                and (other.get("path", None) == (self.path or None))
                 and (other.get("extensions", None) == (self.extensions or None))
             )
         if isinstance(other, tuple):
             size = len(other)
             return (
-                1 < size < 5
-                and (self.items, self.errors, self.path, self.extensions)[:size]
+                1 < size < 6
+                and (self.items, self.id, self.sub_path, self.errors, self.extensions)[
+                    :size
+                ]
                 == other
             )
         return (
             isinstance(other, self.__class__)
             and other.items == self.items
+            and other.id == self.id
+            and other.sub_path == self.sub_path
             and other.errors == self.errors
-            and other.path == self.path
             and other.extensions == self.extensions
         )
 
@@ -574,7 +597,7 @@ def __repr__(self) -> str:
         if self.completed:
             args.append(f"completed[{len(self.completed)}]")
         if self.extensions:
-            args.append(f"extensions={self.extensions}")
+            args.append(f"extensions={self.extensions!r}")
         return f"{name}({', '.join(args)})"
 
     @property
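
Before the publisher internals below, it may help to see the new wire format end to end: payloads are now correlated by an opaque `id` (handed out when a deferred fragment or stream is first announced as pending) plus an optional `subPath`, instead of repeating the full path and label on every payload. The following consumer-side sketch is illustrative only and not part of this patch; `merge_incremental_payloads` is a hypothetical helper, and the payload shapes are the ones produced by the `formatted` properties above.

def merge_incremental_payloads(initial, subsequent):
    """Rebuild the final response data from an id-based incremental stream."""
    data = initial["data"]
    # Each pending id maps to the path where later payloads will be applied.
    paths_by_id = {p["id"]: p["path"] for p in initial.get("pending", [])}
    for payload in subsequent:
        for pending in payload.get("pending", []):
            paths_by_id[pending["id"]] = pending["path"]
        for inc in payload.get("incremental", []):
            # Target location = announced pending path + optional "subPath".
            path = paths_by_id[inc["id"]] + inc.get("subPath", [])
            target = data
            for key in path:
                target = target[key]
            if "data" in inc:
                target.update(inc["data"])  # deferred fragment fields
            else:
                target.extend(inc["items"])  # streamed list items
        for completed in payload.get("completed", []):
            # Only the id is sent on completion; drop the bookkeeping entry.
            paths_by_id.pop(completed["id"], None)
    return data

The updated test expectations further down in this patch exercise exactly these payload shapes.
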
@@ -654,15 +677,18 @@ class IncrementalPublisher:
     and thereby achieve more deterministic results.
     """
 
+    _next_id: int
     _released: dict[SubsequentResultRecord, None]
     _pending: dict[SubsequentResultRecord, None]
     _resolve: Event | None
+    _tasks: set[Awaitable]
 
     def __init__(self) -> None:
+        self._next_id = 0
         self._released = {}
         self._pending = {}
         self._resolve = None  # lazy initialization
-        self._tasks: set[Awaitable] = set()
+        self._tasks = set()
 
     @staticmethod
     def report_new_defer_fragment_record(
@@ -860,11 +886,19 @@ def _pending_sources_to_results(
         pending_results: list[PendingResult] = []
         for pending_source in pending_sources:
             pending_source.pending_sent = True
+            id_ = self._get_next_id()
+            pending_source.id = id_
             pending_results.append(
-                PendingResult(pending_source.path, pending_source.label)
+                PendingResult(id_, pending_source.path, pending_source.label)
             )
         return pending_results
 
+    def _get_next_id(self) -> str:
+        """Get the next ID for pending results."""
+        id_ = self._next_id
+        self._next_id += 1
+        return str(id_)
+
     async def _subscribe(
         self,
     ) -> AsyncGenerator[SubsequentIncrementalExecutionResult, None]:
@@ -984,9 +1018,12 @@ def _process_pending(
                     continue
                 incremental_result = IncrementalStreamResult(
                     subsequent_result_record.items,
-                    subsequent_result_record.stream_record.path,
-                    subsequent_result_record.errors or None,
+                    # safe because `id` is defined
+                    # once the stream has been released as pending
+                    subsequent_result_record.stream_record.id,  # type: ignore
                 )
+                if subsequent_result_record.errors:
+                    incremental_result.errors = subsequent_result_record.errors
                 incremental_results.append(incremental_result)
             else:
                 new_pending_sources.discard(subsequent_result_record)
@@ -998,11 +1035,13 @@
                 ) in subsequent_result_record.deferred_grouped_field_set_records:
                     if not deferred_grouped_field_set_record.sent:
                         deferred_grouped_field_set_record.sent = True
-                        incremental_result = IncrementalDeferResult(
-                            deferred_grouped_field_set_record.data,  # type: ignore
-                            deferred_grouped_field_set_record.path,
-                            deferred_grouped_field_set_record.errors or None,
+                        incremental_result = self._get_incremental_defer_result(
+                            deferred_grouped_field_set_record
                         )
+                        if deferred_grouped_field_set_record.errors:
+                            incremental_result.errors = (
+                                deferred_grouped_field_set_record.errors
+                            )
                         incremental_results.append(incremental_result)
         return IncrementalUpdate(
             self._pending_sources_to_results(new_pending_sources),
@@ -1010,14 +1049,40 @@
             completed_results,
         )
 
+    def _get_incremental_defer_result(
+        self, deferred_grouped_field_set_record: DeferredGroupedFieldSetRecord
+    ) -> IncrementalDeferResult:
+        """Get the incremental defer result from the grouped field set record."""
+        data = deferred_grouped_field_set_record.data
+        fragment_records = deferred_grouped_field_set_record.deferred_fragment_records
+        max_length = len(fragment_records[0].path)
+        max_index = 0
+        for i in range(1, len(fragment_records)):
+            fragment_record = fragment_records[i]
+            length = len(fragment_record.path)
+            if length > max_length:
+                max_length = length
+                max_index = i
+        record_with_longest_path = fragment_records[max_index]
+        longest_path = record_with_longest_path.path
+        sub_path = deferred_grouped_field_set_record.path[len(longest_path) :]
+        id_ = record_with_longest_path.id
+        return IncrementalDeferResult(
+            data,  # type: ignore
+            # safe because `id` is defined
+            # once the fragment has been released as pending
+            id_,  # type: ignore
+            sub_path or None,
+        )
+
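
A worked illustration of the `sub_path` arithmetic above: a deferred grouped field set can belong to several deferred fragments, the payload is attributed to the fragment record with the deepest path, and only the remainder of the grouped field set's own path below that record is reported (serialized as `subPath`). The values here are hypothetical and for illustration only.

# Two fragments defer overlapping fields: one at the root, one on "hero".
fragment_paths = [[], ["hero"]]                  # DeferredFragmentRecord paths
grouped_field_set_path = ["hero", "friends", 0]  # path of the grouped field set

# The record with the longest path wins the attribution (the loop above
# computes the same winner as this max-by-length):
longest_path = max(fragment_paths, key=len)      # -> ["hero"]
# Only the remainder below that record becomes the sub-path:
sub_path = grouped_field_set_path[len(longest_path):] or None  # -> ["friends", 0]

This is why expectations later in this patch contain entries such as {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]} rather than a full path on every payload.
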
     @staticmethod
     def _completed_record_to_result(
         completed_record: DeferredFragmentRecord | StreamRecord,
     ) -> CompletedResult:
         """Convert the completed record to a result."""
         return CompletedResult(
-            completed_record.path,
-            completed_record.label or None,
+            # safe because `id` is defined once the stream has been released as pending
+            completed_record.id,  # type: ignore
             completed_record.errors or None,
         )
 
@@ -1147,6 +1212,7 @@ class DeferredFragmentRecord:
     path: list[str | int]
     label: str | None
+    id: str | None
     children: dict[SubsequentResultRecord, None]
     deferred_grouped_field_set_records: dict[DeferredGroupedFieldSetRecord, None]
     errors: list[GraphQLError]
@@ -1157,6 +1223,7 @@ def __init__(self, path: Path | None = None, label: str | None = None) -> None:
         self.path = path.as_list() if path else []
         self.label = label
+        self.id = None
         self.children = {}
         self.filtered = False
         self.pending_sent = False
@@ -1179,6 +1246,7 @@ class StreamRecord:
     label: str | None
     path: list[str | int]
+    id: str | None
     errors: list[GraphQLError]
     early_return: Callable[[], Awaitable[Any]] | None
     pending_sent: bool
@@ -1191,6 +1259,7 @@ def __init__(
     ) -> None:
         self.path = path.as_list()
         self.label = label
+        self.id = None
         self.errors = []
         self.early_return = early_return
         self.pending_sent = False
diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py
index 7f7c0c01..62dc88bb 100644
--- a/tests/execution/test_defer.py
+++ b/tests/execution/test_defer.py
@@ -196,93 +196,86 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]:
 
 def describe_execute_defer_directive():
     def can_format_and_print_pending_result():
-        result = PendingResult([])
-        assert result.formatted == {"path": []}
-        assert str(result) == "PendingResult(path=[])"
+        result = PendingResult("foo", [])
+        assert result.formatted == {"id": "foo", "path": []}
+        assert str(result) == "PendingResult(id='foo', path=[])"
 
-        result = PendingResult(path=["foo", 1], label="bar")
-        assert result.formatted == {
-            "path": ["foo", 1],
-            "label": "bar",
-        }
-        assert str(result) == "PendingResult(path=['foo', 1], label='bar')"
+        result = PendingResult(id="foo", path=["bar", 1], label="baz")
+        assert result.formatted == {"id": "foo", "path": ["bar", 1], "label": "baz"}
+        assert str(result) == "PendingResult(id='foo', path=['bar', 1], label='baz')"
 
     def can_compare_pending_result():
-        args: dict[str, Any] = {"path": ["foo", 1], "label": "bar"}
+        args: dict[str, Any] = {"id": "foo", "path": ["bar", 1], "label": "baz"}
         result = PendingResult(**args)
         assert result == PendingResult(**args)
-        assert result != CompletedResult(**modified_args(args, path=["foo", 2]))
-        assert result != CompletedResult(**modified_args(args, label="baz"))
+        assert result != PendingResult(**modified_args(args, id="bar"))
+        assert result != PendingResult(**modified_args(args, path=["bar", 2]))
+        assert result != PendingResult(**modified_args(args, label="bar"))
         assert result == tuple(args.values())
+        assert result == tuple(args.values())[:2]
         assert result != tuple(args.values())[:1]
-        assert result != tuple(args.values())[:1] + ("baz",)
+        assert result != tuple(args.values())[:1] + (["bar", 2],)
         assert result == args
-        assert result != {**args, "path": ["foo", 2]}
-        assert result != {**args, "label": "baz"}
+        assert result != {**args, "id": "bar"}
+        assert result != {**args, "path": ["bar", 2]}
+        assert result != {**args, "label": "bar"}
 
     def can_format_and_print_completed_result():
-        result = CompletedResult([])
-        assert result.formatted == {"path": []}
-        assert str(result) == 
"CompletedResult(path=[])" + result = CompletedResult("foo") + assert result.formatted == {"id": "foo"} + assert str(result) == "CompletedResult(id='foo')" - result = CompletedResult( - path=["foo", 1], label="bar", errors=[GraphQLError("oops")] - ) - assert result.formatted == { - "path": ["foo", 1], - "label": "bar", - "errors": [{"message": "oops"}], - } - assert ( - str(result) == "CompletedResult(path=['foo', 1], label='bar'," - " errors=[GraphQLError('oops')])" - ) + result = CompletedResult(id="foo", errors=[GraphQLError("oops")]) + assert result.formatted == {"id": "foo", "errors": [{"message": "oops"}]} + assert str(result) == "CompletedResult(id='foo', errors=[GraphQLError('oops')])" def can_compare_completed_result(): - args: dict[str, Any] = {"path": ["foo", 1], "label": "bar", "errors": []} + args: dict[str, Any] = {"id": "foo", "errors": []} result = CompletedResult(**args) assert result == CompletedResult(**args) - assert result != CompletedResult(**modified_args(args, path=["foo", 2])) - assert result != CompletedResult(**modified_args(args, label="baz")) + assert result != CompletedResult(**modified_args(args, id="bar")) assert result != CompletedResult( **modified_args(args, errors=[GraphQLError("oops")]) ) assert result == tuple(args.values()) - assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] + assert result != tuple(args.values())[:1] + ([GraphQLError("oops")],) assert result == args - assert result != {**args, "path": ["foo", 2]} - assert result != {**args, "label": "baz"} + assert result != {**args, "id": "bar"} assert result != {**args, "errors": [{"message": "oops"}]} def can_format_and_print_incremental_defer_result(): - result = IncrementalDeferResult(data={}, path=[]) - assert result.formatted == {"data": {}, "path": []} - assert str(result) == "IncrementalDeferResult(data={}, path=[])" + result = IncrementalDeferResult(data={}, id="foo") + assert result.formatted == {"data": {}, "id": "foo"} + assert str(result) == "IncrementalDeferResult(data={}, id='foo')" result = IncrementalDeferResult( data={"hello": "world"}, - errors=[GraphQLError("msg")], - path=["foo", 1], + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, - "errors": [{"message": "msg"}], + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], "extensions": {"baz": 2}, - "path": ["foo", 1], } assert ( str(result) == "IncrementalDeferResult(data={'hello': 'world'}," - " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" ) # noinspection PyTypeChecker def can_compare_incremental_defer_result(): args: dict[str, Any] = { "data": {"hello": "world"}, - "errors": [GraphQLError("msg")], - "path": ["foo", 1], + "id": "foo", + "sub_path": ["bar", 1], + "errors": [GraphQLError("oops")], "extensions": {"baz": 2}, } result = IncrementalDeferResult(**args) @@ -290,8 +283,11 @@ def can_compare_incremental_defer_result(): assert result != IncrementalDeferResult( **modified_args(args, data={"hello": "foo"}) ) + assert result != IncrementalDeferResult(**modified_args(args, id="bar")) + assert result != IncrementalDeferResult( + **modified_args(args, sub_path=["bar", 2]) + ) assert result != IncrementalDeferResult(**modified_args(args, errors=[])) - assert result != IncrementalDeferResult(**modified_args(args, path=["foo", 2])) assert result != 
IncrementalDeferResult( **modified_args(args, extensions={"baz": 1}) ) @@ -300,52 +296,50 @@ def can_compare_incremental_defer_result(): assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] - assert result != ({"hello": "world"}, []) + assert result != ({"hello": "world"}, "bar") + args["subPath"] = args.pop("sub_path") assert result == args assert result != {**args, "data": {"hello": "foo"}} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} assert result != {**args, "errors": []} - assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": {"baz": 1}} def can_format_and_print_initial_incremental_execution_result(): result = InitialIncrementalExecutionResult() assert result.formatted == {"data": None, "hasNext": False, "pending": []} - assert ( - str(result) == "InitialIncrementalExecutionResult(data=None, errors=None)" - ) + assert str(result) == "InitialIncrementalExecutionResult(data=None)" result = InitialIncrementalExecutionResult(has_next=True) assert result.formatted == {"data": None, "hasNext": True, "pending": []} - assert ( - str(result) - == "InitialIncrementalExecutionResult(data=None, errors=None, has_next)" - ) + assert str(result) == "InitialIncrementalExecutionResult(data=None, has_next)" result = InitialIncrementalExecutionResult( data={"hello": "world"}, errors=[GraphQLError("msg")], - pending=[PendingResult(["bar"])], + pending=[PendingResult("foo", ["bar"])], has_next=True, extensions={"baz": 2}, ) assert result.formatted == { "data": {"hello": "world"}, "errors": [{"message": "msg"}], - "pending": [{"path": ["bar"]}], + "pending": [{"id": "foo", "path": ["bar"]}], "hasNext": True, "extensions": {"baz": 2}, } assert ( str(result) == "InitialIncrementalExecutionResult(" "data={'hello': 'world'}, errors=[GraphQLError('msg')]," - " pending=[PendingResult(path=['bar'])], has_next, extensions={'baz': 2})" + " pending=[PendingResult(id='foo', path=['bar'])], has_next," + " extensions={'baz': 2})" ) def can_compare_initial_incremental_execution_result(): args: dict[str, Any] = { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "has_next": True, "extensions": {"baz": 2}, } @@ -377,19 +371,19 @@ def can_compare_initial_incremental_execution_result(): assert result == { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } assert result != { "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, "extensions": {"baz": 2}, } @@ -402,13 +396,13 @@ def can_compare_initial_incremental_execution_result(): assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "extensions": {"baz": 2}, } assert result != { "data": {"hello": "world"}, "errors": [GraphQLError("msg")], - "pending": [PendingResult(["bar"])], + "pending": [PendingResult("foo", ["bar"])], "hasNext": True, } @@ -421,9 +415,11 @@ def can_format_and_print_subsequent_incremental_execution_result(): 
assert result.formatted == {"hasNext": True} assert str(result) == "SubsequentIncrementalExecutionResult(has_next)" - pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] - completed = [CompletedResult(["foo", 1])] + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] result = SubsequentIncrementalExecutionResult( has_next=True, pending=pending, @@ -433,9 +429,9 @@ def can_format_and_print_subsequent_incremental_execution_result(): ) assert result.formatted == { "hasNext": True, - "pending": [{"path": ["bar"]}], - "incremental": [{"data": {"one": 1}, "path": [1]}], - "completed": [{"path": ["foo", 1]}], + "pending": [{"id": "foo", "path": ["bar"]}], + "incremental": [{"data": {"foo": 1}, "id": "bar"}], + "completed": [{"id": "foo"}], "extensions": {"baz": 2}, } assert ( @@ -444,9 +440,11 @@ def can_format_and_print_subsequent_incremental_execution_result(): ) def can_compare_subsequent_incremental_execution_result(): - pending = [PendingResult(["bar"])] - incremental = [cast(IncrementalResult, IncrementalDeferResult({"one": 1}, [1]))] - completed = [CompletedResult(path=["foo", 1])] + pending = [PendingResult("foo", ["bar"])] + incremental = [ + cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + ] + completed = [CompletedResult("foo")] args: dict[str, Any] = { "has_next": True, "pending": pending, @@ -571,12 +569,12 @@ async def can_defer_fragments_containing_scalar_types(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"]}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -598,14 +596,7 @@ async def can_disable_defer_using_if_argument(): ) result = await complete(document) - assert result == { - "data": { - "hero": { - "id": "1", - "name": "Luke", - }, - }, - } + assert result == {"data": {"hero": {"id": "1", "name": "Luke"}}} @pytest.mark.asyncio async def does_not_disable_defer_with_null_if_argument(): @@ -627,12 +618,12 @@ async def does_not_disable_defer_with_null_if_argument(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"]}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -677,12 +668,12 @@ async def can_defer_fragments_on_the_top_level_query_field(): assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferQuery"}], + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], "hasNext": True, }, { - "incremental": [{"data": {"hero": {"id": "1"}}, "path": []}], - "completed": [{"path": [], "label": "DeferQuery"}], + "incremental": [{"data": {"hero": {"id": "1"}}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -706,7 +697,7 @@ async def can_defer_fragments_with_errors_on_the_top_level_query_field(): assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferQuery"}], + "pending": [{"id": "0", "path": [], "label": "DeferQuery"}], "hasNext": True, }, { @@ -720,10 +711,10 @@ async def 
can_defer_fragments_with_errors_on_the_top_level_query_field(): "path": ["hero", "name"], } ], - "path": [], + "id": "0", } ], - "completed": [{"path": [], "label": "DeferQuery"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -754,17 +745,14 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): { "data": {"hero": {}}, "pending": [ - {"path": ["hero"], "label": "DeferTop"}, - {"path": ["hero"], "label": "DeferNested"}, + {"id": "0", "path": ["hero"], "label": "DeferTop"}, + {"id": "1", "path": ["hero"], "label": "DeferNested"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, + {"data": {"id": "1"}, "id": "0"}, { "data": { "friends": [ @@ -773,13 +761,10 @@ async def can_defer_a_fragment_within_an_already_deferred_fragment(): {"name": "C-3PO"}, ] }, - "path": ["hero"], + "id": "1", }, ], - "completed": [ - {"path": ["hero"], "label": "DeferTop"}, - {"path": ["hero"], "label": "DeferNested"}, - ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -804,13 +789,10 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_deferred_first(): assert result == [ { "data": {"hero": {"name": "Luke"}}, - "pending": [{"path": ["hero"], "label": "DeferTop"}], + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], "hasNext": True, }, - { - "completed": [{"path": ["hero"], "label": "DeferTop"}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -833,13 +815,10 @@ async def can_defer_a_fragment_that_is_also_not_deferred_with_non_deferred_first assert result == [ { "data": {"hero": {"name": "Luke"}}, - "pending": [{"path": ["hero"], "label": "DeferTop"}], + "pending": [{"id": "0", "path": ["hero"], "label": "DeferTop"}], "hasNext": True, }, - { - "completed": [{"path": ["hero"], "label": "DeferTop"}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -861,12 +840,12 @@ async def can_defer_an_inline_fragment(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"], "label": "InlineDeferred"}], + "pending": [{"id": "0", "path": ["hero"], "label": "InlineDeferred"}], "hasNext": True, }, { - "incremental": [{"data": {"name": "Luke"}, "path": ["hero"]}], - "completed": [{"path": ["hero"], "label": "InlineDeferred"}], + "incremental": [{"data": {"name": "Luke"}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -890,11 +869,12 @@ async def does_not_emit_empty_defer_fragments(): result = await complete(document) assert result == [ - {"data": {"hero": {}}, "pending": [{"path": ["hero"]}], "hasNext": True}, { - "completed": [{"path": ["hero"]}], - "hasNext": False, + "data": {"hero": {}}, + "pending": [{"id": "0", "path": ["hero"]}], + "hasNext": True, }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -919,26 +899,17 @@ async def separately_emits_defer_fragments_different_labels_varying_fields(): { "data": {"hero": {}}, "pending": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": ["hero"], "label": "DeferName"}, + {"id": "0", "path": ["hero"], "label": "DeferID"}, + {"id": "1", "path": ["hero"], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": ["hero"], "label": "DeferName"}, + {"data": {"id": "1"}, "id": "0"}, + {"data": 
{"name": "Luke"}, "id": "1"}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -967,30 +938,18 @@ async def separately_emits_defer_fragments_different_labels_varying_subfields(): { "data": {}, "pending": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"hero": {}}, - "path": [], - }, - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -1031,30 +990,18 @@ async def resolve(value): { "data": {}, "pending": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"id": "0", "path": [], "label": "DeferID"}, + {"id": "1", "path": [], "label": "DeferName"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"hero": {}}, - "path": [], - }, - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": [], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"hero": {}}, "id": "0"}, + {"data": {"id": "1"}, "id": "0", "subPath": ["hero"]}, + {"data": {"name": "Luke"}, "id": "1", "subPath": ["hero"]}, ], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": False, }, ] @@ -1083,26 +1030,17 @@ async def separately_emits_defer_fragments_var_subfields_same_prio_diff_level(): { "data": {"hero": {}}, "pending": [ - {"path": [], "label": "DeferName"}, - {"path": ["hero"], "label": "DeferID"}, + {"id": "0", "path": [], "label": "DeferName"}, + {"id": "1", "path": ["hero"], "label": "DeferID"}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "1"}, - "path": ["hero"], - }, - { - "data": {"name": "Luke"}, - "path": ["hero"], - }, - ], - "completed": [ - {"path": ["hero"], "label": "DeferID"}, - {"path": [], "label": "DeferName"}, + {"data": {"id": "1"}, "id": "1"}, + {"data": {"name": "Luke"}, "id": "0", "subPath": ["hero"]}, ], + "completed": [{"id": "1"}, {"id": "0"}], "hasNext": False, }, ] @@ -1128,36 +1066,18 @@ async def separately_emits_nested_defer_frags_var_subfields_same_prio_diff_level assert result == [ { "data": {}, - "pending": [{"path": [], "label": "DeferName"}], + "pending": [{"id": "0", "path": [], "label": "DeferName"}], "hasNext": True, }, { - "pending": [{"path": ["hero"], "label": "DeferID"}], - "incremental": [ - { - "data": { - "hero": { - "name": "Luke", - }, - }, - "path": [], - }, - ], - "completed": [ - {"path": [], "label": "DeferName"}, - ], + "pending": [{"id": "1", "path": ["hero"], "label": "DeferID"}], + "incremental": [{"data": {"hero": {"name": "Luke"}}, "id": "0"}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "id": "1", - }, - "path": ["hero"], - }, - ], - "completed": [{"path": ["hero"], "label": "DeferID"}], + "incremental": [{"data": {"id": "1"}, "id": "1"}], + "completed": [{"id": "1"}], "hasNext": False, }, ] @@ -1197,49 +1117,40 @@ async def can_deduplicate_multiple_defers_on_the_same_object(): { "data": {"hero": {"friends": [{}, {}, {}]}}, "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", 
"friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, + {"id": "0", "path": ["hero", "friends", 0]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 0]}, + {"id": "3", "path": ["hero", "friends", 0]}, + {"id": "4", "path": ["hero", "friends", 1]}, + {"id": "5", "path": ["hero", "friends", 1]}, + {"id": "6", "path": ["hero", "friends", 1]}, + {"id": "7", "path": ["hero", "friends", 1]}, + {"id": "8", "path": ["hero", "friends", 2]}, + {"id": "9", "path": ["hero", "friends", 2]}, + {"id": "10", "path": ["hero", "friends", 2]}, + {"id": "11", "path": ["hero", "friends", 2]}, ], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "2", "name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"id": "3", "name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"id": "4", "name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, + {"data": {"id": "2", "name": "Han"}, "id": "0"}, + {"data": {"id": "3", "name": "Leia"}, "id": "4"}, + {"data": {"id": "4", "name": "C-3PO"}, "id": "8"}, ], "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 2]}, - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1"}, + {"id": "2"}, + {"id": "3"}, + {"id": "5"}, + {"id": "6"}, + {"id": "7"}, + {"id": "9"}, + {"id": "10"}, + {"id": "11"}, + {"id": "0"}, + {"id": "4"}, + {"id": "8"}, ], "hasNext": False, }, @@ -1295,17 +1206,18 @@ async def deduplicates_fields_present_in_the_initial_payload(): "anotherNestedObject": {"deeperObject": {"foo": "foo"}}, } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": {"bar": "bar"}, - "path": ["hero", "nestedObject", "deeperObject"], + "id": "0", + "subPath": ["nestedObject", "deeperObject"], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1339,36 +1251,25 @@ async def deduplicates_fields_present_in_a_parent_defer_payload(): assert result == [ { "data": {"hero": {}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "pending": [ + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]} + ], "incremental": [ { - "data": { - "nestedObject": { - "deeperObject": { - "foo": "foo", - }, - } - }, - "path": ["hero"], + "data": {"nestedObject": {"deeperObject": {"foo": "foo"}}}, + "id": "0", }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "bar": "bar", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bar": "bar"}, "id": "1"}], + "completed": [{"id": "1"}], "hasNext": False, }, ] @@ -1436,39 +1337,34 @@ async def 
deduplicates_fields_with_deferred_fragments_at_multiple_levels(): }, }, }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject"]}], + "pending": [{"id": "1", "path": ["hero", "nestedObject"]}], "incremental": [ { "data": {"bar": "bar"}, - "path": ["hero", "nestedObject", "deeperObject"], + "id": "0", + "subPath": ["nestedObject", "deeperObject"], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} + ], "incremental": [ - { - "data": {"baz": "baz"}, - "path": ["hero", "nestedObject", "deeperObject"], - }, + {"data": {"baz": "baz"}, "id": "1", "subPath": ["deeperObject"]}, ], "hasNext": True, - "completed": [{"path": ["hero", "nestedObject"]}], + "completed": [{"id": "1"}], }, { - "incremental": [ - { - "data": {"bak": "bak"}, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bak": "bak"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, }, ] @@ -1509,37 +1405,22 @@ async def deduplicates_fields_from_deferred_fragments_branches_same_level(): { "data": {"hero": {"nestedObject": {"deeperObject": {}}}}, "pending": [ - {"path": ["hero"]}, - {"path": ["hero", "nestedObject", "deeperObject"]}, + {"id": "0", "path": ["hero"]}, + {"id": "1", "path": ["hero", "nestedObject", "deeperObject"]}, ], "hasNext": True, }, { - "pending": [{"path": ["hero", "nestedObject", "deeperObject"]}], - "incremental": [ - { - "data": { - "foo": "foo", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [ - {"path": ["hero"]}, - {"path": ["hero", "nestedObject", "deeperObject"]}, + "pending": [ + {"id": "2", "path": ["hero", "nestedObject", "deeperObject"]} ], + "incremental": [{"data": {"foo": "foo"}, "id": "1"}], + "completed": [{"id": "0"}, {"id": "1"}], "hasNext": True, }, { - "incremental": [ - { - "data": { - "bar": "bar", - }, - "path": ["hero", "nestedObject", "deeperObject"], - }, - ], - "completed": [{"path": ["hero", "nestedObject", "deeperObject"]}], + "incremental": [{"data": {"bar": "bar"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, }, ] @@ -1584,21 +1465,15 @@ async def deduplicates_fields_from_deferred_fragments_branches_multi_levels(): assert result == [ { "data": {"a": {"b": {"c": {"d": "d"}}}}, - "pending": [{"path": []}, {"path": ["a", "b"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a", "b"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"e": {"f": "f"}}, - "path": ["a", "b"], - }, - { - "data": {"g": {"h": "h"}}, - "path": [], - }, + {"data": {"e": {"f": "f"}}, "id": "1"}, + {"data": {"g": {"h": "h"}}, "id": "0"}, ], - "completed": [{"path": ["a", "b"]}, {"path": []}], + "completed": [{"id": "1"}, {"id": "0"}], "hasNext": False, }, ] @@ -1638,23 +1513,17 @@ async def nulls_cross_defer_boundaries_null_first(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - { - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, ], "completed": 
[ { - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1664,7 +1533,7 @@ async def nulls_cross_defer_boundaries_null_first(): }, ], }, - {"path": ["a"]}, + {"id": "1"}, ], "hasNext": False, }, @@ -1709,23 +1578,17 @@ async def nulls_cross_defer_boundaries_value_first(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - { - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "0", "subPath": ["a", "b", "c"]}, ], "completed": [ { - "path": ["a"], + "id": "1", "errors": [ { "message": "Cannot return null" @@ -1735,9 +1598,7 @@ async def nulls_cross_defer_boundaries_value_first(): }, ], }, - { - "path": [], - }, + {"id": "0"}, ], "hasNext": False, }, @@ -1783,27 +1644,21 @@ async def filters_a_payload_with_a_null_that_cannot_be_merged(): assert result == [ { "data": {"a": {}}, - "pending": [{"path": []}, {"path": ["a"]}], + "pending": [{"id": "0", "path": []}, {"id": "1", "path": ["a"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"b": {"c": {}}}, - "path": ["a"], - }, - { - "data": {"d": "d"}, - "path": ["a", "b", "c"], - }, + {"data": {"b": {"c": {}}}, "id": "1"}, + {"data": {"d": "d"}, "id": "1", "subPath": ["b", "c"]}, ], - "completed": [{"path": ["a"]}], + "completed": [{"id": "1"}], "hasNext": True, }, { "completed": [ { - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1834,10 +1689,7 @@ async def cancels_deferred_fields_when_initial_result_exhibits_null_bubbling(): """ ) result = await complete( - document, - { - "hero": {**hero, "nonNullName": lambda _info: None}, - }, + document, {"hero": {**hero, "nonNullName": lambda _info: None}} ) assert result == { @@ -1866,23 +1718,20 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): """ ) result = await complete( - document, - { - "hero": {**hero, "nonNullName": lambda _info: None}, - }, + document, {"hero": {**hero, "nonNullName": lambda _info: None}} ) assert result == [ { "data": {}, - "pending": [{"path": []}], + "pending": [{"id": "0", "path": []}], "hasNext": True, }, { "incremental": [ { "data": {"hero": None}, - "path": [], + "id": "0", "errors": [ { "message": "Cannot return null" @@ -1893,7 +1742,7 @@ async def cancels_deferred_fields_when_deferred_result_exhibits_null_bubbling(): ], }, ], - "completed": [{"path": []}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1929,13 +1778,10 @@ async def deduplicates_list_fields(): ] } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_async_iterable_list_fields(): @@ -1957,22 +1803,16 @@ async def deduplicates_async_iterable_list_fields(): ) result = await complete( - document, - { - "hero": {**hero, "friends": Resolvers.first_friend}, - }, + document, {"hero": {**hero, "friends": Resolvers.first_friend}} ) assert result == [ { "data": {"hero": {"friends": [{"name": "Han"}]}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def 
deduplicates_empty_async_iterable_list_fields(): @@ -1999,22 +1839,16 @@ async def resolve_friends(_info): yield friend # pragma: no cover result = await complete( - document, - { - "hero": {**hero, "friends": resolve_friends}, - }, + document, {"hero": {**hero, "friends": resolve_friends}} ) assert result == [ { "data": {"hero": {"friends": []}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): @@ -2047,25 +1881,16 @@ async def does_not_deduplicate_list_fields_with_non_overlapping_fields(): ] } }, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ - { - "data": {"id": "2"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"id": "3"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"id": "4"}, - "path": ["hero", "friends", 2], - }, + {"data": {"id": "2"}, "id": "0", "subPath": ["friends", 0]}, + {"data": {"id": "3"}, "id": "0", "subPath": ["friends", 1]}, + {"data": {"id": "4"}, "id": "0", "subPath": ["friends", 2]}, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -2094,13 +1919,10 @@ async def deduplicates_list_fields_that_return_empty_lists(): assert result == [ { "data": {"hero": {"friends": []}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_null_object_fields(): @@ -2127,13 +1949,10 @@ async def deduplicates_null_object_fields(): assert result == [ { "data": {"hero": {"nestedObject": None}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] async def deduplicates_async_object_fields(): @@ -2164,13 +1983,10 @@ async def resolve_nested_object(_info): assert result == [ { "data": {"hero": {"nestedObject": {"name": "foo"}}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, - { - "completed": [{"path": ["hero"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -2193,14 +2009,14 @@ async def handles_errors_thrown_in_deferred_fragments(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "incremental": [ { "data": {"name": None}, - "path": ["hero"], + "id": "0", "errors": [ { "message": "bad", @@ -2210,7 +2026,7 @@ async def handles_errors_thrown_in_deferred_fragments(): ], }, ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -2237,13 +2053,13 @@ async def handles_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "completed": [ { - "path": ["hero"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -2311,13 +2127,13 @@ async def handles_async_non_nullable_errors_thrown_in_deferred_fragments(): assert result == [ { "data": 
{"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "completed": [ { - "path": ["hero"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -2358,44 +2174,28 @@ async def returns_payloads_in_correct_order(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, ], "incremental": [ - { - "data": {"name": "slow", "friends": [{}, {}, {}]}, - "path": ["hero"], - } + {"data": {"name": "slow", "friends": [{}, {}, {}]}, "id": "0"} ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, - ], - "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], "hasNext": False, }, ] @@ -2426,44 +2226,28 @@ async def returns_payloads_from_synchronous_data_in_correct_order(): assert result == [ { "data": {"hero": {"id": "1"}}, - "pending": [{"path": ["hero"]}], + "pending": [{"id": "0", "path": ["hero"]}], "hasNext": True, }, { "pending": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"id": "1", "path": ["hero", "friends", 0]}, + {"id": "2", "path": ["hero", "friends", 1]}, + {"id": "3", "path": ["hero", "friends", 2]}, ], "incremental": [ - { - "data": {"name": "Luke", "friends": [{}, {}, {}]}, - "path": ["hero"], - } + {"data": {"name": "Luke", "friends": [{}, {}, {}]}, "id": "0"} ], - "completed": [{"path": ["hero"]}], + "completed": [{"id": "0"}], "hasNext": True, }, { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["hero", "friends", 0], - }, - { - "data": {"name": "Leia"}, - "path": ["hero", "friends", 1], - }, - { - "data": {"name": "C-3PO"}, - "path": ["hero", "friends", 2], - }, - ], - "completed": [ - {"path": ["hero", "friends", 0]}, - {"path": ["hero", "friends", 1]}, - {"path": ["hero", "friends", 2]}, + {"data": {"name": "Han"}, "id": "1"}, + {"data": {"name": "Leia"}, "id": "2"}, + {"data": {"name": "C-3PO"}, "id": "3"}, ], + "completed": [{"id": "1"}, {"id": "2"}, {"id": "3"}], "hasNext": False, }, ] diff --git a/tests/execution/test_mutations.py b/tests/execution/test_mutations.py index 987eba45..b03004de 100644 --- a/tests/execution/test_mutations.py +++ b/tests/execution/test_mutations.py @@ -244,19 +244,12 @@ async def mutation_fields_with_defer_do_not_block_next_mutation(): assert patches == [ { "data": {"first": {}, "second": {"theNumber": 2}}, - "pending": [{"path": ["first"], "label": "defer-label"}], + "pending": [{"id": "0", "path": ["first"], "label": "defer-label"}], "hasNext": True, }, { - "incremental": [ - { - "path": ["first"], - "data": { - "promiseToGetTheNumber": 2, - }, - }, - ], - "completed": [{"path": ["first"], "label": 
"defer-label"}], + "incremental": [{"id": "0", "data": {"promiseToGetTheNumber": 2}}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -319,19 +312,12 @@ async def mutation_with_defer_is_not_executed_serially(): assert patches == [ { "data": {"second": {"theNumber": 2}}, - "pending": [{"path": [], "label": "defer-label"}], + "pending": [{"id": "0", "path": [], "label": "defer-label"}], "hasNext": True, }, { - "incremental": [ - { - "path": [], - "data": { - "first": {"theNumber": 1}, - }, - }, - ], - "completed": [{"path": [], "label": "defer-label"}], + "incremental": [{"id": "0", "data": {"first": {"theNumber": 1}}}], + "completed": [{"id": "0"}], "hasNext": False, }, ] diff --git a/tests/execution/test_stream.py b/tests/execution/test_stream.py index 487817b4..46237fc1 100644 --- a/tests/execution/test_stream.py +++ b/tests/execution/test_stream.py @@ -148,39 +148,39 @@ def modified_args(args: dict[str, Any], **modifications: Any) -> dict[str, Any]: def describe_execute_stream_directive(): def can_format_and_print_incremental_stream_result(): - result = IncrementalStreamResult(items=[], path=[]) - assert result.formatted == {"items": [], "path": []} - assert str(result) == "IncrementalStreamResult(items=[], path=[])" + result = IncrementalStreamResult(items=["hello", "world"], id="foo") + assert result.formatted == {"items": ["hello", "world"], "id": "foo"} + assert ( + str(result) == "IncrementalStreamResult(items=['hello', 'world'], id='foo')" + ) result = IncrementalStreamResult( items=["hello", "world"], - errors=[GraphQLError("msg")], - path=["foo", 1], + id="foo", + sub_path=["bar", 1], + errors=[GraphQLError("oops")], extensions={"baz": 2}, ) assert result.formatted == { "items": ["hello", "world"], - "errors": [{"message": "msg"}], + "id": "foo", + "subPath": ["bar", 1], + "errors": [{"message": "oops"}], "extensions": {"baz": 2}, - "path": ["foo", 1], } assert ( str(result) == "IncrementalStreamResult(items=['hello', 'world']," - " path=['foo', 1], errors=[GraphQLError('msg')], extensions={'baz': 2})" + " id='foo', sub_path=['bar', 1], errors=[GraphQLError('oops')]," + " extensions={'baz': 2})" ) - def can_print_stream_record(): - record = StreamRecord(Path(None, 0, None)) - assert str(record) == "StreamRecord(path=[0])" - record = StreamRecord(Path(None, "bar", "Bar"), "foo") - assert str(record) == "StreamRecord(path=['bar'], label='foo')" - # noinspection PyTypeChecker def can_compare_incremental_stream_result(): args: dict[str, Any] = { "items": ["hello", "world"], - "errors": [GraphQLError("msg")], - "path": ["foo", 1], + "id": "foo", + "sub_path": ["bar", 1], + "errors": [GraphQLError("oops")], "extensions": {"baz": 2}, } result = IncrementalStreamResult(**args) @@ -188,22 +188,34 @@ def can_compare_incremental_stream_result(): assert result != IncrementalStreamResult( **modified_args(args, items=["hello", "foo"]) ) + assert result != IncrementalStreamResult(**modified_args(args, id="bar")) + assert result != IncrementalStreamResult( + **modified_args(args, sub_path=["bar", 2]) + ) assert result != IncrementalStreamResult(**modified_args(args, errors=[])) - assert result != IncrementalStreamResult(**modified_args(args, path=["foo", 2])) assert result != IncrementalStreamResult( **modified_args(args, extensions={"baz": 1}) ) assert result == tuple(args.values()) + assert result == tuple(args.values())[:4] assert result == tuple(args.values())[:3] assert result == tuple(args.values())[:2] assert result != tuple(args.values())[:1] - assert result != (["hello", 
"world"], []) + assert result != (["hello", "world"], "bar") + args["subPath"] = args.pop("sub_path") assert result == args assert result != {**args, "items": ["hello", "foo"]} + assert result != {**args, "id": "bar"} + assert result != {**args, "subPath": ["bar", 2]} assert result != {**args, "errors": []} - assert result != {**args, "path": ["foo", 2]} assert result != {**args, "extensions": {"baz": 1}} + def can_print_stream_record(): + record = StreamRecord(Path(None, 0, None)) + assert str(record) == "StreamRecord(path=[0])" + record = StreamRecord(Path(None, "bar", "Bar"), "foo") + assert str(record) == "StreamRecord(path=['bar'], label='foo')" + @pytest.mark.asyncio async def can_stream_a_list_field(): document = parse("{ scalarList @stream(initialCount: 1) }") @@ -212,19 +224,14 @@ async def can_stream_a_list_field(): ) assert result == [ { - "data": { - "scalarList": ["apple"], - }, - "pending": [{"path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["banana"], "path": ["scalarList"]}], + "data": {"scalarList": ["apple"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -237,23 +244,15 @@ async def can_use_default_value_of_initial_count(): ) assert result == [ { - "data": { - "scalarList": [], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": []}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, + {"incremental": [{"items": ["apple"], "id": "0"}], "hasNext": True}, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [{"items": ["apple"], "path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["banana"], "path": ["scalarList"]}], - "hasNext": True, - }, - { - "incremental": [{"items": ["coconut"], "path": ["scalarList"]}], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -265,9 +264,7 @@ async def negative_values_of_initial_count_throw_field_errors(): document, {"scalarList": ["apple", "banana", "coconut"]} ) assert result == { - "data": { - "scalarList": None, - }, + "data": {"scalarList": None}, "errors": [ { "message": "initialCount must be a positive integer", @@ -282,9 +279,7 @@ async def non_integer_values_of_initial_count_throw_field_errors(): document = parse("{ scalarList @stream(initialCount: 1.5) }") result = await complete(document, {"scalarList": ["apple", "half of a banana"]}) assert result == { - "data": { - "scalarList": None, - }, + "data": {"scalarList": None}, "errors": [ { "message": "Argument 'initialCount' has invalid value 1.5.", @@ -304,29 +299,16 @@ async def returns_label_from_stream_directive(): ) assert result == [ { - "data": { - "scalarList": ["apple"], - }, - "pending": [{"path": ["scalarList"], "label": "scalar-stream"}], - "hasNext": True, - }, - { - "incremental": [ - { - "items": ["banana"], - "path": ["scalarList"], - } + "data": {"scalarList": ["apple"]}, + "pending": [ + {"id": "0", "path": ["scalarList"], "label": "scalar-stream"} ], "hasNext": True, }, + {"incremental": [{"items": ["banana"], "id": "0"}], "hasNext": True}, { - "incremental": [ - { - "items": ["coconut"], - "path": 
["scalarList"], - } - ], - "completed": [{"path": ["scalarList"], "label": "scalar-stream"}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -339,12 +321,7 @@ async def throws_an_error_for_stream_directive_with_non_string_label(): "data": {"scalarList": None}, "errors": [ { - "locations": [ - { - "line": 1, - "column": 46, - } - ], + "locations": [{"line": 1, "column": 46}], "message": "Argument 'label' has invalid value 42.", "path": ["scalarList"], } @@ -357,11 +334,7 @@ async def can_disable_stream_using_if_argument(): result = await complete( document, {"scalarList": ["apple", "banana", "coconut"]} ) - assert result == { - "data": { - "scalarList": ["apple", "banana", "coconut"], - }, - } + assert result == {"data": {"scalarList": ["apple", "banana", "coconut"]}} @pytest.mark.asyncio @pytest.mark.filterwarnings("ignore:.* was never awaited:RuntimeWarning") @@ -375,20 +348,13 @@ async def does_not_disable_stream_with_null_if_argument(): ) assert result == [ { - "data": { - "scalarList": ["apple", "banana"], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": ["apple", "banana"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": ["coconut"], - "path": ["scalarList"], - } - ], - "completed": [{"path": ["scalarList"]}], + "incremental": [{"items": ["coconut"], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -408,29 +374,19 @@ async def can_stream_multi_dimensional_lists(): ) assert result == [ { - "data": { - "scalarListList": [["apple", "apple", "apple"]], - }, - "pending": [{"path": ["scalarListList"]}], + "data": {"scalarListList": [["apple", "apple", "apple"]]}, + "pending": [{"id": "0", "path": ["scalarListList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [["banana", "banana", "banana"]], - "path": ["scalarListList"], - } - ], + "incremental": [{"items": [["banana", "banana", "banana"]], "id": "0"}], "hasNext": True, }, { "incremental": [ - { - "items": [["coconut", "coconut", "coconut"]], - "path": ["scalarListList"], - } + {"items": [["coconut", "coconut", "coconut"]], "id": "0"} ], - "completed": [{"path": ["scalarListList"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -463,17 +419,12 @@ async def await_friend(f): {"name": "Han", "id": "2"}, ], }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -501,35 +452,20 @@ async def await_friend(f): assert result == [ { "data": {"friendList": []}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Han", "id": "2"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": 
"Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -569,17 +505,12 @@ async def get_id(f): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -620,17 +551,12 @@ async def await_friend(f, i): "path": ["friendList", 1], } ], - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -664,14 +590,14 @@ async def await_friend(f, i): assert result == [ { "data": {"friendList": [{"name": "Luke", "id": "1"}]}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "bad", @@ -684,13 +610,8 @@ async def await_friend(f, i): "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -716,40 +637,22 @@ async def friend_list(_info): assert result == [ { "data": {"friendList": []}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Luke", "id": "1"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Luke", "id": "1"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Han", "id": "2"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Han", "id": "2"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -778,22 +681,14 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"name": "Leia", "id": "3"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -852,7 +747,7 @@ async def friend_list(_info): {"name": "Han", "id": "2"}, ] }, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, }, @@ -860,17 +755,14 @@ async def friend_list(_info): "done": False, "value": { "incremental": [ - { - "items": [{"name": "Leia", "id": "3"}], - "path": ["friendList"], - } + 
{"items": [{"name": "Leia", "id": "3"}], "id": "0"} ], "hasNext": True, }, }, { "done": False, - "value": {"completed": [{"path": ["friendList"]}], "hasNext": False}, + "value": {"completed": [{"id": "0"}], "hasNext": False}, }, {"done": True, "value": None}, ] @@ -924,16 +816,14 @@ async def friend_list(_info): result = await complete(document, {"friendList": friend_list}) assert result == [ { - "data": { - "friendList": [{"name": "Luke", "id": "1"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"name": "Luke", "id": "1"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["friendList"], + "id": "0", "errors": [ { "message": "bad", @@ -964,16 +854,14 @@ async def handles_null_for_non_null_list_items_after_initial_count_is_reached(): ) assert result == [ { - "data": { - "nonNullFriendList": [{"name": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1010,16 +898,14 @@ async def friend_list(_info): result = await complete(document, {"nonNullFriendList": friend_list}) assert result == [ { - "data": { - "nonNullFriendList": [{"name": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"name": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1050,17 +936,15 @@ async def scalar_list(_info): result = await complete(document, {"scalarList": scalar_list}) assert result == [ { - "data": { - "scalarList": ["Luke"], - }, - "pending": [{"path": ["scalarList"]}], + "data": {"scalarList": ["Luke"]}, + "pending": [{"id": "0", "path": ["scalarList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["scalarList"], + "id": "0", "errors": [ { "message": "String cannot represent value: {}", @@ -1070,7 +954,7 @@ async def scalar_list(_info): ], }, ], - "completed": [{"path": ["scalarList"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1104,17 +988,15 @@ def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1127,13 +1009,8 @@ def get_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - }, - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1166,17 +1043,15 @@ def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1189,13 +1064,8 @@ def get_friends(_info): 
"hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - } - ], - "completed": [{"path": ["friendList"]}], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1229,16 +1099,14 @@ def get_friends(_info): ) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1283,13 +1151,13 @@ def get_friends(_info): "data": { "nonNullFriendList": [{"nonNullName": "Luke"}], }, - "pending": [{"path": ["nonNullFriendList"]}], + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1333,17 +1201,15 @@ async def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1356,18 +1222,10 @@ async def get_friends(_info): "hasNext": True, }, { - "incremental": [ - { - "items": [{"nonNullName": "Han"}], - "path": ["friendList"], - }, - ], + "incremental": [{"items": [{"nonNullName": "Han"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -1394,22 +1252,18 @@ async def get_friends(_info): result = await complete( document, - { - "nonNullFriendList": get_friends, - }, + {"nonNullFriendList": get_friends}, ) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1463,16 +1317,14 @@ async def __anext__(self): result = await complete(document, {"nonNullFriendList": async_iterable}) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1533,16 +1385,14 @@ async def aclose(self): result = await complete(document, {"nonNullFriendList": async_iterable}) assert result == [ { - "data": { - "nonNullFriendList": [{"nonNullName": "Luke"}], - }, - "pending": [{"path": ["nonNullFriendList"]}], + "data": {"nonNullFriendList": [{"nonNullName": "Luke"}]}, + "pending": [{"id": "0", "path": ["nonNullFriendList"]}], "hasNext": True, }, { "completed": [ { - "path": ["nonNullFriendList"], + "id": "0", "errors": [ { "message": "Oops", @@ -1593,18 +1443,11 @@ async def friend_list(_info): { "message": "Cannot return null for non-nullable field" " NestedObject.nonNullScalarField.", - "locations": 
[ - { - "line": 4, - "column": 17, - } - ], + "locations": [{"line": 4, "column": 17}], "path": ["nestedObject", "nonNullScalarField"], }, ], - "data": { - "nestedObject": None, - }, + "data": {"nestedObject": None}, } @pytest.mark.asyncio @@ -1644,9 +1487,7 @@ async def friend_list(_info): "path": ["nestedObject", "nonNullScalarField"], }, ], - "data": { - "nestedObject": None, - }, + "data": {"nestedObject": None}, } @pytest.mark.asyncio @@ -1692,8 +1533,8 @@ async def friend_list(_info): "nestedObject": {"nestedFriendList": []}, }, "pending": [ - {"path": ["otherNestedObject"]}, - {"path": ["nestedObject", "nestedFriendList"]}, + {"id": "0", "path": ["otherNestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, ], "hasNext": True, }, @@ -1701,7 +1542,7 @@ async def friend_list(_info): "incremental": [ { "data": {"scalarField": None}, - "path": ["otherNestedObject"], + "id": "0", "errors": [ { "message": "Oops", @@ -1710,18 +1551,12 @@ async def friend_list(_info): }, ], }, - { - "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, + {"items": [{"name": "Luke"}], "id": "1"}, ], - "completed": [{"path": ["otherNestedObject"]}], + "completed": [{"id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "1"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -1764,19 +1599,15 @@ async def friend_list(_info): assert result == [ { - "data": { - "nestedObject": {}, - }, - "pending": [{"path": ["nestedObject"]}], + "data": {"nestedObject": {}}, + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, }, { "incremental": [ { - "data": { - "deeperNestedObject": None, - }, - "path": ["nestedObject"], + "data": {"deeperNestedObject": None}, + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1791,7 +1622,7 @@ async def friend_list(_info): ], }, ], - "completed": [{"path": ["nestedObject"]}], + "completed": [{"id": "0"}], "hasNext": False, }, ] @@ -1828,17 +1659,15 @@ async def friend_list(_info): assert result == [ { - "data": { - "friendList": [], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": []}, + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { "incremental": [ { "items": [None], - "path": ["friendList"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1851,10 +1680,7 @@ async def friend_list(_info): ], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.timeout(1) @@ -1908,7 +1734,7 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"nestedObject": {}}, - "pending": [{"path": ["nestedObject"]}], + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, } @@ -1919,7 +1745,7 @@ async def iterable(_info): "incremental": [ { "data": {"deeperNestedObject": None}, - "path": ["nestedObject"], + "id": "0", "errors": [ { "message": "Cannot return null for non-nullable field" @@ -1934,7 +1760,7 @@ async def iterable(_info): ], }, ], - "completed": [{"path": ["nestedObject"]}], + "completed": [{"id": "0"}], "hasNext": False, } @@ -1976,34 +1802,19 @@ async def get_friends(_info): ) assert result == [ { - "data": { - "friendList": [{"id": "1", "name": "Luke"}], - }, - "pending": [{"path": ["friendList"]}], + "data": {"friendList": [{"id": "1", "name": "Luke"}]}, + 
"pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "3", "name": "Leia"}], - "path": ["friendList"], - } - ], + "incremental": [{"items": [{"id": "3", "name": "Leia"}], "id": "0"}], "hasNext": True, }, - { - "completed": [{"path": ["friendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "0"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -2043,40 +1854,23 @@ async def get_nested_friend_list(_info): assert result == [ { - "data": { - "nestedObject": { - "nestedFriendList": [], - }, - }, + "data": {"nestedObject": {"nestedFriendList": []}}, "pending": [ - {"path": ["nestedObject"]}, - {"path": ["nestedObject", "nestedFriendList"]}, + {"id": "0", "path": ["nestedObject"]}, + {"id": "1", "path": ["nestedObject", "nestedFriendList"]}, ], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "1", "name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], - "completed": [{"path": ["nestedObject"]}], + "incremental": [{"items": [{"id": "1", "name": "Luke"}], "id": "1"}], + "completed": [{"id": "0"}], "hasNext": True, }, { - "incremental": [ - { - "items": [{"id": "2", "name": "Han"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"id": "2", "name": "Han"}], "id": "1"}], "hasNext": True, }, - { - "completed": [{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - }, + {"completed": [{"id": "1"}], "hasNext": False}, ] @pytest.mark.asyncio @@ -2124,48 +1918,32 @@ async def get_friends(_info): result1 = execute_result.initial_result assert result1 == { "data": {"nestedObject": {}}, - "pending": [{"path": ["nestedObject"]}], + "pending": [{"id": "0", "path": ["nestedObject"]}], "hasNext": True, } resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["nestedObject", "nestedFriendList"]}], + "pending": [{"id": "1", "path": ["nestedObject", "nestedFriendList"]}], "incremental": [ - { - "data": {"scalarField": "slow", "nestedFriendList": []}, - "path": ["nestedObject"], - }, + {"data": {"scalarField": "slow", "nestedFriendList": []}, "id": "0"}, ], - "completed": [{"path": ["nestedObject"]}], + "completed": [{"id": "0"}], "hasNext": True, } result3 = await anext(iterator) assert result3.formatted == { - "incremental": [ - { - "items": [{"name": "Luke"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"name": "Luke"}], "id": "1"}], "hasNext": True, } result4 = await anext(iterator) assert result4.formatted == { - "incremental": [ - { - "items": [{"name": "Han"}], - "path": ["nestedObject", "nestedFriendList"], - }, - ], + "incremental": [{"items": [{"name": "Han"}], "id": "1"}], "hasNext": True, } result5 = await anext(iterator) - assert result5.formatted == { - "completed": [{"path": ["nestedObject", "nestedFriendList"]}], - "hasNext": False, - } + assert result5.formatted == {"completed": [{"id": "1"}], "hasNext": False} with pytest.raises(StopAsyncIteration): await anext(iterator) @@ -2214,8 +1992,8 @@ async def get_friends(_info): assert result1 == { "data": {"friendList": [{"id": "1"}]}, "pending": [ - {"path": ["friendList", 0], "label": "DeferName"}, - {"path": ["friendList"], "label": "stream-label"}, + {"id": "0", "path": ["friendList", 0], "label": 
"DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, ], "hasNext": True, } @@ -2223,41 +2001,25 @@ async def get_friends(_info): resolve_iterable.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["friendList", 1], "label": "DeferName"}], + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["friendList", 0], - }, - { - "items": [{"id": "2"}], - "path": ["friendList"], - }, + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": "1"}, ], - "completed": [{"path": ["friendList", 0], "label": "DeferName"}], + "completed": [{"id": "0"}], "hasNext": True, } resolve_slow_field.set() result3 = await anext(iterator) assert result3.formatted == { - "completed": [ - { - "path": ["friendList"], - "label": "stream-label", - }, - ], + "completed": [{"id": "1"}], "hasNext": True, } result4 = await anext(iterator) assert result4.formatted == { - "incremental": [ - { - "data": {"name": "Han"}, - "path": ["friendList", 1], - }, - ], - "completed": [{"path": ["friendList", 1], "label": "DeferName"}], + "incremental": [{"data": {"name": "Han"}, "id": "2"}], + "completed": [{"id": "2"}], "hasNext": False, } @@ -2307,8 +2069,8 @@ async def get_friends(_info): assert result1 == { "data": {"friendList": [{"id": "1"}]}, "pending": [ - {"path": ["friendList", 0], "label": "DeferName"}, - {"path": ["friendList"], "label": "stream-label"}, + {"id": "0", "path": ["friendList", 0], "label": "DeferName"}, + {"id": "1", "path": ["friendList"], "label": "stream-label"}, ], "hasNext": True, } @@ -2316,37 +2078,28 @@ async def get_friends(_info): resolve_slow_field.set() result2 = await anext(iterator) assert result2.formatted == { - "pending": [{"path": ["friendList", 1], "label": "DeferName"}], + "pending": [{"id": "2", "path": ["friendList", 1], "label": "DeferName"}], "incremental": [ - { - "data": {"name": "Luke"}, - "path": ["friendList", 0], - }, - { - "items": [{"id": "2"}], - "path": ["friendList"], - }, + {"data": {"name": "Luke"}, "id": "0"}, + {"items": [{"id": "2"}], "id": "1"}, ], - "completed": [{"path": ["friendList", 0], "label": "DeferName"}], + "completed": [{"id": "0"}], "hasNext": True, } result3 = await anext(iterator) assert result3.formatted == { "incremental": [ - { - "data": {"name": "Han"}, - "path": ["friendList", 1], - }, + {"data": {"name": "Han"}, "id": "2"}, ], - "completed": [{"path": ["friendList", 1], "label": "DeferName"}], + "completed": [{"id": "2"}], "hasNext": True, } resolve_iterable.set() result4 = await anext(iterator) assert result4.formatted == { - "completed": [{"path": ["friendList"], "label": "stream-label"}], + "completed": [{"id": "1"}], "hasNext": False, } @@ -2385,7 +2138,10 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1"}]}, - "pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "pending": [ + {"id": "0", "path": ["friendList", 0]}, + {"id": "1", "path": ["friendList"]}, + ], "hasNext": True, } @@ -2434,7 +2190,7 @@ async def __anext__(self): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1", "name": "Luke"}]}, - "pending": [{"path": ["friendList"]}], + "pending": [{"id": "0", "path": ["friendList"]}], "hasNext": True, } @@ -2476,7 +2232,10 @@ async def iterable(_info): result1 = execute_result.initial_result assert result1 == { "data": {"friendList": [{"id": "1"}]}, - 
"pending": [{"path": ["friendList", 0]}, {"path": ["friendList"]}], + "pending": [ + {"id": "0", "path": ["friendList", 0]}, + {"id": "1", "path": ["friendList"]}, + ], "hasNext": True, } From 1285cd4baca467d721cdc637b1088c66f112e6e6 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:48:38 +0100 Subject: [PATCH 31/50] skip unnecessary initialization of empty items array (#3962) Replicates graphql/graphql-js@b12dcffe83098922dcc6c0ec94eb6fc032bd9772 --- src/graphql/execution/incremental_publisher.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/incremental_publisher.py b/src/graphql/execution/incremental_publisher.py index d112651e..839f62d8 100644 --- a/src/graphql/execution/incremental_publisher.py +++ b/src/graphql/execution/incremental_publisher.py @@ -1017,6 +1017,8 @@ def _process_pending( if subsequent_result_record.stream_record.errors: continue incremental_result = IncrementalStreamResult( + # safe because `items` is always defined + # when the record is completed subsequent_result_record.items, # safe because `id` is defined # once the stream has been released as pending @@ -1068,6 +1070,7 @@ def _get_incremental_defer_result( sub_path = deferred_grouped_field_set_record.path[len(longest_path) :] id_ = record_with_longest_path.id return IncrementalDeferResult( + # safe because `data` is always defined when the record is completed data, # type: ignore # safe because `id` is defined # once the fragment has been released as pending @@ -1298,7 +1301,6 @@ def __init__( self.errors = [] self.is_completed_async_iterator = self.is_completed = False self.is_final_record = self.filtered = False - self.items = [] def __repr__(self) -> str: name = self.__class__.__name__ From a070e4154bc0f1f68086b5a160eadac0b4d26f8d Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 19:55:41 +0100 Subject: [PATCH 32/50] Improve description for @oneOf directive Replicates graphql/graphql-js@acf05e365dc30b718712261b886cf7d1462ca28a --- src/graphql/type/directives.py | 5 +++-- tests/utilities/test_print_schema.py | 4 +++- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index 5fe48b94..b73d938f 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -261,12 +261,13 @@ def assert_directive(directive: Any) -> GraphQLDirective: description="Exposes a URL that specifies the behavior of this scalar.", ) -# Used to declare an Input Object as a OneOf Input Objects. +# Used to indicate an Input Object is a OneOf Input Object. GraphQLOneOfDirective = GraphQLDirective( name="oneOf", locations=[DirectiveLocation.INPUT_OBJECT], args={}, - description="Indicates an Input Object is a OneOf Input Object.", + description="Indicates exactly one field must be supplied" + " and this field must not be `null`.", ) specified_directives: tuple[GraphQLDirective, ...] = ( diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index 0e96bbbc..b12d30dc 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -771,7 +771,9 @@ def prints_introspection_schema(): url: String! ) on SCALAR - """Indicates an Input Object is a OneOf Input Object.""" + """ + Indicates exactly one field must be supplied and this field must not be `null`. 
+ """ directive @oneOf on INPUT_OBJECT """ From facce8736b40eea4eda6fa827d80951c3e88a333 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:04:15 +0100 Subject: [PATCH 33/50] polish: improve add_deferred_fragments readability Replicates graphql/graphql-js@d32b99d003f8560cf0f878443fab1446f1adf20c --- src/graphql/execution/execute.py | 28 +++++++++++++++++++++++++++- 1 file changed, 27 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index ac041392..174acadd 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1925,15 +1925,38 @@ def add_new_deferred_fragments( defer_map: RefMap[DeferUsage, DeferredFragmentRecord] | None = None, path: Path | None = None, ) -> RefMap[DeferUsage, DeferredFragmentRecord]: - """Add new deferred fragments to the defer map.""" + """Add new deferred fragments to the defer map. + + Instantiates new DeferredFragmentRecords for the given path within an + incremental data record, returning an updated map of DeferUsage + objects to DeferredFragmentRecords. + + Note: As defer directives may be used with operations returning lists, + a DeferUsage object may correspond to many DeferredFragmentRecords. + + DeferredFragmentRecord creation includes the following steps: + 1. The new DeferredFragmentRecord is instantiated at the given path. + 2. The parent result record is calculated from the given incremental data record. + 3. The IncrementalPublisher is notified that a new DeferredFragmentRecord + with the calculated parent has been added; the record will be released only + after the parent has completed. + """ new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] if not new_defer_usages: + # Given no DeferUsages, return the existing map, creating one if necessary. return RefMap() if defer_map is None else defer_map new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + # For each new DeferUsage object: for defer_usage in new_defer_usages: ancestors = defer_usage.ancestors parent_defer_usage = ancestors[0] if ancestors else None + # If the parent target is defined, the parent target is a DeferUsage object + # and the parent result record is the DeferredFragmentRecord corresponding + # to that DeferUsage. + # If the parent target is not defined, the parent result record is either: + # - the InitialResultRecord, or + # - a StreamItemsRecord, as `@defer` may be nested under `@stream`. parent = ( cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) if parent_defer_usage is None @@ -1942,12 +1965,15 @@ def add_new_deferred_fragments( ) ) + # Instantiate the new record. deferred_fragment_record = DeferredFragmentRecord(path, defer_usage.label) + # Report the new record to the Incremental Publisher. incremental_publisher.report_new_defer_fragment_record( deferred_fragment_record, parent ) + # Update the map. 
new_defer_map[defer_usage] = deferred_fragment_record return new_defer_map From 965502c61ef312f4228ff4a9468e20cf65a42fe7 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:14:44 +0100 Subject: [PATCH 34/50] Remove an unnecessary declaration --- src/graphql/execution/execute.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 174acadd..d52eac33 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -1941,11 +1941,13 @@ def add_new_deferred_fragments( with the calculated parent has been added; the record will be released only after the parent has completed. """ - new_defer_map: RefMap[DeferUsage, DeferredFragmentRecord] if not new_defer_usages: # Given no DeferUsages, return the existing map, creating one if necessary. return RefMap() if defer_map is None else defer_map + + # Create a copy of the old map. new_defer_map = RefMap() if defer_map is None else RefMap(defer_map.items()) + # For each new DeferUsage object: for defer_usage in new_defer_usages: ancestors = defer_usage.ancestors From c685d84f15b01cd5594cc7b962631a24f14a2793 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 20:24:56 +0100 Subject: [PATCH 35/50] Update dependencies --- poetry.lock | 81 ++++++++++++++++++++++++++++++++------------------ pyproject.toml | 2 +- 2 files changed, 53 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2208d903..20a0ff50 100644 --- a/poetry.lock +++ b/poetry.lock @@ -58,13 +58,13 @@ files = [ [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, ] [[package]] @@ -741,6 +741,29 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] +[[package]] +name = "importlib-metadata" +version = "8.6.1" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +files = [ + {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, + {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + [[package]] name = "iniconfig" version = "2.0.0" @@ -1520,29 +1543,29 @@ 
jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.2" +version = "0.9.3" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.2-py3-none-linux_armv6l.whl", hash = "sha256:80605a039ba1454d002b32139e4970becf84b5fee3a3c3bf1c2af6f61a784347"}, - {file = "ruff-0.9.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b9aab82bb20afd5f596527045c01e6ae25a718ff1784cb92947bff1f83068b00"}, - {file = "ruff-0.9.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:fbd337bac1cfa96be615f6efcd4bc4d077edbc127ef30e2b8ba2a27e18c054d4"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b35259b0cbf8daa22a498018e300b9bb0174c2bbb7bcba593935158a78054d"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8b6a9701d1e371bf41dca22015c3f89769da7576884d2add7317ec1ec8cb9c3c"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9cc53e68b3c5ae41e8faf83a3b89f4a5d7b2cb666dff4b366bb86ed2a85b481f"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:8efd9da7a1ee314b910da155ca7e8953094a7c10d0c0a39bfde3fcfd2a015684"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3292c5a22ea9a5f9a185e2d131dc7f98f8534a32fb6d2ee7b9944569239c648d"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1a605fdcf6e8b2d39f9436d343d1f0ff70c365a1e681546de0104bef81ce88df"}, - {file = "ruff-0.9.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c547f7f256aa366834829a08375c297fa63386cbe5f1459efaf174086b564247"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d18bba3d3353ed916e882521bc3e0af403949dbada344c20c16ea78f47af965e"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b338edc4610142355ccf6b87bd356729b62bf1bc152a2fad5b0c7dc04af77bfe"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:492a5e44ad9b22a0ea98cf72e40305cbdaf27fac0d927f8bc9e1df316dcc96eb"}, - {file = "ruff-0.9.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:af1e9e9fe7b1f767264d26b1075ac4ad831c7db976911fa362d09b2d0356426a"}, - {file = "ruff-0.9.2-py3-none-win32.whl", hash = "sha256:71cbe22e178c5da20e1514e1e01029c73dc09288a8028a5d3446e6bba87a5145"}, - {file = "ruff-0.9.2-py3-none-win_amd64.whl", hash = "sha256:c5e1d6abc798419cf46eed03f54f2e0c3adb1ad4b801119dedf23fcaf69b55b5"}, - {file = "ruff-0.9.2-py3-none-win_arm64.whl", hash = "sha256:a1b63fa24149918f8b37cef2ee6fff81f24f0d74b6f0bdc37bc3e1f2143e41c6"}, - {file = "ruff-0.9.2.tar.gz", hash = "sha256:b5eceb334d55fae5f316f783437392642ae18e16dcf4f1858d55d3c2a0f8f5d0"}, + {file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"}, + {file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"}, + {file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = 
"sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"}, + {file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"}, + {file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"}, + {file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"}, + {file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"}, + {file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"}, + {file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"}, ] [[package]] @@ -1869,13 +1892,13 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.23.2" +version = "4.24.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, - {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, + {file = "tox-4.24.1-py3-none-any.whl", hash = "sha256:57ba7df7d199002c6df8c2db9e6484f3de6ca8f42013c083ea2d4d1e5c6bdc75"}, + {file = "tox-4.24.1.tar.gz", hash = "sha256:083a720adbc6166fff0b7d1df9d154f9d00bfccb9403b8abf6bc0ee435d6a62e"}, ] [package.dependencies] @@ -1883,13 +1906,13 @@ cachetools = ">=5.5" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.16.1" -packaging = ">=24.1" +packaging = ">=24.2" platformdirs = ">=4.3.6" pluggy = ">=1.5" pyproject-api = ">=1.8" -tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +tomli = {version = ">=2.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.26.6" +virtualenv = ">=20.27.1" [package.extras] test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] @@ -2097,4 +2120,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "37c41caf594570c2c84273ca5abc41ab2ec53d4e05a7bf6440b3e10e6de122d7" +content-hash = 
"97f4c031d7769c7bad6adc5b4dfee58549dd3a445f991960527ec5e1212449b6" diff --git a/pyproject.toml b/pyproject.toml index bc191f97..0b0fcf5d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -77,7 +77,7 @@ pytest-codspeed = [ { version = "^2.2.1", python = "<3.8" } ] tox = [ - { version = "^4.16", python = ">=3.8" }, + { version = "^4.24", python = ">=3.8" }, { version = "^3.28", python = "<3.8" } ] From 38186538e406cc588af34696b62da9ed9152d28b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 21:16:02 +0100 Subject: [PATCH 36/50] Allow injecting custom data to custom execution context (#226) --- src/graphql/execution/execute.py | 28 ++++++++++++++-------------- tests/execution/test_customize.py | 16 +++++++++++++++- 2 files changed, 29 insertions(+), 15 deletions(-) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index d52eac33..90d3d73b 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -4,6 +4,7 @@ from asyncio import ensure_future, gather, shield, wait_for from contextlib import suppress +from copy import copy from typing import ( Any, AsyncGenerator, @@ -219,6 +220,7 @@ def build( subscribe_field_resolver: GraphQLFieldResolver | None = None, middleware: Middleware | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_args: Any, ) -> list[GraphQLError] | ExecutionContext: """Build an execution context @@ -292,24 +294,14 @@ def build( IncrementalPublisher(), middleware_manager, is_awaitable, + **custom_args, ) def build_per_event_execution_context(self, payload: Any) -> ExecutionContext: """Create a copy of the execution context for usage with subscribe events.""" - return self.__class__( - self.schema, - self.fragments, - payload, - self.context_value, - self.operation, - self.variable_values, - self.field_resolver, - self.type_resolver, - self.subscribe_field_resolver, - self.incremental_publisher, - self.middleware_manager, - self.is_awaitable, - ) + context = copy(self) + context.root_value = payload + return context def execute_operation( self, initial_result_record: InitialResultRecord @@ -1709,6 +1701,7 @@ def execute( middleware: Middleware | None = None, execution_context_class: type[ExecutionContext] | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[ExecutionResult]: """Execute a GraphQL operation. @@ -1741,6 +1734,7 @@ def execute( middleware, execution_context_class, is_awaitable, + **custom_context_args, ) if isinstance(result, ExecutionResult): return result @@ -1769,6 +1763,7 @@ def experimental_execute_incrementally( middleware: Middleware | None = None, execution_context_class: type[ExecutionContext] | None = None, is_awaitable: Callable[[Any], bool] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[ExecutionResult | ExperimentalIncrementalExecutionResults]: """Execute GraphQL operation incrementally (internal implementation). @@ -1797,6 +1792,7 @@ def experimental_execute_incrementally( subscribe_field_resolver, middleware, is_awaitable, + **custom_context_args, ) # Return early errors if execution context failed. @@ -2127,6 +2123,7 @@ def subscribe( subscribe_field_resolver: GraphQLFieldResolver | None = None, execution_context_class: type[ExecutionContext] | None = None, middleware: MiddlewareManager | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[AsyncIterator[ExecutionResult] | ExecutionResult]: """Create a GraphQL subscription. 
@@ -2167,6 +2164,7 @@ def subscribe( type_resolver, subscribe_field_resolver, middleware=middleware, + **custom_context_args, ) # Return early errors if execution context failed. @@ -2202,6 +2200,7 @@ def create_source_event_stream( type_resolver: GraphQLTypeResolver | None = None, subscribe_field_resolver: GraphQLFieldResolver | None = None, execution_context_class: type[ExecutionContext] | None = None, + **custom_context_args: Any, ) -> AwaitableOrValue[AsyncIterable[Any] | ExecutionResult]: """Create source event stream @@ -2238,6 +2237,7 @@ def create_source_event_stream( field_resolver, type_resolver, subscribe_field_resolver, + **custom_context_args, ) # Return early errors if execution context failed. diff --git a/tests/execution/test_customize.py b/tests/execution/test_customize.py index ac8b9ae1..bf1859a2 100644 --- a/tests/execution/test_customize.py +++ b/tests/execution/test_customize.py @@ -43,6 +43,10 @@ def uses_a_custom_execution_context_class(): ) class TestExecutionContext(ExecutionContext): + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + def execute_field( self, parent_type, @@ -62,7 +66,12 @@ def execute_field( ) return result * 2 # type: ignore - assert execute(schema, query, execution_context_class=TestExecutionContext) == ( + assert execute( + schema, + query, + execution_context_class=TestExecutionContext, + custom_arg="baz", + ) == ( {"foo": "barbar"}, None, ) @@ -101,6 +110,10 @@ async def custom_foo(): @pytest.mark.asyncio async def uses_a_custom_execution_context_class(): class TestExecutionContext(ExecutionContext): + def __init__(self, *args, **kwargs): + assert kwargs.pop("custom_arg", None) == "baz" + super().__init__(*args, **kwargs) + def build_resolve_info(self, *args, **kwargs): resolve_info = super().build_resolve_info(*args, **kwargs) resolve_info.context["foo"] = "bar" @@ -132,6 +145,7 @@ def resolve_foo(message, _info): document, context_value={}, execution_context_class=TestExecutionContext, + custom_arg="baz", ) assert isasyncgen(subscription) From 41058d7fb26bc3231e718a1d0a411ebca53e1004 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 21:35:08 +0100 Subject: [PATCH 37/50] Improve and update badges --- README.md | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index fa10c81c..00927a42 100644 --- a/README.md +++ b/README.md @@ -6,9 +6,10 @@ a query language for APIs created by Facebook. 
[![PyPI version](https://badge.fury.io/py/graphql-core.svg)](https://badge.fury.io/py/graphql-core) [![Documentation Status](https://readthedocs.org/projects/graphql-core-3/badge/)](https://graphql-core-3.readthedocs.io) -![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg) -![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg) -[![Code Style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/ambv/black) +[![Test Status](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/test.yml) +[![Lint Status](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml/badge.svg)](https://github.com/graphql-python/graphql-core/actions/workflows/lint.yml) +[![CodSpeed](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/graphql-python/graphql-core) +[![Code style](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) An extensive test suite with over 2200 unit tests and 100% coverage replicates the complete test suite of GraphQL.js, ensuring that this port is reliable and compatible From 41da78af788b6fad6f6c46f9ca495f4091474106 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 23:27:32 +0100 Subject: [PATCH 38/50] Deep copy schema with directive with arg of custom type (#210) --- src/graphql/type/schema.py | 22 +++++++++++++++++----- tests/utilities/test_build_ast_schema.py | 19 +++++++++++++++++++ 2 files changed, 36 insertions(+), 5 deletions(-) diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index 3099991d..befefabd 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -21,6 +21,7 @@ GraphQLAbstractType, GraphQLCompositeType, GraphQLField, + GraphQLInputType, GraphQLInterfaceType, GraphQLNamedType, GraphQLObjectType, @@ -293,6 +294,8 @@ def __deepcopy__(self, memo_: dict) -> GraphQLSchema: directive if is_specified_directive(directive) else copy(directive) for directive in self.directives ] + for directive in directives: + remap_directive(directive, type_map) return self.__class__( self.query_type and cast(GraphQLObjectType, type_map[self.query_type.name]), self.mutation_type @@ -458,11 +461,7 @@ def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: """Change all references in the given named type to use this type map.""" - if is_union_type(type_): - type_.types = [ - type_map.get(member_type.name, member_type) for member_type in type_.types - ] - elif is_object_type(type_) or is_interface_type(type_): + if is_object_type(type_) or is_interface_type(type_): type_.interfaces = [ type_map.get(interface_type.name, interface_type) for interface_type in type_.interfaces @@ -477,9 +476,22 @@ def remap_named_type(type_: GraphQLNamedType, type_map: TypeMap) -> None: arg.type = remapped_type(arg.type, type_map) args[arg_name] = arg fields[field_name] = field + elif is_union_type(type_): + type_.types = [ + type_map.get(member_type.name, member_type) for member_type in type_.types + ] elif is_input_object_type(type_): fields = type_.fields for field_name, field in fields.items(): field = copy(field) # noqa: PLW2901 field.type = remapped_type(field.type, type_map) fields[field_name] = field + + +def remap_directive(directive: GraphQLDirective, 
type_map: TypeMap) -> None: + """Change all references in the given directive to use this type map.""" + args = directive.args + for arg_name, arg in args.items(): + arg = copy(arg) # noqa: PLW2901 + arg.type = cast(GraphQLInputType, remapped_type(arg.type, type_map)) + args[arg_name] = arg diff --git a/tests/utilities/test_build_ast_schema.py b/tests/utilities/test_build_ast_schema.py index d4c2dff9..d0196bd7 100644 --- a/tests/utilities/test_build_ast_schema.py +++ b/tests/utilities/test_build_ast_schema.py @@ -1222,6 +1222,25 @@ def can_deep_copy_schema(): # check that printing the copied schema gives the same SDL assert print_schema(copied) == sdl + def can_deep_copy_schema_with_directive_using_args_of_custom_type(): + sdl = dedent(""" + directive @someDirective(someArg: SomeEnum) on FIELD_DEFINITION + + enum SomeEnum { + ONE + TWO + } + + type Query { + someField: String @someDirective(someArg: ONE) + } + """) + schema = build_schema(sdl) + copied = deepcopy(schema) + # custom directives on field definitions cannot be reproduced + expected_sdl = sdl.replace(" @someDirective(someArg: ONE)", "") + assert print_schema(copied) == expected_sdl + def can_pickle_and_unpickle_star_wars_schema(): # create a schema from the star wars SDL schema = build_schema(sdl, assume_valid_sdl=True) From ab78551fb6084bb64df4bbeeef1d7df00974857b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 25 Jan 2025 23:34:09 +0100 Subject: [PATCH 39/50] Update year of copyright --- docs/conf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index 1d7afde0..aa88b282 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -50,7 +50,7 @@ # General information about the project. project = "GraphQL-core 3" -copyright = "2024, Christoph Zwerschke" +copyright = "2025, Christoph Zwerschke" author = "Christoph Zwerschke" # The version info for the project you're documenting, acts as replacement for From 1c11f15328ff8425a7bb63054c3b379c0d7739bc Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:07:39 +0100 Subject: [PATCH 40/50] Fix docstrings --- src/graphql/type/definition.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 480c1879..5b48c8b4 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -1284,7 +1284,7 @@ class GraphQLInputObjectType(GraphQLNamedType): Example:: - NonNullFloat = GraphQLNonNull(GraphQLFloat()) + NonNullFloat = GraphQLNonNull(GraphQLFloat) class GeoPoint(GraphQLInputObjectType): name = 'GeoPoint' @@ -1292,7 +1292,7 @@ class GeoPoint(GraphQLInputObjectType): 'lat': GraphQLInputField(NonNullFloat), 'lon': GraphQLInputField(NonNullFloat), 'alt': GraphQLInputField( - GraphQLFloat(), default_value=0) + GraphQLFloat, default_value=0) } The outbound values will be Python dictionaries by default, but you can have them @@ -1511,7 +1511,7 @@ class GraphQLNonNull(GraphQLWrappingType[GNT_co]): class RowType(GraphQLObjectType): name = 'Row' fields = { - 'id': GraphQLField(GraphQLNonNull(GraphQLString())) + 'id': GraphQLField(GraphQLNonNull(GraphQLString)) } Note: the enforcement of non-nullability occurs within the executor. 
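Editor's aside on the docstring fix above: it works because the specified scalar types in graphql-core (GraphQLFloat, GraphQLString, etc.) are ready-made GraphQLScalarType singleton instances, not classes, so they are passed as-is and only ever wrapped, never called. A minimal runnable sketch of the corrected pattern, written in the plain constructor form equivalent to the class-style example in the docstring and assuming only the public exports of the `graphql` package:

from graphql import (
    GraphQLFloat,
    GraphQLInputField,
    GraphQLInputObjectType,
    GraphQLNonNull,
)

# GraphQLFloat is already an instance; wrap it, do not call it.
NonNullFloat = GraphQLNonNull(GraphQLFloat)

GeoPoint = GraphQLInputObjectType(
    "GeoPoint",
    {
        "lat": GraphQLInputField(NonNullFloat),
        "lon": GraphQLInputField(NonNullFloat),
        # default_value is used when the field is omitted from the input
        "alt": GraphQLInputField(GraphQLFloat, default_value=0),
    },
)

Writing GraphQLFloat() instead, as the old docstrings suggested, raises a TypeError, since GraphQLScalarType instances are not callable.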
From d3c03e638487984e2d7fff67473bc123a51f6ee4 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:10:58 +0100 Subject: [PATCH 41/50] Newer Python version should use newer tox versions --- .github/workflows/test.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 77f15bf1..581528cc 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -22,7 +22,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install "tox>=3.28,<5" "tox-gh-actions>=3.2,<4" + pip install "tox>=4.24,<5" "tox-gh-actions>=3.2,<4" - name: Run unit tests with tox run: tox From d4f8b32410c8e56fd76eb10c50105ab3a9fa5a60 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 15:51:54 +0100 Subject: [PATCH 42/50] Fix issue with older tox versions --- tox.ini | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tox.ini b/tox.ini index 7f2e07d4..b7ee6b8c 100644 --- a/tox.ini +++ b/tox.ini @@ -47,9 +47,9 @@ deps = pytest-cov>=4.1,<7 pytest-describe>=2.2,<3 pytest-timeout>=2.3,<3 - py3{7,8,9}, pypy39: typing-extensions>=4.7.1,<5 + py3{7,8,9},pypy39: typing-extensions>=4.7.1,<5 commands = # to also run the time-consuming tests: tox -e py312 -- --run-slow # to run the benchmarks: tox -e py312 -- -k benchmarks --benchmark-enable - py3{7,8,9,10,11,13}, pypy3{9,10}: pytest tests {posargs} + py3{7,8,9,10,11,13},pypy3{9,10}: pytest tests {posargs} py312: pytest tests {posargs: --cov-report=term-missing --cov=graphql --cov=tests --cov-fail-under=100} From 651ca5ceca8bb7c7cc7d8bb4fa0a545399e03854 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sun, 26 Jan 2025 16:29:36 +0100 Subject: [PATCH 43/50] Transform input objects used as default values (#206) --- src/graphql/execution/values.py | 17 +++++-- tests/execution/test_resolve.py | 87 +++++++++++++++++++++++++++++++++ 2 files changed, 100 insertions(+), 4 deletions(-) diff --git a/src/graphql/execution/values.py b/src/graphql/execution/values.py index fda472de..5309996a 100644 --- a/src/graphql/execution/values.py +++ b/src/graphql/execution/values.py @@ -26,6 +26,7 @@ GraphQLDirective, GraphQLField, GraphQLSchema, + is_input_object_type, is_input_type, is_non_null_type, ) @@ -171,8 +172,12 @@ def get_argument_values( argument_node = arg_node_map.get(name) if argument_node is None: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( f"Argument '{name}' of required type '{arg_type}' was not provided." 
@@ -186,8 +191,12 @@ def get_argument_values( if isinstance(value_node, VariableNode): variable_name = value_node.name.value if variable_values is None or variable_name not in variable_values: - if arg_def.default_value is not Undefined: - coerced_values[arg_def.out_name or name] = arg_def.default_value + value = arg_def.default_value + if value is not Undefined: + if is_input_object_type(arg_def.type): + # coerce input value so that out_names are used + value = coerce_input_value(value, arg_def.type) + coerced_values[arg_def.out_name or name] = value elif is_non_null_type(arg_type): # pragma: no cover else msg = ( f"Argument '{name}' of required type '{arg_type}'" diff --git a/tests/execution/test_resolve.py b/tests/execution/test_resolve.py index 1c77af8b..db52d638 100644 --- a/tests/execution/test_resolve.py +++ b/tests/execution/test_resolve.py @@ -7,9 +7,11 @@ from graphql.type import ( GraphQLArgument, GraphQLField, + GraphQLID, GraphQLInputField, GraphQLInputObjectType, GraphQLInt, + GraphQLList, GraphQLObjectType, GraphQLSchema, GraphQLString, @@ -213,6 +215,91 @@ def execute_query(query: str, root_value: Any = None) -> ExecutionResult: None, ) + def transforms_default_values_using_out_names(): + # This is an extension of GraphQL.js. + resolver_kwargs: Any + + def search_resolver(_obj: None, _info, **kwargs): + nonlocal resolver_kwargs + resolver_kwargs = kwargs + return [{"id": "42"}] + + filters_type = GraphQLInputObjectType( + "SearchFilters", + {"pageSize": GraphQLInputField(GraphQLInt, out_name="page_size")}, + ) + result_type = GraphQLObjectType("SearchResult", {"id": GraphQLField(GraphQLID)}) + query = GraphQLObjectType( + "Query", + { + "search": GraphQLField( + GraphQLList(result_type), + { + "searchFilters": GraphQLArgument( + filters_type, {"pageSize": 10}, out_name="search_filters" + ) + }, + resolve=search_resolver, + ) + }, + ) + schema = GraphQLSchema(query) + + resolver_kwargs = None + result = execute_sync(schema, parse("{ search { id } }")) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, parse("{ search(searchFilters:{pageSize: 25}) { id } }") + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 10}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + variable_values={"searchFilters": {"pageSize": 25}}, + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + + resolver_kwargs = None + result = execute_sync( + schema, + parse( + """ + query ($searchFilters: SearchFilters = {pageSize: 25}) { + search(searchFilters: $searchFilters) { id } + } + """ + ), + ) + assert result == ({"search": [{"id": "42"}]}, None) + assert resolver_kwargs == {"search_filters": {"page_size": 25}} + def pass_error_from_resolver_wrapped_as_located_graphql_error(): def resolve(_obj, _info): raise ValueError("Some error") From 44334f30f5e0cd9ecb7995a38548f2dc2a728f9d Mon Sep 17 00:00:00 2001 From: 
Christoph Zwerschke Date: Sun, 26 Jan 2025 18:02:41 +0100 Subject: [PATCH 44/50] Bump patch version and update README --- .bumpversion.cfg | 2 +- README.md | 8 ++++---- docs/conf.py | 2 +- pyproject.toml | 2 +- src/graphql/version.py | 4 ++-- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.bumpversion.cfg b/.bumpversion.cfg index e2aa0e98..e8560a6a 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 3.3.0a6 +current_version = 3.3.0a7 commit = False tag = False diff --git a/README.md b/README.md index 00927a42..58b57b1f 100644 --- a/README.md +++ b/README.md @@ -11,15 +11,15 @@ a query language for APIs created by Facebook. [![CodSpeed](https://img.shields.io/endpoint?url=https://codspeed.io/badge.json)](https://codspeed.io/graphql-python/graphql-core) [![Code style](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) -An extensive test suite with over 2200 unit tests and 100% coverage replicates the +An extensive test suite with over 2500 unit tests and 100% coverage replicates the complete test suite of GraphQL.js, ensuring that this port is reliable and compatible with GraphQL.js. -The current stable version 3.2.5 of GraphQL-core is up-to-date with GraphQL.js +The current stable version 3.2.6 of GraphQL-core is up-to-date with GraphQL.js version 16.8.2 and supports Python versions 3.6 to 3.13. -You can also try out the latest alpha version 3.3.0a6 of GraphQL-core, -which is up-to-date with GraphQL.js version 17.0.0a2. +You can also try out the latest alpha version 3.3.0a7 of GraphQL-core, +which is up-to-date with GraphQL.js version 17.0.0a3. Please note that this new minor version of GraphQL-core does not support Python 3.6 anymore. diff --git a/docs/conf.py b/docs/conf.py index aa88b282..e78359fe 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -60,7 +60,7 @@ # The short X.Y version. # version = '3.3' # The full version, including alpha/beta/rc tags. -version = release = "3.3.0a6" +version = release = "3.3.0a7" # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
diff --git a/pyproject.toml b/pyproject.toml index 0b0fcf5d..1dbd6636 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "graphql-core" -version = "3.3.0a6" +version = "3.3.0a7" description = """\ GraphQL-core is a Python port of GraphQL.js,\ the JavaScript reference implementation for GraphQL.""" diff --git a/src/graphql/version.py b/src/graphql/version.py index 7b08ac67..311c74a0 100644 --- a/src/graphql/version.py +++ b/src/graphql/version.py @@ -8,9 +8,9 @@ __all__ = ["version", "version_info", "version_info_js", "version_js"] -version = "3.3.0a6" +version = "3.3.0a7" -version_js = "17.0.0a2" +version_js = "17.0.0a3" _re_version = re.compile(r"(\d+)\.(\d+)\.(\d+)(\D*)(\d*)") From dae8e8f4b71c02bc17eec1df9311ddf256ed342c Mon Sep 17 00:00:00 2001 From: sobolevn Date: Mon, 17 Feb 2025 23:00:32 +0300 Subject: [PATCH 45/50] Fix IntrospectionQuery type definition (#234) --- src/graphql/utilities/get_introspection_query.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index c23a1533..7b8c33bb 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -302,7 +302,8 @@ class IntrospectionSchema(MaybeWithDescription): directives: list[IntrospectionDirective] -class IntrospectionQuery(TypedDict): - """The root typed dictionary for schema introspections.""" - - __schema: IntrospectionSchema +# The root typed dictionary for schema introspections. +IntrospectionQuery = TypedDict( # noqa: UP013 + "IntrospectionQuery", + {"__schema": IntrospectionSchema}, +) From 416247c1d511350445c23096f9491fbef424b69b Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Mon, 17 Feb 2025 21:09:48 +0100 Subject: [PATCH 46/50] Fix mypy issues --- poetry.lock | 320 +++++++++--------- pyproject.toml | 3 +- src/graphql/type/definition.py | 6 +- .../utilities/get_introspection_query.py | 1 + tox.ini | 2 +- 5 files changed, 173 insertions(+), 159 deletions(-) diff --git a/poetry.lock b/poetry.lock index 20a0ff50..167d9627 100644 --- a/poetry.lock +++ b/poetry.lock @@ -30,20 +30,20 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "babel" -version = "2.16.0" +version = "2.17.0" description = "Internationalization utilities" optional = false python-versions = ">=3.8" files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, + {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"}, + {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"] [[package]] name = "bump2version" @@ -69,13 +69,13 @@ files = [ [[package]] name = "certifi" -version = "2024.12.14" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." 
From 416247c1d511350445c23096f9491fbef424b69b Mon Sep 17 00:00:00 2001
From: Christoph Zwerschke
Date: Mon, 17 Feb 2025 21:09:48 +0100
Subject: [PATCH 46/50] Fix mypy issues

---
 poetry.lock                                      | 320 +++++++++---------
 pyproject.toml                                   |   3 +-
 src/graphql/type/definition.py                   |   6 +-
 .../utilities/get_introspection_query.py         |   1 +
 tox.ini                                          |   2 +-
 5 files changed, 173 insertions(+), 159 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 20a0ff50..167d9627 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -30,20 +30,20 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]

 [[package]]
 name = "babel"
-version = "2.16.0"
+version = "2.17.0"
 description = "Internationalization utilities"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"},
-    {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"},
+    {file = "babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2"},
+    {file = "babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d"},
 ]

 [package.dependencies]
 pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""}

 [package.extras]
-dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"]
+dev = ["backports.zoneinfo", "freezegun (>=1.0,<2.0)", "jinja2 (>=3.0)", "pytest (>=6.0)", "pytest-cov", "pytz", "setuptools", "tzdata"]

 [[package]]
 name = "bump2version"

 [[package]]
 name = "certifi"
-version = "2024.12.14"
+version = "2025.1.31"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"},
-    {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"},
+    {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
+    {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
 ]

@@ -520,73 +520,74 @@ toml = ["tomli"]

 [[package]]
 name = "coverage"
-version = "7.6.10"
+version = "7.6.12"
 description = "Code coverage measurement for Python"
 optional = false
 python-versions = ">=3.9"
 files = [
-    {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"},
-    {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"},
-    {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"},
-    {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"},
-    {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"},
-    {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"},
-    {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"},
-    {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"},
-    {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"},
-    {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"},
-    {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"},
-    {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"},
-    {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"},
-    {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"},
-    {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"},
-    {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"},
-    {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash =
"sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, - {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, - {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, - {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, - {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, - {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, - {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, - {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, - {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, - {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, - {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, - {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, - {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, - 
{file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, - {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, - {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, - {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, - {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, - {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, - {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, - {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, - {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, - {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, - {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, - {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, - {file = "coverage-7.6.10.tar.gz", hash = 
"sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, + {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, + {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, + {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, + {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, + {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, + {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, + {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, + {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, + {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, + {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, + {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, + {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, + {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, + {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, + {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = "sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, + {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, + {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, + {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, + {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, + {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, + {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, + {file = 
"coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, + {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, + {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, + {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, + {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, + {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, + {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, + {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, + {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, + {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, + {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, + {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, + {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, ] [package.dependencies] @@ -741,29 +742,6 @@ perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] -[[package]] -name = "importlib-metadata" -version = "8.6.1" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", 
hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - [[package]] name = "iniconfig" version = "2.0.0" @@ -1002,6 +980,59 @@ install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] +[[package]] +name = "mypy" +version = "1.15.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +files = [ + {file = "mypy-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:979e4e1a006511dacf628e36fadfecbcc0160a8af6ca7dad2f5025529e082c13"}, + {file = "mypy-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c4bb0e1bd29f7d34efcccd71cf733580191e9a264a2202b0239da95984c5b559"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be68172e9fd9ad8fb876c6389f16d1c1b5f100ffa779f77b1fb2176fcc9ab95b"}, + {file = "mypy-1.15.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7be1e46525adfa0d97681432ee9fcd61a3964c2446795714699a998d193f1a3"}, + {file = "mypy-1.15.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2e2c2e6d3593f6451b18588848e66260ff62ccca522dd231cd4dd59b0160668b"}, + {file = "mypy-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:6983aae8b2f653e098edb77f893f7b6aca69f6cffb19b2cc7443f23cce5f4828"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2922d42e16d6de288022e5ca321cd0618b238cfc5570e0263e5ba0a77dbef56f"}, + {file = "mypy-1.15.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2ee2d57e01a7c35de00f4634ba1bbf015185b219e4dc5909e281016df43f5ee5"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:973500e0774b85d9689715feeffcc980193086551110fd678ebe1f4342fb7c5e"}, + {file = "mypy-1.15.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a95fb17c13e29d2d5195869262f8125dfdb5c134dc8d9a9d0aecf7525b10c2c"}, + {file = "mypy-1.15.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1905f494bfd7d85a23a88c5d97840888a7bd516545fc5aaedff0267e0bb54e2f"}, + {file = "mypy-1.15.0-cp311-cp311-win_amd64.whl", hash = "sha256:c9817fa23833ff189db061e6d2eff49b2f3b6ed9856b4a0a73046e41932d744f"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:aea39e0583d05124836ea645f412e88a5c7d0fd77a6d694b60d9b6b2d9f184fd"}, + {file = "mypy-1.15.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2f2147ab812b75e5b5499b01ade1f4a81489a147c01585cda36019102538615f"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ce436f4c6d218a070048ed6a44c0bbb10cd2cc5e272b29e7845f6a2f57ee4464"}, + {file = "mypy-1.15.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:8023ff13985661b50a5928fc7a5ca15f3d1affb41e5f0a9952cb68ef090b31ee"}, + {file = "mypy-1.15.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1124a18bc11a6a62887e3e137f37f53fbae476dc36c185d549d4f837a2a6a14e"}, + {file = "mypy-1.15.0-cp312-cp312-win_amd64.whl", hash = "sha256:171a9ca9a40cd1843abeca0e405bc1940cd9b305eaeea2dda769ba096932bb22"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:93faf3fdb04768d44bf28693293f3904bbb555d076b781ad2530214ee53e3445"}, + {file = "mypy-1.15.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:811aeccadfb730024c5d3e326b2fbe9249bb7413553f15499a4050f7c30e801d"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98b7b9b9aedb65fe628c62a6dc57f6d5088ef2dfca37903a7d9ee374d03acca5"}, + {file = "mypy-1.15.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c43a7682e24b4f576d93072216bf56eeff70d9140241f9edec0c104d0c515036"}, + {file = "mypy-1.15.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:baefc32840a9f00babd83251560e0ae1573e2f9d1b067719479bfb0e987c6357"}, + {file = "mypy-1.15.0-cp313-cp313-win_amd64.whl", hash = "sha256:b9378e2c00146c44793c98b8d5a61039a048e31f429fb0eb546d93f4b000bedf"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e601a7fa172c2131bff456bb3ee08a88360760d0d2f8cbd7a75a65497e2df078"}, + {file = "mypy-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:712e962a6357634fef20412699a3655c610110e01cdaa6180acec7fc9f8513ba"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95579473af29ab73a10bada2f9722856792a36ec5af5399b653aa28360290a5"}, + {file = "mypy-1.15.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f8722560a14cde92fdb1e31597760dc35f9f5524cce17836c0d22841830fd5b"}, + {file = "mypy-1.15.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fbb8da62dc352133d7d7ca90ed2fb0e9d42bb1a32724c287d3c76c58cbaa9c2"}, + {file = "mypy-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:d10d994b41fb3497719bbf866f227b3489048ea4bbbb5015357db306249f7980"}, + {file = "mypy-1.15.0-py3-none-any.whl", hash = "sha256:5469affef548bd1895d86d3bf10ce2b44e33d86923c29e4d675b3e323437ea3e"}, + {file = "mypy-1.15.0.tar.gz", hash = "sha256:404534629d51d3efea5c800ee7c42b72a6554d6c400e6a79eafe15d11341fd43"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -1278,13 +1309,13 @@ testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-asyncio" -version = "0.25.2" +version = "0.25.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_asyncio-0.25.2-py3-none-any.whl", hash = "sha256:0d0bb693f7b99da304a0634afc0a4b19e49d5e0de2d670f38dc4bfa5727c5075"}, - {file = "pytest_asyncio-0.25.2.tar.gz", hash = "sha256:3f8ef9a98f45948ea91a0ed3dc4268b5326c0e7bce73892acc654df4262ad45f"}, + {file = "pytest_asyncio-0.25.3-py3-none-any.whl", hash = "sha256:9e89518e0f9bd08928f97a3482fdc4e244df17529460bc038291ccaf8f85c7c3"}, + {file = "pytest_asyncio-0.25.3.tar.gz", hash = 
"sha256:fc1da2cf9f125ada7e710b4ddad05518d4cee187ae9412e9ac9271003497f07a"}, ] [package.dependencies] @@ -1357,23 +1388,23 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] [[package]] name = "pytest-codspeed" -version = "3.1.2" +version = "3.2.0" description = "Pytest plugin to create CodSpeed benchmarks" optional = false python-versions = ">=3.9" files = [ - {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:aed496f873670ce0ea8f980a7c1a2c6a08f415e0ebdf207bf651b2d922103374"}, - {file = "pytest_codspeed-3.1.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ee45b0b763f6b5fa5d74c7b91d694a9615561c428b320383660672f4471756e3"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c84e591a7a0f67d45e2dc9fd05b276971a3aabcab7478fe43363ebefec1358f4"}, - {file = "pytest_codspeed-3.1.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c6ae6d094247156407770e6b517af70b98862dd59a3c31034aede11d5f71c32c"}, - {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d0f264991de5b5cdc118b96fc671386cca3f0f34e411482939bf2459dc599097"}, - {file = "pytest_codspeed-3.1.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0695a4bcd5ff04e8379124dba5d9795ea5e0cadf38be7a0406432fc1467b555"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6dc356c8dcaaa883af83310f397ac06c96fac9b8a1146e303d4b374b2cb46a18"}, - {file = "pytest_codspeed-3.1.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cc8a5d0366322a75cf562f7d8d672d28c1cf6948695c4dddca50331e08f6b3d5"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6c5fe7a19b72f54f217480b3b527102579547b1de9fe3acd9e66cb4629ff46c8"}, - {file = "pytest_codspeed-3.1.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b67205755a665593f6521a98317d02a9d07d6fdc593f6634de2c94dea47a3055"}, - {file = "pytest_codspeed-3.1.2-py3-none-any.whl", hash = "sha256:5e7ed0315e33496c5c07dba262b50303b8d0bc4c3d10bf1d422a41e70783f1cb"}, - {file = "pytest_codspeed-3.1.2.tar.gz", hash = "sha256:09c1733af3aab35e94a621aa510f2d2114f65591e6f644c42ca3f67547edad4b"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c5165774424c7ab8db7e7acdb539763a0e5657996effefdf0664d7fd95158d34"}, + {file = "pytest_codspeed-3.2.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9bd55f92d772592c04a55209950c50880413ae46876e66bd349ef157075ca26c"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4cf6f56067538f4892baa8d7ab5ef4e45bb59033be1ef18759a2c7fc55b32035"}, + {file = "pytest_codspeed-3.2.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:39a687b05c3d145642061b45ea78e47e12f13ce510104d1a2cda00eee0e36f58"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:46a1afaaa1ac4c2ca5b0700d31ac46d80a27612961d031067d73c6ccbd8d3c2b"}, + {file = "pytest_codspeed-3.2.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c48ce3af3dfa78413ed3d69d1924043aa1519048dbff46edccf8f35a25dab3c2"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:66692506d33453df48b36a84703448cb8b22953eea51f03fbb2eb758dc2bdc4f"}, + {file = "pytest_codspeed-3.2.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:479774f80d0bdfafa16112700df4dbd31bf2a6757fac74795fd79c0a7b3c389b"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:109f9f4dd1088019c3b3f887d003b7d65f98a7736ca1d457884f5aa293e8e81c"}, + {file = "pytest_codspeed-3.2.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2f69a03b52c9bb041aec1b8ee54b7b6c37a6d0a948786effa4c71157765b6da"}, + {file = "pytest_codspeed-3.2.0-py3-none-any.whl", hash = "sha256:54b5c2e986d6a28e7b0af11d610ea57bd5531cec8326abe486f1b55b09d91c39"}, + {file = "pytest_codspeed-3.2.0.tar.gz", hash = "sha256:f9d1b1a3b2c69cdc0490a1e8b1ced44bffbd0e8e21d81a7160cfdd923f6e8155"}, ] [package.dependencies] @@ -1471,13 +1502,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2024.2" +version = "2025.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, + {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, + {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, ] [[package]] @@ -1543,29 +1574,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.3" +version = "0.9.6" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.3-py3-none-linux_armv6l.whl", hash = "sha256:7f39b879064c7d9670197d91124a75d118d00b0990586549949aae80cdc16624"}, - {file = "ruff-0.9.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:a187171e7c09efa4b4cc30ee5d0d55a8d6c5311b3e1b74ac5cb96cc89bafc43c"}, - {file = "ruff-0.9.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:c59ab92f8e92d6725b7ded9d4a31be3ef42688a115c6d3da9457a5bda140e2b4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dc153c25e715be41bb228bc651c1e9b1a88d5c6e5ed0194fa0dfea02b026439"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:646909a1e25e0dc28fbc529eab8eb7bb583079628e8cbe738192853dbbe43af5"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a5a46e09355695fbdbb30ed9889d6cf1c61b77b700a9fafc21b41f097bfbba4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c4bb09d2bbb394e3730d0918c00276e79b2de70ec2a5231cd4ebb51a57df9ba1"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96a87ec31dc1044d8c2da2ebbed1c456d9b561e7d087734336518181b26b3aa5"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bb7554aca6f842645022fe2d301c264e6925baa708b392867b7a62645304df4"}, - {file = "ruff-0.9.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cabc332b7075a914ecea912cd1f3d4370489c8018f2c945a30bcc934e3bc06a6"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:33866c3cc2a575cbd546f2cd02bdd466fed65118e4365ee538a3deffd6fcb730"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:006e5de2621304c8810bcd2ee101587712fa93b4f955ed0985907a36c427e0c2"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ba6eea4459dbd6b1be4e6bfc766079fb9b8dd2e5a35aff6baee4d9b1514ea519"}, - {file = "ruff-0.9.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:90230a6b8055ad47d3325e9ee8f8a9ae7e273078a66401ac66df68943ced029b"}, - {file = "ruff-0.9.3-py3-none-win32.whl", hash = "sha256:eabe5eb2c19a42f4808c03b82bd313fc84d4e395133fb3fc1b1516170a31213c"}, - {file = "ruff-0.9.3-py3-none-win_amd64.whl", hash = "sha256:040ceb7f20791dfa0e78b4230ee9dce23da3b64dd5848e40e3bf3ab76468dcf4"}, - {file = "ruff-0.9.3-py3-none-win_arm64.whl", hash = "sha256:800d773f6d4d33b0a3c60e2c6ae8f4c202ea2de056365acfa519aa48acf28e0b"}, - {file = "ruff-0.9.3.tar.gz", hash = "sha256:8293f89985a090ebc3ed1064df31f3b4b56320cdfcec8b60d3295bddb955c22a"}, + {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, + {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, + {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, + {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, + {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, + {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, + {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, + {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, + {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, ] [[package]] @@ -2046,13 +2077,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.29.1" +version = "20.29.2" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, - {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, + {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, + {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, ] [package.dependencies] @@ -2098,26 +2129,7 @@ enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler 
(>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "97f4c031d7769c7bad6adc5b4dfee58549dd3a445f991960527ec5e1212449b6" +content-hash = "c5a8f50292a01acddd1ce62c872344c676ef173170c50fdc668114f5f787afe6" diff --git a/pyproject.toml b/pyproject.toml index 1dbd6636..2ef24f7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,7 +87,8 @@ optional = true [tool.poetry.group.lint.dependencies] ruff = ">=0.9,<0.10" mypy = [ - { version = "^1.14", python = ">=3.8" }, + { version = "^1.15", python = ">=3.9" }, + { version = "~1.14", python = ">=3.8,<3.9" }, { version = "~1.4", python = "<3.8" } ] bump2version = ">=1,<2" diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 5b48c8b4..2e557390 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -789,7 +789,7 @@ def fields(self) -> GraphQLFieldMap: return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @@ -894,7 +894,7 @@ def fields(self) -> GraphQLFieldMap: return { assert_name(name): value if isinstance(value, GraphQLField) - else GraphQLField(value) # type: ignore + else GraphQLField(value) for name, value in fields.items() } @@ -1361,7 +1361,7 @@ def fields(self) -> GraphQLInputFieldMap: return { assert_name(name): value if isinstance(value, GraphQLInputField) - else GraphQLInputField(value) # type: ignore + else GraphQLInputField(value) for name, value in fields.items() } diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index 7b8c33bb..d9cb160f 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -303,6 +303,7 @@ class IntrospectionSchema(MaybeWithDescription): # The root typed dictionary for schema introspections. +# Note: We don't use class syntax here since the key looks like a private attribute. IntrospectionQuery = TypedDict( # noqa: UP013 "IntrospectionQuery", {"__schema": IntrospectionSchema}, diff --git a/tox.ini b/tox.ini index b7ee6b8c..b8601559 100644 --- a/tox.ini +++ b/tox.ini @@ -26,7 +26,7 @@ commands = [testenv:mypy] basepython = python3.12 deps = - mypy>=1.14,<2 + mypy>=1.15,<2 pytest>=8.3,<9 commands = mypy src tests From 2c94f03026932e929c329456da5403a773e9dab1 Mon Sep 17 00:00:00 2001 From: Willem Date: Sat, 3 May 2025 22:02:40 +1200 Subject: [PATCH 47/50] Update README to include Typed GraphQL (#237) --- README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/README.md b/README.md index 58b57b1f..aa36c84d 100644 --- a/README.md +++ b/README.md @@ -232,6 +232,8 @@ in addition to using [mypy](https://mypy-lang.org/) as type checker. Arminio, is a new GraphQL library for Python 3, inspired by dataclasses, that is also using GraphQL-core 3 as underpinning. +* [Typed GraphQL](https://github.com/willemt/typed-graphql), thin layer over GraphQL-core that uses native Python types for creating GraphQL schemas. 
From 2c94f03026932e929c329456da5403a773e9dab1 Mon Sep 17 00:00:00 2001
From: Willem
Date: Sat, 3 May 2025 22:02:40 +1200
Subject: [PATCH 47/50] Update README to include Typed GraphQL (#237)

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index 58b57b1f..aa36c84d 100644
--- a/README.md
+++ b/README.md
@@ -232,6 +232,8 @@ in addition to using [mypy](https://mypy-lang.org/) as type checker.
   Arminio, is a new GraphQL library for Python 3, inspired by dataclasses,
   that is also using GraphQL-core 3 as underpinning.

+* [Typed GraphQL](https://github.com/willemt/typed-graphql), thin layer over GraphQL-core that uses native Python types for creating GraphQL schemas.
+
 ## Changelog

From 41799bb98589c4029bbc09901f39c7a4e752e610 Mon Sep 17 00:00:00 2001
From: Christoph Zwerschke
Date: Sat, 3 May 2025 12:07:26 +0200
Subject: [PATCH 48/50] Update ruff

---
 poetry.lock    | 450 ++++++++++++++++++++++++++-----------------------
 pyproject.toml |   2 +-
 tox.ini        |   4 +-
 3 files changed, 239 insertions(+), 217 deletions(-)

diff --git a/poetry.lock b/poetry.lock
index 167d9627..6af5b224 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -58,24 +58,24 @@ files = [

 [[package]]
 name = "cachetools"
-version = "5.5.1"
+version = "5.5.2"
 description = "Extensible memoizing collections and decorators"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"},
-    {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"},
+    {file = "cachetools-5.5.2-py3-none-any.whl", hash = "sha256:d26a22bcc62eb95c3beabd9f1ee5e820d3d2704fe2967cbe350e20c8ffcd3f0a"},
+    {file = "cachetools-5.5.2.tar.gz", hash = "sha256:1a661caa9175d26759571b2e19580f9d6393969e5dfca11fdb1f947a23e640d4"},
 ]

 [[package]]
 name = "certifi"
-version = "2025.1.31"
+version = "2025.4.26"
 description = "Python package for providing Mozilla's CA Bundle."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"},
-    {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"},
+    {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"},
+    {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"},
 ]

@@ -246,103 +246,103 @@ files = [

 [[package]]
 name = "charset-normalizer"
-version = "3.4.1"
+version = "3.4.2"
 description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."
optional = false python-versions = ">=3.7" files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash 
= "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = 
"charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", 
hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a"}, + {file = "charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a"}, + {file = "charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = 
"sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c"}, + {file = "charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7"}, + {file = "charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1cad5f45b3146325bb38d6855642f6fd609c3f7cad4dbaf75549bf3b904d3184"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2680962a4848b3c4f155dc2ee64505a9c57186d0d56b43123b17ca3de18f0fa"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:36b31da18b8890a76ec181c3cf44326bf2c48e36d393ca1b72b3f484113ea344"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f4074c5a429281bf056ddd4c5d3b740ebca4d43ffffe2ef4bf4d2d05114299da"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c9e36a97bee9b86ef9a1cf7bb96747eb7a15c2f22bdb5b516434b00f2a599f02"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:1b1bde144d98e446b056ef98e59c256e9294f6b74d7af6846bf5ffdafd687a7d"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:915f3849a011c1f593ab99092f3cecfcb4d65d8feb4a64cf1bf2d22074dc0ec4"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:fb707f3e15060adf5b7ada797624a6c6e0138e2a26baa089df64c68ee98e040f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:25a23ea5c7edc53e0f29bae2c44fcb5a1aa10591aae107f2a2b2583a9c5cbc64"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:770cab594ecf99ae64c236bc9ee3439c3f46be49796e265ce0cc8bc17b10294f"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win32.whl", hash = "sha256:6a0289e4589e8bdfef02a80478f1dfcb14f0ab696b5a00e1f4b8a14a307a3c58"}, + {file = "charset_normalizer-3.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6fc1f5b51fa4cecaa18f2bd7a003f3dd039dd615cd69a2afd6d3b19aed6775f2"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:76af085e67e56c8816c3ccf256ebd136def2ed9654525348cfa744b6802b69eb"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e45ba65510e2647721e35323d6ef54c7974959f6081b58d4ef5d87c60c84919a"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:046595208aae0120559a67693ecc65dd75d46f7bf687f159127046628178dc45"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75d10d37a47afee94919c4fab4c22b9bc2a8bf7d4f46f87363bcf0573f3ff4f5"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6333b3aa5a12c26b2a4d4e7335a28f1475e0e5e17d69d55141ee3cab736f66d1"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8323a9b031aa0393768b87f04b4164a40037fb2a3c11ac06a03ffecd3618027"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:24498ba8ed6c2e0b56d4acbf83f2d989720a93b41d712ebd4f4979660db4417b"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:844da2b5728b5ce0e32d863af26f32b5ce61bc4273a9c720a9f3aa9df73b1455"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:65c981bdbd3f57670af8b59777cbfae75364b483fa8a9f420f08094531d54a01"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:3c21d4fca343c805a52c0c78edc01e3477f6dd1ad7c47653241cf2a206d4fc58"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:dc7039885fa1baf9be153a0626e337aa7ec8bf96b0128605fb0d77788ddc1681"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win32.whl", hash = "sha256:8272b73e1c5603666618805fe821edba66892e2870058c94c53147602eab29c7"}, + {file = "charset_normalizer-3.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:70f7172939fdf8790425ba31915bfbe8335030f05b9913d7ae00a87d4395620a"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:005fa3432484527f9732ebd315da8da8001593e2cf46a3d817669f062c3d9ed4"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:e92fca20c46e9f5e1bb485887d074918b13543b1c2a1185e69bb8d17ab6236a7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:50bf98d5e563b83cc29471fa114366e6806bc06bc7a25fd59641e41445327836"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:721c76e84fe669be19c5791da68232ca2e05ba5185575086e384352e2c309597"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82d8fd25b7f4675d0c47cf95b594d4e7b158aca33b76aa63d07186e13c0e0ab7"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3daeac64d5b371dea99714f08ffc2c208522ec6b06fbc7866a450dd446f5c0f"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dccab8d5fa1ef9bfba0590ecf4d46df048d18ffe3eec01eeb73a42e0d9e7a8ba"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:aaf27faa992bfee0264dc1f03f4c75e9fcdda66a519db6b957a3f826e285cf12"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:eb30abc20df9ab0814b5a2524f23d75dcf83cde762c161917a2b4b7b55b1e518"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c72fbbe68c6f32f251bdc08b8611c7b3060612236e960ef848e0a517ddbe76c5"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:982bb1e8b4ffda883b3d0a521e23abcd6fd17418f6d2c4118d257a10199c0ce3"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win32.whl", hash = "sha256:43e0933a0eff183ee85833f341ec567c0980dae57c464d8a508e1b2ceb336471"}, + {file = "charset_normalizer-3.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:d11b54acf878eef558599658b0ffca78138c8c3655cf4f3a4a673c437e67732e"}, + {file = "charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0"}, + {file = "charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63"}, ] [[package]] @@ -520,74 +520,74 @@ toml = ["tomli"] [[package]] name = "coverage" -version = "7.6.12" +version = "7.8.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.9" files = [ - {file = "coverage-7.6.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704c8c8c6ce6569286ae9622e534b4f5b9759b6f2cd643f1c1a61f666d534fe8"}, - {file = "coverage-7.6.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ad7525bf0241e5502168ae9c643a2f6c219fa0a283001cee4cf23a9b7da75879"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06097c7abfa611c91edb9e6920264e5be1d6ceb374efb4986f38b09eed4cb2fe"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:220fa6c0ad7d9caef57f2c8771918324563ef0d8272c94974717c3909664e674"}, - {file = "coverage-7.6.12-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3688b99604a24492bcfe1c106278c45586eb819bf66a654d8a9a1433022fb2eb"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d1a987778b9c71da2fc8948e6f2656da6ef68f59298b7e9786849634c35d2c3c"}, - {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:cec6b9ce3bd2b7853d4a4563801292bfee40b030c05a3d29555fd2a8ee9bd68c"}, 
- {file = "coverage-7.6.12-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ace9048de91293e467b44bce0f0381345078389814ff6e18dbac8fdbf896360e"}, - {file = "coverage-7.6.12-cp310-cp310-win32.whl", hash = "sha256:ea31689f05043d520113e0552f039603c4dd71fa4c287b64cb3606140c66f425"}, - {file = "coverage-7.6.12-cp310-cp310-win_amd64.whl", hash = "sha256:676f92141e3c5492d2a1596d52287d0d963df21bf5e55c8b03075a60e1ddf8aa"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e18aafdfb3e9ec0d261c942d35bd7c28d031c5855dadb491d2723ba54f4c3015"}, - {file = "coverage-7.6.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:66fe626fd7aa5982cdebad23e49e78ef7dbb3e3c2a5960a2b53632f1f703ea45"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ef01d70198431719af0b1f5dcbefc557d44a190e749004042927b2a3fed0702"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e92ae5a289a4bc4c0aae710c0948d3c7892e20fd3588224ebe242039573bf0"}, - {file = "coverage-7.6.12-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e695df2c58ce526eeab11a2e915448d3eb76f75dffe338ea613c1201b33bab2f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d74c08e9aaef995f8c4ef6d202dbd219c318450fe2a76da624f2ebb9c8ec5d9f"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e995b3b76ccedc27fe4f477b349b7d64597e53a43fc2961db9d3fbace085d69d"}, - {file = "coverage-7.6.12-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b1f097878d74fe51e1ddd1be62d8e3682748875b461232cf4b52ddc6e6db0bba"}, - {file = "coverage-7.6.12-cp311-cp311-win32.whl", hash = "sha256:1f7ffa05da41754e20512202c866d0ebfc440bba3b0ed15133070e20bf5aeb5f"}, - {file = "coverage-7.6.12-cp311-cp311-win_amd64.whl", hash = "sha256:e216c5c45f89ef8971373fd1c5d8d1164b81f7f5f06bbf23c37e7908d19e8558"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b172f8e030e8ef247b3104902cc671e20df80163b60a203653150d2fc204d1ad"}, - {file = "coverage-7.6.12-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:641dfe0ab73deb7069fb972d4d9725bf11c239c309ce694dd50b1473c0f641c3"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e549f54ac5f301e8e04c569dfdb907f7be71b06b88b5063ce9d6953d2d58574"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:959244a17184515f8c52dcb65fb662808767c0bd233c1d8a166e7cf74c9ea985"}, - {file = "coverage-7.6.12-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bda1c5f347550c359f841d6614fb8ca42ae5cb0b74d39f8a1e204815ebe25750"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1ceeb90c3eda1f2d8c4c578c14167dbd8c674ecd7d38e45647543f19839dd6ea"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f16f44025c06792e0fb09571ae454bcc7a3ec75eeb3c36b025eccf501b1a4c3"}, - {file = "coverage-7.6.12-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b076e625396e787448d27a411aefff867db2bffac8ed04e8f7056b07024eed5a"}, - {file = "coverage-7.6.12-cp312-cp312-win32.whl", hash = "sha256:00b2086892cf06c7c2d74983c9595dc511acca00665480b3ddff749ec4fb2a95"}, - {file = "coverage-7.6.12-cp312-cp312-win_amd64.whl", hash = 
"sha256:7ae6eabf519bc7871ce117fb18bf14e0e343eeb96c377667e3e5dd12095e0288"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:488c27b3db0ebee97a830e6b5a3ea930c4a6e2c07f27a5e67e1b3532e76b9ef1"}, - {file = "coverage-7.6.12-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d1095bbee1851269f79fd8e0c9b5544e4c00c0c24965e66d8cba2eb5bb535fd"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0533adc29adf6a69c1baa88c3d7dbcaadcffa21afbed3ca7a225a440e4744bf9"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53c56358d470fa507a2b6e67a68fd002364d23c83741dbc4c2e0680d80ca227e"}, - {file = "coverage-7.6.12-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64cbb1a3027c79ca6310bf101014614f6e6e18c226474606cf725238cf5bc2d4"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:79cac3390bfa9836bb795be377395f28410811c9066bc4eefd8015258a7578c6"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9b148068e881faa26d878ff63e79650e208e95cf1c22bd3f77c3ca7b1d9821a3"}, - {file = "coverage-7.6.12-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8bec2ac5da793c2685ce5319ca9bcf4eee683b8a1679051f8e6ec04c4f2fd7dc"}, - {file = "coverage-7.6.12-cp313-cp313-win32.whl", hash = "sha256:200e10beb6ddd7c3ded322a4186313d5ca9e63e33d8fab4faa67ef46d3460af3"}, - {file = "coverage-7.6.12-cp313-cp313-win_amd64.whl", hash = "sha256:2b996819ced9f7dbb812c701485d58f261bef08f9b85304d41219b1496b591ef"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:299cf973a7abff87a30609879c10df0b3bfc33d021e1adabc29138a48888841e"}, - {file = "coverage-7.6.12-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4b467a8c56974bf06e543e69ad803c6865249d7a5ccf6980457ed2bc50312703"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2458f275944db8129f95d91aee32c828a408481ecde3b30af31d552c2ce284a0"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a9d8be07fb0832636a0f72b80d2a652fe665e80e720301fb22b191c3434d924"}, - {file = "coverage-7.6.12-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:14d47376a4f445e9743f6c83291e60adb1b127607a3618e3185bbc8091f0467b"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b95574d06aa9d2bd6e5cc35a5bbe35696342c96760b69dc4287dbd5abd4ad51d"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:ecea0c38c9079570163d663c0433a9af4094a60aafdca491c6a3d248c7432827"}, - {file = "coverage-7.6.12-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2251fabcfee0a55a8578a9d29cecfee5f2de02f11530e7d5c5a05859aa85aee9"}, - {file = "coverage-7.6.12-cp313-cp313t-win32.whl", hash = "sha256:eb5507795caabd9b2ae3f1adc95f67b1104971c22c624bb354232d65c4fc90b3"}, - {file = "coverage-7.6.12-cp313-cp313t-win_amd64.whl", hash = "sha256:f60a297c3987c6c02ffb29effc70eadcbb412fe76947d394a1091a3615948e2f"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e7575ab65ca8399c8c4f9a7d61bbd2d204c8b8e447aab9d355682205c9dd948d"}, - {file = "coverage-7.6.12-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:8161d9fbc7e9fe2326de89cd0abb9f3599bccc1287db0aba285cb68d204ce929"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3a1e465f398c713f1b212400b4e79a09829cd42aebd360362cd89c5bdc44eb87"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f25d8b92a4e31ff1bd873654ec367ae811b3a943583e05432ea29264782dc32c"}, - {file = "coverage-7.6.12-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a936309a65cc5ca80fa9f20a442ff9e2d06927ec9a4f54bcba9c14c066323f2"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa6f302a3a0b5f240ee201297fff0bbfe2fa0d415a94aeb257d8b461032389bd"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f973643ef532d4f9be71dd88cf7588936685fdb576d93a79fe9f65bc337d9d73"}, - {file = "coverage-7.6.12-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:78f5243bb6b1060aed6213d5107744c19f9571ec76d54c99cc15938eb69e0e86"}, - {file = "coverage-7.6.12-cp39-cp39-win32.whl", hash = "sha256:69e62c5034291c845fc4df7f8155e8544178b6c774f97a99e2734b05eb5bed31"}, - {file = "coverage-7.6.12-cp39-cp39-win_amd64.whl", hash = "sha256:b01a840ecc25dce235ae4c1b6a0daefb2a203dba0e6e980637ee9c2f6ee0df57"}, - {file = "coverage-7.6.12-pp39.pp310-none-any.whl", hash = "sha256:7e39e845c4d764208e7b8f6a21c541ade741e2c41afabdfa1caa28687a3c98cf"}, - {file = "coverage-7.6.12-py3-none-any.whl", hash = "sha256:eb8668cfbc279a536c633137deeb9435d2962caec279c3f8cf8b91fff6ff8953"}, - {file = "coverage-7.6.12.tar.gz", hash = "sha256:48cfc4641d95d34766ad41d9573cc0f22a48aa88d22657a1fe01dca0dbae4de2"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe"}, + {file = "coverage-7.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52a523153c568d2c0ef8826f6cc23031dc86cffb8c6aeab92c4ff776e7951b28"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c8a5c139aae4c35cbd7cadca1df02ea8cf28a911534fc1b0456acb0b14234f3"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a26c0c795c3e0b63ec7da6efded5f0bc856d7c0b24b2ac84b4d1d7bc578d676"}, + {file = "coverage-7.8.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821f7bcbaa84318287115d54becb1915eece6918136c6f91045bb84e2f88739d"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a321c61477ff8ee705b8a5fed370b5710c56b3a52d17b983d9215861e37b642a"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ed2144b8a78f9d94d9515963ed273d620e07846acd5d4b0a642d4849e8d91a0c"}, + {file = "coverage-7.8.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:042e7841a26498fff7a37d6fda770d17519982f5b7d8bf5278d140b67b61095f"}, + {file = "coverage-7.8.0-cp310-cp310-win32.whl", hash = "sha256:f9983d01d7705b2d1f7a95e10bbe4091fabc03a46881a256c2787637b087003f"}, + {file = "coverage-7.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:5a570cd9bd20b85d1a0d7b009aaf6c110b52b5755c17be6962f8ccd65d1dbd23"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7ac22a0bb2c7c49f441f7a6d46c9c80d96e56f5a8bc6972529ed43c8b694e27"}, + {file = "coverage-7.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:bf13d564d310c156d1c8e53877baf2993fb3073b2fc9f69790ca6a732eb4bfea"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5761c70c017c1b0d21b0815a920ffb94a670c8d5d409d9b38857874c21f70d7"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5ff52d790c7e1628241ffbcaeb33e07d14b007b6eb00a19320c7b8a7024c040"}, + {file = "coverage-7.8.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d39fc4817fd67b3915256af5dda75fd4ee10621a3d484524487e33416c6f3543"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b44674870709017e4b4036e3d0d6c17f06a0e6d4436422e0ad29b882c40697d2"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f99eb72bf27cbb167b636eb1726f590c00e1ad375002230607a844d9e9a2318"}, + {file = "coverage-7.8.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b571bf5341ba8c6bc02e0baeaf3b061ab993bf372d982ae509807e7f112554e9"}, + {file = "coverage-7.8.0-cp311-cp311-win32.whl", hash = "sha256:e75a2ad7b647fd8046d58c3132d7eaf31b12d8a53c0e4b21fa9c4d23d6ee6d3c"}, + {file = "coverage-7.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:3043ba1c88b2139126fc72cb48574b90e2e0546d4c78b5299317f61b7f718b78"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:bbb5cc845a0292e0c520656d19d7ce40e18d0e19b22cb3e0409135a575bf79fc"}, + {file = "coverage-7.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4dfd9a93db9e78666d178d4f08a5408aa3f2474ad4d0e0378ed5f2ef71640cb6"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f017a61399f13aa6d1039f75cd467be388d157cd81f1a119b9d9a68ba6f2830d"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0915742f4c82208ebf47a2b154a5334155ed9ef9fe6190674b8a46c2fb89cb05"}, + {file = "coverage-7.8.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a40fcf208e021eb14b0fac6bdb045c0e0cab53105f93ba0d03fd934c956143a"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a1f406a8e0995d654b2ad87c62caf6befa767885301f3b8f6f73e6f3c31ec3a6"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:77af0f6447a582fdc7de5e06fa3757a3ef87769fbb0fdbdeba78c23049140a47"}, + {file = "coverage-7.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f2d32f95922927186c6dbc8bc60df0d186b6edb828d299ab10898ef3f40052fe"}, + {file = "coverage-7.8.0-cp312-cp312-win32.whl", hash = "sha256:769773614e676f9d8e8a0980dd7740f09a6ea386d0f383db6821df07d0f08545"}, + {file = "coverage-7.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:e5d2b9be5b0693cf21eb4ce0ec8d211efb43966f6657807f6859aab3814f946b"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ac46d0c2dd5820ce93943a501ac5f6548ea81594777ca585bf002aa8854cacd"}, + {file = "coverage-7.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:771eb7587a0563ca5bb6f622b9ed7f9d07bd08900f7589b4febff05f469bea00"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42421e04069fb2cbcbca5a696c4050b84a43b05392679d4068acbe65449b5c64"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:554fec1199d93ab30adaa751db68acec2b41c5602ac944bb19187cb9a41a8067"}, + {file = "coverage-7.8.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aaeb00761f985007b38cf463b1d160a14a22c34eb3f6a39d9ad6fc27cb73008"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:581a40c7b94921fffd6457ffe532259813fc68eb2bdda60fa8cc343414ce3733"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:f319bae0321bc838e205bf9e5bc28f0a3165f30c203b610f17ab5552cff90323"}, + {file = "coverage-7.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04bfec25a8ef1c5f41f5e7e5c842f6b615599ca8ba8391ec33a9290d9d2db3a3"}, + {file = "coverage-7.8.0-cp313-cp313-win32.whl", hash = "sha256:dd19608788b50eed889e13a5d71d832edc34fc9dfce606f66e8f9f917eef910d"}, + {file = "coverage-7.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:a9abbccd778d98e9c7e85038e35e91e67f5b520776781d9a1e2ee9d400869487"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:18c5ae6d061ad5b3e7eef4363fb27a0576012a7447af48be6c75b88494c6cf25"}, + {file = "coverage-7.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:95aa6ae391a22bbbce1b77ddac846c98c5473de0372ba5c463480043a07bff42"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e013b07ba1c748dacc2a80e69a46286ff145935f260eb8c72df7185bf048f502"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d766a4f0e5aa1ba056ec3496243150698dc0481902e2b8559314368717be82b1"}, + {file = "coverage-7.8.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad80e6b4a0c3cb6f10f29ae4c60e991f424e6b14219d46f1e7d442b938ee68a4"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b87eb6fc9e1bb8f98892a2458781348fa37e6925f35bb6ceb9d4afd54ba36c73"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d1ba00ae33be84066cfbe7361d4e04dec78445b2b88bdb734d0d1cbab916025a"}, + {file = "coverage-7.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f3c38e4e5ccbdc9198aecc766cedbb134b2d89bf64533973678dfcf07effd883"}, + {file = "coverage-7.8.0-cp313-cp313t-win32.whl", hash = "sha256:379fe315e206b14e21db5240f89dc0774bdd3e25c3c58c2c733c99eca96f1ada"}, + {file = "coverage-7.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:2e4b6b87bb0c846a9315e3ab4be2d52fac905100565f4b92f02c445c8799e257"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa260de59dfb143af06dcf30c2be0b200bed2a73737a8a59248fcb9fa601ef0f"}, + {file = "coverage-7.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:96121edfa4c2dfdda409877ea8608dd01de816a4dc4a0523356067b305e4e17a"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b8af63b9afa1031c0ef05b217faa598f3069148eeee6bb24b79da9012423b82"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89b1f4af0d4afe495cd4787a68e00f30f1d15939f550e869de90a86efa7e0814"}, + {file = "coverage-7.8.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94ec0be97723ae72d63d3aa41961a0b9a6f5a53ff599813c324548d18e3b9e8c"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:8a1d96e780bdb2d0cbb297325711701f7c0b6f89199a57f2049e90064c29f6bd"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:f1d8a2a57b47142b10374902777e798784abf400a004b14f1b0b9eaf1e528ba4"}, + {file = "coverage-7.8.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cf60dd2696b457b710dd40bf17ad269d5f5457b96442f7f85722bdb16fa6c899"}, + {file = "coverage-7.8.0-cp39-cp39-win32.whl", hash = "sha256:be945402e03de47ba1872cd5236395e0f4ad635526185a930735f66710e1bd3f"}, + {file = "coverage-7.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:90e7fbc6216ecaffa5a880cdc9c77b7418c1dcb166166b78dbc630d07f278cc3"}, + {file = "coverage-7.8.0-pp39.pp310.pp311-none-any.whl", hash = "sha256:b8194fb8e50d556d5849753de991d390c5a1edeeba50f68e3a9253fbd8bf8ccd"}, + {file = "coverage-7.8.0-py3-none-any.whl", hash = "sha256:dbf364b4c5e7bae9250528167dfe40219b62e2d573c854d74be213e1e52069f7"}, + {file = "coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501"}, ] [package.dependencies] @@ -753,15 +753,26 @@ files = [ {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + [[package]] name = "jinja2" -version = "3.1.5" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ - {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, - {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1044,6 +1055,17 @@ files = [ {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + [[package]] name = "packaging" version = "24.0" @@ -1057,13 +1079,13 @@ files = [ [[package]] name = "packaging" -version = "24.2" +version = "25.0" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, ] [[package]] @@ -1250,13 +1272,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest" -version = "8.3.4" +version = "8.3.5" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.8" files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, + {file = "pytest-8.3.5-py3-none-any.whl", hash = "sha256:c69214aa47deac29fad6c2a4f590b9c4a9fdb16a403176fe154b79c0b4d4d820"}, + {file = "pytest-8.3.5.tar.gz", hash = "sha256:f4efe70cc14e511565ac476b57c279e12a855b11f48f212af1080ef2263d3845"}, ] [package.dependencies] @@ -1456,13 +1478,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-cov" -version = "6.0.0" +version = "6.1.1" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.9" files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, ] [package.dependencies] @@ -1502,13 +1524,13 @@ pytest = ">=7.0.0" [[package]] name = "pytz" -version = "2025.1" +version = "2025.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2025.1-py2.py3-none-any.whl", hash = "sha256:89dd22dca55b46eac6eda23b2d72721bf1bdfef212645d81513ef5d03038de57"}, - {file = "pytz-2025.1.tar.gz", hash = "sha256:c2db42be2a2518b28e65f9207c4d05e6ff547d1efa4086469ef855e4ab70178e"}, + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, ] [[package]] @@ -1555,13 +1577,13 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "rich" -version = "13.9.4" +version = "14.0.0" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.8.0" files = [ - {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, - {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, + {file = "rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0"}, + {file = "rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725"}, ] [package.dependencies] @@ -1574,29 +1596,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruff" -version = "0.9.6" +version = "0.11.8" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.9.6-py3-none-linux_armv6l.whl", hash = "sha256:2f218f356dd2d995839f1941322ff021c72a492c470f0b26a34f844c29cdf5ba"}, - {file = "ruff-0.9.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b908ff4df65dad7b251c9968a2e4560836d8f5487c2f0cc238321ed951ea0504"}, - {file = "ruff-0.9.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:b109c0ad2ececf42e75fa99dc4043ff72a357436bb171900714a9ea581ddef83"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1de4367cca3dac99bcbd15c161404e849bb0bfd543664db39232648dc00112dc"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ac3ee4d7c2c92ddfdaedf0bf31b2b176fa7aa8950efc454628d477394d35638b"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dc1edd1775270e6aa2386119aea692039781429f0be1e0949ea5884e011aa8e"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4a091729086dffa4bd070aa5dab7e39cc6b9d62eb2bef8f3d91172d30d599666"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d1bbc6808bf7b15796cef0815e1dfb796fbd383e7dbd4334709642649625e7c5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:589d1d9f25b5754ff230dce914a174a7c951a85a4e9270613a2b74231fdac2f5"}, - {file = "ruff-0.9.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc61dd5131742e21103fbbdcad683a8813be0e3c204472d520d9a5021ca8b217"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5e2d9126161d0357e5c8f30b0bd6168d2c3872372f14481136d13de9937f79b6"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:68660eab1a8e65babb5229a1f97b46e3120923757a68b5413d8561f8a85d4897"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c4cae6c4cc7b9b4017c71114115db0445b00a16de3bcde0946273e8392856f08"}, - {file = "ruff-0.9.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19f505b643228b417c1111a2a536424ddde0db4ef9023b9e04a46ed8a1cb4656"}, - {file = "ruff-0.9.6-py3-none-win32.whl", hash = "sha256:194d8402bceef1b31164909540a597e0d913c0e4952015a5b40e28c146121b5d"}, - {file = "ruff-0.9.6-py3-none-win_amd64.whl", hash = "sha256:03482d5c09d90d4ee3f40d97578423698ad895c87314c4de39ed2af945633caa"}, - {file = "ruff-0.9.6-py3-none-win_arm64.whl", hash = "sha256:0e2bb706a2be7ddfea4a4af918562fdc1bcb16df255e5fa595bbd800ce322a5a"}, - {file = "ruff-0.9.6.tar.gz", hash = "sha256:81761592f72b620ec8fa1068a6fd00e98a5ebee342a3642efd84454f3031dca9"}, + {file = "ruff-0.11.8-py3-none-linux_armv6l.whl", hash = "sha256:896a37516c594805e34020c4a7546c8f8a234b679a7716a3f08197f38913e1a3"}, + {file = "ruff-0.11.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ab86d22d3d721a40dd3ecbb5e86ab03b2e053bc93c700dc68d1c3346b36ce835"}, + {file = "ruff-0.11.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:258f3585057508d317610e8a412788cf726efeefa2fec4dba4001d9e6f90d46c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727d01702f7c30baed3fc3a34901a640001a2828c793525043c29f7614994a8c"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3dca977cc4fc8f66e89900fa415ffe4dbc2e969da9d7a54bfca81a128c5ac219"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c657fa987d60b104d2be8b052d66da0a2a88f9bd1d66b2254333e84ea2720c7f"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:f2e74b021d0de5eceb8bd32919f6ff8a9b40ee62ed97becd44993ae5b9949474"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f9b5ef39820abc0f2c62111f7045009e46b275f5b99d5e59dda113c39b7f4f38"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1dba3135ca503727aa4648152c0fa67c3b1385d3dc81c75cd8a229c4b2a1458"}, + {file = "ruff-0.11.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f024d32e62faad0f76b2d6afd141b8c171515e4fb91ce9fd6464335c81244e5"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:d365618d3ad747432e1ae50d61775b78c055fee5936d77fb4d92c6f559741948"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4d9aaa91035bdf612c8ee7266153bcf16005c7c7e2f5878406911c92a31633cb"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:0eba551324733efc76116d9f3a0d52946bc2751f0cd30661564117d6fd60897c"}, + {file = "ruff-0.11.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:161eb4cff5cfefdb6c9b8b3671d09f7def2f960cee33481dd898caf2bcd02304"}, + {file = "ruff-0.11.8-py3-none-win32.whl", hash = "sha256:5b18caa297a786465cc511d7f8be19226acf9c0a1127e06e736cd4e1878c3ea2"}, + {file = "ruff-0.11.8-py3-none-win_amd64.whl", hash = "sha256:6e70d11043bef637c5617297bdedec9632af15d53ac1e1ba29c448da9341b0c4"}, + {file = "ruff-0.11.8-py3-none-win_arm64.whl", hash = "sha256:304432e4c4a792e3da85b7699feb3426a0908ab98bf29df22a31b0cdd098fac2"}, + {file = "ruff-0.11.8.tar.gz", hash = "sha256:6d742d10626f9004b781f4558154bb226620a7242080e11caeffab1a40e99df8"}, ] [[package]] @@ -1923,17 +1945,17 @@ testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "pathlib2 (>=2.3.3)", "psu [[package]] name = "tox" -version = "4.24.1" +version = "4.25.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.24.1-py3-none-any.whl", hash = "sha256:57ba7df7d199002c6df8c2db9e6484f3de6ca8f42013c083ea2d4d1e5c6bdc75"}, - {file = "tox-4.24.1.tar.gz", hash = "sha256:083a720adbc6166fff0b7d1df9d154f9d00bfccb9403b8abf6bc0ee435d6a62e"}, + {file = "tox-4.25.0-py3-none-any.whl", hash = "sha256:4dfdc7ba2cc6fdc6688dde1b21e7b46ff6c41795fb54586c91a3533317b5255c"}, + {file = "tox-4.25.0.tar.gz", hash = "sha256:dd67f030317b80722cf52b246ff42aafd3ed27ddf331c415612d084304cf5e52"}, ] [package.dependencies] -cachetools = ">=5.5" +cachetools = ">=5.5.1" chardet = ">=5.2" colorama = ">=0.4.6" filelock = ">=3.16.1" @@ -1941,12 +1963,12 @@ packaging = ">=24.2" platformdirs = ">=4.3.6" pluggy = ">=1.5" pyproject-api = ">=1.8" -tomli = {version = ">=2.1", markers = "python_version < \"3.11\""} +tomli = {version = ">=2.2.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} -virtualenv = ">=20.27.1" +virtualenv = ">=20.29.1" [package.extras] -test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.4)", "pytest-mock (>=3.14)"] [[package]] name = "typed-ast" @@ -2011,13 +2033,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.12.2" +version = "4.13.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = 
[ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, ] [[package]] @@ -2077,13 +2099,13 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "virtualenv" -version = "20.29.2" +version = "20.30.0" description = "Virtual Python Environment builder" optional = false python-versions = ">=3.8" files = [ - {file = "virtualenv-20.29.2-py3-none-any.whl", hash = "sha256:febddfc3d1ea571bdb1dc0f98d7b45d24def7428214d4fb73cc486c9568cce6a"}, - {file = "virtualenv-20.29.2.tar.gz", hash = "sha256:fdaabebf6d03b5ba83ae0a02cfe96f48a716f4fae556461d180825866f75b728"}, + {file = "virtualenv-20.30.0-py3-none-any.whl", hash = "sha256:e34302959180fca3af42d1800df014b35019490b119eba981af27f2fa486e5d6"}, + {file = "virtualenv-20.30.0.tar.gz", hash = "sha256:800863162bcaa5450a6e4d721049730e7f2dae07720e0902b0e4040bd6f9ada8"}, ] [package.dependencies] @@ -2132,4 +2154,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.7" -content-hash = "c5a8f50292a01acddd1ce62c872344c676ef173170c50fdc668114f5f787afe6" +content-hash = "73cdf582288c9a4f22ebca27df8a40982b23954061d23e7d2301dfe9877cdb8d" diff --git a/pyproject.toml b/pyproject.toml index 2ef24f7d..e8d2ec6d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,7 +85,7 @@ tox = [ optional = true [tool.poetry.group.lint.dependencies] -ruff = ">=0.9,<0.10" +ruff = ">=0.11,<0.12" mypy = [ { version = "^1.15", python = ">=3.9" }, { version = "~1.14", python = ">=3.8,<3.9" }, diff --git a/tox.ini b/tox.ini index b8601559..d7dc47bc 100644 --- a/tox.ini +++ b/tox.ini @@ -4,7 +4,7 @@ isolated_build = true [gh-actions] python = - 3: py311 + 3: py313 3.7: py37 3.8: py38 3.9: py39 @@ -18,7 +18,7 @@ python = [testenv:ruff] basepython = python3.12 -deps = ruff>=0.9,<0.10 +deps = ruff>=0.11,<0.12 commands = ruff check src tests ruff format --check src tests From 7b9e9226c8f7ecf7dffdd4364591c695c11c0480 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 3 May 2025 12:50:26 +0200 Subject: [PATCH 49/50] Fix ruff issues --- src/graphql/execution/collect_fields.py | 2 +- src/graphql/execution/execute.py | 24 ++++---- src/graphql/graphql.py | 8 +-- src/graphql/language/parser.py | 16 ++--- src/graphql/language/print_location.py | 2 +- src/graphql/pyutils/async_reduce.py | 4 +- src/graphql/pyutils/identity_func.py | 2 +- src/graphql/pyutils/merge_kwargs.py | 2 +- src/graphql/type/definition.py | 35 ++++++----- src/graphql/type/directives.py | 4 +- src/graphql/type/schema.py | 17 +++--- src/graphql/type/validate.py | 6 +- src/graphql/utilities/build_ast_schema.py | 6 +- src/graphql/utilities/build_client_schema.py | 61 +++++++++++-------- src/graphql/utilities/coerce_input_value.py | 2 +- src/graphql/utilities/extend_schema.py | 20 +++--- .../utilities/get_introspection_query.py | 2 +- .../utilities/introspection_from_schema.py | 2 +- .../utilities/lexicographic_sort_schema.py | 17 +++--- .../utilities/strip_ignored_characters.py | 2 +- src/graphql/utilities/type_from_ast.py | 2 +- src/graphql/utilities/value_from_ast.py | 2 +- 
.../defer_stream_directive_on_root_field.py | 2 +- .../rules/executable_definitions.py | 2 +- .../validation/rules/known_argument_names.py | 2 +- .../validation/rules/known_directives.py | 4 +- .../validation/rules/known_type_names.py | 4 +- .../rules/overlapping_fields_can_be_merged.py | 6 +- .../rules/provided_required_arguments.py | 4 +- .../rules/stream_directive_on_list_field.py | 2 +- .../rules/unique_directives_per_location.py | 4 +- .../rules/values_of_correct_type.py | 4 +- src/graphql/validation/validation_context.py | 2 +- tests/error/test_graphql_error.py | 6 +- tests/error/test_located_error.py | 4 +- tests/execution/test_defer.py | 4 +- tests/execution/test_executor.py | 6 +- tests/execution/test_middleware.py | 2 +- tests/execution/test_nonnull.py | 10 +-- tests/language/test_block_string.py | 6 +- tests/language/test_parser.py | 18 +++--- tests/language/test_source.py | 2 +- tests/language/test_visitor.py | 4 +- tests/pyutils/test_description.py | 10 +-- tests/test_user_registry.py | 2 +- tests/utilities/test_build_client_schema.py | 46 +++++++------- tests/utilities/test_print_schema.py | 2 +- .../assert_equal_awaitables_or_values.py | 2 +- 48 files changed, 211 insertions(+), 187 deletions(-) diff --git a/src/graphql/execution/collect_fields.py b/src/graphql/execution/collect_fields.py index 613a55c2..c3fc99cc 100644 --- a/src/graphql/execution/collect_fields.py +++ b/src/graphql/execution/collect_fields.py @@ -394,7 +394,7 @@ def build_grouped_field_sets( # All TargetSets that causes new grouped field sets consist only of DeferUsages # and have should_initiate_defer defined - new_grouped_field_set_details[cast(DeferUsageSet, masking_targets)] = ( + new_grouped_field_set_details[cast("DeferUsageSet", masking_targets)] = ( GroupedFieldSetDetails(new_grouped_field_set, should_initiate_defer) ) diff --git a/src/graphql/execution/execute.py b/src/graphql/execution/execute.py index 90d3d73b..1097e80f 100644 --- a/src/graphql/execution/execute.py +++ b/src/graphql/execution/execute.py @@ -975,7 +975,7 @@ def complete_list_value( if stream_record is not None: self.incremental_publisher.set_is_final_record( - cast(StreamItemsRecord, current_parents) + cast("StreamItemsRecord", current_parents) ) if not awaitable_indices: @@ -1113,7 +1113,7 @@ def complete_abstract_value( runtime_type = resolve_type_fn(result, info, return_type) if self.is_awaitable(runtime_type): - runtime_type = cast(Awaitable, runtime_type) + runtime_type = cast("Awaitable", runtime_type) async def await_complete_object_value() -> Any: value = self.complete_object_value( @@ -1136,7 +1136,7 @@ async def await_complete_object_value() -> Any: return value # pragma: no cover return await_complete_object_value() - runtime_type = cast(Optional[str], runtime_type) + runtime_type = cast("Optional[str]", runtime_type) return self.complete_object_value( self.ensure_valid_runtime_type( @@ -1358,9 +1358,9 @@ async def callback(payload: Any) -> ExecutionResult: # typecast to ExecutionResult, not possible to return # ExperimentalIncrementalExecutionResults when operation is 'subscription'. 
return ( - await cast(Awaitable[ExecutionResult], result) + await cast("Awaitable[ExecutionResult]", result) if self.is_awaitable(result) - else cast(ExecutionResult, result) + else cast("ExecutionResult", result) ) return map_async_iterable(result_or_stream, callback) @@ -1424,7 +1424,7 @@ def execute_deferred_grouped_field_set( ) if self.is_awaitable(incremental_result): - incremental_result = cast(Awaitable, incremental_result) + incremental_result = cast("Awaitable", incremental_result) async def await_incremental_result() -> None: try: @@ -1897,11 +1897,11 @@ def execute_sync( result, ExperimentalIncrementalExecutionResults ): if default_is_awaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() msg = "GraphQL execution failed to complete synchronously." raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def invalid_return_type_error( @@ -1956,7 +1956,9 @@ def add_new_deferred_fragments( # - the InitialResultRecord, or # - a StreamItemsRecord, as `@defer` may be nested under `@stream`. parent = ( - cast(Union[InitialResultRecord, StreamItemsRecord], incremental_data_record) + cast( + "Union[InitialResultRecord, StreamItemsRecord]", incremental_data_record + ) if parent_defer_usage is None else deferred_fragment_record_from_defer_usage( parent_defer_usage, new_defer_map @@ -2069,7 +2071,7 @@ def default_type_resolver( is_type_of_result = type_.is_type_of(value, info) if is_awaitable(is_type_of_result): - append_awaitable_results(cast(Awaitable, is_type_of_result)) + append_awaitable_results(cast("Awaitable", is_type_of_result)) append_awaitable_types(type_) elif is_type_of_result: return type_.name @@ -2257,7 +2259,7 @@ def create_source_event_stream_impl( return ExecutionResult(None, errors=[error]) if context.is_awaitable(event_stream): - awaitable_event_stream = cast(Awaitable, event_stream) + awaitable_event_stream = cast("Awaitable", event_stream) # noinspection PyShadowingNames async def await_event_stream() -> AsyncIterable[Any] | ExecutionResult: diff --git a/src/graphql/graphql.py b/src/graphql/graphql.py index aacc7326..fe1dd5c7 100644 --- a/src/graphql/graphql.py +++ b/src/graphql/graphql.py @@ -96,9 +96,9 @@ async def graphql( ) if default_is_awaitable(result): - return await cast(Awaitable[ExecutionResult], result) + return await cast("Awaitable[ExecutionResult]", result) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def assume_not_awaitable(_value: Any) -> bool: @@ -149,11 +149,11 @@ def graphql_sync( # Assert that the execution was synchronous. if default_is_awaitable(result): - ensure_future(cast(Awaitable[ExecutionResult], result)).cancel() + ensure_future(cast("Awaitable[ExecutionResult]", result)).cancel() msg = "GraphQL execution failed to complete synchronously." 
raise RuntimeError(msg) - return cast(ExecutionResult, result) + return cast("ExecutionResult", result) def graphql_impl( diff --git a/src/graphql/language/parser.py b/src/graphql/language/parser.py index 55c249ba..59299a1d 100644 --- a/src/graphql/language/parser.py +++ b/src/graphql/language/parser.py @@ -255,7 +255,7 @@ def __init__( experimental_client_controlled_nullability: bool = False, ) -> None: if not is_source(source): - source = Source(cast(str, source)) + source = Source(cast("str", source)) self._no_location = no_location self._max_tokens = max_tokens @@ -319,7 +319,7 @@ def parse_definition(self) -> DefinitionNode: ) if keyword_token.kind is TokenKind.NAME: - token_name = cast(str, keyword_token.value) + token_name = cast("str", keyword_token.value) method_name = self._parse_type_system_definition_method_names.get( token_name ) @@ -472,7 +472,9 @@ def parse_arguments(self, is_const: bool) -> list[ArgumentNode]: """Arguments[Const]: (Argument[?Const]+)""" item = self.parse_const_argument if is_const else self.parse_argument return self.optional_many( - TokenKind.PAREN_L, cast(Callable[[], ArgumentNode], item), TokenKind.PAREN_R + TokenKind.PAREN_L, + cast("Callable[[], ArgumentNode]", item), + TokenKind.PAREN_R, ) def parse_argument(self, is_const: bool = False) -> ArgumentNode: @@ -487,7 +489,7 @@ def parse_argument(self, is_const: bool = False) -> ArgumentNode: def parse_const_argument(self) -> ConstArgumentNode: """Argument[Const]: Name : Value[Const]""" - return cast(ConstArgumentNode, self.parse_argument(True)) + return cast("ConstArgumentNode", self.parse_argument(True)) # Implement the parsing rules in the Fragments section. @@ -641,7 +643,7 @@ def parse_variable_value(self, is_const: bool) -> VariableNode: return self.parse_variable() def parse_const_value_literal(self) -> ConstValueNode: - return cast(ConstValueNode, self.parse_value_literal(True)) + return cast("ConstValueNode", self.parse_value_literal(True)) # Implement the parsing rules in the Directives section. 
@@ -654,7 +656,7 @@ def parse_directives(self, is_const: bool) -> list[DirectiveNode]: return directives def parse_const_directives(self) -> list[ConstDirectiveNode]: - return cast(List[ConstDirectiveNode], self.parse_directives(True)) + return cast("List[ConstDirectiveNode]", self.parse_directives(True)) def parse_directive(self, is_const: bool) -> DirectiveNode: """Directive[Const]: @ Name Arguments[?Const]?""" @@ -704,7 +706,7 @@ def parse_type_system_extension(self) -> TypeSystemExtensionNode: keyword_token = self._lexer.lookahead() if keyword_token.kind == TokenKind.NAME: method_name = self._parse_type_extension_method_names.get( - cast(str, keyword_token.value) + cast("str", keyword_token.value) ) if method_name: # pragma: no cover return getattr(self, f"parse_{method_name}")() diff --git a/src/graphql/language/print_location.py b/src/graphql/language/print_location.py index 03509732..21fb1b8a 100644 --- a/src/graphql/language/print_location.py +++ b/src/graphql/language/print_location.py @@ -73,7 +73,7 @@ def print_source_location(source: Source, source_location: SourceLocation) -> st def print_prefixed_lines(*lines: tuple[str, str | None]) -> str: """Print lines specified like this: ("prefix", "string")""" existing_lines = [ - cast(Tuple[str, str], line) for line in lines if line[1] is not None + cast("Tuple[str, str]", line) for line in lines if line[1] is not None ] pad_len = max(len(line[0]) for line in existing_lines) return "\n".join( diff --git a/src/graphql/pyutils/async_reduce.py b/src/graphql/pyutils/async_reduce.py index 02fbf648..4eb79748 100644 --- a/src/graphql/pyutils/async_reduce.py +++ b/src/graphql/pyutils/async_reduce.py @@ -41,7 +41,7 @@ async def async_callback( ) return await result if is_awaitable(result) else result # type: ignore - accumulator = async_callback(cast(Awaitable[U], accumulator), value) + accumulator = async_callback(cast("Awaitable[U]", accumulator), value) else: - accumulator = callback(cast(U, accumulator), value) + accumulator = callback(cast("U", accumulator), value) return accumulator diff --git a/src/graphql/pyutils/identity_func.py b/src/graphql/pyutils/identity_func.py index 2876c570..1a13936b 100644 --- a/src/graphql/pyutils/identity_func.py +++ b/src/graphql/pyutils/identity_func.py @@ -11,7 +11,7 @@ T = TypeVar("T") -DEFAULT_VALUE = cast(Any, Undefined) +DEFAULT_VALUE = cast("Any", Undefined) def identity_func(x: T = DEFAULT_VALUE, *_args: Any) -> T: diff --git a/src/graphql/pyutils/merge_kwargs.py b/src/graphql/pyutils/merge_kwargs.py index c7cace3e..21144524 100644 --- a/src/graphql/pyutils/merge_kwargs.py +++ b/src/graphql/pyutils/merge_kwargs.py @@ -9,4 +9,4 @@ def merge_kwargs(base_dict: T, **kwargs: Any) -> T: """Return arbitrary typed dictionary with some keyword args merged in.""" - return cast(T, {**cast(Dict, base_dict), **kwargs}) + return cast("T", {**cast("Dict", base_dict), **kwargs}) diff --git a/src/graphql/type/definition.py b/src/graphql/type/definition.py index 2e557390..c334488d 100644 --- a/src/graphql/type/definition.py +++ b/src/graphql/type/definition.py @@ -2,7 +2,6 @@ from __future__ import annotations -from enum import Enum from typing import ( TYPE_CHECKING, Any, @@ -19,6 +18,18 @@ overload, ) +try: + from typing import TypedDict +except ImportError: # Python < 3.8 + from typing_extensions import TypedDict +try: + from typing import TypeAlias, TypeGuard +except ImportError: # Python < 3.10 + from typing_extensions import TypeAlias, TypeGuard + +if TYPE_CHECKING: + from enum import Enum + from ..error 
import GraphQLError from ..language import ( EnumTypeDefinitionNode, @@ -57,18 +68,10 @@ from ..utilities.value_from_ast_untyped import value_from_ast_untyped from .assert_name import assert_enum_value_name, assert_name -try: - from typing import TypedDict -except ImportError: # Python < 3.8 - from typing_extensions import TypedDict -try: - from typing import TypeAlias, TypeGuard -except ImportError: # Python < 3.10 - from typing_extensions import TypeAlias, TypeGuard - if TYPE_CHECKING: from .schema import GraphQLSchema + __all__ = [ "GraphQLAbstractType", "GraphQLArgument", @@ -503,7 +506,7 @@ def __init__( args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } else: @@ -1077,7 +1080,7 @@ def __init__( extension_ast_nodes=extension_ast_nodes, ) try: # check for enum - values = cast(Enum, values).__members__ # type: ignore + values = cast("Enum", values).__members__ # type: ignore except AttributeError: if not isinstance(values, Mapping) or not all( isinstance(name, str) for name in values @@ -1090,9 +1093,9 @@ def __init__( " with value names as keys." ) raise TypeError(msg) from error - values = cast(Dict[str, Any], values) + values = cast("Dict[str, Any]", values) else: - values = cast(Dict[str, Enum], values) + values = cast("Dict[str, Enum]", values) if names_as_values is False: values = {key: value.value for key, value in values.items()} elif names_as_values is True: @@ -1662,7 +1665,7 @@ def get_nullable_type( """Unwrap possible non-null type""" if is_non_null_type(type_): type_ = type_.of_type - return cast(Optional[GraphQLNullableType], type_) + return cast("Optional[GraphQLNullableType]", type_) # These named types do not include modifiers like List or NonNull. 
@@ -1707,7 +1710,7 @@ def get_named_type(type_: GraphQLType | None) -> GraphQLNamedType | None: unwrapped_type = type_ while is_wrapping_type(unwrapped_type): unwrapped_type = unwrapped_type.of_type - return cast(GraphQLNamedType, unwrapped_type) + return cast("GraphQLNamedType", unwrapped_type) return None diff --git a/src/graphql/type/directives.py b/src/graphql/type/directives.py index b73d938f..ecd201c2 100644 --- a/src/graphql/type/directives.py +++ b/src/graphql/type/directives.py @@ -79,7 +79,7 @@ def __init__( locations = tuple( value if isinstance(value, DirectiveLocation) - else DirectiveLocation[cast(str, value)] + else DirectiveLocation[cast("str", value)] for value in locations ) except (KeyError, TypeError) as error: @@ -92,7 +92,7 @@ def __init__( args = { assert_name(name): value if isinstance(value, GraphQLArgument) - else GraphQLArgument(cast(GraphQLInputType, value)) + else GraphQLArgument(cast("GraphQLInputType", value)) for name, value in args.items() } else: diff --git a/src/graphql/type/schema.py b/src/graphql/type/schema.py index befefabd..f8ab756b 100644 --- a/src/graphql/type/schema.py +++ b/src/graphql/type/schema.py @@ -297,11 +297,12 @@ def __deepcopy__(self, memo_: dict) -> GraphQLSchema: for directive in directives: remap_directive(directive, type_map) return self.__class__( - self.query_type and cast(GraphQLObjectType, type_map[self.query_type.name]), + self.query_type + and cast("GraphQLObjectType", type_map[self.query_type.name]), self.mutation_type - and cast(GraphQLObjectType, type_map[self.mutation_type.name]), + and cast("GraphQLObjectType", type_map[self.mutation_type.name]), self.subscription_type - and cast(GraphQLObjectType, type_map[self.subscription_type.name]), + and cast("GraphQLObjectType", type_map[self.subscription_type.name]), types, directives, self.description, @@ -327,7 +328,7 @@ def get_possible_types( abstract_type.types if is_union_type(abstract_type) else self.get_implementations( - cast(GraphQLInterfaceType, abstract_type) + cast("GraphQLInterfaceType", abstract_type) ).objects ) @@ -354,7 +355,7 @@ def is_sub_type( add(type_.name) else: implementations = self.get_implementations( - cast(GraphQLInterfaceType, abstract_type) + cast("GraphQLInterfaceType", abstract_type) ) for type_ in implementations.objects: add(type_.name) @@ -410,7 +411,7 @@ class TypeSet(Dict[GraphQLNamedType, None]): @classmethod def with_initial_types(cls, types: Collection[GraphQLType]) -> TypeSet: - return cast(TypeSet, super().fromkeys(types)) + return cast("TypeSet", super().fromkeys(types)) def collect_referenced_types(self, type_: GraphQLType) -> None: """Recursive function supplementing the type starting from an initial type.""" @@ -455,7 +456,7 @@ def remapped_type(type_: GraphQLType, type_map: TypeMap) -> GraphQLType: """Get a copy of the given type that uses this type map.""" if is_wrapping_type(type_): return type_.__class__(remapped_type(type_.of_type, type_map)) - type_ = cast(GraphQLNamedType, type_) + type_ = cast("GraphQLNamedType", type_) return type_map.get(type_.name, type_) @@ -493,5 +494,5 @@ def remap_directive(directive: GraphQLDirective, type_map: TypeMap) -> None: args = directive.args for arg_name, arg in args.items(): arg = copy(arg) # noqa: PLW2901 - arg.type = cast(GraphQLInputType, remapped_type(arg.type, type_map)) + arg.type = cast("GraphQLInputType", remapped_type(arg.type, type_map)) args[arg_name] = arg diff --git a/src/graphql/type/validate.py b/src/graphql/type/validate.py index d5f8f8ce..9b22f44e 100644 --- 
a/src/graphql/type/validate.py +++ b/src/graphql/type/validate.py @@ -101,7 +101,7 @@ def report_error( ) -> None: if nodes and not isinstance(nodes, Node): nodes = [node for node in nodes if node] - nodes = cast(Optional[Collection[Node]], nodes) + nodes = cast("Optional[Collection[Node]]", nodes) self.errors.append(GraphQLError(message, nodes)) def validate_root_types(self) -> None: @@ -183,7 +183,7 @@ def validate_name(self, node: Any, name: str | None = None) -> None: try: if not name: name = node.name - name = cast(str, name) + name = cast("str", name) ast_node = node.ast_node except AttributeError: # pragma: no cover pass @@ -561,7 +561,7 @@ def __call__(self, input_obj: GraphQLInputObjectType) -> None: " within itself through a series of non-null fields:" f" '{'.'.join(field_names)}'.", cast( - Collection[Node], + "Collection[Node]", map(attrgetter("ast_node"), map(itemgetter(1), cycle_path)), ), ) diff --git a/src/graphql/utilities/build_ast_schema.py b/src/graphql/utilities/build_ast_schema.py index 8736e979..26ccfea2 100644 --- a/src/graphql/utilities/build_ast_schema.py +++ b/src/graphql/utilities/build_ast_schema.py @@ -68,11 +68,11 @@ def build_ast_schema( # validation with validate_schema() will produce more actionable results. type_name = type_.name if type_name == "Query": - schema_kwargs["query"] = cast(GraphQLObjectType, type_) + schema_kwargs["query"] = cast("GraphQLObjectType", type_) elif type_name == "Mutation": - schema_kwargs["mutation"] = cast(GraphQLObjectType, type_) + schema_kwargs["mutation"] = cast("GraphQLObjectType", type_) elif type_name == "Subscription": - schema_kwargs["subscription"] = cast(GraphQLObjectType, type_) + schema_kwargs["subscription"] = cast("GraphQLObjectType", type_) # If specified directives were not explicitly declared, add them. directives = schema_kwargs["directives"] diff --git a/src/graphql/utilities/build_client_schema.py b/src/graphql/utilities/build_client_schema.py index c4d05ccc..0e2cbd0e 100644 --- a/src/graphql/utilities/build_client_schema.py +++ b/src/graphql/utilities/build_client_schema.py @@ -3,7 +3,7 @@ from __future__ import annotations from itertools import chain -from typing import Callable, Collection, cast +from typing import TYPE_CHECKING, Callable, Collection, cast from ..language import DirectiveLocation, parse_value from ..pyutils import Undefined, inspect @@ -33,22 +33,25 @@ is_output_type, specified_scalar_types, ) -from .get_introspection_query import ( - IntrospectionDirective, - IntrospectionEnumType, - IntrospectionField, - IntrospectionInputObjectType, - IntrospectionInputValue, - IntrospectionInterfaceType, - IntrospectionObjectType, - IntrospectionQuery, - IntrospectionScalarType, - IntrospectionType, - IntrospectionTypeRef, - IntrospectionUnionType, -) from .value_from_ast import value_from_ast +if TYPE_CHECKING: + from .get_introspection_query import ( + IntrospectionDirective, + IntrospectionEnumType, + IntrospectionField, + IntrospectionInputObjectType, + IntrospectionInputValue, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionQuery, + IntrospectionScalarType, + IntrospectionType, + IntrospectionTypeRef, + IntrospectionUnionType, + ) + + __all__ = ["build_client_schema"] @@ -90,17 +93,17 @@ def get_type(type_ref: IntrospectionTypeRef) -> GraphQLType: if not item_ref: msg = "Decorated type deeper than introspection query." 
raise TypeError(msg) - item_ref = cast(IntrospectionTypeRef, item_ref) + item_ref = cast("IntrospectionTypeRef", item_ref) return GraphQLList(get_type(item_ref)) if kind == TypeKind.NON_NULL.name: nullable_ref = type_ref.get("ofType") if not nullable_ref: msg = "Decorated type deeper than introspection query." raise TypeError(msg) - nullable_ref = cast(IntrospectionTypeRef, nullable_ref) + nullable_ref = cast("IntrospectionTypeRef", nullable_ref) nullable_type = get_type(nullable_ref) return GraphQLNonNull(assert_nullable_type(nullable_type)) - type_ref = cast(IntrospectionType, type_ref) + type_ref = cast("IntrospectionType", type_ref) return get_named_type(type_ref) def get_named_type(type_ref: IntrospectionType) -> GraphQLNamedType: @@ -145,7 +148,7 @@ def build_scalar_def( ) -> GraphQLScalarType: name = scalar_introspection["name"] try: - return cast(GraphQLScalarType, GraphQLScalarType.reserved_types[name]) + return cast("GraphQLScalarType", GraphQLScalarType.reserved_types[name]) except KeyError: return GraphQLScalarType( name=name, @@ -168,7 +171,7 @@ def build_implementations_list( f" {inspect(implementing_introspection)}." ) raise TypeError(msg) - interfaces = cast(Collection[IntrospectionInterfaceType], maybe_interfaces) + interfaces = cast("Collection[IntrospectionInterfaceType]", maybe_interfaces) return [get_interface_type(interface) for interface in interfaces] def build_object_def( @@ -176,7 +179,7 @@ def build_object_def( ) -> GraphQLObjectType: name = object_introspection["name"] try: - return cast(GraphQLObjectType, GraphQLObjectType.reserved_types[name]) + return cast("GraphQLObjectType", GraphQLObjectType.reserved_types[name]) except KeyError: return GraphQLObjectType( name=name, @@ -205,7 +208,9 @@ def build_union_def( f" {inspect(union_introspection)}." 
) raise TypeError(msg) - possible_types = cast(Collection[IntrospectionObjectType], maybe_possible_types) + possible_types = cast( + "Collection[IntrospectionObjectType]", maybe_possible_types + ) return GraphQLUnionType( name=union_introspection["name"], description=union_introspection.get("description"), @@ -221,7 +226,7 @@ def build_enum_def(enum_introspection: IntrospectionEnumType) -> GraphQLEnumType raise TypeError(msg) name = enum_introspection["name"] try: - return cast(GraphQLEnumType, GraphQLEnumType.reserved_types[name]) + return cast("GraphQLEnumType", GraphQLEnumType.reserved_types[name]) except KeyError: return GraphQLEnumType( name=name, @@ -275,7 +280,7 @@ def build_field_def_map( } def build_field(field_introspection: IntrospectionField) -> GraphQLField: - type_introspection = cast(IntrospectionType, field_introspection["type"]) + type_introspection = cast("IntrospectionType", field_introspection["type"]) type_ = get_type(type_introspection) if not is_output_type(type_): msg = ( @@ -310,7 +315,7 @@ def build_argument_def_map( def build_argument( argument_introspection: IntrospectionInputValue, ) -> GraphQLArgument: - type_introspection = cast(IntrospectionType, argument_introspection["type"]) + type_introspection = cast("IntrospectionType", argument_introspection["type"]) type_ = get_type(type_introspection) if not is_input_type(type_): msg = ( @@ -345,7 +350,9 @@ def build_input_value_def_map( def build_input_value( input_value_introspection: IntrospectionInputValue, ) -> GraphQLInputField: - type_introspection = cast(IntrospectionType, input_value_introspection["type"]) + type_introspection = cast( + "IntrospectionType", input_value_introspection["type"] + ) type_ = get_type(type_introspection) if not is_input_type(type_): msg = ( @@ -388,7 +395,7 @@ def build_directive( is_repeatable=directive_introspection.get("isRepeatable", False), locations=list( cast( - Collection[DirectiveLocation], + "Collection[DirectiveLocation]", directive_introspection.get("locations"), ) ), diff --git a/src/graphql/utilities/coerce_input_value.py b/src/graphql/utilities/coerce_input_value.py index ab06caf1..b7452ec3 100644 --- a/src/graphql/utilities/coerce_input_value.py +++ b/src/graphql/utilities/coerce_input_value.py @@ -160,7 +160,7 @@ def coerce_input_value( # Scalars and Enums determine if an input value is valid via `parse_value()`, # which can throw to indicate failure. If it throws, maintain a reference # to the original error. 
- type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) try: parse_result = type_.parse_value(input_value) except GraphQLError as error: diff --git a/src/graphql/utilities/extend_schema.py b/src/graphql/utilities/extend_schema.py index 14adc661..aebdd2b3 100644 --- a/src/graphql/utilities/extend_schema.py +++ b/src/graphql/utilities/extend_schema.py @@ -405,7 +405,7 @@ def extend_object_type_interfaces( ) -> list[GraphQLInterfaceType]: """Extend a GraphQL object type interface.""" return [ - cast(GraphQLInterfaceType, self.replace_named_type(interface)) + cast("GraphQLInterfaceType", self.replace_named_type(interface)) for interface in kwargs["interfaces"] ] + self.build_interfaces(extensions) @@ -443,7 +443,7 @@ def extend_interface_type_interfaces( ) -> list[GraphQLInterfaceType]: """Extend GraphQL interface type interfaces.""" return [ - cast(GraphQLInterfaceType, self.replace_named_type(interface)) + cast("GraphQLInterfaceType", self.replace_named_type(interface)) for interface in kwargs["interfaces"] ] + self.build_interfaces(extensions) @@ -483,7 +483,7 @@ def extend_union_type_types( ) -> list[GraphQLObjectType]: """Extend types of a GraphQL union type.""" return [ - cast(GraphQLObjectType, self.replace_named_type(member_type)) + cast("GraphQLObjectType", self.replace_named_type(member_type)) for member_type in kwargs["types"] ] + self.build_union_types(extensions) @@ -551,9 +551,9 @@ def get_wrapped_type(self, node: TypeNode) -> GraphQLType: return GraphQLList(self.get_wrapped_type(node.type)) if isinstance(node, NonNullTypeNode): return GraphQLNonNull( - cast(GraphQLNullableType, self.get_wrapped_type(node.type)) + cast("GraphQLNullableType", self.get_wrapped_type(node.type)) ) - return self.get_named_type(cast(NamedTypeNode, node)) + return self.get_named_type(cast("NamedTypeNode", node)) def build_directive(self, node: DirectiveDefinitionNode) -> GraphQLDirective: """Build a GraphQL directive for a given directive definition node.""" @@ -585,7 +585,7 @@ def build_field_map( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. field_map[field.name.value] = GraphQLField( - type_=cast(GraphQLOutputType, self.get_wrapped_type(field.type)), + type_=cast("GraphQLOutputType", self.get_wrapped_type(field.type)), description=field.description.value if field.description else None, args=self.build_argument_map(field.arguments), deprecation_reason=get_deprecation_reason(field), @@ -603,7 +603,7 @@ def build_argument_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. - type_ = cast(GraphQLInputType, self.get_wrapped_type(arg.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(arg.type)) arg_map[arg.name.value] = GraphQLArgument( type_=type_, description=arg.description.value if arg.description else None, @@ -624,7 +624,7 @@ def build_input_field_map( # Note: While this could make assertions to get the correctly typed # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. 
- type_ = cast(GraphQLInputType, self.get_wrapped_type(field.type)) + type_ = cast("GraphQLInputType", self.get_wrapped_type(field.type)) input_field_map[field.name.value] = GraphQLInputField( type_=type_, description=field.description.value if field.description else None, @@ -668,7 +668,7 @@ def build_interfaces( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. return [ - cast(GraphQLInterfaceType, self.get_named_type(type_)) + cast("GraphQLInterfaceType", self.get_named_type(type_)) for node in nodes for type_ in node.interfaces or [] ] @@ -682,7 +682,7 @@ def build_union_types( # value, that would throw immediately while type system validation # with validate_schema() will produce more actionable results. return [ - cast(GraphQLObjectType, self.get_named_type(type_)) + cast("GraphQLObjectType", self.get_named_type(type_)) for node in nodes for type_ in node.types or [] ] diff --git a/src/graphql/utilities/get_introspection_query.py b/src/graphql/utilities/get_introspection_query.py index d9cb160f..adf038ac 100644 --- a/src/graphql/utilities/get_introspection_query.py +++ b/src/graphql/utilities/get_introspection_query.py @@ -304,7 +304,7 @@ class IntrospectionSchema(MaybeWithDescription): # The root typed dictionary for schema introspections. # Note: We don't use class syntax here since the key looks like a private attribute. -IntrospectionQuery = TypedDict( # noqa: UP013 +IntrospectionQuery = TypedDict( "IntrospectionQuery", {"__schema": IntrospectionSchema}, ) diff --git a/src/graphql/utilities/introspection_from_schema.py b/src/graphql/utilities/introspection_from_schema.py index cc1e60ce..a0440a32 100644 --- a/src/graphql/utilities/introspection_from_schema.py +++ b/src/graphql/utilities/introspection_from_schema.py @@ -51,4 +51,4 @@ def introspection_from_schema( if not result.data: # pragma: no cover msg = "Introspection did not return a result" raise GraphQLError(msg) - return cast(IntrospectionQuery, result.data) + return cast("IntrospectionQuery", result.data) diff --git a/src/graphql/utilities/lexicographic_sort_schema.py b/src/graphql/utilities/lexicographic_sort_schema.py index cf0c4959..de675a94 100644 --- a/src/graphql/utilities/lexicographic_sort_schema.py +++ b/src/graphql/utilities/lexicographic_sort_schema.py @@ -51,7 +51,7 @@ def replace_type( return GraphQLList(replace_type(type_.of_type)) if is_non_null_type(type_): return GraphQLNonNull(replace_type(type_.of_type)) - return replace_named_type(cast(GraphQLNamedType, type_)) + return replace_named_type(cast("GraphQLNamedType", type_)) def replace_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: return type_map[type_.name] @@ -76,7 +76,7 @@ def sort_args(args_map: dict[str, GraphQLArgument]) -> dict[str, GraphQLArgument args[name] = GraphQLArgument( **merge_kwargs( arg.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, arg.type)), + type_=replace_type(cast("GraphQLNamedType", arg.type)), ) ) return args @@ -87,7 +87,7 @@ def sort_fields(fields_map: dict[str, GraphQLField]) -> dict[str, GraphQLField]: fields[name] = GraphQLField( **merge_kwargs( field.to_kwargs(), - type_=replace_type(cast(GraphQLNamedType, field.type)), + type_=replace_type(cast("GraphQLNamedType", field.type)), args=sort_args(field.args), ) ) @@ -99,7 +99,8 @@ def sort_input_fields( return { name: GraphQLInputField( cast( - GraphQLInputType, replace_type(cast(GraphQLNamedType, field.type)) + "GraphQLInputType", + replace_type(cast("GraphQLNamedType", 
field.type)), ), description=field.description, default_value=field.default_value, @@ -174,12 +175,14 @@ def sort_named_type(type_: GraphQLNamedType) -> GraphQLNamedType: sort_directive(directive) for directive in sorted(schema.directives, key=sort_by_name_key) ], - query=cast(Optional[GraphQLObjectType], replace_maybe_type(schema.query_type)), + query=cast( + "Optional[GraphQLObjectType]", replace_maybe_type(schema.query_type) + ), mutation=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.mutation_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.mutation_type) ), subscription=cast( - Optional[GraphQLObjectType], replace_maybe_type(schema.subscription_type) + "Optional[GraphQLObjectType]", replace_maybe_type(schema.subscription_type) ), ast_node=schema.ast_node, ) diff --git a/src/graphql/utilities/strip_ignored_characters.py b/src/graphql/utilities/strip_ignored_characters.py index 6521d10b..9ffe1e26 100644 --- a/src/graphql/utilities/strip_ignored_characters.py +++ b/src/graphql/utilities/strip_ignored_characters.py @@ -68,7 +68,7 @@ def strip_ignored_characters(source: str | Source) -> str: """Type description""" type Foo{"""Field description""" bar:String} ''' if not is_source(source): - source = Source(cast(str, source)) + source = Source(cast("str", source)) body = source.body lexer = Lexer(source) diff --git a/src/graphql/utilities/type_from_ast.py b/src/graphql/utilities/type_from_ast.py index c082ebc1..10acd68f 100644 --- a/src/graphql/utilities/type_from_ast.py +++ b/src/graphql/utilities/type_from_ast.py @@ -58,7 +58,7 @@ def type_from_ast( return GraphQLList(inner_type) if inner_type else None if isinstance(type_node, NonNullTypeNode): inner_type = type_from_ast(schema, type_node.type) - inner_type = cast(GraphQLNullableType, inner_type) + inner_type = cast("GraphQLNullableType", inner_type) return GraphQLNonNull(inner_type) if inner_type else None if isinstance(type_node, NamedTypeNode): return schema.get_type(type_node.name.value) diff --git a/src/graphql/utilities/value_from_ast.py b/src/graphql/utilities/value_from_ast.py index dfefb723..399cdcb4 100644 --- a/src/graphql/utilities/value_from_ast.py +++ b/src/graphql/utilities/value_from_ast.py @@ -131,7 +131,7 @@ def value_from_ast( if is_leaf_type(type_): # Scalars fulfill parsing a literal value via `parse_literal()`. Invalid values # represent a failure to parse correctly, in which case Undefined is returned. 
- type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) # noinspection PyBroadException try: if variables: diff --git a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py index 7a73a990..023fc2b2 100644 --- a/src/graphql/validation/rules/defer_stream_directive_on_root_field.py +++ b/src/graphql/validation/rules/defer_stream_directive_on_root_field.py @@ -29,7 +29,7 @@ def enter_directive( _path: Any, _ancestors: list[Node], ) -> None: - context = cast(ValidationContext, self.context) + context = cast("ValidationContext", self.context) parent_type = context.get_parent_type() if not parent_type: return diff --git a/src/graphql/validation/rules/executable_definitions.py b/src/graphql/validation/rules/executable_definitions.py index 1f702210..6ca01a9d 100644 --- a/src/graphql/validation/rules/executable_definitions.py +++ b/src/graphql/validation/rules/executable_definitions.py @@ -39,7 +39,7 @@ def enter_document(self, node: DocumentNode, *_args: Any) -> VisitorAction: ) else "'{}'".format( cast( - Union[DirectiveDefinitionNode, TypeDefinitionNode], + "Union[DirectiveDefinitionNode, TypeDefinitionNode]", definition, ).name.value ) diff --git a/src/graphql/validation/rules/known_argument_names.py b/src/graphql/validation/rules/known_argument_names.py index 46f9ef42..643300d0 100644 --- a/src/graphql/validation/rules/known_argument_names.py +++ b/src/graphql/validation/rules/known_argument_names.py @@ -35,7 +35,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): directive_args[directive.name] = list(directive.args) ast_definitions = context.document.definitions diff --git a/src/graphql/validation/rules/known_directives.py b/src/graphql/validation/rules/known_directives.py index 8a0c76c4..da31730b 100644 --- a/src/graphql/validation/rules/known_directives.py +++ b/src/graphql/validation/rules/known_directives.py @@ -35,7 +35,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: locations_map[directive.name] = directive.locations @@ -111,7 +111,7 @@ def get_directive_location_for_ast_path( raise TypeError(msg) kind = applied_to.kind if kind == "operation_definition": - applied_to = cast(OperationDefinitionNode, applied_to) + applied_to = cast("OperationDefinitionNode", applied_to) return _operation_location[applied_to.operation.value] if kind == "input_value_definition": parent_node = ancestors[-3] diff --git a/src/graphql/validation/rules/known_type_names.py b/src/graphql/validation/rules/known_type_names.py index 118d7c0e..5dbac00b 100644 --- a/src/graphql/validation/rules/known_type_names.py +++ b/src/graphql/validation/rules/known_type_names.py @@ -94,7 +94,7 @@ def is_sdl_node( value is not None and not isinstance(value, list) and ( - is_type_system_definition_node(cast(Node, value)) - or is_type_system_extension_node(cast(Node, value)) + is_type_system_definition_node(cast("Node", value)) + or is_type_system_extension_node(cast("Node", value)) ) ) diff --git 
a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py index 58a7a3b7..97939e56 100644 --- a/src/graphql/validation/rules/overlapping_fields_can_be_merged.py +++ b/src/graphql/validation/rules/overlapping_fields_can_be_merged.py @@ -538,8 +538,8 @@ def find_conflict( ) # The return type for each field. - type1 = cast(Optional[GraphQLOutputType], def1 and def1.type) - type2 = cast(Optional[GraphQLOutputType], def2 and def2.type) + type1 = cast("Optional[GraphQLOutputType]", def1 and def1.type) + type2 = cast("Optional[GraphQLOutputType]", def2 and def2.type) if not are_mutually_exclusive: # Two aliases must refer to the same field. @@ -739,7 +739,7 @@ def collect_fields_and_fragment_names( if not node_and_defs.get(response_name): node_and_defs[response_name] = [] node_and_defs[response_name].append( - cast(NodeAndDef, (parent_type, selection, field_def)) + cast("NodeAndDef", (parent_type, selection, field_def)) ) elif isinstance(selection, FragmentSpreadNode): fragment_names[selection.name.value] = True diff --git a/src/graphql/validation/rules/provided_required_arguments.py b/src/graphql/validation/rules/provided_required_arguments.py index f94515fe..9c98065e 100644 --- a/src/graphql/validation/rules/provided_required_arguments.py +++ b/src/graphql/validation/rules/provided_required_arguments.py @@ -41,7 +41,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = schema.directives if schema else specified_directives - for directive in cast(List, defined_directives): + for directive in cast("List", defined_directives): required_args_map[directive.name] = { name: arg for name, arg in directive.args.items() @@ -71,7 +71,7 @@ def leave_directive(self, directive_node: DirectiveNode, *_args: Any) -> None: arg_type_str = ( str(arg_type) if is_type(arg_type) - else print_ast(cast(TypeNode, arg_type)) + else print_ast(cast("TypeNode", arg_type)) ) self.report_error( GraphQLError( diff --git a/src/graphql/validation/rules/stream_directive_on_list_field.py b/src/graphql/validation/rules/stream_directive_on_list_field.py index 141984c2..03015cd0 100644 --- a/src/graphql/validation/rules/stream_directive_on_list_field.py +++ b/src/graphql/validation/rules/stream_directive_on_list_field.py @@ -28,7 +28,7 @@ def enter_directive( _path: Any, _ancestors: list[Node], ) -> None: - context = cast(ValidationContext, self.context) + context = cast("ValidationContext", self.context) field_def = context.get_field_def() parent_type = context.get_parent_type() if ( diff --git a/src/graphql/validation/rules/unique_directives_per_location.py b/src/graphql/validation/rules/unique_directives_per_location.py index de9a05d0..daab2935 100644 --- a/src/graphql/validation/rules/unique_directives_per_location.py +++ b/src/graphql/validation/rules/unique_directives_per_location.py @@ -38,7 +38,7 @@ def __init__(self, context: ValidationContext | SDLValidationContext) -> None: schema = context.schema defined_directives = ( - schema.directives if schema else cast(List, specified_directives) + schema.directives if schema else cast("List", specified_directives) ) for directive in defined_directives: unique_directive_map[directive.name] = not directive.is_repeatable @@ -60,7 +60,7 @@ def enter(self, node: Node, *_args: Any) -> None: directives = getattr(node, "directives", None) if not directives: return - directives = cast(List[DirectiveNode], directives) + directives = 
cast("List[DirectiveNode]", directives) if isinstance(node, (SchemaDefinitionNode, SchemaExtensionNode)): seen_directives = self.schema_directives diff --git a/src/graphql/validation/rules/values_of_correct_type.py b/src/graphql/validation/rules/values_of_correct_type.py index 7df72c6e..ea4c4a3c 100644 --- a/src/graphql/validation/rules/values_of_correct_type.py +++ b/src/graphql/validation/rules/values_of_correct_type.py @@ -157,7 +157,7 @@ def is_valid_value_node(self, node: ValueNode) -> None: # Scalars determine if a literal value is valid via `parse_literal()` which may # throw or return an invalid value to indicate failure. - type_ = cast(GraphQLScalarType, type_) + type_ = cast("GraphQLScalarType", type_) try: parse_result = type_.parse_literal(node) if parse_result is Undefined: @@ -218,7 +218,7 @@ def validate_one_of_input_object( is_variable = value and isinstance(value, VariableNode) if is_variable: - variable_name = cast(VariableNode, value).name.value + variable_name = cast("VariableNode", value).name.value definition = variable_definitions[variable_name] is_nullable_variable = not isinstance(definition.type, NonNullTypeNode) diff --git a/src/graphql/validation/validation_context.py b/src/graphql/validation/validation_context.py index dec21042..055b4231 100644 --- a/src/graphql/validation/validation_context.py +++ b/src/graphql/validation/validation_context.py @@ -143,7 +143,7 @@ def get_fragment_spreads(self, node: SelectionSetNode) -> list[FragmentSpreadNod append_spread(selection) else: set_to_visit = cast( - NodeWithSelectionSet, selection + "NodeWithSelectionSet", selection ).selection_set if set_to_visit: append_set(set_to_visit) diff --git a/tests/error/test_graphql_error.py b/tests/error/test_graphql_error.py index fbc8602e..03b85dcf 100644 --- a/tests/error/test_graphql_error.py +++ b/tests/error/test_graphql_error.py @@ -25,7 +25,7 @@ ast = parse(source) operation_node = ast.definitions[0] -operation_node = cast(OperationDefinitionNode, operation_node) +operation_node = cast("OperationDefinitionNode", operation_node) assert operation_node assert operation_node.kind == "operation_definition" field_node = operation_node.selection_set.selections[0] @@ -299,7 +299,7 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_a = doc_a.definitions[0] - op_a = cast(ObjectTypeDefinitionNode, op_a) + op_a = cast("ObjectTypeDefinitionNode", op_a) assert op_a assert op_a.kind == "object_type_definition" assert op_a.fields @@ -317,7 +317,7 @@ def prints_an_error_with_nodes_from_different_sources(): ) ) op_b = doc_b.definitions[0] - op_b = cast(ObjectTypeDefinitionNode, op_b) + op_b = cast("ObjectTypeDefinitionNode", op_b) assert op_b assert op_b.kind == "object_type_definition" assert op_b.fields diff --git a/tests/error/test_located_error.py b/tests/error/test_located_error.py index 593b24ad..f22f6fd4 100644 --- a/tests/error/test_located_error.py +++ b/tests/error/test_located_error.py @@ -11,7 +11,7 @@ def throws_without_an_original_error(): def passes_graphql_error_through(): path = ["path", 3, "to", "field"] - e = GraphQLError("msg", None, None, None, cast(Any, path)) + e = GraphQLError("msg", None, None, None, cast("Any", path)) assert located_error(e, [], []) == e def passes_graphql_error_ish_through(): @@ -21,7 +21,7 @@ def passes_graphql_error_ish_through(): def does_not_pass_through_elasticsearch_like_errors(): e = Exception("I am from elasticsearch") - cast(Any, e).path = "/something/feed/_search" + cast("Any", e).path = "/something/feed/_search" assert 
located_error(e, [], []) is not e def handles_lazy_error_messages(): diff --git a/tests/execution/test_defer.py b/tests/execution/test_defer.py index 62dc88bb..51133100 100644 --- a/tests/execution/test_defer.py +++ b/tests/execution/test_defer.py @@ -417,7 +417,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): pending = [PendingResult("foo", ["bar"])] incremental = [ - cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) ] completed = [CompletedResult("foo")] result = SubsequentIncrementalExecutionResult( @@ -442,7 +442,7 @@ def can_format_and_print_subsequent_incremental_execution_result(): def can_compare_subsequent_incremental_execution_result(): pending = [PendingResult("foo", ["bar"])] incremental = [ - cast(IncrementalResult, IncrementalDeferResult({"foo": 1}, "bar")) + cast("IncrementalResult", IncrementalDeferResult({"foo": 1}, "bar")) ] completed = [CompletedResult("foo")] args: dict[str, Any] = { diff --git a/tests/execution/test_executor.py b/tests/execution/test_executor.py index 792066f1..a11c6b5e 100644 --- a/tests/execution/test_executor.py +++ b/tests/execution/test_executor.py @@ -245,16 +245,16 @@ def resolve(_obj, info): execute_sync(schema, document, root_value, variable_values=variable_values) assert len(resolved_infos) == 1 - operation = cast(OperationDefinitionNode, document.definitions[0]) + operation = cast("OperationDefinitionNode", document.definitions[0]) assert operation assert operation.kind == "operation_definition" - field = cast(FieldNode, operation.selection_set.selections[0]) + field = cast("FieldNode", operation.selection_set.selections[0]) assert resolved_infos[0] == GraphQLResolveInfo( field_name="test", field_nodes=[field], return_type=GraphQLString, - parent_type=cast(GraphQLObjectType, schema.query_type), + parent_type=cast("GraphQLObjectType", schema.query_type), path=ResponsePath(None, "result", "Test"), schema=schema, fragments={}, diff --git a/tests/execution/test_middleware.py b/tests/execution/test_middleware.py index 291f218c..50159995 100644 --- a/tests/execution/test_middleware.py +++ b/tests/execution/test_middleware.py @@ -323,7 +323,7 @@ def bad_middleware_object(): GraphQLSchema(test_type), doc, None, - middleware=cast(Middleware, {"bad": "value"}), + middleware=cast("Middleware", {"bad": "value"}), ) assert str(exc_info.value) == ( diff --git a/tests/execution/test_nonnull.py b/tests/execution/test_nonnull.py index 99810ed9..6c98eb67 100644 --- a/tests/execution/test_nonnull.py +++ b/tests/execution/test_nonnull.py @@ -111,7 +111,7 @@ def patch(data: str) -> str: async def execute_sync_and_async(query: str, root_value: Any) -> ExecutionResult: sync_result = execute_sync(schema, parse(query), root_value) async_result = await cast( - Awaitable[ExecutionResult], execute(schema, parse(patch(query)), root_value) + "Awaitable[ExecutionResult]", execute(schema, parse(patch(query)), root_value) ) assert repr(async_result) == patch(repr(sync_result)) @@ -218,14 +218,14 @@ def describe_nulls_a_complex_tree_of_nullable_fields_each(): @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == (data, None) @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", 
execute_query(query, ThrowingData()) ) assert result == ( data, @@ -352,7 +352,7 @@ def describe_nulls_first_nullable_after_long_chain_of_non_null_fields(): @pytest.mark.asyncio async def returns_null(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, NullingData()) + "Awaitable[ExecutionResult]", execute_query(query, NullingData()) ) assert result == ( data, @@ -415,7 +415,7 @@ async def returns_null(): @pytest.mark.asyncio async def throws(): result = await cast( - Awaitable[ExecutionResult], execute_query(query, ThrowingData()) + "Awaitable[ExecutionResult]", execute_query(query, ThrowingData()) ) assert result == ( data, diff --git a/tests/language/test_block_string.py b/tests/language/test_block_string.py index 74f99734..d135dde9 100644 --- a/tests/language/test_block_string.py +++ b/tests/language/test_block_string.py @@ -148,8 +148,8 @@ def __init__(self, string: str) -> None: def __str__(self) -> str: return self.string - _assert_printable(cast(str, LazyString(""))) - _assert_non_printable(cast(str, LazyString(" "))) + _assert_printable(cast("str", LazyString(""))) + _assert_non_printable(cast("str", LazyString(" "))) def describe_print_block_string(): @@ -212,4 +212,4 @@ class LazyString: def __str__(self) -> str: return "lazy" - _assert_block_string(cast(str, LazyString()), '"""lazy"""') + _assert_block_string(cast("str", LazyString()), '"""lazy"""') diff --git a/tests/language/test_parser.py b/tests/language/test_parser.py index e6d33064..0121db23 100644 --- a/tests/language/test_parser.py +++ b/tests/language/test_parser.py @@ -181,11 +181,11 @@ def parses_multi_byte_characters(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - selection_set = cast(OperationDefinitionNode, definitions[0]).selection_set + selection_set = cast("OperationDefinitionNode", definitions[0]).selection_set selections = selection_set.selections assert isinstance(selections, tuple) assert len(selections) == 1 - arguments = cast(FieldNode, selections[0]).arguments + arguments = cast("FieldNode", selections[0]).arguments assert isinstance(arguments, tuple) assert len(arguments) == 1 value = arguments[0].value @@ -263,7 +263,7 @@ def parses_required_field(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -328,7 +328,7 @@ def parses_field_with_required_list_elements(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -352,7 +352,7 @@ def parses_field_with_optional_list_elements(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -376,7 +376,7 @@ def 
parses_field_with_required_list(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -400,7 +400,7 @@ def parses_field_with_optional_list(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -424,7 +424,7 @@ def parses_field_with_mixed_list_elements(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) selection_set: SelectionSetNode | None = definition.selection_set assert isinstance(selection_set, SelectionSetNode) selections = selection_set.selections @@ -483,7 +483,7 @@ def creates_ast(): definitions = doc.definitions assert isinstance(definitions, tuple) assert len(definitions) == 1 - definition = cast(OperationDefinitionNode, definitions[0]) + definition = cast("OperationDefinitionNode", definitions[0]) assert isinstance(definition, DefinitionNode) assert definition.loc == (0, 40) assert definition.operation == OperationType.QUERY diff --git a/tests/language/test_source.py b/tests/language/test_source.py index 24008605..b973410d 100644 --- a/tests/language/test_source.py +++ b/tests/language/test_source.py @@ -81,7 +81,7 @@ def can_create_custom_attribute(): def rejects_invalid_location_offset(): def create_source(location_offset: tuple[int, int]) -> Source: - return Source("", "", cast(SourceLocation, location_offset)) + return Source("", "", cast("SourceLocation", location_offset)) with pytest.raises(TypeError): create_source(None) # type: ignore diff --git a/tests/language/test_visitor.py b/tests/language/test_visitor.py index 00283fe1..f3fdb370 100644 --- a/tests/language/test_visitor.py +++ b/tests/language/test_visitor.py @@ -581,7 +581,9 @@ class CustomFieldNode(SelectionNode): name: NameNode selection_set: SelectionSetNode | None - custom_selection_set = cast(FieldNode, custom_ast.definitions[0]).selection_set + custom_selection_set = cast( + "FieldNode", custom_ast.definitions[0] + ).selection_set assert custom_selection_set is not None custom_selection_set.selections = ( *custom_selection_set.selections, diff --git a/tests/pyutils/test_description.py b/tests/pyutils/test_description.py index 3148520b..781ab14e 100644 --- a/tests/pyutils/test_description.py +++ b/tests/pyutils/test_description.py @@ -34,7 +34,7 @@ def __str__(self) -> str: return str(self.text) -lazy_string = cast(str, LazyString("Why am I so lazy?")) +lazy_string = cast("str", LazyString("Why am I so lazy?")) @contextmanager @@ -186,8 +186,8 @@ def __str__(self) -> str: with registered(Lazy): field = GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) @@ -222,8 
+222,8 @@ def __str__(self) -> str: with registered(Lazy): field = GraphQLField( GraphQLString, - description=cast(str, description), - deprecation_reason=cast(str, deprecation_reason), + description=cast("str", description), + deprecation_reason=cast("str", deprecation_reason), ) schema = GraphQLSchema(GraphQLObjectType("Query", {"lazyField": field})) diff --git a/tests/test_user_registry.py b/tests/test_user_registry.py index 147e01bd..0cb2b5b9 100644 --- a/tests/test_user_registry.py +++ b/tests/test_user_registry.py @@ -262,7 +262,7 @@ def receive(msg): return receive # noinspection PyProtectedMember - pubsub = context["registry"]._pubsub # noqa: SLF001s + pubsub = context["registry"]._pubsub # noqa: SLF001 pubsub[None].subscribers.add(subscriber("User")) pubsub["0"].subscribers.add(subscriber("User 0")) diff --git a/tests/utilities/test_build_client_schema.py b/tests/utilities/test_build_client_schema.py index 8a4cecba..1455f473 100644 --- a/tests/utilities/test_build_client_schema.py +++ b/tests/utilities/test_build_client_schema.py @@ -1,4 +1,4 @@ -from typing import cast +from typing import TYPE_CHECKING, cast import pytest @@ -23,14 +23,16 @@ introspection_from_schema, print_schema, ) -from graphql.utilities.get_introspection_query import ( - IntrospectionEnumType, - IntrospectionInputObjectType, - IntrospectionInterfaceType, - IntrospectionObjectType, - IntrospectionType, - IntrospectionUnionType, -) + +if TYPE_CHECKING: + from graphql.utilities.get_introspection_query import ( + IntrospectionEnumType, + IntrospectionInputObjectType, + IntrospectionInterfaceType, + IntrospectionObjectType, + IntrospectionType, + IntrospectionUnionType, + ) from ..utils import dedent @@ -715,7 +717,9 @@ def throws_when_missing_definition_for_one_of_the_standard_scalars(): def throws_when_type_reference_is_missing_name(): introspection = introspection_from_schema(dummy_schema) - query_type = cast(IntrospectionType, introspection["__schema"]["queryType"]) + query_type = cast( + "IntrospectionType", introspection["__schema"]["queryType"] + ) assert query_type["name"] == "Query" del query_type["name"] # type: ignore @@ -745,7 +749,7 @@ def throws_when_missing_kind(): def throws_when_missing_interfaces(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -767,7 +771,7 @@ def throws_when_missing_interfaces(): def legacy_support_for_interfaces_with_null_as_interfaces_field(): introspection = introspection_from_schema(dummy_schema) some_interface_introspection = cast( - IntrospectionInterfaceType, + "IntrospectionInterfaceType", next( type_ for type_ in introspection["__schema"]["types"] @@ -784,7 +788,7 @@ def legacy_support_for_interfaces_with_null_as_interfaces_field(): def throws_when_missing_fields(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -806,7 +810,7 @@ def throws_when_missing_fields(): def throws_when_missing_field_args(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -828,7 +832,7 @@ def throws_when_missing_field_args(): def throws_when_output_type_is_used_as_an_arg_type(): introspection = 
introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -852,7 +856,7 @@ def throws_when_output_type_is_used_as_an_arg_type(): def throws_when_output_type_is_used_as_an_input_value_type(): introspection = introspection_from_schema(dummy_schema) input_object_type_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -876,7 +880,7 @@ def throws_when_output_type_is_used_as_an_input_value_type(): def throws_when_input_type_is_used_as_a_field_type(): introspection = introspection_from_schema(dummy_schema) query_type_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -900,7 +904,7 @@ def throws_when_input_type_is_used_as_a_field_type(): def throws_when_missing_possible_types(): introspection = introspection_from_schema(dummy_schema) some_union_introspection = cast( - IntrospectionUnionType, + "IntrospectionUnionType", next( type_ for type_ in introspection["__schema"]["types"] @@ -921,7 +925,7 @@ def throws_when_missing_possible_types(): def throws_when_missing_enum_values(): introspection = introspection_from_schema(dummy_schema) some_enum_introspection = cast( - IntrospectionEnumType, + "IntrospectionEnumType", next( type_ for type_ in introspection["__schema"]["types"] @@ -942,7 +946,7 @@ def throws_when_missing_enum_values(): def throws_when_missing_input_fields(): introspection = introspection_from_schema(dummy_schema) some_input_object_introspection = cast( - IntrospectionInputObjectType, + "IntrospectionInputObjectType", next( type_ for type_ in introspection["__schema"]["types"] @@ -1055,7 +1059,7 @@ def recursive_interfaces(): schema = build_schema(sdl, assume_valid=True) introspection = introspection_from_schema(schema) foo_introspection = cast( - IntrospectionObjectType, + "IntrospectionObjectType", next( type_ for type_ in introspection["__schema"]["types"] diff --git a/tests/utilities/test_print_schema.py b/tests/utilities/test_print_schema.py index b12d30dc..ab997610 100644 --- a/tests/utilities/test_print_schema.py +++ b/tests/utilities/test_print_schema.py @@ -555,7 +555,7 @@ def prints_enum(): def prints_empty_types(): schema = GraphQLSchema( types=[ - GraphQLEnumType("SomeEnum", cast(Dict[str, Any], {})), + GraphQLEnumType("SomeEnum", cast("Dict[str, Any]", {})), GraphQLInputObjectType("SomeInputObject", {}), GraphQLInterfaceType("SomeInterface", {}), GraphQLObjectType("SomeObject", {}), diff --git a/tests/utils/assert_equal_awaitables_or_values.py b/tests/utils/assert_equal_awaitables_or_values.py index 8ed8d175..964db1a8 100644 --- a/tests/utils/assert_equal_awaitables_or_values.py +++ b/tests/utils/assert_equal_awaitables_or_values.py @@ -15,7 +15,7 @@ def assert_equal_awaitables_or_values(*items: T) -> T: """Check whether the items are the same and either all awaitables or all values.""" if all(is_awaitable(item) for item in items): - awaitable_items = cast(Tuple[Awaitable], items) + awaitable_items = cast("Tuple[Awaitable]", items) async def assert_matching_awaitables(): return assert_matching_values(*(await asyncio.gather(*awaitable_items))) From 0107e309cde181035cc806a5abd0358c37924251 Mon Sep 17 00:00:00 2001 From: Christoph Zwerschke Date: Sat, 3 May 2025 13:47:30 +0200 Subject: [PATCH 50/50] Fix Sphinx issues --- docs/conf.py | 4 +++- 1 file changed, 
3 insertions(+), 1 deletion(-) diff --git a/docs/conf.py b/docs/conf.py index e78359fe..f70b6d03 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -142,7 +142,7 @@ """ GNT GT KT T VT TContext -enum.Enum +Enum traceback types.TracebackType TypeMap @@ -157,6 +157,7 @@ FormattedSourceLocation GraphQLAbstractType GraphQLCompositeType +GraphQLEnumValueMap GraphQLErrorExtensions GraphQLFieldResolver GraphQLInputType @@ -175,6 +176,7 @@ asyncio.events.AbstractEventLoop collections.abc.MutableMapping collections.abc.MutableSet +enum.Enum graphql.execution.collect_fields.DeferUsage graphql.execution.collect_fields.CollectFieldsResult graphql.execution.collect_fields.FieldGroup
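
Note on the recurring change in PATCH 49/50: the bulk of the diff mechanically
rewrites cast(SomeType, value) as cast("SomeType", value). This appears to
correspond to ruff's TC006 (runtime-cast-value) rule, presumably picked up by
the bump to ruff >=0.11 in pyproject.toml and tox.ini: quoting the type
expression in typing.cast() keeps it from being evaluated at runtime, so names
needed only for typing can move under an `if TYPE_CHECKING:` block, as
definition.py and build_client_schema.py do in this patch. A minimal sketch of
the pattern follows; the names in it are illustrative, not taken from the
patch:

    from typing import TYPE_CHECKING, cast

    if TYPE_CHECKING:
        # Imported for the type checker only; never executed at runtime.
        from decimal import Decimal

    def as_decimal(value: object) -> "Decimal":
        # cast() returns `value` unchanged in either spelling; quoting the
        # type argument means the expression `Decimal` is never evaluated
        # at runtime, so the TYPE_CHECKING-only import above suffices.
        return cast("Decimal", value)

PATCH 50/50 reads as the documentation counterpart: with names such as Enum
now imported only under TYPE_CHECKING, the references Sphinx fails to resolve
change, which would explain the Enum, GraphQLEnumValueMap, and enum.Enum
entries shuffled between what appear to be nitpick-ignore name lists in
docs/conf.py.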