diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index 9bd43ec60..3738bf68f 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -2,7 +2,7 @@ There's lots to do, and we're working hard, so any help is welcome! -- :speech_balloon: Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! +- :speech_balloon: Join us on [Discord](https://discord.gg/DEVteTupPb)! What can you do? @@ -15,9 +15,9 @@ What can you do? - File a bug (please check its not a duplicate) - Propose an enhancement - :white_check_mark: Create a PR: - - [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/betterproto/tests/README.md) to make bug-fixing easier + - [Creating a failing test-case](https://github.com/danielgtaylor/python-betterproto/blob/master/tests/README.md) to make bug-fixing easier - Fix any of the open issues - [Good first issues](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22) - [Issues with tests](https://github.com/danielgtaylor/python-betterproto/issues?q=is%3Aissue+is%3Aopen+label%3A%22has+test%22) - New bugfix or idea - - If you'd like to discuss your idea first, join us on Slack! + - If you'd like to discuss your idea first, join us on Discord! diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml new file mode 100644 index 000000000..618d63150 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -0,0 +1,63 @@ +name: Bug Report +description: Report broken or incorrect behaviour +labels: ["bug", "investigation needed"] + +body: + - type: markdown + attributes: + value: > + Thanks for taking the time to fill out a bug report! + + If you're not sure it's a bug and you just have a question, the [community Discord channel](https://discord.gg/DEVteTupPb) is a better place for general questions than a GitHub issue. 
+ + - type: input + attributes: + label: Summary + description: A simple summary of your bug report + validations: + required: true + + - type: textarea + attributes: + label: Reproduction Steps + description: > + What you did to make it happen. + Ideally there should be a short code snippet in this section to help reproduce the bug. + validations: + required: true + + - type: textarea + attributes: + label: Expected Results + description: > + What did you expect to happen? + validations: + required: true + + - type: textarea + attributes: + label: Actual Results + description: > + What actually happened? + validations: + required: true + + - type: textarea + attributes: + label: System Information + description: > + Paste the result of `protoc --version; python --version; pip show betterproto` below. + validations: + required: true + + - type: checkboxes + attributes: + label: Checklist + options: + - label: I have searched the issues for duplicates. + required: true + - label: I have shown the entire traceback, if possible. + required: true + - label: I have verified this issue occurs on the latest prelease of betterproto which can be installed using `pip install -U --pre betterproto`, if possible. + required: true + diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 000000000..46bf156b8 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,6 @@ +name: +description: +contact_links: + - name: For questions about the library + about: Support questions are better answered in our Discord group. 
+ url: https://discord.gg/DEVteTupPb diff --git a/.github/ISSUE_TEMPLATE/feature_request.yml b/.github/ISSUE_TEMPLATE/feature_request.yml new file mode 100644 index 000000000..697b5041e --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature_request.yml @@ -0,0 +1,49 @@ +name: Feature Request +description: Suggest a feature for this library +labels: ["enhancement"] + +body: + - type: input + attributes: + label: Summary + description: > + What problem is your feature trying to solve? What would become easier or possible if feature was implemented? + validations: + required: true + + - type: dropdown + attributes: + multiple: false + label: What is the feature request for? + options: + - The core library + - RPC handling + - The documentation + validations: + required: true + + - type: textarea + attributes: + label: The Problem + description: > + What problem is your feature trying to solve? + What would become easier or possible if feature was implemented? + validations: + required: true + + - type: textarea + attributes: + label: The Ideal Solution + description: > + What is your ideal solution to the problem? + What would you like this feature to do? + validations: + required: true + + - type: textarea + attributes: + label: The Current Solution + description: > + What is the current solution to the problem, if any? + validations: + required: false diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 000000000..c1965a210 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,16 @@ +## Summary + + + +## Checklist + + + +- [ ] If code changes were made then they have been tested. + - [ ] I have updated the documentation to reflect the changes. +- [ ] This PR fixes an issue. +- [ ] This PR adds something new (e.g. new method or parameters). + - [ ] This change has an associated test. +- [ ] This PR is a breaking change (e.g. 
methods or parameters removed/renamed) +- [ ] This PR is **not** a code change (e.g. documentation, README, ...) + diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 66e7abab1..609465ee8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -13,24 +13,22 @@ jobs: name: ${{ matrix.os }} / ${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest strategy: + fail-fast: false matrix: os: [Ubuntu, MacOS, Windows] - python-version: [3.6, 3.7, 3.8, 3.9] - exclude: - - os: Windows - python-version: 3.6 + python-version: ['3.9', '3.10', '3.11', '3.12', '3.13'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Get full Python version id: full-python-version shell: bash - run: echo ::set-output name=version::$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))") + run: echo "version=$(python -c "import sys; print('-'.join(str(v) for v in sys.version_info))")" >> "$GITHUB_OUTPUT" - name: Install poetry shell: bash @@ -43,7 +41,7 @@ jobs: run: poetry config virtualenvs.in-project true - name: Set up cache - uses: actions/cache@v2 + uses: actions/cache@v4 id: cache with: path: .venv @@ -56,9 +54,7 @@ jobs: - name: Install dependencies shell: bash - run: | - poetry run python -m pip install pip -U - poetry install + run: poetry install -E compiler - name: Generate code from proto files shell: bash @@ -66,4 +62,4 @@ jobs: - name: Execute test suite shell: bash - run: poetry run pytest tests/ + run: poetry run python -m pytest tests/ diff --git a/.github/workflows/code-quality.yml b/.github/workflows/code-quality.yml index a3d483f40..c853f7936 100644 --- a/.github/workflows/code-quality.yml +++ b/.github/workflows/code-quality.yml @@ -13,14 +13,6 @@ jobs: name: Check code/doc formatting runs-on: ubuntu-latest steps: - - uses: 
actions/checkout@v2 - - name: Run Black - uses: lgeiger/black-action@master - with: - args: --check src/ tests/ benchmarks/ - - - name: Install rST dependcies - run: python -m pip install doc8 - - name: Lint documentation for errors - run: python -m doc8 docs --max-line-length 88 --ignore-path-errors "docs/migrating.rst;D001" - # it has a table which is longer than 88 characters long + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + - uses: pre-commit/action@v3.0.1 diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml new file mode 100644 index 000000000..81803b673 --- /dev/null +++ b/.github/workflows/codeql-analysis.yml @@ -0,0 +1,46 @@ +name: "CodeQL" + +on: + push: + branches: [ "master" ] + pull_request: + branches: + - '**' + schedule: + - cron: '19 1 * * 6' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. 
+ + # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs + # queries: security-extended,security-and-quality + + - name: Autobuild + uses: github/codeql-action/autobuild@v3 + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 755f34f6c..30dc61984 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,11 +15,11 @@ jobs: name: Distribution runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - name: Set up Python 3.8 - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - name: Set up Python 3.9 + uses: actions/setup-python@v5 with: - python-version: 3.8 + python-version: 3.9 - name: Install poetry run: python -m pip install poetry - name: Build package diff --git a/.gitignore b/.gitignore index 67d07686a..de01ba67c 100644 --- a/.gitignore +++ b/.gitignore @@ -17,3 +17,5 @@ output .venv .asv venv +.devcontainer +.ruff_cache \ No newline at end of file diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 000000000..84082d00d --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,25 @@ +ci: + autofix_prs: false + +repos: + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.9.1 + hooks: + - id: ruff-format + args: ["--diff", "src", "tests"] + - id: ruff + args: ["--select", "I", "src", "tests"] + + - repo: https://github.com/PyCQA/doc8 + rev: 0.10.1 + hooks: + - id: doc8 + additional_dependencies: + - toml + + - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks + rev: v2.14.0 + hooks: + - id: pretty-format-java + args: [--autofix, --aosp] + files: ^.*\.java$ diff --git a/CHANGELOG.md b/CHANGELOG.md index 074b3e915..bda844d63 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,112 @@ 
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 - Versions suffixed with `b*` are in `beta` and can be installed with `pip install --pre betterproto`. +## [2.0.0b7] - 2024-08-11 + +- **Breaking**: Support `Pydantic` v2 and dropping support for v1 [#588](https://github.com/danielgtaylor/python-betterproto/pull/588) +- **Breaking**: The attempting to access an unset `oneof` now raises an `AttributeError` + field. To see how to access `oneof` fields now, refer to [#558](https://github.com/danielgtaylor/python-betterproto/pull/558) + and [README.md](https://github.com/danielgtaylor/python-betterproto#one-of-support). +- **Breaking**: A custom `Enum` has been implemented to match the behaviour of being an open set. Any checks for `isinstance(enum_member, enum.Enum)` and `issubclass(EnumSubclass, enum.Enum)` will now return `False`. This change also has the side effect of + preventing any passthrough of `Enum` members (i.e. `Foo.RED.GREEN` doesn't work any more). See [#293](https://github.com/danielgtaylor/python-betterproto/pull/293) for more info, this fixed many bugs related to `Enum` handling. + +- Add support for `pickle` methods [#535](https://github.com/danielgtaylor/python-betterproto/pull/535) +- Add support for `Struct` and `Value` types [#551](https://github.com/danielgtaylor/python-betterproto/pull/551) +- Add support for [`Rich` package](https://rich.readthedocs.io/en/latest/index.html) for pretty printing [#508](https://github.com/danielgtaylor/python-betterproto/pull/508) +- Improve support for streaming messages [#518](https://github.com/danielgtaylor/python-betterproto/pull/518) [#529](https://github.com/danielgtaylor/python-betterproto/pull/529) +- Improve performance of serializing / de-serializing messages [#545](https://github.com/danielgtaylor/python-betterproto/pull/545) +- Improve the handling of message name collisions with typing by allowing the method / type of imports to be configured. 
+ Refer to [#582](https://github.com/danielgtaylor/python-betterproto/pull/582) + and [README.md](https://github.com/danielgtaylor/python-betterproto#configuration-typing-imports). +- Fix roundtrip parsing of `datetime`s [#534](https://github.com/danielgtaylor/python-betterproto/pull/534) +- Fix accessing unset optional fields [#523](https://github.com/danielgtaylor/python-betterproto/pull/523) +- Fix `Message` equality comparison [#513](https://github.com/danielgtaylor/python-betterproto/pull/513) +- Fix behaviour with long comment messages [#532](https://github.com/danielgtaylor/python-betterproto/pull/532) +- Add a warning when calling a deprecated message [#596](https://github.com/danielgtaylor/python-betterproto/pull/596) + +## [2.0.0b6] - 2023-06-25 + +- **Breaking**: the minimum Python version has been bumped to `3.7` [#444](https://github.com/danielgtaylor/python-betterproto/pull/444) + +- Support generating [Pydantic dataclasses](https://docs.pydantic.dev/latest/usage/dataclasses). + Pydantic dataclasses are are drop-in replacement for dataclasses in the standard library that additionally supports validation. + Pass `--python_betterproto_opt=pydantic_dataclasses` to enable this feature. + Refer to [#406](https://github.com/danielgtaylor/python-betterproto/pull/406) + and [README.md](https://github.com/danielgtaylor/python-betterproto#generating-pydantic-models) for more information. 
+ +- Added support for `@generated` marker [#382](https://github.com/danielgtaylor/python-betterproto/pull/382) +- Pull down the `include_default_values` argument to `to_json()` [#405](https://github.com/danielgtaylor/python-betterproto/pull/405) +- Pythonize input_type name in py_input_message [#436](https://github.com/danielgtaylor/python-betterproto/pull/436) +- Widen `from_dict()` to accept any `Mapping` [#451](https://github.com/danielgtaylor/python-betterproto/pull/451) +- Replace `pkg_resources` with `importlib` [#462](https://github.com/danielgtaylor/python-betterproto/pull/462) + +- Fix typechecker compatiblity checks in server streaming methods [#413](https://github.com/danielgtaylor/python-betterproto/pull/413) +- Fix "empty-valued" repeated fields not being serialised [#417](https://github.com/danielgtaylor/python-betterproto/pull/417) +- Fix `dict` encoding for timezone-aware `datetimes` [#468](https://github.com/danielgtaylor/python-betterproto/pull/468) +- Fix `to_pydict()` serialization for optional fields [#495](https://github.com/danielgtaylor/python-betterproto/pull/495) +- Handle empty value objects properly [#481](https://github.com/danielgtaylor/python-betterproto/pull/481) + +## [2.0.0b5] - 2022-08-01 + +- **Breaking**: Client and Service Stubs no longer pack and unpack the input message fields as parameters [#331](https://github.com/danielgtaylor/python-betterproto/pull/311) + + Update your client calls and server handlers as follows: + + Clients before: + + ```py + response = await service.echo(value="hello", extra_times=1) + ``` + + Clients after: + + ```py + response = await service.echo(EchoRequest(value="hello", extra_times=1)) + ``` + + Servers before: + + ```py + async def echo(self, value: str, extra_times: int) -> EchoResponse: ... + ``` + + Servers after: + + ```py + async def echo(self, echo_request: EchoRequest) -> EchoResponse: + # Use echo_request.value + # Use echo_request.extra_times + ... 
+ ``` + +- Add `to/from_pydict()` for `Message` [#203](https://github.com/danielgtaylor/python-betterproto/pull/203) +- Format field comments also as docstrings [#304](https://github.com/danielgtaylor/python-betterproto/pull/304) +- Implement `__deepcopy__` for `Message` [#339](https://github.com/danielgtaylor/python-betterproto/pull/339) +- Run isort on compiled code [#355](https://github.com/danielgtaylor/python-betterproto/pull/355) +- Expose timeout, deadline and metadata parameters from grpclib [#352](https://github.com/danielgtaylor/python-betterproto/pull/352) +- Make `Message.__getattribute__` invisible to type checkers [#359](https://github.com/danielgtaylor/python-betterproto/pull/359) + +- Fix map field edge-case [#254](https://github.com/danielgtaylor/python-betterproto/pull/254) +- Fix message text in `NotImplementedError` [#325](https://github.com/danielgtaylor/python-betterproto/pull/325) +- Fix `Message.from_dict()` in the presence of optional datetime fields [#329](https://github.com/danielgtaylor/python-betterproto/pull/329) +- Support Jinja2 3.0 to prevent version conflicts [#330](https://github.com/danielgtaylor/python-betterproto/pull/330) +- Fix overwriting top level `__init__.py` [#337](https://github.com/danielgtaylor/python-betterproto/pull/337) +- Remove deprecation warnings when fields are initialised with non-default values [#348](https://github.com/danielgtaylor/python-betterproto/pull/348) +- Ensure nested class names are converted to PascalCase [#353](https://github.com/danielgtaylor/python-betterproto/pull/353) +- Fix `Message.to_dict()` mutating the underlying Message [#378](https://github.com/danielgtaylor/python-betterproto/pull/378) +- Fix some parameters being missing from services [#381](https://github.com/danielgtaylor/python-betterproto/pull/381) + +## [2.0.0b4] - 2022-01-03 + +- **Breaking**: the minimum Python version has been bumped to `3.6.2` + +- Always add `AsyncIterator` to imports if there are services 
[#264](https://github.com/danielgtaylor/python-betterproto/pull/264) +- Allow parsing of messages from `ByteStrings` [#266](https://github.com/danielgtaylor/python-betterproto/pull/266) +- Add support for proto3 optional [#281](https://github.com/danielgtaylor/python-betterproto/pull/281) + +- Fix compilation of fields with names identical to builtin types [#294](https://github.com/danielgtaylor/python-betterproto/pull/294) +- Fix default values for enum service args [#299](https://github.com/danielgtaylor/python-betterproto/pull/299) + ## [2.0.0b3] - 2021-04-07 - Generate grpclib service stubs [#170](https://github.com/danielgtaylor/python-betterproto/pull/170) @@ -54,7 +160,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0 ## [2.0.0b1] - 2020-07-04 -[Upgrade Guide](./docs/upgrading.md) +[Upgrade Guide](./docs/upgrading.md) > Several bugfixes and improvements required or will require small breaking changes, necessitating a new version. > `2.0.0` will be released once the interface is stable. diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 000000000..938e844c4 --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2023 Daniel G. Taylor + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 79b7cd61c..29f346a70 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,24 @@ # Better Protobuf / gRPC Support for Python -![](https://github.com/danielgtaylor/python-betterproto/workflows/CI/badge.svg) +![](https://github.com/danielgtaylor/python-betterproto/actions/workflows/ci.yml/badge.svg) + +> :warning: Betterproto has moved to a new repository: [https://github.com/betterproto/python-betterproto2](https://github.com/betterproto/python-betterproto2). This new version is a major redesign, addressing several bugs and introducing new features. It is still under active development: documentation is incomplete, work is ongoing, and breaking changes may occur. If you'd like to contribute, please submit your pull requests to the new repository. + + > :octocat: If you're reading this on github, please be aware that it might mention unreleased features! See the latest released README on [pypi](https://pypi.org/project/betterproto/). This project aims to provide an improved experience when using Protobuf / gRPC in a modern Python environment by making use of modern language features and generating readable, understandable, idiomatic Python code. It will not support legacy features or environments (e.g. Protobuf 2). 
The following are supported: - Protobuf 3 & gRPC code generation - Both binary & JSON serialization is built-in -- Python 3.6+ making use of: +- Python 3.7+ making use of: - Enums - Dataclasses - `async`/`await` - Timezone-aware `datetime` and `timedelta` objects - Relative imports - Mypy type checking +- [Pydantic Models](https://docs.pydantic.dev/) generation (see [Generating pydantic models](#generating-pydantic-models)) This project is heavily inspired by, and borrows functionality from: @@ -38,6 +43,8 @@ This project exists because I am unhappy with the state of the official Google p - Uses `SerializeToString()` rather than the built-in `__bytes__()` - Special wrapped types don't use Python's `None` - Timestamp/duration types don't use Python's built-in `datetime` module + + This project is a reimplementation from the ground up focused on idiomatic modern Python to help fix some of the above. While it may not be a 1:1 drop-in replacement due to changed method names and call patterns, the wire format is identical. ## Installation @@ -58,7 +65,7 @@ pip install betterproto ### Compiling proto files -Now, given you installed the compiler and have a proto file, e.g `example.proto`: +Given you installed the compiler and have a proto file, e.g `example.proto`: ```protobuf syntax = "proto3"; @@ -160,6 +167,12 @@ service Echo { } ``` +Generate echo proto file: + +``` +python -m grpc_tools.protoc -I . --python_betterproto_out=. 
echo.proto +``` + A client can be implemented as follows: ```python import asyncio @@ -171,10 +184,10 @@ from grpclib.client import Channel async def main(): channel = Channel(host="127.0.0.1", port=50051) service = echo.EchoStub(channel) - response = await service.echo(value="hello", extra_times=1) + response = await service.echo(echo.EchoRequest(value="hello", extra_times=1)) print(response) - async for response in service.echo_stream(value="hello", extra_times=1): + async for response in service.echo_stream(echo.EchoRequest(value="hello", extra_times=1)): print(response) # don't forget to close the channel when done! @@ -186,6 +199,7 @@ if __name__ == "__main__": loop.run_until_complete(main()) ``` + which would output ```python EchoResponse(values=['hello', 'hello']) @@ -199,28 +213,29 @@ To use them, simply subclass the base class in the generated files and override service methods: ```python -from echo import EchoBase +import asyncio +from echo import EchoBase, EchoRequest, EchoResponse, EchoStreamResponse from grpclib.server import Server from typing import AsyncIterator class EchoService(EchoBase): - async def echo(self, value: str, extra_times: int) -> "EchoResponse": - return value + async def echo(self, echo_request: "EchoRequest") -> "EchoResponse": + return EchoResponse([echo_request.value for _ in range(echo_request.extra_times)]) - async def echo_stream( - self, value: str, extra_times: int - ) -> AsyncIterator["EchoStreamResponse"]: - for _ in range(extra_times): - yield value + async def echo_stream(self, echo_request: "EchoRequest") -> AsyncIterator["EchoStreamResponse"]: + for _ in range(echo_request.extra_times): + yield EchoStreamResponse(echo_request.value) -async def start_server(): - HOST = "127.0.0.1" - PORT = 1337 +async def main(): server = Server([EchoService()]) - await server.start(HOST, PORT) - await server.serve_forever() + await server.start("127.0.0.1", 50051) + await server.wait_closed() + +if __name__ == '__main__': + loop = 
asyncio.get_event_loop() + loop.run_until_complete(main()) ``` ### JSON @@ -266,7 +281,22 @@ message Test { } ``` -You can use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset. +On Python 3.10 and later, you can use a `match` statement to access the provided one-of field, which supports type-checking: + +```py +test = Test() +match test: + case Test(on=value): + print(value) # value: bool + case Test(count=value): + print(value) # value: int + case Test(name=value): + print(value) # value: str + case _: + print("No value provided") +``` + +You can also use `betterproto.which_one_of(message, group_name)` to determine which of the fields was set. It returns a tuple of the field name and value, or a blank string and `None` if unset. ```py >>> test = Test() @@ -281,17 +311,11 @@ You can use `betterproto.which_one_of(message, group_name)` to determine which o >>> test.count = 57 >>> betterproto.which_one_of(test, "foo") ["count", 57] ->>> test.on -False # Default (zero) values also work. >>> test.name = "" >>> betterproto.which_one_of(test, "foo") ["name", ""] ->>> test.count -0 ->>> test.on -False ``` Again this is a little different than the official Google code generator: @@ -354,14 +378,76 @@ datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc) {'ts': '2019-01-01T12:00:00Z', 'duration': '1.200s'} ``` +## Generating Pydantic Models + +You can use python-betterproto to generate pydantic based models, using +pydantic dataclasses. This means the results of the protobuf unmarshalling will +be typed checked. The usage is the same, but you need to add a custom option +when calling the protobuf compiler: + + +``` +protoc -I . --python_betterproto_opt=pydantic_dataclasses --python_betterproto_out=lib example.proto +``` + +With the important change being `--python_betterproto_opt=pydantic_dataclasses`. 
This will +swap the dataclass implementation from the builtin python dataclass to the +pydantic dataclass. You must have pydantic as a dependency in your project for +this to work. + +## Configuration typing imports + +By default typing types will be imported directly from typing. This sometimes can lead to issues in generation if types that are being generated conflict with the name. In this case you can configure the way types are imported from 3 different options: + +### Direct +``` +protoc -I . --python_betterproto_opt=typing.direct --python_betterproto_out=lib example.proto +``` +this configuration is the default, and will import types as follows: +``` +from typing import ( + List, + Optional, + Union +) +... +value: List[str] = [] +value2: Optional[str] = None +value3: Union[str, int] = 1 +``` +### Root +``` +protoc -I . --python_betterproto_opt=typing.root --python_betterproto_out=lib example.proto +``` +this configuration loads the root typing module, and then access the types off of it directly: +``` +import typing +... +value: typing.List[str] = [] +value2: typing.Optional[str] = None +value3: typing.Union[str, int] = 1 +``` + +### 310 +``` +protoc -I . --python_betterproto_opt=typing.310 --python_betterproto_out=lib example.proto +``` +this configuration avoid loading typing all together if possible and uses the python 3.10 pattern: +``` +... 
+value: list[str] = [] +value2: str | None = None +value3: str | int = 1 +``` + ## Development -- _Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)!_ +- _Join us on [Discord](https://discord.gg/DEVteTupPb)!_ - _See how you can help → [Contributing](.github/CONTRIBUTING.md)_ ### Requirements -- Python (3.6 or higher) +- Python (3.7 or higher) - [poetry](https://python-poetry.org/docs/#installation) *Needed to install dependencies in a virtual environment* @@ -374,8 +460,7 @@ datetime.datetime(2019, 1, 1, 11, 59, 58, 800000, tzinfo=datetime.timezone.utc) ```sh # Get set up with the virtual env & dependencies -poetry run pip install --upgrade pip -poetry install +poetry install -E compiler # Activate the poetry environment poetry shell @@ -410,7 +495,7 @@ Adding a standard test case is easy. It will be picked up automatically when you run the tests. -- See also: [Standard Tests Development Guide](betterproto/tests/README.md) +- See also: [Standard Tests Development Guide](tests/README.md) #### Custom tests @@ -435,10 +520,10 @@ poe full-test ### (Re)compiling Google Well-known Types -Betterproto includes compiled versions for Google's well-known types at [betterproto/lib/google](betterproto/lib/google). +Betterproto includes compiled versions for Google's well-known types at [src/betterproto/lib/google](src/betterproto/lib/google). Be sure to regenerate these files when modifying the plugin output format, and validate by running the tests. -Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`. +Normally, the plugin does not compile any references to `google.protobuf`, since they are pre-compiled. To force compilation of `google.protobuf`, use the option `--custom_opt=INCLUDE_GOOGLE`. 
Assuming your `google.protobuf` source files (included with all releases of `protoc`) are located in `/usr/local/include`, you can regenerate them as follows: @@ -493,7 +578,7 @@ protoc \ ## Community -Join us on [Slack](https://join.slack.com/t/betterproto/shared_invite/zt-f0n0uolx-iN8gBNrkPxtKHTLpG3o1OQ)! +Join us on [Discord](https://discord.gg/DEVteTupPb)! ## License diff --git a/benchmarks/__init__.py b/benchmarks/__init__.py index 8b1378917..e69de29bb 100644 --- a/benchmarks/__init__.py +++ b/benchmarks/__init__.py @@ -1 +0,0 @@ - diff --git a/benchmarks/benchmarks.py b/benchmarks/benchmarks.py index 76fb90633..5768cbf0e 100644 --- a/benchmarks/benchmarks.py +++ b/benchmarks/benchmarks.py @@ -1,12 +1,37 @@ -import betterproto from dataclasses import dataclass +from typing import List + +import betterproto @dataclass class TestMessage(betterproto.Message): - foo: int = betterproto.uint32_field(0) - bar: str = betterproto.string_field(1) - baz: float = betterproto.float_field(2) + foo: int = betterproto.uint32_field(1) + bar: str = betterproto.string_field(2) + baz: float = betterproto.float_field(3) + + +@dataclass +class TestNestedChildMessage(betterproto.Message): + str_key: str = betterproto.string_field(1) + bytes_key: bytes = betterproto.bytes_field(2) + bool_key: bool = betterproto.bool_field(3) + float_key: float = betterproto.float_field(4) + int_key: int = betterproto.uint64_field(5) + + +@dataclass +class TestNestedMessage(betterproto.Message): + foo: TestNestedChildMessage = betterproto.message_field(1) + bar: TestNestedChildMessage = betterproto.message_field(2) + baz: TestNestedChildMessage = betterproto.message_field(3) + + +@dataclass +class TestRepeatedMessage(betterproto.Message): + foo_repeat: List[str] = betterproto.string_field(1) + bar_repeat: List[int] = betterproto.int64_field(2) + baz_repeat: List[bool] = betterproto.bool_field(3) class BenchMessage: @@ -16,15 +41,28 @@ def setup(self): self.cls = TestMessage self.instance = 
TestMessage() self.instance_filled = TestMessage(0, "test", 0.0) + self.instance_filled_bytes = bytes(self.instance_filled) + self.instance_filled_nested = TestNestedMessage( + TestNestedChildMessage("foo", bytearray(b"test1"), True, 0.1234, 500), + TestNestedChildMessage("bar", bytearray(b"test2"), True, 3.1415, 302), + TestNestedChildMessage("baz", bytearray(b"test3"), False, 1e5, 300), + ) + self.instance_filled_nested_bytes = bytes(self.instance_filled_nested) + self.instance_filled_repeated = TestRepeatedMessage( + [f"test{i}" for i in range(1_000)], + [(i - 500) ** 3 for i in range(1_000)], + [i % 2 == 0 for i in range(1_000)], + ) + self.instance_filled_repeated_bytes = bytes(self.instance_filled_repeated) def time_overhead(self): """Overhead in class definition.""" @dataclass class Message(betterproto.Message): - foo: int = betterproto.uint32_field(0) - bar: str = betterproto.string_field(1) - baz: float = betterproto.float_field(2) + foo: int = betterproto.uint32_field(1) + bar: str = betterproto.string_field(2) + baz: float = betterproto.float_field(3) def time_instantiation(self): """Time instantiation""" @@ -50,6 +88,26 @@ def time_serialize(self): """Time serializing a message to wire.""" bytes(self.instance_filled) + def time_deserialize(self): + """Time deserialize a message.""" + TestMessage().parse(self.instance_filled_bytes) + + def time_serialize_nested(self): + """Time serializing a nested message to wire.""" + bytes(self.instance_filled_nested) + + def time_deserialize_nested(self): + """Time deserialize a nested message.""" + TestNestedMessage().parse(self.instance_filled_nested_bytes) + + def time_serialize_repeated(self): + """Time serializing a repeated message to wire.""" + bytes(self.instance_filled_repeated) + + def time_deserialize_repeated(self): + """Time deserialize a repeated message.""" + TestRepeatedMessage().parse(self.instance_filled_repeated_bytes) + class MemSuite: def setup(self): diff --git a/docs/migrating.rst 
b/docs/migrating.rst index 0f18eac5f..3d02650db 100644 --- a/docs/migrating.rst +++ b/docs/migrating.rst @@ -85,17 +85,19 @@ wrappers used to provide optional zero value support. Each of these has a specia representation and is handled a little differently from normal messages. The Python mapping for these is as follows: -+-------------------------------+-----------------------------------------------+--------------------------+ -| ``Google Message`` | ``Python Type`` | ``Default`` | -+===============================+===============================================+==========================+ -| ``google.protobuf.duration`` | :class:`datetime.timedelta` | ``0`` | -+-------------------------------+-----------------------------------------------+--------------------------+ -| ``google.protobuf.timestamp`` | ``Timezone-aware`` :class:`datetime.datetime` | ``1970-01-01T00:00:00Z`` | -+-------------------------------+-----------------------------------------------+--------------------------+ -| ``google.protobuf.*Value`` | ``Optional[...]``/``None`` | ``None`` | -+-------------------------------+-----------------------------------------------+--------------------------+ -| ``google.protobuf.*`` | ``betterproto.lib.google.protobuf.*`` | ``None`` | -+-------------------------------+-----------------------------------------------+--------------------------+ ++-------------------------------+-------------------------------------------------+--------------------------+ +| ``Google Message`` | ``Python Type`` | ``Default`` | ++===============================+=================================================+==========================+ +| ``google.protobuf.duration`` | :class:`datetime.timedelta` | ``0`` | ++-------------------------------+-------------------------------------------------+--------------------------+ +| ``google.protobuf.timestamp`` | ``Timezone-aware`` :class:`datetime.datetime` | ``1970-01-01T00:00:00Z`` | 
++-------------------------------+-------------------------------------------------+--------------------------+ +| ``google.protobuf.*Value`` | ``Optional[...]``/``None`` | ``None`` | ++-------------------------------+-------------------------------------------------+--------------------------+ +| ``google.protobuf.*`` | ``betterproto.lib.std.google.protobuf.*`` | ``None`` | ++-------------------------------+-------------------------------------------------+--------------------------+ +| ``google.protobuf.*`` | ``betterproto.lib.pydantic.google.protobuf.*`` | ``None`` | ++-------------------------------+-------------------------------------------------+--------------------------+ For the wrapper types, the Python type corresponds to the wrapped type, e.g. diff --git a/poetry.lock b/poetry.lock index f56b1e4a7..374226890 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,1505 +1,2259 @@ +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. + [[package]] name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" +version = "0.7.16" +description = "A light, configurable Sphinx theme" optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, + {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, +] [[package]] -name = "appdirs" -version = "1.4.4" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-category = "main" +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "ansicon" +version = "1.89.0" +description = "Python wrapper for loading Jason Hood's ANSICON" optional = false python-versions = "*" +groups = ["dev"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "ansicon-1.89.0-py2.py3-none-any.whl", hash = "sha256:f1def52d17f65c2c9682cf8370c03f541f410c1752d6a14029f97318e4b9dfec"}, + {file = "ansicon-1.89.0.tar.gz", hash = "sha256:e4d039def5768a47e4afec8e89e83ec3ae5a26bf00ad851f914d1240b444d2b1"}, +] [[package]] name = "asv" -version = "0.4.2" +version = "0.6.4" description = "Airspeed Velocity: A simple Python history benchmarking tool" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "asv-0.6.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e32b4cc435bdb6f2ef83d8092e977962f6fa20471542d6341e596324d350cbea"}, + {file = "asv-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fdfb9090623fc45cbeb77ab40b394779794083c155128e3d320fa06af2e0fdf5"}, + {file = "asv-0.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5dfee8a415f4b5da0be4bedf4c9cb3b041c2148d28d2327cf3b54f9cb565cefd"}, + {file = "asv-0.6.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:abc13331bb8bb1880dbc33e75175ae90bca439038a1f7e246528481ecebd15dd"}, + {file = "asv-0.6.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:b67eec004f8218bba25dcdbdda2e6676dd6c4ac3e97a80b691b27dcfbfbda38d"}, + {file = "asv-0.6.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aef14496a34552308d054db71181bfb1ca45d7ef29028747d388be9f00a5b45c"}, + {file = "asv-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:0c8931e7a8aeda75f90b3ac422cbb7c46a5ce50d8c0a8e821cdf3e4d0705dd76"}, + {file = "asv-0.6.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74666c5896b4aec92b4a12cf9aa7494dec3398bb9ea602a9f8dc1656b53e8e10"}, + {file = "asv-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26166a7bd7fe05b5a8507247d1a7ab1dfc4256414b0505d124a7b9d46a618a1c"}, + {file = "asv-0.6.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe6161c5616f5aed936947866b6376e09c937d628aa81115b3c72e90a151c1f9"}, + {file = "asv-0.6.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d6122b5e86bf9071b9ff7136672d50da0d460dfc958f43429843f7a3cd3e86a"}, + {file = "asv-0.6.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:79554f125033ecbcb599cd704b4b5b525d254e5e05b1dd24bab3bbd83ae5502e"}, + {file = "asv-0.6.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2e80f39501628fd4cac972f08fa4c9b8e211a86fc43dd6e58c95d106cbaf54e7"}, + {file = "asv-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:363dfdee98cc072e6a1468137eed640985e48ccbb11c175d04ee420f05459872"}, + {file = "asv-0.6.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:244b71778f91aa6672e1f16feb9eecac78ef7cee95228ef8f0315a2e2deecfed"}, + {file = "asv-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3e798b275de2889748d43d42305bfce68c015a3e38ae935d231835cb836fef73"}, + {file = "asv-0.6.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d064c5ac1ab18efc62467f65ed4989a2e2ac1a4d21886119fa0ef0f91d548438"}, + {file = "asv-0.6.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c51e5862bdac0f1fe11886bdd40b30a9691a65cb7feac40f0676fe9206d5bb43"}, + {file = "asv-0.6.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46a7ca838e8c49109c43b1cda0eb64abc5e0a045538da718abe981d115ed47aa"}, + {file = "asv-0.6.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f5f722178c7e36b797f764c837fc03c462f68c8f2cba5145b2e64119e46231ff"}, + {file = "asv-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:f972ca71316d46a0242eb69e53dadfeab1e4d0546773b0f722462f97b3e5fbd9"}, + {file = "asv-0.6.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e8c728707d417268560d1e1a5cb0b638c10b346648b3338ca4dce373c0a0608b"}, + {file = "asv-0.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cfe2796c87960c90809a891e0805df7017fea58b86a739fbc901de9703f7685"}, + {file = "asv-0.6.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb74b1726280422c22e69010ede8bbd13309408b046d93af2ef199728d5f341a"}, + {file = "asv-0.6.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2223db773e31ffb4583f44ab8adbe3676e41db8d08e9ca59a9b95c7c26954133"}, + {file = "asv-0.6.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d7426c1d7c18c7f19689b0f060e77d7dce8ff32697e194aca236a8c100bf8b78"}, + {file = "asv-0.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:7d792a650e2f6bcab7c0f4278b305ee8cc9a16479dc7297bafbd5197a553d812"}, + {file = "asv-0.6.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7e396f602966c92e21e34c2a46f2be5161b0c4c1e3e87397e04a848e62a3c90b"}, + {file = "asv-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:14788182ab143e7c7af755b83c34887873a0bde6faa3b263a9f732247a4ae84f"}, + {file = "asv-0.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59ff949fae7c4b006aa94f46c9a9c02d9b79b1b836a6e3fcc5da633a2ab60aa2"}, + {file = "asv-0.6.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:27fcce30845de881a58ee98eb9b51e3deb520356ee8423bf471585e62c7c2a60"}, + {file = "asv-0.6.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ea066af743856d983fbd1973788032ef98cc28dc8e821ee065d25a3af4b791a0"}, + {file = "asv-0.6.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aa248b4ad640310fd6d1a8265ee2672d6dbf019b32569a37a01aece49fe72d1b"}, + {file = "asv-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:9419c426b441df380ff35f08a5323b73def19e17a13bee7a12ef0cbabbe8640b"}, + {file = "asv-0.6.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:755f2ec48b8277f68be6ba6325c16d76665a9807245ac4f40bb223cf266701bf"}, + {file = "asv-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8091787fd5219dc63e1c8dc2786da5f9ad5302b15b22c70cf14ee76bc20b3443"}, + {file = "asv-0.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff89881dc7036f3fb4e50fb23dfef6768ae9651daf2efff18bd487339ab1f14"}, + {file = "asv-0.6.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b22bbe5a8bcea54b9d71bd02e78a814b1bfe7edeec171b1ecdeea839b78735a2"}, + {file = "asv-0.6.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:76b7ee6d6c63825065b5b250271d0576d39cc610674a128f5a39cc040b6a7d86"}, + {file = "asv-0.6.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:758d9982f6be463711dca19dda59bc51a2fee27ab2494132f453d92f3c121d43"}, + {file = "asv-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:9a16c3b8d533cc6a05a9a217a03c631b738047fca711c95aa3f07e4a83723198"}, + {file = "asv-0.6.4-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0305e9eee21f71c3d3f8b046beb35e571f6dd7ed2fcd0e8405f8a208bcd3228a"}, + {file = "asv-0.6.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6cd23fa20edf8cb30354fda3388a8835a15158e21559c86f0d997e5d30dbf91"}, + {file = "asv-0.6.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:b7424d2dbfcb98aa3c099311100ceb9aabfd83fed0b41420f70f142852ed392a"}, + {file = "asv-0.6.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e7f4b95583cf379015d35b747a1bb4df99c05dd4107d6081b2cf4a577f4caeca"}, + {file = "asv-0.6.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e54b3e7c6a22af2ac779bf2767dcb6ee09760d9c4272b73e4d63a5ed938145d8"}, + {file = "asv-0.6.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f34b1568d353d6cddcfa074eba0aaaa82b29540df10614cf66f43930ba7827c1"}, + {file = "asv-0.6.4-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ccfbbc4f12e145ffb7a653275d75d54f72768f1ff1fdb300e0603dbf33deaf6"}, + {file = "asv-0.6.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:64637299bcbd7743da0140d8a19a732c33d9e41d28aa4db0bf1e58e12eb8b4e4"}, + {file = "asv-0.6.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bad0f37940c5ad7c39d81eecfc3c515f55c51bbca094e0efda4d70c74363532b"}, + {file = "asv-0.6.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfc9f90a7dd45f042f947f4c3a3d98ee591f5ac7d1751b541632e5f14fc35c54"}, + {file = "asv-0.6.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:356fbc8abf3f4c2b13bc37af78f08c008f1ef4320549e44c02a5a3f6a783f892"}, + {file = "asv-0.6.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:647a6ba8f6e9a23455aabc7a6365aa1feeeb82a6bf99696e0bc964aebe337730"}, + {file = "asv-0.6.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:80c791206e7c01b5883e8facd7ef27432a01fd1cbc4977d38f7bfe08ee98150a"}, + {file = "asv-0.6.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc49bb48295a4b1d902590b87e7920ee51e95d72bcf1c44d83303dfbecc68e2"}, + {file = "asv-0.6.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:022723563d770b43c50615e4b18d1ad861c00fcd91343bfbd51d21bfff708d4c"}, + {file = "asv-0.6.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:71d2ba7b16c462b92cd36c2a4d07753bb6c995149a830ce1d4246f6061bf3f1d"}, + {file = "asv-0.6.4.tar.gz", hash = "sha256:1d124184171cfe106e3e57ac04e3221b8d4571c9bd6ca2c6498a8c7407339df1"}, +] [package.dependencies] -six = ">=1.4" +asv-runner = ">=v0.2.1" +build = "*" +colorama = {version = "*", markers = "platform_system == \"Windows\""} +json5 = "*" +pympler = {version = "*", markers = "platform_python_implementation != \"PyPy\""} +pyyaml = {version = "*", markers = "platform_python_implementation != \"PyPy\""} +tabulate = "*" +tomli = {version = "*", markers = "python_version < \"3.11\""} +virtualenv = "*" [package.extras] -hg = ["python-hglib (>=1.5)"] +dev = ["isort (>=5.11.5)", "ruff"] +doc = ["sphinx", "sphinx-bootstrap-theme"] +hg = ["python-hglib"] +plugs = ["asv-bench-memray"] +test = ["feedparser", "filelock", "flaky", "numpy", "pytest", "pytest-rerunfailures", "pytest-rerunfailures (>=10.0)", "pytest-timeout", "pytest-xdist", "python-hglib", "rpy2", "scipy", "selenium", "virtualenv"] +virtualenv = ["packaging", "virtualenv"] [[package]] -name = "atomicwrites" -version = "1.4.0" -description = "Atomic file writes." 
-category = "dev" +name = "asv-runner" +version = "0.2.1" +description = "Core Python benchmark code for ASV" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "asv_runner-0.2.1-py3-none-any.whl", hash = "sha256:655d466208ce311768071f5003a61611481b24b3ad5ac41fb8a6374197e647e9"}, + {file = "asv_runner-0.2.1.tar.gz", hash = "sha256:945dd301a06fa9102f221b1e9ddd048f5ecd863796d4c8cd487f5577fe0db66d"}, +] -[[package]] -name = "attrs" -version = "20.3.0" -description = "Classes Without Boilerplate" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[package.dependencies] +importlib-metadata = "*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +docs = ["furo", "myst-parser (>=2)", "sphinx", "sphinx-autobuild", "sphinx-autodoc2 (>=0.4.2)", "sphinx-contributors", "sphinx-copybutton", "sphinx-design", "sphinxcontrib-spelling"] [[package]] name = "babel" -version = "2.9.0" +version = "2.16.0" description = "Internationalization utilities" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, + {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, +] -[package.dependencies] -pytz = ">=2015.7" +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] -name = "black" 
-version = "20.8b1" -description = "The uncompromising code formatter." -category = "main" -optional = false -python-versions = ">=3.6" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -dataclasses = {version = ">=0.6", markers = "python_version < \"3.7\""} -mypy-extensions = ">=0.4.3" -pathspec = ">=0.6,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" -typed-ast = ">=1.4.0" -typing-extensions = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] +name = "betterproto-rust-codec" +version = "0.1.1" +description = "Fast conversion between betterproto messages and Protobuf wire format." +optional = true +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:38ec2ec1743d815a04ffc020e8e3791955601b239b097e4ae0721528d4d8b608"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-macosx_11_0_arm64.whl", hash = "sha256:96a6deef8cda4b4d084df98b621e39a3123d8878dab551b86bbe733d885c4965"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72ce9f153c83b1d0559ab40b0d6a31d8b83ac486230cefc298c8a08f4a97738b"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b8a8485aabbe843208307a9a2c3fc8a8c09295fb22c840cebd5fa7ec6b8ddb36"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a395bf0c9dc86b7d3783ba43f161cd9f7a42809f38c70673cd9999d40eb4f1"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ea99bee659b33500bb1afc4e0dbfa63530f50a7c549d0687565a10a0de63d18f"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:913d73365780d8f3da04cbaa1b2428ca5dc5372a5ee6f4ff2b9f30127362dff7"}, + {file = 
"betterproto_rust_codec-0.1.1-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1a16dbbbc48f4a27b3b70205a2a71baa53fe0e915bc347b75d9b3864326446fa"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-win32.whl", hash = "sha256:06f95ac4c92aa1f28bd1be884c6db86f0bed05c9b93a1e4e3d80bbe2fc66847c"}, + {file = "betterproto_rust_codec-0.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:5b70b3aea76f336cc243b966f2f7496cb6366ad2679d7a999ff521d873f9de48"}, + {file = "betterproto_rust_codec-0.1.1.tar.gz", hash = "sha256:6f7cbe80c8e3f87df992d71568771082c869ed6856521e01db833d9d3b012af5"}, +] [[package]] -name = "blessings" -version = "1.7" -description = "A thin, practical wrapper around terminal coloring, styling, and positioning" -category = "dev" +name = "blessed" +version = "1.20.0" +description = "Easy, practical library for making terminal apps, by providing an elegant, well-documented interface to Colors, Keyboard input, and screen Positioning capabilities." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7" +groups = ["dev"] +files = [ + {file = "blessed-1.20.0-py2.py3-none-any.whl", hash = "sha256:0c542922586a265e699188e52d5f5ac5ec0dd517e5a1041d90d2bbf23f906058"}, + {file = "blessed-1.20.0.tar.gz", hash = "sha256:2cdd67f8746e048f00df47a2880f4d6acbcdb399031b604e34ba8f71d5787680"}, +] [package.dependencies] -six = "*" +jinxed = {version = ">=1.1.0", markers = "platform_system == \"Windows\""} +six = ">=1.9.0" +wcwidth = ">=0.1.4" [[package]] name = "bpython" -version = "0.19" -description = "Fancy Interface to the Python Interpreter" -category = "dev" +version = "0.24" +description = "" optional = false -python-versions = "*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "bpython-0.24-py3-none-any.whl", hash = "sha256:0d196ae3d1ce3dcd559a3fb89ed2c468dfbd1504af0d680b906dd65a9c7a32eb"}, + {file = "bpython-0.24.tar.gz", hash = 
"sha256:98736ffd7a8c48fd2bfb53d898a475f4241bde0b672125706af04d9d08fd3dbd"}, +] [package.dependencies] -curtsies = ">=0.1.18" +curtsies = ">=0.4.0" +cwcwidth = "*" greenlet = "*" pygments = "*" +pyxdg = "*" requests = "*" -six = ">=1.5" [package.extras] -jedi = ["jedi"] +clipboard = ["pyperclip"] +jedi = ["jedi (>=0.16)"] urwid = ["urwid"] watch = ["watchdog"] +[[package]] +name = "build" +version = "1.2.2.post1" +description = "A simple, correct Python build frontend" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "os_name == \"nt\""} +importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} +packaging = ">=19.1" +pyproject_hooks = "*" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] +test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0)", "setuptools (>=56.0.0)", "setuptools (>=56.0.0)", "setuptools (>=67.8.0)", "wheel (>=0.36.0)"] +typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] +uv = ["uv (>=0.1.18)"] +virtualenv = ["virtualenv (>=20.0.35)"] + +[[package]] +name = "cachelib" +version = "0.13.0" +description = "A collection of cache libraries in the same API interface." 
+optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "cachelib-0.13.0-py3-none-any.whl", hash = "sha256:8c8019e53b6302967d4e8329a504acf75e7bc46130291d30188a6e4e58162516"}, + {file = "cachelib-0.13.0.tar.gz", hash = "sha256:209d8996e3c57595bee274ff97116d1d73c4980b2fd9a34c7846cd07fd2e1a48"}, +] + +[[package]] +name = "cachetools" +version = "5.5.0" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, + {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, +] + [[package]] name = "certifi" -version = "2020.12.5" +version = "2024.12.14" description = "Python package for providing Mozilla's CA Bundle." -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "certifi-2024.12.14-py3-none-any.whl", hash = "sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56"}, + {file = "certifi-2024.12.14.tar.gz", hash = "sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db"}, +] [[package]] -name = "chardet" -version = "4.0.0" -description = "Universal encoding detector for Python 2 and 3" -category = "dev" +name = "cfgv" +version = "3.4.0" +description = "Validate configuration and produce human readable error messages." 
optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, + {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, +] [[package]] -name = "click" -version = "7.1.2" -description = "Composable command line interface toolkit" -category = "main" +name = "chardet" +version = "5.2.0" +description = "Universal encoding detector for Python 3" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "chardet-5.2.0-py3-none-any.whl", hash = "sha256:e1cf59446890a00105fe7b7912492ea04b6e6f06d4b742b2c788469e34c82970"}, + {file = "chardet-5.2.0.tar.gz", hash = "sha256:1b3b6ff479a8c414bc3fa2c0852995695c4a026dcd6d0633b2dd092ca39c1cf7"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] [[package]] name = "colorama" -version = "0.4.4" +version = "0.4.6" description = "Cross-platform colored terminal text." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev", "test"] +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] +markers = {test = "sys_platform == \"win32\""} [[package]] name = "coverage" -version = "5.5" +version = "7.6.10" description = "Code coverage measurement for Python" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.9" +groups = ["test"] +files = [ + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file 
= "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = "sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = 
"coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = 
"coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = 
"coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] -toml = ["toml"] +toml = ["tomli"] [[package]] name = "curtsies" -version = "0.3.5" +version = "0.4.2" description = "Curses-like terminal wrapper, with colored strings!" 
-category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "curtsies-0.4.2-py3-none-any.whl", hash = "sha256:f24d676a8c4711fb9edba1ab7e6134bc52305a222980b3b717bb303f5e94cec6"}, + {file = "curtsies-0.4.2.tar.gz", hash = "sha256:6ebe33215bd7c92851a506049c720cca4cf5c192c1665c1d7a98a04c4702760e"}, +] [package.dependencies] -blessings = ">=1.5" +blessed = ">=1.5" cwcwidth = "*" [[package]] name = "cwcwidth" -version = "0.1.4" +version = "0.1.9" description = "Python bindings for wc(s)width" -category = "dev" optional = false -python-versions = "*" - -[[package]] -name = "dataclasses" -version = "0.7" -description = "A backport of the dataclasses module for Python 3.6" -category = "main" -optional = false -python-versions = ">=3.6, <3.7" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "cwcwidth-0.1.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:704f0d6888aa5e81e76d9f76709385f9f55aca8c450ee82cc722054814a7791f"}, + {file = "cwcwidth-0.1.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0633158205b50f253ad04e301156807e309a9fb9479a520418e010da6df13604"}, + {file = "cwcwidth-0.1.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a5407d0933c3aab8ee92cffd9e4f09010f25af10ebdfa19776748402bba9261"}, + {file = "cwcwidth-0.1.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:72490e07dfbc599fdf6efe26a13cfbf725f0513b181c3386d65bfd84f6175924"}, + {file = "cwcwidth-0.1.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bf71151ae06e95f266bef91392c1562539b2eed847fd1f00f7b5b4ca3fd41a67"}, + {file = "cwcwidth-0.1.9-cp310-cp310-win32.whl", hash = "sha256:3e3c186b5c171d85f2b7f093e7efb33fd9b6e55b791ff75a0f101b18ec0433cd"}, + {file = "cwcwidth-0.1.9-cp310-cp310-win_amd64.whl", hash = "sha256:ae17e493ffc18497c2602f8f42a0d8e490ea42ab3ccfbe5e4a6069a6d24f3b36"}, + {file = 
"cwcwidth-0.1.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b1c3eb0a8c1b25c4a17b6b9bbf7d25ce9df3ea43b6f87903c51bc12434a2cc29"}, + {file = "cwcwidth-0.1.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c8752815ce4e40e7b34b7fe039276a5fbfb1b077131614381b13ef3b7bb21ff"}, + {file = "cwcwidth-0.1.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:368ace13080dbaacdc247370d8a965a749b124aa50d0b1b6eb87601826db870f"}, + {file = "cwcwidth-0.1.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ca9a653661e152a426bdb51a272f36bc79f9830e6a7169abe8110ec367c3518c"}, + {file = "cwcwidth-0.1.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f96386cc29e6eef8ef066d7dd3c767c5119d66506dabea20dd344dabb3f2d225"}, + {file = "cwcwidth-0.1.9-cp311-cp311-win32.whl", hash = "sha256:f6ba88970ec12fdbed5554beb1b9a25d8271fc3d0d9e60639db700a79bed1863"}, + {file = "cwcwidth-0.1.9-cp311-cp311-win_amd64.whl", hash = "sha256:aa6725e7b3571fdf6ce7c02d1dd2d69e00d166bb6df44e46ab215067028b3a03"}, + {file = "cwcwidth-0.1.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:42de102d5191fc68ef3ff6530f60c4895148ddc21aa0acaaf4612e5f7f0c38c4"}, + {file = "cwcwidth-0.1.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:877e48c615b3fec88b7e640f9cf9d96704497657fb5aad2b7c0b0c59ecabff69"}, + {file = "cwcwidth-0.1.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdbaf0a8dad20eb685df11a195a2449fe230b08a5b356d036c8d7e59d4128a88"}, + {file = "cwcwidth-0.1.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:f6e0e023c4b127c47fd4c44cf537be209b9a28d8725f4f576f4d63744a23aa38"}, + {file = "cwcwidth-0.1.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b4f7d24236ce3c9d3b5e07fd75d232452f19bdddb6ae8bbfdcb97b6cb02835e8"}, + {file = 
"cwcwidth-0.1.9-cp312-cp312-win32.whl", hash = "sha256:ba9da6c911bf108334426890bc9f57b839a38e1afc4383a41bd70adbce470db3"}, + {file = "cwcwidth-0.1.9-cp312-cp312-win_amd64.whl", hash = "sha256:40466f16e85c338e8fc3eee87a8c9ca23416cc68b3049f68cb4cead5fb8b71b3"}, + {file = "cwcwidth-0.1.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:167f59c3c1e1d8e231a1abd666af4e73dd8a94917efb6522e9b610ac4587903a"}, + {file = "cwcwidth-0.1.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afc745f18c9e3c38851a931c0c0a7e479d6494911ba1353f998d707f95a895b4"}, + {file = "cwcwidth-0.1.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8d55c47cbec4796e89cfedc89c52e6c4c2faeb77489a763415b9f76d8fc14db"}, + {file = "cwcwidth-0.1.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c858842849ce2cfdf207095253da83831d9407771c8073f6b75f24d3faf1a1eb"}, + {file = "cwcwidth-0.1.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc049ce273f32b632f5ead649b2120f8b2b78035d7b069fdc460c4be9affddb5"}, + {file = "cwcwidth-0.1.9-cp38-cp38-win32.whl", hash = "sha256:1bafe978a5b7915848244a952829e3f8757c0cebef581c8250da6064c906c38c"}, + {file = "cwcwidth-0.1.9-cp38-cp38-win_amd64.whl", hash = "sha256:024d1b21e6123bf30a849e60eea3482f556bbd00d39215f86c904e5bd81fc1b6"}, + {file = "cwcwidth-0.1.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7d367da5e6fb538388817bf5b2d6dd4db90e5e631d99c34055589d007b5c94bc"}, + {file = "cwcwidth-0.1.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad721d9dbc14eafd06176e4f5594942336b1e813de2a5ab7bd64254393c5713f"}, + {file = "cwcwidth-0.1.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:711ace9796cb6767ff29095ff5b0ec4619e7297854eb4b91ba99154590eddcc9"}, + {file = "cwcwidth-0.1.9-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:945615a7b8cdcbcd8e06d399f96a2b09440c3a4c5cb3c2d0109f00d80da27a12"}, + {file = "cwcwidth-0.1.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ffaf706abe400282f299463594d8887566e2a280cd0255110bd4397cc7be2910"}, + {file = "cwcwidth-0.1.9-cp39-cp39-win32.whl", hash = "sha256:03093cac6f8e4326b1c30169e024fe2894f76c6ffddf6464e489bb33cb3a2897"}, + {file = "cwcwidth-0.1.9-cp39-cp39-win_amd64.whl", hash = "sha256:0ddef2c504e6f4fd6122b46d55061f487add1ebb86596ae70ffc2a8b8955c8bc"}, + {file = "cwcwidth-0.1.9.tar.gz", hash = "sha256:f19d11a0148d4a8cacd064c96e93bca8ce3415a186ae8204038f45e108db76b8"}, +] [[package]] name = "distlib" -version = "0.3.1" +version = "0.3.9" description = "Distribution utilities" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +files = [ + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, +] [[package]] name = "docutils" -version = "0.17" +version = "0.21.2" description = "Docutils -- Python Documentation Utilities" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, + {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["test"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", 
hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] [[package]] name = "filelock" -version = "3.0.12" +version = "3.16.1" description = "A platform independent file lock." -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, + {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "greenlet" -version = "1.0.0" +version = "3.1.1" description = "Lightweight in-process concurrent programming" -category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, + {file = 
"greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, + {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, + {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, + {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, + {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, + {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, + {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, + {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, + {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, + {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, + {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, + {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, + {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, + {file = 
"greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, + {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, + {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, + {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, + {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, + {file = 
"greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, + {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, + {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, + {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, + {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, + {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, + {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, + {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, + {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, + {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, + {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, + {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, + {file = 
"greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, + {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, + {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, + {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, + {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, + {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, +] [package.extras] -docs = ["sphinx"] +docs = ["Sphinx", "furo"] +test = ["objgraph", "psutil"] [[package]] name = "grpcio" -version = "1.36.1" +version = "1.69.0" description = "HTTP/2-based RPC framework" -category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -six = ">=1.5.2" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "grpcio-1.69.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:2060ca95a8db295ae828d0fc1c7f38fb26ccd5edf9aa51a0f44251f5da332e97"}, + {file = "grpcio-1.69.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:2e52e107261fd8fa8fa457fe44bfadb904ae869d87c1280bf60f93ecd3e79278"}, + {file = "grpcio-1.69.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:316463c0832d5fcdb5e35ff2826d9aa3f26758d29cdfb59a368c1d6c39615a11"}, + {file = "grpcio-1.69.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26c9a9c4ac917efab4704b18eed9082ed3b6ad19595f047e8173b5182fec0d5e"}, + {file = 
"grpcio-1.69.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90b3646ced2eae3a0599658eeccc5ba7f303bf51b82514c50715bdd2b109e5ec"}, + {file = "grpcio-1.69.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3b75aea7c6cb91b341c85e7c1d9db1e09e1dd630b0717f836be94971e015031e"}, + {file = "grpcio-1.69.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cfd14175f9db33d4b74d63de87c64bb0ee29ce475ce3c00c01ad2a3dc2a9e51"}, + {file = "grpcio-1.69.0-cp310-cp310-win32.whl", hash = "sha256:9031069d36cb949205293cf0e243abd5e64d6c93e01b078c37921493a41b72dc"}, + {file = "grpcio-1.69.0-cp310-cp310-win_amd64.whl", hash = "sha256:cc89b6c29f3dccbe12d7a3b3f1b3999db4882ae076c1c1f6df231d55dbd767a5"}, + {file = "grpcio-1.69.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:8de1b192c29b8ce45ee26a700044717bcbbd21c697fa1124d440548964328561"}, + {file = "grpcio-1.69.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:7e76accf38808f5c5c752b0ab3fd919eb14ff8fafb8db520ad1cc12afff74de6"}, + {file = "grpcio-1.69.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:d5658c3c2660417d82db51e168b277e0ff036d0b0f859fa7576c0ffd2aec1442"}, + {file = "grpcio-1.69.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5494d0e52bf77a2f7eb17c6da662886ca0a731e56c1c85b93505bece8dc6cf4c"}, + {file = "grpcio-1.69.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ed866f9edb574fd9be71bf64c954ce1b88fc93b2a4cbf94af221e9426eb14d6"}, + {file = "grpcio-1.69.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c5ba38aeac7a2fe353615c6b4213d1fbb3a3c34f86b4aaa8be08baaaee8cc56d"}, + {file = "grpcio-1.69.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f79e05f5bbf551c4057c227d1b041ace0e78462ac8128e2ad39ec58a382536d2"}, + {file = "grpcio-1.69.0-cp311-cp311-win32.whl", hash = "sha256:bf1f8be0da3fcdb2c1e9f374f3c2d043d606d69f425cd685110dd6d0d2d61258"}, + {file = "grpcio-1.69.0-cp311-cp311-win_amd64.whl", hash = 
"sha256:fb9302afc3a0e4ba0b225cd651ef8e478bf0070cf11a529175caecd5ea2474e7"}, + {file = "grpcio-1.69.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:fc18a4de8c33491ad6f70022af5c460b39611e39578a4d84de0fe92f12d5d47b"}, + {file = "grpcio-1.69.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:0f0270bd9ffbff6961fe1da487bdcd594407ad390cc7960e738725d4807b18c4"}, + {file = "grpcio-1.69.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:dc48f99cc05e0698e689b51a05933253c69a8c8559a47f605cff83801b03af0e"}, + {file = "grpcio-1.69.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e925954b18d41aeb5ae250262116d0970893b38232689c4240024e4333ac084"}, + {file = "grpcio-1.69.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d222569273720366f68a99cb62e6194681eb763ee1d3b1005840678d4884f9"}, + {file = "grpcio-1.69.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:b62b0f41e6e01a3e5082000b612064c87c93a49b05f7602fe1b7aa9fd5171a1d"}, + {file = "grpcio-1.69.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:db6f9fd2578dbe37db4b2994c94a1d9c93552ed77dca80e1657bb8a05b898b55"}, + {file = "grpcio-1.69.0-cp312-cp312-win32.whl", hash = "sha256:b192b81076073ed46f4b4dd612b8897d9a1e39d4eabd822e5da7b38497ed77e1"}, + {file = "grpcio-1.69.0-cp312-cp312-win_amd64.whl", hash = "sha256:1227ff7836f7b3a4ab04e5754f1d001fa52a730685d3dc894ed8bc262cc96c01"}, + {file = "grpcio-1.69.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:a78a06911d4081a24a1761d16215a08e9b6d4d29cdbb7e427e6c7e17b06bcc5d"}, + {file = "grpcio-1.69.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:dc5a351927d605b2721cbb46158e431dd49ce66ffbacb03e709dc07a491dde35"}, + {file = "grpcio-1.69.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:3629d8a8185f5139869a6a17865d03113a260e311e78fbe313f1a71603617589"}, + {file = "grpcio-1.69.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c9a281878feeb9ae26db0622a19add03922a028d4db684658f16d546601a4870"}, + {file = "grpcio-1.69.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cc614e895177ab7e4b70f154d1a7c97e152577ea101d76026d132b7aaba003b"}, + {file = "grpcio-1.69.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:1ee76cd7e2e49cf9264f6812d8c9ac1b85dda0eaea063af07292400f9191750e"}, + {file = "grpcio-1.69.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:0470fa911c503af59ec8bc4c82b371ee4303ececbbdc055f55ce48e38b20fd67"}, + {file = "grpcio-1.69.0-cp313-cp313-win32.whl", hash = "sha256:b650f34aceac8b2d08a4c8d7dc3e8a593f4d9e26d86751ebf74ebf5107d927de"}, + {file = "grpcio-1.69.0-cp313-cp313-win_amd64.whl", hash = "sha256:028337786f11fecb5d7b7fa660475a06aabf7e5e52b5ac2df47414878c0ce7ea"}, + {file = "grpcio-1.69.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:b7f693db593d6bf285e015d5538bf1c86cf9c60ed30b6f7da04a00ed052fe2f3"}, + {file = "grpcio-1.69.0-cp38-cp38-macosx_10_14_universal2.whl", hash = "sha256:8b94e83f66dbf6fd642415faca0608590bc5e8d30e2c012b31d7d1b91b1de2fd"}, + {file = "grpcio-1.69.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b634851b92c090763dde61df0868c730376cdb73a91bcc821af56ae043b09596"}, + {file = "grpcio-1.69.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf5f680d3ed08c15330d7830d06bc65f58ca40c9999309517fd62880d70cb06e"}, + {file = "grpcio-1.69.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:200e48a6e7b00f804cf00a1c26292a5baa96507c7749e70a3ec10ca1a288936e"}, + {file = "grpcio-1.69.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:45a4704339b6e5b24b0e136dea9ad3815a94f30eb4f1e1d44c4ac484ef11d8dd"}, + {file = "grpcio-1.69.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:85d347cb8237751b23539981dbd2d9d8f6e9ff90082b427b13022b948eb6347a"}, + {file = "grpcio-1.69.0-cp38-cp38-win32.whl", hash = "sha256:60e5de105dc02832dc8f120056306d0ef80932bcf1c0e2b4ca3b676de6dc6505"}, + {file = 
"grpcio-1.69.0-cp38-cp38-win_amd64.whl", hash = "sha256:282f47d0928e40f25d007f24eb8fa051cb22551e3c74b8248bc9f9bea9c35fe0"}, + {file = "grpcio-1.69.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:dd034d68a2905464c49479b0c209c773737a4245d616234c79c975c7c90eca03"}, + {file = "grpcio-1.69.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:01f834732c22a130bdf3dc154d1053bdbc887eb3ccb7f3e6285cfbfc33d9d5cc"}, + {file = "grpcio-1.69.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:a7f4ed0dcf202a70fe661329f8874bc3775c14bb3911d020d07c82c766ce0eb1"}, + {file = "grpcio-1.69.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd7ea241b10bc5f0bb0f82c0d7896822b7ed122b3ab35c9851b440c1ccf81588"}, + {file = "grpcio-1.69.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f03dc9b4da4c0dc8a1db7a5420f575251d7319b7a839004d8916257ddbe4816"}, + {file = "grpcio-1.69.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ca71d73a270dff052fe4edf74fef142d6ddd1f84175d9ac4a14b7280572ac519"}, + {file = "grpcio-1.69.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5ccbed100dc43704e94ccff9e07680b540d64e4cc89213ab2832b51b4f68a520"}, + {file = "grpcio-1.69.0-cp39-cp39-win32.whl", hash = "sha256:1514341def9c6ec4b7f0b9628be95f620f9d4b99331b7ef0a1845fd33d9b579c"}, + {file = "grpcio-1.69.0-cp39-cp39-win_amd64.whl", hash = "sha256:c1fea55d26d647346acb0069b08dca70984101f2dc95066e003019207212e303"}, + {file = "grpcio-1.69.0.tar.gz", hash = "sha256:936fa44241b5379c5afc344e1260d467bee495747eaf478de825bab2791da6f5"}, +] [package.extras] -protobuf = ["grpcio-tools (>=1.36.1)"] +protobuf = ["grpcio-tools (>=1.69.0)"] [[package]] name = "grpcio-tools" -version = "1.36.1" +version = "1.69.0" description = "Protobuf code generator for gRPC" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "grpcio_tools-1.69.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:8c210630faa581c3bd08953dac4ad21a7f49862f3b92d69686e9b436d2f1265d"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-macosx_12_0_universal2.whl", hash = "sha256:09b66ea279fcdaebae4ec34b1baf7577af3b14322738aa980c1c33cfea71f7d7"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:be94a4bfa56d356aae242cc54072c9ccc2704b659eaae2fd599a94afebf791ce"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28778debad73a8c8e0a0e07e6a2f76eecce43adbc205d17dd244d2d58bb0f0aa"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:449308d93e4c97ae3a4503510c6d64978748ff5e21429c85da14fdc783c0f498"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b9343651e73bc6e0df6bb518c2638bf9cc2194b50d060cdbcf1b2121cd4e4ae3"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2f08b063612553e726e328aef3a27adfaea8d92712b229012afc54d59da88a02"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-win32.whl", hash = "sha256:599ffd39525e7bbb6412a63e56a2e6c1af8f3493fe4305260efd4a11d064cce0"}, + {file = "grpcio_tools-1.69.0-cp310-cp310-win_amd64.whl", hash = "sha256:02f92e3c2bae67ece818787f8d3d89df0fa1e5e6bbb7c1493824fd5dfad886dd"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:c18df5d1c8e163a29863583ec51237d08d7059ef8d4f7661ee6d6363d3e38fe3"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-macosx_10_14_universal2.whl", hash = "sha256:37876ae49235ef2e61e5059faf45dc5e7142ca54ae61aec378bb9483e0cd7e95"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-manylinux_2_17_aarch64.whl", hash = "sha256:33120920e29959eaa37a1268c6a22af243d086b1a5e5222b4203e29560ece9ce"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:788bb3ecd1b44664d829d319b3c1ebc15c7d7b5e7d1f22706ab57d6acd2c6301"}, + {file = 
"grpcio_tools-1.69.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f453b11a112e3774c8957ec2570669f3da1f7fbc8ee242482c38981496e88da2"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:7e5c5dc2b656755cb58b11a7e87b65258a4a8eaff01b6c30ffcb230dd447c03d"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8eabf0a7a98c14322bc74f9910c96f98feebe311e085624b2d022924d4f652ca"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-win32.whl", hash = "sha256:ad567bea43d018c2215e1db10316eda94ca19229a834a3221c15d132d24c1b8a"}, + {file = "grpcio_tools-1.69.0-cp311-cp311-win_amd64.whl", hash = "sha256:3d64e801586dbea3530f245d48b9ed031738cc3eb099d5ce2fdb1b3dc2e1fb20"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:8ef8efe8beac4cc1e30d41893e4096ca2601da61001897bd17441645de2d4d3c"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-macosx_10_14_universal2.whl", hash = "sha256:a00e87a0c5a294028115a098819899b08dd18449df5b2aac4a2b87ba865e8681"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-manylinux_2_17_aarch64.whl", hash = "sha256:7722700346d5b223159532e046e51f2ff743ed4342e5fe3e0457120a4199015e"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a934116fdf202cb675246056ee54645c743e2240632f86a37e52f91a405c7143"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e6a6d44359ca836acfbc58103daf94b3bb8ac919d659bb348dcd7fbecedc293"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e27662c0597fd1ab5399a583d358b5203edcb6fc2b29d6245099dfacd51a6ddc"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:7bbb2b2fb81d95bcdd1d8331defb5f5dc256dbe423bb98b682cf129cdd432366"}, + {file = "grpcio_tools-1.69.0-cp312-cp312-win32.whl", hash = "sha256:e11accd10cf4af5031ac86c45f1a13fb08f55e005cea070917c12e78fe6d2aa2"}, + 
{file = "grpcio_tools-1.69.0-cp312-cp312-win_amd64.whl", hash = "sha256:6df4c6ac109af338a8ccde29d184e0b0bdab13d78490cb360ff9b192a1aec7e2"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:8c320c4faa1431f2e1252ef2325a970ac23b2fd04ffef6c12f96dd4552c3445c"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-macosx_10_14_universal2.whl", hash = "sha256:5f1224596ad74dd14444b20c37122b361c5d203b67e14e018b995f3c5d76eede"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-manylinux_2_17_aarch64.whl", hash = "sha256:965a0cf656a113bc32d15ac92ca51ed702a75d5370ae0afbdd36f818533a708a"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:978835768c11a7f28778b3b7c40f839d8a57f765c315e80c4246c23900d56149"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:094c7cec9bd271a32dfb7c620d4a558c63fcb0122fd1651b9ed73d6afd4ae6fe"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:b51bf4981b3d7e47c2569efadff08284787124eb3dea0f63f491d39703231d3c"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ea7aaf0dc1a828e2133357a9e9553fd1bb4e766890d52a506cc132e40632acdc"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-win32.whl", hash = "sha256:4320f11b79d3a148cc23bad1b81719ce1197808dc2406caa8a8ba0a5cfb0260d"}, + {file = "grpcio_tools-1.69.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9bae733654e0eb8ca83aa1d0d6b6c2f4a3525ce70d5ffc07df68d28f6520137"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:c78d3a7d9ba4292ba7abcc43430df426fc805e79a1dcd147509af0668332885b"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-macosx_10_14_universal2.whl", hash = "sha256:497bdaa996a4de70f643c008a08813b4d20e114de50a384ae5e29d849c24c9c8"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:aea33dd5a07a3b250b02a1b3f435e86d4abc94936b3ce634a2d70bc224189495"}, + {file = 
"grpcio_tools-1.69.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d3101c8d6f890f9d978e400843cc29992c5e03ae74f359e73dade09f2469a08"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1163ba3f829141206dce1ceb67cfca73b57d279cd7183f188276443700a4980e"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a85785058c31bac3d0b26c158b576eed536e4ce1af72c1d05a3518e745d44aac"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4ee934bbe8aa8035eea2711c12a6e537ab4c4a35a6d742ccf34bfa3a0492f412"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-win32.whl", hash = "sha256:808d1b963bda8ca3c9f55cb8aa051ed2f2c98cc1fb89f79b4f67e8218580f8f3"}, + {file = "grpcio_tools-1.69.0-cp38-cp38-win_amd64.whl", hash = "sha256:afa8cd6b93e4f607c3750a976a96f874830ec7dc5f408e0fac270d0464147024"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:01121b6570932bfb7d8b2ce2c0055dba902a415477079e249d85fe4494f72db2"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-macosx_10_14_universal2.whl", hash = "sha256:9861e282aa7b3656c67e84d0c25ee0e9210b955e0ec2c64699b8f80483f90853"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:00adf628259e8c314a02ca1580d6a8b14eeef266f5dd5e15bf92c1efbbcf63c0"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:371d03ac31b76ba77d44bdba6a8560f344c6d1ed558babab64760da085e392b7"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6730414c01fe9027ba12538fd6e192e1bea94d5b819a1e03d15e89aab1b4573"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:5562a1b1b67deffd04fbb1bcf8f1634580538ce35895b77cdfaec1fb115efd95"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f8996efddc867134f22bbf8a368b1b2a018d0a9b0ac9d3185cfd81d1abd8066"}, 
+ {file = "grpcio_tools-1.69.0-cp39-cp39-win32.whl", hash = "sha256:8f5959d8a453d613e7137831f6885b43b5c378ec317943b4ec599046baa97bfc"}, + {file = "grpcio_tools-1.69.0-cp39-cp39-win_amd64.whl", hash = "sha256:5d47abf7e0662dd5dbb9cc252c3616e5fbc5f71d34e3f6332cd24bcdf2940abd"}, + {file = "grpcio_tools-1.69.0.tar.gz", hash = "sha256:3e1a98f4d9decb84979e1ddd3deb09c0a33a84b6e3c0776d5bde4097e3ab66dd"}, +] [package.dependencies] -grpcio = ">=1.36.1" -protobuf = ">=3.5.0.post1,<4.0dev" +grpcio = ">=1.69.0" +protobuf = ">=5.26.1,<6.0dev" +setuptools = "*" [[package]] name = "grpclib" -version = "0.4.1" +version = "0.4.7" description = "Pure-Python gRPC implementation for asyncio" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "grpclib-0.4.7.tar.gz", hash = "sha256:2988ef57c02b22b7a2e8e961792c41ccf97efc2ace91ae7a5b0de03c363823c3"}, +] [package.dependencies] -dataclasses = {version = "*", markers = "python_version < \"3.7\""} h2 = ">=3.1.0,<5" multidict = "*" +[package.extras] +protobuf = ["protobuf (>=3.20.0)"] + [[package]] name = "h2" -version = "3.2.0" +version = "4.1.0" description = "HTTP/2 State-Machine based protocol implementation" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" +groups = ["main"] +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] [package.dependencies] -hpack = ">=3.0,<4" -hyperframe = ">=5.2.0,<6" +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" [[package]] name = "hpack" -version = "3.0.0" +version = "4.0.0" description = "Pure-Python HPACK header compression" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" +groups = ["main"] +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = 
"sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] [[package]] name = "hyperframe" -version = "5.2.0" +version = "6.0.1" description = "HTTP/2 framing layer for Python" -category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6.1" +groups = ["main"] +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + +[[package]] +name = "identify" +version = "2.6.5" +description = "File identification library for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "identify-2.6.5-py2.py3-none-any.whl", hash = "sha256:14181a47091eb75b337af4c23078c9d09225cd4c48929f521f3bf16b09d02566"}, + {file = "identify-2.6.5.tar.gz", hash = "sha256:c10b33f250e5bba374fae86fb57f3adcebf1161bce7cdf92031915fd480c13bc"}, +] + +[package.extras] +license = ["ukkonen"] [[package]] name = "idna" -version = "2.10" +version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" +groups = ["dev"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" -version = "1.2.0" +version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" -category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, 
!=3.1.*, !=3.2.*, !=3.3.*" +groups = ["dev"] +files = [ + {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, + {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, +] [[package]] name = "importlib-metadata" -version = "3.10.0" +version = "8.5.0" description = "Read metadata from Python packages" -category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, + {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, +] [package.dependencies] -typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} -zipp = ">=0.5" +zipp = ">=3.20" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] [[package]] -name = "importlib-resources" -version = "5.1.2" -description = "Read resources from Python packages" -category = "dev" +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" optional = false -python-versions = 
">=3.6" +python-versions = ">=3.7" +groups = ["test"] +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "jinja2" +version = "3.1.5" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +groups = ["main", "dev"] +files = [ + {file = "jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb"}, + {file = "jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb"}, +] [package.dependencies] -zipp = {version = ">=0.4", markers = "python_version < \"3.8\""} +MarkupSafe = ">=2.0" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "pytest-black (>=0.3.7)", "pytest-mypy"] +i18n = ["Babel (>=2.7)"] [[package]] -name = "jinja2" -version = "2.11.3" -description = "A very fast and expressive template engine." 
-category = "main" +name = "jinxed" +version = "1.3.0" +description = "Jinxed Terminal Library" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = "*" +groups = ["dev"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "jinxed-1.3.0-py2.py3-none-any.whl", hash = "sha256:b993189f39dc2d7504d802152671535b06d380b26d78070559551cbf92df4fc5"}, + {file = "jinxed-1.3.0.tar.gz", hash = "sha256:1593124b18a41b7a3da3b078471442e51dbad3d77b4d4f2b0c26ab6f7d660dbf"}, +] [package.dependencies] -MarkupSafe = ">=0.23" +ansicon = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "json5" +version = "0.10.0" +description = "A Python implementation of the JSON5 data format." +optional = false +python-versions = ">=3.8.0" +groups = ["dev"] +files = [ + {file = "json5-0.10.0-py3-none-any.whl", hash = "sha256:19b23410220a7271e8377f81ba8aacba2fdd56947fbb137ee5977cbe1f5e8dfa"}, + {file = "json5-0.10.0.tar.gz", hash = "sha256:e66941c8f0a02026943c52c2eb34ebeb2a6f819a0be05920a6f5243cd30fd559"}, +] [package.extras] -i18n = ["Babel (>=0.8)"] +dev = ["build (==1.2.2.post1)", "coverage (==7.5.3)", "mypy (==1.13.0)", "pip (==24.3.1)", "pylint (==3.2.3)", "ruff (==0.7.3)", "twine (==5.1.1)", "uv (==0.5.1)"] [[package]] name = "markupsafe" -version = "1.1.1" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
-category = "main" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" - -[[package]] -name = "more-itertools" -version = "8.7.0" -description = "More routines for operating on iterables, beyond itertools" -category = "dev" -optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", 
hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = 
"MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = 
"sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, +] [[package]] name = "multidict" -version = "5.1.0" +version = "6.1.0" description = "multidict implementation" -category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3380252550e372e8511d49481bd836264c009adb826b23fefcc5dd3c69692f60"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99f826cbf970077383d7de805c0681799491cb939c25450b9b5b3ced03ca99f1"}, + {file = "multidict-6.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a114d03b938376557927ab23f1e950827c3b893ccb94b62fd95d430fd0e5cf53"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1c416351ee6271b2f49b56ad7f308072f6f44b37118d69c2cad94f3fa8a40d5"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6b5d83030255983181005e6cfbac1617ce9746b219bc2aad52201ad121226581"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3e97b5e938051226dc025ec80980c285b053ffb1e25a3db2a3aa3bc046bf7f56"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d618649d4e70ac6efcbba75be98b26ef5078faad23592f9b51ca492953012429"}, + {file = "multidict-6.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10524ebd769727ac77ef2278390fb0068d83f3acb7773792a5080f2b0abf7748"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ff3827aef427c89a25cc96ded1759271a93603aba9fb977a6d264648ebf989db"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:06809f4f0f7ab7ea2cabf9caca7d79c22c0758b58a71f9d32943ae13c7ace056"}, + {file = 
"multidict-6.1.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:f179dee3b863ab1c59580ff60f9d99f632f34ccb38bf67a33ec6b3ecadd0fd76"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:aaed8b0562be4a0876ee3b6946f6869b7bcdb571a5d1496683505944e268b160"}, + {file = "multidict-6.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c8b88a2ccf5493b6c8da9076fb151ba106960a2df90c2633f342f120751a9e7"}, + {file = "multidict-6.1.0-cp310-cp310-win32.whl", hash = "sha256:4a9cb68166a34117d6646c0023c7b759bf197bee5ad4272f420a0141d7eb03a0"}, + {file = "multidict-6.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:20b9b5fbe0b88d0bdef2012ef7dee867f874b72528cf1d08f1d59b0e3850129d"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3efe2c2cb5763f2f1b275ad2bf7a287d3f7ebbef35648a9726e3b69284a4f3d6"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c7053d3b0353a8b9de430a4f4b4268ac9a4fb3481af37dfe49825bf45ca24156"}, + {file = "multidict-6.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27e5fc84ccef8dfaabb09d82b7d179c7cf1a3fbc8a966f8274fcb4ab2eb4cadb"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0e2b90b43e696f25c62656389d32236e049568b39320e2735d51f08fd362761b"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d83a047959d38a7ff552ff94be767b7fd79b831ad1cd9920662db05fec24fe72"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d1a9dd711d0877a1ece3d2e4fea11a8e75741ca21954c919406b44e7cf971304"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec2abea24d98246b94913b76a125e855eb5c434f7c46546046372fe60f666351"}, + {file = "multidict-6.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4867cafcbc6585e4b678876c489b9273b13e9fff9f6d6d66add5e15d11d926cb"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5b48204e8d955c47c55b72779802b219a39acc3ee3d0116d5080c388970b76e3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:d8fff389528cad1618fb4b26b95550327495462cd745d879a8c7c2115248e399"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a7a9541cd308eed5e30318430a9c74d2132e9a8cb46b901326272d780bf2d423"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:da1758c76f50c39a2efd5e9859ce7d776317eb1dd34317c8152ac9251fc574a3"}, + {file = "multidict-6.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:c943a53e9186688b45b323602298ab727d8865d8c9ee0b17f8d62d14b56f0753"}, + {file = "multidict-6.1.0-cp311-cp311-win32.whl", hash = "sha256:90f8717cb649eea3504091e640a1b8568faad18bd4b9fcd692853a04475a4b80"}, + {file = "multidict-6.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:82176036e65644a6cc5bd619f65f6f19781e8ec2e5330f51aa9ada7504cc1926"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:b04772ed465fa3cc947db808fa306d79b43e896beb677a56fb2347ca1a49c1fa"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6180c0ae073bddeb5a97a38c03f30c233e0a4d39cd86166251617d1bbd0af436"}, + {file = "multidict-6.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:071120490b47aa997cca00666923a83f02c7fbb44f71cf7f136df753f7fa8761"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50b3a2710631848991d0bf7de077502e8994c804bb805aeb2925a981de58ec2e"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58c621844d55e71c1b7f7c498ce5aa6985d743a1a59034c57a905b3f153c1ef"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:55b6d90641869892caa9ca42ff913f7ff1c5ece06474fbd32fb2cf6834726c95"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b820514bfc0b98a30e3d85462084779900347e4d49267f747ff54060cc33925"}, + {file = "multidict-6.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10a9b09aba0c5b48c53761b7c720aaaf7cf236d5fe394cd399c7ba662d5f9966"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1e16bf3e5fc9f44632affb159d30a437bfe286ce9e02754759be5536b169b305"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76f364861c3bfc98cbbcbd402d83454ed9e01a5224bb3a28bf70002a230f73e2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:820c661588bd01a0aa62a1283f20d2be4281b086f80dad9e955e690c75fb54a2"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:0e5f362e895bc5b9e67fe6e4ded2492d8124bdf817827f33c5b46c2fe3ffaca6"}, + {file = "multidict-6.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ec660d19bbc671e3a6443325f07263be452c453ac9e512f5eb935e7d4ac28b3"}, + {file = "multidict-6.1.0-cp312-cp312-win32.whl", hash = "sha256:58130ecf8f7b8112cdb841486404f1282b9c86ccb30d3519faf301b2e5659133"}, + {file = "multidict-6.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:188215fc0aafb8e03341995e7c4797860181562380f81ed0a87ff455b70bf1f1"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:d569388c381b24671589335a3be6e1d45546c2988c2ebe30fdcada8457a31008"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:052e10d2d37810b99cc170b785945421141bf7bb7d2f8799d431e7db229c385f"}, + {file = "multidict-6.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f90c822a402cb865e396a504f9fc8173ef34212a342d92e362ca498cad308e28"}, + {file = 
"multidict-6.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b225d95519a5bf73860323e633a664b0d85ad3d5bede6d30d95b35d4dfe8805b"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:23bfd518810af7de1116313ebd9092cb9aa629beb12f6ed631ad53356ed6b86c"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5c09fcfdccdd0b57867577b719c69e347a436b86cd83747f179dbf0cc0d4c1f3"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf6bea52ec97e95560af5ae576bdac3aa3aae0b6758c6efa115236d9e07dae44"}, + {file = "multidict-6.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57feec87371dbb3520da6192213c7d6fc892d5589a93db548331954de8248fd2"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0c3f390dc53279cbc8ba976e5f8035eab997829066756d811616b652b00a23a3"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:59bfeae4b25ec05b34f1956eaa1cb38032282cd4dfabc5056d0a1ec4d696d3aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b2f59caeaf7632cc633b5cf6fc449372b83bbdf0da4ae04d5be36118e46cc0aa"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:37bb93b2178e02b7b618893990941900fd25b6b9ac0fa49931a40aecdf083fe4"}, + {file = "multidict-6.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4e9f48f58c2c523d5a06faea47866cd35b32655c46b443f163d08c6d0ddb17d6"}, + {file = "multidict-6.1.0-cp313-cp313-win32.whl", hash = "sha256:3a37ffb35399029b45c6cc33640a92bef403c9fd388acce75cdc88f58bd19a81"}, + {file = "multidict-6.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:e9aa71e15d9d9beaad2c6b9319edcdc0a49a43ef5c0a4c8265ca9ee7d6c67774"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:db7457bac39421addd0c8449933ac32d8042aae84a14911a757ae6ca3eef1392"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d094ddec350a2fb899fec68d8353c78233debde9b7d8b4beeafa70825f1c281a"}, + {file = "multidict-6.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5845c1fd4866bb5dd3125d89b90e57ed3138241540897de748cdf19de8a2fca2"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9079dfc6a70abe341f521f78405b8949f96db48da98aeb43f9907f342f627cdc"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3914f5aaa0f36d5d60e8ece6a308ee1c9784cd75ec8151062614657a114c4478"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c08be4f460903e5a9d0f76818db3250f12e9c344e79314d1d570fc69d7f4eae4"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d093be959277cb7dee84b801eb1af388b6ad3ca6a6b6bf1ed7585895789d027d"}, + {file = "multidict-6.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3702ea6872c5a2a4eeefa6ffd36b042e9773f05b1f37ae3ef7264b1163c2dcf6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:2090f6a85cafc5b2db085124d752757c9d251548cedabe9bd31afe6363e0aff2"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:f67f217af4b1ff66c68a87318012de788dd95fcfeb24cc889011f4e1c7454dfd"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:189f652a87e876098bbc67b4da1049afb5f5dfbaa310dd67c594b01c10388db6"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:6bb5992037f7a9eff7991ebe4273ea7f51f1c1c511e6a2ce511d0e7bdb754492"}, + {file = "multidict-6.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:ac10f4c2b9e770c4e393876e35a7046879d195cd123b4f116d299d442b335bcd"}, + {file = 
"multidict-6.1.0-cp38-cp38-win32.whl", hash = "sha256:e27bbb6d14416713a8bd7aaa1313c0fc8d44ee48d74497a0ff4c3a1b6ccb5167"}, + {file = "multidict-6.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:22f3105d4fb15c8f57ff3959a58fcab6ce36814486500cd7485651230ad4d4ef"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:4e18b656c5e844539d506a0a06432274d7bd52a7487e6828c63a63d69185626c"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a185f876e69897a6f3325c3f19f26a297fa058c5e456bfcff8015e9a27e83ae1"}, + {file = "multidict-6.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ab7c4ceb38d91570a650dba194e1ca87c2b543488fe9309b4212694174fd539c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e617fb6b0b6953fffd762669610c1c4ffd05632c138d61ac7e14ad187870669c"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16e5f4bf4e603eb1fdd5d8180f1a25f30056f22e55ce51fb3d6ad4ab29f7d96f"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c035da3f544b1882bac24115f3e2e8760f10a0107614fc9839fd232200b875"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:957cf8e4b6e123a9eea554fa7ebc85674674b713551de587eb318a2df3e00255"}, + {file = "multidict-6.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:483a6aea59cb89904e1ceabd2b47368b5600fb7de78a6e4a2c2987b2d256cf30"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:87701f25a2352e5bf7454caa64757642734da9f6b11384c1f9d1a8e699758057"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:682b987361e5fd7a139ed565e30d81fd81e9629acc7d925a205366877d8c8657"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:ce2186a7df133a9c895dea3331ddc5ddad42cdd0d1ea2f0a51e5d161e4762f28"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:9f636b730f7e8cb19feb87094949ba54ee5357440b9658b2a32a5ce4bce53972"}, + {file = "multidict-6.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:73eae06aa53af2ea5270cc066dcaf02cc60d2994bbb2c4ef5764949257d10f43"}, + {file = "multidict-6.1.0-cp39-cp39-win32.whl", hash = "sha256:1ca0083e80e791cffc6efce7660ad24af66c8d4079d2a750b29001b53ff59ada"}, + {file = "multidict-6.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:aa466da5b15ccea564bdab9c89175c762bc12825f4659c11227f515cee76fa4a"}, + {file = "multidict-6.1.0-py3-none-any.whl", hash = "sha256:48e171e52d1c4d33888e529b999e5900356b9ae588c2f09a52dcefb158b27506"}, + {file = "multidict-6.1.0.tar.gz", hash = "sha256:22ae2ebf9b0c69d206c003e2f6a914ea33f0a932d4aa16f236afc049d9958f4a"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} [[package]] name = "mypy" -version = "0.770" +version = "1.14.1" description = "Optional static typing for Python" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = 
"mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, +] [package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -typed-ast = ">=1.4.0,<1.5.0" -typing-extensions = ">=3.7.4" +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -category = "main" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
optional = false -python-versions = "*" +python-versions = ">=3.5" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +description = "Node.js virtual environment builder" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +files = [ + {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, + {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, +] [[package]] name = "packaging" -version = "20.9" +version = "24.2" description = "Core utilities for Python packages" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -pyparsing = ">=2.0.2" +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, +] [[package]] name = "pastel" version = "0.2.1" description = "Bring colors to your terminal." 
-category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["test"] +files = [ + {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, + {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, +] [[package]] -name = "pathspec" -version = "0.8.1" -description = "Utility library for gitignore style pattern matching of file paths." -category = "main" +name = "platformdirs" +version = "4.3.6" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, + {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, +] + +[package.extras] +docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.2)", "pytest-cov (>=5)", "pytest-mock (>=3.14)"] +type = ["mypy (>=1.11.2)"] [[package]] name = "pluggy" -version = "0.13.1" +version = "1.5.0" description = "plugin and hook calling mechanisms for python" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = 
"sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, +] [package.extras] dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] [[package]] name = "poethepoet" -version = "0.10.0" +version = "0.32.1" description = "A task runner that works well with poetry." -category = "dev" optional = false -python-versions = ">=3.6,<4.0" +python-versions = ">=3.9" +groups = ["test"] +files = [ + {file = "poethepoet-0.32.1-py3-none-any.whl", hash = "sha256:d1e0a52a2f677870fac17dfb26bfe4910242756ac821443ef31f90ad26227c2d"}, + {file = "poethepoet-0.32.1.tar.gz", hash = "sha256:471e1a025812dcd3d2997e30989681be5ab0a49232ee5fba94859629671c9584"}, +] [package.dependencies] -pastel = ">=0.2.0,<0.3.0" -tomlkit = ">=0.6.0,<1.0.0" +pastel = ">=0.2.1,<0.3.0" +pyyaml = ">=6.0.2,<7.0.0" +tomli = {version = ">=1.2.2", markers = "python_version < \"3.11\""} + +[package.extras] +poetry-plugin = ["poetry (>=1.0,<3.0)"] [[package]] -name = "protobuf" -version = "3.15.7" -description = "Protocol Buffers" -category = "dev" +name = "pre-commit" +version = "4.0.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = "*" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pre_commit-4.0.1-py2.py3-none-any.whl", hash = "sha256:efde913840816312445dc98787724647c65473daefe420785f885e8ed9a06878"}, + {file = "pre_commit-4.0.1.tar.gz", hash = "sha256:80905ac375958c0444c65e9cebebd948b3cdb518f335a091a670a89d652139d2"}, +] [package.dependencies] -six = ">=1.9" +cfgv = ">=2.0.0" +identify = ">=1.0.0" +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" [[package]] -name = "py" -version = "1.10.0" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +name = "protobuf" +version = "5.29.3" +description = "" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, + {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, + {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, + {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, + {file = "protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, + {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, + {file = 
"protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, + {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, + {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, +] + +[[package]] +name = "pydantic" +version = "2.10.5" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "pydantic-2.10.5-py3-none-any.whl", hash = "sha256:4dd4e322dbe55472cb7ca7e73f4b63574eecccf2835ffa2af9021ce113c83c53"}, + {file = "pydantic-2.10.5.tar.gz", hash = "sha256:278b38dbbaec562011d659ee05f63346951b3a248a6f3642e1bc68894ea2b4ff"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata"] + +[[package]] +name = "pydantic-core" +version = "2.27.2" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + 
{file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = 
"pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", 
hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = 
"sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = 
"sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" [[package]] name = "pygments" -version = "2.8.1" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." 
-category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pympler" +version = "1.1" +description = "A development tool to measure, monitor and analyze the memory behavior of Python objects." +optional = false +python-versions = ">=3.6" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\"" +files = [ + {file = "Pympler-1.1-py3-none-any.whl", hash = "sha256:5b223d6027d0619584116a0cbc28e8d2e378f7a79c1e5e024f9ff3b673c58506"}, + {file = "pympler-1.1.tar.gz", hash = "sha256:1eaa867cb8992c218430f1708fdaccda53df064144d1c5656b1e6f1ee6000424"}, +] + +[package.dependencies] +pywin32 = {version = ">=226", markers = "platform_system == \"Windows\""} + +[[package]] +name = "pyproject-api" +version = "1.8.0" +description = "API to interact with the python pyproject.toml based projects" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pyproject_api-1.8.0-py3-none-any.whl", hash = "sha256:3d7d347a047afe796fd5d1885b1e391ba29be7169bd2f102fcd378f04273d228"}, + {file = "pyproject_api-1.8.0.tar.gz", hash = "sha256:77b8049f2feb5d33eefcc21b57f1e279636277a8ac8ad6b5871037b243778496"}, +] + +[package.dependencies] +packaging = ">=24.1" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} + +[package.extras] +docs = ["furo (>=2024.8.6)", "sphinx-autodoc-typehints (>=2.4.1)"] +testing = ["covdefaults (>=2.3)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "setuptools (>=75.1)"] [[package]] -name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" -category = "dev" +name = "pyproject-hooks" 
+version = "1.2.0" +description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, +] [[package]] name = "pytest" -version = "5.4.3" +version = "7.4.4" description = "pytest: simple powerful testing with Python" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.7" +groups = ["test"] +files = [ + {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, + {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, +] [package.dependencies] -atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=17.4.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -more-itertools = ">=4.0.0" +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -wcwidth = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] -checkqa-mypy = ["mypy (==v0.761)"] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-asyncio" -version = "0.12.0" -description = "Pytest support for asyncio." 
-category = "dev" +version = "0.23.8" +description = "Pytest support for asyncio" optional = false -python-versions = ">= 3.5" +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "pytest_asyncio-0.23.8-py3-none-any.whl", hash = "sha256:50265d892689a5faefb84df80819d1ecef566eb3549cf915dfb33569359d1ce2"}, + {file = "pytest_asyncio-0.23.8.tar.gz", hash = "sha256:759b10b33a6dc61cce40a8bd5205e302978bbbcc00e279a8b61d9a6a3c82e4d3"}, +] [package.dependencies] -pytest = ">=5.4.0" +pytest = ">=7.0.0,<9" [package.extras] -testing = ["async_generator (>=1.3)", "coverage", "hypothesis (>=5.7.1)"] +docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] +testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] [[package]] name = "pytest-cov" -version = "2.11.1" +version = "6.0.0" description = "Pytest plugin for measuring coverage." -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.9" +groups = ["test"] +files = [ + {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, + {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, +] [package.dependencies] -coverage = ">=5.2.1" +coverage = {version = ">=7.5", extras = ["toml"]} pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] [[package]] name = "pytest-mock" -version = "3.5.1" +version = "3.14.0" description = "Thin-wrapper around the mock package for easier use with pytest" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, + {file = 
"pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, +] [package.dependencies] -pytest = ">=5.0" +pytest = ">=6.2.5" [package.extras] -dev = ["pre-commit", "tox", "pytest-asyncio"] +dev = ["pre-commit", "pytest-asyncio", "tox"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" -category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] [package.dependencies] six = ">=1.5" [[package]] -name = "pytz" -version = "2021.1" -description = "World timezone definitions, modern and historical" -category = "dev" +name = "pywin32" +version = "308" +description = "Python for Window Extensions" optional = false python-versions = "*" - -[[package]] -name = "regex" -version = "2021.4.4" -description = "Alternative regular expression module, to replace re." 
-category = "main" +groups = ["dev"] +markers = "platform_python_implementation != \"PyPy\" and platform_system == \"Windows\"" +files = [ + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = "sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + 
{file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, +] + +[[package]] +name = "pyxdg" +version = "0.28" +description = "PyXDG contains implementations of freedesktop.org standards in python." optional = false python-versions = "*" +groups = ["dev"] +files = [ + {file = "pyxdg-0.28-py2.py3-none-any.whl", hash = "sha256:bdaf595999a0178ecea4052b7f4195569c1ff4d344567bccdc12dfdf02d545ab"}, + {file = "pyxdg-0.28.tar.gz", hash = "sha256:3267bb3074e934df202af2ee0868575484108581e6f3cb006af1da35395e88b4"}, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, + {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, + {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, + {file = 
"PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, + {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, + {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, + {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, + {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, + {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, + {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, + {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, + {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, + {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, + {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, + {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, + {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, + {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, + {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, + {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, + {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, + {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, + {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, + {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, + {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, + {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, + {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, + {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, + {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, + {file = 
"PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, + {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, + {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, + {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, + {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, + {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, +] [[package]] name = "requests" -version = "2.25.1" +version = "2.32.3" description = "Python HTTP for Humans." 
-category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.27" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.9.1" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = true +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "ruff-0.9.1-py3-none-linux_armv6l.whl", hash = "sha256:84330dda7abcc270e6055551aca93fdde1b0685fc4fd358f26410f9349cf1743"}, + {file = "ruff-0.9.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3cae39ba5d137054b0e5b472aee3b78a7c884e61591b100aeb544bcd1fc38d4f"}, + {file = "ruff-0.9.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:50c647ff96f4ba288db0ad87048257753733763b409b2faf2ea78b45c8bb7fcb"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0c8b149e9c7353cace7d698e1656ffcf1e36e50f8ea3b5d5f7f87ff9986a7ca"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:beb3298604540c884d8b282fe7625651378e1986c25df51dec5b2f60cafc31ce"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:39d0174ccc45c439093971cc06ed3ac4dc545f5e8bdacf9f067adf879544d969"}, + {file = 
"ruff-0.9.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69572926c0f0c9912288915214ca9b2809525ea263603370b9e00bed2ba56dbd"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:937267afce0c9170d6d29f01fcd1f4378172dec6760a9f4dface48cdabf9610a"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:186c2313de946f2c22bdf5954b8dd083e124bcfb685732cfb0beae0c47233d9b"}, + {file = "ruff-0.9.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f94942a3bb767675d9a051867c036655fe9f6c8a491539156a6f7e6b5f31831"}, + {file = "ruff-0.9.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:728d791b769cc28c05f12c280f99e8896932e9833fef1dd8756a6af2261fd1ab"}, + {file = "ruff-0.9.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2f312c86fb40c5c02b44a29a750ee3b21002bd813b5233facdaf63a51d9a85e1"}, + {file = "ruff-0.9.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:ae017c3a29bee341ba584f3823f805abbe5fe9cd97f87ed07ecbf533c4c88366"}, + {file = "ruff-0.9.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5dc40a378a0e21b4cfe2b8a0f1812a6572fc7b230ef12cd9fac9161aa91d807f"}, + {file = "ruff-0.9.1-py3-none-win32.whl", hash = "sha256:46ebf5cc106cf7e7378ca3c28ce4293b61b449cd121b98699be727d40b79ba72"}, + {file = "ruff-0.9.1-py3-none-win_amd64.whl", hash = "sha256:342a824b46ddbcdddd3abfbb332fa7fcaac5488bf18073e841236aadf4ad5c19"}, + {file = "ruff-0.9.1-py3-none-win_arm64.whl", hash = "sha256:1cd76c7f9c679e6e8f2af8f778367dca82b95009bc7b1a85a47f1521ae524fa7"}, + {file = "ruff-0.9.1.tar.gz", hash = "sha256:fd2b25ecaf907d6458fa842675382c8597b3c746a2dde6717fe3415425df0c17"}, +] + +[[package]] +name = "setuptools" +version = "75.8.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "setuptools-75.8.0-py3-none-any.whl", hash = 
"sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3"}, + {file = "setuptools-75.8.0.tar.gz", hash = "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "six" -version = "1.15.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" -category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = 
"sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] [[package]] name = "snowballstemmer" -version = "2.1.0" +version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +files = [ + {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, + {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, +] [[package]] name = "sphinx" -version = "3.1.2" +version = "7.4.7" description = "Python documentation generator" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, + {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, +] [package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.12" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -snowballstemmer = ">=1.1" +alabaster = ">=0.7.14,<0.8.0" +babel = ">=2.13" +colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} +docutils = ">=0.20,<0.22" +imagesize = ">=1.3" +importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} +Jinja2 = ">=3.1" +packaging = ">=23.0" +Pygments = ">=2.17" +requests = ">=2.30.0" +snowballstemmer = ">=2.2" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" +sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" +sphinxcontrib-serializinghtml = ">=1.1.9" +tomli = 
{version = ">=2", markers = "python_version < \"3.11\""} [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] +lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", "sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] +test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] [[package]] name = "sphinx-rtd-theme" -version = "0.5.0" +version = "3.0.2" description = "Read the Docs theme for Sphinx" -category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "sphinx_rtd_theme-3.0.2-py2.py3-none-any.whl", hash = "sha256:422ccc750c3a3a311de4ae327e82affdaf59eb695ba4936538552f3b00f4ee13"}, + {file = "sphinx_rtd_theme-3.0.2.tar.gz", hash = "sha256:b7457bc25dda723b20b086a670b9953c859eab60a2a03ee8eb2bb23e176e5f85"}, +] [package.dependencies] -sphinx = "*" +docutils = ">0.18,<0.22" +sphinx = ">=6,<9" +sphinxcontrib-jquery = ">=4,<5" [package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] +dev = ["bump2version", "transifex-client", "twine", "wheel"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" +version = "2.0.0" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, + {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = 
"sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, +] [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" +version = "2.0.0" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, + {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, +] [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "1.0.3" +version = "2.1.0" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, + {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, +] [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["html5lib", "pytest"] + +[[package]] +name = "sphinxcontrib-jquery" +version = "4.1" +description = "Extension to include jQuery on newer Sphinx releases" +optional = false 
+python-versions = ">=2.7" +groups = ["dev"] +files = [ + {file = "sphinxcontrib-jquery-4.1.tar.gz", hash = "sha256:1620739f04e36a2c779f1a131a2dfd49b2fd07351bf1968ced074365933abc7a"}, + {file = "sphinxcontrib_jquery-4.1-py2.py3-none-any.whl", hash = "sha256:f936030d7d0147dd026a4f2b5a57343d233f1fc7b363f68b3d4f1cb0993878ae"}, +] + +[package.dependencies] +Sphinx = ">=1.8" [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" optional = false python-versions = ">=3.5" +groups = ["dev"] +files = [ + {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, + {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, +] [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
-category = "dev" +version = "2.0.0" +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, + {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, +] [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] +test = ["defusedxml (>=0.7.1)", "pytest"] [[package]] name = "sphinxcontrib-serializinghtml" -version = "1.1.4" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -category = "dev" +version = "2.0.0" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" optional = false -python-versions = ">=3.5" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, + {file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, +] [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] +standalone = ["Sphinx (>=5)"] test = ["pytest"] [[package]] -name = "toml" -version = "0.10.2" -description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" +name = "tabulate" +version = "0.9.0" +description = "Pretty-print tabular data" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = 
"tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, + {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +] + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["dev", "test"] +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = 
"tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] +markers = {dev = "python_version < \"3.11\"", test = "python_full_version <= \"3.11.0a6\""} [[package]] name = "tomlkit" -version = "0.7.0" +version = "0.13.2" description = "Style preserving TOML library" -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.8" +groups = ["test"] +files = [ + {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, + {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, +] [[package]] name = "tox" -version = "3.23.0" +version = "4.23.2" description = "tox 
is a generic virtualenv management and test command line tool" -category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, + {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, +] [package.dependencies] -colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} -filelock = ">=3.0.0" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -packaging = ">=14" -pluggy = ">=0.12.0" -py = ">=1.4.17" -six = ">=1.14.0" -toml = ">=0.9.4" -virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" +cachetools = ">=5.5" +chardet = ">=5.2" +colorama = ">=0.4.6" +filelock = ">=3.16.1" +packaging = ">=24.1" +platformdirs = ">=4.3.6" +pluggy = ">=1.5" +pyproject-api = ">=1.8" +tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} +typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} +virtualenv = ">=20.26.6" [package.extras] -docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] -testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"] - -[[package]] -name = "typed-ast" -version = "1.4.2" -description = "a fork of Python 2 and 3 ast modules with type comment support" -category = "main" -optional = false -python-versions = "*" +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] [[package]] name = "typing-extensions" -version = "3.7.4.3" 
-description = "Backported and Experimental Type Hints for Python 3.5+" -category = "main" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["main", "dev", "test"] +files = [ + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, +] [[package]] name = "urllib3" -version = "1.26.4" +version = "2.3.0" description = "HTTP library with thread-safe connection pooling, file post, and more." -category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, +] [package.extras] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] -socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -brotli = ["brotlipy (>=0.6.0)"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] [[package]] name = "virtualenv" -version = "20.4.3" +version = "20.28.1" description = "Virtual Python Environment builder" -category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "virtualenv-20.28.1-py3-none-any.whl", hash = "sha256:412773c85d4dab0409b83ec36f7a6499e72eaf08c80e81e9576bca61831c71cb"}, + {file = "virtualenv-20.28.1.tar.gz", hash = 
"sha256:5d34ab240fdb5d21549b76f9e8ff3af28252f5499fb6d6f031adac4e5a8c5329"}, +] [package.dependencies] -appdirs = ">=1.4.3,<2" -distlib = ">=0.3.1,<1" -filelock = ">=3.0.0,<4" -importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} -six = ">=1.9.0,<2" +distlib = ">=0.3.7,<1" +filelock = ">=3.12.2,<4" +platformdirs = ">=3.9.1,<5" [package.extras] -docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"] [[package]] name = "wcwidth" -version = "0.2.5" +version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" -category = "dev" optional = false python-versions = "*" +groups = ["dev"] +files = [ + {file = "wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, + {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, +] [[package]] name = "zipp" -version = "3.4.1" +version = "3.21.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" optional = 
false -python-versions = ">=3.6" +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] [package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] [extras] -compiler = ["black", "jinja2"] +compiler = ["jinja2", "ruff"] +rust-codec = ["betterproto-rust-codec"] [metadata] -lock-version = "1.1" -python-versions = "^3.6" -content-hash = "c0a0589a01ba432403b80c01c13fb9e139d75d6d2760fa074b723f773bd61518" - -[metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -asv = [ - {file = "asv-0.4.2.tar.gz", hash = "sha256:9134f56b7a2f465420f17b5bb0dee16047a70f01029c996b7ab3f197de2d0779"}, -] -atomicwrites = [ - 
{file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, -] -babel = [ - {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, - {file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"}, -] -black = [ - {file = "black-20.8b1.tar.gz", hash = "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea"}, -] -blessings = [ - {file = "blessings-1.7-py2-none-any.whl", hash = "sha256:caad5211e7ba5afe04367cdd4cfc68fa886e2e08f6f35e76b7387d2109ccea6e"}, - {file = "blessings-1.7-py3-none-any.whl", hash = "sha256:b1fdd7e7a675295630f9ae71527a8ebc10bfefa236b3d6aa4932ee4462c17ba3"}, - {file = "blessings-1.7.tar.gz", hash = "sha256:98e5854d805f50a5b58ac2333411b0482516a8210f23f43308baeb58d77c157d"}, -] -bpython = [ - {file = "bpython-0.19-py2.py3-none-any.whl", hash = "sha256:95d95783bfadfa0a25300a648de5aba4423b0ee76b034022a81dde2b5e853c00"}, - {file = "bpython-0.19.tar.gz", hash = "sha256:476ce09a896c4d34bf5e56aca64650c56fdcfce45781a20dc1521221df8cc49c"}, -] -certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, -] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = 
"chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, -] -click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -coverage = [ - {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, - {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, - {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, - {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, - {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = 
"sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, - {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, - {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, - {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, - {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, - {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, - {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, - {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, - {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, - {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, - {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, - {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, - {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, - {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, - {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, - {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, - {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, - {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, - {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, - {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, - {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, - {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, - {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, - {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, - {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, - {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, - {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, - {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, - {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, - {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, - {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, - {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, - {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, -] -curtsies = [ - {file = 
"curtsies-0.3.5.tar.gz", hash = "sha256:a587ff3335667a32be7afed163f60a1c82c5d9c848d8297534a06fd29de20dbd"}, -] -cwcwidth = [ - {file = "cwcwidth-0.1.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0614e892110401284fec5850ee45846d5ff163654574d3df040f86f02ec05399"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:ffb278e25d3ff9789dca99dcb666469a390ff226b181f846cc8736f1554ff085"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:77281cd94e6d582f3459e1535305cb3ad0afd3fbed0bacbe2e84b7e5cb3e9123"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:3a93491f4cbe5fc821bae02ebcccfa5b9206f441fa3ef618dc6f62fdccde0f07"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ede2a05f88e3ddc4be22591fd5c5491e8a94f6e7fd3c93a3a06164f4ce8690d0"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-win32.whl", hash = "sha256:d76c3b5078355e78ca3aa0fd06939a9793f5a9f9bf4522738fff90fb58b47429"}, - {file = "cwcwidth-0.1.4-cp36-cp36m-win_amd64.whl", hash = "sha256:d5a487c6981bf157b67f83514a754df5e6713a9090df71558a2625788c4a448a"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d8f8464656b48549d2a8ac776eed5c6f10906ee2dcc3767ef8228cc582857f6d"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a85539ec3b879177eb1715bda5bd2bb9753d84569f8717684f07016efb92a5c7"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:53ec58c6478af6062e979a89fc11ec6ca1e4254e93f3305ac62da28809745185"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3bec2366e89de99c6ca8dcd1c92156d60efdbb47dc3a9cdb86d7064773c05d65"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5a7da558423d32064bb8cabe461824543c6072ecdf2d0c2adf521dc63b3f0073"}, - {file = "cwcwidth-0.1.4-cp37-cp37m-win32.whl", hash = "sha256:ec9d57742804a975a75dc633ee3a0bb5bffe67dc897def6a3d84717805584dbd"}, - {file = 
"cwcwidth-0.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9faa4adcdb0c74fb8350da8eee6f893dde5b9a0f817ee0b83607b8e0e4d12929"}, - {file = "cwcwidth-0.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9d0188488c55d947f71d48f47e7f8e4355d75a86afcc8932a500cd84e32e278"}, - {file = "cwcwidth-0.1.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:73d66da4f1807cc673cf924c9fd83f9f61465af13693f5ef2b5b4b9c32faa0c7"}, - {file = "cwcwidth-0.1.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ef08bc8af421e5991ff6c2e67124add008e73ed7fd4fb8767f44c07b789fe114"}, - {file = "cwcwidth-0.1.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:3011f108504e3ad6472f53df0b7a12b9a978e6e0e41bd841a768a6a5f678bc0e"}, - {file = "cwcwidth-0.1.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:55969b269a11f317c29b206e74cae02267af92a3a9a2fb86860a84f64366705a"}, - {file = "cwcwidth-0.1.4-cp38-cp38-win32.whl", hash = "sha256:51481cb731c6d9c46a5d751bafff03ea3072f856c590fe8d4a27a1d404bb20be"}, - {file = "cwcwidth-0.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:146069bc61cb5db11d3c037b057454d78ef2254932f4f4871ae355e0923ce8e7"}, - {file = "cwcwidth-0.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2fc0d1c4214f76ba7fec60aac6e1467588d865a0005ce9063c5471c57751f895"}, - {file = "cwcwidth-0.1.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b1d75b2c9edc19a579dd5d92e93dc7087b6430a250928a06527aa6ebd627b06c"}, - {file = "cwcwidth-0.1.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:63190cb5b87a568ed89cfae3be203935a14dea0c10b116160a15031273771b44"}, - {file = "cwcwidth-0.1.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:99fb16a3b0258ee2fa952e7dab80b839b990aebdb96b98b648211a99e8a0c906"}, - {file = "cwcwidth-0.1.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:01b630049fdd8fc37f0e929d24012fee7855d8aa3f304c8a0c26caf2415c7d85"}, - {file = "cwcwidth-0.1.4-cp39-cp39-win32.whl", hash = "sha256:0e05498c57629bf6c8445b17b2e5a9ec26c0f97080cb7ae2602e14a5db67209b"}, - {file = 
"cwcwidth-0.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:7779cb2ccc04694f95134d3f660216f32be5de82101dcbd8f1c8f81ff748ae41"}, - {file = "cwcwidth-0.1.4.tar.gz", hash = "sha256:482a937891a6918667436e0a7041aab576c26e4bcbcdddd178ef79362fbcf9ab"}, -] -dataclasses = [ - {file = "dataclasses-0.7-py3-none-any.whl", hash = "sha256:3459118f7ede7c8bea0fe795bff7c6c2ce287d01dd226202f7c9ebc0610a7836"}, - {file = "dataclasses-0.7.tar.gz", hash = "sha256:494a6dcae3b8bcf80848eea2ef64c0cc5cd307ffc263e17cdf42f3e5420808e6"}, -] -distlib = [ - {file = "distlib-0.3.1-py2.py3-none-any.whl", hash = "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb"}, - {file = "distlib-0.3.1.zip", hash = "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1"}, -] -docutils = [ - {file = "docutils-0.17-py2.py3-none-any.whl", hash = "sha256:a71042bb7207c03d5647f280427f14bfbd1a65c9eb84f4b341d85fafb6bb4bdf"}, - {file = "docutils-0.17.tar.gz", hash = "sha256:e2ffeea817964356ba4470efba7c2f42b6b0de0b04e66378507e3e2504bbff4c"}, -] -filelock = [ - {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, - {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, -] -greenlet = [ - {file = "greenlet-1.0.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:1d1d4473ecb1c1d31ce8fd8d91e4da1b1f64d425c1dc965edc4ed2a63cfa67b2"}, - {file = "greenlet-1.0.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:cfd06e0f0cc8db2a854137bd79154b61ecd940dce96fad0cba23fe31de0b793c"}, - {file = "greenlet-1.0.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:eb333b90036358a0e2c57373f72e7648d7207b76ef0bd00a4f7daad1f79f5203"}, - {file = "greenlet-1.0.0-cp27-cp27m-win32.whl", hash = "sha256:1a1ada42a1fd2607d232ae11a7b3195735edaa49ea787a6d9e6a53afaf6f3476"}, - {file = "greenlet-1.0.0-cp27-cp27m-win_amd64.whl", hash = 
"sha256:f6f65bf54215e4ebf6b01e4bb94c49180a589573df643735107056f7a910275b"}, - {file = "greenlet-1.0.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:f59eded163d9752fd49978e0bab7a1ff21b1b8d25c05f0995d140cc08ac83379"}, - {file = "greenlet-1.0.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:875d4c60a6299f55df1c3bb870ebe6dcb7db28c165ab9ea6cdc5d5af36bb33ce"}, - {file = "greenlet-1.0.0-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:1bb80c71de788b36cefb0c3bb6bfab306ba75073dbde2829c858dc3ad70f867c"}, - {file = "greenlet-1.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:b5f1b333015d53d4b381745f5de842f19fe59728b65f0fbb662dafbe2018c3a5"}, - {file = "greenlet-1.0.0-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:5352c15c1d91d22902582e891f27728d8dac3bd5e0ee565b6a9f575355e6d92f"}, - {file = "greenlet-1.0.0-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:2c65320774a8cd5fdb6e117c13afa91c4707548282464a18cf80243cf976b3e6"}, - {file = "greenlet-1.0.0-cp35-cp35m-manylinux2014_ppc64le.whl", hash = "sha256:111cfd92d78f2af0bc7317452bd93a477128af6327332ebf3c2be7df99566683"}, - {file = "greenlet-1.0.0-cp35-cp35m-win32.whl", hash = "sha256:cdb90267650c1edb54459cdb51dab865f6c6594c3a47ebd441bc493360c7af70"}, - {file = "greenlet-1.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:eac8803c9ad1817ce3d8d15d1bb82c2da3feda6bee1153eec5c58fa6e5d3f770"}, - {file = "greenlet-1.0.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:c93d1a71c3fe222308939b2e516c07f35a849c5047f0197442a4d6fbcb4128ee"}, - {file = "greenlet-1.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:122c63ba795fdba4fc19c744df6277d9cfd913ed53d1a286f12189a0265316dd"}, - {file = "greenlet-1.0.0-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c5b22b31c947ad8b6964d4ed66776bcae986f73669ba50620162ba7c832a6b6a"}, - {file = "greenlet-1.0.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:4365eccd68e72564c776418c53ce3c5af402bc526fe0653722bc89efd85bf12d"}, - {file = 
"greenlet-1.0.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:da7d09ad0f24270b20f77d56934e196e982af0d0a2446120cb772be4e060e1a2"}, - {file = "greenlet-1.0.0-cp36-cp36m-win32.whl", hash = "sha256:647ba1df86d025f5a34043451d7c4a9f05f240bee06277a524daad11f997d1e7"}, - {file = "greenlet-1.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:e6e9fdaf6c90d02b95e6b0709aeb1aba5affbbb9ccaea5502f8638e4323206be"}, - {file = "greenlet-1.0.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:62afad6e5fd70f34d773ffcbb7c22657e1d46d7fd7c95a43361de979f0a45aef"}, - {file = "greenlet-1.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d3789c1c394944084b5e57c192889985a9f23bd985f6d15728c745d380318128"}, - {file = "greenlet-1.0.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f5e2d36c86c7b03c94b8459c3bd2c9fe2c7dab4b258b8885617d44a22e453fb7"}, - {file = "greenlet-1.0.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:292e801fcb3a0b3a12d8c603c7cf340659ea27fd73c98683e75800d9fd8f704c"}, - {file = "greenlet-1.0.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:f3dc68272990849132d6698f7dc6df2ab62a88b0d36e54702a8fd16c0490e44f"}, - {file = "greenlet-1.0.0-cp37-cp37m-win32.whl", hash = "sha256:7cd5a237f241f2764324396e06298b5dee0df580cf06ef4ada0ff9bff851286c"}, - {file = "greenlet-1.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0ddd77586553e3daf439aa88b6642c5f252f7ef79a39271c25b1d4bf1b7cbb85"}, - {file = "greenlet-1.0.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:90b6a25841488cf2cb1c8623a53e6879573010a669455046df5f029d93db51b7"}, - {file = "greenlet-1.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:ed1d1351f05e795a527abc04a0d82e9aecd3bdf9f46662c36ff47b0b00ecaf06"}, - {file = "greenlet-1.0.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:94620ed996a7632723a424bccb84b07e7b861ab7bb06a5aeb041c111dd723d36"}, - {file = "greenlet-1.0.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f97d83049715fd9dec7911860ecf0e17b48d8725de01e45de07d8ac0bd5bc378"}, - {file = 
"greenlet-1.0.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:0a77691f0080c9da8dfc81e23f4e3cffa5accf0f5b56478951016d7cfead9196"}, - {file = "greenlet-1.0.0-cp38-cp38-win32.whl", hash = "sha256:e1128e022d8dce375362e063754e129750323b67454cac5600008aad9f54139e"}, - {file = "greenlet-1.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:5d4030b04061fdf4cbc446008e238e44936d77a04b2b32f804688ad64197953c"}, - {file = "greenlet-1.0.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:f8450d5ef759dbe59f84f2c9f77491bb3d3c44bc1a573746daf086e70b14c243"}, - {file = "greenlet-1.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:df8053867c831b2643b2c489fe1d62049a98566b1646b194cc815f13e27b90df"}, - {file = "greenlet-1.0.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:df3e83323268594fa9755480a442cabfe8d82b21aba815a71acf1bb6c1776218"}, - {file = "greenlet-1.0.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:181300f826625b7fd1182205b830642926f52bd8cdb08b34574c9d5b2b1813f7"}, - {file = "greenlet-1.0.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:58ca0f078d1c135ecf1879d50711f925ee238fe773dfe44e206d7d126f5bc664"}, - {file = "greenlet-1.0.0-cp39-cp39-win32.whl", hash = "sha256:5f297cb343114b33a13755032ecf7109b07b9a0020e841d1c3cedff6602cc139"}, - {file = "greenlet-1.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:5d69bbd9547d3bc49f8a545db7a0bd69f407badd2ff0f6e1a163680b5841d2b0"}, - {file = "greenlet-1.0.0.tar.gz", hash = "sha256:719e169c79255816cdcf6dccd9ed2d089a72a9f6c42273aae12d55e8d35bdcf8"}, -] -grpcio = [ - {file = "grpcio-1.36.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:e3a83c5db16f95daac1d96cf3c9018d765579b5a29bb336758d793028e729921"}, - {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:c18739fecb90760b183bfcb4da1cf2c6bf57e38f7baa2c131d5f67d9a4c8365d"}, - {file = "grpcio-1.36.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:f6efa62ca1fe02cd34ec35f53446f04a15fe2c886a4e825f5679936a573d2cbf"}, - {file = 
"grpcio-1.36.1-cp27-cp27m-win32.whl", hash = "sha256:9a18299827a70be0507f98a65393b1c7f6c004fe2ca995fe23ffac534dd187a7"}, - {file = "grpcio-1.36.1-cp27-cp27m-win_amd64.whl", hash = "sha256:8a89190de1985a54ef311650cf9687ffb81de038973fd32e452636ddae36b29f"}, - {file = "grpcio-1.36.1-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:3e75643d21db7d68acd541d3fec66faaa8061d12b511e101b529ff12a276bb9b"}, - {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c5204e05e18268dd6a1099ca6c106fd9d00bcae1e37d5a5186094c55044c941"}, - {file = "grpcio-1.36.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:24d4c2c5e540e666c52225953d6813afc8ccf9bf46db6a72edd4e8d606656248"}, - {file = "grpcio-1.36.1-cp35-cp35m-linux_armv7l.whl", hash = "sha256:4dc7295dc9673f7af22c1e38c2a2c24ecbd6773a4c5ed5a46ed38ad4dcf2bf6c"}, - {file = "grpcio-1.36.1-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:f241116d4bf1a8037ff87f16914b606390824e50902bdbfa2262e855fbf07fe5"}, - {file = "grpcio-1.36.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:1056b558acfd575d774644826df449e1402a03e456a3192fafb6b06d1069bf80"}, - {file = "grpcio-1.36.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:52ec563da45d06319224ebbda53501d25594de64ee1b2786e119ba4a2f1ce40c"}, - {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:7cbeac9bbe6a4a7fce4a89c892c249135dd9f5f5219ede157174c34a456188f0"}, - {file = "grpcio-1.36.1-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:2abaa9f0d83bd0b26f6d0d1fc4b97d73bde3ceac36ab857f70d3cabcf31c5c79"}, - {file = "grpcio-1.36.1-cp35-cp35m-win32.whl", hash = "sha256:02030e1afd3247f2b159df9dff959ec79dd4047b1c4dd4eec9e3d1642efbd504"}, - {file = "grpcio-1.36.1-cp35-cp35m-win_amd64.whl", hash = "sha256:eafafc7e040e36aa926edc731ab52c23465981888779ae64bfc8ad85888ed4f3"}, - {file = "grpcio-1.36.1-cp36-cp36m-linux_armv7l.whl", hash = "sha256:1030e74ddd0fa6e3bad7944f0c68cf1251b15bcd70641f0ad3858fdf2b8602a0"}, - {file = 
"grpcio-1.36.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:b003e24339030ed356f59505d1065b89e1f443ef41ce71ca9069be944c0d2e6b"}, - {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:76daa3c4d58fcf40f7969bdb4270335e96ee0382a050cadcd97d7332cd0251a3"}, - {file = "grpcio-1.36.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f591597bb25eae0094ead5a965555e911453e5f35fdbdaa83be11ef107865697"}, - {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:cbd82c479338fc1c0e5c3db09752b61fe47d40c6e38e4be8657153712fa76674"}, - {file = "grpcio-1.36.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:7e32bc01dfaa7a51c547379644ea619a2161d6969affdac3bbd173478d26673d"}, - {file = "grpcio-1.36.1-cp36-cp36m-win32.whl", hash = "sha256:5378189fb897567f4929f75ab67a3e0da4f8967806246cb9cfa1fa06bfbdb0d5"}, - {file = "grpcio-1.36.1-cp36-cp36m-win_amd64.whl", hash = "sha256:3a6295aa692806218e97bb687a71cd768450ed99e2acddc488f18d738edef463"}, - {file = "grpcio-1.36.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:6f6f8a8b57e40347d0bf32c2135037dae31d63d3b19007b4c426a11b76deaf65"}, - {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:4c05ed54b2a00df01e633bebec819b512bf0c60f8f5b3b36dd344dc673b02fea"}, - {file = "grpcio-1.36.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:e1b9e906aa6f7577016e86ed7f3a69cae7dab4e41356584dc7980f76ea65035f"}, - {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:a602d6b30760bbbb2fe776caaa914a0d404636cafc3f2322718bf8002d7b1e55"}, - {file = "grpcio-1.36.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:dee9971aef20fc09ed897420446c4d0926cd1d7630f343333288523ca5b44bb2"}, - {file = "grpcio-1.36.1-cp37-cp37m-win32.whl", hash = "sha256:ed16bfeda02268e75e038c58599d52afc7097d749916c079b26bc27a66900f7d"}, - {file = "grpcio-1.36.1-cp37-cp37m-win_amd64.whl", hash = "sha256:85a6035ae75ce964f78f19cf913938596ccf068b149fcd79f4371268bcb9aa7c"}, - {file = 
"grpcio-1.36.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:6b30682180053eebc87802c2f249d2f59b430e1a18e8808575dde0d22a968b2c"}, - {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:5e4920a8fb5d17b2c5ba980db0ac1c925bbee3e5d70e96da3ec4fb1c8600d68f"}, - {file = "grpcio-1.36.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f7740d9d9451f3663df11b241ac05cafc0efaa052d2fdca6640c4d3748eaf6e2"}, - {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:20b7c4c5513e1135a2261e56830c0e710f205fee92019b92fe132d7f16a5cfd8"}, - {file = "grpcio-1.36.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:216fbd2a488e74c3b96e240e4054c85c4c99102a439bc9f556936991643f43bc"}, - {file = "grpcio-1.36.1-cp38-cp38-win32.whl", hash = "sha256:7863c2a140e829b1f4c6d67bf0bf15e5321ac4766d0a295e2682970d9dd4b091"}, - {file = "grpcio-1.36.1-cp38-cp38-win_amd64.whl", hash = "sha256:f214076eb13da9e65c1aa9877b51fca03f51a82bd8691358e1a1edd9ff341330"}, - {file = "grpcio-1.36.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:ec753c022b39656f88409fbf9f2d3b28497e3f17aa678f884d78776b41ebe6bd"}, - {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:0648a6d5d7ddcd9c8462d7d961660ee024dad6b88152ee3a521819e611830edf"}, - {file = "grpcio-1.36.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:45ea10dd133a43b10c0b4326834107ebccfee25dab59b312b78e018c2d72a1f0"}, - {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:bab743cdac1d6d8326c65d1d091d0740b39966dfab06519f74a03b3d128b8454"}, - {file = "grpcio-1.36.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:09af8ceb91860086216edc6e5ea15f9beb2cf81687faa43b7c03216f5b73e244"}, - {file = "grpcio-1.36.1-cp39-cp39-win32.whl", hash = "sha256:f3f70505207ee1cee65f60a799fd8e06e07861409aa0d55d834825a79b40c297"}, - {file = "grpcio-1.36.1-cp39-cp39-win_amd64.whl", hash = "sha256:f22c11772eff25ba1ca536e760b8c34ba56f2a9d66b6842cb11770a8f61f879d"}, - {file = "grpcio-1.36.1.tar.gz", hash = 
"sha256:a66ea59b20f3669df0f0c6a3bd57b985e5b2d1dcf3e4c29819bb8dc232d0fd38"}, -] -grpcio-tools = [ - {file = "grpcio-tools-1.36.1.tar.gz", hash = "sha256:80ef584f7b917f575e4b8f2ec59cd4a4d98c2046e801a735f3136b05742a36a6"}, - {file = "grpcio_tools-1.36.1-cp27-cp27m-macosx_10_10_x86_64.whl", hash = "sha256:ebbfdbff079bfc303a4e1d3da59302147d5cf4f1db2c412a074366149d93e89e"}, - {file = "grpcio_tools-1.36.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:d95dfefe156be02bcce4eb044ac7ff166c8a6c288d71bc3ed960d8b26bce2786"}, - {file = "grpcio_tools-1.36.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:7969915ac252d0e67f9cfd4f8b9d6bb546efc7b26bce34978a940e37ee4078d5"}, - {file = "grpcio_tools-1.36.1-cp27-cp27m-win32.whl", hash = "sha256:582b77e7a4905063d8071ac3685cefa38941799d5f4ea7b4519281a28cfc6752"}, - {file = "grpcio_tools-1.36.1-cp27-cp27m-win_amd64.whl", hash = "sha256:66d2a6237941199df0493e46b8a3123005b4dfde9af1b9572e8c54eb605a7567"}, - {file = "grpcio_tools-1.36.1-cp27-cp27mu-linux_armv7l.whl", hash = "sha256:702c3eb61a3cfddcaea04d2358c0390c2e189fe42b64a92239df8292366ab4df"}, - {file = "grpcio_tools-1.36.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:ce621375bc7dfaeac93e23e202771a6e567a8ea7e9a7cc690b87d8b1950e3da6"}, - {file = "grpcio_tools-1.36.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:ff1792b188183e977e2feccb1f3b3d4580f921df8f61385d7ae8eace10578a23"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-linux_armv7l.whl", hash = "sha256:a3a64797840fd4917ec98532d17b9b7c6a954dcfd7862657c750255556d369a5"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-macosx_10_10_intel.whl", hash = "sha256:0873697064cdbb116ba9f88ff524e13e9afd78bf7905ecd6a0f0f743bf40ca64"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6df49b402f387decaaf57784c3e74bea6f34cf446cc45d4bf7b9adb34f97fb20"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:aec997dafa90a29b399bdb23d968ab43da223faeac005d384a1194f43ee0f46e"}, - 
{file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2014_i686.whl", hash = "sha256:92336c60db1052c865ab7c9936680187d16d2f565c470ba03199e817120714e8"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-manylinux2014_x86_64.whl", hash = "sha256:bbe8da70ccbe21c72599eb8de5ad26bd053c01f4f03c48ea16323f96f1ec7095"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-win32.whl", hash = "sha256:96e1c0d267eb03b819a31bcf973579220ec3b8b53178952daa9e2f1ad696783f"}, - {file = "grpcio_tools-1.36.1-cp35-cp35m-win_amd64.whl", hash = "sha256:f4326b1a5352e85480629bf888b132f0aec79bb791d29cd3e2322586cd70433a"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-linux_armv7l.whl", hash = "sha256:f2befead0395e8aaab1e8f76825c8c9fa93d69249a513c26107a55183f91ccd9"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:125859be6458e65e348c50ddb7a964ba48945d521af3f46ce35aca9a2b752296"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:c669f1ee5642631ad93fa51d298306124d26bccc76ce63a3bc143ddcf01c58af"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:693dc16a65b1766037fca8cddc173c0f45e79dd14e05d61128a30dbfd02f6503"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:add07eb3c79478b003ac3af7b636275c37fa6bac56e6a29f79128bea09b37488"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:747b547c487231a6325eda820d1d6a7c6080eda2cd1f68a7d4b2f8d9cc0a3e95"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-win32.whl", hash = "sha256:fd5eed43f5764209a95a58db82c064c1958525f30ad8ebb57df38dd2c9e86aa7"}, - {file = "grpcio_tools-1.36.1-cp36-cp36m-win_amd64.whl", hash = "sha256:bc6257b5533c66143f4f084aea3ae52c1c01f99997a8b81d2259d0cf083176b5"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f35fad86d99743cc15fccf11ec74d8c9b76e997cd233dc1fd031457d3f0fd7fc"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2010_i686.whl", hash = 
"sha256:dcdfe82237e7498eb49dd12751716c55d189a5e49b4bda0bb53f85acbe51bbb1"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d6b3c868c6ac15a0e288d3a5380ad5f01802cbbed8645333e496fa31ecea19af"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:6898776449485feedb6d0fd98d3a36c8882c32a5603b86b2511e2557ee765d40"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:6fee070c18be66a282ceb56245191dabf80986aee333e74d2fdea58118b452d4"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-win32.whl", hash = "sha256:55ed5c5de883defacd899123ba5a9f0077b7fb87d8f1778cb5996b4391604447"}, - {file = "grpcio_tools-1.36.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f74f0c3eedc0de72c402e82bb1199ffe5e359ccdac70bf789d65444042a84f42"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:93e3ba4696b69fc4356a0823ecddd8b29ebb1fba0571f27574b1182ef5c262f6"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:e730845677e45c6829d212be6e4fb69768979c3b35b5884293be02a7f436e18c"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:f7ba8d631f8f5c089958285545bd9e307fd752cdd1fa31515a51cfc1e04b833d"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:af392594ba30b5ee470b7538cf792df970e2097edc299685d8e0d76b2b1bef7b"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:facda541209a0b0edfccf6a5b18ce344c4e90bc8950c995482c85936a23ba939"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-win32.whl", hash = "sha256:9fa491aaccd455e3aec35d12bcef5dce307c674f08e98bbbf33bf6774e6e2ec5"}, - {file = "grpcio_tools-1.36.1-cp38-cp38-win_amd64.whl", hash = "sha256:76900dde111192900c6eb5ed491cf0d8a13403e502c74859f2e2c3116842668a"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a7b85758e44d9585f27fc7692b58e63952a2e9130cfbbd16defda8c2ffbb2ad"}, - {file = 
"grpcio_tools-1.36.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:9b8556e2938ef9437ef07d028b46198f299533497df878f96785502e6f74250d"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:cd44135fb8b45acc79424e7354bb4548911a6202ca2fac384574099f8d998f06"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:c02b5b6d185b1af86342381ddd1ad3d0482c4116e203e52a7145636fb1b2ad12"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:377cd9b8d2098d2ced48d3dee466fd73fb19128aa0edc6f1799077cf4dbda606"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-win32.whl", hash = "sha256:120bad5a3f3288ae8acd07d839a13d7873304ae35a1d717033295e90ed9bd8ac"}, - {file = "grpcio_tools-1.36.1-cp39-cp39-win_amd64.whl", hash = "sha256:5cec989d219164312bdfa9389aedaea5887fb8133bb1a247fcde5901775b5427"}, -] -grpclib = [ - {file = "grpclib-0.4.1.tar.gz", hash = "sha256:8c0021cd038634c268249e4cd168d9f3570e66ceceec1c9416094b788ebc8372"}, -] -h2 = [ - {file = "h2-3.2.0-py2.py3-none-any.whl", hash = "sha256:61e0f6601fa709f35cdb730863b4e5ec7ad449792add80d1410d4174ed139af5"}, - {file = "h2-3.2.0.tar.gz", hash = "sha256:875f41ebd6f2c44781259005b157faed1a5031df3ae5aa7bcb4628a6c0782f14"}, -] -hpack = [ - {file = "hpack-3.0.0-py2.py3-none-any.whl", hash = "sha256:0edd79eda27a53ba5be2dfabf3b15780928a0dff6eb0c60a3d6767720e970c89"}, - {file = "hpack-3.0.0.tar.gz", hash = "sha256:8eec9c1f4bfae3408a3f30500261f7e6a65912dc138526ea054f9ad98892e9d2"}, -] -hyperframe = [ - {file = "hyperframe-5.2.0-py2.py3-none-any.whl", hash = "sha256:5187962cb16dcc078f23cb5a4b110098d546c3f41ff2d4038a9896893bbd0b40"}, - {file = "hyperframe-5.2.0.tar.gz", hash = "sha256:a9f5c17f2cc3c719b917c4f33ed1c61bd1f8dfac4b1bd23b7c80b3400971b41f"}, -] -idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = 
"sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, -] -imagesize = [ - {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, - {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, -] -importlib-metadata = [ - {file = "importlib_metadata-3.10.0-py3-none-any.whl", hash = "sha256:d2d46ef77ffc85cbf7dac7e81dd663fde71c45326131bea8033b9bad42268ebe"}, - {file = "importlib_metadata-3.10.0.tar.gz", hash = "sha256:c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a"}, -] -importlib-resources = [ - {file = "importlib_resources-5.1.2-py3-none-any.whl", hash = "sha256:ebab3efe74d83b04d6bf5cd9a17f0c5c93e60fb60f30c90f56265fce4682a469"}, - {file = "importlib_resources-5.1.2.tar.gz", hash = "sha256:642586fc4740bd1cad7690f836b3321309402b20b332529f25617ff18e8e1370"}, -] -jinja2 = [ - {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, - {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, -] -markupsafe = [ - {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = 
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, - {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, -] -more-itertools = [ - {file = "more-itertools-8.7.0.tar.gz", hash = 
"sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, - {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, -] -multidict = [ - {file = "multidict-5.1.0-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_s390x.whl", hash = "sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da"}, - {file = "multidict-5.1.0-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224"}, - {file = "multidict-5.1.0-cp36-cp36m-win32.whl", hash = "sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26"}, - {file = "multidict-5.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6"}, - {file = "multidict-5.1.0-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f"}, - {file = 
"multidict-5.1.0-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_s390x.whl", hash = "sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9"}, - {file = "multidict-5.1.0-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37"}, - {file = "multidict-5.1.0-cp37-cp37m-win32.whl", hash = "sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5"}, - {file = "multidict-5.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632"}, - {file = "multidict-5.1.0-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_s390x.whl", hash = "sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a"}, - {file = "multidict-5.1.0-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea"}, - {file = "multidict-5.1.0-cp38-cp38-win32.whl", hash = 
"sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656"}, - {file = "multidict-5.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3"}, - {file = "multidict-5.1.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_s390x.whl", hash = "sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841"}, - {file = "multidict-5.1.0-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda"}, - {file = "multidict-5.1.0-cp39-cp39-win32.whl", hash = "sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80"}, - {file = "multidict-5.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359"}, - {file = "multidict-5.1.0.tar.gz", hash = "sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5"}, -] -mypy = [ - {file = "mypy-0.770-cp35-cp35m-macosx_10_6_x86_64.whl", hash = "sha256:a34b577cdf6313bf24755f7a0e3f3c326d5c1f4fe7422d1d06498eb25ad0c600"}, - {file = "mypy-0.770-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:86c857510a9b7c3104cf4cde1568f4921762c8f9842e987bc03ed4f160925754"}, - {file = "mypy-0.770-cp35-cp35m-win_amd64.whl", hash = 
"sha256:a8ffcd53cb5dfc131850851cc09f1c44689c2812d0beb954d8138d4f5fc17f65"}, - {file = "mypy-0.770-cp36-cp36m-macosx_10_6_x86_64.whl", hash = "sha256:7687f6455ec3ed7649d1ae574136835a4272b65b3ddcf01ab8704ac65616c5ce"}, - {file = "mypy-0.770-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3beff56b453b6ef94ecb2996bea101a08f1f8a9771d3cbf4988a61e4d9973761"}, - {file = "mypy-0.770-cp36-cp36m-win_amd64.whl", hash = "sha256:15b948e1302682e3682f11f50208b726a246ab4e6c1b39f9264a8796bb416aa2"}, - {file = "mypy-0.770-cp37-cp37m-macosx_10_6_x86_64.whl", hash = "sha256:b90928f2d9eb2f33162405f32dde9f6dcead63a0971ca8a1b50eb4ca3e35ceb8"}, - {file = "mypy-0.770-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c56ffe22faa2e51054c5f7a3bc70a370939c2ed4de308c690e7949230c995913"}, - {file = "mypy-0.770-cp37-cp37m-win_amd64.whl", hash = "sha256:8dfb69fbf9f3aeed18afffb15e319ca7f8da9642336348ddd6cab2713ddcf8f9"}, - {file = "mypy-0.770-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:219a3116ecd015f8dca7b5d2c366c973509dfb9a8fc97ef044a36e3da66144a1"}, - {file = "mypy-0.770-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7ec45a70d40ede1ec7ad7f95b3c94c9cf4c186a32f6bacb1795b60abd2f9ef27"}, - {file = "mypy-0.770-cp38-cp38-win_amd64.whl", hash = "sha256:f91c7ae919bbc3f96cd5e5b2e786b2b108343d1d7972ea130f7de27fdd547cf3"}, - {file = "mypy-0.770-py3-none-any.whl", hash = "sha256:3b1fc683fb204c6b4403a1ef23f0b1fac8e4477091585e0c8c54cbdf7d7bb164"}, - {file = "mypy-0.770.tar.gz", hash = "sha256:8a627507ef9b307b46a1fea9513d5c98680ba09591253082b4c48697ba05a4ae"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = 
"packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, -] -pastel = [ - {file = "pastel-0.2.1-py2.py3-none-any.whl", hash = "sha256:4349225fcdf6c2bb34d483e523475de5bb04a5c10ef711263452cb37d7dd4364"}, - {file = "pastel-0.2.1.tar.gz", hash = "sha256:e6581ac04e973cac858828c6202c1e1e81fee1dc7de7683f3e1ffe0bfd8a573d"}, -] -pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, -] -pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, -] -poethepoet = [ - {file = "poethepoet-0.10.0-py3-none-any.whl", hash = "sha256:6fb3021603d4421c6fcc40072bbcf150a6c52ef70ff4d3be089b8b04e015ef5a"}, - {file = "poethepoet-0.10.0.tar.gz", hash = "sha256:70b97cb194b978dc464c70793e85e6f746cddf82b84a38bfb135946ad71ae19c"}, -] -protobuf = [ - {file = "protobuf-3.15.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a14141d5c967362d2eedff8825d2b69cc36a5b3ed6b1f618557a04e58a3cf787"}, - {file = "protobuf-3.15.7-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d54d78f621852ec4fdd1484d1263ca04d4bf5ffdf7abffdbb939e444b6ff3385"}, - {file = "protobuf-3.15.7-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:462085acdb410b06335315fe7e63cb281a1902856e0f4657f341c283cedc1d56"}, - {file = "protobuf-3.15.7-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:849c92ce112e1ef648705c29ce044248e350f71d9d54a2026830623198f0bd38"}, - {file = "protobuf-3.15.7-cp35-cp35m-win32.whl", hash = "sha256:1f6083382f7714700deadf3014e921711e2f807de7f27e40c32b744701ae5b99"}, - {file = "protobuf-3.15.7-cp35-cp35m-win_amd64.whl", hash = 
"sha256:e17f60f00081adcb32068ee0bb51e418f6474acf83424244ff3512ffd2166385"}, - {file = "protobuf-3.15.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c75e563c6fb2ca5b8f21dd75c15659aa2c4a0025b9da3a7711ae661cd6a488d"}, - {file = "protobuf-3.15.7-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d939f41b4108350841c4790ebbadb61729e1363522fdb8434eb4e6f2065d0db1"}, - {file = "protobuf-3.15.7-cp36-cp36m-win32.whl", hash = "sha256:24f14c09d4c0a3641f1b0e9b552d026361de65b01686fdd3e5fdf8f9512cd79b"}, - {file = "protobuf-3.15.7-cp36-cp36m-win_amd64.whl", hash = "sha256:1247170191bcb2a8d978d11a58afe391004ec6c2184e4d961baf8102d43ff500"}, - {file = "protobuf-3.15.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:364cadaeec0756afdc099cbd88cb5659bd1bb7d547168d063abcb0272ccbb2f6"}, - {file = "protobuf-3.15.7-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0c3a6941b1e6e6e22d812a8e5c46bfe83082ea60d262a46f2cfb22d9b9fb17db"}, - {file = "protobuf-3.15.7-cp37-cp37m-win32.whl", hash = "sha256:eb5668f3f6a83b6603ca2e09be5b20de89521ea5914aabe032cce981e4129cc8"}, - {file = "protobuf-3.15.7-cp37-cp37m-win_amd64.whl", hash = "sha256:1001e671cf8476edce7fb72778358d026390649cc35a79d47b2a291684ccfbb2"}, - {file = "protobuf-3.15.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a5ba7dd6f97964655aa7b234c95d80886425a31b7010764f042cdeb985314d18"}, - {file = "protobuf-3.15.7-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:46674bd6fcf8c63b4b9869ba579685db67cf51ae966443dd6bd9a8fa00fcef62"}, - {file = "protobuf-3.15.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4c4399156fb27e3768313b7a59352c861a893252bda6fb9f3643beb3ebb7047e"}, - {file = "protobuf-3.15.7-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:85cd29faf056036167d87445d5a5059034c298881c044e71a73d3b61a4be1c23"}, - {file = "protobuf-3.15.7-py2.py3-none-any.whl", hash = "sha256:22054432b923c0086f9cf1e1c0c52d39bf3c6e31014ea42eec2dabc22ee26d78"}, - {file = "protobuf-3.15.7.tar.gz", hash = 
"sha256:2d03fc2591543cd2456d0b72230b50c4519546a8d379ac6fd3ecd84c6df61e5d"}, -] -py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, -] -pygments = [ - {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, - {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, -] -pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, -] -pytest = [ - {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, - {file = "pytest-5.4.3.tar.gz", hash = "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8"}, -] -pytest-asyncio = [ - {file = "pytest-asyncio-0.12.0.tar.gz", hash = "sha256:475bd2f3dc0bc11d2463656b3cbaafdbec5a47b47508ea0b329ee693040eebd2"}, -] -pytest-cov = [ - {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, - {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, -] -pytest-mock = [ - {file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"}, - {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, -] -python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = 
"python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, -] -pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, -] -regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = 
"regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - 
{file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = 
"regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, -] -requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, -] -six = [ - {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, - {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, -] -snowballstemmer = [ - {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, - {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, -] -sphinx = [ - {file = "Sphinx-3.1.2-py3-none-any.whl", hash = "sha256:97dbf2e31fc5684bb805104b8ad34434ed70e6c588f6896991b2fdfd2bef8c00"}, - {file = "Sphinx-3.1.2.tar.gz", hash = "sha256:b9daeb9b39aa1ffefc2809b43604109825300300b987a24f45976c001ba1a8fd"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.5.0-py2.py3-none-any.whl", hash = "sha256:373413d0f82425aaa28fb288009bf0d0964711d347763af2f1b65cafcb028c82"}, - {file = "sphinx_rtd_theme-0.5.0.tar.gz", hash = "sha256:22c795ba2832a169ca301cd0a083f7a434e09c538c70beb42782c073651b707d"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = 
"sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"}, - {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"}, - {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tomlkit = [ - {file = "tomlkit-0.7.0-py2.py3-none-any.whl", hash = "sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831"}, - {file = "tomlkit-0.7.0.tar.gz", hash = "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618"}, -] -tox = [ 
- {file = "tox-3.23.0-py2.py3-none-any.whl", hash = "sha256:e007673f3595cede9b17a7c4962389e4305d4a3682a6c5a4159a1453b4f326aa"}, - {file = "tox-3.23.0.tar.gz", hash = "sha256:05a4dbd5e4d3d8269b72b55600f0b0303e2eb47ad5c6fe76d3576f4c58d93661"}, -] -typed-ast = [ - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, - {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, - {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, - {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, - {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, - {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, - {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, - {file = 
"typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, - {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, - {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, - {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, - {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, - {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, - {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, - {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, - {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, - {file = 
"typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, - {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, - {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, - {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, -] -typing-extensions = [ - {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, - {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, - {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, -] -urllib3 = [ - {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, - {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, -] -virtualenv = [ - {file = "virtualenv-20.4.3-py2.py3-none-any.whl", hash = "sha256:83f95875d382c7abafe06bd2a4cdd1b363e1bb77e02f155ebe8ac082a916b37c"}, - {file = "virtualenv-20.4.3.tar.gz", hash = "sha256:49ec4eb4c224c6f7dd81bb6d0a28a09ecae5894f4e593c89b0db0885f565a107"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = 
"sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, -] +lock-version = "2.1" +python-versions = ">=3.9,<4.0" +content-hash = "cf90b82485ce6837f190477b98778fedc112e9efb6b0dde487da9d65cd92db3b" diff --git a/pyproject.toml b/pyproject.toml index c13c84a2a..7b1b6e741 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,8 +1,10 @@ -[tool.poetry] +[project] name = "betterproto" -version = "2.0.0b3" +version = "2.0.0b7" description = "A better Protobuf / gRPC generator & library" -authors = ["Daniel G. Taylor "] +authors = [ + {name = "Daniel G. Taylor", email = "danielgtaylor@gmail.com"} +] readme = "README.md" repository = "https://github.com/danielgtaylor/python-betterproto" keywords = ["protobuf", "gRPC"] @@ -10,40 +12,54 @@ license = "MIT" packages = [ { include = "betterproto", from = "src" } ] +requires-python = ">=3.9,<4.0" +dynamic = ["dependencies"] [tool.poetry.dependencies] -python = "^3.6" -black = { version = ">=19.3b0", optional = true } -dataclasses = { version = "^0.7", python = ">=3.6, <3.7" } +# The Ruff version is pinned. 
To update it, also update it in .pre-commit-config.yaml +ruff = { version = "~0.9.1", optional = true } grpclib = "^0.4.1" -jinja2 = { version = "^2.11.2", optional = true } +jinja2 = { version = ">=3.0.3", optional = true } python-dateutil = "^2.8" - -[tool.poetry.dev-dependencies] -asv = "^0.4.2" -black = "^20.8b1" -bpython = "^0.19" -grpcio-tools = "^1.30.0" -jinja2 = "^2.11.2" -mypy = "^0.770" +typing-extensions = "^4.7.1" +betterproto-rust-codec = { version = "0.1.1", optional = true } + +[tool.poetry.group.dev.dependencies] +asv = "^0.6.4" +bpython = "^0.24" +jinja2 = ">=3.0.3" +mypy = "^1.11.2" +sphinx = "7.4.7" +sphinx-rtd-theme = "3.0.2" +pre-commit = "^4.0.1" +grpcio-tools = "^1.54.2" +tox = "^4.0.0" + +[tool.poetry.group.test.dependencies] poethepoet = ">=0.9.0" -protobuf = "^3.12.2" -pytest = "^5.4.2" -pytest-asyncio = "^0.12.0" -pytest-cov = "^2.9.0" +pytest = "^7.4.4" +pytest-asyncio = "^0.23.8" +pytest-cov = "^6.0.0" pytest-mock = "^3.1.1" -sphinx = "3.1.2" -sphinx-rtd-theme = "0.5.0" -tomlkit = "^0.7.0" -tox = "^3.15.1" - +pydantic = ">=2.0,<3" +protobuf = "^5" +cachelib = "^0.13.0" +tomlkit = ">=0.7.0" -[tool.poetry.scripts] +[project.scripts] protoc-gen-python_betterproto = "betterproto.plugin:main" -[tool.poetry.extras] -compiler = ["black", "jinja2"] +[project.optional-dependencies] +compiler = ["ruff", "jinja2"] +rust-codec = ["betterproto-rust-codec"] +[tool.ruff] +extend-exclude = ["tests/output_*"] +target-version = "py38" + +[tool.ruff.lint.isort] +combine-as-imports = true +lines-after-imports = 2 # Dev workflow tasks @@ -60,8 +76,28 @@ cmd = "mypy src --ignore-missing-imports" help = "Check types with mypy" [tool.poe.tasks.format] -cmd = "black . 
--exclude tests/output_" -help = "Apply black formatting to source code" +sequence = ["_format", "_sort-imports"] +help = "Format the source code, and sort the imports" + +[tool.poe.tasks.check] +sequence = ["_check-format", "_check-imports"] +help = "Check that the source code is formatted and the imports sorted" + +[tool.poe.tasks._format] +cmd = "ruff format src tests" +help = "Format the source code without sorting the imports" + +[tool.poe.tasks._sort-imports] +cmd = "ruff check --select I --fix src tests" +help = "Sort the imports" + +[tool.poe.tasks._check-format] +cmd = "ruff format --diff src tests" +help = "Check that the source code is formatted" + +[tool.poe.tasks._check-imports] +cmd = "ruff check --select I src tests" +help = "Check that the imports are sorted" [tool.poe.tasks.docs] cmd = "sphinx-build docs docs/build" @@ -84,11 +120,11 @@ cmd = """ protoc --plugin=protoc-gen-custom=src/betterproto/plugin/main.py --custom_opt=INCLUDE_GOOGLE - --custom_out=src/betterproto/lib + --custom_out=src/betterproto/lib/std -I /usr/local/include/ /usr/local/include/google/protobuf/**/*.proto """ -help = "Regenerate the types in betterproto.lib.google" +help = "Regenerate the types in betterproto.lib.std.google" # CI tasks @@ -96,13 +132,14 @@ help = "Regenerate the types in betterproto.lib.google" shell = "poe generate && tox" help = "Run tests with multiple pythons" -[tool.poe.tasks.check-style] -cmd = "black . 
--check --diff --exclude tests/output_" -help = "Check if code style is correct" +[tool.doc8] +paths = ["docs"] +max_line_length = 88 - -[tool.black] -target-version = ['py36'] +[tool.doc8.ignore_path_errors] +"docs/migrating.rst" = [ + "D001", # contains table which is longer than 88 characters long +] [tool.coverage.run] omit = ["betterproto/tests/*"] @@ -110,16 +147,23 @@ omit = ["betterproto/tests/*"] [tool.tox] legacy_tox_ini = """ [tox] -isolated_build = true -envlist = py36, py37, py38 +requires = + tox>=4.2 + tox-poetry-installer[poetry]==1.0.0b1 +env_list = + py311 + py38 + py37 [testenv] -whitelist_externals = poetry commands = - poetry install -v --extras compiler - poetry run pytest --cov betterproto + pytest {posargs: --cov betterproto} +poetry_dep_groups = + test +require_locked_deps = true +require_poetry = true """ [build-system] -requires = ["poetry-core>=1.0.0,<2"] +requires = ["poetry-core>=2.0.0,<3"] build-backend = "poetry.core.masonry.api" diff --git a/src/betterproto/__init__.py b/src/betterproto/__init__.py index 05b8b7cd1..ce8a26a4d 100644 --- a/src/betterproto/__init__.py +++ b/src/betterproto/__init__.py @@ -1,22 +1,35 @@ +from __future__ import annotations + import dataclasses -import enum -import inspect +import enum as builtin_enum import json import math import struct import sys import typing +import warnings from abc import ABC -from base64 import b64decode, b64encode -from datetime import datetime, timedelta, timezone -from dateutil.parser import isoparse +from base64 import ( + b64decode, + b64encode, +) +from copy import deepcopy +from datetime import ( + datetime, + timedelta, + timezone, +) +from io import BytesIO +from itertools import count from typing import ( + TYPE_CHECKING, Any, Callable, + ClassVar, Dict, Generator, Iterable, - List, + Mapping, Optional, Set, Tuple, @@ -25,10 +38,35 @@ get_type_hints, ) +from dateutil.parser import isoparse +from typing_extensions import Self + from ._types import T from ._version import 
__version__ -from .casing import camel_case, safe_snake_case, snake_case -from .grpc.grpclib_client import ServiceStub +from .casing import ( + camel_case, + safe_snake_case, + snake_case, +) +from .enum import Enum as Enum +from .grpc.grpclib_client import ServiceStub as ServiceStub +from .utils import ( + classproperty, + hybridmethod, +) + + +if TYPE_CHECKING: + from _typeshed import ( + SupportsRead, + SupportsWrite, + ) + +if sys.version_info >= (3, 10): + from types import UnionType as _types_UnionType +else: + + class _types_UnionType: ... # Proto 3 data types @@ -51,7 +89,6 @@ TYPE_MESSAGE = "message" TYPE_MAP = "map" - # Fields that use a fixed amount of space (4 or 8 bytes) FIXED_TYPES = [ TYPE_FLOAT, @@ -106,6 +143,9 @@ WIRE_FIXED_64_TYPES = [TYPE_DOUBLE, TYPE_FIXED64, TYPE_SFIXED64] WIRE_LEN_DELIM_TYPES = [TYPE_STRING, TYPE_BYTES, TYPE_MESSAGE, TYPE_MAP] +# Indicator of message delimitation in streams +SIZE_DELIMITED = -1 + # Protobuf datetimes start at the Unix Epoch in 1970 in UTC. def datetime_default_gen() -> datetime: @@ -121,14 +161,29 @@ def datetime_default_gen() -> datetime: NAN = "NaN" -class Casing(enum.Enum): +class Casing(builtin_enum.Enum): """Casing constants for serialization.""" CAMEL = camel_case #: A camelCase sterilization function. SNAKE = snake_case #: A snake_case sterilization function. -PLACEHOLDER: Any = object() +class Placeholder: + __slots__ = () + + def __repr__(self) -> str: + return "" + + def __copy__(self) -> Self: + return self + + def __deepcopy__(self, _) -> Self: + return self + + +# We can't simply use object() here because pydantic automatically performs deep-copy of mutable default values +# See #606 +PLACEHOLDER: Any = Placeholder() @dataclasses.dataclass(frozen=True) @@ -145,6 +200,8 @@ class FieldMetadata: group: Optional[str] = None # Describes the wrapped type (e.g. 
when using google.protobuf.BoolValue) wraps: Optional[str] = None + # Is the field optional + optional: Optional[bool] = False @staticmethod def get(field: dataclasses.Field) -> "FieldMetadata": @@ -159,12 +216,15 @@ def dataclass_field( map_types: Optional[Tuple[str, str]] = None, group: Optional[str] = None, wraps: Optional[str] = None, + optional: bool = False, ) -> dataclasses.Field: """Creates a dataclass field with attached protobuf metadata.""" return dataclasses.field( - default=PLACEHOLDER, + default=None if optional else PLACEHOLDER, # type: ignore metadata={ - "betterproto": FieldMetadata(number, proto_type, map_types, group, wraps) + "betterproto": FieldMetadata( + number, proto_type, map_types, group, wraps, optional + ) }, ) @@ -174,74 +234,107 @@ def dataclass_field( # out at runtime. The generated dataclass variables are still typed correctly. -def enum_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_ENUM, group=group) +def enum_field(number: int, group: Optional[str] = None, optional: bool = False) -> Any: + return dataclass_field(number, TYPE_ENUM, group=group, optional=optional) -def bool_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_BOOL, group=group) +def bool_field(number: int, group: Optional[str] = None, optional: bool = False) -> Any: + return dataclass_field(number, TYPE_BOOL, group=group, optional=optional) -def int32_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_INT32, group=group) +def int32_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_INT32, group=group, optional=optional) -def int64_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_INT64, group=group) +def int64_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return 
dataclass_field(number, TYPE_INT64, group=group, optional=optional) -def uint32_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_UINT32, group=group) +def uint32_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_UINT32, group=group, optional=optional) -def uint64_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_UINT64, group=group) +def uint64_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_UINT64, group=group, optional=optional) -def sint32_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_SINT32, group=group) +def sint32_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_SINT32, group=group, optional=optional) -def sint64_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_SINT64, group=group) +def sint64_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_SINT64, group=group, optional=optional) -def float_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_FLOAT, group=group) +def float_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_FLOAT, group=group, optional=optional) -def double_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_DOUBLE, group=group) +def double_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_DOUBLE, group=group, optional=optional) -def fixed32_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_FIXED32, 
group=group) +def fixed32_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_FIXED32, group=group, optional=optional) -def fixed64_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_FIXED64, group=group) +def fixed64_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_FIXED64, group=group, optional=optional) -def sfixed32_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_SFIXED32, group=group) +def sfixed32_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_SFIXED32, group=group, optional=optional) -def sfixed64_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_SFIXED64, group=group) +def sfixed64_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_SFIXED64, group=group, optional=optional) -def string_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_STRING, group=group) +def string_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_STRING, group=group, optional=optional) -def bytes_field(number: int, group: Optional[str] = None) -> Any: - return dataclass_field(number, TYPE_BYTES, group=group) +def bytes_field( + number: int, group: Optional[str] = None, optional: bool = False +) -> Any: + return dataclass_field(number, TYPE_BYTES, group=group, optional=optional) def message_field( - number: int, group: Optional[str] = None, wraps: Optional[str] = None + number: int, + group: Optional[str] = None, + wraps: Optional[str] = None, + optional: bool = False, ) -> Any: - return dataclass_field(number, TYPE_MESSAGE, group=group, wraps=wraps) + 
return dataclass_field( + number, TYPE_MESSAGE, group=group, wraps=wraps, optional=optional + ) def map_field( @@ -252,32 +345,6 @@ def map_field( ) -class Enum(enum.IntEnum): - """ - The base class for protobuf enumerations, all generated enumerations will inherit - from this. Bases :class:`enum.IntEnum`. - """ - - @classmethod - def from_string(cls, name: str) -> "Enum": - """Return the value which corresponds to the string name. - - Parameters - ----------- - name: :class:`str` - The name of the enum member to get - - Raises - ------- - :exc:`ValueError` - The member was not found in the Enum. - """ - try: - return cls._member_map_[name] # type: ignore - except KeyError as e: - raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e - - def _pack_fmt(proto_type: str) -> str: """Returns a little-endian format string for reading/writing binary.""" return { @@ -290,20 +357,43 @@ def _pack_fmt(proto_type: str) -> str: }[proto_type] -def encode_varint(value: int) -> bytes: - """Encodes a single varint value for serialization.""" - b: List[int] = [] - - if value < 0: +def dump_varint(value: int, stream: "SupportsWrite[bytes]") -> None: + """Encodes a single varint and dumps it into the provided stream.""" + if value < -(1 << 63): + raise ValueError( + "Negative value is not representable as a 64-bit integer - unable to encode a varint within 10 bytes." 
+ ) + elif value < 0: value += 1 << 64 bits = value & 0x7F value >>= 7 while value: - b.append(0x80 | bits) + stream.write((0x80 | bits).to_bytes(1, "little")) bits = value & 0x7F value >>= 7 - return bytes(b + [bits]) + stream.write(bits.to_bytes(1, "little")) + + +def encode_varint(value: int) -> bytes: + """Encodes a single varint value for serialization.""" + with BytesIO() as stream: + dump_varint(value, stream) + return stream.getvalue() + + +def size_varint(value: int) -> int: + """Calculates the size in bytes that a value would take as a varint.""" + if value < -(1 << 63): + raise ValueError( + "Negative value is not representable as a 64-bit integer - unable to encode a varint within 10 bytes." + ) + elif value < 0: + return 10 + elif value == 0: + return 1 + else: + return math.ceil(value.bit_length() / 7) def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes: @@ -327,15 +417,10 @@ def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes: elif proto_type == TYPE_MESSAGE: if isinstance(value, datetime): # Convert the `datetime` to a timestamp message. - seconds = int(value.timestamp()) - nanos = int(value.microsecond * 1e3) - value = _Timestamp(seconds=seconds, nanos=nanos) + value = _Timestamp.from_datetime(value) elif isinstance(value, timedelta): # Convert the `timedelta` to a duration message. 
- total_ms = value // timedelta(microseconds=1) - seconds = int(total_ms / 1e6) - nanos = int((total_ms % 1e6) * 1e3) - value = _Duration(seconds=seconds, nanos=nanos) + value = _Duration.from_timedelta(value) elif wraps: if value is None: return b"" @@ -346,6 +431,41 @@ def _preprocess_single(proto_type: str, wraps: str, value: Any) -> bytes: return value +def _len_preprocessed_single(proto_type: str, wraps: str, value: Any) -> int: + """Calculate the size of adjusted values for serialization without fully serializing them.""" + if proto_type in ( + TYPE_ENUM, + TYPE_BOOL, + TYPE_INT32, + TYPE_INT64, + TYPE_UINT32, + TYPE_UINT64, + ): + return size_varint(value) + elif proto_type in (TYPE_SINT32, TYPE_SINT64): + # Handle zig-zag encoding. + return size_varint(value << 1 if value >= 0 else (value << 1) ^ (~0)) + elif proto_type in FIXED_TYPES: + return len(struct.pack(_pack_fmt(proto_type), value)) + elif proto_type == TYPE_STRING: + return len(value.encode("utf-8")) + elif proto_type == TYPE_MESSAGE: + if isinstance(value, datetime): + # Convert the `datetime` to a timestamp message. + value = _Timestamp.from_datetime(value) + elif isinstance(value, timedelta): + # Convert the `timedelta` to a duration message. 
+ value = _Duration.from_timedelta(value) + elif wraps: + if value is None: + return 0 + value = _get_wrapper(wraps)(value=value) + + return len(bytes(value)) + + return len(value) + + def _serialize_single( field_number: int, proto_type: str, @@ -377,12 +497,37 @@ def _serialize_single( return bytes(output) +def _len_single( + field_number: int, + proto_type: str, + value: Any, + *, + serialize_empty: bool = False, + wraps: str = "", +) -> int: + """Calculates the size of a serialized single field and value.""" + size = _len_preprocessed_single(proto_type, wraps, value) + if proto_type in WIRE_VARINT_TYPES: + size += size_varint(field_number << 3) + elif proto_type in WIRE_FIXED_32_TYPES: + size += size_varint((field_number << 3) | 5) + elif proto_type in WIRE_FIXED_64_TYPES: + size += size_varint((field_number << 3) | 1) + elif proto_type in WIRE_LEN_DELIM_TYPES: + if size or serialize_empty or wraps: + size += size_varint((field_number << 3) | 2) + size_varint(size) + else: + raise NotImplementedError(proto_type) + + return size + + def _parse_float(value: Any) -> float: """Parse the given value to a float Parameters ---------- - value : Any + value: Any Value to parse Returns @@ -404,40 +549,51 @@ def _dump_float(value: float) -> Union[float, str]: Parameters ---------- - value : float + value: float Value to dump Returns ------- Union[float, str] - Dumped valid, either a float or the strings - "Infinity" or "-Infinity" + Dumped value, either a float or the strings """ if value == float("inf"): return INFINITY if value == -float("inf"): return NEG_INFINITY - if value == float("nan"): + if isinstance(value, float) and math.isnan(value): return NAN return value -def decode_varint(buffer: bytes, pos: int) -> Tuple[int, int]: +def load_varint(stream: "SupportsRead[bytes]") -> Tuple[int, bytes]: """ - Decode a single varint value from a byte buffer. Returns the value and the - new position in the buffer. + Load a single varint value from a stream. 
Returns the value and the raw bytes read. """ result = 0 - shift = 0 - while 1: - b = buffer[pos] - result |= (b & 0x7F) << shift - pos += 1 - if not (b & 0x80): - return result, pos - shift += 7 + raw = b"" + for shift in count(0, 7): if shift >= 64: raise ValueError("Too many bytes when decoding varint.") + b = stream.read(1) + if not b: + raise EOFError("Stream ended unexpectedly while attempting to load varint.") + raw += b + b_int = int.from_bytes(b, byteorder="little") + result |= (b_int & 0x7F) << shift + if not (b_int & 0x80): + return result, raw + + +def decode_varint(buffer: bytes, pos: int) -> Tuple[int, int]: + """ + Decode a single varint value from a byte buffer. Returns the value and the + new position in the buffer. + """ + with BytesIO(buffer) as stream: + stream.seek(pos) + value, raw = load_varint(stream) + return value, pos + len(raw) @dataclasses.dataclass(frozen=True) @@ -448,6 +604,34 @@ class ParsedField: raw: bytes +def load_fields(stream: "SupportsRead[bytes]") -> Generator[ParsedField, None, None]: + while True: + try: + num_wire, raw = load_varint(stream) + except EOFError: + return + number = num_wire >> 3 + wire_type = num_wire & 0x7 + + decoded: Any = None + if wire_type == WIRE_VARINT: + decoded, r = load_varint(stream) + raw += r + elif wire_type == WIRE_FIXED_64: + decoded = stream.read(8) + raw += decoded + elif wire_type == WIRE_LEN_DELIM: + length, r = load_varint(stream) + decoded = stream.read(length) + raw += r + raw += decoded + elif wire_type == WIRE_FIXED_32: + decoded = stream.read(4) + raw += decoded + + yield ParsedField(number=number, wire_type=wire_type, value=decoded, raw=raw) + + def parse_fields(value: bytes) -> Generator[ParsedField, None, None]: i = 0 while i < len(value): @@ -574,6 +758,7 @@ class Message(ABC): _serialized_on_wire: bool _unknown_fields: bytes _group_current: Dict[str, str] + _betterproto_meta: ClassVar[ProtoClassMetadata] def __post_init__(self) -> None: # Keep track of whether every field was 
default @@ -582,11 +767,11 @@ def __post_init__(self) -> None: # Set current field of each group after `__init__` has already been run. group_current: Dict[str, Optional[str]] = {} for field_name, meta in self._betterproto.meta_by_field_name.items(): - if meta.group: group_current.setdefault(meta.group) - if self.__raw_get(field_name) != PLACEHOLDER: + value = self.__raw_get(field_name) + if value is not PLACEHOLDER and not (meta.optional and value is None): # Found a non-sentinel value all_sentinel = False @@ -604,7 +789,7 @@ def __raw_get(self, name: str) -> Any: def __eq__(self, other) -> bool: if type(self) is not type(other): - return False + return NotImplemented for field_name in self._betterproto.meta_by_field_name: self_val = self.__raw_get(field_name) @@ -641,20 +826,53 @@ def __repr__(self) -> str: ] return f"{self.__class__.__name__}({', '.join(parts)})" - def __getattribute__(self, name: str) -> Any: - """ - Lazily initialize default values to avoid infinite recursion for recursive - message types - """ - value = super().__getattribute__(name) - if value is not PLACEHOLDER: - return value + def __rich_repr__(self) -> Iterable[Tuple[str, Any, Any]]: + for field_name in self._betterproto.sorted_field_names: + yield field_name, self.__raw_get(field_name), PLACEHOLDER + + if not TYPE_CHECKING: + + def __getattribute__(self, name: str) -> Any: + """ + Lazily initialize default values to avoid infinite recursion for recursive + message types. + Raise :class:`AttributeError` on attempts to access unset ``oneof`` fields. 
+ """ + try: + group_current = super().__getattribute__("_group_current") + except AttributeError: + pass + else: + if name not in {"__class__", "_betterproto"}: + group = self._betterproto.oneof_group_by_field.get(name) + if group is not None and group_current[group] != name: + if sys.version_info < (3, 10): + raise AttributeError( + f"{group!r} is set to {group_current[group]!r}, not {name!r}" + ) + else: + raise AttributeError( + f"{group!r} is set to {group_current[group]!r}, not {name!r}", + name=name, + obj=self, + ) - value = self._get_field_default(name) - super().__setattr__(name, value) - return value + value = super().__getattribute__(name) + if value is not PLACEHOLDER: + return value + + value = self._get_field_default(name) + super().__setattr__(name, value) + return value def __setattr__(self, attr: str, value: Any) -> None: + if ( + isinstance(value, Message) + and hasattr(value, "_betterproto") + and not value._betterproto.meta_by_field_name + ): + value._serialized_on_wire = True + if attr != "_serialized_on_wire": # Track when a field has been set. self.__dict__["_serialized_on_wire"] = True @@ -678,38 +896,68 @@ def __bool__(self) -> bool: for field_name in self._betterproto.meta_by_field_name ) - @property - def _betterproto(self) -> ProtoClassMetadata: + def __deepcopy__(self: T, _: Any = {}) -> T: + kwargs = {} + for name in self._betterproto.sorted_field_names: + value = self.__raw_get(name) + if value is not PLACEHOLDER: + kwargs[name] = deepcopy(value) + return self.__class__(**kwargs) # type: ignore + + def __copy__(self: T, _: Any = {}) -> T: + kwargs = {} + for name in self._betterproto.sorted_field_names: + value = self.__raw_get(name) + if value is not PLACEHOLDER: + kwargs[name] = value + return self.__class__(**kwargs) # type: ignore + + @classproperty + def _betterproto(cls: type[Self]) -> ProtoClassMetadata: # type: ignore """ Lazy initialize metadata for each protobuf class. 
It may be initialized multiple times in a multi-threaded environment, but that won't affect the correctness. """ - meta = getattr(self.__class__, "_betterproto_meta", None) - if not meta: - meta = ProtoClassMetadata(self.__class__) - self.__class__._betterproto_meta = meta # type: ignore - return meta + try: + return cls._betterproto_meta + except AttributeError: + cls._betterproto_meta = meta = ProtoClassMetadata(cls) + return meta - def __bytes__(self) -> bytes: + def dump(self, stream: "SupportsWrite[bytes]", delimit: bool = False) -> None: """ - Get the binary encoded Protobuf representation of this message instance. + Dumps the binary encoded Protobuf message to the stream. + + Parameters + ----------- + stream: :class:`BinaryIO` + The stream to dump the message to. + delimit: + Whether to prefix the message with a varint declaring its size. """ - output = bytearray() + if delimit == SIZE_DELIMITED: + dump_varint(len(self), stream) + for field_name, meta in self._betterproto.meta_by_field_name.items(): - value = getattr(self, field_name) + try: + value = getattr(self, field_name) + except AttributeError: + continue if value is None: # Optional items should be skipped. This is used for the Google - # wrapper types. + # wrapper types and proto3 field presence/optional fields. continue # Being selected in a a group means this field is the one that is # currently set in a `oneof` group, so it must be serialized even # if the value is the default zero value. - selected_in_group = ( - meta.group and self._group_current[meta.group] == field_name - ) + # + # Note that proto3 field presence/optional fields are put in a + # synthetic single-item oneof by protoc, which helps us ensure we + # send the value even if the value is the default zero value. + selected_in_group = bool(meta.group) or meta.optional # Empty messages can still be sent on the wire if they were # set (or received empty). 
@@ -736,15 +984,16 @@ def __bytes__(self) -> bytes: buf = bytearray() for item in value: buf += _preprocess_single(meta.proto_type, "", item) - output += _serialize_single(meta.number, TYPE_BYTES, buf) + stream.write(_serialize_single(meta.number, TYPE_BYTES, buf)) else: for item in value: - output += ( + stream.write( _serialize_single( meta.number, meta.proto_type, item, wraps=meta.wraps or "", + serialize_empty=True, ) # if it's an empty message it still needs to be represented # as an item in the repeated list @@ -756,7 +1005,113 @@ def __bytes__(self) -> bytes: assert meta.map_types sk = _serialize_single(1, meta.map_types[0], k) sv = _serialize_single(2, meta.map_types[1], v) - output += _serialize_single(meta.number, meta.proto_type, sk + sv) + stream.write( + _serialize_single(meta.number, meta.proto_type, sk + sv) + ) + else: + # If we have an empty string and we're including the default value for + # a oneof, make sure we serialize it. This ensures that the byte string + # output isn't simply an empty string. This also ensures that round trip + # serialization will keep `which_one_of` calls consistent. + if ( + isinstance(value, str) + and value == "" + and include_default_value_for_oneof + ): + serialize_empty = True + + stream.write( + _serialize_single( + meta.number, + meta.proto_type, + value, + serialize_empty=serialize_empty or bool(selected_in_group), + wraps=meta.wraps or "", + ) + ) + + stream.write(self._unknown_fields) + + def __bytes__(self) -> bytes: + """ + Get the binary encoded Protobuf representation of this message instance. + """ + with BytesIO() as stream: + self.dump(stream) + return stream.getvalue() + + def __len__(self) -> int: + """ + Get the size of the encoded Protobuf representation of this message instance. 
+ """ + size = 0 + for field_name, meta in self._betterproto.meta_by_field_name.items(): + try: + value = getattr(self, field_name) + except AttributeError: + continue + + if value is None: + # Optional items should be skipped. This is used for the Google + # wrapper types and proto3 field presence/optional fields. + continue + + # Being selected in a group means this field is the one that is + # currently set in a `oneof` group, so it must be serialized even + # if the value is the default zero value. + # + # Note that proto3 field presence/optional fields are put in a + # synthetic single-item oneof by protoc, which helps us ensure we + # send the value even if the value is the default zero value. + selected_in_group = bool(meta.group) + + # Empty messages can still be sent on the wire if they were + # set (or received empty). + serialize_empty = isinstance(value, Message) and value._serialized_on_wire + + include_default_value_for_oneof = self._include_default_value_for_oneof( + field_name=field_name, meta=meta + ) + + if value == self._get_field_default(field_name) and not ( + selected_in_group or serialize_empty or include_default_value_for_oneof + ): + # Default (zero) values are not serialized. Two exceptions are + # if this is the selected oneof item or if we know we have to + # serialize an empty message (i.e. zero value was explicitly + # set by the user). + continue + + if isinstance(value, list): + if meta.proto_type in PACKED_TYPES: + # Packed lists look like a length-delimited field. First, + # preprocess/encode each value into a buffer and then + # treat it like a field of raw bytes. 
+ buf = bytearray() + for item in value: + buf += _preprocess_single(meta.proto_type, "", item) + size += _len_single(meta.number, TYPE_BYTES, buf) + else: + for item in value: + size += ( + _len_single( + meta.number, + meta.proto_type, + item, + wraps=meta.wraps or "", + serialize_empty=True, + ) + # if it's an empty message it still needs to be represented + # as an item in the repeated list + or 2 + ) + + elif isinstance(value, dict): + for k, v in value.items(): + assert meta.map_types + sk = _serialize_single(1, meta.map_types[0], k) + sv = _serialize_single(2, meta.map_types[1], v) + size += _len_single(meta.number, meta.proto_type, sk + sv) else: # If we have an empty string and we're including the default value for # a oneof, make sure we serialize it. This ensures that the byte string @@ -769,7 +1124,7 @@ def __bytes__(self) -> bytes: ): serialize_empty = True - output += _serialize_single( + size += _len_single( meta.number, meta.proto_type, value, @@ -777,8 +1132,8 @@ def __bytes__(self) -> bytes: wraps=meta.wraps or "", ) - output += self._unknown_fields - return bytes(output) + size += len(self._unknown_fields) + return size # For compatibility with other libraries def SerializeToString(self: T) -> bytes: @@ -796,6 +1151,15 @@ def SerializeToString(self: T) -> bytes: """ return bytes(self) + def __getstate__(self) -> bytes: + return bytes(self) + + def __setstate__(self: T, pickled_bytes: bytes) -> T: + return self.parse(pickled_bytes) + + def __reduce__(self) -> Tuple[Any, ...]: + return (self.__class__.FromString, (bytes(self),)) + @classmethod def _type_hint(cls, field_name: str) -> Type: return cls._type_hints()[field_name] @@ -803,7 +1167,7 @@ def _type_hint(cls, field_name: str) -> Type: @classmethod def _type_hints(cls) -> Dict[str, Type]: module = sys.modules[cls.__module__] - return get_type_hints(cls, vars(module)) + return get_type_hints(cls, module.__dict__, {}) @classmethod def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> 
Type: @@ -815,35 +1179,38 @@ def _cls_for(cls, field: dataclasses.Field, index: int = 0) -> Type: return field_cls def _get_field_default(self, field_name: str) -> Any: - return self._betterproto.default_gen[field_name]() + with warnings.catch_warnings(): + # ignore warnings when initialising deprecated field defaults + warnings.filterwarnings("ignore", category=DeprecationWarning) + return self._betterproto.default_gen[field_name]() @classmethod def _get_field_default_gen(cls, field: dataclasses.Field) -> Any: t = cls._type_hint(field.name) - if hasattr(t, "__origin__"): - if t.__origin__ in (dict, Dict): - # This is some kind of map (dict in Python). - return dict - elif t.__origin__ in (list, List): + is_310_union = isinstance(t, _types_UnionType) + if hasattr(t, "__origin__") or is_310_union: + if is_310_union or t.__origin__ is Union: + # This is an optional field (either wrapped, or using proto3 + # field presence). For setting the default we really don't care + # what kind of field it is. + return type(None) + if t.__origin__ is list: # This is some kind of list (repeated) field. return list - elif t.__origin__ is Union and t.__args__[1] is type(None): - # This is an optional (wrapped) field. For setting the default we - # really don't care what kind of field it is. - return type(None) - else: - return t - elif issubclass(t, Enum): + if t.__origin__ is dict: + # This is some kind of map (dict in Python). + return dict + return t + if issubclass(t, Enum): # Enums always default to zero. - return int - elif t is datetime: + return t.try_value + if t is datetime: # Offsets are relative to 1970-01-01T00:00:00Z return datetime_default_gen - else: - # This is either a primitive scalar or another message type. Calling - # it should result in its zero value. - return t + # This is either a primitive scalar or another message type. Calling + # it should result in its zero value. 
+ return t def _postprocess_single( self, wire_type: int, meta: FieldMetadata, field_name: str, value: Any @@ -861,12 +1228,15 @@ def _postprocess_single( elif meta.proto_type == TYPE_BOOL: # Booleans use a varint encoding, so convert it to true/false. value = value > 0 + elif meta.proto_type == TYPE_ENUM: + # Convert enum ints to python enum instances + value = self._betterproto.cls_by_field[field_name].try_value(value) elif wire_type in (WIRE_FIXED_32, WIRE_FIXED_64): fmt = _pack_fmt(meta.proto_type) value = struct.unpack(fmt, value)[0] elif wire_type == WIRE_LEN_DELIM: if meta.proto_type == TYPE_STRING: - value = value.decode("utf-8") + value = str(value, "utf-8") elif meta.proto_type == TYPE_MESSAGE: cls = self._betterproto.cls_by_field[field_name] @@ -893,25 +1263,38 @@ def _include_default_value_for_oneof( meta.group is not None and self._group_current.get(meta.group) == field_name ) - def parse(self: T, data: bytes) -> T: + def load( + self: T, + stream: "SupportsRead[bytes]", + size: Optional[int] = None, + ) -> T: """ - Parse the binary encoded Protobuf into this message instance. This + Load the binary encoded Protobuf from a stream into this message instance. This returns the instance itself and is therefore assignable and chainable. Parameters ----------- - data: :class:`bytes` - The data to parse the protobuf from. + stream: :class:`bytes` + The stream to load the message from. + size: :class:`Optional[int]` + The size of the message in the stream. + Reads stream until EOF if ``None`` is given. + Reads based on a size delimiter prefix varint if SIZE_DELIMITED is given. Returns -------- :class:`Message` The initialized message. 
""" + # If the message is delimited, parse the message delimiter + if size == SIZE_DELIMITED: + size, _ = load_varint(stream) + # Got some data over the wire self._serialized_on_wire = True proto_meta = self._betterproto - for parsed in parse_fields(data): + read = 0 + for parsed in load_fields(stream): field_name = proto_meta.field_name_by_number.get(parsed.number) if not field_name: self._unknown_fields += parsed.raw @@ -943,7 +1326,12 @@ def parse(self: T, data: bytes) -> T: parsed.wire_type, meta, field_name, parsed.value ) - current = getattr(self, field_name) + try: + current = getattr(self, field_name) + except AttributeError: + current = self._get_field_default(field_name) + setattr(self, field_name, current) + if meta.proto_type == TYPE_MAP: # Value represents a single key/value pair entry in the map. current[value.key] = value.value @@ -952,8 +1340,46 @@ def parse(self: T, data: bytes) -> T: else: setattr(self, field_name, value) + # If we have now loaded the expected length of the message, stop + if size is not None: + prev = read + read += len(parsed.raw) + if read == size: + break + elif read > size: + raise ValueError( + f"Expected message of size {size}, can only read " + f"either {prev} or {read} bytes - there is no " + "message of the expected size in the stream." + ) + + if size is not None and read < size: + raise ValueError( + f"Expected message of size {size}, but was only able to " + f"read {read} bytes - the stream may have ended too soon," + " or the expected size may have been incorrect." + ) + return self + def parse(self: T, data: bytes) -> T: + """ + Parse the binary encoded Protobuf into this message instance. This + returns the instance itself and is therefore assignable and chainable. + + Parameters + ----------- + data: :class:`bytes` + The data to parse the message from. + + Returns + -------- + :class:`Message` + The initialized message. 
+ """ + with BytesIO(data) as stream: + return self.load(stream) + # For compatibility with other libraries. @classmethod def FromString(cls: Type[T], data: bytes) -> T: @@ -1004,7 +1430,10 @@ def to_dict( defaults = self._betterproto.default_gen for field_name, meta in self._betterproto.meta_by_field_name.items(): field_is_repeated = defaults[field_name] is list - value = getattr(self, field_name) + try: + value = getattr(self, field_name) + except AttributeError: + value = self._get_field_default(field_name) cased_name = casing(field_name).rstrip("_") # type: ignore if meta.proto_type == TYPE_MESSAGE: if isinstance(value, datetime): @@ -1041,6 +1470,9 @@ def to_dict( ] if value or include_default_values: output[cased_name] = value + elif value is None: + if include_default_values: + output[cased_name] = value elif ( value._serialized_on_wire or include_default_values @@ -1050,12 +1482,13 @@ def to_dict( ): output[cased_name] = value.to_dict(casing, include_default_values) elif meta.proto_type == TYPE_MAP: + output_map = {**value} for k in value: if hasattr(value[k], "to_dict"): - value[k] = value[k].to_dict(casing, include_default_values) + output_map[k] = value[k].to_dict(casing, include_default_values) if value or include_default_values: - output[cased_name] = value + output[cased_name] = output_map elif ( value != self._get_field_default(field_name) or include_default_values @@ -1066,6 +1499,9 @@ def to_dict( if meta.proto_type in INT_64_TYPES: if field_is_repeated: output[cased_name] = [str(n) for n in value] + elif value is None: + if include_default_values: + output[cased_name] = value else: output[cased_name] = str(value) elif meta.proto_type == TYPE_BYTES: @@ -1073,6 +1509,8 @@ def to_dict( output[cased_name] = [ b64encode(b).decode("utf8") for b in value ] + elif value is None and include_default_values: + output[cased_name] = value else: output[cased_name] = b64encode(value).decode("utf8") elif meta.proto_type == TYPE_ENUM: @@ -1085,6 +1523,12 @@ def 
to_dict( else: # transparently upgrade single value to repeated output[cased_name] = [enum_class(value).name] + elif value is None: + if include_default_values: + output[cased_name] = value + elif meta.optional: + enum_class = field_types[field_name].__args__[0] + output[cased_name] = enum_class(value).name else: enum_class = field_types[field_name] # noqa output[cased_name] = enum_class(value).name @@ -1097,10 +1541,74 @@ def to_dict( output[cased_name] = value return output - def from_dict(self: T, value: Dict[str, Any]) -> T: + @classmethod + def _from_dict_init(cls, mapping: Mapping[str, Any]) -> Mapping[str, Any]: + init_kwargs: Dict[str, Any] = {} + for key, value in mapping.items(): + field_name = safe_snake_case(key) + try: + meta = cls._betterproto.meta_by_field_name[field_name] + except KeyError: + continue + if value is None: + continue + + if meta.proto_type == TYPE_MESSAGE: + sub_cls = cls._betterproto.cls_by_field[field_name] + if sub_cls == datetime: + value = ( + [isoparse(item) for item in value] + if isinstance(value, list) + else isoparse(value) + ) + elif sub_cls == timedelta: + value = ( + [timedelta(seconds=float(item[:-1])) for item in value] + if isinstance(value, list) + else timedelta(seconds=float(value[:-1])) + ) + elif not meta.wraps: + value = ( + [sub_cls.from_dict(item) for item in value] + if isinstance(value, list) + else sub_cls.from_dict(value) + ) + elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: + sub_cls = cls._betterproto.cls_by_field[f"{field_name}.value"] + value = {k: sub_cls.from_dict(v) for k, v in value.items()} + else: + if meta.proto_type in INT_64_TYPES: + value = ( + [int(n) for n in value] + if isinstance(value, list) + else int(value) + ) + elif meta.proto_type == TYPE_BYTES: + value = ( + [b64decode(n) for n in value] + if isinstance(value, list) + else b64decode(value) + ) + elif meta.proto_type == TYPE_ENUM: + enum_cls = cls._betterproto.cls_by_field[field_name] + if isinstance(value, list): + value 
= [enum_cls.from_string(e) for e in value] + elif isinstance(value, str): + value = enum_cls.from_string(value) + elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE): + value = ( + [_parse_float(n) for n in value] + if isinstance(value, list) + else _parse_float(value) + ) + + init_kwargs[field_name] = value + return init_kwargs + + @hybridmethod + def from_dict(cls: type[Self], value: Mapping[str, Any]) -> Self: # type: ignore """ - Parse the key/value pairs into the current message instance. This returns the - instance itself and is therefore assignable and chainable. + Parse the key/value pairs into a new message instance. Parameters ----------- @@ -1112,73 +1620,37 @@ def from_dict(self: T, value: Dict[str, Any]) -> T: :class:`Message` The initialized message. """ + self = cls(**cls._from_dict_init(value)) self._serialized_on_wire = True - for key in value: - field_name = safe_snake_case(key) - meta = self._betterproto.meta_by_field_name.get(field_name) - if not meta: - continue + return self - if value[key] is not None: - if meta.proto_type == TYPE_MESSAGE: - v = getattr(self, field_name) - if isinstance(v, list): - cls = self._betterproto.cls_by_field[field_name] - if cls == datetime: - v = [isoparse(item) for item in value[key]] - elif cls == timedelta: - v = [ - timedelta(seconds=float(item[:-1])) - for item in value[key] - ] - else: - v = [cls().from_dict(item) for item in value[key]] - elif isinstance(v, datetime): - v = isoparse(value[key]) - setattr(self, field_name, v) - elif isinstance(v, timedelta): - v = timedelta(seconds=float(value[key][:-1])) - setattr(self, field_name, v) - elif meta.wraps: - setattr(self, field_name, value[key]) - else: - # NOTE: `from_dict` mutates the underlying message, so no - # assignment here is necessary. 
- v.from_dict(value[key]) - elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: - v = getattr(self, field_name) - cls = self._betterproto.cls_by_field[f"{field_name}.value"] - for k in value[key]: - v[k] = cls().from_dict(value[key][k]) - else: - v = value[key] - if meta.proto_type in INT_64_TYPES: - if isinstance(value[key], list): - v = [int(n) for n in value[key]] - else: - v = int(value[key]) - elif meta.proto_type == TYPE_BYTES: - if isinstance(value[key], list): - v = [b64decode(n) for n in value[key]] - else: - v = b64decode(value[key]) - elif meta.proto_type == TYPE_ENUM: - enum_cls = self._betterproto.cls_by_field[field_name] - if isinstance(v, list): - v = [enum_cls.from_string(e) for e in v] - elif isinstance(v, str): - v = enum_cls.from_string(v) - elif meta.proto_type in (TYPE_FLOAT, TYPE_DOUBLE): - if isinstance(value[key], list): - v = [_parse_float(n) for n in value[key]] - else: - v = _parse_float(value[key]) + @from_dict.instancemethod + def from_dict(self, value: Mapping[str, Any]) -> Self: + """ + Parse the key/value pairs into the current message instance. This returns the + instance itself and is therefore assignable and chainable. - if v is not None: - setattr(self, field_name, v) + Parameters + ----------- + value: Dict[:class:`str`, Any] + The dictionary to parse from. + + Returns + -------- + :class:`Message` + The initialized message. + """ + self._serialized_on_wire = True + for field, value in self._from_dict_init(value).items(): + setattr(self, field, value) return self - def to_json(self, indent: Union[None, int, str] = None) -> str: + def to_json( + self, + indent: Union[None, int, str] = None, + include_default_values: bool = False, + casing: Casing = Casing.CAMEL, + ) -> str: """A helper function to parse the message instance into its JSON representation. 
@@ -1191,12 +1663,24 @@ def to_json(self, indent: Union[None, int, str] = None) -> str: indent: Optional[Union[:class:`int`, :class:`str`]] The indent to pass to :func:`json.dumps`. + include_default_values: :class:`bool` + If ``True`` will include the default values of fields. Default is ``False``. + E.g. an ``int32`` field will be included with a value of ``0`` if this is + set to ``True``, otherwise this would be ignored. + + casing: :class:`Casing` + The casing to use for key values. Default is :attr:`Casing.CAMEL` for + compatibility purposes. + Returns -------- :class:`str` The JSON representation of the message. """ - return json.dumps(self.to_dict(), indent=indent) + return json.dumps( + self.to_dict(include_default_values=include_default_values, casing=casing), + indent=indent, + ) def from_json(self: T, value: Union[str, bytes]) -> T: """A helper function to return the message instance from its JSON @@ -1219,6 +1703,209 @@ def from_json(self: T, value: Union[str, bytes]) -> T: """ return self.from_dict(json.loads(value)) + def to_pydict( + self, casing: Casing = Casing.CAMEL, include_default_values: bool = False + ) -> Dict[str, Any]: + """ + Returns a python dict representation of this object. + + Parameters + ----------- + casing: :class:`Casing` + The casing to use for key values. Default is :attr:`Casing.CAMEL` for + compatibility purposes. + include_default_values: :class:`bool` + If ``True`` will include the default values of fields. Default is ``False``. + E.g. an ``int32`` field will be included with a value of ``0`` if this is + set to ``True``, otherwise this would be ignored. + + Returns + -------- + Dict[:class:`str`, Any] + The python dict representation of this object. 
+ """ + output: Dict[str, Any] = {} + defaults = self._betterproto.default_gen + for field_name, meta in self._betterproto.meta_by_field_name.items(): + field_is_repeated = defaults[field_name] is list + value = getattr(self, field_name) + cased_name = casing(field_name).rstrip("_") # type: ignore + if meta.proto_type == TYPE_MESSAGE: + if isinstance(value, datetime): + if ( + value != DATETIME_ZERO + or include_default_values + or self._include_default_value_for_oneof( + field_name=field_name, meta=meta + ) + ): + output[cased_name] = value + elif isinstance(value, timedelta): + if ( + value != timedelta(0) + or include_default_values + or self._include_default_value_for_oneof( + field_name=field_name, meta=meta + ) + ): + output[cased_name] = value + elif meta.wraps: + if value is not None or include_default_values: + output[cased_name] = value + elif field_is_repeated: + # Convert each item. + value = [i.to_pydict(casing, include_default_values) for i in value] + if value or include_default_values: + output[cased_name] = value + elif value is None: + if include_default_values: + output[cased_name] = None + elif ( + value._serialized_on_wire + or include_default_values + or self._include_default_value_for_oneof( + field_name=field_name, meta=meta + ) + ): + output[cased_name] = value.to_pydict(casing, include_default_values) + elif meta.proto_type == TYPE_MAP: + for k in value: + if hasattr(value[k], "to_pydict"): + value[k] = value[k].to_pydict(casing, include_default_values) + + if value or include_default_values: + output[cased_name] = value + elif ( + value != self._get_field_default(field_name) + or include_default_values + or self._include_default_value_for_oneof( + field_name=field_name, meta=meta + ) + ): + output[cased_name] = value + return output + + def from_pydict(self: T, value: Mapping[str, Any]) -> T: + """ + Parse the key/value pairs into the current message instance. This returns the + instance itself and is therefore assignable and chainable. 
+ + Parameters + ----------- + value: Dict[:class:`str`, Any] + The dictionary to parse from. + + Returns + -------- + :class:`Message` + The initialized message. + """ + self._serialized_on_wire = True + for key in value: + field_name = safe_snake_case(key) + meta = self._betterproto.meta_by_field_name.get(field_name) + if not meta: + continue + + if value[key] is not None: + if meta.proto_type == TYPE_MESSAGE: + v = getattr(self, field_name) + if isinstance(v, list): + cls = self._betterproto.cls_by_field[field_name] + for item in value[key]: + v.append(cls().from_pydict(item)) + elif isinstance(v, datetime): + v = value[key] + elif isinstance(v, timedelta): + v = value[key] + elif meta.wraps: + v = value[key] + else: + # NOTE: `from_pydict` mutates the underlying message, so no + # assignment here is necessary. + v.from_pydict(value[key]) + elif meta.map_types and meta.map_types[1] == TYPE_MESSAGE: + v = getattr(self, field_name) + cls = self._betterproto.cls_by_field[f"{field_name}.value"] + for k in value[key]: + v[k] = cls().from_pydict(value[key][k]) + else: + v = value[key] + + if v is not None: + setattr(self, field_name, v) + return self + + def is_set(self, name: str) -> bool: + """ + Check if field with the given name has been set. + + Parameters + ----------- + name: :class:`str` + The name of the field to check for. + + Returns + -------- + :class:`bool` + `True` if field has been set, otherwise `False`. 
+ """ + default = ( + PLACEHOLDER + if not self._betterproto.meta_by_field_name[name].optional + else None + ) + return self.__raw_get(name) is not default + + @classmethod + def _validate_field_groups(cls, values): + group_to_one_ofs = cls._betterproto.oneof_field_by_group + field_name_to_meta = cls._betterproto.meta_by_field_name + + for group, field_set in group_to_one_ofs.items(): + if len(field_set) == 1: + (field,) = field_set + field_name = field.name + meta = field_name_to_meta[field_name] + + # This is a synthetic oneof; we should ignore its presence and not consider it as a oneof. + if meta.optional: + continue + + set_fields = [ + field.name + for field in field_set + if getattr(values, field.name, None) is not None + ] + + if len(set_fields) > 1: + set_fields_str = ", ".join(set_fields) + raise ValueError( + f"Group {group} has more than one value; fields {set_fields_str} are not None" + ) + + return values + + +Message.__annotations__ = {} # HACK to avoid typing.get_type_hints breaking :) + +# monkey patch (de-)serialization functions of class `Message` +# with functions from `betterproto-rust-codec` if available +try: + import betterproto_rust_codec + + def __parse_patch(self: T, data: bytes) -> T: + betterproto_rust_codec.deserialize(self, data) + return self + + def __bytes_patch(self) -> bytes: + return betterproto_rust_codec.serialize(self) + + Message.parse = __parse_patch + Message.__bytes__ = __bytes_patch +except ModuleNotFoundError: + pass + def serialized_on_wire(message: Message) -> bool: """ @@ -1267,6 +1954,15 @@ def which_one_of(message: Message, group_name: str) -> Tuple[str, Optional[Any]] class _Duration(Duration): + @classmethod + def from_timedelta( + cls, delta: timedelta, *, _1_microsecond: timedelta = timedelta(microseconds=1) + ) -> "_Duration": + total_ms = delta // _1_microsecond + seconds = int(total_ms / 1e6) + nanos = int((total_ms % 1e6) * 1e3) + return cls(seconds, nanos) + def to_timedelta(self) -> timedelta: return 
timedelta(seconds=self.seconds, microseconds=self.nanos / 1e3) @@ -1280,13 +1976,33 @@ def delta_to_json(delta: timedelta) -> str: class _Timestamp(Timestamp): + @classmethod + def from_datetime(cls, dt: datetime) -> "_Timestamp": + # manual epoch offset calculation to avoid rounding errors, + # to support negative timestamps (before 1970) and skirt + # around datetime bugs (apparently 0 isn't a year in [0, 9999]??) + offset = dt - DATETIME_ZERO + # below is the same as timedelta.total_seconds() but without dividing by 1e6 + # so we end up with microseconds as integers instead of seconds as float + offset_us = ( + offset.days * 24 * 60 * 60 + offset.seconds + ) * 10**6 + offset.microseconds + seconds, us = divmod(offset_us, 10**6) + return cls(seconds, us * 1000) + def to_datetime(self) -> datetime: - ts = self.seconds + (self.nanos / 1e9) - return datetime.fromtimestamp(ts, tz=timezone.utc) + # datetime.fromtimestamp() expects a timestamp in seconds, not microseconds + # if we pass it as a floating point number, we will run into rounding errors + # see also #407 + offset = timedelta(seconds=self.seconds, microseconds=self.nanos // 1000) + return DATETIME_ZERO + offset @staticmethod def timestamp_to_json(dt: datetime) -> str: nanos = dt.microsecond * 1e3 + if dt.tzinfo is not None: + # change timezone aware datetime objects to utc + dt = dt.astimezone(timezone.utc) copy = dt.replace(microsecond=0, tzinfo=None) result = copy.isoformat() if (nanos % 1e9) == 0: @@ -1295,10 +2011,10 @@ def timestamp_to_json(dt: datetime) -> str: return f"{result}Z" if (nanos % 1e6) == 0: # Serialize 3 fractional digits. - return f"{result}.{int(nanos // 1e6) :03d}Z" + return f"{result}.{int(nanos // 1e6):03d}Z" if (nanos % 1e3) == 0: # Serialize 6 fractional digits. - return f"{result}.{int(nanos // 1e3) :06d}Z" + return f"{result}.{int(nanos // 1e3):06d}Z" # Serialize 9 fractional digits. 
return f"{result}.{nanos:09d}" diff --git a/src/betterproto/_types.py b/src/betterproto/_types.py index 26b734406..616d550de 100644 --- a/src/betterproto/_types.py +++ b/src/betterproto/_types.py @@ -1,7 +1,12 @@ -from typing import TYPE_CHECKING, TypeVar +from typing import ( + TYPE_CHECKING, + TypeVar, +) + if TYPE_CHECKING: from grpclib._typing import IProtoMessage + from . import Message # Bound type variable to allow methods to return `self` of subclasses diff --git a/src/betterproto/_version.py b/src/betterproto/_version.py index efb50824f..484794b0f 100644 --- a/src/betterproto/_version.py +++ b/src/betterproto/_version.py @@ -1,3 +1,7 @@ -from pkg_resources import get_distribution +try: + from importlib import metadata +except ImportError: # for Python<3.8 + import importlib_metadata as metadata # type: ignore -__version__ = get_distribution("betterproto").version + +__version__ = metadata.version("betterproto") diff --git a/src/betterproto/casing.py b/src/betterproto/casing.py index ed8299135..f7d0832b8 100644 --- a/src/betterproto/casing.py +++ b/src/betterproto/casing.py @@ -1,6 +1,7 @@ import keyword import re + # Word delimiters and symbols that will not be preserved when re-casing. 
# language=PythonRegExp SYMBOLS = "[^a-zA-Z0-9]*" @@ -133,16 +134,10 @@ def lowercase_first(value: str) -> str: return value[0:1].lower() + value[1:] -def is_reserved_name(value: str) -> bool: - if keyword.iskeyword(value): - return True - - if value in ("bytes", "str"): - return True - - return False - - def sanitize_name(value: str) -> str: # https://www.python.org/dev/peps/pep-0008/#descriptive-naming-styles - return f"{value}_" if is_reserved_name(value) else value + if keyword.iskeyword(value): + return f"{value}_" + if not value.isidentifier(): + return f"_{value}" + return value diff --git a/src/betterproto/compile/importing.py b/src/betterproto/compile/importing.py index 6793a5b31..b216dfc59 100644 --- a/src/betterproto/compile/importing.py +++ b/src/betterproto/compile/importing.py @@ -1,11 +1,24 @@ +from __future__ import annotations + import os import re -from typing import Dict, List, Set, Tuple, Type +from typing import ( + TYPE_CHECKING, + Dict, + List, + Set, + Tuple, + Type, +) from ..casing import safe_snake_case from ..lib.google import protobuf as google_protobuf from .naming import pythonize_class_name + +if TYPE_CHECKING: + from ..plugin.typing_compiler import TypingCompiler + WRAPPER_TYPES: Dict[str, Type] = { ".google.protobuf.DoubleValue": google_protobuf.DoubleValue, ".google.protobuf.FloatValue": google_protobuf.FloatValue, @@ -36,7 +49,13 @@ def parse_source_type_name(field_type_name: str) -> Tuple[str, str]: def get_type_reference( - package: str, imports: set, source_type: str, unwrap: bool = True + *, + package: str, + imports: set, + source_type: str, + typing_compiler: TypingCompiler, + unwrap: bool = True, + pydantic: bool = False, ) -> str: """ Return a Python type name for a proto type reference. 
Adds the import if @@ -45,7 +64,7 @@ def get_type_reference( if unwrap: if source_type in WRAPPER_TYPES: wrapped_type = type(WRAPPER_TYPES[source_type]().value) - return f"Optional[{wrapped_type.__name__}]" + return typing_compiler.optional(wrapped_type.__name__) if source_type == ".google.protobuf.Duration": return "timedelta" @@ -62,7 +81,9 @@ def get_type_reference( compiling_google_protobuf = current_package == ["google", "protobuf"] importing_google_protobuf = py_package == ["google", "protobuf"] if importing_google_protobuf and not compiling_google_protobuf: - py_package = ["betterproto", "lib"] + py_package + py_package = ( + ["betterproto", "lib"] + (["pydantic"] if pydantic else []) + py_package + ) if py_package[:1] == ["betterproto"]: return reference_absolute(imports, py_package, py_type) diff --git a/src/betterproto/compile/naming.py b/src/betterproto/compile/naming.py index 1c2dbabee..baa9fc387 100644 --- a/src/betterproto/compile/naming.py +++ b/src/betterproto/compile/naming.py @@ -11,3 +11,11 @@ def pythonize_field_name(name: str) -> str: def pythonize_method_name(name: str) -> str: return casing.safe_snake_case(name) + + +def pythonize_enum_member_name(name: str, enum_name: str) -> str: + enum_name = casing.snake_case(enum_name).upper() + find = name.find(enum_name) + if find != -1: + name = name[find + len(enum_name) :].strip("_") + return casing.sanitize_name(name) diff --git a/src/betterproto/enum.py b/src/betterproto/enum.py new file mode 100644 index 000000000..6b1b7e0a4 --- /dev/null +++ b/src/betterproto/enum.py @@ -0,0 +1,197 @@ +from __future__ import annotations + +from enum import ( + EnumMeta, + IntEnum, +) +from types import MappingProxyType +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Optional, + Tuple, +) + + +if TYPE_CHECKING: + from collections.abc import ( + Generator, + Mapping, + ) + + from typing_extensions import ( + Never, + Self, + ) + + +def _is_descriptor(obj: object) -> bool: + return ( + hasattr(obj, 
"__get__") or hasattr(obj, "__set__") or hasattr(obj, "__delete__") + ) + + +class EnumType(EnumMeta if TYPE_CHECKING else type): + _value_map_: Mapping[int, Enum] + _member_map_: Mapping[str, Enum] + + def __new__( + mcs, name: str, bases: Tuple[type, ...], namespace: Dict[str, Any] + ) -> Self: + value_map = {} + member_map = {} + + new_mcs = type( + f"{name}Type", + tuple( + dict.fromkeys( + [base.__class__ for base in bases if base.__class__ is not type] + + [EnumType, type] + ) + ), # reorder the bases so EnumType and type are last to avoid conflicts + {"_value_map_": value_map, "_member_map_": member_map}, + ) + + members = { + name: value + for name, value in namespace.items() + if not _is_descriptor(value) and not name.startswith("__") + } + + cls = type.__new__( + new_mcs, + name, + bases, + {key: value for key, value in namespace.items() if key not in members}, + ) + # this allows us to disallow member access from other members as + # members become proper class variables + + for name, value in members.items(): + member = value_map.get(value) + if member is None: + member = cls.__new__(cls, name=name, value=value) # type: ignore + value_map[value] = member + member_map[name] = member + type.__setattr__(new_mcs, name, member) + + return cls + + if not TYPE_CHECKING: + + def __call__(cls, value: int) -> Enum: + try: + return cls._value_map_[value] + except (KeyError, TypeError): + raise ValueError(f"{value!r} is not a valid {cls.__name__}") from None + + def __iter__(cls) -> Generator[Enum, None, None]: + yield from cls._member_map_.values() + + def __reversed__(cls) -> Generator[Enum, None, None]: + yield from reversed(cls._member_map_.values()) + + def __getitem__(cls, key: str) -> Enum: + return cls._member_map_[key] + + @property + def __members__(cls) -> MappingProxyType[str, Enum]: + return MappingProxyType(cls._member_map_) + + def __repr__(cls) -> str: + return f"" + + def __len__(cls) -> int: + return len(cls._member_map_) + + def __setattr__(cls, 
name: str, value: Any) -> Never: + raise AttributeError(f"{cls.__name__}: cannot reassign Enum members.") + + def __delattr__(cls, name: str) -> Never: + raise AttributeError(f"{cls.__name__}: cannot delete Enum members.") + + def __contains__(cls, member: object) -> bool: + return isinstance(member, cls) and member.name in cls._member_map_ + + +class Enum(IntEnum if TYPE_CHECKING else int, metaclass=EnumType): + """ + The base class for protobuf enumerations, all generated enumerations will + inherit from this. Emulates `enum.IntEnum`. + """ + + name: Optional[str] + value: int + + if not TYPE_CHECKING: + + def __new__(cls, *, name: Optional[str], value: int) -> Self: + self = super().__new__(cls, value) + super().__setattr__(self, "name", name) + super().__setattr__(self, "value", value) + return self + + def __getnewargs_ex__(self) -> Tuple[Tuple[()], Dict[str, Any]]: + return (), {"name": self.name, "value": self.value} + + def __str__(self) -> str: + return self.name or "None" + + def __repr__(self) -> str: + return f"{self.__class__.__name__}.{self.name}" + + def __setattr__(self, key: str, value: Any) -> Never: + raise AttributeError( + f"{self.__class__.__name__} Cannot reassign a member's attributes." + ) + + def __delattr__(self, item: Any) -> Never: + raise AttributeError( + f"{self.__class__.__name__} Cannot delete a member's attributes." + ) + + def __copy__(self) -> Self: + return self + + def __deepcopy__(self, memo: Any) -> Self: + return self + + @classmethod + def try_value(cls, value: int = 0) -> Self: + """Return the value which corresponds to the value. + + Parameters + ----------- + value: :class:`int` + The value of the enum member to get. + + Returns + ------- + :class:`Enum` + The corresponding member or a new instance of the enum if + ``value`` isn't actually a member. 
+ """ + try: + return cls._value_map_[value] + except (KeyError, TypeError): + return cls.__new__(cls, name=None, value=value) + + @classmethod + def from_string(cls, name: str) -> Self: + """Return the value which corresponds to the string name. + + Parameters + ----------- + name: :class:`str` + The name of the enum member to get. + + Raises + ------- + :exc:`ValueError` + The member was not found in the Enum. + """ + try: + return cls._member_map_[name] + except KeyError as e: + raise ValueError(f"Unknown value {name} for enum {cls.__name__}") from e diff --git a/src/betterproto/grpc/grpclib_client.py b/src/betterproto/grpc/grpclib_client.py index a22b7e358..b19e80615 100644 --- a/src/betterproto/grpc/grpclib_client.py +++ b/src/betterproto/grpc/grpclib_client.py @@ -15,17 +15,22 @@ import grpclib.const -from .._types import ST, T if TYPE_CHECKING: from grpclib.client import Channel from grpclib.metadata import Deadline + from .._types import ( + ST, + IProtoMessage, + Message, + T, + ) -_Value = Union[str, bytes] -_MetadataLike = Union[Mapping[str, _Value], Collection[Tuple[str, _Value]]] -_MessageLike = Union[T, ST] -_MessageSource = Union[Iterable[ST], AsyncIterable[ST]] + +Value = Union[str, bytes] +MetadataLike = Union[Mapping[str, Value], Collection[Tuple[str, Value]]] +MessageSource = Union[Iterable["IProtoMessage"], AsyncIterable["IProtoMessage"]] class ServiceStub(ABC): @@ -39,7 +44,7 @@ def __init__( *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, - metadata: Optional[_MetadataLike] = None, + metadata: Optional[MetadataLike] = None, ) -> None: self.channel = channel self.timeout = timeout @@ -50,7 +55,7 @@ def __resolve_request_kwargs( self, timeout: Optional[float], deadline: Optional["Deadline"], - metadata: Optional[_MetadataLike], + metadata: Optional[MetadataLike], ): return { "timeout": self.timeout if timeout is None else timeout, @@ -61,13 +66,13 @@ def __resolve_request_kwargs( async def _unary_unary( self, route: 
str, - request: _MessageLike, - response_type: Type[T], + request: "IProtoMessage", + response_type: Type["T"], *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, - metadata: Optional[_MetadataLike] = None, - ) -> T: + metadata: Optional[MetadataLike] = None, + ) -> "T": """Make a unary request and return the response.""" async with self.channel.request( route, @@ -84,13 +89,13 @@ async def _unary_unary( async def _unary_stream( self, route: str, - request: _MessageLike, - response_type: Type[T], + request: "IProtoMessage", + response_type: Type["T"], *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, - metadata: Optional[_MetadataLike] = None, - ) -> AsyncIterator[T]: + metadata: Optional[MetadataLike] = None, + ) -> AsyncIterator["T"]: """Make a unary request and return the stream response iterator.""" async with self.channel.request( route, @@ -106,14 +111,14 @@ async def _unary_stream( async def _stream_unary( self, route: str, - request_iterator: _MessageSource, - request_type: Type[ST], - response_type: Type[T], + request_iterator: MessageSource, + request_type: Type["IProtoMessage"], + response_type: Type["T"], *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, - metadata: Optional[_MetadataLike] = None, - ) -> T: + metadata: Optional[MetadataLike] = None, + ) -> "T": """Make a stream request and return the response.""" async with self.channel.request( route, @@ -122,6 +127,7 @@ async def _stream_unary( response_type, **self.__resolve_request_kwargs(timeout, deadline, metadata), ) as stream: + await stream.send_request() await self._send_messages(stream, request_iterator) response = await stream.recv_message() assert response is not None @@ -130,14 +136,14 @@ async def _stream_unary( async def _stream_stream( self, route: str, - request_iterator: _MessageSource, - request_type: Type[ST], - response_type: Type[T], + request_iterator: MessageSource, + request_type: Type["IProtoMessage"], + 
response_type: Type["T"], *, timeout: Optional[float] = None, deadline: Optional["Deadline"] = None, - metadata: Optional[_MetadataLike] = None, - ) -> AsyncIterator[T]: + metadata: Optional[MetadataLike] = None, + ) -> AsyncIterator["T"]: """ Make a stream request and return an AsyncIterator to iterate over response messages. @@ -161,7 +167,7 @@ async def _stream_stream( raise @staticmethod - async def _send_messages(stream, messages: _MessageSource): + async def _send_messages(stream, messages: MessageSource): if isinstance(messages, AsyncIterable): async for message in messages: await stream.send_message(message) diff --git a/src/betterproto/grpc/grpclib_server.py b/src/betterproto/grpc/grpclib_server.py index 59bc7d435..3e2803113 100644 --- a/src/betterproto/grpc/grpclib_server.py +++ b/src/betterproto/grpc/grpclib_server.py @@ -1,6 +1,10 @@ from abc import ABC -from collections import AsyncIterable -from typing import Callable, Any, Dict +from collections.abc import AsyncIterable +from typing import ( + Any, + Callable, + Dict, +) import grpclib import grpclib.server @@ -15,10 +19,9 @@ async def _call_rpc_handler_server_stream( self, handler: Callable, stream: grpclib.server.Stream, - request_kwargs: Dict[str, Any], + request: Any, ) -> None: - - response_iter = handler(**request_kwargs) + response_iter = handler(request) # check if response is actually an AsyncIterator # this might be false if the method just returns without # yielding at least once diff --git a/src/betterproto/grpc/util/async_channel.py b/src/betterproto/grpc/util/async_channel.py index 5cb3f899d..9f18dbfd2 100644 --- a/src/betterproto/grpc/util/async_channel.py +++ b/src/betterproto/grpc/util/async_channel.py @@ -1,5 +1,13 @@ import asyncio -from typing import AsyncIterable, AsyncIterator, Iterable, Optional, TypeVar, Union +from typing import ( + AsyncIterable, + AsyncIterator, + Iterable, + Optional, + TypeVar, + Union, +) + T = TypeVar("T") diff --git 
a/src/betterproto/lib/google/protobuf/__init__.py b/src/betterproto/lib/google/protobuf/__init__.py index b361c04ee..dfc9d5586 100644 --- a/src/betterproto/lib/google/protobuf/__init__.py +++ b/src/betterproto/lib/google/protobuf/__init__.py @@ -1,1317 +1 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto -# plugin: python-betterproto -import warnings -from dataclasses import dataclass -from typing import Dict, List - -import betterproto - - -class Syntax(betterproto.Enum): - """The syntax in which a protocol buffer element is defined.""" - - # Syntax `proto2`. - SYNTAX_PROTO2 = 0 - # Syntax `proto3`. - SYNTAX_PROTO3 = 1 - - -class FieldKind(betterproto.Enum): - TYPE_UNKNOWN = 0 - TYPE_DOUBLE = 1 - TYPE_FLOAT = 2 - TYPE_INT64 = 3 - TYPE_UINT64 = 4 - TYPE_INT32 = 5 - TYPE_FIXED64 = 6 - TYPE_FIXED32 = 7 - TYPE_BOOL = 8 - TYPE_STRING = 9 - TYPE_GROUP = 10 - TYPE_MESSAGE = 11 - TYPE_BYTES = 12 - TYPE_UINT32 = 13 - TYPE_ENUM = 14 - TYPE_SFIXED32 = 15 - TYPE_SFIXED64 = 16 - TYPE_SINT32 = 17 - TYPE_SINT64 = 18 - - -class FieldCardinality(betterproto.Enum): - CARDINALITY_UNKNOWN = 0 - CARDINALITY_OPTIONAL = 1 - CARDINALITY_REQUIRED = 2 - CARDINALITY_REPEATED = 3 - - -class NullValue(betterproto.Enum): - """ - `NullValue` is a singleton enumeration to represent the null value for the - `Value` type union. The JSON representation for `NullValue` is JSON - `null`. - """ - - # Null value. 
- NULL_VALUE = 0 - - -class FieldDescriptorProtoType(betterproto.Enum): - TYPE_DOUBLE = 1 - TYPE_FLOAT = 2 - TYPE_INT64 = 3 - TYPE_UINT64 = 4 - TYPE_INT32 = 5 - TYPE_FIXED64 = 6 - TYPE_FIXED32 = 7 - TYPE_BOOL = 8 - TYPE_STRING = 9 - TYPE_GROUP = 10 - TYPE_MESSAGE = 11 - TYPE_BYTES = 12 - TYPE_UINT32 = 13 - TYPE_ENUM = 14 - TYPE_SFIXED32 = 15 - TYPE_SFIXED64 = 16 - TYPE_SINT32 = 17 - TYPE_SINT64 = 18 - - -class FieldDescriptorProtoLabel(betterproto.Enum): - LABEL_OPTIONAL = 1 - LABEL_REQUIRED = 2 - LABEL_REPEATED = 3 - - -class FileOptionsOptimizeMode(betterproto.Enum): - SPEED = 1 - CODE_SIZE = 2 - LITE_RUNTIME = 3 - - -class FieldOptionsCType(betterproto.Enum): - STRING = 0 - CORD = 1 - STRING_PIECE = 2 - - -class FieldOptionsJsType(betterproto.Enum): - JS_NORMAL = 0 - JS_STRING = 1 - JS_NUMBER = 2 - - -class MethodOptionsIdempotencyLevel(betterproto.Enum): - IDEMPOTENCY_UNKNOWN = 0 - NO_SIDE_EFFECTS = 1 - IDEMPOTENT = 2 - - -@dataclass(eq=False, repr=False) -class Timestamp(betterproto.Message): - """ - A Timestamp represents a point in time independent of any time zone or - local calendar, encoded as a count of seconds and fractions of seconds at - nanosecond resolution. The count is relative to an epoch at UTC midnight on - January 1, 1970, in the proleptic Gregorian calendar which extends the - Gregorian calendar backwards to year one. All minutes are 60 seconds long. - Leap seconds are "smeared" so that no leap second table is needed for - interpretation, using a [24-hour linear - smear](https://developers.google.com/time/smear). The range is from - 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By restricting to - that range, we ensure that we can convert to and from [RFC - 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. # Examples - Example 1: Compute Timestamp from POSIX `time()`. Timestamp timestamp; - timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0); Example 2: - Compute Timestamp from POSIX `gettimeofday()`. 
struct timeval tv; - gettimeofday(&tv, NULL); Timestamp timestamp; - timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.tv_usec * - 1000); Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. - FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = - (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // A Windows - tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is - 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp - timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - - 11644473600LL)); timestamp.set_nanos((INT32) ((ticks % 10000000) * - 100)); Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. - long millis = System.currentTimeMillis(); Timestamp timestamp = - Timestamp.newBuilder().setSeconds(millis / 1000) .setNanos((int) - ((millis % 1000) * 1000000)).build(); Example 5: Compute Timestamp from - current time in Python. timestamp = Timestamp() - timestamp.GetCurrentTime() # JSON Mapping In JSON format, the Timestamp - type is encoded as a string in the [RFC - 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the format is - "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" where {year} is - always expressed using four digits while {month}, {day}, {hour}, {min}, and - {sec} are zero-padded to two digits each. The fractional seconds, which can - go up to 9 digits (i.e. up to 1 nanosecond resolution), are optional. The - "Z" suffix indicates the timezone ("UTC"); the timezone is required. A - proto3 JSON serializer should always use UTC (as indicated by "Z") when - printing the Timestamp type and a proto3 JSON parser should be able to - accept both UTC and other timezones (as indicated by an offset). For - example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on - January 15, 2017. 
In JavaScript, one can convert a Date object to this - format using the standard [toISOString()](https://developer.mozilla.org/en- - US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) method. - In Python, a standard `datetime.datetime` object can be converted to this - format using - [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) - with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one - can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( - http://www.joda.org/joda- - time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D ) - to obtain a formatter capable of generating timestamps in this format. - """ - - # Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must - # be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. - seconds: int = betterproto.int64_field(1) - # Non-negative fractions of a second at nanosecond resolution. Negative - # second values with fractions must still have non-negative nanos values that - # count forward in time. Must be from 0 to 999,999,999 inclusive. - nanos: int = betterproto.int32_field(2) - - -@dataclass(eq=False, repr=False) -class FieldMask(betterproto.Message): - """ - `FieldMask` represents a set of symbolic field paths, for example: - paths: "f.a" paths: "f.b.d" Here `f` represents a field in some root - message, `a` and `b` fields in the message found in `f`, and `d` a field - found in the message in `f.b`. Field masks are used to specify a subset of - fields that should be returned by a get operation or modified by an update - operation. Field masks also have a custom JSON encoding (see below). # - Field Masks in Projections When used in the context of a projection, a - response message or sub-message is filtered by the API to only contain - those fields as specified in the mask. 
For example, if the mask in the - previous example is applied to a response message as follows: f { - a : 22 b { d : 1 x : 2 } y : 13 } - z: 8 The result will not contain specific values for fields x,y and z - (their value will be set to the default, and omitted in proto text output): - f { a : 22 b { d : 1 } } A repeated field is - not allowed except at the last position of a paths string. If a FieldMask - object is not present in a get operation, the operation applies to all - fields (as if a FieldMask of all fields had been specified). Note that a - field mask does not necessarily apply to the top-level response message. In - case of a REST get operation, the field mask applies directly to the - response, but in case of a REST list operation, the mask instead applies to - each individual message in the returned resource list. In case of a REST - custom method, other definitions may be used. Where the mask applies will - be clearly documented together with its declaration in the API. In any - case, the effect on the returned resource/resources is required behavior - for APIs. # Field Masks in Update Operations A field mask in update - operations specifies which fields of the targeted resource are going to be - updated. The API is required to only change the values of the fields as - specified in the mask and leave the others untouched. If a resource is - passed in to describe the updated values, the API ignores the values of all - fields not covered by the mask. If a repeated field is specified for an - update operation, new values will be appended to the existing repeated - field in the target resource. Note that a repeated field is only allowed in - the last position of a `paths` string. If a sub-message is specified in the - last position of the field mask for an update operation, then new value - will be merged into the existing sub-message in the target resource. 
For - example, given the target message: f { b { d: 1 - x: 2 } c: [1] } And an update message: f { b { - d: 10 } c: [2] } then if the field mask is: paths: ["f.b", - "f.c"] then the result will be: f { b { d: 10 x: - 2 } c: [1, 2] } An implementation may provide options to - override this default behavior for repeated and message fields. In order to - reset a field's value to the default, the field must be in the mask and set - to the default value in the provided resource. Hence, in order to reset all - fields of a resource, provide a default instance of the resource and set - all fields in the mask, or do not provide a mask as described below. If a - field mask is not present on update, the operation applies to all fields - (as if a field mask of all fields has been specified). Note that in the - presence of schema evolution, this may mean that fields the client does not - know and has therefore not filled into the request will be reset to their - default. If this is unwanted behavior, a specific service may require a - client to always specify a field mask, producing an error if not. As with - get operations, the location of the resource which describes the updated - values in the request message depends on the operation kind. In any case, - the effect of the field mask is required to be honored by the API. ## - Considerations for HTTP REST The HTTP kind of an update operation which - uses a field mask must be set to PATCH instead of PUT in order to satisfy - HTTP semantics (PUT must only be used for full updates). # JSON Encoding of - Field Masks In JSON, a field mask is encoded as a single string where paths - are separated by a comma. Fields name in each path are converted to/from - lower-camel naming conventions. 
As an example, consider the following - message declarations: message Profile { User user = 1; - Photo photo = 2; } message User { string display_name = 1; - string address = 2; } In proto a field mask for `Profile` may look as - such: mask { paths: "user.display_name" paths: "photo" - } In JSON, the same mask is represented as below: { mask: - "user.displayName,photo" } # Field Masks and Oneof Fields Field masks - treat fields in oneofs just as regular fields. Consider the following - message: message SampleMessage { oneof test_oneof { - string name = 4; SubMessage sub_message = 9; } } The - field mask can be: mask { paths: "name" } Or: mask { - paths: "sub_message" } Note that oneof type names ("test_oneof" in this - case) cannot be used in paths. ## Field Mask Verification The - implementation of any API method which has a FieldMask type field in the - request should verify the included field paths, and return an - `INVALID_ARGUMENT` error if any path is unmappable. - """ - - # The set of field mask paths. - paths: List[str] = betterproto.string_field(1) - - -@dataclass(eq=False, repr=False) -class SourceContext(betterproto.Message): - """ - `SourceContext` represents information about the source of a protobuf - element, like the file in which it is defined. - """ - - # The path-qualified name of the .proto file that contained the associated - # protobuf element. For example: `"google/protobuf/source_context.proto"`. - file_name: str = betterproto.string_field(1) - - -@dataclass(eq=False, repr=False) -class Any(betterproto.Message): - """ - `Any` contains an arbitrary serialized protocol buffer message along with a - URL that describes the type of the serialized message. Protobuf library - provides support to pack/unpack Any values in the form of utility functions - or additional generated methods of the Any type. Example 1: Pack and unpack - a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); - ... if (any.UnpackTo(&foo)) { ... 
} Example 2: Pack and - unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); - ... if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } - Example 3: Pack and unpack a message in Python. foo = Foo(...) any - = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): - any.Unpack(foo) ... Example 4: Pack and unpack a message in Go - foo := &pb.Foo{...} any, err := ptypes.MarshalAny(foo) ... - foo := &pb.Foo{} if err := ptypes.UnmarshalAny(any, foo); err != nil { - ... } The pack methods provided by protobuf library will by default - use 'type.googleapis.com/full.type.name' as the type URL and the unpack - methods only use the fully qualified type name after the last '/' in the - type URL, for example "foo.bar.com/x/y.z" will yield type name "y.z". JSON - ==== The JSON representation of an `Any` value uses the regular - representation of the deserialized, embedded message, with an additional - field `@type` which contains the type URL. Example: package - google.profile; message Person { string first_name = 1; - string last_name = 2; } { "@type": - "type.googleapis.com/google.profile.Person", "firstName": , - "lastName": } If the embedded message type is well-known and - has a custom JSON representation, that representation will be embedded - adding a field `value` which holds the custom JSON in addition to the - `@type` field. Example (for message [google.protobuf.Duration][]): { - "@type": "type.googleapis.com/google.protobuf.Duration", "value": - "1.212s" } - """ - - # A URL/resource name that uniquely identifies the type of the serialized - # protocol buffer message. This string must contain at least one "/" - # character. The last segment of the URL's path must represent the fully - # qualified name of the type (as in `path/google.protobuf.Duration`). The - # name should be in a canonical form (e.g., leading "." is not accepted). In - # practice, teams usually precompile into the binary all types that they - # expect it to use in the context of Any. 
However, for URLs which use the - # scheme `http`, `https`, or no scheme, one can optionally set up a type - # server that maps type URLs to message definitions as follows: * If no - # scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield - # a [google.protobuf.Type][] value in binary format, or produce an error. * - # Applications are allowed to cache lookup results based on the URL, or - # have them precompiled into a binary to avoid any lookup. Therefore, - # binary compatibility needs to be preserved on changes to types. (Use - # versioned type names to manage breaking changes.) Note: this - # functionality is not currently available in the official protobuf release, - # and it is not used for type URLs beginning with type.googleapis.com. - # Schemes other than `http`, `https` (or the empty scheme) might be used with - # implementation specific semantics. - type_url: str = betterproto.string_field(1) - # Must be a valid serialized protocol buffer of the above specified type. - value: bytes = betterproto.bytes_field(2) - - -@dataclass(eq=False, repr=False) -class Type(betterproto.Message): - """A protocol buffer message type.""" - - # The fully qualified message name. - name: str = betterproto.string_field(1) - # The list of fields. - fields: List["Field"] = betterproto.message_field(2) - # The list of types appearing in `oneof` definitions in this type. - oneofs: List[str] = betterproto.string_field(3) - # The protocol buffer options. - options: List["Option"] = betterproto.message_field(4) - # The source context. - source_context: "SourceContext" = betterproto.message_field(5) - # The source syntax. - syntax: "Syntax" = betterproto.enum_field(6) - - -@dataclass(eq=False, repr=False) -class Field(betterproto.Message): - """A single field of a message type.""" - - # The field type. - kind: "FieldKind" = betterproto.enum_field(1) - # The field cardinality. - cardinality: "FieldCardinality" = betterproto.enum_field(2) - # The field number. 
- number: int = betterproto.int32_field(3) - # The field name. - name: str = betterproto.string_field(4) - # The field type URL, without the scheme, for message or enumeration types. - # Example: `"type.googleapis.com/google.protobuf.Timestamp"`. - type_url: str = betterproto.string_field(6) - # The index of the field type in `Type.oneofs`, for message or enumeration - # types. The first type has index 1; zero means the type is not in the list. - oneof_index: int = betterproto.int32_field(7) - # Whether to use alternative packed wire representation. - packed: bool = betterproto.bool_field(8) - # The protocol buffer options. - options: List["Option"] = betterproto.message_field(9) - # The field JSON name. - json_name: str = betterproto.string_field(10) - # The string value of the default value of this field. Proto2 syntax only. - default_value: str = betterproto.string_field(11) - - -@dataclass(eq=False, repr=False) -class Enum(betterproto.Message): - """Enum type definition.""" - - # Enum type name. - name: str = betterproto.string_field(1) - # Enum value definitions. - enumvalue: List["EnumValue"] = betterproto.message_field( - 2, wraps=betterproto.TYPE_ENUM - ) - # Protocol buffer options. - options: List["Option"] = betterproto.message_field(3) - # The source context. - source_context: "SourceContext" = betterproto.message_field(4) - # The source syntax. - syntax: "Syntax" = betterproto.enum_field(5) - - -@dataclass(eq=False, repr=False) -class EnumValue(betterproto.Message): - """Enum value definition.""" - - # Enum value name. - name: str = betterproto.string_field(1) - # Enum value number. - number: int = betterproto.int32_field(2) - # Protocol buffer options. - options: List["Option"] = betterproto.message_field(3) - - -@dataclass(eq=False, repr=False) -class Option(betterproto.Message): - """ - A protocol buffer option, which can be attached to a message, field, - enumeration, etc. - """ - - # The option's name. 
    # For protobuf built-in options (options defined in descriptor.proto),
    # this is the short name. For example, `"map_entry"`. For custom options,
    # it should be the fully-qualified name. For example, `"google.api.http"`.
    name: str = betterproto.string_field(1)
    # The option's value packed in an Any message. If the value is a
    # primitive, the corresponding wrapper type defined in
    # google/protobuf/wrappers.proto should be used. If the value is an enum,
    # it should be stored as an int32 value using the
    # google.protobuf.Int32Value type.
    value: "Any" = betterproto.message_field(2)


@dataclass(eq=False, repr=False)
class Api(betterproto.Message):
    """
    Api is a light-weight descriptor for an API Interface. Interfaces are
    also described as "protocol buffer services" in some contexts, such as by
    the "service" keyword in a .proto file, but they are different from API
    Services, which represent a concrete implementation of an interface as
    opposed to simply a description of methods and bindings. They are also
    sometimes simply referred to as "APIs" in other contexts, such as the
    name of this message itself. See
    https://cloud.google.com/apis/design/glossary for detailed terminology.
    """

    # The fully qualified name of this interface, including package name
    # followed by the interface's simple name.
    name: str = betterproto.string_field(1)
    # The methods of this interface, in unspecified order.
    methods: List["Method"] = betterproto.message_field(2)
    # Any metadata attached to the interface.
    options: List["Option"] = betterproto.message_field(3)
    # A version string for this interface. If specified, must have the form
    # `major-version.minor-version`, as in `1.10`. If the minor version is
    # omitted, it defaults to zero. If the entire version field is empty, the
    # major version is derived from the package name, as outlined below.
    # If the field is not empty, the version in the package name will be
    # verified to be consistent with what is provided here. The versioning
    # schema uses [semantic versioning](http://semver.org) where the major
    # version number indicates a breaking change and the minor version an
    # additive, non-breaking change. Both version numbers are signals to
    # users what to expect from different versions, and should be carefully
    # chosen based on the product plan. The major version is also reflected
    # in the package name of the interface, which must end in `v`, as in
    # `google.feature.v1`. For major versions 0 and 1, the suffix can be
    # omitted. Zero major versions must only be used for experimental, non-GA
    # interfaces.
    version: str = betterproto.string_field(4)
    # Source context for the protocol buffer service represented by this
    # message.
    source_context: "SourceContext" = betterproto.message_field(5)
    # Included interfaces. See [Mixin][].
    mixins: List["Mixin"] = betterproto.message_field(6)
    # The source syntax of the service.
    syntax: "Syntax" = betterproto.enum_field(7)


@dataclass(eq=False, repr=False)
class Method(betterproto.Message):
    """Method represents a method of an API interface."""

    # The simple name of this method.
    name: str = betterproto.string_field(1)
    # A URL of the input message type.
    request_type_url: str = betterproto.string_field(2)
    # If true, the request is streamed.
    request_streaming: bool = betterproto.bool_field(3)
    # The URL of the output message type.
    response_type_url: str = betterproto.string_field(4)
    # If true, the response is streamed.
    response_streaming: bool = betterproto.bool_field(5)
    # Any metadata attached to the method.
    options: List["Option"] = betterproto.message_field(6)
    # The source syntax of this method.
- syntax: "Syntax" = betterproto.enum_field(7) - - -@dataclass(eq=False, repr=False) -class Mixin(betterproto.Message): - """ - Declares an API Interface to be included in this interface. The including - interface must redeclare all the methods from the included interface, but - documentation and options are inherited as follows: - If after comment and - whitespace stripping, the documentation string of the redeclared method - is empty, it will be inherited from the original method. - Each - annotation belonging to the service config (http, visibility) which is - not set in the redeclared method will be inherited. - If an http - annotation is inherited, the path pattern will be modified as follows. - Any version prefix will be replaced by the version of the including - interface plus the [root][] path if specified. Example of a simple mixin: - package google.acl.v1; service AccessControl { // Get the - underlying ACL object. rpc GetAcl(GetAclRequest) returns (Acl) { - option (google.api.http).get = "/v1/{resource=**}:getAcl"; } } - package google.storage.v2; service Storage { rpc - GetAcl(GetAclRequest) returns (Acl); // Get a data record. rpc - GetData(GetDataRequest) returns (Data) { option - (google.api.http).get = "/v2/{resource=**}"; } } Example of a - mixin configuration: apis: - name: google.storage.v2.Storage - mixins: - name: google.acl.v1.AccessControl The mixin construct - implies that all methods in `AccessControl` are also declared with same - name and request/response types in `Storage`. A documentation generator or - annotation processor will see the effective `Storage.GetAcl` method after - inherting documentation and annotations as follows: service Storage { - // Get the underlying ACL object. rpc GetAcl(GetAclRequest) returns - (Acl) { option (google.api.http).get = "/v2/{resource=**}:getAcl"; - } ... } Note how the version in the path pattern changed from - `v1` to `v2`. 
    If the `root` field in the mixin is specified, it should be a relative
    path under which inherited HTTP paths are placed. Example: apis: - name:
    google.storage.v2.Storage mixins: - name: google.acl.v1.AccessControl
    root: acls This implies the following inherited HTTP annotation: service
    Storage { // Get the underlying ACL object. rpc GetAcl(GetAclRequest)
    returns (Acl) { option (google.api.http).get =
    "/v2/acls/{resource=**}:getAcl"; } ... }
    """

    # The fully qualified name of the interface which is included.
    name: str = betterproto.string_field(1)
    # If non-empty specifies a path under which inherited HTTP paths are
    # rooted.
    root: str = betterproto.string_field(2)


@dataclass(eq=False, repr=False)
class Duration(betterproto.Message):
    """
    A Duration represents a signed, fixed-length span of time represented as
    a count of seconds and fractions of seconds at nanosecond resolution. It
    is independent of any calendar and concepts like "day" or "month". It is
    related to Timestamp in that the difference between two Timestamp values
    is a Duration and it can be added or subtracted from a Timestamp. Range
    is approximately +-10,000 years. # Examples Example 1: Compute Duration
    from two Timestamps in pseudo code. Timestamp start = ...; Timestamp end
    = ...; Duration duration = ...; duration.seconds = end.seconds -
    start.seconds; duration.nanos = end.nanos - start.nanos; if
    (duration.seconds < 0 && duration.nanos > 0) { duration.seconds += 1;
    duration.nanos -= 1000000000; } else if (duration.seconds > 0 &&
    duration.nanos < 0) { duration.seconds -= 1; duration.nanos +=
    1000000000; } Example 2: Compute Timestamp from Timestamp + Duration in
    pseudo code.
    Timestamp start = ...; Duration duration = ...; Timestamp end = ...;
    end.seconds = start.seconds + duration.seconds; end.nanos = start.nanos +
    duration.nanos; if (end.nanos < 0) { end.seconds -= 1; end.nanos +=
    1000000000; } else if (end.nanos >= 1000000000) { end.seconds += 1;
    end.nanos -= 1000000000; } Example 3: Compute Duration from
    datetime.timedelta in Python. td = datetime.timedelta(days=3, minutes=10)
    duration = Duration() duration.FromTimedelta(td) # JSON Mapping In JSON
    format, the Duration type is encoded as a string rather than an object,
    where the string ends in the suffix "s" (indicating seconds) and is
    preceded by the number of seconds, with nanoseconds expressed as
    fractional seconds. For example, 3 seconds with 0 nanoseconds should be
    encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
    be expressed in JSON format as "3.000000001s", and 3 seconds and 1
    microsecond should be expressed in JSON format as "3.000001s".
    """

    # Signed seconds of the span of time. Must be from -315,576,000,000 to
    # +315,576,000,000 inclusive. Note: these bounds are computed from: 60
    # sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
    seconds: int = betterproto.int64_field(1)
    # Signed fractions of a second at nanosecond resolution of the span of
    # time. Durations less than one second are represented with a 0 `seconds`
    # field and a positive or negative `nanos` field. For durations of one
    # second or more, a non-zero value for the `nanos` field must be of the
    # same sign as the `seconds` field. Must be from -999,999,999 to
    # +999,999,999 inclusive.
    nanos: int = betterproto.int32_field(2)


@dataclass(eq=False, repr=False)
class Struct(betterproto.Message):
    """
    `Struct` represents a structured data value, consisting of fields which
    map to dynamically typed values. In some languages, `Struct` might be
    supported by a native representation.
    For example, in scripting languages like JS a struct is represented as an
    object. The details of that representation are described together with
    the proto support for the language. The JSON representation for `Struct`
    is JSON object.
    """

    # Unordered map of dynamically typed values.
    fields: Dict[str, "Value"] = betterproto.map_field(
        1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE
    )


@dataclass(eq=False, repr=False)
class Value(betterproto.Message):
    """
    `Value` represents a dynamically typed value which can be either null, a
    number, a string, a boolean, a recursive struct value, or a list of
    values. A producer of value is expected to set one of that variants,
    absence of any variant indicates an error. The JSON representation for
    `Value` is JSON value.
    """

    # Represents a null value.
    null_value: "NullValue" = betterproto.enum_field(1, group="kind")
    # Represents a double value.
    number_value: float = betterproto.double_field(2, group="kind")
    # Represents a string value.
    string_value: str = betterproto.string_field(3, group="kind")
    # Represents a boolean value.
    bool_value: bool = betterproto.bool_field(4, group="kind")
    # Represents a structured value.
    struct_value: "Struct" = betterproto.message_field(5, group="kind")
    # Represents a repeated `Value`.
    list_value: "ListValue" = betterproto.message_field(6, group="kind")


@dataclass(eq=False, repr=False)
class ListValue(betterproto.Message):
    """
    `ListValue` is a wrapper around a repeated field of values. The JSON
    representation for `ListValue` is JSON array.
    """

    # Repeated field of dynamically typed values.
    values: List["Value"] = betterproto.message_field(1)


@dataclass(eq=False, repr=False)
class DoubleValue(betterproto.Message):
    """
    Wrapper message for `double`. The JSON representation for `DoubleValue`
    is JSON number.
    """

    # The double value.
    value: float = betterproto.double_field(1)


@dataclass(eq=False, repr=False)
class FloatValue(betterproto.Message):
    """
    Wrapper message for `float`. The JSON representation for `FloatValue` is
    JSON number.
    """

    # The float value.
    value: float = betterproto.float_field(1)


@dataclass(eq=False, repr=False)
class Int64Value(betterproto.Message):
    """
    Wrapper message for `int64`. The JSON representation for `Int64Value` is
    JSON string.
    """

    # The int64 value.
    value: int = betterproto.int64_field(1)


@dataclass(eq=False, repr=False)
class UInt64Value(betterproto.Message):
    """
    Wrapper message for `uint64`. The JSON representation for `UInt64Value`
    is JSON string.
    """

    # The uint64 value.
    value: int = betterproto.uint64_field(1)


@dataclass(eq=False, repr=False)
class Int32Value(betterproto.Message):
    """
    Wrapper message for `int32`. The JSON representation for `Int32Value` is
    JSON number.
    """

    # The int32 value.
    value: int = betterproto.int32_field(1)


@dataclass(eq=False, repr=False)
class UInt32Value(betterproto.Message):
    """
    Wrapper message for `uint32`. The JSON representation for `UInt32Value`
    is JSON number.
    """

    # The uint32 value.
    value: int = betterproto.uint32_field(1)


@dataclass(eq=False, repr=False)
class BoolValue(betterproto.Message):
    """
    Wrapper message for `bool`. The JSON representation for `BoolValue` is
    JSON `true` and `false`.
    """

    # The bool value.
    value: bool = betterproto.bool_field(1)


@dataclass(eq=False, repr=False)
class StringValue(betterproto.Message):
    """
    Wrapper message for `string`. The JSON representation for `StringValue`
    is JSON string.
    """

    # The string value.
    value: str = betterproto.string_field(1)


@dataclass(eq=False, repr=False)
class BytesValue(betterproto.Message):
    """
    Wrapper message for `bytes`. The JSON representation for `BytesValue` is
    JSON string.
    """

    # The bytes value.
    value: bytes = betterproto.bytes_field(1)


@dataclass(eq=False, repr=False)
class Empty(betterproto.Message):
    """
    A generic empty message that you can re-use to avoid defining duplicated
    empty messages in your APIs. A typical example is to use it as the
    request or the response type of an API method. For instance: service Foo
    { rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); } The
    JSON representation for `Empty` is empty JSON object `{}`.
    """

    pass


@dataclass(eq=False, repr=False)
class FileDescriptorSet(betterproto.Message):
    """
    The protocol compiler can output a FileDescriptorSet containing the
    .proto files it parses.
    """

    file: List["FileDescriptorProto"] = betterproto.message_field(1)


@dataclass(eq=False, repr=False)
class FileDescriptorProto(betterproto.Message):
    """Describes a complete .proto file."""

    name: str = betterproto.string_field(1)
    package: str = betterproto.string_field(2)
    # Names of files imported by this file.
    dependency: List[str] = betterproto.string_field(3)
    # Indexes of the public imported files in the dependency list above.
    public_dependency: List[int] = betterproto.int32_field(10)
    # Indexes of the weak imported files in the dependency list. For Google-
    # internal migration only. Do not use.
    weak_dependency: List[int] = betterproto.int32_field(11)
    # All top-level definitions in this file.
    message_type: List["DescriptorProto"] = betterproto.message_field(4)
    enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5)
    service: List["ServiceDescriptorProto"] = betterproto.message_field(6)
    extension: List["FieldDescriptorProto"] = betterproto.message_field(7)
    options: "FileOptions" = betterproto.message_field(8)
    # This field contains optional information about the original source
    # code.
    # You may safely remove this entire field without harming runtime
    # functionality of the descriptors -- the information is needed only by
    # development tools.
    source_code_info: "SourceCodeInfo" = betterproto.message_field(9)
    # The syntax of the proto file. The supported values are "proto2" and
    # "proto3".
    syntax: str = betterproto.string_field(12)


@dataclass(eq=False, repr=False)
class DescriptorProto(betterproto.Message):
    """Describes a message type."""

    name: str = betterproto.string_field(1)
    field: List["FieldDescriptorProto"] = betterproto.message_field(2)
    extension: List["FieldDescriptorProto"] = betterproto.message_field(6)
    nested_type: List["DescriptorProto"] = betterproto.message_field(3)
    enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4)
    extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field(
        5
    )
    oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8)
    options: "MessageOptions" = betterproto.message_field(7)
    reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9)
    # Reserved field names, which may not be used by fields in the same
    # message. A given name may only be reserved once.
    reserved_name: List[str] = betterproto.string_field(10)


@dataclass(eq=False, repr=False)
class DescriptorProtoExtensionRange(betterproto.Message):
    start: int = betterproto.int32_field(1)
    end: int = betterproto.int32_field(2)
    options: "ExtensionRangeOptions" = betterproto.message_field(3)


@dataclass(eq=False, repr=False)
class DescriptorProtoReservedRange(betterproto.Message):
    """
    Range of reserved tag numbers. Reserved tag numbers may not be used by
    fields or extension ranges in the same message. Reserved ranges may not
    overlap.
- """ - - start: int = betterproto.int32_field(1) - end: int = betterproto.int32_field(2) - - -@dataclass(eq=False, repr=False) -class ExtensionRangeOptions(betterproto.Message): - # The parser stores options it doesn't recognize here. See above. - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - - -@dataclass(eq=False, repr=False) -class FieldDescriptorProto(betterproto.Message): - """Describes a field within a message.""" - - name: str = betterproto.string_field(1) - number: int = betterproto.int32_field(3) - label: "FieldDescriptorProtoLabel" = betterproto.enum_field(4) - # If type_name is set, this need not be set. If both this and type_name are - # set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. - type: "FieldDescriptorProtoType" = betterproto.enum_field(5) - # For message and enum types, this is the name of the type. If the name - # starts with a '.', it is fully-qualified. Otherwise, C++-like scoping - # rules are used to find the type (i.e. first the nested types within this - # message are searched, then within the parent, on up to the root namespace). - type_name: str = betterproto.string_field(6) - # For extensions, this is the name of the type being extended. It is - # resolved in the same manner as type_name. - extendee: str = betterproto.string_field(2) - # For numeric types, contains the original text representation of the value. - # For booleans, "true" or "false". For strings, contains the default text - # contents (not escaped in any way). For bytes, contains the C escaped value. - # All bytes >= 128 are escaped. TODO(kenton): Base-64 encode? - default_value: str = betterproto.string_field(7) - # If set, gives the index of a oneof in the containing type's oneof_decl - # list. This field is a member of that oneof. - oneof_index: int = betterproto.int32_field(9) - # JSON name of this field. The value is set by protocol compiler. 
    # If the user has set a "json_name" option on this field, that option's
    # value will be used. Otherwise, it's deduced from the field's name by
    # converting it to camelCase.
    json_name: str = betterproto.string_field(10)
    options: "FieldOptions" = betterproto.message_field(8)


@dataclass(eq=False, repr=False)
class OneofDescriptorProto(betterproto.Message):
    """Describes a oneof."""

    name: str = betterproto.string_field(1)
    options: "OneofOptions" = betterproto.message_field(2)


@dataclass(eq=False, repr=False)
class EnumDescriptorProto(betterproto.Message):
    """Describes an enum type."""

    name: str = betterproto.string_field(1)
    value: List["EnumValueDescriptorProto"] = betterproto.message_field(2)
    options: "EnumOptions" = betterproto.message_field(3)
    # Range of reserved numeric values. Reserved numeric values may not be
    # used by enum values in the same enum declaration. Reserved ranges may
    # not overlap.
    reserved_range: List[
        "EnumDescriptorProtoEnumReservedRange"
    ] = betterproto.message_field(4)
    # Reserved enum value names, which may not be reused. A given name may
    # only be reserved once.
    reserved_name: List[str] = betterproto.string_field(5)


@dataclass(eq=False, repr=False)
class EnumDescriptorProtoEnumReservedRange(betterproto.Message):
    """
    Range of reserved numeric values. Reserved values may not be used by
    entries in the same enum. Reserved ranges may not overlap. Note that this
    is distinct from DescriptorProto.ReservedRange in that it is inclusive
    such that it can appropriately represent the entire int32 domain.
- """ - - start: int = betterproto.int32_field(1) - end: int = betterproto.int32_field(2) - - -@dataclass(eq=False, repr=False) -class EnumValueDescriptorProto(betterproto.Message): - """Describes a value within an enum.""" - - name: str = betterproto.string_field(1) - number: int = betterproto.int32_field(2) - options: "EnumValueOptions" = betterproto.message_field(3) - - -@dataclass(eq=False, repr=False) -class ServiceDescriptorProto(betterproto.Message): - """Describes a service.""" - - name: str = betterproto.string_field(1) - method: List["MethodDescriptorProto"] = betterproto.message_field(2) - options: "ServiceOptions" = betterproto.message_field(3) - - -@dataclass(eq=False, repr=False) -class MethodDescriptorProto(betterproto.Message): - """Describes a method of a service.""" - - name: str = betterproto.string_field(1) - # Input and output type names. These are resolved in the same way as - # FieldDescriptorProto.type_name, but must refer to a message type. - input_type: str = betterproto.string_field(2) - output_type: str = betterproto.string_field(3) - options: "MethodOptions" = betterproto.message_field(4) - # Identifies if client streams multiple client messages - client_streaming: bool = betterproto.bool_field(5) - # Identifies if server streams multiple server messages - server_streaming: bool = betterproto.bool_field(6) - - -@dataclass(eq=False, repr=False) -class FileOptions(betterproto.Message): - # Sets the Java package where classes generated from this .proto will be - # placed. By default, the proto package is used, but this is often - # inappropriate because proto packages do not normally start with backwards - # domain names. - java_package: str = betterproto.string_field(1) - # If set, all the classes from the .proto file are wrapped in a single outer - # class with the given name. 
    # This applies to both Proto1 (equivalent to the old "--one_java_file"
    # option) and Proto2 (where a .proto always translates to a single class,
    # but you may want to explicitly choose the class name).
    java_outer_classname: str = betterproto.string_field(8)
    # If set true, then the Java code generator will generate a separate
    # .java file for each top-level message, enum, and service defined in the
    # .proto file. Thus, these types will *not* be nested inside the outer
    # class named by java_outer_classname. However, the outer class will
    # still be generated to contain the file's getDescriptor() method as well
    # as any top-level extensions defined in the file.
    java_multiple_files: bool = betterproto.bool_field(10)
    # This option does nothing.
    java_generate_equals_and_hash: bool = betterproto.bool_field(20)
    # If set true, then the Java2 code generator will generate code that
    # throws an exception whenever an attempt is made to assign a non-UTF-8
    # byte sequence to a string field. Message reflection will do the same.
    # However, an extension field still accepts non-UTF-8 byte sequences.
    # This option has no effect on when used with the lite runtime.
    java_string_check_utf8: bool = betterproto.bool_field(27)
    optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field(9)
    # Sets the Go package where structs generated from this .proto will be
    # placed. If omitted, the Go package will be derived from the following:
    # - The basename of the package import path, if provided. - Otherwise,
    # the package statement in the .proto file, if present. - Otherwise, the
    # basename of the .proto file, without extension.
    go_package: str = betterproto.string_field(11)
    # Should generic services be generated in each language? "Generic"
    # services are not specific to any particular RPC system. They are
    # generated by the main code generators in each language (without
    # additional plugins).
Generic - # services were the only kind of service generation supported by early - # versions of google.protobuf. Generic services are now considered deprecated - # in favor of using plugins that generate code specific to your particular - # RPC system. Therefore, these default to false. Old code which depends on - # generic services should explicitly set them to true. - cc_generic_services: bool = betterproto.bool_field(16) - java_generic_services: bool = betterproto.bool_field(17) - py_generic_services: bool = betterproto.bool_field(18) - php_generic_services: bool = betterproto.bool_field(42) - # Is this file deprecated? Depending on the target platform, this can emit - # Deprecated annotations for everything in the file, or it will be completely - # ignored; in the very least, this is a formalization for deprecating files. - deprecated: bool = betterproto.bool_field(23) - # Enables the use of arenas for the proto messages in this file. This applies - # only to generated classes for C++. - cc_enable_arenas: bool = betterproto.bool_field(31) - # Sets the objective c class prefix which is prepended to all objective c - # generated classes from this .proto. There is no default. - objc_class_prefix: str = betterproto.string_field(36) - # Namespace for generated classes; defaults to the package. - csharp_namespace: str = betterproto.string_field(37) - # By default Swift generators will take the proto package and CamelCase it - # replacing '.' with underscore and use that to prefix the types/symbols - # defined. When this options is provided, they will use this value instead to - # prefix the types/symbols defined. - swift_prefix: str = betterproto.string_field(39) - # Sets the php class prefix which is prepended to all php generated classes - # from this .proto. Default is empty. - php_class_prefix: str = betterproto.string_field(40) - # Use this option to change the namespace of php generated classes. Default - # is empty. 
    # When this option is empty, the package name will be used for
    # determining the namespace.
    php_namespace: str = betterproto.string_field(41)
    # Use this option to change the namespace of php generated metadata
    # classes. Default is empty. When this option is empty, the proto file
    # name will be used for determining the namespace.
    php_metadata_namespace: str = betterproto.string_field(44)
    # Use this option to change the package of ruby generated classes.
    # Default is empty. When this option is not set, the package name will be
    # used for determining the ruby package.
    ruby_package: str = betterproto.string_field(45)
    # The parser stores options it doesn't recognize here. See the
    # documentation for the "Options" section above.
    uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999)

    def __post_init__(self) -> None:
        # Emit a DeprecationWarning when the obsolete
        # java_generate_equals_and_hash option (a no-op, kept only for wire
        # compatibility) is set on a decoded message.
        super().__post_init__()
        if self.java_generate_equals_and_hash:
            warnings.warn(
                "FileOptions.java_generate_equals_and_hash is deprecated",
                DeprecationWarning,
            )


@dataclass(eq=False, repr=False)
class MessageOptions(betterproto.Message):
    # Set true to use the old proto1 MessageSet wire format for extensions.
    # This is provided for backwards-compatibility with the MessageSet wire
    # format. You should not use this for any other reason: It's less
    # efficient, has fewer features, and is more complicated. The message
    # must be defined exactly as follows: message Foo { option
    # message_set_wire_format = true; extensions 4 to max; } Note that the
    # message cannot have any defined fields; MessageSets only have
    # extensions. All extensions of your type must be singular messages; e.g.
    # they cannot be int32s, enums, or repeated messages. Because this is an
    # option, the above two restrictions are not enforced by the protocol
    # compiler.
    message_set_wire_format: bool = betterproto.bool_field(1)
    # Disables the generation of the standard "descriptor()" accessor, which
    # can conflict with a field of the same name. This is meant to make
    # migration from proto1 easier; new code should avoid fields named
    # "descriptor".
    no_standard_descriptor_accessor: bool = betterproto.bool_field(2)
    # Is this message deprecated? Depending on the target platform, this can
    # emit Deprecated annotations for the message, or it will be completely
    # ignored; in the very least, this is a formalization for deprecating
    # messages.
    deprecated: bool = betterproto.bool_field(3)
    # Whether the message is an automatically generated map entry type for
    # the maps field. For maps fields: map map_field = 1; The parsed
    # descriptor looks like: message MapFieldEntry { option map_entry = true;
    # optional KeyType key = 1; optional ValueType value = 2; } repeated
    # MapFieldEntry map_field = 1; Implementations may choose not to generate
    # the map_entry=true message, but use a native map in the target language
    # to hold the keys and values. The reflection APIs in such
    # implementations still need to work as if the field is a repeated
    # message field. NOTE: Do not set the option in .proto files. Always use
    # the maps syntax instead. The option should only be implicitly set by
    # the proto compiler parser.
    map_entry: bool = betterproto.bool_field(7)
    # The parser stores options it doesn't recognize here. See above.
    uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999)


@dataclass(eq=False, repr=False)
class FieldOptions(betterproto.Message):
    # The ctype option instructs the C++ code generator to use a different
    # representation of the field than it normally would. See the specific
    # options below. This option is not yet implemented in the open source
    # release -- sorry, we'll try to include it in a future version!
- ctype: "FieldOptionsCType" = betterproto.enum_field(1) - # The packed option can be enabled for repeated primitive fields to enable a - # more efficient representation on the wire. Rather than repeatedly writing - # the tag and type for each element, the entire array is encoded as a single - # length-delimited blob. In proto3, only explicit setting it to false will - # avoid using packed encoding. - packed: bool = betterproto.bool_field(2) - # The jstype option determines the JavaScript type used for values of the - # field. The option is permitted only for 64 bit integral and fixed types - # (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING - # is represented as JavaScript string, which avoids loss of precision that - # can happen when a large value is converted to a floating point JavaScript. - # Specifying JS_NUMBER for the jstype causes the generated JavaScript code to - # use the JavaScript "number" type. The behavior of the default option - # JS_NORMAL is implementation dependent. This option is an enum to permit - # additional types to be added, e.g. goog.math.Integer. - jstype: "FieldOptionsJsType" = betterproto.enum_field(6) - # Should this field be parsed lazily? Lazy applies only to message-type - # fields. It means that when the outer message is initially parsed, the - # inner message's contents will not be parsed but instead stored in encoded - # form. The inner message will actually be parsed when it is first accessed. - # This is only a hint. Implementations are free to choose whether to use - # eager or lazy parsing regardless of the value of this option. However, - # setting this option true suggests that the protocol author believes that - # using lazy parsing on this field is worth the additional bookkeeping - # overhead typically needed to implement it. This option does not affect the - # public interface of any generated code; all method signatures remain the - # same. 
    # Furthermore, thread-safety of the interface is not affected by this
    # option; const methods remain safe to call from multiple threads
    # concurrently, while non-const methods continue to require exclusive
    # access. Note that implementations may choose not to check required
    # fields within a lazy sub-message. That is, calling IsInitialized() on
    # the outer message may return true even if the inner message has missing
    # required fields. This is necessary because otherwise the inner message
    # would have to be parsed in order to perform the check, defeating the
    # purpose of lazy parsing. An implementation which chooses not to check
    # required fields must be consistent about it. That is, for any
    # particular sub-message, the implementation must either *always* check
    # its required fields, or *never* check its required fields, regardless
    # of whether or not the message has been parsed.
    lazy: bool = betterproto.bool_field(5)
    # Is this field deprecated? Depending on the target platform, this can
    # emit Deprecated annotations for accessors, or it will be completely
    # ignored; in the very least, this is a formalization for deprecating
    # fields.
    deprecated: bool = betterproto.bool_field(3)
    # For Google-internal migration only. Do not use.
    weak: bool = betterproto.bool_field(10)
    # The parser stores options it doesn't recognize here. See above.
    uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999)


@dataclass(eq=False, repr=False)
class OneofOptions(betterproto.Message):
    # The parser stores options it doesn't recognize here. See above.
    uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999)


@dataclass(eq=False, repr=False)
class EnumOptions(betterproto.Message):
    # Set this option to true to allow mapping different tag names to the
    # same value.
    allow_alias: bool = betterproto.bool_field(2)
    # Is this enum deprecated?
Depending on the target platform, this can emit - # Deprecated annotations for the enum, or it will be completely ignored; in - # the very least, this is a formalization for deprecating enums. - deprecated: bool = betterproto.bool_field(3) - # The parser stores options it doesn't recognize here. See above. - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - - -@dataclass(eq=False, repr=False) -class EnumValueOptions(betterproto.Message): - # Is this enum value deprecated? Depending on the target platform, this can - # emit Deprecated annotations for the enum value, or it will be completely - # ignored; in the very least, this is a formalization for deprecating enum - # values. - deprecated: bool = betterproto.bool_field(1) - # The parser stores options it doesn't recognize here. See above. - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - - -@dataclass(eq=False, repr=False) -class ServiceOptions(betterproto.Message): - # Is this service deprecated? Depending on the target platform, this can emit - # Deprecated annotations for the service, or it will be completely ignored; - # in the very least, this is a formalization for deprecating services. - deprecated: bool = betterproto.bool_field(33) - # The parser stores options it doesn't recognize here. See above. - uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - - -@dataclass(eq=False, repr=False) -class MethodOptions(betterproto.Message): - # Is this method deprecated? Depending on the target platform, this can emit - # Deprecated annotations for the method, or it will be completely ignored; in - # the very least, this is a formalization for deprecating methods. - deprecated: bool = betterproto.bool_field(33) - idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field(34) - # The parser stores options it doesn't recognize here. See above. 
- uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) - - -@dataclass(eq=False, repr=False) -class UninterpretedOption(betterproto.Message): - """ - A message representing a option the parser does not recognize. This only - appears in options protos created by the compiler::Parser class. - DescriptorPool resolves these when building Descriptor objects. Therefore, - options protos in descriptor objects (e.g. returned by - Descriptor::options(), or produced by Descriptor::CopyTo()) will never have - UninterpretedOptions in them. - """ - - name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2) - # The value of the uninterpreted option, in whatever type the tokenizer - # identified it as during parsing. Exactly one of these should be set. - identifier_value: str = betterproto.string_field(3) - positive_int_value: int = betterproto.uint64_field(4) - negative_int_value: int = betterproto.int64_field(5) - double_value: float = betterproto.double_field(6) - string_value: bytes = betterproto.bytes_field(7) - aggregate_value: str = betterproto.string_field(8) - - -@dataclass(eq=False, repr=False) -class UninterpretedOptionNamePart(betterproto.Message): - """ - The name of the uninterpreted option. Each string represents a segment in - a dot-separated name. is_extension is true iff a segment represents an - extension (denoted with parentheses in options specs in .proto files). - E.g.,{ ["foo", false], ["bar.baz", true], ["qux", false] } represents - "foo.(bar.baz).qux". - """ - - name_part: str = betterproto.string_field(1) - is_extension: bool = betterproto.bool_field(2) - - -@dataclass(eq=False, repr=False) -class SourceCodeInfo(betterproto.Message): - """ - Encapsulates information about the original source file from which a - FileDescriptorProto was generated. - """ - - # A Location identifies a piece of source code in a .proto file which - # corresponds to a particular definition. 
This information is intended to be - # useful to IDEs, code indexers, documentation generators, and similar tools. - # For example, say we have a file like: message Foo { optional string - # foo = 1; } Let's look at just the field definition: optional string foo - # = 1; ^ ^^ ^^ ^ ^^^ a bc de f ghi We have the - # following locations: span path represents [a,i) [ 4, - # 0, 2, 0 ] The whole field definition. [a,b) [ 4, 0, 2, 0, 4 ] The - # label (optional). [c,d) [ 4, 0, 2, 0, 5 ] The type (string). [e,f) [ - # 4, 0, 2, 0, 1 ] The name (foo). [g,h) [ 4, 0, 2, 0, 3 ] The number - # (1). Notes: - A location may refer to a repeated field itself (i.e. not to - # any particular index within it). This is used whenever a set of elements - # are logically enclosed in a single code segment. For example, an entire - # extend block (possibly containing multiple extension definitions) will - # have an outer location whose path refers to the "extensions" repeated - # field without an index. - Multiple locations may have the same path. This - # happens when a single logical declaration is spread out across multiple - # places. The most obvious example is the "extend" block again -- there - # may be multiple extend blocks in the same scope, each of which will have - # the same path. - A location's span is not always a subset of its parent's - # span. For example, the "extendee" of an extension declaration appears at - # the beginning of the "extend" block and is shared by all extensions - # within the block. - Just because a location's span is a subset of some - # other location's span does not mean that it is a descendant. For - # example, a "group" defines both a type and a field in a single - # declaration. Thus, the locations corresponding to the type and field and - # their components will overlap. 
- Code which tries to interpret locations - # should probably be designed to ignore those that it doesn't understand, - # as more types of locations could be recorded in the future. - location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) - - -@dataclass(eq=False, repr=False) -class SourceCodeInfoLocation(betterproto.Message): - # Identifies which part of the FileDescriptorProto was defined at this - # location. Each element is a field number or an index. They form a path - # from the root FileDescriptorProto to the place where the definition. For - # example, this path: [ 4, 3, 2, 7, 1 ] refers to: file.message_type(3) - # // 4, 3 .field(7) // 2, 7 .name() // 1 This - # is because FileDescriptorProto.message_type has field number 4: repeated - # DescriptorProto message_type = 4; and DescriptorProto.field has field - # number 2: repeated FieldDescriptorProto field = 2; and - # FieldDescriptorProto.name has field number 1: optional string name = 1; - # Thus, the above path gives the location of a field name. If we removed the - # last element: [ 4, 3, 2, 7 ] this path refers to the whole field - # declaration (from the beginning of the label to the terminating semicolon). - path: List[int] = betterproto.int32_field(1) - # Always has exactly three or four elements: start line, start column, end - # line (optional, otherwise assumed same as start line), end column. These - # are packed into a single field for efficiency. Note that line and column - # numbers are zero-based -- typically you will want to add 1 to each before - # displaying to a user. - span: List[int] = betterproto.int32_field(2) - # If this SourceCodeInfo represents a complete declaration, these are any - # comments appearing before and after the declaration which appear to be - # attached to the declaration. A series of line comments appearing on - # consecutive lines, with no other tokens appearing on those lines, will be - # treated as a single comment. 
leading_detached_comments will keep paragraphs - # of comments that appear before (but not connected to) the current element. - # Each paragraph, separated by empty lines, will be one comment element in - # the repeated field. Only the comment content is provided; comment markers - # (e.g. //) are stripped out. For block comments, leading whitespace and an - # asterisk will be stripped from the beginning of each line other than the - # first. Newlines are included in the output. Examples: optional int32 foo - # = 1; // Comment attached to foo. // Comment attached to bar. optional - # int32 bar = 2; optional string baz = 3; // Comment attached to baz. - # // Another line attached to baz. // Comment attached to qux. // // - # Another line attached to qux. optional double qux = 4; // Detached - # comment for corge. This is not leading or trailing comments // to qux or - # corge because there are blank lines separating it from // both. // - # Detached comment for corge paragraph 2. optional string corge = 5; /* - # Block comment attached * to corge. Leading asterisks * will be - # removed. */ /* Block comment attached to * grault. */ optional int32 - # grault = 6; // ignored detached comments. - leading_comments: str = betterproto.string_field(3) - trailing_comments: str = betterproto.string_field(4) - leading_detached_comments: List[str] = betterproto.string_field(6) - - -@dataclass(eq=False, repr=False) -class GeneratedCodeInfo(betterproto.Message): - """ - Describes the relationship between generated code and its original source - file. A GeneratedCodeInfo message is associated with only one generated - source file, but may contain references to different source .proto files. - """ - - # An Annotation connects some span of text in generated code to an element of - # its generating .proto file. 
- annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) - - -@dataclass(eq=False, repr=False) -class GeneratedCodeInfoAnnotation(betterproto.Message): - # Identifies the element in the original source .proto file. This field is - # formatted the same as SourceCodeInfo.Location.path. - path: List[int] = betterproto.int32_field(1) - # Identifies the filesystem path to the original source .proto. - source_file: str = betterproto.string_field(2) - # Identifies the starting offset in bytes in the generated code that relates - # to the identified object. - begin: int = betterproto.int32_field(3) - # Identifies the ending offset in bytes in the generated code that relates to - # the identified offset. The end offset should be one past the last relevant - # byte (so the length of the text = end - begin). - end: int = betterproto.int32_field(4) +from betterproto.lib.std.google.protobuf import * diff --git a/src/betterproto/lib/google/protobuf/compiler/__init__.py b/src/betterproto/lib/google/protobuf/compiler/__init__.py index e5c8b60f6..59bf56f17 100644 --- a/src/betterproto/lib/google/protobuf/compiler/__init__.py +++ b/src/betterproto/lib/google/protobuf/compiler/__init__.py @@ -1,113 +1 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: google/protobuf/compiler/plugin.proto -# plugin: python-betterproto -from dataclasses import dataclass -from typing import List - -import betterproto - - -@dataclass(eq=False, repr=False) -class Version(betterproto.Message): - """The version number of protocol compiler.""" - - major: int = betterproto.int32_field(1) - minor: int = betterproto.int32_field(2) - patch: int = betterproto.int32_field(3) - # A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should - # be empty for mainline stable releases. 
- suffix: str = betterproto.string_field(4) - - -@dataclass(eq=False, repr=False) -class CodeGeneratorRequest(betterproto.Message): - """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" - - # The .proto files that were explicitly listed on the command-line. The code - # generator should generate code only for these files. Each file's - # descriptor will be included in proto_file, below. - file_to_generate: List[str] = betterproto.string_field(1) - # The generator parameter passed on the command-line. - parameter: str = betterproto.string_field(2) - # FileDescriptorProtos for all files in files_to_generate and everything they - # import. The files will appear in topological order, so each file appears - # before any file that imports it. protoc guarantees that all proto_files - # will be written after the fields above, even though this is not technically - # guaranteed by the protobuf wire format. This theoretically could allow a - # plugin to stream in the FileDescriptorProtos and handle them one by one - # rather than read the entire set into memory at once. However, as of this - # writing, this is not similarly optimized on protoc's end -- it will store - # all fields in memory at once before sending them to the plugin. Type names - # of fields and extensions in the FileDescriptorProto are always fully - # qualified. - proto_file: List[ - "betterproto_lib_google_protobuf.FileDescriptorProto" - ] = betterproto.message_field(15) - # The version number of protocol compiler. - compiler_version: "Version" = betterproto.message_field(3) - - -@dataclass(eq=False, repr=False) -class CodeGeneratorResponse(betterproto.Message): - """The plugin writes an encoded CodeGeneratorResponse to stdout.""" - - # Error message. If non-empty, code generation failed. The plugin process - # should exit with status code zero even if it reports an error in this way. 
- # This should be used to indicate errors in .proto files which prevent the - # code generator from generating correct code. Errors which indicate a - # problem in protoc itself -- such as the input CodeGeneratorRequest being - # unparseable -- should be reported by writing a message to stderr and - # exiting with a non-zero status code. - error: str = betterproto.string_field(1) - file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) - - -@dataclass(eq=False, repr=False) -class CodeGeneratorResponseFile(betterproto.Message): - """Represents a single generated file.""" - - # The file name, relative to the output directory. The name must not contain - # "." or ".." components and must be relative, not be absolute (so, the file - # cannot lie outside the output directory). "/" must be used as the path - # separator, not "\". If the name is omitted, the content will be appended to - # the previous file. This allows the generator to break large files into - # small chunks, and allows the generated text to be streamed back to protoc - # so that large files need not reside completely in memory at one time. Note - # that as of this writing protoc does not optimize for this -- it will read - # the entire CodeGeneratorResponse before writing files to disk. - name: str = betterproto.string_field(1) - # If non-empty, indicates that the named file should already exist, and the - # content here is to be inserted into that file at a defined insertion point. - # This feature allows a code generator to extend the output produced by - # another code generator. The original generator may provide insertion - # points by placing special annotations in the file that look like: - # @@protoc_insertion_point(NAME) The annotation can have arbitrary text - # before and after it on the line, which allows it to be placed in a comment. - # NAME should be replaced with an identifier naming the point -- this is what - # other generators will use as the insertion_point. 
Code inserted at this - # point will be placed immediately above the line containing the insertion - # point (thus multiple insertions to the same point will come out in the - # order they were added). The double-@ is intended to make it unlikely that - # the generated code could contain things that look like insertion points by - # accident. For example, the C++ code generator places the following line in - # the .pb.h files that it generates: // - # @@protoc_insertion_point(namespace_scope) This line appears within the - # scope of the file's package namespace, but outside of any particular class. - # Another plugin can then specify the insertion_point "namespace_scope" to - # generate additional classes or other declarations that should be placed in - # this scope. Note that if the line containing the insertion point begins - # with whitespace, the same whitespace will be added to every line of the - # inserted text. This is useful for languages like Python, where indentation - # matters. In these languages, the insertion point comment should be - # indented the same amount as any inserted code will need to be in order to - # work correctly in that context. The code generator that generates the - # initial file and the one which inserts into it must both run as part of a - # single invocation of protoc. Code generators are executed in the order in - # which they appear on the command line. If |insertion_point| is present, - # |name| must also be present. - insertion_point: str = betterproto.string_field(2) - # The file contents. 
- content: str = betterproto.string_field(15) - - -import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf +from betterproto.lib.std.google.protobuf.compiler import * diff --git a/src/betterproto/lib/pydantic/__init__.py b/src/betterproto/lib/pydantic/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/betterproto/lib/pydantic/google/__init__.py b/src/betterproto/lib/pydantic/google/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/betterproto/lib/pydantic/google/protobuf/__init__.py b/src/betterproto/lib/pydantic/google/protobuf/__init__.py new file mode 100644 index 000000000..d147decea --- /dev/null +++ b/src/betterproto/lib/pydantic/google/protobuf/__init__.py @@ -0,0 +1,2673 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto +# plugin: python-betterproto +# This file has been @generated +import warnings +from typing import ( + TYPE_CHECKING, + Mapping, +) + +from typing_extensions import Self + +from betterproto import hybridmethod + + +if TYPE_CHECKING: + from dataclasses import dataclass +else: + from pydantic.dataclasses import dataclass + +from typing import ( + Dict, + List, + Optional, +) + +from pydantic import model_validator +from pydantic.dataclasses import rebuild_dataclass + +import betterproto + + +class Syntax(betterproto.Enum): + """The syntax in which a protocol buffer element is defined.""" + + PROTO2 = 0 + """Syntax `proto2`.""" + + PROTO3 = 1 + """Syntax `proto3`.""" + + EDITIONS = 2 + """Syntax `editions`.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core 
import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldKind(betterproto.Enum): + """Basic field types.""" + + TYPE_UNKNOWN = 0 + """Field type unknown.""" + + TYPE_DOUBLE = 1 + """Field type double.""" + + TYPE_FLOAT = 2 + """Field type float.""" + + TYPE_INT64 = 3 + """Field type int64.""" + + TYPE_UINT64 = 4 + """Field type uint64.""" + + TYPE_INT32 = 5 + """Field type int32.""" + + TYPE_FIXED64 = 6 + """Field type fixed64.""" + + TYPE_FIXED32 = 7 + """Field type fixed32.""" + + TYPE_BOOL = 8 + """Field type bool.""" + + TYPE_STRING = 9 + """Field type string.""" + + TYPE_GROUP = 10 + """Field type group. Proto2 syntax only, and deprecated.""" + + TYPE_MESSAGE = 11 + """Field type message.""" + + TYPE_BYTES = 12 + """Field type bytes.""" + + TYPE_UINT32 = 13 + """Field type uint32.""" + + TYPE_ENUM = 14 + """Field type enum.""" + + TYPE_SFIXED32 = 15 + """Field type sfixed32.""" + + TYPE_SFIXED64 = 16 + """Field type sfixed64.""" + + TYPE_SINT32 = 17 + """Field type sint32.""" + + TYPE_SINT64 = 18 + """Field type sint64.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldCardinality(betterproto.Enum): + """Whether a field is optional, required, or repeated.""" + + CARDINALITY_UNKNOWN = 0 + """For fields with unknown cardinality.""" + + CARDINALITY_OPTIONAL = 1 + """For optional fields.""" + + CARDINALITY_REQUIRED = 2 + """For required fields. Proto2 syntax only.""" + + CARDINALITY_REPEATED = 3 + """For repeated fields.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class Edition(betterproto.Enum): + """The full set of known editions.""" + + UNKNOWN = 0 + """A placeholder for an unknown edition value.""" + + PROTO2 = 998 + """ + Legacy syntax "editions". 
These pre-date editions, but behave much like + distinct editions. These can't be used to specify the edition of proto + files, but feature definitions must supply proto2/proto3 defaults for + backwards compatibility. + """ + + PROTO3 = 999 + _2023 = 1000 + """ + Editions that have been released. The specific values are arbitrary and + should not be depended on, but they will always be time-ordered for easy + comparison. + """ + + _2024 = 1001 + _1_TEST_ONLY = 1 + """ + Placeholder editions for testing feature resolution. These should not be + used or relyed on outside of tests. + """ + + _2_TEST_ONLY = 2 + _99997_TEST_ONLY = 99997 + _99998_TEST_ONLY = 99998 + _99999_TEST_ONLY = 99999 + MAX = 2147483647 + """ + Placeholder for specifying unbounded edition support. This should only + ever be used by plugins that can expect to never require any changes to + support a new edition. + """ + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class ExtensionRangeOptionsVerificationState(betterproto.Enum): + """The verification state of the extension range.""" + + DECLARATION = 0 + """All the extensions of the range must be declared.""" + + UNVERIFIED = 1 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldDescriptorProtoType(betterproto.Enum): + TYPE_DOUBLE = 1 + """ + 0 is reserved for errors. + Order is weird for historical reasons. + """ + + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + """ + Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + """ + Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. 
+ """ + + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + """ + Tag-delimited aggregate. + Group type is deprecated and not supported after google.protobuf. However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. In Editions, the group wire format + can be enabled via the `message_encoding` feature. + """ + + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + """New in version 2.""" + + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldDescriptorProtoLabel(betterproto.Enum): + LABEL_OPTIONAL = 1 + """0 is reserved for errors""" + + LABEL_REPEATED = 3 + LABEL_REQUIRED = 2 + """ + The required label is only allowed in google.protobuf. In proto3 and Editions + it's explicitly prohibited. In Editions, the `field_presence` feature + can be used to get this behavior. + """ + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FileOptionsOptimizeMode(betterproto.Enum): + """Generated classes can be optimized for speed or code size.""" + + SPEED = 1 + CODE_SIZE = 2 + """etc.""" + + LITE_RUNTIME = 3 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldOptionsCType(betterproto.Enum): + STRING = 0 + """Default mode.""" + + CORD = 1 + """ + The option [ctype=CORD] may be applied to a non-repeated field of type + "bytes". It indicates that in C++, the data should be stored in a Cord + instead of a string. For very large strings, this may reduce memory + fragmentation. 
It may also allow better performance when parsing from a + Cord, or when parsing with aliasing enabled, as the parsed Cord may then + alias the original buffer. + """ + + STRING_PIECE = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldOptionsJsType(betterproto.Enum): + JS_NORMAL = 0 + """Use the default type.""" + + JS_STRING = 1 + """Use JavaScript strings.""" + + JS_NUMBER = 2 + """Use JavaScript numbers.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldOptionsOptionRetention(betterproto.Enum): + """ + If set to RETENTION_SOURCE, the option will be omitted from the binary. + Note: as of January 2023, support for this is in progress and does not yet + have an effect (b/264593489). + """ + + RETENTION_UNKNOWN = 0 + RETENTION_RUNTIME = 1 + RETENTION_SOURCE = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FieldOptionsOptionTargetType(betterproto.Enum): + """ + This indicates the types of entities that the field may apply to when used + as an option. If it is unset, then the field may be freely used as an + option on any kind of entity. Note: as of January 2023, support for this is + in progress and does not yet have an effect (b/264593489). 
+ """ + + TARGET_TYPE_UNKNOWN = 0 + TARGET_TYPE_FILE = 1 + TARGET_TYPE_EXTENSION_RANGE = 2 + TARGET_TYPE_MESSAGE = 3 + TARGET_TYPE_FIELD = 4 + TARGET_TYPE_ONEOF = 5 + TARGET_TYPE_ENUM = 6 + TARGET_TYPE_ENUM_ENTRY = 7 + TARGET_TYPE_SERVICE = 8 + TARGET_TYPE_METHOD = 9 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class MethodOptionsIdempotencyLevel(betterproto.Enum): + """ + Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + or neither? HTTP based RPC implementation may choose GET verb for safe + methods, and PUT verb for idempotent methods instead of the default POST. + """ + + IDEMPOTENCY_UNKNOWN = 0 + NO_SIDE_EFFECTS = 1 + IDEMPOTENT = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetFieldPresence(betterproto.Enum): + FIELD_PRESENCE_UNKNOWN = 0 + EXPLICIT = 1 + IMPLICIT = 2 + LEGACY_REQUIRED = 3 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetEnumType(betterproto.Enum): + ENUM_TYPE_UNKNOWN = 0 + OPEN = 1 + CLOSED = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetRepeatedFieldEncoding(betterproto.Enum): + REPEATED_FIELD_ENCODING_UNKNOWN = 0 + PACKED = 1 + EXPANDED = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetUtf8Validation(betterproto.Enum): + UTF8_VALIDATION_UNKNOWN = 0 + VERIFY = 2 + NONE = 3 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + 
from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetMessageEncoding(betterproto.Enum): + MESSAGE_ENCODING_UNKNOWN = 0 + LENGTH_PREFIXED = 1 + DELIMITED = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class FeatureSetJsonFormat(betterproto.Enum): + JSON_FORMAT_UNKNOWN = 0 + ALLOW = 1 + LEGACY_BEST_EFFORT = 2 + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class GeneratedCodeInfoAnnotationSemantic(betterproto.Enum): + """ + Represents the identified object's effect on the element in the original + .proto file. + """ + + NONE = 0 + """There is no effect or the effect is indescribable.""" + + SET = 1 + """The element is set or otherwise mutated.""" + + ALIAS = 2 + """An alias to the element is returned.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +class NullValue(betterproto.Enum): + """ + `NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. + + The JSON representation for `NullValue` is JSON `null`. + """ + + _ = 0 + """Null value.""" + + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + + +@dataclass(eq=False, repr=False) +class Any(betterproto.Message): + """ + `Any` contains an arbitrary serialized protocol buffer message along with a + URL that describes the type of the serialized message. + + Protobuf library provides support to pack/unpack Any values in the form + of utility functions or additional generated methods of the Any type. + + Example 1: Pack and unpack a message in C++. 
+ + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by default use + 'type.googleapis.com/full.type.name' as the type URL and the unpack + methods only use the fully qualified type name after the last '/' + in the type URL, for example "foo.bar.com/x/y.z" will yield type + name "y.z". + + JSON + ==== + The JSON representation of an `Any` value uses the regular + representation of the deserialized, embedded message, with an + additional field `@type` which contains the type URL. Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom JSON + representation, that representation will be embedded adding a field + `value` which holds the custom JSON in addition to the `@type` + field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + """ + + type_url: str = betterproto.string_field(1) + """ + A URL/resource name that uniquely identifies the type of the serialized + protocol buffer message. 
This string must contain at least + one "/" character. The last segment of the URL's path must represent + the fully qualified name of the type (as in + `path/google.protobuf.Duration`). The name should be in a canonical form + (e.g., leading "." is not accepted). + + In practice, teams usually precompile into the binary all types that they + expect it to use in the context of Any. However, for URLs which use the + scheme `http`, `https`, or no scheme, one can optionally set up a type + server that maps type URLs to message definitions as follows: + + * If no scheme is provided, `https` is assumed. + * An HTTP GET on the URL must yield a [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in the official + protobuf release, and it is not used for type URLs beginning with + type.googleapis.com. As of May 2023, there are no widely used type server + implementations and no plans to implement one. + + Schemes other than `http`, `https` (or the empty scheme) might be + used with implementation specific semantics. + """ + + value: bytes = betterproto.bytes_field(2) + """ + Must be a valid serialized protocol buffer of the above specified type. + """ + + +@dataclass(eq=False, repr=False) +class SourceContext(betterproto.Message): + """ + `SourceContext` represents information about the source of a + protobuf element, like the file in which it is defined. + """ + + file_name: str = betterproto.string_field(1) + """ + The path-qualified name of the .proto file that contained the associated + protobuf element. For example: `"google/protobuf/source_context.proto"`. 
+ """ + + +@dataclass(eq=False, repr=False) +class Type(betterproto.Message): + """A protocol buffer message type.""" + + name: str = betterproto.string_field(1) + """The fully qualified message name.""" + + fields: List["Field"] = betterproto.message_field(2) + """The list of fields.""" + + oneofs: List[str] = betterproto.string_field(3) + """The list of types appearing in `oneof` definitions in this type.""" + + options: List["Option"] = betterproto.message_field(4) + """The protocol buffer options.""" + + source_context: "SourceContext" = betterproto.message_field(5) + """The source context.""" + + syntax: "Syntax" = betterproto.enum_field(6) + """The source syntax.""" + + edition: str = betterproto.string_field(7) + """ + The source edition string, only valid when syntax is SYNTAX_EDITIONS. + """ + + +@dataclass(eq=False, repr=False) +class Field(betterproto.Message): + """A single field of a message type.""" + + kind: "FieldKind" = betterproto.enum_field(1) + """The field type.""" + + cardinality: "FieldCardinality" = betterproto.enum_field(2) + """The field cardinality.""" + + number: int = betterproto.int32_field(3) + """The field number.""" + + name: str = betterproto.string_field(4) + """The field name.""" + + type_url: str = betterproto.string_field(6) + """ + The field type URL, without the scheme, for message or enumeration + types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + """ + + oneof_index: int = betterproto.int32_field(7) + """ + The index of the field type in `Type.oneofs`, for message or enumeration + types. The first type has index 1; zero means the type is not in the list. 
+ """ + + packed: bool = betterproto.bool_field(8) + """Whether to use alternative packed wire representation.""" + + options: List["Option"] = betterproto.message_field(9) + """The protocol buffer options.""" + + json_name: str = betterproto.string_field(10) + """The field JSON name.""" + + default_value: str = betterproto.string_field(11) + """ + The string value of the default value of this field. Proto2 syntax only. + """ + + +@dataclass(eq=False, repr=False) +class Enum(betterproto.Message): + """Enum type definition.""" + + name: str = betterproto.string_field(1) + """Enum type name.""" + + enumvalue: List["EnumValue"] = betterproto.message_field( + 2, wraps=betterproto.TYPE_ENUM + ) + """Enum value definitions.""" + + options: List["Option"] = betterproto.message_field(3) + """Protocol buffer options.""" + + source_context: "SourceContext" = betterproto.message_field(4) + """The source context.""" + + syntax: "Syntax" = betterproto.enum_field(5) + """The source syntax.""" + + edition: str = betterproto.string_field(6) + """ + The source edition string, only valid when syntax is SYNTAX_EDITIONS. + """ + + +@dataclass(eq=False, repr=False) +class EnumValue(betterproto.Message): + """Enum value definition.""" + + name: str = betterproto.string_field(1) + """Enum value name.""" + + number: int = betterproto.int32_field(2) + """Enum value number.""" + + options: List["Option"] = betterproto.message_field(3) + """Protocol buffer options.""" + + +@dataclass(eq=False, repr=False) +class Option(betterproto.Message): + """ + A protocol buffer option, which can be attached to a message, field, + enumeration, etc. + """ + + name: str = betterproto.string_field(1) + """ + The option's name. For protobuf built-in options (options defined in + descriptor.proto), this is the short name. For example, `"map_entry"`. + For custom options, it should be the fully-qualified name. For example, + `"google.api.http"`. 
+ """ + + value: "Any" = betterproto.message_field(2) + """ + The option's value packed in an Any message. If the value is a primitive, + the corresponding wrapper type defined in google/protobuf/wrappers.proto + should be used. If the value is an enum, it should be stored as an int32 + value using the google.protobuf.Int32Value type. + """ + + +@dataclass(eq=False, repr=False) +class Api(betterproto.Message): + """ + Api is a light-weight descriptor for an API Interface. + + Interfaces are also described as "protocol buffer services" in some contexts, + such as by the "service" keyword in a .proto file, but they are different + from API Services, which represent a concrete implementation of an interface + as opposed to simply a description of methods and bindings. They are also + sometimes simply referred to as "APIs" in other contexts, such as the name of + this message itself. See https://cloud.google.com/apis/design/glossary for + detailed terminology. + """ + + name: str = betterproto.string_field(1) + """ + The fully qualified name of this interface, including package name + followed by the interface's simple name. + """ + + methods: List["Method"] = betterproto.message_field(2) + """The methods of this interface, in unspecified order.""" + + options: List["Option"] = betterproto.message_field(3) + """Any metadata attached to the interface.""" + + version: str = betterproto.string_field(4) + """ + A version string for this interface. If specified, must have the form + `major-version.minor-version`, as in `1.10`. If the minor version is + omitted, it defaults to zero. If the entire version field is empty, the + major version is derived from the package name, as outlined below. If the + field is not empty, the version in the package name will be verified to be + consistent with what is provided here. 
+ + The versioning schema uses [semantic + versioning](http://semver.org) where the major version number + indicates a breaking change and the minor version an additive, + non-breaking change. Both version numbers are signals to users + what to expect from different versions, and should be carefully + chosen based on the product plan. + + The major version is also reflected in the package name of the + interface, which must end in `v`, as in + `google.feature.v1`. For major versions 0 and 1, the suffix can + be omitted. Zero major versions must only be used for + experimental, non-GA interfaces. + """ + + source_context: "SourceContext" = betterproto.message_field(5) + """ + Source context for the protocol buffer service represented by this + message. + """ + + mixins: List["Mixin"] = betterproto.message_field(6) + """Included interfaces. See [Mixin][].""" + + syntax: "Syntax" = betterproto.enum_field(7) + """The source syntax of the service.""" + + +@dataclass(eq=False, repr=False) +class Method(betterproto.Message): + """Method represents a method of an API interface.""" + + name: str = betterproto.string_field(1) + """The simple name of this method.""" + + request_type_url: str = betterproto.string_field(2) + """A URL of the input message type.""" + + request_streaming: bool = betterproto.bool_field(3) + """If true, the request is streamed.""" + + response_type_url: str = betterproto.string_field(4) + """The URL of the output message type.""" + + response_streaming: bool = betterproto.bool_field(5) + """If true, the response is streamed.""" + + options: List["Option"] = betterproto.message_field(6) + """Any metadata attached to the method.""" + + syntax: "Syntax" = betterproto.enum_field(7) + """The source syntax of this method.""" + + +@dataclass(eq=False, repr=False) +class Mixin(betterproto.Message): + """ + Declares an API Interface to be included in this interface. 
The including + interface must redeclare all the methods from the included interface, but + documentation and options are inherited as follows: + + - If after comment and whitespace stripping, the documentation + string of the redeclared method is empty, it will be inherited + from the original method. + + - Each annotation belonging to the service config (http, + visibility) which is not set in the redeclared method will be + inherited. + + - If an http annotation is inherited, the path pattern will be + modified as follows. Any version prefix will be replaced by the + version of the including interface plus the [root][] path if + specified. + + Example of a simple mixin: + + package google.acl.v1; + service AccessControl { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v1/{resource=**}:getAcl"; + } + } + + package google.storage.v2; + service Storage { + rpc GetAcl(GetAclRequest) returns (Acl); + + // Get a data record. + rpc GetData(GetDataRequest) returns (Data) { + option (google.api.http).get = "/v2/{resource=**}"; + } + } + + Example of a mixin configuration: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + + The mixin construct implies that all methods in `AccessControl` are + also declared with same name and request/response types in + `Storage`. A documentation generator or annotation processor will + see the effective `Storage.GetAcl` method after inherting + documentation and annotations as follows: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/{resource=**}:getAcl"; + } + ... + } + + Note how the version in the path pattern changed from `v1` to `v2`. + + If the `root` field in the mixin is specified, it should be a + relative path under which inherited HTTP paths are placed. 
Example: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + root: acls + + This implies the following inherited HTTP annotation: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; + } + ... + } + """ + + name: str = betterproto.string_field(1) + """The fully qualified name of the interface which is included.""" + + root: str = betterproto.string_field(2) + """ + If non-empty specifies a path under which inherited HTTP paths + are rooted. + """ + + +@dataclass(eq=False, repr=False) +class FileDescriptorSet(betterproto.Message): + """ + The protocol compiler can output a FileDescriptorSet containing the .proto + files it parses. + """ + + file: List["FileDescriptorProto"] = betterproto.message_field(1) + + +@dataclass(eq=False, repr=False) +class FileDescriptorProto(betterproto.Message): + """Describes a complete .proto file.""" + + name: str = betterproto.string_field(1) + package: str = betterproto.string_field(2) + dependency: List[str] = betterproto.string_field(3) + """Names of files imported by this file.""" + + public_dependency: List[int] = betterproto.int32_field(10) + """Indexes of the public imported files in the dependency list above.""" + + weak_dependency: List[int] = betterproto.int32_field(11) + """ + Indexes of the weak imported files in the dependency list. + For Google-internal migration only. Do not use. 
+ """ + + message_type: List["DescriptorProto"] = betterproto.message_field(4) + """All top-level definitions in this file.""" + + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5) + service: List["ServiceDescriptorProto"] = betterproto.message_field(6) + extension: List["FieldDescriptorProto"] = betterproto.message_field(7) + options: "FileOptions" = betterproto.message_field(8) + source_code_info: "SourceCodeInfo" = betterproto.message_field(9) + """ + This field contains optional information about the original source code. + You may safely remove this entire field without harming runtime + functionality of the descriptors -- the information is needed only by + development tools. + """ + + syntax: str = betterproto.string_field(12) + """ + The syntax of the proto file. + The supported values are "proto2", "proto3", and "editions". + + If `edition` is present, this value must be "editions". + """ + + edition: "Edition" = betterproto.enum_field(14) + """The edition of the proto file.""" + + +@dataclass(eq=False, repr=False) +class DescriptorProto(betterproto.Message): + """Describes a message type.""" + + name: str = betterproto.string_field(1) + field: List["FieldDescriptorProto"] = betterproto.message_field(2) + extension: List["FieldDescriptorProto"] = betterproto.message_field(6) + nested_type: List["DescriptorProto"] = betterproto.message_field(3) + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4) + extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field( + 5 + ) + oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8) + options: "MessageOptions" = betterproto.message_field(7) + reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9) + reserved_name: List[str] = betterproto.string_field(10) + """ + Reserved field names, which may not be used by fields in the same message. + A given name may only be reserved once. 
+ """ + + +@dataclass(eq=False, repr=False) +class DescriptorProtoExtensionRange(betterproto.Message): + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + options: "ExtensionRangeOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DescriptorProtoReservedRange(betterproto.Message): + """ + Range of reserved tag numbers. Reserved tag numbers may not be used by + fields or extension ranges in the same message. Reserved ranges may + not overlap. + """ + + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + + +@dataclass(eq=False, repr=False) +class ExtensionRangeOptions(betterproto.Message): + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + declaration: List["ExtensionRangeOptionsDeclaration"] = betterproto.message_field(2) + """ + For external users: DO NOT USE. We are in the process of open sourcing + extension declaration and executing internal cleanups before it can be + used externally. + """ + + features: "FeatureSet" = betterproto.message_field(50) + """Any features defined in the specific edition.""" + + verification: "ExtensionRangeOptionsVerificationState" = betterproto.enum_field(3) + """ + The verification state of the range. + TODO: flip the default to DECLARATION once all empty ranges + are marked as UNVERIFIED. + """ + + +@dataclass(eq=False, repr=False) +class ExtensionRangeOptionsDeclaration(betterproto.Message): + number: int = betterproto.int32_field(1) + """The extension number declared within the extension range.""" + + full_name: str = betterproto.string_field(2) + """ + The fully-qualified name of the extension field. There must be a leading + dot in front of the full name. + """ + + type: str = betterproto.string_field(3) + """ + The fully-qualified type name of the extension field. 
Unlike + Metadata.type, Declaration.type must have a leading dot for messages + and enums. + """ + + reserved: bool = betterproto.bool_field(5) + """ + If true, indicates that the number is reserved in the extension range, + and any extension field with the number will fail to compile. Set this + when a declared extension field is deleted. + """ + + repeated: bool = betterproto.bool_field(6) + """ + If true, indicates that the extension must be defined as repeated. + Otherwise the extension must be defined as optional. + """ + + +@dataclass(eq=False, repr=False) +class FieldDescriptorProto(betterproto.Message): + """Describes a field within a message.""" + + name: str = betterproto.string_field(1) + number: int = betterproto.int32_field(3) + label: "FieldDescriptorProtoLabel" = betterproto.enum_field(4) + type: "FieldDescriptorProtoType" = betterproto.enum_field(5) + """ + If type_name is set, this need not be set. If both this and type_name + are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + """ + + type_name: str = betterproto.string_field(6) + """ + For message and enum types, this is the name of the type. If the name + starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + rules are used to find the type (i.e. first the nested types within this + message are searched, then within the parent, on up to the root + namespace). + """ + + extendee: str = betterproto.string_field(2) + """ + For extensions, this is the name of the type being extended. It is + resolved in the same manner as type_name. + """ + + default_value: str = betterproto.string_field(7) + """ + For numeric types, contains the original text representation of the value. + For booleans, "true" or "false". + For strings, contains the default text contents (not escaped in any way). + For bytes, contains the C escaped value. All bytes >= 128 are escaped. 
+ """ + + oneof_index: int = betterproto.int32_field(9) + """ + If set, gives the index of a oneof in the containing type's oneof_decl + list. This field is a member of that oneof. + """ + + json_name: str = betterproto.string_field(10) + """ + JSON name of this field. The value is set by protocol compiler. If the + user has set a "json_name" option on this field, that option's value + will be used. Otherwise, it's deduced from the field's name by converting + it to camelCase. + """ + + options: "FieldOptions" = betterproto.message_field(8) + proto3_optional: bool = betterproto.bool_field(17) + """ + If true, this is a proto3 "optional". When a proto3 field is optional, it + tracks presence regardless of field type. + + When proto3_optional is true, this field must belong to a oneof to signal + to old proto3 clients that presence is tracked for this field. This oneof + is known as a "synthetic" oneof, and this field must be its sole member + (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + exist in the descriptor only, and do not generate any API. Synthetic oneofs + must be ordered after all "real" oneofs. + + For message fields, proto3_optional doesn't create any semantic change, + since non-repeated message fields always track presence. However it still + indicates the semantic detail of whether the user wrote "optional" or not. + This can be useful for round-tripping the .proto file. For consistency we + give message fields a synthetic oneof also, even though it is not required + to track presence. This is especially important because the parser can't + tell if a field is a message or an enum, so it must always create a + synthetic oneof. + + Proto2 optional fields do not set this flag, because they already indicate + optional with `LABEL_OPTIONAL`. 
+ """ + + +@dataclass(eq=False, repr=False) +class OneofDescriptorProto(betterproto.Message): + """Describes a oneof.""" + + name: str = betterproto.string_field(1) + options: "OneofOptions" = betterproto.message_field(2) + + +@dataclass(eq=False, repr=False) +class EnumDescriptorProto(betterproto.Message): + """Describes an enum type.""" + + name: str = betterproto.string_field(1) + value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) + options: "EnumOptions" = betterproto.message_field(3) + reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = ( + betterproto.message_field(4) + ) + """ + Range of reserved numeric values. Reserved numeric values may not be used + by enum values in the same enum declaration. Reserved ranges may not + overlap. + """ + + reserved_name: List[str] = betterproto.string_field(5) + """ + Reserved enum value names, which may not be reused. A given name may only + be reserved once. + """ + + +@dataclass(eq=False, repr=False) +class EnumDescriptorProtoEnumReservedRange(betterproto.Message): + """ + Range of reserved numeric values. Reserved values may not be used by + entries in the same enum. Reserved ranges may not overlap. + + Note that this is distinct from DescriptorProto.ReservedRange in that it + is inclusive such that it can appropriately represent the entire int32 + domain. 
+ """ + + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + + +@dataclass(eq=False, repr=False) +class EnumValueDescriptorProto(betterproto.Message): + """Describes a value within an enum.""" + + name: str = betterproto.string_field(1) + number: int = betterproto.int32_field(2) + options: "EnumValueOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class ServiceDescriptorProto(betterproto.Message): + """Describes a service.""" + + name: str = betterproto.string_field(1) + method: List["MethodDescriptorProto"] = betterproto.message_field(2) + options: "ServiceOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class MethodDescriptorProto(betterproto.Message): + """Describes a method of a service.""" + + name: str = betterproto.string_field(1) + input_type: str = betterproto.string_field(2) + """ + Input and output type names. These are resolved in the same way as + FieldDescriptorProto.type_name, but must refer to a message type. + """ + + output_type: str = betterproto.string_field(3) + options: "MethodOptions" = betterproto.message_field(4) + client_streaming: bool = betterproto.bool_field(5) + """Identifies if client streams multiple client messages""" + + server_streaming: bool = betterproto.bool_field(6) + """Identifies if server streams multiple server messages""" + + +@dataclass(eq=False, repr=False) +class FileOptions(betterproto.Message): + java_package: str = betterproto.string_field(1) + """ + Sets the Java package where classes generated from this .proto will be + placed. By default, the proto package is used, but this is often + inappropriate because proto packages do not normally start with backwards + domain names. + """ + + java_outer_classname: str = betterproto.string_field(8) + """ + Controls the name of the wrapper Java class generated for the .proto file. 
+ That class will always contain the .proto file's getDescriptor() method as + well as any top-level extensions defined in the .proto file. + If java_multiple_files is disabled, then all the other classes from the + .proto file will be nested inside the single wrapper outer class. + """ + + java_multiple_files: bool = betterproto.bool_field(10) + """ + If enabled, then the Java code generator will generate a separate .java + file for each top-level message, enum, and service defined in the .proto + file. Thus, these types will *not* be nested inside the wrapper class + named by java_outer_classname. However, the wrapper class will still be + generated to contain the file's getDescriptor() method as well as any + top-level extensions defined in the file. + """ + + java_generate_equals_and_hash: bool = betterproto.bool_field(20) + """This option does nothing.""" + + java_string_check_utf8: bool = betterproto.bool_field(27) + """ + If set true, then the Java2 code generator will generate code that + throws an exception whenever an attempt is made to assign a non-UTF-8 + byte sequence to a string field. + Message reflection will do the same. + However, an extension field still accepts non-UTF-8 byte sequences. + This option has no effect on when used with the lite runtime. + """ + + optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field(9) + go_package: str = betterproto.string_field(11) + """ + Sets the Go package where structs generated from this .proto will be + placed. If omitted, the Go package will be derived from the following: + - The basename of the package import path, if provided. + - Otherwise, the package statement in the .proto file, if present. + - Otherwise, the basename of the .proto file, without extension. + """ + + cc_generic_services: bool = betterproto.bool_field(16) + """ + Should generic services be generated in each language? "Generic" services + are not specific to any particular RPC system. 
They are generated by the + main code generators in each language (without additional plugins). + Generic services were the only kind of service generation supported by + early versions of google.protobuf. + + Generic services are now considered deprecated in favor of using plugins + that generate code specific to your particular RPC system. Therefore, + these default to false. Old code which depends on generic services should + explicitly set them to true. + """ + + java_generic_services: bool = betterproto.bool_field(17) + py_generic_services: bool = betterproto.bool_field(18) + deprecated: bool = betterproto.bool_field(23) + """ + Is this file deprecated? + Depending on the target platform, this can emit Deprecated annotations + for everything in the file, or it will be completely ignored; in the very + least, this is a formalization for deprecating files. + """ + + cc_enable_arenas: bool = betterproto.bool_field(31) + """ + Enables the use of arenas for the proto messages in this file. This applies + only to generated classes for C++. + """ + + objc_class_prefix: str = betterproto.string_field(36) + """ + Sets the objective c class prefix which is prepended to all objective c + generated classes from this .proto. There is no default. + """ + + csharp_namespace: str = betterproto.string_field(37) + """Namespace for generated classes; defaults to the package.""" + + swift_prefix: str = betterproto.string_field(39) + """ + By default Swift generators will take the proto package and CamelCase it + replacing '.' with underscore and use that to prefix the types/symbols + defined. When this options is provided, they will use this value instead + to prefix the types/symbols defined. + """ + + php_class_prefix: str = betterproto.string_field(40) + """ + Sets the php class prefix which is prepended to all php generated classes + from this .proto. Default is empty. 
+ """ + + php_namespace: str = betterproto.string_field(41) + """ + Use this option to change the namespace of php generated classes. Default + is empty. When this option is empty, the package name will be used for + determining the namespace. + """ + + php_metadata_namespace: str = betterproto.string_field(44) + """ + Use this option to change the namespace of php generated metadata classes. + Default is empty. When this option is empty, the proto file name will be + used for determining the namespace. + """ + + ruby_package: str = betterproto.string_field(45) + """ + Use this option to change the package of ruby generated classes. Default + is empty. When this option is not set, the package name will be used for + determining the ruby package. + """ + + features: "FeatureSet" = betterproto.message_field(50) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """ + The parser stores options it doesn't recognize here. + See the documentation for the "Options" section above. + """ + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("java_generate_equals_and_hash"): + warnings.warn( + "FileOptions.java_generate_equals_and_hash is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class MessageOptions(betterproto.Message): + message_set_wire_format: bool = betterproto.bool_field(1) + """ + Set true to use the old proto1 MessageSet wire format for extensions. + This is provided for backwards-compatibility with the MessageSet wire + format. You should not use this for any other reason: It's less + efficient, has fewer features, and is more complicated. + + The message must be defined exactly as follows: + message Foo { + option message_set_wire_format = true; + extensions 4 to max; + } + Note that the message cannot have any defined fields; MessageSets only + have extensions. 
+
+ All extensions of your type must be singular messages; e.g. they cannot
+ be int32s, enums, or repeated messages.
+
+ Because this is an option, the above two restrictions are not enforced by
+ the protocol compiler.
+ """
+
+ no_standard_descriptor_accessor: bool = betterproto.bool_field(2)
+ """
+ Disables the generation of the standard "descriptor()" accessor, which can
+ conflict with a field of the same name. This is meant to make migration
+ from proto1 easier; new code should avoid fields named "descriptor".
+ """
+
+ deprecated: bool = betterproto.bool_field(3)
+ """
+ Is this message deprecated?
+ Depending on the target platform, this can emit Deprecated annotations
+ for the message, or it will be completely ignored; in the very least,
+ this is a formalization for deprecating messages.
+ """
+
+ map_entry: bool = betterproto.bool_field(7)
+ """
+ Whether the message is an automatically generated map entry type for the
+ maps field.
+
+ For maps fields:
+ map<KeyType, ValueType> map_field = 1;
+ The parsed descriptor looks like:
+ message MapFieldEntry {
+ option map_entry = true;
+ optional KeyType key = 1;
+ optional ValueType value = 2;
+ }
+ repeated MapFieldEntry map_field = 1;
+
+ Implementations may choose not to generate the map_entry=true message, but
+ use a native map in the target language to hold the keys and values.
+ The reflection APIs in such implementations still need to work as
+ if the field is a repeated message field.
+
+ NOTE: Do not set the option in .proto files. Always use the maps syntax
+ instead. The option should only be implicitly set by the proto compiler
+ parser.
+ """
+
+ deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(11)
+ """
+ Enable the legacy handling of JSON field name conflicts. This lowercases
+ and strips underscored from the fields before comparison in proto3 only.
+ The new behavior takes `json_name` into account and applies to proto2 as
+ well.
+ + This should only be used as a temporary measure against broken builds due + to the change in behavior for JSON field name conflicts. + + TODO This is legacy behavior we plan to remove once downstream + teams have had time to migrate. + """ + + features: "FeatureSet" = betterproto.message_field(12) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("deprecated_legacy_json_field_conflicts"): + warnings.warn( + "MessageOptions.deprecated_legacy_json_field_conflicts is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class FieldOptions(betterproto.Message): + ctype: "FieldOptionsCType" = betterproto.enum_field(1) + """ + The ctype option instructs the C++ code generator to use a different + representation of the field than it normally would. See the specific + options below. This option is only implemented to support use of + [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + type "bytes" in the open source release -- sorry, we'll try to include + other types in a future version! + """ + + packed: bool = betterproto.bool_field(2) + """ + The packed option can be enabled for repeated primitive fields to enable + a more efficient representation on the wire. Rather than repeatedly + writing the tag and type for each element, the entire array is encoded as + a single length-delimited blob. In proto3, only explicit setting it to + false will avoid using packed encoding. This option is prohibited in + Editions, but the `repeated_field_encoding` feature can be used to control + the behavior. + """ + + jstype: "FieldOptionsJsType" = betterproto.enum_field(6) + """ + The jstype option determines the JavaScript type used for values of the + field. 
The option is permitted only for 64 bit integral and fixed types + (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + is represented as JavaScript string, which avoids loss of precision that + can happen when a large value is converted to a floating point JavaScript. + Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + use the JavaScript "number" type. The behavior of the default option + JS_NORMAL is implementation dependent. + + This option is an enum to permit additional types to be added, e.g. + goog.math.Integer. + """ + + lazy: bool = betterproto.bool_field(5) + """ + Should this field be parsed lazily? Lazy applies only to message-type + fields. It means that when the outer message is initially parsed, the + inner message's contents will not be parsed but instead stored in encoded + form. The inner message will actually be parsed when it is first accessed. + + This is only a hint. Implementations are free to choose whether to use + eager or lazy parsing regardless of the value of this option. However, + setting this option true suggests that the protocol author believes that + using lazy parsing on this field is worth the additional bookkeeping + overhead typically needed to implement it. + + This option does not affect the public interface of any generated code; + all method signatures remain the same. Furthermore, thread-safety of the + interface is not affected by this option; const methods remain safe to + call from multiple threads concurrently, while non-const methods continue + to require exclusive access. + + Note that lazy message fields are still eagerly verified to check + ill-formed wireformat or missing required fields. Calling IsInitialized() + on the outer message would fail if the inner message has missing required + fields. Failed verification would result in parsing failure (except when + uninitialized messages are acceptable). 
+ """ + + unverified_lazy: bool = betterproto.bool_field(15) + """ + unverified_lazy does no correctness checks on the byte stream. This should + only be used where lazy with verification is prohibitive for performance + reasons. + """ + + deprecated: bool = betterproto.bool_field(3) + """ + Is this field deprecated? + Depending on the target platform, this can emit Deprecated annotations + for accessors, or it will be completely ignored; in the very least, this + is a formalization for deprecating fields. + """ + + weak: bool = betterproto.bool_field(10) + """For Google-internal migration only. Do not use.""" + + debug_redact: bool = betterproto.bool_field(16) + """ + Indicate that the field value should not be printed out when using debug + formats, e.g. when the field contains sensitive credentials. + """ + + retention: "FieldOptionsOptionRetention" = betterproto.enum_field(17) + targets: List["FieldOptionsOptionTargetType"] = betterproto.enum_field(19) + edition_defaults: List["FieldOptionsEditionDefault"] = betterproto.message_field(20) + features: "FeatureSet" = betterproto.message_field(21) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class FieldOptionsEditionDefault(betterproto.Message): + edition: "Edition" = betterproto.enum_field(3) + value: str = betterproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class OneofOptions(betterproto.Message): + features: "FeatureSet" = betterproto.message_field(1) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. 
See above.""" + + +@dataclass(eq=False, repr=False) +class EnumOptions(betterproto.Message): + allow_alias: bool = betterproto.bool_field(2) + """ + Set this option to true to allow mapping different tag names to the same + value. + """ + + deprecated: bool = betterproto.bool_field(3) + """ + Is this enum deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum, or it will be completely ignored; in the very least, this + is a formalization for deprecating enums. + """ + + deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(6) + """ + Enable the legacy handling of JSON field name conflicts. This lowercases + and strips underscored from the fields before comparison in proto3 only. + The new behavior takes `json_name` into account and applies to proto2 as + well. + TODO Remove this legacy behavior once downstream teams have + had time to migrate. + """ + + features: "FeatureSet" = betterproto.message_field(7) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("deprecated_legacy_json_field_conflicts"): + warnings.warn( + "EnumOptions.deprecated_legacy_json_field_conflicts is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class EnumValueOptions(betterproto.Message): + deprecated: bool = betterproto.bool_field(1) + """ + Is this enum value deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum value, or it will be completely ignored; in the very least, + this is a formalization for deprecating enum values. 
+ """ + + features: "FeatureSet" = betterproto.message_field(2) + """Any features defined in the specific edition.""" + + debug_redact: bool = betterproto.bool_field(3) + """ + Indicate that fields annotated with this enum value should not be printed + out when using debug formats, e.g. when the field contains sensitive + credentials. + """ + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class ServiceOptions(betterproto.Message): + features: "FeatureSet" = betterproto.message_field(34) + """Any features defined in the specific edition.""" + + deprecated: bool = betterproto.bool_field(33) + """ + Is this service deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the service, or it will be completely ignored; in the very least, + this is a formalization for deprecating services. + """ + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class MethodOptions(betterproto.Message): + deprecated: bool = betterproto.bool_field(33) + """ + Is this method deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the method, or it will be completely ignored; in the very least, + this is a formalization for deprecating methods. + """ + + idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field(34) + features: "FeatureSet" = betterproto.message_field(35) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. 
See above.""" + + +@dataclass(eq=False, repr=False) +class UninterpretedOption(betterproto.Message): + """ + A message representing a option the parser does not recognize. This only + appears in options protos created by the compiler::Parser class. + DescriptorPool resolves these when building Descriptor objects. Therefore, + options protos in descriptor objects (e.g. returned by Descriptor::options(), + or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + in them. + """ + + name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2) + identifier_value: str = betterproto.string_field(3) + """ + The value of the uninterpreted option, in whatever type the tokenizer + identified it as during parsing. Exactly one of these should be set. + """ + + positive_int_value: int = betterproto.uint64_field(4) + negative_int_value: int = betterproto.int64_field(5) + double_value: float = betterproto.double_field(6) + string_value: bytes = betterproto.bytes_field(7) + aggregate_value: str = betterproto.string_field(8) + + +@dataclass(eq=False, repr=False) +class UninterpretedOptionNamePart(betterproto.Message): + """ + The name of the uninterpreted option. Each string represents a segment in + a dot-separated name. is_extension is true iff a segment represents an + extension (denoted with parentheses in options specs in .proto files). + E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + "foo.(bar.baz).moo". + """ + + name_part: str = betterproto.string_field(1) + is_extension: bool = betterproto.bool_field(2) + + +@dataclass(eq=False, repr=False) +class FeatureSet(betterproto.Message): + """ + TODO Enums in C++ gencode (and potentially other languages) are + not well scoped. This means that each of the feature enums below can clash + with each other. The short names we've chosen maximize call-site + readability, but leave us very open to this scenario. 
A future feature will + be designed and implemented to handle this, hopefully before we ever hit a + conflict here. + """ + + field_presence: "FeatureSetFieldPresence" = betterproto.enum_field(1) + enum_type: "FeatureSetEnumType" = betterproto.enum_field(2) + repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = betterproto.enum_field( + 3 + ) + utf8_validation: "FeatureSetUtf8Validation" = betterproto.enum_field(4) + message_encoding: "FeatureSetMessageEncoding" = betterproto.enum_field(5) + json_format: "FeatureSetJsonFormat" = betterproto.enum_field(6) + + +@dataclass(eq=False, repr=False) +class FeatureSetDefaults(betterproto.Message): + """ + A compiled specification for the defaults of a set of features. These + messages are generated from FeatureSet extensions and can be used to seed + feature resolution. The resolution with this object becomes a simple search + for the closest matching edition, followed by proto merges. + """ + + defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = ( + betterproto.message_field(1) + ) + minimum_edition: "Edition" = betterproto.enum_field(4) + """ + The minimum supported edition (inclusive) when this was constructed. + Editions before this will not have defaults. + """ + + maximum_edition: "Edition" = betterproto.enum_field(5) + """ + The maximum known edition (inclusive) when this was constructed. Editions + after this will not have reliable defaults. + """ + + +@dataclass(eq=False, repr=False) +class FeatureSetDefaultsFeatureSetEditionDefault(betterproto.Message): + """ + A map from every known edition with a unique set of defaults to its + defaults. Not all editions may be contained here. For a given edition, + the defaults at the closest matching edition ordered at or before it should + be used. This field must be in strict ascending order by edition. 
+ """ + + edition: "Edition" = betterproto.enum_field(3) + features: "FeatureSet" = betterproto.message_field(2) + + +@dataclass(eq=False, repr=False) +class SourceCodeInfo(betterproto.Message): + """ + Encapsulates information about the original source file from which a + FileDescriptorProto was generated. + """ + + location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) + """ + A Location identifies a piece of source code in a .proto file which + corresponds to a particular definition. This information is intended + to be useful to IDEs, code indexers, documentation generators, and similar + tools. + + For example, say we have a file like: + message Foo { + optional string foo = 1; + } + Let's look at just the field definition: + optional string foo = 1; + ^ ^^ ^^ ^ ^^^ + a bc de f ghi + We have the following locations: + span path represents + [a,i) [ 4, 0, 2, 0 ] The whole field definition. + [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + + Notes: + - A location may refer to a repeated field itself (i.e. not to any + particular index within it). This is used whenever a set of elements are + logically enclosed in a single code segment. For example, an entire + extend block (possibly containing multiple extension definitions) will + have an outer location whose path refers to the "extensions" repeated + field without an index. + - Multiple locations may have the same path. This happens when a single + logical declaration is spread out across multiple places. The most + obvious example is the "extend" block again -- there may be multiple + extend blocks in the same scope, each of which will have the same path. + - A location's span is not always a subset of its parent's span. 
For + example, the "extendee" of an extension declaration appears at the + beginning of the "extend" block and is shared by all extensions within + the block. + - Just because a location's span is a subset of some other location's span + does not mean that it is a descendant. For example, a "group" defines + both a type and a field in a single declaration. Thus, the locations + corresponding to the type and field and their components will overlap. + - Code which tries to interpret locations should probably be designed to + ignore those that it doesn't understand, as more types of locations could + be recorded in the future. + """ + + +@dataclass(eq=False, repr=False) +class SourceCodeInfoLocation(betterproto.Message): + path: List[int] = betterproto.int32_field(1) + """ + Identifies which part of the FileDescriptorProto was defined at this + location. + + Each element is a field number or an index. They form a path from + the root FileDescriptorProto to the place where the definition appears. + For example, this path: + [ 4, 3, 2, 7, 1 ] + refers to: + file.message_type(3) // 4, 3 + .field(7) // 2, 7 + .name() // 1 + This is because FileDescriptorProto.message_type has field number 4: + repeated DescriptorProto message_type = 4; + and DescriptorProto.field has field number 2: + repeated FieldDescriptorProto field = 2; + and FieldDescriptorProto.name has field number 1: + optional string name = 1; + + Thus, the above path gives the location of a field name. If we removed + the last element: + [ 4, 3, 2, 7 ] + this path refers to the whole field declaration (from the beginning + of the label to the terminating semicolon). + """ + + span: List[int] = betterproto.int32_field(2) + """ + Always has exactly three or four elements: start line, start column, + end line (optional, otherwise assumed same as start line), end column. + These are packed into a single field for efficiency. 
Note that line + and column numbers are zero-based -- typically you will want to add + 1 to each before displaying to a user. + """ + + leading_comments: str = betterproto.string_field(3) + """ + If this SourceCodeInfo represents a complete declaration, these are any + comments appearing before and after the declaration which appear to be + attached to the declaration. + + A series of line comments appearing on consecutive lines, with no other + tokens appearing on those lines, will be treated as a single comment. + + leading_detached_comments will keep paragraphs of comments that appear + before (but not connected to) the current element. Each paragraph, + separated by empty lines, will be one comment element in the repeated + field. + + Only the comment content is provided; comment markers (e.g. //) are + stripped out. For block comments, leading whitespace and an asterisk + will be stripped from the beginning of each line other than the first. + Newlines are included in the output. + + Examples: + + optional int32 foo = 1; // Comment attached to foo. + // Comment attached to bar. + optional int32 bar = 2; + + optional string baz = 3; + // Comment attached to baz. + // Another line attached to baz. + + // Comment attached to moo. + // + // Another line attached to moo. + optional double moo = 4; + + // Detached comment for corge. This is not leading or trailing comments + // to moo or corge because there are blank lines separating it from + // both. + + // Detached comment for corge paragraph 2. + + optional string corge = 5; + /* Block comment attached + * to corge. Leading asterisks + * will be removed. */ + /* Block comment attached to + * grault. */ + optional int32 grault = 6; + + // ignored detached comments. 
+ """ + + trailing_comments: str = betterproto.string_field(4) + leading_detached_comments: List[str] = betterproto.string_field(6) + + +@dataclass(eq=False, repr=False) +class GeneratedCodeInfo(betterproto.Message): + """ + Describes the relationship between generated code and its original source + file. A GeneratedCodeInfo message is associated with only one generated + source file, but may contain references to different source .proto files. + """ + + annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) + """ + An Annotation connects some span of text in generated code to an element + of its generating .proto file. + """ + + +@dataclass(eq=False, repr=False) +class GeneratedCodeInfoAnnotation(betterproto.Message): + path: List[int] = betterproto.int32_field(1) + """ + Identifies the element in the original source .proto file. This field + is formatted the same as SourceCodeInfo.Location.path. + """ + + source_file: str = betterproto.string_field(2) + """Identifies the filesystem path to the original source .proto.""" + + begin: int = betterproto.int32_field(3) + """ + Identifies the starting offset in bytes in the generated code + that relates to the identified object. + """ + + end: int = betterproto.int32_field(4) + """ + Identifies the ending offset in bytes in the generated code that + relates to the identified object. The end offset should be one past + the last relevant byte (so the length of the text = end - begin). + """ + + semantic: "GeneratedCodeInfoAnnotationSemantic" = betterproto.enum_field(5) + + +@dataclass(eq=False, repr=False) +class Duration(betterproto.Message): + """ + A Duration represents a signed, fixed-length span of time represented + as a count of seconds and fractions of seconds at nanosecond + resolution. It is independent of any calendar and concepts like "day" + or "month". 
It is related to Timestamp in that the difference between + two Timestamp values is a Duration and it can be added or subtracted + from a Timestamp. Range is approximately +-10,000 years. + + # Examples + + Example 1: Compute Duration from two Timestamps in pseudo code. + + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + + Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + + Example 3: Compute Duration from datetime.timedelta in Python. + + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + + # JSON Mapping + + In JSON format, the Duration type is encoded as a string rather than an + object, where the string ends in the suffix "s" (indicating seconds) and + is preceded by the number of seconds, with nanoseconds expressed as + fractional seconds. For example, 3 seconds with 0 nanoseconds should be + encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + microsecond should be expressed in JSON format as "3.000001s". + """ + + seconds: int = betterproto.int64_field(1) + """ + Signed seconds of the span of time. Must be from -315,576,000,000 + to +315,576,000,000 inclusive. 
Note: these bounds are computed from: + 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + """ + + nanos: int = betterproto.int32_field(2) + """ + Signed fractions of a second at nanosecond resolution of the span + of time. Durations less than one second are represented with a 0 + `seconds` field and a positive or negative `nanos` field. For durations + of one second or more, a non-zero value for the `nanos` field must be + of the same sign as the `seconds` field. Must be from -999,999,999 + to +999,999,999 inclusive. + """ + + +@dataclass(eq=False, repr=False) +class Empty(betterproto.Message): + """ + A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to use it as the request + or the response type of an API method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + """ + + pass + + +@dataclass(eq=False, repr=False) +class FieldMask(betterproto.Message): + """ + `FieldMask` represents a set of symbolic field paths, for example: + + paths: "f.a" + paths: "f.b.d" + + Here `f` represents a field in some root message, `a` and `b` + fields in the message found in `f`, and `d` a field found in the + message in `f.b`. + + Field masks are used to specify a subset of fields that should be + returned by a get operation or modified by an update operation. + Field masks also have a custom JSON encoding (see below). + + # Field Masks in Projections + + When used in the context of a projection, a response message or + sub-message is filtered by the API to only contain those fields as + specified in the mask. 
For example, if the mask in the previous + example is applied to a response message as follows: + + f { + a : 22 + b { + d : 1 + x : 2 + } + y : 13 + } + z: 8 + + The result will not contain specific values for fields x,y and z + (their value will be set to the default, and omitted in proto text + output): + + + f { + a : 22 + b { + d : 1 + } + } + + A repeated field is not allowed except at the last position of a + paths string. + + If a FieldMask object is not present in a get operation, the + operation applies to all fields (as if a FieldMask of all fields + had been specified). + + Note that a field mask does not necessarily apply to the + top-level response message. In case of a REST get operation, the + field mask applies directly to the response, but in case of a REST + list operation, the mask instead applies to each individual message + in the returned resource list. In case of a REST custom method, + other definitions may be used. Where the mask applies will be + clearly documented together with its declaration in the API. In + any case, the effect on the returned resource/resources is required + behavior for APIs. + + # Field Masks in Update Operations + + A field mask in update operations specifies which fields of the + targeted resource are going to be updated. The API is required + to only change the values of the fields as specified in the mask + and leave the others untouched. If a resource is passed in to + describe the updated values, the API ignores the values of all + fields not covered by the mask. + + If a repeated field is specified for an update operation, new values will + be appended to the existing repeated field in the target resource. Note that + a repeated field is only allowed in the last position of a `paths` string. + + If a sub-message is specified in the last position of the field mask for an + update operation, then new value will be merged into the existing sub-message + in the target resource. 
+ + For example, given the target message: + + f { + b { + d: 1 + x: 2 + } + c: [1] + } + + And an update message: + + f { + b { + d: 10 + } + c: [2] + } + + then if the field mask is: + + paths: ["f.b", "f.c"] + + then the result will be: + + f { + b { + d: 10 + x: 2 + } + c: [1, 2] + } + + An implementation may provide options to override this default behavior for + repeated and message fields. + + In order to reset a field's value to the default, the field must + be in the mask and set to the default value in the provided resource. + Hence, in order to reset all fields of a resource, provide a default + instance of the resource and set all fields in the mask, or do + not provide a mask as described below. + + If a field mask is not present on update, the operation applies to + all fields (as if a field mask of all fields has been specified). + Note that in the presence of schema evolution, this may mean that + fields the client does not know and has therefore not filled into + the request will be reset to their default. If this is unwanted + behavior, a specific service may require a client to always specify + a field mask, producing an error if not. + + As with get operations, the location of the resource which + describes the updated values in the request message depends on the + operation kind. In any case, the effect of the field mask is + required to be honored by the API. + + ## Considerations for HTTP REST + + The HTTP kind of an update operation which uses a field mask must + be set to PATCH instead of PUT in order to satisfy HTTP semantics + (PUT must only be used for full updates). + + # JSON Encoding of Field Masks + + In JSON, a field mask is encoded as a single string where paths are + separated by a comma. Fields name in each path are converted + to/from lower-camel naming conventions. 
+ + As an example, consider the following message declarations: + + message Profile { + User user = 1; + Photo photo = 2; + } + message User { + string display_name = 1; + string address = 2; + } + + In proto a field mask for `Profile` may look as such: + + mask { + paths: "user.display_name" + paths: "photo" + } + + In JSON, the same mask is represented as below: + + { + mask: "user.displayName,photo" + } + + # Field Masks and Oneof Fields + + Field masks treat fields in oneofs just as regular fields. Consider the + following message: + + message SampleMessage { + oneof test_oneof { + string name = 4; + SubMessage sub_message = 9; + } + } + + The field mask can be: + + mask { + paths: "name" + } + + Or: + + mask { + paths: "sub_message" + } + + Note that oneof type names ("test_oneof" in this case) cannot be used in + paths. + + ## Field Mask Verification + + The implementation of any API method which has a FieldMask type field in the + request should verify the included field paths, and return an + `INVALID_ARGUMENT` error if any path is unmappable. + """ + + paths: List[str] = betterproto.string_field(1) + """The set of field mask paths.""" + + +@dataclass(eq=False, repr=False) +class Struct(betterproto.Message): + """ + `Struct` represents a structured data value, consisting of fields + which map to dynamically typed values. In some languages, `Struct` + might be supported by a native representation. For example, in + scripting languages like JS a struct is represented as an + object. The details of that representation are described together + with the proto support for the language. + + The JSON representation for `Struct` is JSON object. 
+ """ + + fields: Dict[str, "Value"] = betterproto.map_field( + 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + """Unordered map of dynamically typed values.""" + + @hybridmethod + def from_dict(cls: "type[Self]", value: Mapping[str, Any]) -> Self: # type: ignore + self = cls() + return self.from_dict(value) + + @from_dict.instancemethod + def from_dict(self, value: Mapping[str, Any]) -> Self: + fields = {**value} + for k in fields: + if hasattr(fields[k], "from_dict"): + fields[k] = fields[k].from_dict() + + self.fields = fields + return self + + def to_dict( + self, + casing: betterproto.Casing = betterproto.Casing.CAMEL, + include_default_values: bool = False, + ) -> Dict[str, Any]: + output = {**self.fields} + for k in self.fields: + if hasattr(self.fields[k], "to_dict"): + output[k] = self.fields[k].to_dict(casing, include_default_values) + return output + + +@dataclass(eq=False, repr=False) +class Value(betterproto.Message): + """ + `Value` represents a dynamically typed value which can be either + null, a number, a string, a boolean, a recursive struct value, or a + list of values. A producer of value is expected to set one of these + variants. Absence of any variant indicates an error. + + The JSON representation for `Value` is JSON value. 
+ """ + + null_value: Optional["NullValue"] = betterproto.enum_field( + 1, optional=True, group="kind" + ) + """Represents a null value.""" + + number_value: Optional[float] = betterproto.double_field( + 2, optional=True, group="kind" + ) + """Represents a double value.""" + + string_value: Optional[str] = betterproto.string_field( + 3, optional=True, group="kind" + ) + """Represents a string value.""" + + bool_value: Optional[bool] = betterproto.bool_field(4, optional=True, group="kind") + """Represents a boolean value.""" + + struct_value: Optional["Struct"] = betterproto.message_field( + 5, optional=True, group="kind" + ) + """Represents a structured value.""" + + list_value: Optional["ListValue"] = betterproto.message_field( + 6, optional=True, group="kind" + ) + """Represents a repeated `Value`.""" + + @model_validator(mode="after") + def check_oneof(cls, values): + return cls._validate_field_groups(values) + + +@dataclass(eq=False, repr=False) +class ListValue(betterproto.Message): + """ + `ListValue` is a wrapper around a repeated field of values. + + The JSON representation for `ListValue` is JSON array. + """ + + values: List["Value"] = betterproto.message_field(1) + """Repeated field of dynamically typed values.""" + + +@dataclass(eq=False, repr=False) +class Timestamp(betterproto.Message): + """ + A Timestamp represents a point in time independent of any time zone or local + calendar, encoded as a count of seconds and fractions of seconds at + nanosecond resolution. The count is relative to an epoch at UTC midnight on + January 1, 1970, in the proleptic Gregorian calendar which extends the + Gregorian calendar backwards to year one. + + All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + second table is needed for interpretation, using a [24-hour linear + smear](https://developers.google.com/time/smear). + + The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. 
By + restricting to that range, we ensure that we can convert to and from [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + + # Examples + + Example 1: Compute Timestamp from POSIX `time()`. + + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + + Example 2: Compute Timestamp from POSIX `gettimeofday()`. + + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + + Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + + Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + + Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + + Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + + # JSON Mapping + + In JSON format, the Timestamp type is encoded as a string in the + [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + where {year} is always expressed using four digits while {month}, {day}, + {hour}, {min}, and {sec} are zero-padded to two digits each. 
The fractional + seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + is required. A proto3 JSON serializer should always use UTC (as indicated by + "Z") when printing the Timestamp type and a proto3 JSON parser should be + able to accept both UTC and other timezones (as indicated by an offset). + + For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + 01:30 UTC on January 15, 2017. + + In JavaScript, one can convert a Date object to this format using the + standard + [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + method. In Python, a standard `datetime.datetime` object can be converted + to this format using + [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + the Joda Time's [`ISODateTimeFormat.dateTime()`]( + http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() + ) to obtain a formatter capable of generating timestamps in this format. + """ + + seconds: int = betterproto.int64_field(1) + """ + Represents seconds of UTC time since Unix epoch + 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + 9999-12-31T23:59:59Z inclusive. + """ + + nanos: int = betterproto.int32_field(2) + """ + Non-negative fractions of a second at nanosecond resolution. Negative + second values with fractions must still have non-negative nanos values + that count forward in time. Must be from 0 to 999,999,999 + inclusive. + """ + + +@dataclass(eq=False, repr=False) +class DoubleValue(betterproto.Message): + """ + Wrapper message for `double`. + + The JSON representation for `DoubleValue` is JSON number. 
+ """ + + value: float = betterproto.double_field(1) + """The double value.""" + + +@dataclass(eq=False, repr=False) +class FloatValue(betterproto.Message): + """ + Wrapper message for `float`. + + The JSON representation for `FloatValue` is JSON number. + """ + + value: float = betterproto.float_field(1) + """The float value.""" + + +@dataclass(eq=False, repr=False) +class Int64Value(betterproto.Message): + """ + Wrapper message for `int64`. + + The JSON representation for `Int64Value` is JSON string. + """ + + value: int = betterproto.int64_field(1) + """The int64 value.""" + + +@dataclass(eq=False, repr=False) +class UInt64Value(betterproto.Message): + """ + Wrapper message for `uint64`. + + The JSON representation for `UInt64Value` is JSON string. + """ + + value: int = betterproto.uint64_field(1) + """The uint64 value.""" + + +@dataclass(eq=False, repr=False) +class Int32Value(betterproto.Message): + """ + Wrapper message for `int32`. + + The JSON representation for `Int32Value` is JSON number. + """ + + value: int = betterproto.int32_field(1) + """The int32 value.""" + + +@dataclass(eq=False, repr=False) +class UInt32Value(betterproto.Message): + """ + Wrapper message for `uint32`. + + The JSON representation for `UInt32Value` is JSON number. + """ + + value: int = betterproto.uint32_field(1) + """The uint32 value.""" + + +@dataclass(eq=False, repr=False) +class BoolValue(betterproto.Message): + """ + Wrapper message for `bool`. + + The JSON representation for `BoolValue` is JSON `true` and `false`. + """ + + value: bool = betterproto.bool_field(1) + """The bool value.""" + + +@dataclass(eq=False, repr=False) +class StringValue(betterproto.Message): + """ + Wrapper message for `string`. + + The JSON representation for `StringValue` is JSON string. + """ + + value: str = betterproto.string_field(1) + """The string value.""" + + +@dataclass(eq=False, repr=False) +class BytesValue(betterproto.Message): + """ + Wrapper message for `bytes`. 
+ + The JSON representation for `BytesValue` is JSON string. + """ + + value: bytes = betterproto.bytes_field(1) + """The bytes value.""" + + +rebuild_dataclass(Type) # type: ignore +rebuild_dataclass(Field) # type: ignore +rebuild_dataclass(Enum) # type: ignore +rebuild_dataclass(EnumValue) # type: ignore +rebuild_dataclass(Option) # type: ignore +rebuild_dataclass(Api) # type: ignore +rebuild_dataclass(Method) # type: ignore +rebuild_dataclass(FileDescriptorSet) # type: ignore +rebuild_dataclass(FileDescriptorProto) # type: ignore +rebuild_dataclass(DescriptorProto) # type: ignore +rebuild_dataclass(DescriptorProtoExtensionRange) # type: ignore +rebuild_dataclass(ExtensionRangeOptions) # type: ignore +rebuild_dataclass(FieldDescriptorProto) # type: ignore +rebuild_dataclass(OneofDescriptorProto) # type: ignore +rebuild_dataclass(EnumDescriptorProto) # type: ignore +rebuild_dataclass(EnumValueDescriptorProto) # type: ignore +rebuild_dataclass(ServiceDescriptorProto) # type: ignore +rebuild_dataclass(MethodDescriptorProto) # type: ignore +rebuild_dataclass(FileOptions) # type: ignore +rebuild_dataclass(MessageOptions) # type: ignore +rebuild_dataclass(FieldOptions) # type: ignore +rebuild_dataclass(FieldOptionsEditionDefault) # type: ignore +rebuild_dataclass(OneofOptions) # type: ignore +rebuild_dataclass(EnumOptions) # type: ignore +rebuild_dataclass(EnumValueOptions) # type: ignore +rebuild_dataclass(ServiceOptions) # type: ignore +rebuild_dataclass(MethodOptions) # type: ignore +rebuild_dataclass(UninterpretedOption) # type: ignore +rebuild_dataclass(FeatureSet) # type: ignore +rebuild_dataclass(FeatureSetDefaults) # type: ignore +rebuild_dataclass(FeatureSetDefaultsFeatureSetEditionDefault) # type: ignore +rebuild_dataclass(SourceCodeInfo) # type: ignore +rebuild_dataclass(GeneratedCodeInfo) # type: ignore +rebuild_dataclass(GeneratedCodeInfoAnnotation) # type: ignore +rebuild_dataclass(Struct) # type: ignore +rebuild_dataclass(Value) # type: ignore 
+rebuild_dataclass(ListValue) # type: ignore diff --git a/src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py b/src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py new file mode 100644 index 000000000..ba16fac99 --- /dev/null +++ b/src/betterproto/lib/pydantic/google/protobuf/compiler/__init__.py @@ -0,0 +1,210 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# sources: google/protobuf/compiler/plugin.proto +# plugin: python-betterproto +# This file has been @generated + +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from dataclasses import dataclass +else: + from pydantic.dataclasses import dataclass + +from typing import List + +import betterproto +import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf + + +class CodeGeneratorResponseFeature(betterproto.Enum): + """Sync with code_generator.h.""" + + FEATURE_NONE = 0 + FEATURE_PROTO3_OPTIONAL = 1 + FEATURE_SUPPORTS_EDITIONS = 2 + + +@dataclass(eq=False, repr=False) +class Version(betterproto.Message): + """The version number of protocol compiler.""" + + major: int = betterproto.int32_field(1) + minor: int = betterproto.int32_field(2) + patch: int = betterproto.int32_field(3) + suffix: str = betterproto.string_field(4) + """ + A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + be empty for mainline stable releases. + """ + + +@dataclass(eq=False, repr=False) +class CodeGeneratorRequest(betterproto.Message): + """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" + + file_to_generate: List[str] = betterproto.string_field(1) + """ + The .proto files that were explicitly listed on the command-line. The + code generator should generate code only for these files. Each file's + descriptor will be included in proto_file, below. 
+ """ + + parameter: str = betterproto.string_field(2) + """The generator parameter passed on the command-line.""" + + proto_file: List["betterproto_lib_pydantic_google_protobuf.FileDescriptorProto"] = ( + betterproto.message_field(15) + ) + """ + FileDescriptorProtos for all files in files_to_generate and everything + they import. The files will appear in topological order, so each file + appears before any file that imports it. + + Note: the files listed in files_to_generate will include runtime-retention + options only, but all other files will include source-retention options. + The source_file_descriptors field below is available in case you need + source-retention options for files_to_generate. + + protoc guarantees that all proto_files will be written after + the fields above, even though this is not technically guaranteed by the + protobuf wire format. This theoretically could allow a plugin to stream + in the FileDescriptorProtos and handle them one by one rather than read + the entire set into memory at once. However, as of this writing, this + is not similarly optimized on protoc's end -- it will store all fields in + memory at once before sending them to the plugin. + + Type names of fields and extensions in the FileDescriptorProto are always + fully qualified. + """ + + source_file_descriptors: List[ + "betterproto_lib_pydantic_google_protobuf.FileDescriptorProto" + ] = betterproto.message_field(17) + """ + File descriptors with all options, including source-retention options. + These descriptors are only provided for the files listed in + files_to_generate. + """ + + compiler_version: "Version" = betterproto.message_field(3) + """The version number of protocol compiler.""" + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponse(betterproto.Message): + """The plugin writes an encoded CodeGeneratorResponse to stdout.""" + + error: str = betterproto.string_field(1) + """ + Error message. If non-empty, code generation failed. 
The plugin process + should exit with status code zero even if it reports an error in this way. + + This should be used to indicate errors in .proto files which prevent the + code generator from generating correct code. Errors which indicate a + problem in protoc itself -- such as the input CodeGeneratorRequest being + unparseable -- should be reported by writing a message to stderr and + exiting with a non-zero status code. + """ + + supported_features: int = betterproto.uint64_field(2) + """ + A bitmask of supported features that the code generator supports. + This is a bitwise "or" of values from the Feature enum. + """ + + minimum_edition: int = betterproto.int32_field(3) + """ + The minimum edition this plugin supports. This will be treated as an + Edition enum, but we want to allow unknown values. It should be specified + according the edition enum value, *not* the edition number. Only takes + effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + """ + + maximum_edition: int = betterproto.int32_field(4) + """ + The maximum edition this plugin supports. This will be treated as an + Edition enum, but we want to allow unknown values. It should be specified + according the edition enum value, *not* the edition number. Only takes + effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + """ + + file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponseFile(betterproto.Message): + """Represents a single generated file.""" + + name: str = betterproto.string_field(1) + """ + The file name, relative to the output directory. The name must not + contain "." or ".." components and must be relative, not be absolute (so, + the file cannot lie outside the output directory). "/" must be used as + the path separator, not "\". + + If the name is omitted, the content will be appended to the previous + file. 
This allows the generator to break large files into small chunks, + and allows the generated text to be streamed back to protoc so that large + files need not reside completely in memory at one time. Note that as of + this writing protoc does not optimize for this -- it will read the entire + CodeGeneratorResponse before writing files to disk. + """ + + insertion_point: str = betterproto.string_field(2) + """ + If non-empty, indicates that the named file should already exist, and the + content here is to be inserted into that file at a defined insertion + point. This feature allows a code generator to extend the output + produced by another code generator. The original generator may provide + insertion points by placing special annotations in the file that look + like: + @@protoc_insertion_point(NAME) + The annotation can have arbitrary text before and after it on the line, + which allows it to be placed in a comment. NAME should be replaced with + an identifier naming the point -- this is what other generators will use + as the insertion_point. Code inserted at this point will be placed + immediately above the line containing the insertion point (thus multiple + insertions to the same point will come out in the order they were added). + The double-@ is intended to make it unlikely that the generated code + could contain things that look like insertion points by accident. + + For example, the C++ code generator places the following line in the + .pb.h files that it generates: + // @@protoc_insertion_point(namespace_scope) + This line appears within the scope of the file's package namespace, but + outside of any particular class. Another plugin can then specify the + insertion_point "namespace_scope" to generate additional classes or + other declarations that should be placed in this scope. + + Note that if the line containing the insertion point begins with + whitespace, the same whitespace will be added to every line of the + inserted text. 
This is useful for languages like Python, where + indentation matters. In these languages, the insertion point comment + should be indented the same amount as any inserted code will need to be + in order to work correctly in that context. + + The code generator that generates the initial file and the one which + inserts into it must both run as part of a single invocation of protoc. + Code generators are executed in the order in which they appear on the + command line. + + If |insertion_point| is present, |name| must also be present. + """ + + content: str = betterproto.string_field(15) + """The file contents.""" + + generated_code_info: "betterproto_lib_pydantic_google_protobuf.GeneratedCodeInfo" = betterproto.message_field( + 16 + ) + """ + Information describing the file content being inserted. If an insertion + point is used, this information will be appropriately offset and inserted + into the code generation metadata for the generated files. + """ + + +CodeGeneratorRequest.__pydantic_model__.update_forward_refs() # type: ignore +CodeGeneratorResponse.__pydantic_model__.update_forward_refs() # type: ignore +CodeGeneratorResponseFile.__pydantic_model__.update_forward_refs() # type: ignore diff --git a/src/betterproto/lib/std/__init__.py b/src/betterproto/lib/std/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/betterproto/lib/std/google/__init__.py b/src/betterproto/lib/std/google/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/betterproto/lib/std/google/protobuf/__init__.py b/src/betterproto/lib/std/google/protobuf/__init__.py new file mode 100644 index 000000000..2b87b3bc2 --- /dev/null +++ b/src/betterproto/lib/std/google/protobuf/__init__.py @@ -0,0 +1,2526 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# sources: google/protobuf/any.proto, google/protobuf/api.proto, google/protobuf/descriptor.proto, google/protobuf/duration.proto, google/protobuf/empty.proto, google/protobuf/field_mask.proto, google/protobuf/source_context.proto, google/protobuf/struct.proto, google/protobuf/timestamp.proto, google/protobuf/type.proto, google/protobuf/wrappers.proto +# plugin: python-betterproto + +import warnings +from dataclasses import dataclass +from typing import ( + Dict, + List, + Mapping, +) + +from typing_extensions import Self + +import betterproto +from betterproto.utils import hybridmethod + + +class Syntax(betterproto.Enum): + """The syntax in which a protocol buffer element is defined.""" + + PROTO2 = 0 + """Syntax `proto2`.""" + + PROTO3 = 1 + """Syntax `proto3`.""" + + EDITIONS = 2 + """Syntax `editions`.""" + + +class FieldKind(betterproto.Enum): + """Basic field types.""" + + TYPE_UNKNOWN = 0 + """Field type unknown.""" + + TYPE_DOUBLE = 1 + """Field type double.""" + + TYPE_FLOAT = 2 + """Field type float.""" + + TYPE_INT64 = 3 + """Field type int64.""" + + TYPE_UINT64 = 4 + """Field type uint64.""" + + TYPE_INT32 = 5 + """Field type int32.""" + + TYPE_FIXED64 = 6 + """Field type fixed64.""" + + TYPE_FIXED32 = 7 + """Field type fixed32.""" + + TYPE_BOOL = 8 + """Field type bool.""" + + TYPE_STRING = 9 + """Field type string.""" + + TYPE_GROUP = 10 + """Field type group. 
Proto2 syntax only, and deprecated.""" + + TYPE_MESSAGE = 11 + """Field type message.""" + + TYPE_BYTES = 12 + """Field type bytes.""" + + TYPE_UINT32 = 13 + """Field type uint32.""" + + TYPE_ENUM = 14 + """Field type enum.""" + + TYPE_SFIXED32 = 15 + """Field type sfixed32.""" + + TYPE_SFIXED64 = 16 + """Field type sfixed64.""" + + TYPE_SINT32 = 17 + """Field type sint32.""" + + TYPE_SINT64 = 18 + """Field type sint64.""" + + +class FieldCardinality(betterproto.Enum): + """Whether a field is optional, required, or repeated.""" + + CARDINALITY_UNKNOWN = 0 + """For fields with unknown cardinality.""" + + CARDINALITY_OPTIONAL = 1 + """For optional fields.""" + + CARDINALITY_REQUIRED = 2 + """For required fields. Proto2 syntax only.""" + + CARDINALITY_REPEATED = 3 + """For repeated fields.""" + + +class Edition(betterproto.Enum): + """The full set of known editions.""" + + UNKNOWN = 0 + """A placeholder for an unknown edition value.""" + + PROTO2 = 998 + """ + Legacy syntax "editions". These pre-date editions, but behave much like + distinct editions. These can't be used to specify the edition of proto + files, but feature definitions must supply proto2/proto3 defaults for + backwards compatibility. + """ + + PROTO3 = 999 + _2023 = 1000 + """ + Editions that have been released. The specific values are arbitrary and + should not be depended on, but they will always be time-ordered for easy + comparison. + """ + + _2024 = 1001 + _1_TEST_ONLY = 1 + """ + Placeholder editions for testing feature resolution. These should not be + used or relyed on outside of tests. + """ + + _2_TEST_ONLY = 2 + _99997_TEST_ONLY = 99997 + _99998_TEST_ONLY = 99998 + _99999_TEST_ONLY = 99999 + MAX = 2147483647 + """ + Placeholder for specifying unbounded edition support. This should only + ever be used by plugins that can expect to never require any changes to + support a new edition. 
+ """ + + +class ExtensionRangeOptionsVerificationState(betterproto.Enum): + """The verification state of the extension range.""" + + DECLARATION = 0 + """All the extensions of the range must be declared.""" + + UNVERIFIED = 1 + + +class FieldDescriptorProtoType(betterproto.Enum): + TYPE_DOUBLE = 1 + """ + 0 is reserved for errors. + Order is weird for historical reasons. + """ + + TYPE_FLOAT = 2 + TYPE_INT64 = 3 + """ + Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if + negative values are likely. + """ + + TYPE_UINT64 = 4 + TYPE_INT32 = 5 + """ + Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if + negative values are likely. + """ + + TYPE_FIXED64 = 6 + TYPE_FIXED32 = 7 + TYPE_BOOL = 8 + TYPE_STRING = 9 + TYPE_GROUP = 10 + """ + Tag-delimited aggregate. + Group type is deprecated and not supported after google.protobuf. However, Proto3 + implementations should still be able to parse the group wire format and + treat group fields as unknown fields. In Editions, the group wire format + can be enabled via the `message_encoding` feature. + """ + + TYPE_MESSAGE = 11 + TYPE_BYTES = 12 + """New in version 2.""" + + TYPE_UINT32 = 13 + TYPE_ENUM = 14 + TYPE_SFIXED32 = 15 + TYPE_SFIXED64 = 16 + TYPE_SINT32 = 17 + TYPE_SINT64 = 18 + + +class FieldDescriptorProtoLabel(betterproto.Enum): + LABEL_OPTIONAL = 1 + """0 is reserved for errors""" + + LABEL_REPEATED = 3 + LABEL_REQUIRED = 2 + """ + The required label is only allowed in google.protobuf. In proto3 and Editions + it's explicitly prohibited. In Editions, the `field_presence` feature + can be used to get this behavior. 
+ """ + + +class FileOptionsOptimizeMode(betterproto.Enum): + """Generated classes can be optimized for speed or code size.""" + + SPEED = 1 + CODE_SIZE = 2 + """etc.""" + + LITE_RUNTIME = 3 + + +class FieldOptionsCType(betterproto.Enum): + STRING = 0 + """Default mode.""" + + CORD = 1 + """ + The option [ctype=CORD] may be applied to a non-repeated field of type + "bytes". It indicates that in C++, the data should be stored in a Cord + instead of a string. For very large strings, this may reduce memory + fragmentation. It may also allow better performance when parsing from a + Cord, or when parsing with aliasing enabled, as the parsed Cord may then + alias the original buffer. + """ + + STRING_PIECE = 2 + + +class FieldOptionsJsType(betterproto.Enum): + JS_NORMAL = 0 + """Use the default type.""" + + JS_STRING = 1 + """Use JavaScript strings.""" + + JS_NUMBER = 2 + """Use JavaScript numbers.""" + + +class FieldOptionsOptionRetention(betterproto.Enum): + """ + If set to RETENTION_SOURCE, the option will be omitted from the binary. + Note: as of January 2023, support for this is in progress and does not yet + have an effect (b/264593489). + """ + + RETENTION_UNKNOWN = 0 + RETENTION_RUNTIME = 1 + RETENTION_SOURCE = 2 + + +class FieldOptionsOptionTargetType(betterproto.Enum): + """ + This indicates the types of entities that the field may apply to when used + as an option. If it is unset, then the field may be freely used as an + option on any kind of entity. Note: as of January 2023, support for this is + in progress and does not yet have an effect (b/264593489). 
+ """ + + TARGET_TYPE_UNKNOWN = 0 + TARGET_TYPE_FILE = 1 + TARGET_TYPE_EXTENSION_RANGE = 2 + TARGET_TYPE_MESSAGE = 3 + TARGET_TYPE_FIELD = 4 + TARGET_TYPE_ONEOF = 5 + TARGET_TYPE_ENUM = 6 + TARGET_TYPE_ENUM_ENTRY = 7 + TARGET_TYPE_SERVICE = 8 + TARGET_TYPE_METHOD = 9 + + +class MethodOptionsIdempotencyLevel(betterproto.Enum): + """ + Is this method side-effect-free (or safe in HTTP parlance), or idempotent, + or neither? HTTP based RPC implementation may choose GET verb for safe + methods, and PUT verb for idempotent methods instead of the default POST. + """ + + IDEMPOTENCY_UNKNOWN = 0 + NO_SIDE_EFFECTS = 1 + IDEMPOTENT = 2 + + +class FeatureSetFieldPresence(betterproto.Enum): + FIELD_PRESENCE_UNKNOWN = 0 + EXPLICIT = 1 + IMPLICIT = 2 + LEGACY_REQUIRED = 3 + + +class FeatureSetEnumType(betterproto.Enum): + ENUM_TYPE_UNKNOWN = 0 + OPEN = 1 + CLOSED = 2 + + +class FeatureSetRepeatedFieldEncoding(betterproto.Enum): + REPEATED_FIELD_ENCODING_UNKNOWN = 0 + PACKED = 1 + EXPANDED = 2 + + +class FeatureSetUtf8Validation(betterproto.Enum): + UTF8_VALIDATION_UNKNOWN = 0 + VERIFY = 2 + NONE = 3 + + +class FeatureSetMessageEncoding(betterproto.Enum): + MESSAGE_ENCODING_UNKNOWN = 0 + LENGTH_PREFIXED = 1 + DELIMITED = 2 + + +class FeatureSetJsonFormat(betterproto.Enum): + JSON_FORMAT_UNKNOWN = 0 + ALLOW = 1 + LEGACY_BEST_EFFORT = 2 + + +class GeneratedCodeInfoAnnotationSemantic(betterproto.Enum): + """ + Represents the identified object's effect on the element in the original + .proto file. + """ + + NONE = 0 + """There is no effect or the effect is indescribable.""" + + SET = 1 + """The element is set or otherwise mutated.""" + + ALIAS = 2 + """An alias to the element is returned.""" + + +class NullValue(betterproto.Enum): + """ + `NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. + + The JSON representation for `NullValue` is JSON `null`. 
+ """ + + _ = 0 + """Null value.""" + + +@dataclass(eq=False, repr=False) +class Any(betterproto.Message): + """ + `Any` contains an arbitrary serialized protocol buffer message along with a + URL that describes the type of the serialized message. + + Protobuf library provides support to pack/unpack Any values in the form + of utility functions or additional generated methods of the Any type. + + Example 1: Pack and unpack a message in C++. + + Foo foo = ...; + Any any; + any.PackFrom(foo); + ... + if (any.UnpackTo(&foo)) { + ... + } + + Example 2: Pack and unpack a message in Java. + + Foo foo = ...; + Any any = Any.pack(foo); + ... + if (any.is(Foo.class)) { + foo = any.unpack(Foo.class); + } + // or ... + if (any.isSameTypeAs(Foo.getDefaultInstance())) { + foo = any.unpack(Foo.getDefaultInstance()); + } + + Example 3: Pack and unpack a message in Python. + + foo = Foo(...) + any = Any() + any.Pack(foo) + ... + if any.Is(Foo.DESCRIPTOR): + any.Unpack(foo) + ... + + Example 4: Pack and unpack a message in Go + + foo := &pb.Foo{...} + any, err := anypb.New(foo) + if err != nil { + ... + } + ... + foo := &pb.Foo{} + if err := any.UnmarshalTo(foo); err != nil { + ... + } + + The pack methods provided by protobuf library will by default use + 'type.googleapis.com/full.type.name' as the type URL and the unpack + methods only use the fully qualified type name after the last '/' + in the type URL, for example "foo.bar.com/x/y.z" will yield type + name "y.z". + + JSON + ==== + The JSON representation of an `Any` value uses the regular + representation of the deserialized, embedded message, with an + additional field `@type` which contains the type URL. 
Example: + + package google.profile; + message Person { + string first_name = 1; + string last_name = 2; + } + + { + "@type": "type.googleapis.com/google.profile.Person", + "firstName": , + "lastName": + } + + If the embedded message type is well-known and has a custom JSON + representation, that representation will be embedded adding a field + `value` which holds the custom JSON in addition to the `@type` + field. Example (for message [google.protobuf.Duration][]): + + { + "@type": "type.googleapis.com/google.protobuf.Duration", + "value": "1.212s" + } + """ + + type_url: str = betterproto.string_field(1) + """ + A URL/resource name that uniquely identifies the type of the serialized + protocol buffer message. This string must contain at least + one "/" character. The last segment of the URL's path must represent + the fully qualified name of the type (as in + `path/google.protobuf.Duration`). The name should be in a canonical form + (e.g., leading "." is not accepted). + + In practice, teams usually precompile into the binary all types that they + expect it to use in the context of Any. However, for URLs which use the + scheme `http`, `https`, or no scheme, one can optionally set up a type + server that maps type URLs to message definitions as follows: + + * If no scheme is provided, `https` is assumed. + * An HTTP GET on the URL must yield a [google.protobuf.Type][] + value in binary format, or produce an error. + * Applications are allowed to cache lookup results based on the + URL, or have them precompiled into a binary to avoid any + lookup. Therefore, binary compatibility needs to be preserved + on changes to types. (Use versioned type names to manage + breaking changes.) + + Note: this functionality is not currently available in the official + protobuf release, and it is not used for type URLs beginning with + type.googleapis.com. As of May 2023, there are no widely used type server + implementations and no plans to implement one. 
+ + Schemes other than `http`, `https` (or the empty scheme) might be + used with implementation specific semantics. + """ + + value: bytes = betterproto.bytes_field(2) + """ + Must be a valid serialized protocol buffer of the above specified type. + """ + + +@dataclass(eq=False, repr=False) +class SourceContext(betterproto.Message): + """ + `SourceContext` represents information about the source of a + protobuf element, like the file in which it is defined. + """ + + file_name: str = betterproto.string_field(1) + """ + The path-qualified name of the .proto file that contained the associated + protobuf element. For example: `"google/protobuf/source_context.proto"`. + """ + + +@dataclass(eq=False, repr=False) +class Type(betterproto.Message): + """A protocol buffer message type.""" + + name: str = betterproto.string_field(1) + """The fully qualified message name.""" + + fields: List["Field"] = betterproto.message_field(2) + """The list of fields.""" + + oneofs: List[str] = betterproto.string_field(3) + """The list of types appearing in `oneof` definitions in this type.""" + + options: List["Option"] = betterproto.message_field(4) + """The protocol buffer options.""" + + source_context: "SourceContext" = betterproto.message_field(5) + """The source context.""" + + syntax: "Syntax" = betterproto.enum_field(6) + """The source syntax.""" + + edition: str = betterproto.string_field(7) + """ + The source edition string, only valid when syntax is SYNTAX_EDITIONS. 
+ """ + + +@dataclass(eq=False, repr=False) +class Field(betterproto.Message): + """A single field of a message type.""" + + kind: "FieldKind" = betterproto.enum_field(1) + """The field type.""" + + cardinality: "FieldCardinality" = betterproto.enum_field(2) + """The field cardinality.""" + + number: int = betterproto.int32_field(3) + """The field number.""" + + name: str = betterproto.string_field(4) + """The field name.""" + + type_url: str = betterproto.string_field(6) + """ + The field type URL, without the scheme, for message or enumeration + types. Example: `"type.googleapis.com/google.protobuf.Timestamp"`. + """ + + oneof_index: int = betterproto.int32_field(7) + """ + The index of the field type in `Type.oneofs`, for message or enumeration + types. The first type has index 1; zero means the type is not in the list. + """ + + packed: bool = betterproto.bool_field(8) + """Whether to use alternative packed wire representation.""" + + options: List["Option"] = betterproto.message_field(9) + """The protocol buffer options.""" + + json_name: str = betterproto.string_field(10) + """The field JSON name.""" + + default_value: str = betterproto.string_field(11) + """ + The string value of the default value of this field. Proto2 syntax only. + """ + + +@dataclass(eq=False, repr=False) +class Enum(betterproto.Message): + """Enum type definition.""" + + name: str = betterproto.string_field(1) + """Enum type name.""" + + enumvalue: List["EnumValue"] = betterproto.message_field( + 2, wraps=betterproto.TYPE_ENUM + ) + """Enum value definitions.""" + + options: List["Option"] = betterproto.message_field(3) + """Protocol buffer options.""" + + source_context: "SourceContext" = betterproto.message_field(4) + """The source context.""" + + syntax: "Syntax" = betterproto.enum_field(5) + """The source syntax.""" + + edition: str = betterproto.string_field(6) + """ + The source edition string, only valid when syntax is SYNTAX_EDITIONS. 
+ """ + + +@dataclass(eq=False, repr=False) +class EnumValue(betterproto.Message): + """Enum value definition.""" + + name: str = betterproto.string_field(1) + """Enum value name.""" + + number: int = betterproto.int32_field(2) + """Enum value number.""" + + options: List["Option"] = betterproto.message_field(3) + """Protocol buffer options.""" + + +@dataclass(eq=False, repr=False) +class Option(betterproto.Message): + """ + A protocol buffer option, which can be attached to a message, field, + enumeration, etc. + """ + + name: str = betterproto.string_field(1) + """ + The option's name. For protobuf built-in options (options defined in + descriptor.proto), this is the short name. For example, `"map_entry"`. + For custom options, it should be the fully-qualified name. For example, + `"google.api.http"`. + """ + + value: "Any" = betterproto.message_field(2) + """ + The option's value packed in an Any message. If the value is a primitive, + the corresponding wrapper type defined in google/protobuf/wrappers.proto + should be used. If the value is an enum, it should be stored as an int32 + value using the google.protobuf.Int32Value type. + """ + + +@dataclass(eq=False, repr=False) +class Api(betterproto.Message): + """ + Api is a light-weight descriptor for an API Interface. + + Interfaces are also described as "protocol buffer services" in some contexts, + such as by the "service" keyword in a .proto file, but they are different + from API Services, which represent a concrete implementation of an interface + as opposed to simply a description of methods and bindings. They are also + sometimes simply referred to as "APIs" in other contexts, such as the name of + this message itself. See https://cloud.google.com/apis/design/glossary for + detailed terminology. + """ + + name: str = betterproto.string_field(1) + """ + The fully qualified name of this interface, including package name + followed by the interface's simple name. 
+ """ + + methods: List["Method"] = betterproto.message_field(2) + """The methods of this interface, in unspecified order.""" + + options: List["Option"] = betterproto.message_field(3) + """Any metadata attached to the interface.""" + + version: str = betterproto.string_field(4) + """ + A version string for this interface. If specified, must have the form + `major-version.minor-version`, as in `1.10`. If the minor version is + omitted, it defaults to zero. If the entire version field is empty, the + major version is derived from the package name, as outlined below. If the + field is not empty, the version in the package name will be verified to be + consistent with what is provided here. + + The versioning schema uses [semantic + versioning](http://semver.org) where the major version number + indicates a breaking change and the minor version an additive, + non-breaking change. Both version numbers are signals to users + what to expect from different versions, and should be carefully + chosen based on the product plan. + + The major version is also reflected in the package name of the + interface, which must end in `v`, as in + `google.feature.v1`. For major versions 0 and 1, the suffix can + be omitted. Zero major versions must only be used for + experimental, non-GA interfaces. + """ + + source_context: "SourceContext" = betterproto.message_field(5) + """ + Source context for the protocol buffer service represented by this + message. + """ + + mixins: List["Mixin"] = betterproto.message_field(6) + """Included interfaces. 
See [Mixin][].""" + + syntax: "Syntax" = betterproto.enum_field(7) + """The source syntax of the service.""" + + +@dataclass(eq=False, repr=False) +class Method(betterproto.Message): + """Method represents a method of an API interface.""" + + name: str = betterproto.string_field(1) + """The simple name of this method.""" + + request_type_url: str = betterproto.string_field(2) + """A URL of the input message type.""" + + request_streaming: bool = betterproto.bool_field(3) + """If true, the request is streamed.""" + + response_type_url: str = betterproto.string_field(4) + """The URL of the output message type.""" + + response_streaming: bool = betterproto.bool_field(5) + """If true, the response is streamed.""" + + options: List["Option"] = betterproto.message_field(6) + """Any metadata attached to the method.""" + + syntax: "Syntax" = betterproto.enum_field(7) + """The source syntax of this method.""" + + +@dataclass(eq=False, repr=False) +class Mixin(betterproto.Message): + """ + Declares an API Interface to be included in this interface. The including + interface must redeclare all the methods from the included interface, but + documentation and options are inherited as follows: + + - If after comment and whitespace stripping, the documentation + string of the redeclared method is empty, it will be inherited + from the original method. + + - Each annotation belonging to the service config (http, + visibility) which is not set in the redeclared method will be + inherited. + + - If an http annotation is inherited, the path pattern will be + modified as follows. Any version prefix will be replaced by the + version of the including interface plus the [root][] path if + specified. + + Example of a simple mixin: + + package google.acl.v1; + service AccessControl { + // Get the underlying ACL object. 
+ rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v1/{resource=**}:getAcl"; + } + } + + package google.storage.v2; + service Storage { + rpc GetAcl(GetAclRequest) returns (Acl); + + // Get a data record. + rpc GetData(GetDataRequest) returns (Data) { + option (google.api.http).get = "/v2/{resource=**}"; + } + } + + Example of a mixin configuration: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + + The mixin construct implies that all methods in `AccessControl` are + also declared with same name and request/response types in + `Storage`. A documentation generator or annotation processor will + see the effective `Storage.GetAcl` method after inherting + documentation and annotations as follows: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/{resource=**}:getAcl"; + } + ... + } + + Note how the version in the path pattern changed from `v1` to `v2`. + + If the `root` field in the mixin is specified, it should be a + relative path under which inherited HTTP paths are placed. Example: + + apis: + - name: google.storage.v2.Storage + mixins: + - name: google.acl.v1.AccessControl + root: acls + + This implies the following inherited HTTP annotation: + + service Storage { + // Get the underlying ACL object. + rpc GetAcl(GetAclRequest) returns (Acl) { + option (google.api.http).get = "/v2/acls/{resource=**}:getAcl"; + } + ... + } + """ + + name: str = betterproto.string_field(1) + """The fully qualified name of the interface which is included.""" + + root: str = betterproto.string_field(2) + """ + If non-empty specifies a path under which inherited HTTP paths + are rooted. + """ + + +@dataclass(eq=False, repr=False) +class FileDescriptorSet(betterproto.Message): + """ + The protocol compiler can output a FileDescriptorSet containing the .proto + files it parses. 
+ """ + + file: List["FileDescriptorProto"] = betterproto.message_field(1) + + +@dataclass(eq=False, repr=False) +class FileDescriptorProto(betterproto.Message): + """Describes a complete .proto file.""" + + name: str = betterproto.string_field(1) + package: str = betterproto.string_field(2) + dependency: List[str] = betterproto.string_field(3) + """Names of files imported by this file.""" + + public_dependency: List[int] = betterproto.int32_field(10) + """Indexes of the public imported files in the dependency list above.""" + + weak_dependency: List[int] = betterproto.int32_field(11) + """ + Indexes of the weak imported files in the dependency list. + For Google-internal migration only. Do not use. + """ + + message_type: List["DescriptorProto"] = betterproto.message_field(4) + """All top-level definitions in this file.""" + + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(5) + service: List["ServiceDescriptorProto"] = betterproto.message_field(6) + extension: List["FieldDescriptorProto"] = betterproto.message_field(7) + options: "FileOptions" = betterproto.message_field(8) + source_code_info: "SourceCodeInfo" = betterproto.message_field(9) + """ + This field contains optional information about the original source code. + You may safely remove this entire field without harming runtime + functionality of the descriptors -- the information is needed only by + development tools. + """ + + syntax: str = betterproto.string_field(12) + """ + The syntax of the proto file. + The supported values are "proto2", "proto3", and "editions". + + If `edition` is present, this value must be "editions". 
+ """ + + edition: "Edition" = betterproto.enum_field(14) + """The edition of the proto file.""" + + +@dataclass(eq=False, repr=False) +class DescriptorProto(betterproto.Message): + """Describes a message type.""" + + name: str = betterproto.string_field(1) + field: List["FieldDescriptorProto"] = betterproto.message_field(2) + extension: List["FieldDescriptorProto"] = betterproto.message_field(6) + nested_type: List["DescriptorProto"] = betterproto.message_field(3) + enum_type: List["EnumDescriptorProto"] = betterproto.message_field(4) + extension_range: List["DescriptorProtoExtensionRange"] = betterproto.message_field( + 5 + ) + oneof_decl: List["OneofDescriptorProto"] = betterproto.message_field(8) + options: "MessageOptions" = betterproto.message_field(7) + reserved_range: List["DescriptorProtoReservedRange"] = betterproto.message_field(9) + reserved_name: List[str] = betterproto.string_field(10) + """ + Reserved field names, which may not be used by fields in the same message. + A given name may only be reserved once. + """ + + +@dataclass(eq=False, repr=False) +class DescriptorProtoExtensionRange(betterproto.Message): + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + options: "ExtensionRangeOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class DescriptorProtoReservedRange(betterproto.Message): + """ + Range of reserved tag numbers. Reserved tag numbers may not be used by + fields or extension ranges in the same message. Reserved ranges may + not overlap. + """ + + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + + +@dataclass(eq=False, repr=False) +class ExtensionRangeOptions(betterproto.Message): + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. 
See above.""" + + declaration: List["ExtensionRangeOptionsDeclaration"] = betterproto.message_field(2) + """ + For external users: DO NOT USE. We are in the process of open sourcing + extension declaration and executing internal cleanups before it can be + used externally. + """ + + features: "FeatureSet" = betterproto.message_field(50) + """Any features defined in the specific edition.""" + + verification: "ExtensionRangeOptionsVerificationState" = betterproto.enum_field(3) + """ + The verification state of the range. + TODO: flip the default to DECLARATION once all empty ranges + are marked as UNVERIFIED. + """ + + +@dataclass(eq=False, repr=False) +class ExtensionRangeOptionsDeclaration(betterproto.Message): + number: int = betterproto.int32_field(1) + """The extension number declared within the extension range.""" + + full_name: str = betterproto.string_field(2) + """ + The fully-qualified name of the extension field. There must be a leading + dot in front of the full name. + """ + + type: str = betterproto.string_field(3) + """ + The fully-qualified type name of the extension field. Unlike + Metadata.type, Declaration.type must have a leading dot for messages + and enums. + """ + + reserved: bool = betterproto.bool_field(5) + """ + If true, indicates that the number is reserved in the extension range, + and any extension field with the number will fail to compile. Set this + when a declared extension field is deleted. + """ + + repeated: bool = betterproto.bool_field(6) + """ + If true, indicates that the extension must be defined as repeated. + Otherwise the extension must be defined as optional. 
+ """ + + +@dataclass(eq=False, repr=False) +class FieldDescriptorProto(betterproto.Message): + """Describes a field within a message.""" + + name: str = betterproto.string_field(1) + number: int = betterproto.int32_field(3) + label: "FieldDescriptorProtoLabel" = betterproto.enum_field(4) + type: "FieldDescriptorProtoType" = betterproto.enum_field(5) + """ + If type_name is set, this need not be set. If both this and type_name + are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. + """ + + type_name: str = betterproto.string_field(6) + """ + For message and enum types, this is the name of the type. If the name + starts with a '.', it is fully-qualified. Otherwise, C++-like scoping + rules are used to find the type (i.e. first the nested types within this + message are searched, then within the parent, on up to the root + namespace). + """ + + extendee: str = betterproto.string_field(2) + """ + For extensions, this is the name of the type being extended. It is + resolved in the same manner as type_name. + """ + + default_value: str = betterproto.string_field(7) + """ + For numeric types, contains the original text representation of the value. + For booleans, "true" or "false". + For strings, contains the default text contents (not escaped in any way). + For bytes, contains the C escaped value. All bytes >= 128 are escaped. + """ + + oneof_index: int = betterproto.int32_field(9) + """ + If set, gives the index of a oneof in the containing type's oneof_decl + list. This field is a member of that oneof. + """ + + json_name: str = betterproto.string_field(10) + """ + JSON name of this field. The value is set by protocol compiler. If the + user has set a "json_name" option on this field, that option's value + will be used. Otherwise, it's deduced from the field's name by converting + it to camelCase. 
+ """ + + options: "FieldOptions" = betterproto.message_field(8) + proto3_optional: bool = betterproto.bool_field(17) + """ + If true, this is a proto3 "optional". When a proto3 field is optional, it + tracks presence regardless of field type. + + When proto3_optional is true, this field must belong to a oneof to signal + to old proto3 clients that presence is tracked for this field. This oneof + is known as a "synthetic" oneof, and this field must be its sole member + (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs + exist in the descriptor only, and do not generate any API. Synthetic oneofs + must be ordered after all "real" oneofs. + + For message fields, proto3_optional doesn't create any semantic change, + since non-repeated message fields always track presence. However it still + indicates the semantic detail of whether the user wrote "optional" or not. + This can be useful for round-tripping the .proto file. For consistency we + give message fields a synthetic oneof also, even though it is not required + to track presence. This is especially important because the parser can't + tell if a field is a message or an enum, so it must always create a + synthetic oneof. + + Proto2 optional fields do not set this flag, because they already indicate + optional with `LABEL_OPTIONAL`. + """ + + +@dataclass(eq=False, repr=False) +class OneofDescriptorProto(betterproto.Message): + """Describes a oneof.""" + + name: str = betterproto.string_field(1) + options: "OneofOptions" = betterproto.message_field(2) + + +@dataclass(eq=False, repr=False) +class EnumDescriptorProto(betterproto.Message): + """Describes an enum type.""" + + name: str = betterproto.string_field(1) + value: List["EnumValueDescriptorProto"] = betterproto.message_field(2) + options: "EnumOptions" = betterproto.message_field(3) + reserved_range: List["EnumDescriptorProtoEnumReservedRange"] = ( + betterproto.message_field(4) + ) + """ + Range of reserved numeric values. 
Reserved numeric values may not be used + by enum values in the same enum declaration. Reserved ranges may not + overlap. + """ + + reserved_name: List[str] = betterproto.string_field(5) + """ + Reserved enum value names, which may not be reused. A given name may only + be reserved once. + """ + + +@dataclass(eq=False, repr=False) +class EnumDescriptorProtoEnumReservedRange(betterproto.Message): + """ + Range of reserved numeric values. Reserved values may not be used by + entries in the same enum. Reserved ranges may not overlap. + + Note that this is distinct from DescriptorProto.ReservedRange in that it + is inclusive such that it can appropriately represent the entire int32 + domain. + """ + + start: int = betterproto.int32_field(1) + end: int = betterproto.int32_field(2) + + +@dataclass(eq=False, repr=False) +class EnumValueDescriptorProto(betterproto.Message): + """Describes a value within an enum.""" + + name: str = betterproto.string_field(1) + number: int = betterproto.int32_field(2) + options: "EnumValueOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class ServiceDescriptorProto(betterproto.Message): + """Describes a service.""" + + name: str = betterproto.string_field(1) + method: List["MethodDescriptorProto"] = betterproto.message_field(2) + options: "ServiceOptions" = betterproto.message_field(3) + + +@dataclass(eq=False, repr=False) +class MethodDescriptorProto(betterproto.Message): + """Describes a method of a service.""" + + name: str = betterproto.string_field(1) + input_type: str = betterproto.string_field(2) + """ + Input and output type names. These are resolved in the same way as + FieldDescriptorProto.type_name, but must refer to a message type. 
+ """ + + output_type: str = betterproto.string_field(3) + options: "MethodOptions" = betterproto.message_field(4) + client_streaming: bool = betterproto.bool_field(5) + """Identifies if client streams multiple client messages""" + + server_streaming: bool = betterproto.bool_field(6) + """Identifies if server streams multiple server messages""" + + +@dataclass(eq=False, repr=False) +class FileOptions(betterproto.Message): + java_package: str = betterproto.string_field(1) + """ + Sets the Java package where classes generated from this .proto will be + placed. By default, the proto package is used, but this is often + inappropriate because proto packages do not normally start with backwards + domain names. + """ + + java_outer_classname: str = betterproto.string_field(8) + """ + Controls the name of the wrapper Java class generated for the .proto file. + That class will always contain the .proto file's getDescriptor() method as + well as any top-level extensions defined in the .proto file. + If java_multiple_files is disabled, then all the other classes from the + .proto file will be nested inside the single wrapper outer class. + """ + + java_multiple_files: bool = betterproto.bool_field(10) + """ + If enabled, then the Java code generator will generate a separate .java + file for each top-level message, enum, and service defined in the .proto + file. Thus, these types will *not* be nested inside the wrapper class + named by java_outer_classname. However, the wrapper class will still be + generated to contain the file's getDescriptor() method as well as any + top-level extensions defined in the file. + """ + + java_generate_equals_and_hash: bool = betterproto.bool_field(20) + """This option does nothing.""" + + java_string_check_utf8: bool = betterproto.bool_field(27) + """ + A proto2 file can set this to true to opt in to UTF-8 checking for Java, + which will throw an exception if invalid UTF-8 is parsed from the wire or + assigned to a string field. 
+ + TODO: clarify exactly what kinds of field types this option + applies to, and update these docs accordingly. + + Proto3 files already perform these checks. Setting the option explicitly to + false has no effect: it cannot be used to opt proto3 files out of UTF-8 + checks. + """ + + optimize_for: "FileOptionsOptimizeMode" = betterproto.enum_field(9) + go_package: str = betterproto.string_field(11) + """ + Sets the Go package where structs generated from this .proto will be + placed. If omitted, the Go package will be derived from the following: + - The basename of the package import path, if provided. + - Otherwise, the package statement in the .proto file, if present. + - Otherwise, the basename of the .proto file, without extension. + """ + + cc_generic_services: bool = betterproto.bool_field(16) + """ + Should generic services be generated in each language? "Generic" services + are not specific to any particular RPC system. They are generated by the + main code generators in each language (without additional plugins). + Generic services were the only kind of service generation supported by + early versions of google.protobuf. + + Generic services are now considered deprecated in favor of using plugins + that generate code specific to your particular RPC system. Therefore, + these default to false. Old code which depends on generic services should + explicitly set them to true. + """ + + java_generic_services: bool = betterproto.bool_field(17) + py_generic_services: bool = betterproto.bool_field(18) + deprecated: bool = betterproto.bool_field(23) + """ + Is this file deprecated? + Depending on the target platform, this can emit Deprecated annotations + for everything in the file, or it will be completely ignored; in the very + least, this is a formalization for deprecating files. + """ + + cc_enable_arenas: bool = betterproto.bool_field(31) + """ + Enables the use of arenas for the proto messages in this file. This applies + only to generated classes for C++. 
+ """ + + objc_class_prefix: str = betterproto.string_field(36) + """ + Sets the objective c class prefix which is prepended to all objective c + generated classes from this .proto. There is no default. + """ + + csharp_namespace: str = betterproto.string_field(37) + """Namespace for generated classes; defaults to the package.""" + + swift_prefix: str = betterproto.string_field(39) + """ + By default Swift generators will take the proto package and CamelCase it + replacing '.' with underscore and use that to prefix the types/symbols + defined. When this options is provided, they will use this value instead + to prefix the types/symbols defined. + """ + + php_class_prefix: str = betterproto.string_field(40) + """ + Sets the php class prefix which is prepended to all php generated classes + from this .proto. Default is empty. + """ + + php_namespace: str = betterproto.string_field(41) + """ + Use this option to change the namespace of php generated classes. Default + is empty. When this option is empty, the package name will be used for + determining the namespace. + """ + + php_metadata_namespace: str = betterproto.string_field(44) + """ + Use this option to change the namespace of php generated metadata classes. + Default is empty. When this option is empty, the proto file name will be + used for determining the namespace. + """ + + ruby_package: str = betterproto.string_field(45) + """ + Use this option to change the package of ruby generated classes. Default + is empty. When this option is not set, the package name will be used for + determining the ruby package. + """ + + features: "FeatureSet" = betterproto.message_field(50) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """ + The parser stores options it doesn't recognize here. + See the documentation for the "Options" section above. 
+ """ + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("java_generate_equals_and_hash"): + warnings.warn( + "FileOptions.java_generate_equals_and_hash is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class MessageOptions(betterproto.Message): + message_set_wire_format: bool = betterproto.bool_field(1) + """ + Set true to use the old proto1 MessageSet wire format for extensions. + This is provided for backwards-compatibility with the MessageSet wire + format. You should not use this for any other reason: It's less + efficient, has fewer features, and is more complicated. + + The message must be defined exactly as follows: + message Foo { + option message_set_wire_format = true; + extensions 4 to max; + } + Note that the message cannot have any defined fields; MessageSets only + have extensions. + + All extensions of your type must be singular messages; e.g. they cannot + be int32s, enums, or repeated messages. + + Because this is an option, the above two restrictions are not enforced by + the protocol compiler. + """ + + no_standard_descriptor_accessor: bool = betterproto.bool_field(2) + """ + Disables the generation of the standard "descriptor()" accessor, which can + conflict with a field of the same name. This is meant to make migration + from proto1 easier; new code should avoid fields named "descriptor". + """ + + deprecated: bool = betterproto.bool_field(3) + """ + Is this message deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the message, or it will be completely ignored; in the very least, + this is a formalization for deprecating messages. + """ + + map_entry: bool = betterproto.bool_field(7) + """ + Whether the message is an automatically generated map entry type for the + maps field. 
+ + For maps fields: + map map_field = 1; + The parsed descriptor looks like: + message MapFieldEntry { + option map_entry = true; + optional KeyType key = 1; + optional ValueType value = 2; + } + repeated MapFieldEntry map_field = 1; + + Implementations may choose not to generate the map_entry=true message, but + use a native map in the target language to hold the keys and values. + The reflection APIs in such implementations still need to work as + if the field is a repeated message field. + + NOTE: Do not set the option in .proto files. Always use the maps syntax + instead. The option should only be implicitly set by the proto compiler + parser. + """ + + deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(11) + """ + Enable the legacy handling of JSON field name conflicts. This lowercases + and strips underscored from the fields before comparison in proto3 only. + The new behavior takes `json_name` into account and applies to proto2 as + well. + + This should only be used as a temporary measure against broken builds due + to the change in behavior for JSON field name conflicts. + + TODO This is legacy behavior we plan to remove once downstream + teams have had time to migrate. + """ + + features: "FeatureSet" = betterproto.message_field(12) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. 
See above.""" + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("deprecated_legacy_json_field_conflicts"): + warnings.warn( + "MessageOptions.deprecated_legacy_json_field_conflicts is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class FieldOptions(betterproto.Message): + ctype: "FieldOptionsCType" = betterproto.enum_field(1) + """ + The ctype option instructs the C++ code generator to use a different + representation of the field than it normally would. See the specific + options below. This option is only implemented to support use of + [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of + type "bytes" in the open source release -- sorry, we'll try to include + other types in a future version! + """ + + packed: bool = betterproto.bool_field(2) + """ + The packed option can be enabled for repeated primitive fields to enable + a more efficient representation on the wire. Rather than repeatedly + writing the tag and type for each element, the entire array is encoded as + a single length-delimited blob. In proto3, only explicit setting it to + false will avoid using packed encoding. This option is prohibited in + Editions, but the `repeated_field_encoding` feature can be used to control + the behavior. + """ + + jstype: "FieldOptionsJsType" = betterproto.enum_field(6) + """ + The jstype option determines the JavaScript type used for values of the + field. The option is permitted only for 64 bit integral and fixed types + (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING + is represented as JavaScript string, which avoids loss of precision that + can happen when a large value is converted to a floating point JavaScript. + Specifying JS_NUMBER for the jstype causes the generated JavaScript code to + use the JavaScript "number" type. The behavior of the default option + JS_NORMAL is implementation dependent. 
+ + This option is an enum to permit additional types to be added, e.g. + goog.math.Integer. + """ + + lazy: bool = betterproto.bool_field(5) + """ + Should this field be parsed lazily? Lazy applies only to message-type + fields. It means that when the outer message is initially parsed, the + inner message's contents will not be parsed but instead stored in encoded + form. The inner message will actually be parsed when it is first accessed. + + This is only a hint. Implementations are free to choose whether to use + eager or lazy parsing regardless of the value of this option. However, + setting this option true suggests that the protocol author believes that + using lazy parsing on this field is worth the additional bookkeeping + overhead typically needed to implement it. + + This option does not affect the public interface of any generated code; + all method signatures remain the same. Furthermore, thread-safety of the + interface is not affected by this option; const methods remain safe to + call from multiple threads concurrently, while non-const methods continue + to require exclusive access. + + Note that lazy message fields are still eagerly verified to check + ill-formed wireformat or missing required fields. Calling IsInitialized() + on the outer message would fail if the inner message has missing required + fields. Failed verification would result in parsing failure (except when + uninitialized messages are acceptable). + """ + + unverified_lazy: bool = betterproto.bool_field(15) + """ + unverified_lazy does no correctness checks on the byte stream. This should + only be used where lazy with verification is prohibitive for performance + reasons. + """ + + deprecated: bool = betterproto.bool_field(3) + """ + Is this field deprecated? + Depending on the target platform, this can emit Deprecated annotations + for accessors, or it will be completely ignored; in the very least, this + is a formalization for deprecating fields. 
+ """ + + weak: bool = betterproto.bool_field(10) + """For Google-internal migration only. Do not use.""" + + debug_redact: bool = betterproto.bool_field(16) + """ + Indicate that the field value should not be printed out when using debug + formats, e.g. when the field contains sensitive credentials. + """ + + retention: "FieldOptionsOptionRetention" = betterproto.enum_field(17) + targets: List["FieldOptionsOptionTargetType"] = betterproto.enum_field(19) + edition_defaults: List["FieldOptionsEditionDefault"] = betterproto.message_field(20) + features: "FeatureSet" = betterproto.message_field(21) + """Any features defined in the specific edition.""" + + feature_support: "FieldOptionsFeatureSupport" = betterproto.message_field(22) + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class FieldOptionsEditionDefault(betterproto.Message): + edition: "Edition" = betterproto.enum_field(3) + value: str = betterproto.string_field(2) + + +@dataclass(eq=False, repr=False) +class FieldOptionsFeatureSupport(betterproto.Message): + """Information about the support window of a feature.""" + + edition_introduced: "Edition" = betterproto.enum_field(1) + """ + The edition that this feature was first available in. In editions + earlier than this one, the default assigned to EDITION_LEGACY will be + used, and proto files will not be able to override it. + """ + + edition_deprecated: "Edition" = betterproto.enum_field(2) + """ + The edition this feature becomes deprecated in. Using this after this + edition may trigger warnings. + """ + + deprecation_warning: str = betterproto.string_field(3) + """ + The deprecation warning text if this feature is used after the edition it + was marked deprecated in. + """ + + edition_removed: "Edition" = betterproto.enum_field(4) + """ + The edition this feature is no longer available in. 
In editions after + this one, the last default assigned will be used, and proto files will + not be able to override it. + """ + + +@dataclass(eq=False, repr=False) +class OneofOptions(betterproto.Message): + features: "FeatureSet" = betterproto.message_field(1) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class EnumOptions(betterproto.Message): + allow_alias: bool = betterproto.bool_field(2) + """ + Set this option to true to allow mapping different tag names to the same + value. + """ + + deprecated: bool = betterproto.bool_field(3) + """ + Is this enum deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum, or it will be completely ignored; in the very least, this + is a formalization for deprecating enums. + """ + + deprecated_legacy_json_field_conflicts: bool = betterproto.bool_field(6) + """ + Enable the legacy handling of JSON field name conflicts. This lowercases + and strips underscored from the fields before comparison in proto3 only. + The new behavior takes `json_name` into account and applies to proto2 as + well. + TODO Remove this legacy behavior once downstream teams have + had time to migrate. + """ + + features: "FeatureSet" = betterproto.message_field(7) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. 
See above.""" + + def __post_init__(self) -> None: + super().__post_init__() + if self.is_set("deprecated_legacy_json_field_conflicts"): + warnings.warn( + "EnumOptions.deprecated_legacy_json_field_conflicts is deprecated", + DeprecationWarning, + ) + + +@dataclass(eq=False, repr=False) +class EnumValueOptions(betterproto.Message): + deprecated: bool = betterproto.bool_field(1) + """ + Is this enum value deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the enum value, or it will be completely ignored; in the very least, + this is a formalization for deprecating enum values. + """ + + features: "FeatureSet" = betterproto.message_field(2) + """Any features defined in the specific edition.""" + + debug_redact: bool = betterproto.bool_field(3) + """ + Indicate that fields annotated with this enum value should not be printed + out when using debug formats, e.g. when the field contains sensitive + credentials. + """ + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class ServiceOptions(betterproto.Message): + features: "FeatureSet" = betterproto.message_field(34) + """Any features defined in the specific edition.""" + + deprecated: bool = betterproto.bool_field(33) + """ + Is this service deprecated? + Depending on the target platform, this can emit Deprecated annotations + for the service, or it will be completely ignored; in the very least, + this is a formalization for deprecating services. + """ + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class MethodOptions(betterproto.Message): + deprecated: bool = betterproto.bool_field(33) + """ + Is this method deprecated? 
+ Depending on the target platform, this can emit Deprecated annotations + for the method, or it will be completely ignored; in the very least, + this is a formalization for deprecating methods. + """ + + idempotency_level: "MethodOptionsIdempotencyLevel" = betterproto.enum_field(34) + features: "FeatureSet" = betterproto.message_field(35) + """Any features defined in the specific edition.""" + + uninterpreted_option: List["UninterpretedOption"] = betterproto.message_field(999) + """The parser stores options it doesn't recognize here. See above.""" + + +@dataclass(eq=False, repr=False) +class UninterpretedOption(betterproto.Message): + """ + A message representing a option the parser does not recognize. This only + appears in options protos created by the compiler::Parser class. + DescriptorPool resolves these when building Descriptor objects. Therefore, + options protos in descriptor objects (e.g. returned by Descriptor::options(), + or produced by Descriptor::CopyTo()) will never have UninterpretedOptions + in them. + """ + + name: List["UninterpretedOptionNamePart"] = betterproto.message_field(2) + identifier_value: str = betterproto.string_field(3) + """ + The value of the uninterpreted option, in whatever type the tokenizer + identified it as during parsing. Exactly one of these should be set. + """ + + positive_int_value: int = betterproto.uint64_field(4) + negative_int_value: int = betterproto.int64_field(5) + double_value: float = betterproto.double_field(6) + string_value: bytes = betterproto.bytes_field(7) + aggregate_value: str = betterproto.string_field(8) + + +@dataclass(eq=False, repr=False) +class UninterpretedOptionNamePart(betterproto.Message): + """ + The name of the uninterpreted option. Each string represents a segment in + a dot-separated name. is_extension is true iff a segment represents an + extension (denoted with parentheses in options specs in .proto files). 
+ E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents + "foo.(bar.baz).moo". + """ + + name_part: str = betterproto.string_field(1) + is_extension: bool = betterproto.bool_field(2) + + +@dataclass(eq=False, repr=False) +class FeatureSet(betterproto.Message): + """ + TODO Enums in C++ gencode (and potentially other languages) are + not well scoped. This means that each of the feature enums below can clash + with each other. The short names we've chosen maximize call-site + readability, but leave us very open to this scenario. A future feature will + be designed and implemented to handle this, hopefully before we ever hit a + conflict here. + """ + + field_presence: "FeatureSetFieldPresence" = betterproto.enum_field(1) + enum_type: "FeatureSetEnumType" = betterproto.enum_field(2) + repeated_field_encoding: "FeatureSetRepeatedFieldEncoding" = betterproto.enum_field( + 3 + ) + utf8_validation: "FeatureSetUtf8Validation" = betterproto.enum_field(4) + message_encoding: "FeatureSetMessageEncoding" = betterproto.enum_field(5) + json_format: "FeatureSetJsonFormat" = betterproto.enum_field(6) + + +@dataclass(eq=False, repr=False) +class FeatureSetDefaults(betterproto.Message): + """ + A compiled specification for the defaults of a set of features. These + messages are generated from FeatureSet extensions and can be used to seed + feature resolution. The resolution with this object becomes a simple search + for the closest matching edition, followed by proto merges. + """ + + defaults: List["FeatureSetDefaultsFeatureSetEditionDefault"] = ( + betterproto.message_field(1) + ) + minimum_edition: "Edition" = betterproto.enum_field(4) + """ + The minimum supported edition (inclusive) when this was constructed. + Editions before this will not have defaults. + """ + + maximum_edition: "Edition" = betterproto.enum_field(5) + """ + The maximum known edition (inclusive) when this was constructed. Editions + after this will not have reliable defaults. 
+ """ + + +@dataclass(eq=False, repr=False) +class FeatureSetDefaultsFeatureSetEditionDefault(betterproto.Message): + """ + A map from every known edition with a unique set of defaults to its + defaults. Not all editions may be contained here. For a given edition, + the defaults at the closest matching edition ordered at or before it should + be used. This field must be in strict ascending order by edition. + """ + + edition: "Edition" = betterproto.enum_field(3) + overridable_features: "FeatureSet" = betterproto.message_field(4) + """Defaults of features that can be overridden in this edition.""" + + fixed_features: "FeatureSet" = betterproto.message_field(5) + """Defaults of features that can't be overridden in this edition.""" + + features: "FeatureSet" = betterproto.message_field(2) + """ + TODO Deprecate and remove this field, which is just the + above two merged. + """ + + +@dataclass(eq=False, repr=False) +class SourceCodeInfo(betterproto.Message): + """ + Encapsulates information about the original source file from which a + FileDescriptorProto was generated. + """ + + location: List["SourceCodeInfoLocation"] = betterproto.message_field(1) + """ + A Location identifies a piece of source code in a .proto file which + corresponds to a particular definition. This information is intended + to be useful to IDEs, code indexers, documentation generators, and similar + tools. + + For example, say we have a file like: + message Foo { + optional string foo = 1; + } + Let's look at just the field definition: + optional string foo = 1; + ^ ^^ ^^ ^ ^^^ + a bc de f ghi + We have the following locations: + span path represents + [a,i) [ 4, 0, 2, 0 ] The whole field definition. + [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). + [c,d) [ 4, 0, 2, 0, 5 ] The type (string). + [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). + [g,h) [ 4, 0, 2, 0, 3 ] The number (1). + + Notes: + - A location may refer to a repeated field itself (i.e. not to any + particular index within it). 
This is used whenever a set of elements are + logically enclosed in a single code segment. For example, an entire + extend block (possibly containing multiple extension definitions) will + have an outer location whose path refers to the "extensions" repeated + field without an index. + - Multiple locations may have the same path. This happens when a single + logical declaration is spread out across multiple places. The most + obvious example is the "extend" block again -- there may be multiple + extend blocks in the same scope, each of which will have the same path. + - A location's span is not always a subset of its parent's span. For + example, the "extendee" of an extension declaration appears at the + beginning of the "extend" block and is shared by all extensions within + the block. + - Just because a location's span is a subset of some other location's span + does not mean that it is a descendant. For example, a "group" defines + both a type and a field in a single declaration. Thus, the locations + corresponding to the type and field and their components will overlap. + - Code which tries to interpret locations should probably be designed to + ignore those that it doesn't understand, as more types of locations could + be recorded in the future. + """ + + +@dataclass(eq=False, repr=False) +class SourceCodeInfoLocation(betterproto.Message): + path: List[int] = betterproto.int32_field(1) + """ + Identifies which part of the FileDescriptorProto was defined at this + location. + + Each element is a field number or an index. They form a path from + the root FileDescriptorProto to the place where the definition appears. 
+ For example, this path: + [ 4, 3, 2, 7, 1 ] + refers to: + file.message_type(3) // 4, 3 + .field(7) // 2, 7 + .name() // 1 + This is because FileDescriptorProto.message_type has field number 4: + repeated DescriptorProto message_type = 4; + and DescriptorProto.field has field number 2: + repeated FieldDescriptorProto field = 2; + and FieldDescriptorProto.name has field number 1: + optional string name = 1; + + Thus, the above path gives the location of a field name. If we removed + the last element: + [ 4, 3, 2, 7 ] + this path refers to the whole field declaration (from the beginning + of the label to the terminating semicolon). + """ + + span: List[int] = betterproto.int32_field(2) + """ + Always has exactly three or four elements: start line, start column, + end line (optional, otherwise assumed same as start line), end column. + These are packed into a single field for efficiency. Note that line + and column numbers are zero-based -- typically you will want to add + 1 to each before displaying to a user. + """ + + leading_comments: str = betterproto.string_field(3) + """ + If this SourceCodeInfo represents a complete declaration, these are any + comments appearing before and after the declaration which appear to be + attached to the declaration. + + A series of line comments appearing on consecutive lines, with no other + tokens appearing on those lines, will be treated as a single comment. + + leading_detached_comments will keep paragraphs of comments that appear + before (but not connected to) the current element. Each paragraph, + separated by empty lines, will be one comment element in the repeated + field. + + Only the comment content is provided; comment markers (e.g. //) are + stripped out. For block comments, leading whitespace and an asterisk + will be stripped from the beginning of each line other than the first. + Newlines are included in the output. + + Examples: + + optional int32 foo = 1; // Comment attached to foo. + // Comment attached to bar. 
+ optional int32 bar = 2; + + optional string baz = 3; + // Comment attached to baz. + // Another line attached to baz. + + // Comment attached to moo. + // + // Another line attached to moo. + optional double moo = 4; + + // Detached comment for corge. This is not leading or trailing comments + // to moo or corge because there are blank lines separating it from + // both. + + // Detached comment for corge paragraph 2. + + optional string corge = 5; + /* Block comment attached + * to corge. Leading asterisks + * will be removed. */ + /* Block comment attached to + * grault. */ + optional int32 grault = 6; + + // ignored detached comments. + """ + + trailing_comments: str = betterproto.string_field(4) + leading_detached_comments: List[str] = betterproto.string_field(6) + + +@dataclass(eq=False, repr=False) +class GeneratedCodeInfo(betterproto.Message): + """ + Describes the relationship between generated code and its original source + file. A GeneratedCodeInfo message is associated with only one generated + source file, but may contain references to different source .proto files. + """ + + annotation: List["GeneratedCodeInfoAnnotation"] = betterproto.message_field(1) + """ + An Annotation connects some span of text in generated code to an element + of its generating .proto file. + """ + + +@dataclass(eq=False, repr=False) +class GeneratedCodeInfoAnnotation(betterproto.Message): + path: List[int] = betterproto.int32_field(1) + """ + Identifies the element in the original source .proto file. This field + is formatted the same as SourceCodeInfo.Location.path. + """ + + source_file: str = betterproto.string_field(2) + """Identifies the filesystem path to the original source .proto.""" + + begin: int = betterproto.int32_field(3) + """ + Identifies the starting offset in bytes in the generated code + that relates to the identified object. 
+ """ + + end: int = betterproto.int32_field(4) + """ + Identifies the ending offset in bytes in the generated code that + relates to the identified object. The end offset should be one past + the last relevant byte (so the length of the text = end - begin). + """ + + semantic: "GeneratedCodeInfoAnnotationSemantic" = betterproto.enum_field(5) + + +@dataclass(eq=False, repr=False) +class Duration(betterproto.Message): + """ + A Duration represents a signed, fixed-length span of time represented + as a count of seconds and fractions of seconds at nanosecond + resolution. It is independent of any calendar and concepts like "day" + or "month". It is related to Timestamp in that the difference between + two Timestamp values is a Duration and it can be added or subtracted + from a Timestamp. Range is approximately +-10,000 years. + + # Examples + + Example 1: Compute Duration from two Timestamps in pseudo code. + + Timestamp start = ...; + Timestamp end = ...; + Duration duration = ...; + + duration.seconds = end.seconds - start.seconds; + duration.nanos = end.nanos - start.nanos; + + if (duration.seconds < 0 && duration.nanos > 0) { + duration.seconds += 1; + duration.nanos -= 1000000000; + } else if (duration.seconds > 0 && duration.nanos < 0) { + duration.seconds -= 1; + duration.nanos += 1000000000; + } + + Example 2: Compute Timestamp from Timestamp + Duration in pseudo code. + + Timestamp start = ...; + Duration duration = ...; + Timestamp end = ...; + + end.seconds = start.seconds + duration.seconds; + end.nanos = start.nanos + duration.nanos; + + if (end.nanos < 0) { + end.seconds -= 1; + end.nanos += 1000000000; + } else if (end.nanos >= 1000000000) { + end.seconds += 1; + end.nanos -= 1000000000; + } + + Example 3: Compute Duration from datetime.timedelta in Python. 
+ + td = datetime.timedelta(days=3, minutes=10) + duration = Duration() + duration.FromTimedelta(td) + + # JSON Mapping + + In JSON format, the Duration type is encoded as a string rather than an + object, where the string ends in the suffix "s" (indicating seconds) and + is preceded by the number of seconds, with nanoseconds expressed as + fractional seconds. For example, 3 seconds with 0 nanoseconds should be + encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should + be expressed in JSON format as "3.000000001s", and 3 seconds and 1 + microsecond should be expressed in JSON format as "3.000001s". + """ + + seconds: int = betterproto.int64_field(1) + """ + Signed seconds of the span of time. Must be from -315,576,000,000 + to +315,576,000,000 inclusive. Note: these bounds are computed from: + 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years + """ + + nanos: int = betterproto.int32_field(2) + """ + Signed fractions of a second at nanosecond resolution of the span + of time. Durations less than one second are represented with a 0 + `seconds` field and a positive or negative `nanos` field. For durations + of one second or more, a non-zero value for the `nanos` field must be + of the same sign as the `seconds` field. Must be from -999,999,999 + to +999,999,999 inclusive. + """ + + +@dataclass(eq=False, repr=False) +class Empty(betterproto.Message): + """ + A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to use it as the request + or the response type of an API method. 
For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); + } + """ + + pass + + +@dataclass(eq=False, repr=False) +class FieldMask(betterproto.Message): + """ + `FieldMask` represents a set of symbolic field paths, for example: + + paths: "f.a" + paths: "f.b.d" + + Here `f` represents a field in some root message, `a` and `b` + fields in the message found in `f`, and `d` a field found in the + message in `f.b`. + + Field masks are used to specify a subset of fields that should be + returned by a get operation or modified by an update operation. + Field masks also have a custom JSON encoding (see below). + + # Field Masks in Projections + + When used in the context of a projection, a response message or + sub-message is filtered by the API to only contain those fields as + specified in the mask. For example, if the mask in the previous + example is applied to a response message as follows: + + f { + a : 22 + b { + d : 1 + x : 2 + } + y : 13 + } + z: 8 + + The result will not contain specific values for fields x,y and z + (their value will be set to the default, and omitted in proto text + output): + + + f { + a : 22 + b { + d : 1 + } + } + + A repeated field is not allowed except at the last position of a + paths string. + + If a FieldMask object is not present in a get operation, the + operation applies to all fields (as if a FieldMask of all fields + had been specified). + + Note that a field mask does not necessarily apply to the + top-level response message. In case of a REST get operation, the + field mask applies directly to the response, but in case of a REST + list operation, the mask instead applies to each individual message + in the returned resource list. In case of a REST custom method, + other definitions may be used. Where the mask applies will be + clearly documented together with its declaration in the API. In + any case, the effect on the returned resource/resources is required + behavior for APIs. 
+ + # Field Masks in Update Operations + + A field mask in update operations specifies which fields of the + targeted resource are going to be updated. The API is required + to only change the values of the fields as specified in the mask + and leave the others untouched. If a resource is passed in to + describe the updated values, the API ignores the values of all + fields not covered by the mask. + + If a repeated field is specified for an update operation, new values will + be appended to the existing repeated field in the target resource. Note that + a repeated field is only allowed in the last position of a `paths` string. + + If a sub-message is specified in the last position of the field mask for an + update operation, then new value will be merged into the existing sub-message + in the target resource. + + For example, given the target message: + + f { + b { + d: 1 + x: 2 + } + c: [1] + } + + And an update message: + + f { + b { + d: 10 + } + c: [2] + } + + then if the field mask is: + + paths: ["f.b", "f.c"] + + then the result will be: + + f { + b { + d: 10 + x: 2 + } + c: [1, 2] + } + + An implementation may provide options to override this default behavior for + repeated and message fields. + + In order to reset a field's value to the default, the field must + be in the mask and set to the default value in the provided resource. + Hence, in order to reset all fields of a resource, provide a default + instance of the resource and set all fields in the mask, or do + not provide a mask as described below. + + If a field mask is not present on update, the operation applies to + all fields (as if a field mask of all fields has been specified). + Note that in the presence of schema evolution, this may mean that + fields the client does not know and has therefore not filled into + the request will be reset to their default. If this is unwanted + behavior, a specific service may require a client to always specify + a field mask, producing an error if not. 
+ + As with get operations, the location of the resource which + describes the updated values in the request message depends on the + operation kind. In any case, the effect of the field mask is + required to be honored by the API. + + ## Considerations for HTTP REST + + The HTTP kind of an update operation which uses a field mask must + be set to PATCH instead of PUT in order to satisfy HTTP semantics + (PUT must only be used for full updates). + + # JSON Encoding of Field Masks + + In JSON, a field mask is encoded as a single string where paths are + separated by a comma. Fields name in each path are converted + to/from lower-camel naming conventions. + + As an example, consider the following message declarations: + + message Profile { + User user = 1; + Photo photo = 2; + } + message User { + string display_name = 1; + string address = 2; + } + + In proto a field mask for `Profile` may look as such: + + mask { + paths: "user.display_name" + paths: "photo" + } + + In JSON, the same mask is represented as below: + + { + mask: "user.displayName,photo" + } + + # Field Masks and Oneof Fields + + Field masks treat fields in oneofs just as regular fields. Consider the + following message: + + message SampleMessage { + oneof test_oneof { + string name = 4; + SubMessage sub_message = 9; + } + } + + The field mask can be: + + mask { + paths: "name" + } + + Or: + + mask { + paths: "sub_message" + } + + Note that oneof type names ("test_oneof" in this case) cannot be used in + paths. + + ## Field Mask Verification + + The implementation of any API method which has a FieldMask type field in the + request should verify the included field paths, and return an + `INVALID_ARGUMENT` error if any path is unmappable. 
+ """ + + paths: List[str] = betterproto.string_field(1) + """The set of field mask paths.""" + + +@dataclass(eq=False, repr=False) +class Struct(betterproto.Message): + """ + `Struct` represents a structured data value, consisting of fields + which map to dynamically typed values. In some languages, `Struct` + might be supported by a native representation. For example, in + scripting languages like JS a struct is represented as an + object. The details of that representation are described together + with the proto support for the language. + + The JSON representation for `Struct` is JSON object. + """ + + fields: Dict[str, "Value"] = betterproto.map_field( + 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + """Unordered map of dynamically typed values.""" + + @hybridmethod + def from_dict(cls: "type[Self]", value: Mapping[str, Any]) -> Self: # type: ignore + self = cls() + return self.from_dict(value) + + @from_dict.instancemethod + def from_dict(self, value: Mapping[str, Any]) -> Self: + fields = {**value} + for k in fields: + if hasattr(fields[k], "from_dict"): + fields[k] = fields[k].from_dict() + + self.fields = fields + return self + + def to_dict( + self, + casing: betterproto.Casing = betterproto.Casing.CAMEL, + include_default_values: bool = False, + ) -> Dict[str, Any]: + output = {**self.fields} + for k in self.fields: + if hasattr(self.fields[k], "to_dict"): + output[k] = self.fields[k].to_dict(casing, include_default_values) + return output + + +@dataclass(eq=False, repr=False) +class Value(betterproto.Message): + """ + `Value` represents a dynamically typed value which can be either + null, a number, a string, a boolean, a recursive struct value, or a + list of values. A producer of value is expected to set one of these + variants. Absence of any variant indicates an error. + + The JSON representation for `Value` is JSON value. 
+ """ + + null_value: "NullValue" = betterproto.enum_field(1, group="kind") + """Represents a null value.""" + + number_value: float = betterproto.double_field(2, group="kind") + """Represents a double value.""" + + string_value: str = betterproto.string_field(3, group="kind") + """Represents a string value.""" + + bool_value: bool = betterproto.bool_field(4, group="kind") + """Represents a boolean value.""" + + struct_value: "Struct" = betterproto.message_field(5, group="kind") + """Represents a structured value.""" + + list_value: "ListValue" = betterproto.message_field(6, group="kind") + """Represents a repeated `Value`.""" + + +@dataclass(eq=False, repr=False) +class ListValue(betterproto.Message): + """ + `ListValue` is a wrapper around a repeated field of values. + + The JSON representation for `ListValue` is JSON array. + """ + + values: List["Value"] = betterproto.message_field(1) + """Repeated field of dynamically typed values.""" + + +@dataclass(eq=False, repr=False) +class Timestamp(betterproto.Message): + """ + A Timestamp represents a point in time independent of any time zone or local + calendar, encoded as a count of seconds and fractions of seconds at + nanosecond resolution. The count is relative to an epoch at UTC midnight on + January 1, 1970, in the proleptic Gregorian calendar which extends the + Gregorian calendar backwards to year one. + + All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap + second table is needed for interpretation, using a [24-hour linear + smear](https://developers.google.com/time/smear). + + The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By + restricting to that range, we ensure that we can convert to and from [RFC + 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. + + # Examples + + Example 1: Compute Timestamp from POSIX `time()`. 
+ + Timestamp timestamp; + timestamp.set_seconds(time(NULL)); + timestamp.set_nanos(0); + + Example 2: Compute Timestamp from POSIX `gettimeofday()`. + + struct timeval tv; + gettimeofday(&tv, NULL); + + Timestamp timestamp; + timestamp.set_seconds(tv.tv_sec); + timestamp.set_nanos(tv.tv_usec * 1000); + + Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. + + FILETIME ft; + GetSystemTimeAsFileTime(&ft); + UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; + + // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z + // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. + Timestamp timestamp; + timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); + timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); + + Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. + + long millis = System.currentTimeMillis(); + + Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) + .setNanos((int) ((millis % 1000) * 1000000)).build(); + + Example 5: Compute Timestamp from Java `Instant.now()`. + + Instant now = Instant.now(); + + Timestamp timestamp = + Timestamp.newBuilder().setSeconds(now.getEpochSecond()) + .setNanos(now.getNano()).build(); + + Example 6: Compute Timestamp from current time in Python. + + timestamp = Timestamp() + timestamp.GetCurrentTime() + + # JSON Mapping + + In JSON format, the Timestamp type is encoded as a string in the + [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the + format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" + where {year} is always expressed using four digits while {month}, {day}, + {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional + seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), + are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone + is required. 
A proto3 JSON serializer should always use UTC (as indicated by + "Z") when printing the Timestamp type and a proto3 JSON parser should be + able to accept both UTC and other timezones (as indicated by an offset). + + For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past + 01:30 UTC on January 15, 2017. + + In JavaScript, one can convert a Date object to this format using the + standard + [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) + method. In Python, a standard `datetime.datetime` object can be converted + to this format using + [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with + the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use + the Joda Time's [`ISODateTimeFormat.dateTime()`]( + http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() + ) to obtain a formatter capable of generating timestamps in this format. + """ + + seconds: int = betterproto.int64_field(1) + """ + Represents seconds of UTC time since Unix epoch + 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to + 9999-12-31T23:59:59Z inclusive. + """ + + nanos: int = betterproto.int32_field(2) + """ + Non-negative fractions of a second at nanosecond resolution. Negative + second values with fractions must still have non-negative nanos values + that count forward in time. Must be from 0 to 999,999,999 + inclusive. + """ + + +@dataclass(eq=False, repr=False) +class DoubleValue(betterproto.Message): + """ + Wrapper message for `double`. + + The JSON representation for `DoubleValue` is JSON number. + """ + + value: float = betterproto.double_field(1) + """The double value.""" + + +@dataclass(eq=False, repr=False) +class FloatValue(betterproto.Message): + """ + Wrapper message for `float`. + + The JSON representation for `FloatValue` is JSON number. 
+ """ + + value: float = betterproto.float_field(1) + """The float value.""" + + +@dataclass(eq=False, repr=False) +class Int64Value(betterproto.Message): + """ + Wrapper message for `int64`. + + The JSON representation for `Int64Value` is JSON string. + """ + + value: int = betterproto.int64_field(1) + """The int64 value.""" + + +@dataclass(eq=False, repr=False) +class UInt64Value(betterproto.Message): + """ + Wrapper message for `uint64`. + + The JSON representation for `UInt64Value` is JSON string. + """ + + value: int = betterproto.uint64_field(1) + """The uint64 value.""" + + +@dataclass(eq=False, repr=False) +class Int32Value(betterproto.Message): + """ + Wrapper message for `int32`. + + The JSON representation for `Int32Value` is JSON number. + """ + + value: int = betterproto.int32_field(1) + """The int32 value.""" + + +@dataclass(eq=False, repr=False) +class UInt32Value(betterproto.Message): + """ + Wrapper message for `uint32`. + + The JSON representation for `UInt32Value` is JSON number. + """ + + value: int = betterproto.uint32_field(1) + """The uint32 value.""" + + +@dataclass(eq=False, repr=False) +class BoolValue(betterproto.Message): + """ + Wrapper message for `bool`. + + The JSON representation for `BoolValue` is JSON `true` and `false`. + """ + + value: bool = betterproto.bool_field(1) + """The bool value.""" + + +@dataclass(eq=False, repr=False) +class StringValue(betterproto.Message): + """ + Wrapper message for `string`. + + The JSON representation for `StringValue` is JSON string. + """ + + value: str = betterproto.string_field(1) + """The string value.""" + + +@dataclass(eq=False, repr=False) +class BytesValue(betterproto.Message): + """ + Wrapper message for `bytes`. + + The JSON representation for `BytesValue` is JSON string. 
+ """ + + value: bytes = betterproto.bytes_field(1) + """The bytes value.""" diff --git a/src/betterproto/lib/std/google/protobuf/compiler/__init__.py b/src/betterproto/lib/std/google/protobuf/compiler/__init__.py new file mode 100644 index 000000000..acacce43d --- /dev/null +++ b/src/betterproto/lib/std/google/protobuf/compiler/__init__.py @@ -0,0 +1,198 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! +# sources: google/protobuf/compiler/plugin.proto +# plugin: python-betterproto +# This file has been @generated + +from dataclasses import dataclass +from typing import List + +import betterproto +import betterproto.lib.google.protobuf as betterproto_lib_google_protobuf + + +class CodeGeneratorResponseFeature(betterproto.Enum): + """Sync with code_generator.h.""" + + FEATURE_NONE = 0 + FEATURE_PROTO3_OPTIONAL = 1 + FEATURE_SUPPORTS_EDITIONS = 2 + + +@dataclass(eq=False, repr=False) +class Version(betterproto.Message): + """The version number of protocol compiler.""" + + major: int = betterproto.int32_field(1) + minor: int = betterproto.int32_field(2) + patch: int = betterproto.int32_field(3) + suffix: str = betterproto.string_field(4) + """ + A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should + be empty for mainline stable releases. + """ + + +@dataclass(eq=False, repr=False) +class CodeGeneratorRequest(betterproto.Message): + """An encoded CodeGeneratorRequest is written to the plugin's stdin.""" + + file_to_generate: List[str] = betterproto.string_field(1) + """ + The .proto files that were explicitly listed on the command-line. The + code generator should generate code only for these files. Each file's + descriptor will be included in proto_file, below. 
+ """ + + parameter: str = betterproto.string_field(2) + """The generator parameter passed on the command-line.""" + + proto_file: List["betterproto_lib_google_protobuf.FileDescriptorProto"] = ( + betterproto.message_field(15) + ) + """ + FileDescriptorProtos for all files in files_to_generate and everything + they import. The files will appear in topological order, so each file + appears before any file that imports it. + + Note: the files listed in files_to_generate will include runtime-retention + options only, but all other files will include source-retention options. + The source_file_descriptors field below is available in case you need + source-retention options for files_to_generate. + + protoc guarantees that all proto_files will be written after + the fields above, even though this is not technically guaranteed by the + protobuf wire format. This theoretically could allow a plugin to stream + in the FileDescriptorProtos and handle them one by one rather than read + the entire set into memory at once. However, as of this writing, this + is not similarly optimized on protoc's end -- it will store all fields in + memory at once before sending them to the plugin. + + Type names of fields and extensions in the FileDescriptorProto are always + fully qualified. + """ + + source_file_descriptors: List[ + "betterproto_lib_google_protobuf.FileDescriptorProto" + ] = betterproto.message_field(17) + """ + File descriptors with all options, including source-retention options. + These descriptors are only provided for the files listed in + files_to_generate. + """ + + compiler_version: "Version" = betterproto.message_field(3) + """The version number of protocol compiler.""" + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponse(betterproto.Message): + """The plugin writes an encoded CodeGeneratorResponse to stdout.""" + + error: str = betterproto.string_field(1) + """ + Error message. If non-empty, code generation failed. 
The plugin process + should exit with status code zero even if it reports an error in this way. + + This should be used to indicate errors in .proto files which prevent the + code generator from generating correct code. Errors which indicate a + problem in protoc itself -- such as the input CodeGeneratorRequest being + unparseable -- should be reported by writing a message to stderr and + exiting with a non-zero status code. + """ + + supported_features: int = betterproto.uint64_field(2) + """ + A bitmask of supported features that the code generator supports. + This is a bitwise "or" of values from the Feature enum. + """ + + minimum_edition: int = betterproto.int32_field(3) + """ + The minimum edition this plugin supports. This will be treated as an + Edition enum, but we want to allow unknown values. It should be specified + according the edition enum value, *not* the edition number. Only takes + effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + """ + + maximum_edition: int = betterproto.int32_field(4) + """ + The maximum edition this plugin supports. This will be treated as an + Edition enum, but we want to allow unknown values. It should be specified + according the edition enum value, *not* the edition number. Only takes + effect for plugins that have FEATURE_SUPPORTS_EDITIONS set. + """ + + file: List["CodeGeneratorResponseFile"] = betterproto.message_field(15) + + +@dataclass(eq=False, repr=False) +class CodeGeneratorResponseFile(betterproto.Message): + """Represents a single generated file.""" + + name: str = betterproto.string_field(1) + """ + The file name, relative to the output directory. The name must not + contain "." or ".." components and must be relative, not be absolute (so, + the file cannot lie outside the output directory). "/" must be used as + the path separator, not "\". + + If the name is omitted, the content will be appended to the previous + file. 
This allows the generator to break large files into small chunks, + and allows the generated text to be streamed back to protoc so that large + files need not reside completely in memory at one time. Note that as of + this writing protoc does not optimize for this -- it will read the entire + CodeGeneratorResponse before writing files to disk. + """ + + insertion_point: str = betterproto.string_field(2) + """ + If non-empty, indicates that the named file should already exist, and the + content here is to be inserted into that file at a defined insertion + point. This feature allows a code generator to extend the output + produced by another code generator. The original generator may provide + insertion points by placing special annotations in the file that look + like: + @@protoc_insertion_point(NAME) + The annotation can have arbitrary text before and after it on the line, + which allows it to be placed in a comment. NAME should be replaced with + an identifier naming the point -- this is what other generators will use + as the insertion_point. Code inserted at this point will be placed + immediately above the line containing the insertion point (thus multiple + insertions to the same point will come out in the order they were added). + The double-@ is intended to make it unlikely that the generated code + could contain things that look like insertion points by accident. + + For example, the C++ code generator places the following line in the + .pb.h files that it generates: + // @@protoc_insertion_point(namespace_scope) + This line appears within the scope of the file's package namespace, but + outside of any particular class. Another plugin can then specify the + insertion_point "namespace_scope" to generate additional classes or + other declarations that should be placed in this scope. + + Note that if the line containing the insertion point begins with + whitespace, the same whitespace will be added to every line of the + inserted text. 
This is useful for languages like Python, where + indentation matters. In these languages, the insertion point comment + should be indented the same amount as any inserted code will need to be + in order to work correctly in that context. + + The code generator that generates the initial file and the one which + inserts into it must both run as part of a single invocation of protoc. + Code generators are executed in the order in which they appear on the + command line. + + If |insertion_point| is present, |name| must also be present. + """ + + content: str = betterproto.string_field(15) + """The file contents.""" + + generated_code_info: "betterproto_lib_google_protobuf.GeneratedCodeInfo" = ( + betterproto.message_field(16) + ) + """ + Information describing the file content being inserted. If an insertion + point is used, this information will be appropriately offset and inserted + into the code generation metadata for the generated files. + """ diff --git a/src/betterproto/plugin/compiler.py b/src/betterproto/plugin/compiler.py index 617a65046..3cefdd76e 100644 --- a/src/betterproto/plugin/compiler.py +++ b/src/betterproto/plugin/compiler.py @@ -1,8 +1,12 @@ import os.path +import subprocess +import sys + +from .module_validation import ModuleValidator + try: # betterproto[compiler] specific dependencies - import black import jinja2 except ImportError as err: print( @@ -19,7 +23,6 @@ def outputfile_compiler(output_file: OutputTemplate) -> str: - templates_folder = os.path.abspath( os.path.join(os.path.dirname(__file__), "..", "templates") ) @@ -28,10 +31,34 @@ def outputfile_compiler(output_file: OutputTemplate) -> str: trim_blocks=True, lstrip_blocks=True, loader=jinja2.FileSystemLoader(templates_folder), + undefined=jinja2.StrictUndefined, ) - template = env.get_template("template.py.j2") + # Load the body first so we have a compleate list of imports needed. 
+ body_template = env.get_template("template.py.j2") + header_template = env.get_template("header.py.j2") + + code = body_template.render(output_file=output_file) + code = header_template.render(output_file=output_file) + code - return black.format_str( - template.render(output_file=output_file), - mode=black.FileMode(target_versions={black.TargetVersion.PY37}), + # Sort imports, delete unused ones + code = subprocess.check_output( + ["ruff", "check", "--select", "I,F401", "--fix", "--silent", "-"], + input=code, + encoding="utf-8", ) + + # Format the code + code = subprocess.check_output( + ["ruff", "format", "-"], input=code, encoding="utf-8" + ) + + # Validate the generated code. + validator = ModuleValidator(iter(code.splitlines())) + if not validator.validate(): + message_builder = ["[WARNING]: Generated code has collisions in the module:"] + for collision, lines in validator.collisions.items(): + message_builder.append(f' "{collision}" on lines:') + for num, line in lines: + message_builder.append(f" {num}:{line}") + print("\n".join(message_builder), file=sys.stderr) + return code diff --git a/src/betterproto/plugin/main.py b/src/betterproto/plugin/main.py index e0b2557d2..62382e2e9 100755 --- a/src/betterproto/plugin/main.py +++ b/src/betterproto/plugin/main.py @@ -7,9 +7,8 @@ CodeGeneratorRequest, CodeGeneratorResponse, ) - -from betterproto.plugin.parser import generate_code from betterproto.plugin.models import monkey_patch_oneof_index +from betterproto.plugin.parser import generate_code def main() -> None: @@ -28,11 +27,8 @@ def main() -> None: if dump_file: dump_request(dump_file, request) - # Create response - response = CodeGeneratorResponse() - # Generate code - generate_code(request, response) + response = generate_code(request) # Serialise response message output = response.SerializeToString() diff --git a/src/betterproto/plugin/models.py b/src/betterproto/plugin/models.py index 36fa3b7ce..e330e6884 100644 --- a/src/betterproto/plugin/models.py +++ 
b/src/betterproto/plugin/models.py @@ -29,14 +29,24 @@ reference to `A` to `B`'s `fields` attribute. """ +import builtins +import re +from dataclasses import ( + dataclass, + field, +) +from typing import ( + Dict, + Iterable, + Iterator, + List, + Optional, + Set, + Type, + Union, +) import betterproto -from betterproto import which_one_of -from betterproto.casing import sanitize_name -from betterproto.compile.importing import ( - get_type_reference, - parse_source_type_name, -) from betterproto.compile.naming import ( pythonize_class_name, pythonize_field_name, @@ -45,29 +55,30 @@ from betterproto.lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, - FileDescriptorProto, - MethodDescriptorProto, Field, FieldDescriptorProto, - FieldDescriptorProtoType, FieldDescriptorProtoLabel, + FieldDescriptorProtoType, + FileDescriptorProto, + MethodDescriptorProto, ) from betterproto.lib.google.protobuf.compiler import CodeGeneratorRequest - -import re -import textwrap -from dataclasses import dataclass, field -from typing import Dict, Iterable, Iterator, List, Optional, Set, Text, Type, Union -import sys - -from ..casing import sanitize_name -from ..compile.importing import get_type_reference, parse_source_type_name +from .. 
import which_one_of +from ..compile.importing import ( + get_type_reference, + parse_source_type_name, +) from ..compile.naming import ( pythonize_class_name, + pythonize_enum_member_name, pythonize_field_name, pythonize_method_name, ) +from .typing_compiler import ( + DirectImportTypingCompiler, + TypingCompiler, +) # Create a unique placeholder to deal with @@ -142,22 +153,37 @@ def get_comment( ) -> str: pad = " " * indent for sci_loc in proto_file.source_code_info.location: - if list(sci_loc.path) == path and sci_loc.leading_comments: - lines = textwrap.wrap( - sci_loc.leading_comments.strip().replace("\n", ""), width=79 - indent - ) - - if path[-2] == 2 and path[-4] != 6: - # This is a field - return f"{pad}# " + f"\n{pad}# ".join(lines) + if list(sci_loc.path) == path: + all_comments = list(sci_loc.leading_detached_comments) + if sci_loc.leading_comments: + all_comments.append(sci_loc.leading_comments) + if sci_loc.trailing_comments: + all_comments.append(sci_loc.trailing_comments) + + lines = [] + + for comment in all_comments: + lines += comment.split("\n") + lines.append("") + + # Remove consecutive empty lines + lines = [ + line for i, line in enumerate(lines) if line or (i == 0 or lines[i - 1]) + ] + + if lines and not lines[-1]: + lines.pop() # Remove the last empty line + + # It is common for one line comments to start with a space, for example: // comment + # We don't add this space to the generated file. 
+ lines = [line[1:] if line and line[0] == " " else line for line in lines] + + # This is a field, message, enum, service, or method + if len(lines) == 1 and len(lines[0]) < 79 - indent - 6: + return f'{pad}"""{lines[0]}"""' else: - # This is a message, enum, service, or method - if len(lines) == 1 and len(lines[0]) < 79 - indent - 6: - lines[0] = lines[0].strip('"') - return f'{pad}"""{lines[0]}"""' - else: - joined = f"\n{pad}".join(lines) - return f'{pad}"""\n{pad}{joined}\n{pad}"""' + joined = f"\n{pad}".join(lines) + return f'{pad}"""\n{pad}{joined}\n{pad}"""' return "" @@ -166,6 +192,7 @@ class ProtoContentBase: """Methods common to MessageCompiler, ServiceCompiler and ServiceMethodCompiler.""" source_file: FileDescriptorProto + typing_compiler: TypingCompiler path: List[int] comment_indent: int = 4 parent: Union["betterproto.Message", "OutputTemplate"] @@ -204,7 +231,6 @@ def comment(self) -> str: @dataclass class PluginRequestCompiler: - plugin_request_obj: CodeGeneratorRequest output_packages: Dict[str, "OutputTemplate"] = field(default_factory=dict) @@ -234,12 +260,17 @@ class OutputTemplate: parent_request: PluginRequestCompiler package_proto_obj: FileDescriptorProto input_files: List[str] = field(default_factory=list) - imports: Set[str] = field(default_factory=set) + imports_end: Set[str] = field(default_factory=set) datetime_imports: Set[str] = field(default_factory=set) - typing_imports: Set[str] = field(default_factory=set) + pydantic_imports: Set[str] = field(default_factory=set) + builtins_import: bool = False messages: List["MessageCompiler"] = field(default_factory=list) enums: List["EnumDefinitionCompiler"] = field(default_factory=list) services: List["ServiceCompiler"] = field(default_factory=list) + imports_type_checking_only: Set[str] = field(default_factory=set) + pydantic_dataclasses: bool = False + output: bool = True + typing_compiler: TypingCompiler = field(default_factory=DirectImportTypingCompiler) @property def package(self) -> str: 
@@ -266,8 +297,23 @@ def input_filenames(self) -> Iterable[str]: @property def python_module_imports(self) -> Set[str]: imports = set() + + has_deprecated = False + if any(m.deprecated for m in self.messages): + has_deprecated = True if any(x for x in self.messages if any(x.deprecated_fields)): + has_deprecated = True + if any( + any(m.proto_obj.options.deprecated for m in s.methods) + for s in self.services + ): + has_deprecated = True + + if has_deprecated: imports.add("warnings") + + if self.builtins_import: + imports.add("builtins") return imports @@ -276,6 +322,7 @@ class MessageCompiler(ProtoContentBase): """Representation of a protobuf message.""" source_file: FileDescriptorProto + typing_compiler: TypingCompiler parent: Union["MessageCompiler", OutputTemplate] = PLACEHOLDER proto_obj: DescriptorProto = PLACEHOLDER path: List[int] = PLACEHOLDER @@ -283,6 +330,7 @@ class MessageCompiler(ProtoContentBase): default_factory=list ) deprecated: bool = field(default=False, init=False) + builtins_types: Set[str] = field(default_factory=set) def __post_init__(self) -> None: # Add message to output file @@ -302,12 +350,6 @@ def proto_name(self) -> str: def py_name(self) -> str: return pythonize_class_name(self.proto_name) - @property - def annotation(self) -> str: - if self.repeated: - return f"List[{self.py_name}]" - return self.py_name - @property def deprecated_fields(self) -> Iterator[str]: for f in self.fields: @@ -318,12 +360,29 @@ def deprecated_fields(self) -> Iterator[str]: def has_deprecated_fields(self) -> bool: return any(self.deprecated_fields) + @property + def has_oneof_fields(self) -> bool: + return any(isinstance(field, OneOfFieldCompiler) for field in self.fields) + + @property + def has_message_field(self) -> bool: + return any( + ( + field.proto_obj.type in PROTO_MESSAGE_TYPES + for field in self.fields + if isinstance(field.proto_obj, FieldDescriptorProto) + ) + ) + def is_map( proto_field_obj: FieldDescriptorProto, parent_message: DescriptorProto 
) -> bool: """True if proto_field_obj is a map, otherwise False.""" if proto_field_obj.type == FieldDescriptorProtoType.TYPE_MESSAGE: + if not hasattr(parent_message, "nested_type"): + return False + # This might be a map... message_type = proto_field_obj.type_name.split(".").pop().lower() map_entry = f"{proto_field_obj.name.replace('_', '').lower()}entry" @@ -351,7 +410,10 @@ def is_oneof(proto_field_obj: FieldDescriptorProto) -> bool: us to tell whether it was set, via the which_one_of interface. """ - return which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index" + return ( + not proto_field_obj.proto3_optional + and which_one_of(proto_field_obj, "oneof_index")[0] == "oneof_index" + ) @dataclass @@ -376,6 +438,8 @@ def get_field_string(self, indent: int = 4) -> str: betterproto_field_type = ( f"betterproto.{self.field_type}_field({self.proto_obj.number}{field_args})" ) + if self.py_name in dir(builtins): + self.parent.builtins_types.add(self.py_name) return f"{name}{annotations} = {betterproto_field_type}" @property @@ -383,6 +447,8 @@ def betterproto_field_args(self) -> List[str]: args = [] if self.field_wraps: args.append(f"wraps={self.field_wraps}") + if self.optional: + args.append(f"optional=True") return args @property @@ -397,20 +463,19 @@ def datetime_imports(self) -> Set[str]: return imports @property - def typing_imports(self) -> Set[str]: - imports = set() - annotation = self.annotation - if "Optional[" in annotation: - imports.add("Optional") - if "List[" in annotation: - imports.add("List") - if "Dict[" in annotation: - imports.add("Dict") - return imports + def pydantic_imports(self) -> Set[str]: + return set() + + @property + def use_builtins(self) -> bool: + return self.py_type in self.parent.builtins_types or ( + self.py_type == self.py_name and self.py_name in dir(builtins) + ) def add_imports_to(self, output_file: OutputTemplate) -> None: output_file.datetime_imports.update(self.datetime_imports) - 
output_file.typing_imports.update(self.typing_imports) + output_file.pydantic_imports.update(self.pydantic_imports) + output_file.builtins_import = output_file.builtins_import or self.use_builtins @property def field_wraps(self) -> Optional[str]: @@ -432,9 +497,8 @@ def repeated(self) -> bool: ) @property - def mutable(self) -> bool: - """True if the field is a mutable type, otherwise False.""" - return self.annotation.startswith(("List[", "Dict[")) + def optional(self) -> bool: + return self.proto_obj.proto3_optional @property def field_type(self) -> str: @@ -445,25 +509,6 @@ def field_type(self) -> str: .replace("type_", "") ) - @property - def default_value_string(self) -> Union[Text, None, float, int]: - """Python representation of the default proto value.""" - if self.repeated: - return "[]" - if self.py_type == "int": - return "0" - if self.py_type == "float": - return "0.0" - elif self.py_type == "bool": - return "False" - elif self.py_type == "str": - return '""' - elif self.py_type == "bytes": - return 'b""' - else: - # Message type - return "None" - @property def packed(self) -> bool: """True if the wire representation is a packed format.""" @@ -496,17 +541,24 @@ def py_type(self) -> str: # Type referencing another defined Message or a named enum return get_type_reference( package=self.output_file.package, - imports=self.output_file.imports, + imports=self.output_file.imports_end, source_type=self.proto_obj.type_name, + typing_compiler=self.typing_compiler, + pydantic=self.output_file.pydantic_dataclasses, ) else: - raise NotImplementedError(f"Unknown type {field.type}") + raise NotImplementedError(f"Unknown type {self.proto_obj.type}") @property def annotation(self) -> str: + py_type = self.py_type + if self.use_builtins: + py_type = f"builtins.{py_type}" if self.repeated: - return f"List[{self.py_type}]" - return self.py_type + return self.typing_compiler.list(py_type) + if self.optional: + return self.typing_compiler.optional(py_type) + return py_type 
@dataclass @@ -519,6 +571,20 @@ def betterproto_field_args(self) -> List[str]: return args +@dataclass +class PydanticOneOfFieldCompiler(OneOfFieldCompiler): + @property + def optional(self) -> bool: + # Force the optional to be True. This will allow the pydantic dataclass + # to validate the object correctly by allowing the field to be let empty. + # We add a pydantic validator later to ensure exactly one field is defined. + return True + + @property + def pydantic_imports(self) -> Set[str]: + return {"model_validator"} + + @dataclass class MapEntryCompiler(FieldCompiler): py_k_type: Type = PLACEHOLDER @@ -539,11 +605,13 @@ def __post_init__(self) -> None: source_file=self.source_file, parent=self, proto_obj=nested.field[0], # key + typing_compiler=self.typing_compiler, ).py_type self.py_v_type = FieldCompiler( source_file=self.source_file, parent=self, proto_obj=nested.field[1], # value + typing_compiler=self.typing_compiler, ).py_type # Get proto types @@ -561,7 +629,7 @@ def field_type(self) -> str: @property def annotation(self) -> str: - return f"Dict[{self.py_k_type}, {self.py_v_type}]" + return self.typing_compiler.dict(self.py_k_type, self.py_v_type) @property def repeated(self) -> bool: @@ -587,7 +655,9 @@ def __post_init__(self) -> None: # Get entries/allowed values for this Enum self.entries = [ self.EnumEntry( - name=sanitize_name(entry_proto_value.name), + name=pythonize_enum_member_name( + entry_proto_value.name, self.proto_obj.name + ), value=entry_proto_value.number, comment=get_comment( proto_file=self.source_file, path=self.path + [2, entry_number] @@ -597,17 +667,10 @@ def __post_init__(self) -> None: ] super().__post_init__() # call MessageCompiler __post_init__ - @property - def default_value_string(self) -> str: - """Python representation of the default value for Enums. - - As per the spec, this is the first value of the Enum. - """ - return str(self.entries[0].value) # ideally, should ALWAYS be int(0)! 
- @dataclass class ServiceCompiler(ProtoContentBase): + source_file: FileDescriptorProto parent: OutputTemplate = PLACEHOLDER proto_obj: DescriptorProto = PLACEHOLDER path: List[int] = PLACEHOLDER @@ -616,7 +679,6 @@ class ServiceCompiler(ProtoContentBase): def __post_init__(self) -> None: # Add service to output file self.output_file.services.append(self) - self.output_file.typing_imports.add("Dict") super().__post_init__() # check for unset fields @property @@ -630,7 +692,7 @@ def py_name(self) -> str: @dataclass class ServiceMethodCompiler(ProtoContentBase): - + source_file: FileDescriptorProto parent: ServiceCompiler proto_obj: MethodDescriptorProto path: List[int] = PLACEHOLDER @@ -640,55 +702,16 @@ def __post_init__(self) -> None: # Add method to service self.parent.methods.append(self) - # Check for imports - if self.py_input_message: - for f in self.py_input_message.fields: - f.add_imports_to(self.output_file) - if "Optional" in self.py_output_message_type: - self.output_file.typing_imports.add("Optional") - self.mutable_default_args # ensure this is called before rendering - - # Check for Async imports - if self.client_streaming: - self.output_file.typing_imports.add("AsyncIterable") - self.output_file.typing_imports.add("Iterable") - self.output_file.typing_imports.add("Union") - if self.server_streaming: - self.output_file.typing_imports.add("AsyncIterator") + self.output_file.imports_type_checking_only.add("import grpclib.server") + self.output_file.imports_type_checking_only.add( + "from betterproto.grpc.grpclib_client import MetadataLike" + ) + self.output_file.imports_type_checking_only.add( + "from grpclib.metadata import Deadline" + ) super().__post_init__() # check for unset fields - @property - def mutable_default_args(self) -> Dict[str, str]: - """Handle mutable default arguments. - - Returns a list of tuples containing the name and default value - for arguments to this message who's default value is mutable. 
- The defaults are swapped out for None and replaced back inside - the method's body. - Reference: - https://docs.python-guide.org/writing/gotchas/#mutable-default-arguments - - Returns - ------- - Dict[str, str] - Name and actual default value (as a string) - for each argument with mutable default values. - """ - mutable_default_args = {} - - if self.py_input_message: - for f in self.py_input_message.fields: - if ( - not self.client_streaming - and f.default_value_string != "None" - and f.mutable - ): - mutable_default_args[f.py_name] = f.default_value_string - self.output_file.typing_imports.add("Optional") - - return mutable_default_args - @property def py_name(self) -> str: """Pythonized method name.""" @@ -706,30 +729,6 @@ def route(self) -> str: ) return f"/{package_part}{self.parent.proto_name}/{self.proto_name}" - @property - def py_input_message(self) -> Optional[MessageCompiler]: - """Find the input message object. - - Returns - ------- - Optional[MessageCompiler] - Method instance representing the input message. - If not input message could be found or there are no - input messages, None is returned. - """ - package, name = parse_source_type_name(self.proto_obj.input_type) - - # Nested types are currently flattened without dots. 
- # Todo: keep a fully quantified name in types, that is - # comparable with method.input_type - for msg in self.request.all_messages: - if ( - msg.py_name == name.replace(".", "") - and msg.output_file.package == package - ): - return msg - return None - @property def py_input_message_type(self) -> str: """String representation of the Python type corresponding to the @@ -742,10 +741,24 @@ def py_input_message_type(self) -> str: """ return get_type_reference( package=self.output_file.package, - imports=self.output_file.imports, + imports=self.output_file.imports_end, source_type=self.proto_obj.input_type, + typing_compiler=self.output_file.typing_compiler, + unwrap=False, + pydantic=self.output_file.pydantic_dataclasses, ).strip('"') + @property + def py_input_message_param(self) -> str: + """Param name corresponding to py_input_message_type. + + Returns + ------- + str + Param name corresponding to py_input_message_type. + """ + return pythonize_field_name(self.py_input_message_type) + @property def py_output_message_type(self) -> str: """String representation of the Python type corresponding to the @@ -758,9 +771,11 @@ def py_output_message_type(self) -> str: """ return get_type_reference( package=self.output_file.package, - imports=self.output_file.imports, + imports=self.output_file.imports_end, source_type=self.proto_obj.output_type, + typing_compiler=self.output_file.typing_compiler, unwrap=False, + pydantic=self.output_file.pydantic_dataclasses, ).strip('"') @property diff --git a/src/betterproto/plugin/module_validation.py b/src/betterproto/plugin/module_validation.py new file mode 100644 index 000000000..4cf05fdca --- /dev/null +++ b/src/betterproto/plugin/module_validation.py @@ -0,0 +1,163 @@ +import re +from collections import defaultdict +from dataclasses import ( + dataclass, + field, +) +from typing import ( + Dict, + Iterator, + List, + Tuple, +) + + +@dataclass +class ModuleValidator: + line_iterator: Iterator[str] + line_number: int = 
field(init=False, default=0) + + collisions: Dict[str, List[Tuple[int, str]]] = field( + init=False, default_factory=lambda: defaultdict(list) + ) + + def add_import(self, imp: str, number: int, full_line: str): + """ + Adds an import to be tracked. + """ + self.collisions[imp].append((number, full_line)) + + def process_import(self, imp: str): + """ + Filters out the import to its actual value. + """ + if " as " in imp: + imp = imp[imp.index(" as ") + 4 :] + + imp = imp.strip() + assert " " not in imp, imp + return imp + + def evaluate_multiline_import(self, line: str): + """ + Evaluates a multiline import from a starting line + """ + # Filter the first line and remove anything before the import statement. + full_line = line + line = line.split("import", 1)[1] + if "(" in line: + conditional = lambda line: ")" not in line + else: + conditional = lambda line: "\\" in line + + # Remove open parenthesis if it exists. + if "(" in line: + line = line[line.index("(") + 1 :] + + # Choose the conditional based on how multiline imports are formatted. + while conditional(line): + # Split the line by commas + imports = line.split(",") + + for imp in imports: + # Add the import to the namespace + imp = self.process_import(imp) + if imp: + self.add_import(imp, self.line_number, full_line) + # Get the next line + full_line = line = next(self.line_iterator) + # Increment the line number + self.line_number += 1 + + # validate the last line + if ")" in line: + line = line[: line.index(")")] + imports = line.split(",") + for imp in imports: + imp = self.process_import(imp) + if imp: + self.add_import(imp, self.line_number, full_line) + + def evaluate_import(self, line: str): + """ + Extracts an import from a line. + """ + whole_line = line + line = line[line.index("import") + 6 :] + values = line.split(",") + for v in values: + self.add_import(self.process_import(v), self.line_number, whole_line) + + def next(self): + """ + Evaluate each line for names in the module. 
+ """ + line = next(self.line_iterator) + + # Skip lines with indentation or comments + if ( + # Skip indents and whitespace. + line.startswith(" ") + or line == "\n" + or line.startswith("\t") + or + # Skip comments + line.startswith("#") + or + # Skip decorators + line.startswith("@") + ): + self.line_number += 1 + return + + # Skip docstrings. + if line.startswith('"""') or line.startswith("'''"): + quote = line[0] * 3 + line = line[3:] + while quote not in line: + line = next(self.line_iterator) + self.line_number += 1 + return + + # Evaluate Imports. + if line.startswith("from ") or line.startswith("import "): + if "(" in line or "\\" in line: + self.evaluate_multiline_import(line) + else: + self.evaluate_import(line) + + # Evaluate Classes. + elif line.startswith("class "): + class_name = re.search(r"class (\w+)", line).group(1) + if class_name: + self.add_import(class_name, self.line_number, line) + + # Evaluate Functions. + elif line.startswith("def "): + function_name = re.search(r"def (\w+)", line).group(1) + if function_name: + self.add_import(function_name, self.line_number, line) + + # Evaluate direct assignments. + elif "=" in line: + assignment = re.search(r"(\w+)\s*=", line).group(1) + if assignment: + self.add_import(assignment, self.line_number, line) + + self.line_number += 1 + + def validate(self) -> bool: + """ + Run Validation. + """ + try: + while True: + self.next() + except StopIteration: + pass + + # Filter collisions for those with more than one value. + self.collisions = {k: v for k, v in self.collisions.items() if len(v) > 1} + + # Return True if no collisions are found. 
+ return not bool(self.collisions) diff --git a/src/betterproto/plugin/parser.py b/src/betterproto/plugin/parser.py index 5e929d8de..5f7b72c40 100644 --- a/src/betterproto/plugin/parser.py +++ b/src/betterproto/plugin/parser.py @@ -1,3 +1,13 @@ +import pathlib +import sys +from typing import ( + Generator, + List, + Set, + Tuple, + Union, +) + from betterproto.lib.google.protobuf import ( DescriptorProto, EnumDescriptorProto, @@ -8,12 +18,10 @@ from betterproto.lib.google.protobuf.compiler import ( CodeGeneratorRequest, CodeGeneratorResponse, + CodeGeneratorResponseFeature, CodeGeneratorResponseFile, ) -import itertools -import pathlib -import sys -from typing import Iterator, List, Set, Tuple, TYPE_CHECKING, Union + from .compiler import outputfile_compiler from .models import ( EnumDefinitionCompiler, @@ -23,59 +31,57 @@ OneOfFieldCompiler, OutputTemplate, PluginRequestCompiler, + PydanticOneOfFieldCompiler, ServiceCompiler, ServiceMethodCompiler, is_map, is_oneof, ) - -if TYPE_CHECKING: - from google.protobuf.descriptor import Descriptor +from .typing_compiler import ( + DirectImportTypingCompiler, + NoTyping310TypingCompiler, + TypingCompiler, + TypingImportTypingCompiler, +) def traverse( - proto_file: FieldDescriptorProto, -) -> "itertools.chain[Tuple[Union[str, EnumDescriptorProto], List[int]]]": + proto_file: FileDescriptorProto, +) -> Generator[ + Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None +]: # Todo: Keep information about nested hierarchy def _traverse( - path: List[int], items: List["EnumDescriptorProto"], prefix="" - ) -> Iterator[Tuple[Union[str, EnumDescriptorProto], List[int]]]: + path: List[int], + items: Union[List[EnumDescriptorProto], List[DescriptorProto]], + prefix: str = "", + ) -> Generator[ + Tuple[Union[EnumDescriptorProto, DescriptorProto], List[int]], None, None + ]: for i, item in enumerate(items): # Adjust the name since we flatten the hierarchy. 
# Todo: don't change the name, but include full name in returned tuple - item.name = next_prefix = prefix + item.name - yield item, path + [i] + item.name = next_prefix = f"{prefix}_{item.name}" + yield item, [*path, i] if isinstance(item, DescriptorProto): - for enum in item.enum_type: - enum.name = next_prefix + enum.name - yield enum, path + [i, 4] + # Get nested types. + yield from _traverse([*path, i, 4], item.enum_type, next_prefix) + yield from _traverse([*path, i, 3], item.nested_type, next_prefix) - if item.nested_type: - for n, p in _traverse(path + [i, 3], item.nested_type, next_prefix): - yield n, p + yield from _traverse([5], proto_file.enum_type) + yield from _traverse([4], proto_file.message_type) - return itertools.chain( - _traverse([5], proto_file.enum_type), _traverse([4], proto_file.message_type) - ) +def generate_code(request: CodeGeneratorRequest) -> CodeGeneratorResponse: + response = CodeGeneratorResponse() -def generate_code( - request: CodeGeneratorRequest, response: CodeGeneratorResponse -) -> None: plugin_options = request.parameter.split(",") if request.parameter else [] + response.supported_features = CodeGeneratorResponseFeature.FEATURE_PROTO3_OPTIONAL request_data = PluginRequestCompiler(plugin_request_obj=request) # Gather output packages for proto_file in request.proto_file: - if ( - proto_file.package == "google.protobuf" - and "INCLUDE_GOOGLE" not in plugin_options - ): - # If not INCLUDE_GOOGLE, - # skip re-compiling Google's well-known types - continue - output_package_name = proto_file.package if output_package_name not in request_data.output_packages: # Create a new output if there is no output for this package @@ -85,6 +91,41 @@ def generate_code( # Add this input file to the output corresponding to this package request_data.output_packages[output_package_name].input_files.append(proto_file) + if ( + proto_file.package == "google.protobuf" + and "INCLUDE_GOOGLE" not in plugin_options + ): + # If not INCLUDE_GOOGLE, + # skip 
outputting Google's well-known types + request_data.output_packages[output_package_name].output = False + + if "pydantic_dataclasses" in plugin_options: + request_data.output_packages[ + output_package_name + ].pydantic_dataclasses = True + + # Gather any typing generation options. + typing_opts = [ + opt[len("typing.") :] for opt in plugin_options if opt.startswith("typing.") + ] + + if len(typing_opts) > 1: + raise ValueError("Multiple typing options provided") + # Set the compiler type. + typing_opt = typing_opts[0] if typing_opts else "direct" + if typing_opt == "direct": + request_data.output_packages[ + output_package_name + ].typing_compiler = DirectImportTypingCompiler() + elif typing_opt == "root": + request_data.output_packages[ + output_package_name + ].typing_compiler = TypingImportTypingCompiler() + elif typing_opt == "310": + request_data.output_packages[ + output_package_name + ].typing_compiler = NoTyping310TypingCompiler() + # Read Messages and Enums # We need to read Messages before Services in so that we can # get the references to input/output messages for each service @@ -102,11 +143,13 @@ def generate_code( for output_package_name, output_package in request_data.output_packages.items(): for proto_input_file in output_package.input_files: for index, service in enumerate(proto_input_file.service): - read_protobuf_service(service, index, output_package) + read_protobuf_service(proto_input_file, service, index, output_package) # Generate output files output_paths: Set[pathlib.Path] = set() for output_package_name, output_package in request_data.output_packages.items(): + if not output_package.output: + continue # Add files to the response object output_path = pathlib.Path(*output_package_name.split("."), "__init__.py") @@ -125,6 +168,7 @@ def generate_code( directory.joinpath("__init__.py") for path in output_paths for directory in path.parents + if not directory.joinpath("__init__.py").exists() } - output_paths for init_file in init_files: @@ 
-133,6 +177,26 @@ def generate_code( for output_package_name in sorted(output_paths.union(init_files)): print(f"Writing {output_package_name}", file=sys.stderr) + return response + + +def _make_one_of_field_compiler( + output_package: OutputTemplate, + source_file: "FileDescriptorProto", + parent: MessageCompiler, + proto_obj: "FieldDescriptorProto", + path: List[int], +) -> FieldCompiler: + pydantic = output_package.pydantic_dataclasses + Cls = PydanticOneOfFieldCompiler if pydantic else OneOfFieldCompiler + return Cls( + source_file=source_file, + parent=parent, + proto_obj=proto_obj, + path=path, + typing_compiler=output_package.typing_compiler, + ) + def read_protobuf_type( item: DescriptorProto, @@ -146,7 +210,11 @@ def read_protobuf_type( return # Process Message message_data = MessageCompiler( - source_file=source_file, parent=output_package, proto_obj=item, path=path + source_file=source_file, + parent=output_package, + proto_obj=item, + path=path, + typing_compiler=output_package.typing_compiler, ) for index, field in enumerate(item.field): if is_map(field, item): @@ -155,13 +223,11 @@ def read_protobuf_type( parent=message_data, proto_obj=field, path=path + [2, index], + typing_compiler=output_package.typing_compiler, ) elif is_oneof(field): - OneOfFieldCompiler( - source_file=source_file, - parent=message_data, - proto_obj=field, - path=path + [2, index], + _make_one_of_field_compiler( + output_package, source_file, message_data, field, path + [2, index] ) else: FieldCompiler( @@ -169,21 +235,35 @@ def read_protobuf_type( parent=message_data, proto_obj=field, path=path + [2, index], + typing_compiler=output_package.typing_compiler, ) elif isinstance(item, EnumDescriptorProto): # Enum EnumDefinitionCompiler( - source_file=source_file, parent=output_package, proto_obj=item, path=path + source_file=source_file, + parent=output_package, + proto_obj=item, + path=path, + typing_compiler=output_package.typing_compiler, ) def read_protobuf_service( - service: 
ServiceDescriptorProto, index: int, output_package: OutputTemplate + source_file: FileDescriptorProto, + service: ServiceDescriptorProto, + index: int, + output_package: OutputTemplate, ) -> None: service_data = ServiceCompiler( - parent=output_package, proto_obj=service, path=[6, index] + source_file=source_file, + parent=output_package, + proto_obj=service, + path=[6, index], ) for j, method in enumerate(service.method): ServiceMethodCompiler( - parent=service_data, proto_obj=method, path=[6, index, 2, j] + source_file=source_file, + parent=service_data, + proto_obj=method, + path=[6, index, 2, j], ) diff --git a/src/betterproto/plugin/typing_compiler.py b/src/betterproto/plugin/typing_compiler.py new file mode 100644 index 000000000..eca3691f9 --- /dev/null +++ b/src/betterproto/plugin/typing_compiler.py @@ -0,0 +1,173 @@ +import abc +from collections import defaultdict +from dataclasses import ( + dataclass, + field, +) +from typing import ( + Dict, + Iterator, + Optional, + Set, +) + + +class TypingCompiler(metaclass=abc.ABCMeta): + @abc.abstractmethod + def optional(self, type: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def list(self, type: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def dict(self, key: str, value: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def union(self, *types: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def iterable(self, type: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def async_iterable(self, type: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def async_iterator(self, type: str) -> str: + raise NotImplementedError() + + @abc.abstractmethod + def imports(self) -> Dict[str, Optional[Set[str]]]: + """ + Returns either the direct import as a key with none as value, or a set of + values to import from the key. 
+ """ + raise NotImplementedError() + + def import_lines(self) -> Iterator: + imports = self.imports() + for key, value in imports.items(): + if value is None: + yield f"import {key}" + else: + yield f"from {key} import (" + for v in sorted(value): + yield f" {v}," + yield ")" + + +@dataclass +class DirectImportTypingCompiler(TypingCompiler): + _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set)) + + def optional(self, type: str) -> str: + self._imports["typing"].add("Optional") + return f"Optional[{type}]" + + def list(self, type: str) -> str: + self._imports["typing"].add("List") + return f"List[{type}]" + + def dict(self, key: str, value: str) -> str: + self._imports["typing"].add("Dict") + return f"Dict[{key}, {value}]" + + def union(self, *types: str) -> str: + self._imports["typing"].add("Union") + return f"Union[{', '.join(types)}]" + + def iterable(self, type: str) -> str: + self._imports["typing"].add("Iterable") + return f"Iterable[{type}]" + + def async_iterable(self, type: str) -> str: + self._imports["typing"].add("AsyncIterable") + return f"AsyncIterable[{type}]" + + def async_iterator(self, type: str) -> str: + self._imports["typing"].add("AsyncIterator") + return f"AsyncIterator[{type}]" + + def imports(self) -> Dict[str, Optional[Set[str]]]: + return {k: v if v else None for k, v in self._imports.items()} + + +@dataclass +class TypingImportTypingCompiler(TypingCompiler): + _imported: bool = False + + def optional(self, type: str) -> str: + self._imported = True + return f"typing.Optional[{type}]" + + def list(self, type: str) -> str: + self._imported = True + return f"typing.List[{type}]" + + def dict(self, key: str, value: str) -> str: + self._imported = True + return f"typing.Dict[{key}, {value}]" + + def union(self, *types: str) -> str: + self._imported = True + return f"typing.Union[{', '.join(types)}]" + + def iterable(self, type: str) -> str: + self._imported = True + return f"typing.Iterable[{type}]" + + def 
async_iterable(self, type: str) -> str: + self._imported = True + return f"typing.AsyncIterable[{type}]" + + def async_iterator(self, type: str) -> str: + self._imported = True + return f"typing.AsyncIterator[{type}]" + + def imports(self) -> Dict[str, Optional[Set[str]]]: + if self._imported: + return {"typing": None} + return {} + + +@dataclass +class NoTyping310TypingCompiler(TypingCompiler): + _imports: Dict[str, Set[str]] = field(default_factory=lambda: defaultdict(set)) + + @staticmethod + def _fmt(type: str) -> str: # for now this is necessary till 3.14 + if type.startswith('"'): + return type[1:-1] + return type + + def optional(self, type: str) -> str: + return f'"{self._fmt(type)} | None"' + + def list(self, type: str) -> str: + return f'"list[{self._fmt(type)}]"' + + def dict(self, key: str, value: str) -> str: + return f'"dict[{key}, {self._fmt(value)}]"' + + def union(self, *types: str) -> str: + return f'"{" | ".join(map(self._fmt, types))}"' + + def iterable(self, type: str) -> str: + self._imports["collections.abc"].add("Iterable") + return f'"Iterable[{type}]"' + + def async_iterable(self, type: str) -> str: + self._imports["collections.abc"].add("AsyncIterable") + return f'"AsyncIterable[{type}]"' + + def async_iterator(self, type: str) -> str: + self._imports["collections.abc"].add("AsyncIterator") + return f'"AsyncIterator[{type}]"' + + def imports(self) -> Dict[str, Optional[Set[str]]]: + return {k: v if v else None for k, v in self._imports.items()} diff --git a/src/betterproto/templates/header.py.j2 b/src/betterproto/templates/header.py.j2 new file mode 100644 index 000000000..b6d0a6c44 --- /dev/null +++ b/src/betterproto/templates/header.py.j2 @@ -0,0 +1,57 @@ +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# sources: {{ ', '.join(output_file.input_filenames) }} +# plugin: python-betterproto +# This file has been @generated + +__all__ = ( + {%- for enum in output_file.enums -%} + "{{ enum.py_name }}", + {%- endfor -%} + {%- for message in output_file.messages -%} + "{{ message.py_name }}", + {%- endfor -%} + {%- for service in output_file.services -%} + "{{ service.py_name }}Stub", + "{{ service.py_name }}Base", + {%- endfor -%} +) + +{% for i in output_file.python_module_imports|sort %} +import {{ i }} +{% endfor %} + +{% if output_file.pydantic_dataclasses %} +from pydantic.dataclasses import dataclass +{%- else -%} +from dataclasses import dataclass +{% endif %} + +{% if output_file.datetime_imports %} +from datetime import {% for i in output_file.datetime_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %} + +{% endif%} +{% set typing_imports = output_file.typing_compiler.imports() %} +{% if typing_imports %} +{% for line in output_file.typing_compiler.import_lines() %} +{{ line }} +{% endfor %} +{% endif %} + +{% if output_file.pydantic_imports %} +from pydantic import {% for i in output_file.pydantic_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %} + +{% endif %} + +import betterproto +{% if output_file.services %} +from betterproto.grpc.grpclib_server import ServiceBase +import grpclib +{% endif %} + +{% if output_file.imports_type_checking_only %} +from typing import TYPE_CHECKING + +if TYPE_CHECKING: +{% for i in output_file.imports_type_checking_only|sort %} {{ i }} +{% endfor %} +{% endif %} diff --git a/src/betterproto/templates/template.py.j2 b/src/betterproto/templates/template.py.j2 index d27cff610..4a252aec6 100644 --- a/src/betterproto/templates/template.py.j2 +++ b/src/betterproto/templates/template.py.j2 @@ -1,26 +1,3 @@ -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# sources: {{ ', '.join(output_file.input_filenames) }} -# plugin: python-betterproto -{% for i in output_file.python_module_imports|sort %} -import {{ i }} -{% endfor %} -from dataclasses import dataclass -{% if output_file.datetime_imports %} -from datetime import {% for i in output_file.datetime_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %} - -{% endif%} -{% if output_file.typing_imports %} -from typing import {% for i in output_file.typing_imports|sort %}{{ i }}{% if not loop.last %}, {% endif %}{% endfor %} - -{% endif %} - -import betterproto -from betterproto.grpc.grpclib_server import ServiceBase -{% if output_file.services %} -import grpclib -{% endif %} - - {% if output_file.enums %}{% for enum in output_file.enums %} class {{ enum.py_name }}(betterproto.Enum): {% if enum.comment %} @@ -28,27 +5,40 @@ class {{ enum.py_name }}(betterproto.Enum): {% endif %} {% for entry in enum.entries %} + {{ entry.name }} = {{ entry.value }} {% if entry.comment %} {{ entry.comment }} + {% endif %} - {{ entry.name }} = {{ entry.value }} {% endfor %} + {% if output_file.pydantic_dataclasses %} + @classmethod + def __get_pydantic_core_schema__(cls, _source_type, _handler): + from pydantic_core import core_schema + + return core_schema.int_schema(ge=0) + {% endif %} {% endfor %} {% endif %} {% for message in output_file.messages %} +{% if output_file.pydantic_dataclasses %} +@dataclass(eq=False, repr=False, config={"extra": "forbid"}) +{% else %} @dataclass(eq=False, repr=False) +{% endif %} class {{ message.py_name }}(betterproto.Message): {% if message.comment %} {{ message.comment }} {% endif %} {% for field in message.fields %} + {{ field.get_field_string() }} {% if field.comment %} {{ field.comment }} + {% endif %} - {{ field.get_field_string() }} {% endfor %} {% if not message.fields %} pass @@ -61,11 +51,16 @@ class {{ message.py_name }}(betterproto.Message): {% endif %} super().__post_init__() {% for field in message.deprecated_fields %} - if 
self.{{ field }}: + if self.is_set("{{ field }}"): warnings.warn("{{ message.py_name }}.{{ field }} is deprecated", DeprecationWarning) {% endfor %} {% endif %} + {% if output_file.pydantic_dataclasses and message.has_oneof_fields %} + @model_validator(mode='after') + def check_oneof(cls, values): + return cls._validate_field_groups(values) + {% endif %} {% endfor %} {% for service in output_file.services %} @@ -79,60 +74,45 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): {% for method in service.methods %} async def {{ method.py_name }}(self {%- if not method.client_streaming -%} - {%- if method.py_input_message and method.py_input_message.fields -%}, *, - {%- for field in method.py_input_message.fields -%} - {{ field.py_name }}: {% if field.py_name in method.mutable_default_args and not field.annotation.startswith("Optional[") -%} - Optional[{{ field.annotation }}] - {%- else -%} - {{ field.annotation }} - {%- endif -%} = - {%- if field.py_name not in method.mutable_default_args -%} - {{ field.default_value_string }} - {%- else -%} - None - {% endif -%} - {%- if not loop.last %}, {% endif -%} - {%- endfor -%} - {%- endif -%} + , {{ method.py_input_message_param }}: "{{ method.py_input_message_type }}" {%- else -%} {# Client streaming: need a request iterator instead #} - , request_iterator: Union[AsyncIterable["{{ method.py_input_message_type }}"], Iterable["{{ method.py_input_message_type }}"]] + , {{ method.py_input_message_param }}_iterator: "{{ output_file.typing_compiler.union(output_file.typing_compiler.async_iterable(method.py_input_message_type), output_file.typing_compiler.iterable(method.py_input_message_type)) }}" {%- endif -%} - ) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}: + , + * + , timeout: {{ output_file.typing_compiler.optional("float") }} = None + , deadline: {{ output_file.typing_compiler.optional('"Deadline"') }} = None + , 
metadata: {{ output_file.typing_compiler.optional('"MetadataLike"') }} = None + ) -> "{% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type ) }}{% else %}{{ method.py_output_message_type }}{% endif %}": {% if method.comment %} {{ method.comment }} {% endif %} - {%- for py_name, zero in method.mutable_default_args.items() %} - {{ py_name }} = {{ py_name }} or {{ zero }} - {% endfor %} + {% if method.proto_obj.options.deprecated %} + warnings.warn("{{ service.py_name }}.{{ method.py_name }} is deprecated", DeprecationWarning) - {% if not method.client_streaming %} - request = {{ method.py_input_message_type }}() - {% for field in method.py_input_message.fields %} - {% if field.field_type == 'message' %} - if {{ field.py_name }} is not None: - request.{{ field.py_name }} = {{ field.py_name }} - {% else %} - request.{{ field.py_name }} = {{ field.py_name }} - {% endif %} - {% endfor %} {% endif %} - {% if method.server_streaming %} {% if method.client_streaming %} async for response in self._stream_stream( "{{ method.route }}", - request_iterator, + {{ method.py_input_message_param }}_iterator, {{ method.py_input_message_type }}, {{ method.py_output_message_type.strip('"') }}, + timeout=timeout, + deadline=deadline, + metadata=metadata, ): yield response {% else %}{# i.e. 
not client streaming #} async for response in self._unary_stream( "{{ method.route }}", - request, + {{ method.py_input_message_param }}, {{ method.py_output_message_type.strip('"') }}, + timeout=timeout, + deadline=deadline, + metadata=metadata, ): yield response @@ -141,15 +121,21 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): {% if method.client_streaming %} return await self._stream_unary( "{{ method.route }}", - request_iterator, + {{ method.py_input_message_param }}_iterator, {{ method.py_input_message_type }}, - {{ method.py_output_message_type.strip('"') }} + {{ method.py_output_message_type.strip('"') }}, + timeout=timeout, + deadline=deadline, + metadata=metadata, ) {% else %}{# i.e. not client streaming #} return await self._unary_unary( "{{ method.route }}", - request, - {{ method.py_output_message_type.strip('"') }} + {{ method.py_input_message_param }}, + {{ method.py_output_message_type.strip('"') }}, + timeout=timeout, + deadline=deadline, + metadata=metadata, ) {% endif %}{# client streaming #} {% endif %} @@ -157,6 +143,10 @@ class {{ service.py_name }}Stub(betterproto.ServiceStub): {% endfor %} {% endfor %} +{% for i in output_file.imports_end %} +{{ i }} +{% endfor %} + {% for service in output_file.services %} class {{ service.py_name }}Base(ServiceBase): {% if service.comment %} @@ -167,58 +157,44 @@ class {{ service.py_name }}Base(ServiceBase): {% for method in service.methods %} async def {{ method.py_name }}(self {%- if not method.client_streaming -%} - {%- if method.py_input_message and method.py_input_message.fields -%}, - {%- for field in method.py_input_message.fields -%} - {{ field.py_name }}: {% if field.py_name in method.mutable_default_args and not field.annotation.startswith("Optional[") -%} - Optional[{{ field.annotation }}] - {%- else -%} - {{ field.annotation }} - {%- endif -%} - {%- if not loop.last %}, {% endif -%} - {%- endfor -%} - {%- endif -%} + , {{ method.py_input_message_param }}: "{{ 
method.py_input_message_type }}" {%- else -%} {# Client streaming: need a request iterator instead #} - , request_iterator: AsyncIterator["{{ method.py_input_message_type }}"] + , {{ method.py_input_message_param }}_iterator: {{ output_file.typing_compiler.async_iterator(method.py_input_message_type) }} {%- endif -%} - ) -> {% if method.server_streaming %}AsyncIterator["{{ method.py_output_message_type }}"]{% else %}"{{ method.py_output_message_type }}"{% endif %}: + ) -> {% if method.server_streaming %}{{ output_file.typing_compiler.async_iterator(method.py_output_message_type) }}{% else %}"{{ method.py_output_message_type }}"{% endif %}: {% if method.comment %} {{ method.comment }} {% endif %} raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + {% if method.server_streaming %} + yield {{ method.py_output_message_type }}() + {% endif %} {% endfor %} {% for method in service.methods %} - async def __rpc_{{ method.py_name }}(self, stream: grpclib.server.Stream) -> None: + async def __rpc_{{ method.py_name }}(self, stream: "grpclib.server.Stream[{{ method.py_input_message_type }}, {{ method.py_output_message_type }}]") -> None: {% if not method.client_streaming %} request = await stream.recv_message() - - request_kwargs = { - {% for field in method.py_input_message.fields %} - "{{ field.py_name }}": request.{{ field.py_name }}, - {% endfor %} - } - {% else %} - request_kwargs = {"request_iterator": stream.__aiter__()} + request = stream.__aiter__() {% endif %} - {% if not method.server_streaming %} - response = await self.{{ method.py_name }}(**request_kwargs) + response = await self.{{ method.py_name }}(request) await stream.send_message(response) {% else %} await self._call_rpc_handler_server_stream( self.{{ method.py_name }}, stream, - request_kwargs, + request, ) {% endif %} {% endfor %} - def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + def __mapping__(self) -> {{ output_file.typing_compiler.dict("str", "grpclib.const.Handler") }}: return { 
{% for method in service.methods %} "{{ method.route }}": grpclib.const.Handler( @@ -239,7 +215,3 @@ class {{ service.py_name }}Base(ServiceBase): } {% endfor %} - -{% for i in output_file.imports|sort %} -{{ i }} -{% endfor %} diff --git a/src/betterproto/utils.py b/src/betterproto/utils.py new file mode 100644 index 000000000..b977fc713 --- /dev/null +++ b/src/betterproto/utils.py @@ -0,0 +1,56 @@ +from __future__ import annotations + +from typing import ( + Any, + Callable, + Generic, + Optional, + Type, + TypeVar, +) + +from typing_extensions import ( + Concatenate, + ParamSpec, + Self, +) + + +SelfT = TypeVar("SelfT") +P = ParamSpec("P") +HybridT = TypeVar("HybridT", covariant=True) + + +class hybridmethod(Generic[SelfT, P, HybridT]): + def __init__( + self, + func: Callable[ + Concatenate[type[SelfT], P], HybridT + ], # Must be the classmethod version + ): + self.cls_func = func + self.__doc__ = func.__doc__ + + def instancemethod(self, func: Callable[Concatenate[SelfT, P], HybridT]) -> Self: + self.instance_func = func + return self + + def __get__( + self, instance: Optional[SelfT], owner: Type[SelfT] + ) -> Callable[P, HybridT]: + if instance is None or self.instance_func is None: + # either bound to the class, or no instance method available + return self.cls_func.__get__(owner, None) + return self.instance_func.__get__(instance, owner) + + +T_co = TypeVar("T_co") +TT_co = TypeVar("TT_co", bound="type[Any]") + + +class classproperty(Generic[TT_co, T_co]): + def __init__(self, func: Callable[[TT_co], T_co]): + self.__func__ = func + + def __get__(self, instance: Any, type: TT_co) -> T_co: + return self.__func__(type) diff --git a/tests/conftest.py b/tests/conftest.py index 6ab4f030a..34c044e0c 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,12 +1,11 @@ -import pytest - +import copy +import sys -def pytest_addoption(parser): - parser.addoption( - "--repeat", type=int, default=1, help="repeat the operation multiple times" - ) +import pytest 
-@pytest.fixture(scope="session") -def repeat(request): - return request.config.getoption("repeat") +@pytest.fixture +def reset_sys_path(): + original = copy.deepcopy(sys.path) + yield + sys.path = original diff --git a/tests/generate.py b/tests/generate.py index 6795ae6f6..91bdbb8a2 100755 --- a/tests/generate.py +++ b/tests/generate.py @@ -1,20 +1,22 @@ #!/usr/bin/env python import asyncio import os -from pathlib import Path import platform import shutil import sys +from pathlib import Path from typing import Set from tests.util import ( get_directories, inputs_path, output_path_betterproto, + output_path_betterproto_pydantic, output_path_reference, protoc, ) + # Force pure-python implementation instead of C++, otherwise imports # break things because we can't properly reset the symbol database. os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" @@ -60,13 +62,15 @@ async def generate(whitelist: Set[str], verbose: bool): if result != 0: failed_test_cases.append(test_case_name) - if failed_test_cases: + if len(failed_test_cases) > 0: sys.stderr.write( "\n\033[31;1;4mFailed to generate the following test cases:\033[0m\n" ) for failed_test_case in failed_test_cases: sys.stderr.write(f"- {failed_test_case}\n") + sys.exit(1) + async def generate_test_case_output( test_case_input_path: Path, test_case_name: str, verbose: bool @@ -76,10 +80,12 @@ async def generate_test_case_output( """ test_case_output_path_reference = output_path_reference.joinpath(test_case_name) - test_case_output_path_betterproto = output_path_betterproto.joinpath(test_case_name) + test_case_output_path_betterproto = output_path_betterproto + test_case_output_path_betterproto_pyd = output_path_betterproto_pydantic os.makedirs(test_case_output_path_reference, exist_ok=True) os.makedirs(test_case_output_path_betterproto, exist_ok=True) + os.makedirs(test_case_output_path_betterproto_pyd, exist_ok=True) clear_directory(test_case_output_path_reference) 
clear_directory(test_case_output_path_betterproto) @@ -87,28 +93,75 @@ async def generate_test_case_output( ( (ref_out, ref_err, ref_code), (plg_out, plg_err, plg_code), + (plg_out_pyd, plg_err_pyd, plg_code_pyd), ) = await asyncio.gather( protoc(test_case_input_path, test_case_output_path_reference, True), protoc(test_case_input_path, test_case_output_path_betterproto, False), + protoc( + test_case_input_path, test_case_output_path_betterproto_pyd, False, True + ), ) - message = f"Generated output for {test_case_name!r}" + if ref_code == 0: + print(f"\033[31;1;4mGenerated reference output for {test_case_name!r}\033[0m") + else: + print( + f"\033[31;1;4mFailed to generate reference output for {test_case_name!r}\033[0m" + ) + print(ref_err.decode()) + if verbose: - print(f"\033[31;1;4m{message}\033[0m") if ref_out: + print("Reference stdout:") sys.stdout.buffer.write(ref_out) + sys.stdout.buffer.flush() + if ref_err: + print("Reference stderr:") sys.stderr.buffer.write(ref_err) + sys.stderr.buffer.flush() + + if plg_code == 0: + print(f"\033[31;1;4mGenerated plugin output for {test_case_name!r}\033[0m") + else: + print( + f"\033[31;1;4mFailed to generate plugin output for {test_case_name!r}\033[0m" + ) + print(plg_err.decode()) + + if verbose: if plg_out: + print("Plugin stdout:") sys.stdout.buffer.write(plg_out) + sys.stdout.buffer.flush() + if plg_err: + print("Plugin stderr:") sys.stderr.buffer.write(plg_err) - sys.stdout.buffer.flush() - sys.stderr.buffer.flush() + sys.stderr.buffer.flush() + + if plg_code_pyd == 0: + print( + f"\033[31;1;4mGenerated plugin (pydantic compatible) output for {test_case_name!r}\033[0m" + ) else: - print(message) + print( + f"\033[31;1;4mFailed to generate plugin (pydantic compatible) output for {test_case_name!r}\033[0m" + ) + print(plg_err_pyd.decode()) + + if verbose: + if plg_out_pyd: + print("Plugin stdout:") + sys.stdout.buffer.write(plg_out_pyd) + sys.stdout.buffer.flush() - return max(ref_code, plg_code) + if plg_err_pyd: + 
print("Plugin stderr:") + sys.stderr.buffer.write(plg_err_pyd) + sys.stderr.buffer.flush() + + return max(ref_code, plg_code, plg_code_pyd) HELP = "\n".join( @@ -137,9 +190,19 @@ def main(): whitelist = set(sys.argv[1:]) if platform.system() == "Windows": - asyncio.set_event_loop(asyncio.ProactorEventLoop()) - - asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose)) + # for python version prior to 3.8, loop policy needs to be set explicitly + # https://docs.python.org/3/library/asyncio-policy.html#asyncio.DefaultEventLoopPolicy + try: + asyncio.set_event_loop_policy(asyncio.WindowsProactorEventLoopPolicy()) + except AttributeError: + # python < 3.7 does not have asyncio.WindowsProactorEventLoopPolicy + asyncio.get_event_loop_policy().set_event_loop(asyncio.ProactorEventLoop()) + + try: + asyncio.run(generate(whitelist, verbose)) + except AttributeError: + # compatibility code for python < 3.7 + asyncio.get_event_loop().run_until_complete(generate(whitelist, verbose)) if __name__ == "__main__": diff --git a/tests/grpc/test_grpclib_client.py b/tests/grpc/test_grpclib_client.py index 0f88a9ba1..b9eb55915 100644 --- a/tests/grpc/test_grpclib_client.py +++ b/tests/grpc/test_grpclib_client.py @@ -1,34 +1,37 @@ import asyncio -import sys +import uuid -from tests.output_betterproto.service.service import ( - DoThingRequest, - DoThingResponse, - GetThingRequest, - TestStub as ThingServiceClient, -) import grpclib +import grpclib.client import grpclib.metadata import grpclib.server -from grpclib.testing import ChannelFor import pytest +from grpclib.testing import ChannelFor + from betterproto.grpc.util.async_channel import AsyncChannel +from tests.output_betterproto.service import ( + DoThingRequest, + DoThingResponse, + GetThingRequest, + TestStub as ThingServiceClient, +) + from .thing_service import ThingService -async def _test_client(client, name="clean room", **kwargs): - response = await client.do_thing(name=name) +async def _test_client(client: 
ThingServiceClient, name="clean room", **kwargs): + response = await client.do_thing(DoThingRequest(name=name), **kwargs) assert response.names == [name] def _assert_request_meta_received(deadline, metadata): def server_side_test(stream): - assert stream.deadline._timestamp == pytest.approx( - deadline._timestamp, 1 - ), "The provided deadline should be received serverside" - assert ( - stream.metadata["authorization"] == metadata["authorization"] - ), "The provided authorization metadata should be received serverside" + assert stream.deadline._timestamp == pytest.approx(deadline._timestamp, 1), ( + "The provided deadline should be received serverside" + ) + assert stream.metadata["authorization"] == metadata["authorization"], ( + "The provided authorization metadata should be received serverside" + ) return server_side_test @@ -62,7 +65,7 @@ async def test_trailer_only_error_unary_unary( ) async with ChannelFor([service]) as channel: with pytest.raises(grpclib.exceptions.GRPCError) as e: - await ThingServiceClient(channel).do_thing(name="something") + await ThingServiceClient(channel).do_thing(DoThingRequest(name="something")) assert e.value.status == grpclib.Status.UNAUTHENTICATED @@ -80,16 +83,13 @@ async def test_trailer_only_error_stream_unary( async with ChannelFor([service]) as channel: with pytest.raises(grpclib.exceptions.GRPCError) as e: await ThingServiceClient(channel).do_many_things( - request_iterator=[DoThingRequest(name="something")] + do_thing_request_iterator=[DoThingRequest(name="something")] ) await _test_client(ThingServiceClient(channel)) assert e.value.status == grpclib.Status.UNAUTHENTICATED @pytest.mark.asyncio -@pytest.mark.skipif( - sys.version_info < (3, 8), reason="async mock spy does works for python3.8+" -) async def test_service_call_mutable_defaults(mocker): async with ChannelFor([ThingService()]) as channel: client = ThingServiceClient(channel) @@ -171,6 +171,56 @@ async def test_service_call_lower_level_with_overrides(): assert 
response.names == [THING_TO_DO] +@pytest.mark.asyncio +@pytest.mark.parametrize( + ("overrides_gen",), + [ + (lambda: dict(timeout=10),), + (lambda: dict(deadline=grpclib.metadata.Deadline.from_timeout(10)),), + (lambda: dict(metadata={"authorization": str(uuid.uuid4())}),), + (lambda: dict(timeout=20, metadata={"authorization": str(uuid.uuid4())}),), + ], +) +async def test_service_call_high_level_with_overrides(mocker, overrides_gen): + overrides = overrides_gen() + request_spy = mocker.spy(grpclib.client.Channel, "request") + name = str(uuid.uuid4()) + defaults = dict( + timeout=99, + deadline=grpclib.metadata.Deadline.from_timeout(99), + metadata={"authorization": name}, + ) + + async with ChannelFor( + [ + ThingService( + test_hook=_assert_request_meta_received( + deadline=grpclib.metadata.Deadline.from_timeout( + overrides.get("timeout", 99) + ), + metadata=overrides.get("metadata", defaults.get("metadata")), + ) + ) + ] + ) as channel: + client = ThingServiceClient(channel, **defaults) + await _test_client(client, name=name, **overrides) + assert request_spy.call_count == 1 + + # for python <3.8 request_spy.call_args.kwargs do not work + _, request_spy_call_kwargs = request_spy.call_args_list[0] + + # ensure all overrides were successful + for key, value in overrides.items(): + assert key in request_spy_call_kwargs + assert request_spy_call_kwargs[key] == value + + # ensure default values were retained + for key in set(defaults.keys()) - set(overrides.keys()): + assert key in request_spy_call_kwargs + assert request_spy_call_kwargs[key] == defaults[key] + + @pytest.mark.asyncio async def test_async_gen_for_unary_stream_request(): thing_name = "my milkshakes" @@ -178,7 +228,9 @@ async def test_async_gen_for_unary_stream_request(): async with ChannelFor([ThingService()]) as channel: client = ThingServiceClient(channel) expected_versions = [5, 4, 3, 2, 1] - async for response in client.get_thing_versions(name=thing_name): + async for response in 
client.get_thing_versions( + GetThingRequest(name=thing_name) + ): assert response.name == thing_name assert response.version == expected_versions.pop() @@ -213,6 +265,30 @@ async def test_async_gen_for_stream_stream_request(): else: # No more things to send make sure channel is closed request_chan.close() - assert response_index == len( - expected_things - ), "Didn't receive all expected responses" + assert response_index == len(expected_things), ( + "Didn't receive all expected responses" + ) + + +@pytest.mark.asyncio +async def test_stream_unary_with_empty_iterable(): + things = [] # empty + + async with ChannelFor([ThingService()]) as channel: + client = ThingServiceClient(channel) + requests = [DoThingRequest(name) for name in things] + response = await client.do_many_things(requests) + assert len(response.names) == 0 + + +@pytest.mark.asyncio +async def test_stream_stream_with_empty_iterable(): + things = [] # empty + + async with ChannelFor([ThingService()]) as channel: + client = ThingServiceClient(channel) + requests = [GetThingRequest(name) for name in things] + responses = [ + response async for response in client.get_different_things(requests) + ] + assert len(responses) == 0 diff --git a/tests/grpc/test_stream_stream.py b/tests/grpc/test_stream_stream.py index 020262d4f..9a1e5b89e 100644 --- a/tests/grpc/test_stream_stream.py +++ b/tests/grpc/test_stream_stream.py @@ -1,10 +1,12 @@ import asyncio -import betterproto -from betterproto.grpc.util.async_channel import AsyncChannel from dataclasses import dataclass -import pytest from typing import AsyncIterator +import pytest + +import betterproto +from betterproto.grpc.util.async_channel import AsyncChannel + @dataclass class Message(betterproto.Message): diff --git a/tests/grpc/thing_service.py b/tests/grpc/thing_service.py index ae0585286..7723a29f8 100644 --- a/tests/grpc/thing_service.py +++ b/tests/grpc/thing_service.py @@ -1,12 +1,14 @@ -from tests.output_betterproto.service.service import ( - 
DoThingResponse, +from typing import Dict + +import grpclib +import grpclib.server + +from tests.output_betterproto.service import ( DoThingRequest, + DoThingResponse, GetThingRequest, GetThingResponse, ) -import grpclib -import grpclib.server -from typing import Dict class ThingService: @@ -25,7 +27,7 @@ async def do_thing( async def do_many_things( self, stream: "grpclib.server.Stream[DoThingRequest, DoThingResponse]" ): - thing_names = [request.name for request in stream] + thing_names = [request.name async for request in stream] if self.test_hook is not None: self.test_hook(stream) await stream.send_message(DoThingResponse(thing_names)) diff --git a/tests/inputs/bool/bool.proto b/tests/inputs/bool/bool.proto index 237e7dbf9..77836b8e0 100644 --- a/tests/inputs/bool/bool.proto +++ b/tests/inputs/bool/bool.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package bool; + message Test { bool value = 1; } diff --git a/tests/inputs/bool/test_bool.py b/tests/inputs/bool/test_bool.py index e91bf0ab7..6b0ad0be5 100644 --- a/tests/inputs/bool/test_bool.py +++ b/tests/inputs/bool/test_bool.py @@ -1,6 +1,24 @@ +import pytest + from tests.output_betterproto.bool import Test +from tests.output_betterproto_pydantic.bool import Test as TestPyd def test_value(): message = Test() assert not message.value, "Boolean is False by default" + + +def test_pydantic_no_value(): + message = TestPyd() + assert not message.value, "Boolean is False by default" + + +def test_pydantic_value(): + message = TestPyd(value=False) + assert not message.value + + +def test_pydantic_bad_value(): + with pytest.raises(ValueError): + TestPyd(value=123) diff --git a/tests/inputs/bytes/bytes.proto b/tests/inputs/bytes/bytes.proto index de677e315..989546857 100644 --- a/tests/inputs/bytes/bytes.proto +++ b/tests/inputs/bytes/bytes.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package bytes; + message Test { bytes data = 1; } diff --git a/tests/inputs/casing/casing.proto b/tests/inputs/casing/casing.proto index 
ca458b527..2023d9340 100644 --- a/tests/inputs/casing/casing.proto +++ b/tests/inputs/casing/casing.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package casing; + enum my_enum { ZERO = 0; ONE = 1; diff --git a/tests/inputs/casing/test_casing.py b/tests/inputs/casing/test_casing.py index 9ca424357..b05004346 100644 --- a/tests/inputs/casing/test_casing.py +++ b/tests/inputs/casing/test_casing.py @@ -4,20 +4,20 @@ def test_message_attributes(): message = Test() - assert hasattr( - message, "snake_case_message" - ), "snake_case field name is same in python" + assert hasattr(message, "snake_case_message"), ( + "snake_case field name is same in python" + ) assert hasattr(message, "camel_case"), "CamelCase field is snake_case in python" assert hasattr(message, "uppercase"), "UPPERCASE field is lowercase in python" def test_message_casing(): - assert hasattr( - casing, "SnakeCaseMessage" - ), "snake_case Message name is converted to CamelCase in python" + assert hasattr(casing, "SnakeCaseMessage"), ( + "snake_case Message name is converted to CamelCase in python" + ) def test_enum_casing(): - assert hasattr( - casing, "MyEnum" - ), "snake_case Enum name is converted to CamelCase in python" + assert hasattr(casing, "MyEnum"), ( + "snake_case Enum name is converted to CamelCase in python" + ) diff --git a/tests/inputs/casing_inner_class/casing_inner_class.proto b/tests/inputs/casing_inner_class/casing_inner_class.proto new file mode 100644 index 000000000..7d231beb5 --- /dev/null +++ b/tests/inputs/casing_inner_class/casing_inner_class.proto @@ -0,0 +1,11 @@ +// https://github.com/danielgtaylor/python-betterproto/issues/344 +syntax = "proto3"; + +package casing_inner_class; + +message Test { + message inner_class { + sint32 old_exp = 1; + } + inner_class inner = 2; +} \ No newline at end of file diff --git a/tests/inputs/casing_inner_class/test_casing_inner_class.py b/tests/inputs/casing_inner_class/test_casing_inner_class.py new file mode 100644 index 000000000..466b53230 --- 
/dev/null +++ b/tests/inputs/casing_inner_class/test_casing_inner_class.py @@ -0,0 +1,14 @@ +import tests.output_betterproto.casing_inner_class as casing_inner_class + + +def test_message_casing_inner_class_name(): + assert hasattr(casing_inner_class, "TestInnerClass"), ( + "Inline defined Message is correctly converted to CamelCase" + ) + + +def test_message_casing_inner_class_attributes(): + message = casing_inner_class.Test() + assert hasattr(message.inner, "old_exp"), ( + "Inline defined Message attribute is snake_case" + ) diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto index 9964dfa0d..c6d42c314 100644 --- a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto +++ b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package casing_message_field_uppercase; + message Test { int32 UPPERCASE = 1; int32 UPPERCASE_V2 = 2; diff --git a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py index 2b32b5308..c1a973c19 100644 --- a/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py +++ b/tests/inputs/casing_message_field_uppercase/casing_message_field_uppercase.py @@ -3,12 +3,12 @@ def test_message_casing(): message = Test() - assert hasattr( - message, "uppercase" - ), "UPPERCASE attribute is converted to 'uppercase' in python" - assert hasattr( - message, "uppercase_v2" - ), "UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" - assert hasattr( - message, "upper_camel_case" - ), "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" + assert hasattr(message, "uppercase"), ( + "UPPERCASE attribute is converted to 'uppercase' in python" + ) + assert hasattr(message, "uppercase_v2"), ( + 
"UPPERCASE_V2 attribute is converted to 'uppercase_v2' in python" + ) + assert hasattr(message, "upper_camel_case"), ( + "UPPER_CAMEL_CASE attribute is converted to upper_camel_case in python" + ) diff --git a/tests/inputs/config.py b/tests/inputs/config.py index 49882b0d5..6da1f887d 100644 --- a/tests/inputs/config.py +++ b/tests/inputs/config.py @@ -9,6 +9,7 @@ } services = { + "googletypes_request", "googletypes_response", "googletypes_response_embedded", "service", @@ -18,6 +19,7 @@ "googletypes_service_returns_googletype", "example_service", "empty_service", + "service_uppercase", } diff --git a/tests/inputs/deprecated/deprecated.json b/tests/inputs/deprecated/deprecated.json index 9da52bbb5..43b2b65a3 100644 --- a/tests/inputs/deprecated/deprecated.json +++ b/tests/inputs/deprecated/deprecated.json @@ -1,4 +1,6 @@ { - "v": 10, + "message": { + "value": "hello" + }, "value": 10 } diff --git a/tests/inputs/deprecated/deprecated.proto b/tests/inputs/deprecated/deprecated.proto index aa1f8183e..f504d03af 100644 --- a/tests/inputs/deprecated/deprecated.proto +++ b/tests/inputs/deprecated/deprecated.proto @@ -1,9 +1,21 @@ syntax = "proto3"; +package deprecated; + // Some documentation about the Test message. message Test { - // Some documentation about the value. 
- option deprecated = true; - int32 v = 1 [deprecated=true]; + Message message = 1 [deprecated=true]; int32 value = 2; } + +message Message { + option deprecated = true; + string value = 1; +} + +message Empty {} + +service TestService { + rpc func(Empty) returns (Empty); + rpc deprecated_func(Empty) returns (Empty) { option deprecated = true; }; +} diff --git a/tests/inputs/deprecated_field/deprecated_field.json b/tests/inputs/deprecated_field/deprecated_field.json deleted file mode 100644 index 9da52bbb5..000000000 --- a/tests/inputs/deprecated_field/deprecated_field.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "v": 10, - "value": 10 -} diff --git a/tests/inputs/deprecated_field/deprecated_field.proto b/tests/inputs/deprecated_field/deprecated_field.proto deleted file mode 100644 index 04de1a964..000000000 --- a/tests/inputs/deprecated_field/deprecated_field.proto +++ /dev/null @@ -1,8 +0,0 @@ -syntax = "proto3"; - -// Some documentation about the Test message. -message Test { - // Some documentation about the value. 
- int32 v = 1 [deprecated=true]; - int32 value = 2; -} diff --git a/tests/inputs/documentation/documentation.proto b/tests/inputs/documentation/documentation.proto new file mode 100644 index 000000000..7fc6c83b4 --- /dev/null +++ b/tests/inputs/documentation/documentation.proto @@ -0,0 +1,44 @@ +syntax = "proto3"; +package documentation; + +// Documentation of message 1 +// other line 1 + +// Documentation of message 2 +// other line 2 +message Test { // Documentation of message 3 + // Documentation of field 1 + // other line 1 + + // Documentation of field 2 + // other line 2 + uint32 x = 1; // Documentation of field 3 +} + +// Documentation of enum 1 +// other line 1 + +// Documentation of enum 2 +// other line 2 +enum Enum { // Documentation of enum 3 + // Documentation of variant 1 + // other line 1 + + // Documentation of variant 2 + // other line 2 + Enum_Variant = 0; // Documentation of variant 3 +} + +// Documentation of service 1 +// other line 1 + +// Documentation of service 2 +// other line 2 +service Service { // Documentation of service 3 + // Documentation of method 1 + // other line 1 + + // Documentation of method 2 + // other line 2 + rpc get(Test) returns (Test); // Documentation of method 3 +} diff --git a/tests/inputs/double/double.proto b/tests/inputs/double/double.proto index 88525d925..66aea95d1 100644 --- a/tests/inputs/double/double.proto +++ b/tests/inputs/double/double.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package double; + message Test { double count = 1; } diff --git a/tests/inputs/empty_repeated/empty_repeated.proto b/tests/inputs/empty_repeated/empty_repeated.proto index 3be831ace..f787301fb 100644 --- a/tests/inputs/empty_repeated/empty_repeated.proto +++ b/tests/inputs/empty_repeated/empty_repeated.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package empty_repeated; + message MessageA { repeated float values = 1; } diff --git a/tests/inputs/entry/entry.proto b/tests/inputs/entry/entry.proto new file mode 100644 index 
000000000..3f2af4d18 --- /dev/null +++ b/tests/inputs/entry/entry.proto @@ -0,0 +1,20 @@ +syntax = "proto3"; + +package entry; + +// This is a minimal example of a repeated message field that caused issues when +// checking whether a message is a map. +// +// During the check whether a field is a "map", the string "entry" is added to +// the field name, checked against the type name and then further checks are +// made against the nested type of a parent message. In this edge-case, the +// first check would pass even though it shouldn't and that would cause an +// error because the parent type does not have a "nested_type" attribute. + +message Test { + repeated ExportEntry export = 1; +} + +message ExportEntry { + string name = 1; +} diff --git a/tests/inputs/enum/enum.proto b/tests/inputs/enum/enum.proto index a2dfe437e..5e2e80c1f 100644 --- a/tests/inputs/enum/enum.proto +++ b/tests/inputs/enum/enum.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package enum; + // Tests that enums are correctly serialized and that it correctly handles skipped and out-of-order enum values message Test { Choice choice = 1; @@ -13,3 +15,11 @@ enum Choice { FOUR = 4; THREE = 3; } + +// A "C" like enum with the enum name prefixed onto members, these should be stripped +enum ArithmeticOperator { + ARITHMETIC_OPERATOR_NONE = 0; + ARITHMETIC_OPERATOR_PLUS = 1; + ARITHMETIC_OPERATOR_MINUS = 2; + ARITHMETIC_OPERATOR_0_PREFIXED = 3; +} diff --git a/tests/inputs/enum/test_enum.py b/tests/inputs/enum/test_enum.py index 3005c43a8..0663beac4 100644 --- a/tests/inputs/enum/test_enum.py +++ b/tests/inputs/enum/test_enum.py @@ -1,6 +1,7 @@ from tests.output_betterproto.enum import ( - Test, + ArithmeticOperator, Choice, + Test, ) @@ -26,9 +27,9 @@ def test_enum_is_comparable_with_int(): def test_enum_to_dict(): - assert ( - "choice" not in Test(choice=Choice.ZERO).to_dict() - ), "Default enum value is not serialized" + assert "choice" not in Test(choice=Choice.ZERO).to_dict(), ( + "Default enum value 
is not serialized" + ) assert ( Test(choice=Choice.ZERO).to_dict(include_default_values=True)["choice"] == "ZERO" @@ -82,3 +83,32 @@ def enum_generator(): yield Choice.THREE assert Test(choices=enum_generator()).to_dict()["choices"] == ["ONE", "THREE"] + + +def test_enum_mapped_on_parse(): + # test default value + b = Test().parse(bytes(Test())) + assert b.choice.name == Choice.ZERO.name + assert b.choices == [] + + # test non default value + a = Test().parse(bytes(Test(choice=Choice.ONE))) + assert a.choice.name == Choice.ONE.name + assert b.choices == [] + + # test repeated + c = Test().parse(bytes(Test(choices=[Choice.THREE, Choice.FOUR]))) + assert c.choices[0].name == Choice.THREE.name + assert c.choices[1].name == Choice.FOUR.name + + # bonus: defaults after empty init are also mapped + assert Test().choice.name == Choice.ZERO.name + + +def test_renamed_enum_members(): + assert set(ArithmeticOperator.__members__) == { + "NONE", + "PLUS", + "MINUS", + "_0_PREFIXED", + } diff --git a/tests/inputs/example/example.proto b/tests/inputs/example/example.proto index 311105888..56bd36470 100644 --- a/tests/inputs/example/example.proto +++ b/tests/inputs/example/example.proto @@ -39,6 +39,8 @@ syntax = "proto2"; +package example; + // package google.protobuf; option go_package = "google.golang.org/protobuf/types/descriptorpb"; diff --git a/tests/inputs/example_service/test_example_service.py b/tests/inputs/example_service/test_example_service.py index 12d646b14..23b2e3b4c 100644 --- a/tests/inputs/example_service/test_example_service.py +++ b/tests/inputs/example_service/test_example_service.py @@ -1,49 +1,52 @@ -from typing import AsyncIterator, AsyncIterable +from typing import ( + AsyncIterable, + AsyncIterator, +) import pytest from grpclib.testing import ChannelFor -from tests.output_betterproto.example_service.example_service import ( - TestBase, - TestStub, +from tests.output_betterproto.example_service import ( ExampleRequest, ExampleResponse, + TestBase, + 
TestStub, ) class ExampleService(TestBase): async def example_unary_unary( - self, example_string: str, example_integer: int + self, example_request: ExampleRequest ) -> "ExampleResponse": return ExampleResponse( - example_string=example_string, - example_integer=example_integer, + example_string=example_request.example_string, + example_integer=example_request.example_integer, ) async def example_unary_stream( - self, example_string: str, example_integer: int + self, example_request: ExampleRequest ) -> AsyncIterator["ExampleResponse"]: response = ExampleResponse( - example_string=example_string, - example_integer=example_integer, + example_string=example_request.example_string, + example_integer=example_request.example_integer, ) yield response yield response yield response async def example_stream_unary( - self, request_iterator: AsyncIterator["ExampleRequest"] + self, example_request_iterator: AsyncIterator["ExampleRequest"] ) -> "ExampleResponse": - async for example_request in request_iterator: + async for example_request in example_request_iterator: return ExampleResponse( example_string=example_request.example_string, example_integer=example_request.example_integer, ) async def example_stream_stream( - self, request_iterator: AsyncIterator["ExampleRequest"] + self, example_request_iterator: AsyncIterator["ExampleRequest"] ) -> AsyncIterator["ExampleResponse"]: - async for example_request in request_iterator: + async for example_request in example_request_iterator: yield ExampleResponse( example_string=example_request.example_string, example_integer=example_request.example_integer, @@ -52,44 +55,32 @@ async def example_stream_stream( @pytest.mark.asyncio async def test_calls_with_different_cardinalities(): - test_string = "test string" - test_int = 42 + example_request = ExampleRequest("test string", 42) async with ChannelFor([ExampleService()]) as channel: stub = TestStub(channel) # unary unary - response = await stub.example_unary_unary( - 
example_string="test string", - example_integer=42, - ) - assert response.example_string == test_string - assert response.example_integer == test_int + response = await stub.example_unary_unary(example_request) + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer # unary stream - async for response in stub.example_unary_stream( - example_string="test string", - example_integer=42, - ): - assert response.example_string == test_string - assert response.example_integer == test_int + async for response in stub.example_unary_stream(example_request): + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer # stream unary - request = ExampleRequest( - example_string=test_string, - example_integer=42, - ) - async def request_iterator(): - yield request - yield request - yield request + yield example_request + yield example_request + yield example_request response = await stub.example_stream_unary(request_iterator()) - assert response.example_string == test_string - assert response.example_integer == test_int + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer # stream stream async for response in stub.example_stream_stream(request_iterator()): - assert response.example_string == test_string - assert response.example_integer == test_int + assert response.example_string == example_request.example_string + assert response.example_integer == example_request.example_integer diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json new file mode 100644 index 000000000..7a6e7ae85 --- /dev/null +++ b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.json @@ -0,0 +1,7 @@ +{ + "int": 26, + "float": 26.0, + "str": 
"value-for-str", + "bytes": "001a", + "bool": true +} \ No newline at end of file diff --git a/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto new file mode 100644 index 000000000..81a0fc43d --- /dev/null +++ b/tests/inputs/field_name_identical_to_type/field_name_identical_to_type.proto @@ -0,0 +1,13 @@ +syntax = "proto3"; + +package field_name_identical_to_type; + +// Tests that messages may contain fields with names that are identical to their python types (PR #294) + +message Test { + int32 int = 1; + float float = 2; + string str = 3; + bytes bytes = 4; + bool bool = 5; +} \ No newline at end of file diff --git a/tests/inputs/fixed/fixed.proto b/tests/inputs/fixed/fixed.proto index 50dad84d6..0f0ffb4ef 100644 --- a/tests/inputs/fixed/fixed.proto +++ b/tests/inputs/fixed/fixed.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package fixed; + message Test { fixed32 foo = 1; sfixed32 bar = 2; diff --git a/tests/inputs/float/float.proto b/tests/inputs/float/float.proto index 79922af27..fea12b3dc 100644 --- a/tests/inputs/float/float.proto +++ b/tests/inputs/float/float.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package float; + // Some documentation about the Test message. 
message Test { double positive = 1; diff --git a/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto b/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto index 31b6bd3e8..66ef8a645 100644 --- a/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto +++ b/tests/inputs/google_impl_behavior_equivalence/google_impl_behavior_equivalence.proto @@ -1,13 +1,22 @@ syntax = "proto3"; -message Foo{ - int64 bar = 1; -} +import "google/protobuf/timestamp.proto"; +package google_impl_behavior_equivalence; + +message Foo { int64 bar = 1; } -message Test{ - oneof group{ +message Test { + oneof group { string string = 1; int64 integer = 2; Foo foo = 3; } } + +message Spam { + google.protobuf.Timestamp ts = 1; +} + +message Request { Empty foo = 1; } + +message Empty {} diff --git a/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py b/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py index abe5d66d1..6d2991bdd 100644 --- a/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py +++ b/tests/inputs/google_impl_behavior_equivalence/test_google_impl_behavior_equivalence.py @@ -1,19 +1,30 @@ -import pytest +from datetime import ( + datetime, + timezone, +) +import pytest from google.protobuf import json_format +from google.protobuf.timestamp_pb2 import Timestamp + import betterproto from tests.output_betterproto.google_impl_behavior_equivalence import ( - Test, + Empty, Foo, + Request, + Spam, + Test, ) from tests.output_reference.google_impl_behavior_equivalence.google_impl_behavior_equivalence_pb2 import ( - Test as ReferenceTest, + Empty as ReferenceEmpty, Foo as ReferenceFoo, + Request as ReferenceRequest, + Spam as ReferenceSpam, + Test as ReferenceTest, ) def test_oneof_serializes_similar_to_google_oneof(): - tests = [ (Test(string="abc"), ReferenceTest(string="abc")), 
(Test(integer=2), ReferenceTest(integer=2)), @@ -30,7 +41,6 @@ def test_oneof_serializes_similar_to_google_oneof(): def test_bytes_are_the_same_for_oneof(): - message = Test(string="") message_reference = ReferenceTest(string="") @@ -48,8 +58,36 @@ def test_bytes_are_the_same_for_oneof(): # None of these fields were explicitly set BUT they should not actually be null # themselves - assert isinstance(message.foo, Foo) - assert isinstance(message2.foo, Foo) + assert not hasattr(message, "foo") + assert object.__getattribute__(message, "foo") == betterproto.PLACEHOLDER + assert not hasattr(message2, "foo") + assert object.__getattribute__(message2, "foo") == betterproto.PLACEHOLDER assert isinstance(message_reference.foo, ReferenceFoo) assert isinstance(message_reference2.foo, ReferenceFoo) + + +@pytest.mark.parametrize("dt", (datetime.min.replace(tzinfo=timezone.utc),)) +def test_datetime_clamping(dt): # see #407 + ts = Timestamp() + ts.FromDatetime(dt) + assert bytes(Spam(dt)) == ReferenceSpam(ts=ts).SerializeToString() + message_bytes = bytes(Spam(dt)) + + assert ( + Spam().parse(message_bytes).ts.timestamp() + == ReferenceSpam.FromString(message_bytes).ts.seconds + ) + + +def test_empty_message_field(): + message = Request() + reference_message = ReferenceRequest() + + message.foo = Empty() + reference_message.foo.CopyFrom(ReferenceEmpty()) + + assert betterproto.serialized_on_wire(message.foo) + assert reference_message.HasField("foo") + + assert bytes(message) == reference_message.SerializeToString() diff --git a/tests/inputs/googletypes/googletypes.proto b/tests/inputs/googletypes/googletypes.proto index ba3db12b2..ef8cb4a1e 100644 --- a/tests/inputs/googletypes/googletypes.proto +++ b/tests/inputs/googletypes/googletypes.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes; + import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; diff --git 
a/tests/inputs/googletypes_request/googletypes_request.proto b/tests/inputs/googletypes_request/googletypes_request.proto new file mode 100644 index 000000000..1cedcaafc --- /dev/null +++ b/tests/inputs/googletypes_request/googletypes_request.proto @@ -0,0 +1,29 @@ +syntax = "proto3"; + +package googletypes_request; + +import "google/protobuf/duration.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/timestamp.proto"; +import "google/protobuf/wrappers.proto"; + +// Tests that google types can be used as params + +service Test { + rpc SendDouble (google.protobuf.DoubleValue) returns (Input); + rpc SendFloat (google.protobuf.FloatValue) returns (Input); + rpc SendInt64 (google.protobuf.Int64Value) returns (Input); + rpc SendUInt64 (google.protobuf.UInt64Value) returns (Input); + rpc SendInt32 (google.protobuf.Int32Value) returns (Input); + rpc SendUInt32 (google.protobuf.UInt32Value) returns (Input); + rpc SendBool (google.protobuf.BoolValue) returns (Input); + rpc SendString (google.protobuf.StringValue) returns (Input); + rpc SendBytes (google.protobuf.BytesValue) returns (Input); + rpc SendDatetime (google.protobuf.Timestamp) returns (Input); + rpc SendTimedelta (google.protobuf.Duration) returns (Input); + rpc SendEmpty (google.protobuf.Empty) returns (Input); +} + +message Input { + +} diff --git a/tests/inputs/googletypes_request/test_googletypes_request.py b/tests/inputs/googletypes_request/test_googletypes_request.py new file mode 100644 index 000000000..ffb2608f1 --- /dev/null +++ b/tests/inputs/googletypes_request/test_googletypes_request.py @@ -0,0 +1,47 @@ +from datetime import ( + datetime, + timedelta, +) +from typing import ( + Any, + Callable, +) + +import pytest + +import betterproto.lib.google.protobuf as protobuf +from tests.mocks import MockChannel +from tests.output_betterproto.googletypes_request import ( + Input, + TestStub, +) + + +test_cases = [ + (TestStub.send_double, protobuf.DoubleValue, 2.5), + (TestStub.send_float, 
protobuf.FloatValue, 2.5), + (TestStub.send_int64, protobuf.Int64Value, -64), + (TestStub.send_u_int64, protobuf.UInt64Value, 64), + (TestStub.send_int32, protobuf.Int32Value, -32), + (TestStub.send_u_int32, protobuf.UInt32Value, 32), + (TestStub.send_bool, protobuf.BoolValue, True), + (TestStub.send_string, protobuf.StringValue, "string"), + (TestStub.send_bytes, protobuf.BytesValue, bytes(0xFF)[0:4]), + (TestStub.send_datetime, protobuf.Timestamp, datetime(2038, 1, 19, 3, 14, 8)), + (TestStub.send_timedelta, protobuf.Duration, timedelta(seconds=123456)), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) +async def test_channel_receives_wrapped_type( + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value +): + wrapped_value = wrapper_class() + wrapped_value.value = value + channel = MockChannel(responses=[Input()]) + service = TestStub(channel) + + await service_method(service, wrapped_value) + + assert channel.requests[0]["request"] == type(wrapped_value) diff --git a/tests/inputs/googletypes_response/googletypes_response.proto b/tests/inputs/googletypes_response/googletypes_response.proto index 9b0be5ccc..8917d1c72 100644 --- a/tests/inputs/googletypes_response/googletypes_response.proto +++ b/tests/inputs/googletypes_response/googletypes_response.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_response; + import "google/protobuf/wrappers.proto"; // Tests that wrapped values can be used directly as return values diff --git a/tests/inputs/googletypes_response/test_googletypes_response.py b/tests/inputs/googletypes_response/test_googletypes_response.py index 2d4d62fa6..6e1ed29c3 100644 --- a/tests/inputs/googletypes_response/test_googletypes_response.py +++ b/tests/inputs/googletypes_response/test_googletypes_response.py @@ -1,10 +1,18 @@ -from typing import Any, Callable, Optional +from typing import ( + Any, + Callable, + Optional, +) -import 
betterproto.lib.google.protobuf as protobuf import pytest +import betterproto.lib.google.protobuf as protobuf from tests.mocks import MockChannel -from tests.output_betterproto.googletypes_response import TestStub +from tests.output_betterproto.googletypes_response import ( + Input, + TestStub, +) + test_cases = [ (TestStub.get_double, protobuf.DoubleValue, 2.5), @@ -22,14 +30,15 @@ @pytest.mark.asyncio @pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) async def test_channel_receives_wrapped_type( - service_method: Callable[[TestStub], Any], wrapper_class: Callable, value + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value ): wrapped_value = wrapper_class() wrapped_value.value = value channel = MockChannel(responses=[wrapped_value]) service = TestStub(channel) + method_param = Input() - await service_method(service) + await service_method(service, method_param) assert channel.requests[0]["response_type"] != Optional[type(value)] assert channel.requests[0]["response_type"] == type(wrapped_value) @@ -39,7 +48,7 @@ async def test_channel_receives_wrapped_type( @pytest.mark.xfail @pytest.mark.parametrize(["service_method", "wrapper_class", "value"], test_cases) async def test_service_unwraps_response( - service_method: Callable[[TestStub], Any], wrapper_class: Callable, value + service_method: Callable[[TestStub, Input], Any], wrapper_class: Callable, value ): """ grpclib does not unwrap wrapper values returned by services @@ -47,8 +56,9 @@ async def test_service_unwraps_response( wrapped_value = wrapper_class() wrapped_value.value = value service = TestStub(MockChannel(responses=[wrapped_value])) + method_param = Input() - response_value = await service_method(service) + response_value = await service_method(service, method_param) assert response_value == value assert type(response_value) == type(value) diff --git a/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto 
b/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto index 89ae4cce3..47284e3aa 100644 --- a/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto +++ b/tests/inputs/googletypes_response_embedded/googletypes_response_embedded.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_response_embedded; + import "google/protobuf/wrappers.proto"; // Tests that wrapped values are supported as part of output message diff --git a/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py b/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py index 6a1760f80..57ebce1bd 100644 --- a/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py +++ b/tests/inputs/googletypes_response_embedded/test_googletypes_response_embedded.py @@ -2,6 +2,7 @@ from tests.mocks import MockChannel from tests.output_betterproto.googletypes_response_embedded import ( + Input, Output, TestStub, ) @@ -26,7 +27,7 @@ async def test_service_passes_through_unwrapped_values_embedded_in_response(): ) service = TestStub(MockChannel(responses=[output])) - response = await service.get_output() + response = await service.get_output(Input()) assert response.double_value == 10.0 assert response.float_value == 12.0 diff --git a/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto b/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto index c45469141..2153ad5e9 100644 --- a/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto +++ b/tests/inputs/googletypes_service_returns_empty/googletypes_service_returns_empty.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_service_returns_empty; + import "google/protobuf/empty.proto"; service Test { diff --git a/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto 
b/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto index 49b2a55d7..457707b73 100644 --- a/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto +++ b/tests/inputs/googletypes_service_returns_googletype/googletypes_service_returns_googletype.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_service_returns_googletype; + import "google/protobuf/empty.proto"; import "google/protobuf/struct.proto"; diff --git a/tests/inputs/googletypes_struct/googletypes_struct.proto b/tests/inputs/googletypes_struct/googletypes_struct.proto index 1dbd64a71..2b8b5c55d 100644 --- a/tests/inputs/googletypes_struct/googletypes_struct.proto +++ b/tests/inputs/googletypes_struct/googletypes_struct.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_struct; + import "google/protobuf/struct.proto"; message Test { diff --git a/tests/inputs/googletypes_value/googletypes_value.proto b/tests/inputs/googletypes_value/googletypes_value.proto index 379d33641..d5089d5ef 100644 --- a/tests/inputs/googletypes_value/googletypes_value.proto +++ b/tests/inputs/googletypes_value/googletypes_value.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package googletypes_value; + import "google/protobuf/struct.proto"; // Tests that fields of type google.protobuf.Value can contain arbitrary JSON-values. 
diff --git a/tests/inputs/import_capitalized_package/capitalized.proto b/tests/inputs/import_capitalized_package/capitalized.proto index 0b73babdf..e80c95cd7 100644 --- a/tests/inputs/import_capitalized_package/capitalized.proto +++ b/tests/inputs/import_capitalized_package/capitalized.proto @@ -1,7 +1,7 @@ syntax = "proto3"; -package Capitalized; +package import_capitalized_package.Capitalized; message Message { diff --git a/tests/inputs/import_capitalized_package/test.proto b/tests/inputs/import_capitalized_package/test.proto index f94bbc9c2..38c9b2d73 100644 --- a/tests/inputs/import_capitalized_package/test.proto +++ b/tests/inputs/import_capitalized_package/test.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_capitalized_package; + import "capitalized.proto"; // Tests that we can import from a package with a capital name, that looks like a nested type, but isn't. diff --git a/tests/inputs/import_child_package_from_package/child.proto b/tests/inputs/import_child_package_from_package/child.proto index 0865fc8cd..d99c7c315 100644 --- a/tests/inputs/import_child_package_from_package/child.proto +++ b/tests/inputs/import_child_package_from_package/child.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package package.childpackage; +package import_child_package_from_package.package.childpackage; message ChildMessage { diff --git a/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto b/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto index 0d0913267..66e0aa819 100644 --- a/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto +++ b/tests/inputs/import_child_package_from_package/import_child_package_from_package.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_child_package_from_package; + import "package_message.proto"; // Tests generated imports when a message in a package refers to a message in a nested child package. 
diff --git a/tests/inputs/import_child_package_from_package/package_message.proto b/tests/inputs/import_child_package_from_package/package_message.proto index 943282ce9..79d66f373 100644 --- a/tests/inputs/import_child_package_from_package/package_message.proto +++ b/tests/inputs/import_child_package_from_package/package_message.proto @@ -2,7 +2,7 @@ syntax = "proto3"; import "child.proto"; -package package; +package import_child_package_from_package.package; message PackageMessage { package.childpackage.ChildMessage c = 1; diff --git a/tests/inputs/import_child_package_from_root/child.proto b/tests/inputs/import_child_package_from_root/child.proto index c874e14bd..2a46d5f52 100644 --- a/tests/inputs/import_child_package_from_root/child.proto +++ b/tests/inputs/import_child_package_from_root/child.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package childpackage; +package import_child_package_from_root.childpackage; message Message { diff --git a/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto b/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto index d0c111f2c..62998310c 100644 --- a/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto +++ b/tests/inputs/import_child_package_from_root/import_child_package_from_root.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_child_package_from_root; + import "child.proto"; // Tests generated imports when a message in root refers to a message in a child package. 
diff --git a/tests/inputs/import_circular_dependency/import_circular_dependency.proto b/tests/inputs/import_circular_dependency/import_circular_dependency.proto index 7d02aad21..4441be9ff 100644 --- a/tests/inputs/import_circular_dependency/import_circular_dependency.proto +++ b/tests/inputs/import_circular_dependency/import_circular_dependency.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_circular_dependency; + import "root.proto"; import "other.proto"; @@ -24,5 +26,5 @@ import "other.proto"; // (root: Test & RootPackageMessage) <-------> (other: OtherPackageMessage) message Test { RootPackageMessage message = 1; - other.OtherPackageMessage other = 2; + other.OtherPackageMessage other_value = 2; } diff --git a/tests/inputs/import_circular_dependency/other.proto b/tests/inputs/import_circular_dependency/other.proto index 2b936a9b1..833b86992 100644 --- a/tests/inputs/import_circular_dependency/other.proto +++ b/tests/inputs/import_circular_dependency/other.proto @@ -1,7 +1,7 @@ syntax = "proto3"; import "root.proto"; -package other; +package import_circular_dependency.other; message OtherPackageMessage { RootPackageMessage rootPackageMessage = 1; diff --git a/tests/inputs/import_circular_dependency/root.proto b/tests/inputs/import_circular_dependency/root.proto index 63d15bf6c..738394777 100644 --- a/tests/inputs/import_circular_dependency/root.proto +++ b/tests/inputs/import_circular_dependency/root.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_circular_dependency; + message RootPackageMessage { } diff --git a/tests/inputs/import_cousin_package/cousin.proto b/tests/inputs/import_cousin_package/cousin.proto index 4361545a1..2870dfe9c 100644 --- a/tests/inputs/import_cousin_package/cousin.proto +++ b/tests/inputs/import_cousin_package/cousin.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package cousin.cousin_subpackage; +package import_cousin_package.cousin.cousin_subpackage; message CousinMessage { } diff --git 
a/tests/inputs/import_cousin_package/test.proto b/tests/inputs/import_cousin_package/test.proto index 53f3b7fc0..89ec3d84a 100644 --- a/tests/inputs/import_cousin_package/test.proto +++ b/tests/inputs/import_cousin_package/test.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package test.subpackage; +package import_cousin_package.test.subpackage; import "cousin.proto"; diff --git a/tests/inputs/import_cousin_package_same_name/cousin.proto b/tests/inputs/import_cousin_package_same_name/cousin.proto index 9253b9565..84b6a407b 100644 --- a/tests/inputs/import_cousin_package_same_name/cousin.proto +++ b/tests/inputs/import_cousin_package_same_name/cousin.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package cousin.subpackage; +package import_cousin_package_same_name.cousin.subpackage; message CousinMessage { } diff --git a/tests/inputs/import_cousin_package_same_name/test.proto b/tests/inputs/import_cousin_package_same_name/test.proto index fe31b5f2d..7b420d309 100644 --- a/tests/inputs/import_cousin_package_same_name/test.proto +++ b/tests/inputs/import_cousin_package_same_name/test.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package test.subpackage; +package import_cousin_package_same_name.test.subpackage; import "cousin.proto"; diff --git a/tests/inputs/import_packages_same_name/import_packages_same_name.proto b/tests/inputs/import_packages_same_name/import_packages_same_name.proto index c1f28e91c..dff7efeda 100644 --- a/tests/inputs/import_packages_same_name/import_packages_same_name.proto +++ b/tests/inputs/import_packages_same_name/import_packages_same_name.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_packages_same_name; + import "users_v1.proto"; import "posts_v1.proto"; diff --git a/tests/inputs/import_packages_same_name/posts_v1.proto b/tests/inputs/import_packages_same_name/posts_v1.proto index 506bf11e7..d3b9b1ca4 100644 --- a/tests/inputs/import_packages_same_name/posts_v1.proto +++ b/tests/inputs/import_packages_same_name/posts_v1.proto @@ -1,6 +1,6 @@ 
syntax = "proto3"; -package posts.v1; +package import_packages_same_name.posts.v1; message Post { diff --git a/tests/inputs/import_packages_same_name/users_v1.proto b/tests/inputs/import_packages_same_name/users_v1.proto index 6e46ba8ed..d3a17e920 100644 --- a/tests/inputs/import_packages_same_name/users_v1.proto +++ b/tests/inputs/import_packages_same_name/users_v1.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package users.v1; +package import_packages_same_name.users.v1; message User { diff --git a/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto b/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto index c43c1bc2d..edc473626 100644 --- a/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto +++ b/tests/inputs/import_parent_package_from_child/import_parent_package_from_child.proto @@ -2,7 +2,7 @@ syntax = "proto3"; import "parent_package_message.proto"; -package parent.child; +package import_parent_package_from_child.parent.child; // Tests generated imports when a message refers to a message defined in its parent package diff --git a/tests/inputs/import_parent_package_from_child/parent_package_message.proto b/tests/inputs/import_parent_package_from_child/parent_package_message.proto index cea306602..fb3fd31dd 100644 --- a/tests/inputs/import_parent_package_from_child/parent_package_message.proto +++ b/tests/inputs/import_parent_package_from_child/parent_package_message.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package parent; +package import_parent_package_from_child.parent; message ParentPackageMessage { } diff --git a/tests/inputs/import_root_package_from_child/child.proto b/tests/inputs/import_root_package_from_child/child.proto index d2b29cc31..bd5196772 100644 --- a/tests/inputs/import_root_package_from_child/child.proto +++ b/tests/inputs/import_root_package_from_child/child.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package child; +package 
import_root_package_from_child.child; import "root.proto"; diff --git a/tests/inputs/import_root_package_from_child/root.proto b/tests/inputs/import_root_package_from_child/root.proto index 650b29b54..6ae955ad6 100644 --- a/tests/inputs/import_root_package_from_child/root.proto +++ b/tests/inputs/import_root_package_from_child/root.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_root_package_from_child; + message RootMessage { } diff --git a/tests/inputs/import_root_sibling/import_root_sibling.proto b/tests/inputs/import_root_sibling/import_root_sibling.proto index 1d671b8a5..759e606f7 100644 --- a/tests/inputs/import_root_sibling/import_root_sibling.proto +++ b/tests/inputs/import_root_sibling/import_root_sibling.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_root_sibling; + import "sibling.proto"; // Tests generated imports when a message in the root package refers to another message in the root package diff --git a/tests/inputs/import_root_sibling/sibling.proto b/tests/inputs/import_root_sibling/sibling.proto index 870bafffc..6b6ba2eaf 100644 --- a/tests/inputs/import_root_sibling/sibling.proto +++ b/tests/inputs/import_root_sibling/sibling.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_root_sibling; + message SiblingMessage { } diff --git a/tests/inputs/import_service_input_message/child_package_request_message.proto b/tests/inputs/import_service_input_message/child_package_request_message.proto index 6380db24c..54fc1123f 100644 --- a/tests/inputs/import_service_input_message/child_package_request_message.proto +++ b/tests/inputs/import_service_input_message/child_package_request_message.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package child; +package import_service_input_message.child; message ChildRequestMessage { int32 child_argument = 1; diff --git a/tests/inputs/import_service_input_message/import_service_input_message.proto b/tests/inputs/import_service_input_message/import_service_input_message.proto index 
7ca9c46f0..cbf48fa9a 100644 --- a/tests/inputs/import_service_input_message/import_service_input_message.proto +++ b/tests/inputs/import_service_input_message/import_service_input_message.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_service_input_message; + import "request_message.proto"; import "child_package_request_message.proto"; diff --git a/tests/inputs/import_service_input_message/request_message.proto b/tests/inputs/import_service_input_message/request_message.proto index 5bfceed68..36a6e788e 100644 --- a/tests/inputs/import_service_input_message/request_message.proto +++ b/tests/inputs/import_service_input_message/request_message.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package import_service_input_message; + message RequestMessage { int32 argument = 1; } \ No newline at end of file diff --git a/tests/inputs/import_service_input_message/test_import_service_input_message.py b/tests/inputs/import_service_input_message/test_import_service_input_message.py index 23d215840..60c3e2024 100644 --- a/tests/inputs/import_service_input_message/test_import_service_input_message.py +++ b/tests/inputs/import_service_input_message/test_import_service_input_message.py @@ -2,16 +2,21 @@ from tests.mocks import MockChannel from tests.output_betterproto.import_service_input_message import ( + NestedRequestMessage, + RequestMessage, RequestResponse, TestStub, ) +from tests.output_betterproto.import_service_input_message.child import ( + ChildRequestMessage, +) @pytest.mark.asyncio async def test_service_correctly_imports_reference_message(): mock_response = RequestResponse(value=10) service = TestStub(MockChannel([mock_response])) - response = await service.do_thing(argument=1) + response = await service.do_thing(RequestMessage(1)) assert mock_response == response @@ -19,7 +24,7 @@ async def test_service_correctly_imports_reference_message(): async def test_service_correctly_imports_reference_message_from_child_package(): mock_response = RequestResponse(value=10) 
service = TestStub(MockChannel([mock_response])) - response = await service.do_thing2(child_argument=1) + response = await service.do_thing2(ChildRequestMessage(1)) assert mock_response == response @@ -27,5 +32,5 @@ async def test_service_correctly_imports_reference_message_from_child_package(): async def test_service_correctly_imports_nested_reference(): mock_response = RequestResponse(value=10) service = TestStub(MockChannel([mock_response])) - response = await service.do_thing3(nested_argument=1) + response = await service.do_thing3(NestedRequestMessage(1)) assert mock_response == response diff --git a/tests/inputs/int32/int32.proto b/tests/inputs/int32/int32.proto index cae0dc77f..4721c2352 100644 --- a/tests/inputs/int32/int32.proto +++ b/tests/inputs/int32/int32.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package int32; + // Some documentation about the Test message. message Test { // Some documentation about the count. diff --git a/tests/inputs/invalid_field/invalid_field.proto b/tests/inputs/invalid_field/invalid_field.proto new file mode 100644 index 000000000..e3a73ce16 --- /dev/null +++ b/tests/inputs/invalid_field/invalid_field.proto @@ -0,0 +1,7 @@ +syntax = "proto3"; + +package invalid_field; + +message Test { + int32 x = 1; +} diff --git a/tests/inputs/invalid_field/test_invalid_field.py b/tests/inputs/invalid_field/test_invalid_field.py new file mode 100644 index 000000000..947b8e13d --- /dev/null +++ b/tests/inputs/invalid_field/test_invalid_field.py @@ -0,0 +1,17 @@ +import pytest + + +def test_invalid_field(): + from tests.output_betterproto.invalid_field import Test + + with pytest.raises(TypeError): + Test(unknown_field=12) + + +def test_invalid_field_pydantic(): + from pydantic import ValidationError + + from tests.output_betterproto_pydantic.invalid_field import Test + + with pytest.raises(ValidationError): + Test(unknown_field=12) diff --git a/tests/inputs/map/map.proto b/tests/inputs/map/map.proto index 669e28756..ecef3ccb3 100644 --- 
a/tests/inputs/map/map.proto +++ b/tests/inputs/map/map.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package map; + message Test { map counts = 1; } diff --git a/tests/inputs/mapmessage/mapmessage.proto b/tests/inputs/mapmessage/mapmessage.proto index 07dcce524..2c704a49e 100644 --- a/tests/inputs/mapmessage/mapmessage.proto +++ b/tests/inputs/mapmessage/mapmessage.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package mapmessage; + message Test { map items = 1; } diff --git a/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto b/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto index 636bb5518..71cb02987 100644 --- a/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto +++ b/tests/inputs/namespace_builtin_types/namespace_builtin_types.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package namespace_builtin_types; + // Tests that messages may contain fields with names that are python types message Test { diff --git a/tests/inputs/namespace_keywords/namespace_keywords.proto b/tests/inputs/namespace_keywords/namespace_keywords.proto index 6d1a7c5fd..ac3e5c52c 100644 --- a/tests/inputs/namespace_keywords/namespace_keywords.proto +++ b/tests/inputs/namespace_keywords/namespace_keywords.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package namespace_keywords; + // Tests that messages may contain fields that are Python keywords // // Generated with Python 3.7.6 diff --git a/tests/inputs/nested/nested.json b/tests/inputs/nested/nested.json index f34f1d70c..f460cadb6 100644 --- a/tests/inputs/nested/nested.json +++ b/tests/inputs/nested/nested.json @@ -2,5 +2,6 @@ "nested": { "count": 150 }, - "sibling": {} + "sibling": {}, + "msg": "THIS" } diff --git a/tests/inputs/nested/nested.proto b/tests/inputs/nested/nested.proto index 98bafd9a9..619c721c7 100644 --- a/tests/inputs/nested/nested.proto +++ b/tests/inputs/nested/nested.proto @@ -1,16 +1,24 @@ syntax = "proto3"; +package nested; + // A test message with a nested message inside of it. 
message Test { // This is the nested type. message Nested { - // Stores a simple counter. - int32 count = 1; + // Stores a simple counter. + int32 count = 1; + } + // This is the nested enum. + enum Msg { + NONE = 0; + THIS = 1; } Nested nested = 1; Sibling sibling = 2; Sibling sibling2 = 3; + Msg msg = 4; } message Sibling { diff --git a/tests/inputs/nested2/nested2.proto b/tests/inputs/nested2/nested2.proto index 3e39918a1..cd6510c58 100644 --- a/tests/inputs/nested2/nested2.proto +++ b/tests/inputs/nested2/nested2.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package nested2; + import "package.proto"; message Game { diff --git a/tests/inputs/nested2/package.proto b/tests/inputs/nested2/package.proto index 44662564f..e12abb129 100644 --- a/tests/inputs/nested2/package.proto +++ b/tests/inputs/nested2/package.proto @@ -1,6 +1,6 @@ syntax = "proto3"; -package equipment; +package nested2.equipment; message Weapon { diff --git a/tests/inputs/nestedtwice/nestedtwice.proto b/tests/inputs/nestedtwice/nestedtwice.proto index 7e9c20690..84d142a33 100644 --- a/tests/inputs/nestedtwice/nestedtwice.proto +++ b/tests/inputs/nestedtwice/nestedtwice.proto @@ -1,26 +1,40 @@ syntax = "proto3"; +package nestedtwice; + +/* Test doc. */ message Test { + /* Top doc. */ message Top { + /* Middle doc. */ message Middle { + /* TopMiddleBottom doc.*/ message TopMiddleBottom { + // TopMiddleBottom.a doc. string a = 1; } + /* EnumBottom doc. */ enum EnumBottom{ + /* EnumBottom.A doc. */ A = 0; B = 1; } + /* Bottom doc. */ message Bottom { + /* Bottom.foo doc. */ string foo = 1; } reserved 1; + /* Middle.bottom doc. */ repeated Bottom bottom = 2; repeated EnumBottom enumBottom=3; repeated TopMiddleBottom topMiddleBottom=4; bool bar = 5; } + /* Top.name doc. */ string name = 1; Middle middle = 2; } + /* Test.top doc. 
*/ Top top = 1; } diff --git a/tests/inputs/nestedtwice/test_nestedtwice.py b/tests/inputs/nestedtwice/test_nestedtwice.py new file mode 100644 index 000000000..606467c22 --- /dev/null +++ b/tests/inputs/nestedtwice/test_nestedtwice.py @@ -0,0 +1,25 @@ +import pytest + +from tests.output_betterproto.nestedtwice import ( + Test, + TestTop, + TestTopMiddle, + TestTopMiddleBottom, + TestTopMiddleEnumBottom, + TestTopMiddleTopMiddleBottom, +) + + +@pytest.mark.parametrize( + ("cls", "expected_comment"), + [ + (Test, "Test doc."), + (TestTopMiddleEnumBottom, "EnumBottom doc."), + (TestTop, "Top doc."), + (TestTopMiddle, "Middle doc."), + (TestTopMiddleTopMiddleBottom, "TopMiddleBottom doc."), + (TestTopMiddleBottom, "Bottom doc."), + ], +) +def test_comment(cls, expected_comment): + assert cls.__doc__ == expected_comment diff --git a/tests/inputs/oneof/oneof.proto b/tests/inputs/oneof/oneof.proto index 1f9c4b279..41f93b0e0 100644 --- a/tests/inputs/oneof/oneof.proto +++ b/tests/inputs/oneof/oneof.proto @@ -1,5 +1,11 @@ syntax = "proto3"; +package oneof; + +message MixedDrink { + int32 shots = 1; +} + message Test { oneof foo { int32 pitied = 1; @@ -11,6 +17,7 @@ message Test { oneof bar { int32 drinks = 11; string bar_name = 12; + MixedDrink mixed_drink = 13; } } diff --git a/tests/inputs/oneof/test_oneof.py b/tests/inputs/oneof/test_oneof.py index d1267659f..b7d2d94af 100644 --- a/tests/inputs/oneof/test_oneof.py +++ b/tests/inputs/oneof/test_oneof.py @@ -1,5 +1,11 @@ +import pytest + import betterproto -from tests.output_betterproto.oneof import Test +from tests.output_betterproto.oneof import ( + MixedDrink, + Test, +) +from tests.output_betterproto_pydantic.oneof import Test as TestPyd from tests.util import get_test_case_json_data @@ -13,3 +19,25 @@ def test_which_name(): message = Test() message.from_json(get_test_case_json_data("oneof", "oneof_name.json")[0].json) assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. 
T") + + +def test_which_count_pyd(): + message = TestPyd(pitier="Mr. T", just_a_regular_field=2, bar_name="a_bar") + assert betterproto.which_one_of(message, "foo") == ("pitier", "Mr. T") + + +def test_oneof_constructor_assign(): + message = Test(mixed_drink=MixedDrink(shots=42)) + field, value = betterproto.which_one_of(message, "bar") + assert field == "mixed_drink" + assert value.shots == 42 + + +# Issue #305: +@pytest.mark.xfail +def test_oneof_nested_assign(): + message = Test() + message.mixed_drink.shots = 42 + field, value = betterproto.which_one_of(message, "bar") + assert field == "mixed_drink" + assert value.shots == 42 diff --git a/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto b/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto index 44163c70a..f7ac6fe86 100644 --- a/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto +++ b/tests/inputs/oneof_default_value_serialization/oneof_default_value_serialization.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package oneof_default_value_serialization; + import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; import "google/protobuf/wrappers.proto"; diff --git a/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py b/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py index 0c928cb89..29dd81636 100644 --- a/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py +++ b/tests/inputs/oneof_default_value_serialization/test_oneof_default_value_serialization.py @@ -1,11 +1,12 @@ -import pytest import datetime +import pytest + import betterproto from tests.output_betterproto.oneof_default_value_serialization import ( - Test, Message, NestedMessage, + Test, ) diff --git a/tests/inputs/oneof_empty/oneof_empty.proto b/tests/inputs/oneof_empty/oneof_empty.proto index 45ca371c8..ca51d5ae9 
100644 --- a/tests/inputs/oneof_empty/oneof_empty.proto +++ b/tests/inputs/oneof_empty/oneof_empty.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package oneof_empty; + message Nothing {} message MaybeNothing { diff --git a/tests/inputs/oneof_enum/oneof_enum.proto b/tests/inputs/oneof_enum/oneof_enum.proto index dfe19d445..906abcb1b 100644 --- a/tests/inputs/oneof_enum/oneof_enum.proto +++ b/tests/inputs/oneof_enum/oneof_enum.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package oneof_enum; + message Test { oneof action { Signal signal = 1; diff --git a/tests/inputs/oneof_enum/test_oneof_enum.py b/tests/inputs/oneof_enum/test_oneof_enum.py index 7e287d4a4..e54fa3859 100644 --- a/tests/inputs/oneof_enum/test_oneof_enum.py +++ b/tests/inputs/oneof_enum/test_oneof_enum.py @@ -18,9 +18,8 @@ def test_which_one_of_returns_enum_with_default_value(): get_test_case_json_data("oneof_enum", "oneof_enum-enum-0.json")[0].json ) - assert message.move == Move( - x=0, y=0 - ) # Proto3 will default this as there is no null + assert not hasattr(message, "move") + assert object.__getattribute__(message, "move") == betterproto.PLACEHOLDER assert message.signal == Signal.PASS assert betterproto.which_one_of(message, "action") == ("signal", Signal.PASS) @@ -33,9 +32,8 @@ def test_which_one_of_returns_enum_with_non_default_value(): message.from_json( get_test_case_json_data("oneof_enum", "oneof_enum-enum-1.json")[0].json ) - assert message.move == Move( - x=0, y=0 - ) # Proto3 will default this as there is no null + assert not hasattr(message, "move") + assert object.__getattribute__(message, "move") == betterproto.PLACEHOLDER assert message.signal == Signal.RESIGN assert betterproto.which_one_of(message, "action") == ("signal", Signal.RESIGN) @@ -44,5 +42,6 @@ def test_which_one_of_returns_second_field_when_set(): message = Test() message.from_json(get_test_case_json_data("oneof_enum")[0].json) assert message.move == Move(x=2, y=3) - assert message.signal == Signal.PASS + assert not 
hasattr(message, "signal") + assert object.__getattribute__(message, "signal") == betterproto.PLACEHOLDER assert betterproto.which_one_of(message, "action") == ("move", Move(x=2, y=3)) diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.json b/tests/inputs/proto3_field_presence/proto3_field_presence.json new file mode 100644 index 000000000..988df8e87 --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence.json @@ -0,0 +1,13 @@ +{ + "test1": 128, + "test2": true, + "test3": "A value", + "test4": "aGVsbG8=", + "test5": { + "test": "Hello" + }, + "test6": "B", + "test7": "8589934592", + "test8": 2.5, + "test9": "2022-01-24T12:12:42Z" +} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence.proto b/tests/inputs/proto3_field_presence/proto3_field_presence.proto new file mode 100644 index 000000000..f28123dfd --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence.proto @@ -0,0 +1,26 @@ +syntax = "proto3"; + +package proto3_field_presence; + +import "google/protobuf/timestamp.proto"; + +message InnerTest { + string test = 1; +} + +message Test { + optional uint32 test1 = 1; + optional bool test2 = 2; + optional string test3 = 3; + optional bytes test4 = 4; + optional InnerTest test5 = 5; + optional TestEnum test6 = 6; + optional uint64 test7 = 7; + optional float test8 = 8; + optional google.protobuf.Timestamp test9 = 9; +} + +enum TestEnum { + A = 0; + B = 1; +} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_default.json b/tests/inputs/proto3_field_presence/proto3_field_presence_default.json new file mode 100644 index 000000000..0967ef424 --- /dev/null +++ b/tests/inputs/proto3_field_presence/proto3_field_presence_default.json @@ -0,0 +1 @@ +{} diff --git a/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json b/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json new file mode 100644 index 000000000..b19ae9804 --- /dev/null +++ 
b/tests/inputs/proto3_field_presence/proto3_field_presence_missing.json @@ -0,0 +1,9 @@ +{ + "test1": 0, + "test2": false, + "test3": "", + "test4": "", + "test6": "A", + "test7": "0", + "test8": 0 +} diff --git a/tests/inputs/proto3_field_presence/test_proto3_field_presence.py b/tests/inputs/proto3_field_presence/test_proto3_field_presence.py new file mode 100644 index 000000000..e31119982 --- /dev/null +++ b/tests/inputs/proto3_field_presence/test_proto3_field_presence.py @@ -0,0 +1,48 @@ +import json + +from tests.output_betterproto.proto3_field_presence import ( + InnerTest, + Test, + TestEnum, +) + + +def test_null_fields_json(): + """Ensure that using "null" in JSON is equivalent to not specifying a + field, for fields with explicit presence""" + + def test_json(ref_json: str, obj_json: str) -> None: + """`ref_json` and `obj_json` are JSON strings describing a `Test` object. + Test that deserializing both leads to the same object, and that + `ref_json` is the normalized format.""" + ref_obj = Test().from_json(ref_json) + obj = Test().from_json(obj_json) + + assert obj == ref_obj + assert json.loads(obj.to_json(0)) == json.loads(ref_json) + + test_json("{}", '{ "test1": null, "test2": null, "test3": null }') + test_json("{}", '{ "test4": null, "test5": null, "test6": null }') + test_json("{}", '{ "test7": null, "test8": null }') + test_json('{ "test5": {} }', '{ "test3": null, "test5": {} }') + + # Make sure that if include_default_values is set, None values are + # exported. 
+ obj = Test() + assert obj.to_dict() == {} + assert obj.to_dict(include_default_values=True) == { + "test1": None, + "test2": None, + "test3": None, + "test4": None, + "test5": None, + "test6": None, + "test7": None, + "test8": None, + "test9": None, + } + + +def test_unset_access(): # see #523 + assert Test().test1 is None + assert Test(test1=None).test1 is None diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json new file mode 100644 index 000000000..da0819278 --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.json @@ -0,0 +1,3 @@ +{ + "nested": {} +} diff --git a/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto new file mode 100644 index 000000000..caa76ec89 --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/proto3_field_presence_oneof.proto @@ -0,0 +1,22 @@ +syntax = "proto3"; + +package proto3_field_presence_oneof; + +message Test { + oneof kind { + Nested nested = 1; + WithOptional with_optional = 2; + } +} + +message InnerNested { + optional bool a = 1; +} + +message Nested { + InnerNested inner = 1; +} + +message WithOptional { + optional bool b = 2; +} diff --git a/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py b/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py new file mode 100644 index 000000000..d5f69d01a --- /dev/null +++ b/tests/inputs/proto3_field_presence_oneof/test_proto3_field_presence_oneof.py @@ -0,0 +1,29 @@ +from tests.output_betterproto.proto3_field_presence_oneof import ( + InnerNested, + Nested, + Test, + WithOptional, +) + + +def test_serialization(): + """Ensure that serialization of fields unset but with explicit field + presence do not bloat the serialized payload with length-delimited fields + with length 0""" + + def 
test_empty_nested(message: Test) -> None: + # '0a' => tag 1, length delimited + # '00' => length: 0 + assert bytes(message) == bytearray.fromhex("0a 00") + + test_empty_nested(Test(nested=Nested())) + test_empty_nested(Test(nested=Nested(inner=None))) + test_empty_nested(Test(nested=Nested(inner=InnerNested(a=None)))) + + def test_empty_with_optional(message: Test) -> None: + # '12' => tag 2, length delimited + # '00' => length: 0 + assert bytes(message) == bytearray.fromhex("12 00") + + test_empty_with_optional(Test(with_optional=WithOptional())) + test_empty_with_optional(Test(with_optional=WithOptional(b=None))) diff --git a/tests/inputs/recursivemessage/recursivemessage.proto b/tests/inputs/recursivemessage/recursivemessage.proto index f988316e3..1da2b57e9 100644 --- a/tests/inputs/recursivemessage/recursivemessage.proto +++ b/tests/inputs/recursivemessage/recursivemessage.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package recursivemessage; + message Test { string name = 1; Test child = 2; diff --git a/tests/inputs/ref/ref.proto b/tests/inputs/ref/ref.proto index e09fb15f2..694559099 100644 --- a/tests/inputs/ref/ref.proto +++ b/tests/inputs/ref/ref.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package ref; + import "repeatedmessage.proto"; message Test { diff --git a/tests/inputs/regression_387/regression_387.proto b/tests/inputs/regression_387/regression_387.proto new file mode 100644 index 000000000..57bd95440 --- /dev/null +++ b/tests/inputs/regression_387/regression_387.proto @@ -0,0 +1,12 @@ +syntax = "proto3"; + +package regression_387; + +message Test { + uint64 id = 1; +} + +message ParentElement { + string name = 1; + repeated Test elems = 2; +} \ No newline at end of file diff --git a/tests/inputs/regression_387/test_regression_387.py b/tests/inputs/regression_387/test_regression_387.py new file mode 100644 index 000000000..7bb40b2e9 --- /dev/null +++ b/tests/inputs/regression_387/test_regression_387.py @@ -0,0 +1,12 @@ +from 
tests.output_betterproto.regression_387 import ( + ParentElement, + Test, +) + + +def test_regression_387(): + el = ParentElement(name="test", elems=[Test(id=0), Test(id=42)]) + binary = bytes(el) + decoded = ParentElement().parse(binary) + assert decoded == el + assert decoded.elems == [Test(id=0), Test(id=42)] diff --git a/tests/inputs/regression_414/regression_414.proto b/tests/inputs/regression_414/regression_414.proto new file mode 100644 index 000000000..d20dddab1 --- /dev/null +++ b/tests/inputs/regression_414/regression_414.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package regression_414; + +message Test { + bytes body = 1; + bytes auth = 2; + repeated bytes signatures = 3; +} \ No newline at end of file diff --git a/tests/inputs/regression_414/test_regression_414.py b/tests/inputs/regression_414/test_regression_414.py new file mode 100644 index 000000000..742c97b41 --- /dev/null +++ b/tests/inputs/regression_414/test_regression_414.py @@ -0,0 +1,15 @@ +from tests.output_betterproto.regression_414 import Test + + +def test_full_cycle(): + body = bytes([0, 1]) + auth = bytes([2, 3]) + sig = [b""] + + obj = Test(body=body, auth=auth, signatures=sig) + + decoded = Test().parse(bytes(obj)) + assert decoded == obj + assert decoded.body == body + assert decoded.auth == auth + assert decoded.signatures == sig diff --git a/tests/inputs/repeated/repeated.proto b/tests/inputs/repeated/repeated.proto index 42c113298..4f3c788c9 100644 --- a/tests/inputs/repeated/repeated.proto +++ b/tests/inputs/repeated/repeated.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package repeated; + message Test { repeated string names = 1; } diff --git a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto index 7b7bf3631..38f1eaa39 100644 --- a/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto +++ 
b/tests/inputs/repeated_duration_timestamp/repeated_duration_timestamp.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package repeated_duration_timestamp; + import "google/protobuf/duration.proto"; import "google/protobuf/timestamp.proto"; diff --git a/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py b/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py index b1b13e5eb..efc348663 100644 --- a/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py +++ b/tests/inputs/repeated_duration_timestamp/test_repeated_duration_timestamp.py @@ -1,4 +1,7 @@ -from datetime import datetime, timedelta +from datetime import ( + datetime, + timedelta, +) from tests.output_betterproto.repeated_duration_timestamp import Test diff --git a/tests/inputs/repeatedpacked/repeatedpacked.proto b/tests/inputs/repeatedpacked/repeatedpacked.proto index ea86dde20..a037d1b8f 100644 --- a/tests/inputs/repeatedpacked/repeatedpacked.proto +++ b/tests/inputs/repeatedpacked/repeatedpacked.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package repeatedpacked; + message Test { repeated int32 counts = 1; repeated sint64 signed = 2; diff --git a/tests/inputs/service/service.proto b/tests/inputs/service/service.proto index 9ca0d252d..53d84fbdc 100644 --- a/tests/inputs/service/service.proto +++ b/tests/inputs/service/service.proto @@ -2,9 +2,16 @@ syntax = "proto3"; package service; +enum ThingType { + UNKNOWN = 0; + LIVING = 1; + DEAD = 2; +} + message DoThingRequest { string name = 1; repeated string comments = 2; + ThingType type = 3; } message DoThingResponse { diff --git a/tests/inputs/service_separate_packages/messages.proto b/tests/inputs/service_separate_packages/messages.proto index add0ed805..270b188f2 100644 --- a/tests/inputs/service_separate_packages/messages.proto +++ b/tests/inputs/service_separate_packages/messages.proto @@ -3,7 +3,7 @@ syntax = "proto3"; import "google/protobuf/duration.proto"; import 
"google/protobuf/timestamp.proto"; -package things.messages; +package service_separate_packages.things.messages; message DoThingRequest { string name = 1; diff --git a/tests/inputs/service_separate_packages/service.proto b/tests/inputs/service_separate_packages/service.proto index 48acc2569..950eab491 100644 --- a/tests/inputs/service_separate_packages/service.proto +++ b/tests/inputs/service_separate_packages/service.proto @@ -2,7 +2,7 @@ syntax = "proto3"; import "messages.proto"; -package things.service; +package service_separate_packages.things.service; service Test { rpc DoThing (things.messages.DoThingRequest) returns (things.messages.DoThingResponse); diff --git a/tests/inputs/service_uppercase/service.proto b/tests/inputs/service_uppercase/service.proto new file mode 100644 index 000000000..786eec2c0 --- /dev/null +++ b/tests/inputs/service_uppercase/service.proto @@ -0,0 +1,16 @@ +syntax = "proto3"; + +package service_uppercase; + +message DoTHINGRequest { + string name = 1; + repeated string comments = 2; +} + +message DoTHINGResponse { + repeated string names = 1; +} + +service Test { + rpc DoThing (DoTHINGRequest) returns (DoTHINGResponse); +} diff --git a/tests/inputs/service_uppercase/test_service.py b/tests/inputs/service_uppercase/test_service.py new file mode 100644 index 000000000..35405e134 --- /dev/null +++ b/tests/inputs/service_uppercase/test_service.py @@ -0,0 +1,8 @@ +import inspect + +from tests.output_betterproto.service_uppercase import TestStub + + +def test_parameters(): + sig = inspect.signature(TestStub.do_thing) + assert len(sig.parameters) == 5, "Expected 5 parameters" diff --git a/tests/inputs/signed/signed.proto b/tests/inputs/signed/signed.proto index 23fc9eec9..b40aad49f 100644 --- a/tests/inputs/signed/signed.proto +++ b/tests/inputs/signed/signed.proto @@ -1,5 +1,7 @@ syntax = "proto3"; +package signed; + message Test { // todo: rename fields after fixing bug where 'signed_32_positive' will map to 'signed_32Positive' as output 
json sint32 signed32 = 1; // signed_32_positive diff --git a/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py b/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py new file mode 100644 index 000000000..a4438488c --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/test_timestamp_dict_encode.py @@ -0,0 +1,82 @@ +from datetime import ( + datetime, + timedelta, + timezone, +) + +import pytest + +from tests.output_betterproto.timestamp_dict_encode import Test + + +# Current World Timezone range (UTC-12 to UTC+14) +MIN_UTC_OFFSET_MIN = -12 * 60 +MAX_UTC_OFFSET_MIN = 14 * 60 + +# Generate all timezones in range in 15 min increments +timezones = [ + timezone(timedelta(minutes=x)) + for x in range(MIN_UTC_OFFSET_MIN, MAX_UTC_OFFSET_MIN + 1, 15) +] + + +@pytest.mark.parametrize("tz", timezones) +def test_timezone_aware_datetime_dict_encode(tz: timezone): + original_time = datetime.now(tz=tz) + original_message = Test() + original_message.ts = original_time + encoded = original_message.to_dict() + decoded_message = Test() + decoded_message.from_dict(encoded) + + # check that the timestamps are equal after decoding from dict + assert original_message.ts.tzinfo is not None + assert decoded_message.ts.tzinfo is not None + assert original_message.ts == decoded_message.ts + + +def test_naive_datetime_dict_encode(): + # make suer naive datetime objects are still treated as utc + original_time = datetime.now() + assert original_time.tzinfo is None + original_message = Test() + original_message.ts = original_time + original_time_utc = original_time.replace(tzinfo=timezone.utc) + encoded = original_message.to_dict() + decoded_message = Test() + decoded_message.from_dict(encoded) + + # check that the timestamps are equal after decoding from dict + assert decoded_message.ts.tzinfo is not None + assert original_time_utc == decoded_message.ts + + +@pytest.mark.parametrize("tz", timezones) +def test_timezone_aware_json_serialize(tz: timezone): + 
original_time = datetime.now(tz=tz) + original_message = Test() + original_message.ts = original_time + json_serialized = original_message.to_json() + decoded_message = Test() + decoded_message.from_json(json_serialized) + + # check that the timestamps are equal after decoding from dict + assert original_message.ts.tzinfo is not None + assert decoded_message.ts.tzinfo is not None + assert original_message.ts == decoded_message.ts + + +def test_naive_datetime_json_serialize(): + # make suer naive datetime objects are still treated as utc + original_time = datetime.now() + assert original_time.tzinfo is None + original_message = Test() + original_message.ts = original_time + original_time_utc = original_time.replace(tzinfo=timezone.utc) + json_serialized = original_message.to_json() + decoded_message = Test() + decoded_message.from_json(json_serialized) + + # check that the timestamps are equal after decoding from dict + assert decoded_message.ts.tzinfo is not None + assert original_time_utc == decoded_message.ts diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json new file mode 100644 index 000000000..3f455587e --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.json @@ -0,0 +1,3 @@ +{ + "ts" : "2023-03-15T22:35:51.253277Z" +} \ No newline at end of file diff --git a/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto new file mode 100644 index 000000000..9c4081ac7 --- /dev/null +++ b/tests/inputs/timestamp_dict_encode/timestamp_dict_encode.proto @@ -0,0 +1,9 @@ +syntax = "proto3"; + +package timestamp_dict_encode; + +import "google/protobuf/timestamp.proto"; + +message Test { + google.protobuf.Timestamp ts = 1; +} \ No newline at end of file diff --git a/tests/oneof_pattern_matching.py b/tests/oneof_pattern_matching.py new file mode 100644 index 000000000..d4f18aab2 --- /dev/null 
+++ b/tests/oneof_pattern_matching.py @@ -0,0 +1,46 @@ +from dataclasses import dataclass + +import pytest + +import betterproto + + +def test_oneof_pattern_matching(): + @dataclass + class Sub(betterproto.Message): + val: int = betterproto.int32_field(1) + + @dataclass + class Foo(betterproto.Message): + bar: int = betterproto.int32_field(1, group="group1") + baz: str = betterproto.string_field(2, group="group1") + sub: Sub = betterproto.message_field(3, group="group2") + abc: str = betterproto.string_field(4, group="group2") + + foo = Foo(baz="test1", abc="test2") + + match foo: + case Foo(bar=_): + pytest.fail("Matched 'bar' instead of 'baz'") + case Foo(baz=v): + assert v == "test1" + case _: + pytest.fail("Matched neither 'bar' nor 'baz'") + + match foo: + case Foo(sub=_): + pytest.fail("Matched 'sub' instead of 'abc'") + case Foo(abc=v): + assert v == "test2" + case _: + pytest.fail("Matched neither 'sub' nor 'abc'") + + foo.sub = Sub(val=1) + + match foo: + case Foo(sub=Sub(val=v)): + assert v == 1 + case Foo(abc=v): + pytest.fail("Matched 'abc' instead of 'sub'") + case _: + pytest.fail("Matched neither 'sub' nor 'abc'") diff --git a/tests/streams/delimited_messages.in b/tests/streams/delimited_messages.in new file mode 100644 index 000000000..5993ac6f8 --- /dev/null +++ b/tests/streams/delimited_messages.in @@ -0,0 +1,2 @@ +•šï:bTesting•šï:bTesting +  \ No newline at end of file diff --git a/tests/streams/dump_varint_negative.expected b/tests/streams/dump_varint_negative.expected new file mode 100644 index 000000000..095482297 --- /dev/null +++ b/tests/streams/dump_varint_negative.expected @@ -0,0 +1 @@ +ÿÿÿÿÿÿÿÿÿ€Óûÿÿÿÿÿ€€€€€€€€€€€€€€€€€ \ No newline at end of file diff --git a/tests/streams/dump_varint_positive.expected b/tests/streams/dump_varint_positive.expected new file mode 100644 index 000000000..8614b9d7a --- /dev/null +++ b/tests/streams/dump_varint_positive.expected @@ -0,0 +1 @@ +€­â \ No newline at end of file diff --git 
a/tests/streams/java/.gitignore b/tests/streams/java/.gitignore new file mode 100644 index 000000000..9b1ebba99 --- /dev/null +++ b/tests/streams/java/.gitignore @@ -0,0 +1,38 @@ +### Output ### +target/ +!.mvn/wrapper/maven-wrapper.jar +!**/src/main/**/target/ +!**/src/test/**/target/ +dependency-reduced-pom.xml +MANIFEST.MF + +### IntelliJ IDEA ### +.idea/ +*.iws +*.iml +*.ipr + +### Eclipse ### +.apt_generated +.classpath +.factorypath +.project +.settings +.springBeans +.sts4-cache + +### NetBeans ### +/nbproject/private/ +/nbbuild/ +/dist/ +/nbdist/ +/.nb-gradle/ +build/ +!**/src/main/**/build/ +!**/src/test/**/build/ + +### VS Code ### +.vscode/ + +### Mac OS ### +.DS_Store \ No newline at end of file diff --git a/tests/streams/java/pom.xml b/tests/streams/java/pom.xml new file mode 100644 index 000000000..170d2d66c --- /dev/null +++ b/tests/streams/java/pom.xml @@ -0,0 +1,94 @@ + + + 4.0.0 + + betterproto + compatibility-test + 1.0-SNAPSHOT + jar + + + 11 + 11 + UTF-8 + 3.23.4 + + + + + com.google.protobuf + protobuf-java + ${protobuf.version} + + + + + + + kr.motd.maven + os-maven-plugin + 1.7.1 + + + + + + org.apache.maven.plugins + maven-shade-plugin + 3.5.0 + + + package + + shade + + + + + betterproto.CompatibilityTest + + + + + + + + + org.apache.maven.plugins + maven-jar-plugin + 3.3.0 + + + + true + betterproto.CompatibilityTest + + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + 0.6.1 + + + + compile + + + + + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + + + + + ${project.artifactId} + + + \ No newline at end of file diff --git a/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java b/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java new file mode 100644 index 000000000..908f87afb --- /dev/null +++ b/tests/streams/java/src/main/java/betterproto/CompatibilityTest.java @@ -0,0 +1,41 @@ +package betterproto; + +import java.io.IOException; + +public class 
CompatibilityTest { + public static void main(String[] args) throws IOException { + if (args.length < 2) + throw new RuntimeException("Attempted to run without the required arguments."); + else if (args.length > 2) + throw new RuntimeException( + "Attempted to run with more than the expected number of arguments (>1)."); + + Tests tests = new Tests(args[1]); + + switch (args[0]) { + case "single_varint": + tests.testSingleVarint(); + break; + + case "multiple_varints": + tests.testMultipleVarints(); + break; + + case "single_message": + tests.testSingleMessage(); + break; + + case "multiple_messages": + tests.testMultipleMessages(); + break; + + case "infinite_messages": + tests.testInfiniteMessages(); + break; + + default: + throw new RuntimeException( + "Attempted to run with unknown argument '" + args[0] + "'."); + } + } +} diff --git a/tests/streams/java/src/main/java/betterproto/Tests.java b/tests/streams/java/src/main/java/betterproto/Tests.java new file mode 100644 index 000000000..a7c8fd572 --- /dev/null +++ b/tests/streams/java/src/main/java/betterproto/Tests.java @@ -0,0 +1,115 @@ +package betterproto; + +import betterproto.nested.NestedOuterClass; +import betterproto.oneof.Oneof; + +import com.google.protobuf.CodedInputStream; +import com.google.protobuf.CodedOutputStream; + +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; + +public class Tests { + String path; + + public Tests(String path) { + this.path = path; + } + + public void testSingleVarint() throws IOException { + // Read in the Python-generated single varint file + FileInputStream inputStream = new FileInputStream(path + "/py_single_varint.out"); + CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); + + int value = codedInput.readUInt32(); + + inputStream.close(); + + // Write the value back to a file + FileOutputStream outputStream = new FileOutputStream(path + "/java_single_varint.out"); + CodedOutputStream codedOutput = 
CodedOutputStream.newInstance(outputStream); + + codedOutput.writeUInt32NoTag(value); + + codedOutput.flush(); + outputStream.close(); + } + + public void testMultipleVarints() throws IOException { + // Read in the Python-generated multiple varints file + FileInputStream inputStream = new FileInputStream(path + "/py_multiple_varints.out"); + CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); + + int value1 = codedInput.readUInt32(); + int value2 = codedInput.readUInt32(); + long value3 = codedInput.readUInt64(); + + inputStream.close(); + + // Write the values back to a file + FileOutputStream outputStream = new FileOutputStream(path + "/java_multiple_varints.out"); + CodedOutputStream codedOutput = CodedOutputStream.newInstance(outputStream); + + codedOutput.writeUInt32NoTag(value1); + codedOutput.writeUInt64NoTag(value2); + codedOutput.writeUInt64NoTag(value3); + + codedOutput.flush(); + outputStream.close(); + } + + public void testSingleMessage() throws IOException { + // Read in the Python-generated single message file + FileInputStream inputStream = new FileInputStream(path + "/py_single_message.out"); + CodedInputStream codedInput = CodedInputStream.newInstance(inputStream); + + Oneof.Test message = Oneof.Test.parseFrom(codedInput); + + inputStream.close(); + + // Write the message back to a file + FileOutputStream outputStream = new FileOutputStream(path + "/java_single_message.out"); + CodedOutputStream codedOutput = CodedOutputStream.newInstance(outputStream); + + message.writeTo(codedOutput); + + codedOutput.flush(); + outputStream.close(); + } + + public void testMultipleMessages() throws IOException { + // Read in the Python-generated multi-message file + FileInputStream inputStream = new FileInputStream(path + "/py_multiple_messages.out"); + + Oneof.Test oneof = Oneof.Test.parseDelimitedFrom(inputStream); + NestedOuterClass.Test nested = NestedOuterClass.Test.parseDelimitedFrom(inputStream); + + inputStream.close(); + + // Write 
the messages back to a file + FileOutputStream outputStream = new FileOutputStream(path + "/java_multiple_messages.out"); + + oneof.writeDelimitedTo(outputStream); + nested.writeDelimitedTo(outputStream); + + outputStream.flush(); + outputStream.close(); + } + + public void testInfiniteMessages() throws IOException { + // Read in as many messages as are present in the Python-generated file and write them back + FileInputStream inputStream = new FileInputStream(path + "/py_infinite_messages.out"); + FileOutputStream outputStream = new FileOutputStream(path + "/java_infinite_messages.out"); + + Oneof.Test current = Oneof.Test.parseDelimitedFrom(inputStream); + while (current != null) { + current.writeDelimitedTo(outputStream); + current = Oneof.Test.parseDelimitedFrom(inputStream); + } + + inputStream.close(); + outputStream.flush(); + outputStream.close(); + } +} diff --git a/tests/streams/java/src/main/proto/betterproto/nested.proto b/tests/streams/java/src/main/proto/betterproto/nested.proto new file mode 100644 index 000000000..405a05a47 --- /dev/null +++ b/tests/streams/java/src/main/proto/betterproto/nested.proto @@ -0,0 +1,27 @@ +syntax = "proto3"; + +package nested; +option java_package = "betterproto.nested"; + +// A test message with a nested message inside of it. +message Test { + // This is the nested type. + message Nested { + // Stores a simple counter. + int32 count = 1; + } + // This is the nested enum. 
+ enum Msg { + NONE = 0; + THIS = 1; + } + + Nested nested = 1; + Sibling sibling = 2; + Sibling sibling2 = 3; + Msg msg = 4; +} + +message Sibling { + int32 foo = 1; +} \ No newline at end of file diff --git a/tests/streams/java/src/main/proto/betterproto/oneof.proto b/tests/streams/java/src/main/proto/betterproto/oneof.proto new file mode 100644 index 000000000..ad21028ca --- /dev/null +++ b/tests/streams/java/src/main/proto/betterproto/oneof.proto @@ -0,0 +1,19 @@ +syntax = "proto3"; + +package oneof; +option java_package = "betterproto.oneof"; + +message Test { + oneof foo { + int32 pitied = 1; + string pitier = 2; + } + + int32 just_a_regular_field = 3; + + oneof bar { + int32 drinks = 11; + string bar_name = 12; + } +} + diff --git a/tests/streams/load_varint_cutoff.in b/tests/streams/load_varint_cutoff.in new file mode 100644 index 000000000..52b9bf1e1 --- /dev/null +++ b/tests/streams/load_varint_cutoff.in @@ -0,0 +1 @@ +È \ No newline at end of file diff --git a/tests/streams/message_dump_file_multiple.expected b/tests/streams/message_dump_file_multiple.expected new file mode 100644 index 000000000..b5fdf9c30 --- /dev/null +++ b/tests/streams/message_dump_file_multiple.expected @@ -0,0 +1,2 @@ +•šï:bTesting•šï:bTesting +  \ No newline at end of file diff --git a/tests/streams/message_dump_file_single.expected b/tests/streams/message_dump_file_single.expected new file mode 100644 index 000000000..9b7bafb6a --- /dev/null +++ b/tests/streams/message_dump_file_single.expected @@ -0,0 +1 @@ +•šï:bTesting \ No newline at end of file diff --git a/tests/test_all_definition.py b/tests/test_all_definition.py new file mode 100644 index 000000000..61abb5f37 --- /dev/null +++ b/tests/test_all_definition.py @@ -0,0 +1,19 @@ +def test_all_definition(): + """ + Check that a compiled module defines __all__ with the right value. + + These modules have been chosen since they contain messages, services and enums. 
+ """ + import tests.output_betterproto.enum as enum + import tests.output_betterproto.service as service + + assert service.__all__ == ( + "ThingType", + "DoThingRequest", + "DoThingResponse", + "GetThingRequest", + "GetThingResponse", + "TestStub", + "TestBase", + ) + assert enum.__all__ == ("Choice", "ArithmeticOperator", "Test") diff --git a/tests/test_casing.py b/tests/test_casing.py index ec60483b2..56cd8a930 100644 --- a/tests/test_casing.py +++ b/tests/test_casing.py @@ -1,6 +1,10 @@ import pytest -from betterproto.casing import camel_case, pascal_case, snake_case +from betterproto.casing import ( + camel_case, + pascal_case, + snake_case, +) @pytest.mark.parametrize( diff --git a/tests/test_deprecated.py b/tests/test_deprecated.py index cbdea330c..ea16d3708 100644 --- a/tests/test_deprecated.py +++ b/tests/test_deprecated.py @@ -1,26 +1,61 @@ +import warnings + import pytest -from tests.output_betterproto.deprecated import Test as DeprecatedMessageTest -from tests.output_betterproto.deprecated_field import Test as DeprecatedFieldTest +from tests.mocks import MockChannel +from tests.output_betterproto.deprecated import ( + Empty, + Message, + Test, + TestServiceStub, +) + + +@pytest.fixture +def message(): + with warnings.catch_warnings(): + warnings.filterwarnings("ignore", category=DeprecationWarning) + return Message(value="hello") def test_deprecated_message(): - with pytest.deprecated_call(): - DeprecatedMessageTest(value=10) + with pytest.warns(DeprecationWarning) as record: + Message(value="hello") + + assert len(record) == 1 + assert str(record[0].message) == f"{Message.__name__} is deprecated" + + +def test_message_with_deprecated_field(message): + with pytest.warns(DeprecationWarning) as record: + Test(message=message, value=10) + + assert len(record) == 1 + assert str(record[0].message) == f"{Test.__name__}.message is deprecated" + + +def test_message_with_deprecated_field_not_set(message): + with warnings.catch_warnings(): + 
warnings.simplefilter("error") + Test(value=10) + +def test_message_with_deprecated_field_not_set_default(message): + with warnings.catch_warnings(): + warnings.simplefilter("error") + _ = Test(value=10).message -def test_deprecated_message_with_deprecated_field(): - with pytest.warns(None) as record: - DeprecatedMessageTest(v=10, value=10) - assert len(record) == 2 +@pytest.mark.asyncio +async def test_service_with_deprecated_method(): + stub = TestServiceStub(MockChannel([Empty(), Empty()])) -def test_deprecated_field_warning(): - with pytest.deprecated_call(): - DeprecatedFieldTest(v=10, value=10) + with pytest.warns(DeprecationWarning) as record: + await stub.deprecated_func(Empty()) + assert len(record) == 1 + assert str(record[0].message) == f"TestService.deprecated_func is deprecated" -def test_deprecated_field_no_warning(): - with pytest.warns(None) as record: - DeprecatedFieldTest(value=10) - assert not record + with warnings.catch_warnings(): + warnings.simplefilter("error") + await stub.func(Empty()) diff --git a/tests/test_documentation.py b/tests/test_documentation.py new file mode 100644 index 000000000..da82a1b76 --- /dev/null +++ b/tests/test_documentation.py @@ -0,0 +1,37 @@ +import ast +import inspect + + +def check(generated_doc: str, type: str) -> None: + assert f"Documentation of {type} 1" in generated_doc + assert "other line 1" in generated_doc + assert f"Documentation of {type} 2" in generated_doc + assert "other line 2" in generated_doc + assert f"Documentation of {type} 3" in generated_doc + + +def test_documentation() -> None: + from .output_betterproto.documentation import ( + Enum, + ServiceBase, + ServiceStub, + Test, + ) + + check(Test.__doc__, "message") + + source = inspect.getsource(Test) + tree = ast.parse(source) + check(tree.body[0].body[2].value.value, "field") + + check(Enum.__doc__, "enum") + + source = inspect.getsource(Enum) + tree = ast.parse(source) + check(tree.body[0].body[2].value.value, "variant") + + 
check(ServiceBase.__doc__, "service") + check(ServiceBase.get.__doc__, "method") + + check(ServiceStub.__doc__, "service") + check(ServiceStub.get.__doc__, "method") diff --git a/tests/test_enum.py b/tests/test_enum.py new file mode 100644 index 000000000..04b8a1674 --- /dev/null +++ b/tests/test_enum.py @@ -0,0 +1,79 @@ +from typing import ( + Optional, + Tuple, +) + +import pytest + +import betterproto + + +class Colour(betterproto.Enum): + RED = 1 + GREEN = 2 + BLUE = 3 + + +PURPLE = Colour.__new__(Colour, name=None, value=4) + + +@pytest.mark.parametrize( + "member, str_value", + [ + (Colour.RED, "RED"), + (Colour.GREEN, "GREEN"), + (Colour.BLUE, "BLUE"), + ], +) +def test_str(member: Colour, str_value: str) -> None: + assert str(member) == str_value + + +@pytest.mark.parametrize( + "member, repr_value", + [ + (Colour.RED, "Colour.RED"), + (Colour.GREEN, "Colour.GREEN"), + (Colour.BLUE, "Colour.BLUE"), + ], +) +def test_repr(member: Colour, repr_value: str) -> None: + assert repr(member) == repr_value + + +@pytest.mark.parametrize( + "member, values", + [ + (Colour.RED, ("RED", 1)), + (Colour.GREEN, ("GREEN", 2)), + (Colour.BLUE, ("BLUE", 3)), + (PURPLE, (None, 4)), + ], +) +def test_name_values(member: Colour, values: Tuple[Optional[str], int]) -> None: + assert (member.name, member.value) == values + + +@pytest.mark.parametrize( + "member, input_str", + [ + (Colour.RED, "RED"), + (Colour.GREEN, "GREEN"), + (Colour.BLUE, "BLUE"), + ], +) +def test_from_string(member: Colour, input_str: str) -> None: + assert Colour.from_string(input_str) == member + + +@pytest.mark.parametrize( + "member, input_int", + [ + (Colour.RED, 1), + (Colour.GREEN, 2), + (Colour.BLUE, 3), + (PURPLE, 4), + ], +) +def test_try_value(member: Colour, input_int: int) -> None: + assert Colour.try_value(input_int) == member diff --git a/tests/test_features.py b/tests/test_features.py index 3f44f17d6..8ca44c674 100644 --- a/tests/test_features.py +++ b/tests/test_features.py @@ -1,7 +1,28 @@ 
-import betterproto +import json +import sys +from copy import ( + copy, + deepcopy, +) from dataclasses import dataclass -from typing import Optional, List, Dict -from datetime import datetime, timedelta +from datetime import ( + datetime, + timedelta, +) +from inspect import ( + Parameter, + signature, +) +from typing import ( + Dict, + List, + Optional, +) +from unittest.mock import ANY + +import pytest + +import betterproto def test_has_field(): @@ -66,6 +87,7 @@ class Foo(betterproto.Message): foo = Foo(name="foo", child=Bar(name="bar")) assert foo.to_dict() == {"name": "foo", "child": {"name": "bar"}} + assert foo.to_pydict() == {"name": "foo", "child": {"name": "bar"}} def test_enum_as_int_json(): @@ -85,6 +107,11 @@ class Foo(betterproto.Message): foo.bar = 1 assert foo.to_dict() == {"bar": "ONE"} + # Similar expectations for pydict + foo = Foo().from_pydict({"bar": 1}) + assert foo.bar == TestEnum.ONE + assert foo.to_pydict() == {"bar": TestEnum.ONE} + def test_unknown_fields(): @dataclass @@ -128,17 +155,18 @@ class Foo(betterproto.Message): foo.baz = "test" # Other oneof fields should now be unset - assert foo.bar == 0 + assert not hasattr(foo, "bar") + assert object.__getattribute__(foo, "bar") == betterproto.PLACEHOLDER assert betterproto.which_one_of(foo, "group1")[0] == "baz" - foo.sub.val = 1 + foo.sub = Sub(val=1) assert betterproto.serialized_on_wire(foo.sub) foo.abc = "test" # Group 1 shouldn't be touched, group 2 should have reset - assert foo.sub.val == 0 - assert betterproto.serialized_on_wire(foo.sub) is False + assert not hasattr(foo, "sub") + assert object.__getattribute__(foo, "sub") == betterproto.PLACEHOLDER assert betterproto.which_one_of(foo, "group2")[0] == "abc" # Zero value should always serialize for one-of @@ -153,6 +181,16 @@ class Foo(betterproto.Message): assert betterproto.which_one_of(foo2, "group2")[0] == "" +@pytest.mark.skipif( + sys.version_info < (3, 10), + reason="pattern matching is only supported in python3.10+", +) 
+def test_oneof_pattern_matching(): + from .oneof_pattern_matching import test_oneof_pattern_matching + + test_oneof_pattern_matching() + + def test_json_casing(): @dataclass class CasingTest(betterproto.Message): @@ -168,6 +206,37 @@ class CasingTest(betterproto.Message): assert test == CasingTest(1, 2, 3, 4) + # Serializing should be strict. + assert json.loads(test.to_json()) == { + "pascalCase": 1, + "camelCase": 2, + "snakeCase": 3, + "kabobCase": 4, + } + + assert json.loads(test.to_json(casing=betterproto.Casing.SNAKE)) == { + "pascal_case": 1, + "camel_case": 2, + "snake_case": 3, + "kabob_case": 4, + } + + +def test_dict_casing(): + @dataclass + class CasingTest(betterproto.Message): + pascal_case: int = betterproto.int32_field(1) + camel_case: int = betterproto.int32_field(2) + snake_case: int = betterproto.int32_field(3) + kabob_case: int = betterproto.int32_field(4) + + # Parsing should accept almost any input + test = CasingTest().from_dict( + {"PascalCase": 1, "camelCase": 2, "snake_case": 3, "kabob-case": 4} + ) + + assert test == CasingTest(1, 2, 3, 4) + # Serializing should be strict. 
assert test.to_dict() == { "pascalCase": 1, @@ -175,6 +244,12 @@ class CasingTest(betterproto.Message): "snakeCase": 3, "kabobCase": 4, } + assert test.to_pydict() == { + "pascalCase": 1, + "camelCase": 2, + "snakeCase": 3, + "kabobCase": 4, + } assert test.to_dict(casing=betterproto.Casing.SNAKE) == { "pascal_case": 1, @@ -182,6 +257,12 @@ class CasingTest(betterproto.Message): "snake_case": 3, "kabob_case": 4, } + assert test.to_pydict(casing=betterproto.Casing.SNAKE) == { + "pascal_case": 1, + "camel_case": 2, + "snake_case": 3, + "kabob_case": 4, + } def test_optional_flag(): @@ -199,6 +280,63 @@ class Request(betterproto.Message): assert Request().parse(b"\n\x00").flag is False +def test_optional_datetime_to_dict(): + @dataclass + class Request(betterproto.Message): + date: Optional[datetime] = betterproto.message_field(1, optional=True) + + # Check dict serialization + assert Request().to_dict() == {} + assert Request().to_dict(include_default_values=True) == {"date": None} + assert Request(date=datetime(2020, 1, 1)).to_dict() == { + "date": "2020-01-01T00:00:00Z" + } + assert Request(date=datetime(2020, 1, 1)).to_dict(include_default_values=True) == { + "date": "2020-01-01T00:00:00Z" + } + + # Check pydict serialization + assert Request().to_pydict() == {} + assert Request().to_pydict(include_default_values=True) == {"date": None} + assert Request(date=datetime(2020, 1, 1)).to_pydict() == { + "date": datetime(2020, 1, 1) + } + assert Request(date=datetime(2020, 1, 1)).to_pydict( + include_default_values=True + ) == {"date": datetime(2020, 1, 1)} + + +def test_to_json_default_values(): + @dataclass + class TestMessage(betterproto.Message): + some_int: int = betterproto.int32_field(1) + some_double: float = betterproto.double_field(2) + some_str: str = betterproto.string_field(3) + some_bool: bool = betterproto.bool_field(4) + + # Empty dict + test = TestMessage().from_dict({}) + + assert json.loads(test.to_json(include_default_values=True)) == { + "someInt": 
0, + "someDouble": 0.0, + "someStr": "", + "someBool": False, + } + + # All default values + test = TestMessage().from_dict( + {"someInt": 0, "someDouble": 0.0, "someStr": "", "someBool": False} + ) + + assert json.loads(test.to_json(include_default_values=True)) == { + "someInt": 0, + "someDouble": 0.0, + "someStr": "", + "someBool": False, + } + + def test_to_dict_default_values(): @dataclass class TestMessage(betterproto.Message): @@ -217,6 +355,15 @@ class TestMessage(betterproto.Message): "someBool": False, } + test = TestMessage().from_pydict({}) + + assert test.to_pydict(include_default_values=True) == { + "someInt": 0, + "someDouble": 0.0, + "someStr": "", + "someBool": False, + } + # All default values test = TestMessage().from_dict( {"someInt": 0, "someDouble": 0.0, "someStr": "", "someBool": False} @@ -229,6 +376,17 @@ class TestMessage(betterproto.Message): "someBool": False, } + test = TestMessage().from_pydict( + {"someInt": 0, "someDouble": 0.0, "someStr": "", "someBool": False} + ) + + assert test.to_pydict(include_default_values=True) == { + "someInt": 0, + "someDouble": 0.0, + "someStr": "", + "someBool": False, + } + # Some default and some other values @dataclass class TestMessage2(betterproto.Message): @@ -265,6 +423,30 @@ class TestMessage2(betterproto.Message): "someDefaultBool": False, } + test = TestMessage2().from_pydict( + { + "someInt": 2, + "someDouble": 1.2, + "someStr": "hello", + "someBool": True, + "someDefaultInt": 0, + "someDefaultDouble": 0.0, + "someDefaultStr": "", + "someDefaultBool": False, + } + ) + + assert test.to_pydict(include_default_values=True) == { + "someInt": 2, + "someDouble": 1.2, + "someStr": "hello", + "someBool": True, + "someDefaultInt": 0, + "someDefaultDouble": 0.0, + "someDefaultStr": "", + "someDefaultBool": False, + } + # Nested messages @dataclass class TestChildMessage(betterproto.Message): @@ -284,6 +466,36 @@ class TestParentMessage(betterproto.Message): "someMessage": {"someOtherInt": 0}, } + test = 
TestParentMessage().from_pydict({"someInt": 0, "someDouble": 1.2}) + + assert test.to_pydict(include_default_values=True) == { + "someInt": 0, + "someDouble": 1.2, + "someMessage": {"someOtherInt": 0}, + } + + +def test_to_dict_datetime_values(): + @dataclass + class TestDatetimeMessage(betterproto.Message): + bar: datetime = betterproto.message_field(1) + baz: timedelta = betterproto.message_field(2) + + test = TestDatetimeMessage().from_dict( + {"bar": "2020-01-01T00:00:00Z", "baz": "86400.000s"} + ) + + assert test.to_dict() == {"bar": "2020-01-01T00:00:00Z", "baz": "86400.000s"} + + test = TestDatetimeMessage().from_pydict( + {"bar": datetime(year=2020, month=1, day=1), "baz": timedelta(days=1)} + ) + + assert test.to_pydict() == { + "bar": datetime(year=2020, month=1, day=1), + "baz": timedelta(days=1), + } + def test_oneof_default_value_set_causes_writes_wire(): @dataclass @@ -333,47 +545,6 @@ def _round_trip_serialization(foo: Foo) -> Foo: ) -def test_recursive_message(): - from tests.output_betterproto.recursivemessage import Test as RecursiveMessage - - msg = RecursiveMessage() - - assert msg.child == RecursiveMessage() - - # Lazily-created zero-value children must not affect equality. - assert msg == RecursiveMessage() - - # Lazily-created zero-value children must not affect serialization. 
- assert bytes(msg) == b"" - - -def test_recursive_message_defaults(): - from tests.output_betterproto.recursivemessage import ( - Test as RecursiveMessage, - Intermediate, - ) - - msg = RecursiveMessage(name="bob", intermediate=Intermediate(42)) - - # set values are as expected - assert msg == RecursiveMessage(name="bob", intermediate=Intermediate(42)) - - # lazy initialized works modifies the message - assert msg != RecursiveMessage( - name="bob", intermediate=Intermediate(42), child=RecursiveMessage(name="jude") - ) - msg.child.child.name = "jude" - assert msg == RecursiveMessage( - name="bob", - intermediate=Intermediate(42), - child=RecursiveMessage(child=RecursiveMessage(name="jude")), - ) - - # lazily initialization recurses as needed - assert msg.child.child.child.child.child.child.child == RecursiveMessage() - assert msg.intermediate.child.intermediate == Intermediate() - - def test_message_repr(): from tests.output_betterproto.recursivemessage import Test @@ -450,9 +621,7 @@ class Truthy(betterproto.Message): 2010-02-18T16:00:00.23334444 2010-02-18T16:00:00,2283 2009-05-19 143922 -2009-05-19 1439""".split( - "\n" -) +2009-05-19 1439""".split("\n") def test_iso_datetime(): @@ -476,3 +645,36 @@ class Envelope(betterproto.Message): msg.from_dict({"timestamps": iso_candidates}) assert all([isinstance(item, datetime) for item in msg.timestamps]) + + +def test_service_argument__expected_parameter(): + from tests.output_betterproto.service import TestStub + + sig = signature(TestStub.do_thing) + do_thing_request_parameter = sig.parameters["do_thing_request"] + assert do_thing_request_parameter.default is Parameter.empty + assert do_thing_request_parameter.annotation == "DoThingRequest" + + +def test_is_set(): + @dataclass + class Spam(betterproto.Message): + foo: bool = betterproto.bool_field(1) + bar: Optional[int] = betterproto.int32_field(2, optional=True) + + assert not Spam().is_set("foo") + assert not Spam().is_set("bar") + assert 
Spam(foo=True).is_set("foo") + assert Spam(foo=True, bar=0).is_set("bar") + + +def test_equality_comparison(): + from tests.output_betterproto.bool import Test as TestMessage + + msg = TestMessage(value=True) + + assert msg == msg + assert msg == ANY + assert msg == TestMessage(value=True) + assert msg != 1 + assert msg != TestMessage(value=False) diff --git a/tests/test_get_ref_type.py b/tests/test_get_ref_type.py index cbee4caa5..21796629d 100644 --- a/tests/test_get_ref_type.py +++ b/tests/test_get_ref_type.py @@ -1,6 +1,18 @@ import pytest -from betterproto.compile.importing import get_type_reference, parse_source_type_name +from betterproto.compile.importing import ( + get_type_reference, + parse_source_type_name, +) +from betterproto.plugin.typing_compiler import DirectImportTypingCompiler + + +@pytest.fixture +def typing_compiler() -> DirectImportTypingCompiler: + """ + Generates a simple Direct Import Typing Compiler for testing. + """ + return DirectImportTypingCompiler() @pytest.mark.parametrize( @@ -29,15 +41,70 @@ ], ) def test_reference_google_wellknown_types_non_wrappers( - google_type: str, expected_name: str, expected_import: str + google_type: str, + expected_name: str, + expected_import: str, + typing_compiler: DirectImportTypingCompiler, ): imports = set() - name = get_type_reference(package="", imports=imports, source_type=google_type) + name = get_type_reference( + package="", + imports=imports, + source_type=google_type, + typing_compiler=typing_compiler, + pydantic=False, + ) assert name == expected_name - assert imports.__contains__( - expected_import - ), f"{expected_import} not found in {imports}" + assert imports.__contains__(expected_import), ( + f"{expected_import} not found in {imports}" + ) + + +@pytest.mark.parametrize( + ["google_type", "expected_name", "expected_import"], + [ + ( + ".google.protobuf.Empty", + '"betterproto_lib_pydantic_google_protobuf.Empty"', + "import betterproto.lib.pydantic.google.protobuf as 
betterproto_lib_pydantic_google_protobuf", + ), + ( + ".google.protobuf.Struct", + '"betterproto_lib_pydantic_google_protobuf.Struct"', + "import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf", + ), + ( + ".google.protobuf.ListValue", + '"betterproto_lib_pydantic_google_protobuf.ListValue"', + "import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf", + ), + ( + ".google.protobuf.Value", + '"betterproto_lib_pydantic_google_protobuf.Value"', + "import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf", + ), + ], +) +def test_reference_google_wellknown_types_non_wrappers_pydantic( + google_type: str, + expected_name: str, + expected_import: str, + typing_compiler: DirectImportTypingCompiler, +): + imports = set() + name = get_type_reference( + package="", + imports=imports, + source_type=google_type, + typing_compiler=typing_compiler, + pydantic=True, + ) + + assert name == expected_name + assert imports.__contains__(expected_import), ( + f"{expected_import} not found in {imports}" + ) @pytest.mark.parametrize( @@ -55,10 +122,15 @@ def test_reference_google_wellknown_types_non_wrappers( ], ) def test_referenceing_google_wrappers_unwraps_them( - google_type: str, expected_name: str + google_type: str, expected_name: str, typing_compiler: DirectImportTypingCompiler ): imports = set() - name = get_type_reference(package="", imports=imports, source_type=google_type) + name = get_type_reference( + package="", + imports=imports, + source_type=google_type, + typing_compiler=typing_compiler, + ) assert name == expected_name assert imports == set() @@ -91,223 +163,321 @@ def test_referenceing_google_wrappers_unwraps_them( ], ) def test_referenceing_google_wrappers_without_unwrapping( - google_type: str, expected_name: str + google_type: str, expected_name: str, typing_compiler: DirectImportTypingCompiler ): name = get_type_reference( - package="", imports=set(), 
source_type=google_type, unwrap=False + package="", + imports=set(), + source_type=google_type, + typing_compiler=typing_compiler, + unwrap=False, ) assert name == expected_name -def test_reference_child_package_from_package(): +def test_reference_child_package_from_package( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="package", imports=imports, source_type="package.child.Message" + package="package", + imports=imports, + source_type="package.child.Message", + typing_compiler=typing_compiler, ) assert imports == {"from . import child"} assert name == '"child.Message"' -def test_reference_child_package_from_root(): +def test_reference_child_package_from_root(typing_compiler: DirectImportTypingCompiler): imports = set() - name = get_type_reference(package="", imports=imports, source_type="child.Message") + name = get_type_reference( + package="", + imports=imports, + source_type="child.Message", + typing_compiler=typing_compiler, + ) assert imports == {"from . import child"} assert name == '"child.Message"' -def test_reference_camel_cased(): +def test_reference_camel_cased(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="", imports=imports, source_type="child_package.example_message" + package="", + imports=imports, + source_type="child_package.example_message", + typing_compiler=typing_compiler, ) assert imports == {"from . 
import child_package"} assert name == '"child_package.ExampleMessage"' -def test_reference_nested_child_from_root(): +def test_reference_nested_child_from_root(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="", imports=imports, source_type="nested.child.Message" + package="", + imports=imports, + source_type="nested.child.Message", + typing_compiler=typing_compiler, ) assert imports == {"from .nested import child as nested_child"} assert name == '"nested_child.Message"' -def test_reference_deeply_nested_child_from_root(): +def test_reference_deeply_nested_child_from_root( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="", imports=imports, source_type="deeply.nested.child.Message" + package="", + imports=imports, + source_type="deeply.nested.child.Message", + typing_compiler=typing_compiler, ) assert imports == {"from .deeply.nested import child as deeply_nested_child"} assert name == '"deeply_nested_child.Message"' -def test_reference_deeply_nested_child_from_package(): +def test_reference_deeply_nested_child_from_package( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( package="package", imports=imports, source_type="package.deeply.nested.child.Message", + typing_compiler=typing_compiler, ) assert imports == {"from .deeply.nested import child as deeply_nested_child"} assert name == '"deeply_nested_child.Message"' -def test_reference_root_sibling(): +def test_reference_root_sibling(typing_compiler: DirectImportTypingCompiler): imports = set() - name = get_type_reference(package="", imports=imports, source_type="Message") + name = get_type_reference( + package="", + imports=imports, + source_type="Message", + typing_compiler=typing_compiler, + ) assert imports == set() assert name == '"Message"' -def test_reference_nested_siblings(): +def test_reference_nested_siblings(typing_compiler: DirectImportTypingCompiler): 
imports = set() - name = get_type_reference(package="foo", imports=imports, source_type="foo.Message") + name = get_type_reference( + package="foo", + imports=imports, + source_type="foo.Message", + typing_compiler=typing_compiler, + ) assert imports == set() assert name == '"Message"' -def test_reference_deeply_nested_siblings(): +def test_reference_deeply_nested_siblings(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="foo.bar", imports=imports, source_type="foo.bar.Message" + package="foo.bar", + imports=imports, + source_type="foo.bar.Message", + typing_compiler=typing_compiler, ) assert imports == set() assert name == '"Message"' -def test_reference_parent_package_from_child(): +def test_reference_parent_package_from_child( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="package.child", imports=imports, source_type="package.Message" + package="package.child", + imports=imports, + source_type="package.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ... import package as __package__"} assert name == '"__package__.Message"' -def test_reference_parent_package_from_deeply_nested_child(): +def test_reference_parent_package_from_deeply_nested_child( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( package="package.deeply.nested.child", imports=imports, source_type="package.deeply.nested.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ... import nested as __nested__"} assert name == '"__nested__.Message"' -def test_reference_ancestor_package_from_nested_child(): +def test_reference_ancestor_package_from_nested_child( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( package="package.ancestor.nested.child", imports=imports, source_type="package.ancestor.Message", + typing_compiler=typing_compiler, ) assert imports == {"from .... 
import ancestor as ___ancestor__"} assert name == '"___ancestor__.Message"' -def test_reference_root_package_from_child(): +def test_reference_root_package_from_child(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="package.child", imports=imports, source_type="Message" + package="package.child", + imports=imports, + source_type="Message", + typing_compiler=typing_compiler, ) assert imports == {"from ... import Message as __Message__"} assert name == '"__Message__"' -def test_reference_root_package_from_deeply_nested_child(): +def test_reference_root_package_from_deeply_nested_child( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="package.deeply.nested.child", imports=imports, source_type="Message" + package="package.deeply.nested.child", + imports=imports, + source_type="Message", + typing_compiler=typing_compiler, ) assert imports == {"from ..... import Message as ____Message__"} assert name == '"____Message__"' -def test_reference_unrelated_package(): +def test_reference_unrelated_package(typing_compiler: DirectImportTypingCompiler): imports = set() - name = get_type_reference(package="a", imports=imports, source_type="p.Message") + name = get_type_reference( + package="a", + imports=imports, + source_type="p.Message", + typing_compiler=typing_compiler, + ) assert imports == {"from .. 
import p as _p__"} assert name == '"_p__.Message"' -def test_reference_unrelated_nested_package(): +def test_reference_unrelated_nested_package( + typing_compiler: DirectImportTypingCompiler, +): imports = set() - name = get_type_reference(package="a.b", imports=imports, source_type="p.q.Message") + name = get_type_reference( + package="a.b", + imports=imports, + source_type="p.q.Message", + typing_compiler=typing_compiler, + ) assert imports == {"from ...p import q as __p_q__"} assert name == '"__p_q__.Message"' -def test_reference_unrelated_deeply_nested_package(): +def test_reference_unrelated_deeply_nested_package( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="a.b.c.d", imports=imports, source_type="p.q.r.s.Message" + package="a.b.c.d", + imports=imports, + source_type="p.q.r.s.Message", + typing_compiler=typing_compiler, ) assert imports == {"from .....p.q.r import s as ____p_q_r_s__"} assert name == '"____p_q_r_s__.Message"' -def test_reference_cousin_package(): +def test_reference_cousin_package(typing_compiler: DirectImportTypingCompiler): imports = set() - name = get_type_reference(package="a.x", imports=imports, source_type="a.y.Message") + name = get_type_reference( + package="a.x", + imports=imports, + source_type="a.y.Message", + typing_compiler=typing_compiler, + ) assert imports == {"from .. 
import y as _y__"} assert name == '"_y__.Message"' -def test_reference_cousin_package_different_name(): +def test_reference_cousin_package_different_name( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="test.package1", imports=imports, source_type="cousin.package2.Message" + package="test.package1", + imports=imports, + source_type="cousin.package2.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ...cousin import package2 as __cousin_package2__"} assert name == '"__cousin_package2__.Message"' -def test_reference_cousin_package_same_name(): +def test_reference_cousin_package_same_name( + typing_compiler: DirectImportTypingCompiler, +): imports = set() name = get_type_reference( - package="test.package", imports=imports, source_type="cousin.package.Message" + package="test.package", + imports=imports, + source_type="cousin.package.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ...cousin import package as __cousin_package__"} assert name == '"__cousin_package__.Message"' -def test_reference_far_cousin_package(): +def test_reference_far_cousin_package(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="a.x.y", imports=imports, source_type="a.b.c.Message" + package="a.x.y", + imports=imports, + source_type="a.b.c.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ...b import c as __b_c__"} assert name == '"__b_c__.Message"' -def test_reference_far_far_cousin_package(): +def test_reference_far_far_cousin_package(typing_compiler: DirectImportTypingCompiler): imports = set() name = get_type_reference( - package="a.x.y.z", imports=imports, source_type="a.b.c.d.Message" + package="a.x.y.z", + imports=imports, + source_type="a.b.c.d.Message", + typing_compiler=typing_compiler, ) assert imports == {"from ....b.c import d as ___b_c_d__"} diff --git a/tests/test_inputs.py b/tests/test_inputs.py index 
dbcf1975a..919bbc8cf 100644 --- a/tests/test_inputs.py +++ b/tests/test_inputs.py @@ -5,7 +5,13 @@ import sys from collections import namedtuple from types import ModuleType -from typing import Any, Dict, List, Set, Tuple +from typing import ( + Any, + Dict, + List, + Set, + Tuple, +) import pytest @@ -19,12 +25,11 @@ inputs_path, ) + # Force pure-python implementation instead of C++, otherwise imports # break things because we can't properly reset the symbol database. os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" -from google.protobuf import symbol_database -from google.protobuf.descriptor_pool import DescriptorPool from google.protobuf.json_format import Parse @@ -125,14 +130,9 @@ def dict_replace_nans(input_dict: Dict[Any, Any]) -> Dict[Any, Any]: @pytest.fixture -def test_data(request): +def test_data(request, reset_sys_path): test_case_name = request.param - # Reset the internal symbol database so we can import the `Test` message - # multiple times. Ugh. - sym = symbol_database.Default() - sym.pool = DescriptorPool() - reference_module_root = os.path.join( *reference_output_package.split("."), test_case_name ) @@ -158,8 +158,6 @@ def test_data(request): ) ) - sys.path.remove(reference_module_root) - @pytest.mark.parametrize("test_data", test_cases.messages, indirect=True) def test_message_can_instantiated(test_data: TestData) -> None: @@ -176,22 +174,21 @@ def test_message_equality(test_data: TestData) -> None: @pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) -def test_message_json(repeat, test_data: TestData) -> None: +def test_message_json(test_data: TestData) -> None: plugin_module, _, json_data = test_data - for _ in range(repeat): - for sample in json_data: - if sample.belongs_to(test_input_config.non_symmetrical_json): - continue + for sample in json_data: + if sample.belongs_to(test_input_config.non_symmetrical_json): + continue - message: betterproto.Message = plugin_module.Test() + message: 
betterproto.Message = plugin_module.Test() - message.from_json(sample.json) - message_json = message.to_json(0) + message.from_json(sample.json) + message_json = message.to_json(0) - assert dict_replace_nans(json.loads(message_json)) == dict_replace_nans( - json.loads(sample.json) - ) + assert dict_replace_nans(json.loads(message_json)) == dict_replace_nans( + json.loads(sample.json) + ) @pytest.mark.parametrize("test_data", test_cases.services, indirect=True) @@ -200,28 +197,27 @@ def test_service_can_be_instantiated(test_data: TestData) -> None: @pytest.mark.parametrize("test_data", test_cases.messages_with_json, indirect=True) -def test_binary_compatibility(repeat, test_data: TestData) -> None: +def test_binary_compatibility(test_data: TestData) -> None: plugin_module, reference_module, json_data = test_data for sample in json_data: reference_instance = Parse(sample.json, reference_module().Test()) reference_binary_output = reference_instance.SerializeToString() - for _ in range(repeat): - plugin_instance_from_json: betterproto.Message = ( - plugin_module.Test().from_json(sample.json) - ) - plugin_instance_from_binary = plugin_module.Test.FromString( - reference_binary_output - ) - - # Generally this can't be relied on, but here we are aiming to match the - # existing Python implementation and aren't doing anything tricky. 
- # https://developers.google.com/protocol-buffers/docs/encoding#implications - assert bytes(plugin_instance_from_json) == reference_binary_output - assert bytes(plugin_instance_from_binary) == reference_binary_output - - assert plugin_instance_from_json == plugin_instance_from_binary - assert dict_replace_nans( - plugin_instance_from_json.to_dict() - ) == dict_replace_nans(plugin_instance_from_binary.to_dict()) + plugin_instance_from_json: betterproto.Message = plugin_module.Test().from_json( + sample.json + ) + plugin_instance_from_binary = plugin_module.Test.FromString( + reference_binary_output + ) + + # Generally this can't be relied on, but here we are aiming to match the + # existing Python implementation and aren't doing anything tricky. + # https://developers.google.com/protocol-buffers/docs/encoding#implications + assert bytes(plugin_instance_from_json) == reference_binary_output + assert bytes(plugin_instance_from_binary) == reference_binary_output + + assert plugin_instance_from_json == plugin_instance_from_binary + assert dict_replace_nans( + plugin_instance_from_json.to_dict() + ) == dict_replace_nans(plugin_instance_from_binary.to_dict()) diff --git a/tests/test_mapmessage.py b/tests/test_mapmessage.py new file mode 100644 index 000000000..16bd6ce65 --- /dev/null +++ b/tests/test_mapmessage.py @@ -0,0 +1,18 @@ +from tests.output_betterproto.mapmessage import ( + Nested, + Test, +) + + +def test_mapmessage_to_dict_preserves_message(): + message = Test( + items={ + "test": Nested( + count=1, + ) + } + ) + + message.to_dict() + + assert isinstance(message.items["test"], Nested), "Wrong nested type after to_dict" diff --git a/tests/test_module_validation.py b/tests/test_module_validation.py new file mode 100644 index 000000000..9cae272bb --- /dev/null +++ b/tests/test_module_validation.py @@ -0,0 +1,111 @@ +from typing import ( + List, + Optional, + Set, +) + +import pytest + +from betterproto.plugin.module_validation import ModuleValidator + + 
+@pytest.mark.parametrize( + ["text", "expected_collisions"], + [ + pytest.param( + ["import os"], + None, + id="single import", + ), + pytest.param( + ["import os", "import sys"], + None, + id="multiple imports", + ), + pytest.param( + ["import os", "import os"], + {"os"}, + id="duplicate imports", + ), + pytest.param( + ["from os import path", "import os"], + None, + id="duplicate imports with alias", + ), + pytest.param( + ["from os import path", "import os as os_alias"], + None, + id="duplicate imports with alias", + ), + pytest.param( + ["from os import path", "import os as path"], + {"path"}, + id="duplicate imports with alias", + ), + pytest.param( + ["import os", "class os:"], + {"os"}, + id="duplicate import with class", + ), + pytest.param( + ["import os", "class os:", " pass", "import sys"], + {"os"}, + id="duplicate import with class and another", + ), + pytest.param( + ["def test(): pass", "class test:"], + {"test"}, + id="duplicate class and function", + ), + pytest.param( + ["def test(): pass", "def test(): pass"], + {"test"}, + id="duplicate functions", + ), + pytest.param( + ["def test(): pass", "test = 100"], + {"test"}, + id="function and variable", + ), + pytest.param( + ["def test():", " test = 3"], + None, + id="function and variable in function", + ), + pytest.param( + [ + "def test(): pass", + "'''", + "def test(): pass", + "'''", + "def test_2(): pass", + ], + None, + id="duplicate functions with multiline string", + ), + pytest.param( + ["def test(): pass", "# def test(): pass"], + None, + id="duplicate functions with comments", + ), + pytest.param( + ["from test import (", " A", " B", " C", ")"], + None, + id="multiline import", + ), + pytest.param( + ["from test import (", " A", " B", " C", ")", "from test import A"], + {"A"}, + id="multiline import with duplicate", + ), + ], +) +def test_module_validator(text: List[str], expected_collisions: Optional[Set[str]]): + line_iterator = iter(text) + validator = ModuleValidator(line_iterator) + 
valid = validator.validate() + if expected_collisions is None: + assert valid + else: + assert set(validator.collisions.keys()) == expected_collisions + assert not valid diff --git a/tests/test_pickling.py b/tests/test_pickling.py new file mode 100644 index 000000000..32478c843 --- /dev/null +++ b/tests/test_pickling.py @@ -0,0 +1,216 @@ +import pickle +from copy import ( + copy, + deepcopy, +) +from dataclasses import dataclass +from typing import ( + Dict, + List, +) +from unittest.mock import ANY + +import cachelib + +import betterproto +from betterproto.lib.google import protobuf as google + + +def unpickled(message): + return pickle.loads(pickle.dumps(message)) + + +@dataclass(eq=False, repr=False) +class Fe(betterproto.Message): + abc: str = betterproto.string_field(1) + + +@dataclass(eq=False, repr=False) +class Fi(betterproto.Message): + abc: str = betterproto.string_field(1) + + +@dataclass(eq=False, repr=False) +class Fo(betterproto.Message): + abc: str = betterproto.string_field(1) + + +@dataclass(eq=False, repr=False) +class NestedData(betterproto.Message): + struct_foo: Dict[str, "google.Struct"] = betterproto.map_field( + 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + map_str_any_bar: Dict[str, "google.Any"] = betterproto.map_field( + 2, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + + +@dataclass(eq=False, repr=False) +class Complex(betterproto.Message): + foo_str: str = betterproto.string_field(1) + fe: "Fe" = betterproto.message_field(3, group="grp") + fi: "Fi" = betterproto.message_field(4, group="grp") + fo: "Fo" = betterproto.message_field(5, group="grp") + nested_data: "NestedData" = betterproto.message_field(6) + mapping: Dict[str, "google.Any"] = betterproto.map_field( + 7, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + + +class BetterprotoEnum(betterproto.Enum): + UNSPECIFIED = 0 + ONE = 1 + + +def complex_msg(): + return Complex( + foo_str="yep", + fe=Fe(abc="1"), + nested_data=NestedData( + struct_foo={ + 
"foo": google.Struct( + fields={ + "hello": google.Value( + list_value=google.ListValue( + values=[google.Value(string_value="world")] + ) + ) + } + ), + }, + map_str_any_bar={ + "key": google.Any(value=b"value"), + }, + ), + mapping={ + "message": google.Any(value=bytes(Fi(abc="hi"))), + "string": google.Any(value=b"howdy"), + }, + ) + + +def test_pickling_complex_message(): + msg = complex_msg() + deser = unpickled(msg) + assert msg == deser + assert msg.fe.abc == "1" + assert msg.is_set("fi") is not True + assert msg.mapping["message"] == google.Any(value=bytes(Fi(abc="hi"))) + assert msg.mapping["string"].value.decode() == "howdy" + assert ( + msg.nested_data.struct_foo["foo"] + .fields["hello"] + .list_value.values[0] + .string_value + == "world" + ) + + +def test_recursive_message(): + from tests.output_betterproto.recursivemessage import Test as RecursiveMessage + + msg = RecursiveMessage() + msg = unpickled(msg) + + assert msg.child == RecursiveMessage() + + # Lazily-created zero-value children must not affect equality. + assert msg == RecursiveMessage() + + # Lazily-created zero-value children must not affect serialization. 
+    assert bytes(msg) == b""
+
+
+def test_recursive_message_defaults():
+    from tests.output_betterproto.recursivemessage import (
+        Intermediate,
+        Test as RecursiveMessage,
+    )
+
+    msg = RecursiveMessage(name="bob", intermediate=Intermediate(42))
+    msg = unpickled(msg)
+
+    # set values are as expected
+    assert msg == RecursiveMessage(name="bob", intermediate=Intermediate(42))
+
+    # mutating a lazily-initialized child field changes the message
+    assert msg != RecursiveMessage(
+        name="bob", intermediate=Intermediate(42), child=RecursiveMessage(name="jude")
+    )
+    msg.child.child.name = "jude"
+    assert msg == RecursiveMessage(
+        name="bob",
+        intermediate=Intermediate(42),
+        child=RecursiveMessage(child=RecursiveMessage(name="jude")),
+    )
+
+    # lazy initialization recurses as needed
+    assert msg.child.child.child.child.child.child.child == RecursiveMessage()
+    assert msg.intermediate.child.intermediate == Intermediate()
+
+
+@dataclass
+class PickledMessage(betterproto.Message):
+    foo: bool = betterproto.bool_field(1)
+    bar: int = betterproto.int32_field(2)
+    baz: List[str] = betterproto.string_field(3)
+
+
+def test_copyability():
+    msg = PickledMessage(bar=12, baz=["hello"])
+    msg = unpickled(msg)
+
+    copied = copy(msg)
+    assert msg == copied
+    assert msg is not copied
+    assert msg.baz is copied.baz
+
+    deepcopied = deepcopy(msg)
+    assert msg == deepcopied
+    assert msg is not deepcopied
+    assert msg.baz is not deepcopied.baz
+
+
+def test_message_can_be_cached():
+    """Cachelib uses pickling to cache values"""
+
+    cache = cachelib.SimpleCache()
+
+    def use_cache():
+        calls = getattr(use_cache, "calls", 0)
+        result = cache.get("message")
+        if result is not None:
+            return result
+        else:
+            setattr(use_cache, "calls", calls + 1)
+            result = complex_msg()
+            cache.set("message", result)
+            return result
+
+    for n in range(10):
+        if n == 0:
+            assert not cache.has("message")
+        else:
+            assert cache.has("message")
+
+        msg = use_cache()
+        assert use_cache.calls == 1  # The message is only ever 
built once + assert msg.fe.abc == "1" + assert msg.is_set("fi") is not True + assert msg.mapping["message"] == google.Any(value=bytes(Fi(abc="hi"))) + assert msg.mapping["string"].value.decode() == "howdy" + assert ( + msg.nested_data.struct_foo["foo"] + .fields["hello"] + .list_value.values[0] + .string_value + == "world" + ) + + +def test_pickle_enum(): + enum = BetterprotoEnum.ONE + assert unpickled(enum) == enum + + enum = BetterprotoEnum.UNSPECIFIED + assert unpickled(enum) == enum diff --git a/tests/test_streams.py b/tests/test_streams.py new file mode 100644 index 000000000..1ac3c4c96 --- /dev/null +++ b/tests/test_streams.py @@ -0,0 +1,434 @@ +from dataclasses import dataclass +from io import BytesIO +from pathlib import Path +from shutil import which +from subprocess import run +from typing import Optional + +import pytest + +import betterproto +from tests.output_betterproto import ( + map, + nested, + oneof, + repeated, + repeatedpacked, +) + + +oneof_example = oneof.Test().from_dict( + {"pitied": 1, "just_a_regular_field": 123456789, "bar_name": "Testing"} +) + +len_oneof = len(oneof_example) + +nested_example = nested.Test().from_dict( + { + "nested": {"count": 1}, + "sibling": {"foo": 2}, + "sibling2": {"foo": 3}, + "msg": nested.TestMsg.THIS, + } +) + +repeated_example = repeated.Test().from_dict({"names": ["blah", "Blah2"]}) + +packed_example = repeatedpacked.Test().from_dict( + {"counts": [1, 2, 3], "signed": [-1, 2, -3], "fixed": [1.2, -2.3, 3.4]} +) + +map_example = map.Test().from_dict({"counts": {"blah": 1, "Blah2": 2}}) + +streams_path = Path("tests/streams/") + +java = which("java") + + +def test_load_varint_too_long(): + with BytesIO( + b"\x80\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01" + ) as stream, pytest.raises(ValueError): + betterproto.load_varint(stream) + + with BytesIO(b"\x80\x80\x80\x80\x80\x80\x80\x80\x80\x01") as stream: + # This should not raise a ValueError, as it is within 64 bits + betterproto.load_varint(stream) + + +def 
test_load_varint_file(): + with open(streams_path / "message_dump_file_single.expected", "rb") as stream: + assert betterproto.load_varint(stream) == (8, b"\x08") # Single-byte varint + stream.read(2) # Skip until first multi-byte + assert betterproto.load_varint(stream) == ( + 123456789, + b"\x95\x9a\xef\x3a", + ) # Multi-byte varint + + +def test_load_varint_cutoff(): + with open(streams_path / "load_varint_cutoff.in", "rb") as stream: + with pytest.raises(EOFError): + betterproto.load_varint(stream) + + stream.seek(1) + with pytest.raises(EOFError): + betterproto.load_varint(stream) + + +def test_dump_varint_file(tmp_path): + # Dump test varints to file + with open(tmp_path / "dump_varint_file.out", "wb") as stream: + betterproto.dump_varint(8, stream) # Single-byte varint + betterproto.dump_varint(123456789, stream) # Multi-byte varint + + # Check that file contents are as expected + with open(tmp_path / "dump_varint_file.out", "rb") as test_stream, open( + streams_path / "message_dump_file_single.expected", "rb" + ) as exp_stream: + assert betterproto.load_varint(test_stream) == betterproto.load_varint( + exp_stream + ) + exp_stream.read(2) + assert betterproto.load_varint(test_stream) == betterproto.load_varint( + exp_stream + ) + + +def test_parse_fields(): + with open(streams_path / "message_dump_file_single.expected", "rb") as stream: + parsed_bytes = betterproto.parse_fields(stream.read()) + + with open(streams_path / "message_dump_file_single.expected", "rb") as stream: + parsed_stream = betterproto.load_fields(stream) + for field in parsed_bytes: + assert field == next(parsed_stream) + + +def test_message_dump_file_single(tmp_path): + # Write the message to the stream + with open(tmp_path / "message_dump_file_single.out", "wb") as stream: + oneof_example.dump(stream) + + # Check that the outputted file is exactly as expected + with open(tmp_path / "message_dump_file_single.out", "rb") as test_stream, open( + streams_path / 
"message_dump_file_single.expected", "rb" + ) as exp_stream: + assert test_stream.read() == exp_stream.read() + + +def test_message_dump_file_multiple(tmp_path): + # Write the same Message twice and another, different message + with open(tmp_path / "message_dump_file_multiple.out", "wb") as stream: + oneof_example.dump(stream) + oneof_example.dump(stream) + nested_example.dump(stream) + + # Check that all three Messages were outputted to the file correctly + with open(tmp_path / "message_dump_file_multiple.out", "rb") as test_stream, open( + streams_path / "message_dump_file_multiple.expected", "rb" + ) as exp_stream: + assert test_stream.read() == exp_stream.read() + + +def test_message_dump_delimited(tmp_path): + with open(tmp_path / "message_dump_delimited.out", "wb") as stream: + oneof_example.dump(stream, betterproto.SIZE_DELIMITED) + oneof_example.dump(stream, betterproto.SIZE_DELIMITED) + nested_example.dump(stream, betterproto.SIZE_DELIMITED) + + with open(tmp_path / "message_dump_delimited.out", "rb") as test_stream, open( + streams_path / "delimited_messages.in", "rb" + ) as exp_stream: + assert test_stream.read() == exp_stream.read() + + +def test_message_len(): + assert len_oneof == len(bytes(oneof_example)) + assert len(nested_example) == len(bytes(nested_example)) + + +def test_message_load_file_single(): + with open(streams_path / "message_dump_file_single.expected", "rb") as stream: + assert oneof.Test().load(stream) == oneof_example + stream.seek(0) + assert oneof.Test().load(stream, len_oneof) == oneof_example + + +def test_message_load_file_multiple(): + with open(streams_path / "message_dump_file_multiple.expected", "rb") as stream: + oneof_size = len_oneof + assert oneof.Test().load(stream, oneof_size) == oneof_example + assert oneof.Test().load(stream, oneof_size) == oneof_example + assert nested.Test().load(stream) == nested_example + assert stream.read(1) == b"" + + +def test_message_load_too_small(): + with open( + streams_path / 
"message_dump_file_single.expected", "rb" + ) as stream, pytest.raises(ValueError): + oneof.Test().load(stream, len_oneof - 1) + + +def test_message_load_delimited(): + with open(streams_path / "delimited_messages.in", "rb") as stream: + assert oneof.Test().load(stream, betterproto.SIZE_DELIMITED) == oneof_example + assert oneof.Test().load(stream, betterproto.SIZE_DELIMITED) == oneof_example + assert nested.Test().load(stream, betterproto.SIZE_DELIMITED) == nested_example + assert stream.read(1) == b"" + + +def test_message_load_too_large(): + with open( + streams_path / "message_dump_file_single.expected", "rb" + ) as stream, pytest.raises(ValueError): + oneof.Test().load(stream, len_oneof + 1) + + +def test_message_len_optional_field(): + @dataclass + class Request(betterproto.Message): + flag: Optional[bool] = betterproto.message_field(1, wraps=betterproto.TYPE_BOOL) + + assert len(Request()) == len(b"") + assert len(Request(flag=True)) == len(b"\n\x02\x08\x01") + assert len(Request(flag=False)) == len(b"\n\x00") + + +def test_message_len_repeated_field(): + assert len(repeated_example) == len(bytes(repeated_example)) + + +def test_message_len_packed_field(): + assert len(packed_example) == len(bytes(packed_example)) + + +def test_message_len_map_field(): + assert len(map_example) == len(bytes(map_example)) + + +def test_message_len_empty_string(): + @dataclass + class Empty(betterproto.Message): + string: str = betterproto.string_field(1, "group") + integer: int = betterproto.int32_field(2, "group") + + empty = Empty().from_dict({"string": ""}) + assert len(empty) == len(bytes(empty)) + + +def test_calculate_varint_size_negative(): + single_byte = -1 + multi_byte = -10000000 + edge = -(1 << 63) + beyond = -(1 << 63) - 1 + before = -(1 << 63) + 1 + + assert ( + betterproto.size_varint(single_byte) + == len(betterproto.encode_varint(single_byte)) + == 10 + ) + assert ( + betterproto.size_varint(multi_byte) + == len(betterproto.encode_varint(multi_byte)) + == 10 
+ ) + assert betterproto.size_varint(edge) == len(betterproto.encode_varint(edge)) == 10 + assert ( + betterproto.size_varint(before) == len(betterproto.encode_varint(before)) == 10 + ) + + with pytest.raises(ValueError): + betterproto.size_varint(beyond) + + +def test_calculate_varint_size_positive(): + single_byte = 1 + multi_byte = 10000000 + + assert betterproto.size_varint(single_byte) == len( + betterproto.encode_varint(single_byte) + ) + assert betterproto.size_varint(multi_byte) == len( + betterproto.encode_varint(multi_byte) + ) + + +def test_dump_varint_negative(tmp_path): + single_byte = -1 + multi_byte = -10000000 + edge = -(1 << 63) + beyond = -(1 << 63) - 1 + before = -(1 << 63) + 1 + + with open(tmp_path / "dump_varint_negative.out", "wb") as stream: + betterproto.dump_varint(single_byte, stream) + betterproto.dump_varint(multi_byte, stream) + betterproto.dump_varint(edge, stream) + betterproto.dump_varint(before, stream) + + with pytest.raises(ValueError): + betterproto.dump_varint(beyond, stream) + + with open(streams_path / "dump_varint_negative.expected", "rb") as exp_stream, open( + tmp_path / "dump_varint_negative.out", "rb" + ) as test_stream: + assert test_stream.read() == exp_stream.read() + + +def test_dump_varint_positive(tmp_path): + single_byte = 1 + multi_byte = 10000000 + + with open(tmp_path / "dump_varint_positive.out", "wb") as stream: + betterproto.dump_varint(single_byte, stream) + betterproto.dump_varint(multi_byte, stream) + + with open(tmp_path / "dump_varint_positive.out", "rb") as test_stream, open( + streams_path / "dump_varint_positive.expected", "rb" + ) as exp_stream: + assert test_stream.read() == exp_stream.read() + + +# Java compatibility tests + + +@pytest.fixture(scope="module") +def compile_jar(): + # Skip if not all required tools are present + if java is None: + pytest.skip("`java` command is absent and is required") + mvn = which("mvn") + if mvn is None: + pytest.skip("Maven is absent and is required") + + # 
Compile the JAR + proc_maven = run([mvn, "clean", "install", "-f", "tests/streams/java/pom.xml"]) + if proc_maven.returncode != 0: + pytest.skip( + "Maven compatibility-test.jar build failed (maybe Java version <11?)" + ) + + +jar = "tests/streams/java/target/compatibility-test.jar" + + +def run_jar(command: str, tmp_path): + return run([java, "-jar", jar, command, tmp_path], check=True) + + +def run_java_single_varint(value: int, tmp_path) -> int: + # Write single varint to file + with open(tmp_path / "py_single_varint.out", "wb") as stream: + betterproto.dump_varint(value, stream) + + # Have Java read this varint and write it back + run_jar("single_varint", tmp_path) + + # Read single varint from Java output file + with open(tmp_path / "java_single_varint.out", "rb") as stream: + returned = betterproto.load_varint(stream) + with pytest.raises(EOFError): + betterproto.load_varint(stream) + + return returned + + +def test_single_varint(compile_jar, tmp_path): + single_byte = (1, b"\x01") + multi_byte = (123456789, b"\x95\x9a\xef\x3a") + + # Write a single-byte varint to a file and have Java read it back + returned = run_java_single_varint(single_byte[0], tmp_path) + assert returned == single_byte + + # Same for a multi-byte varint + returned = run_java_single_varint(multi_byte[0], tmp_path) + assert returned == multi_byte + + +def test_multiple_varints(compile_jar, tmp_path): + single_byte = (1, b"\x01") + multi_byte = (123456789, b"\x95\x9a\xef\x3a") + over32 = (3000000000, b"\x80\xbc\xc1\x96\x0b") + + # Write two varints to the same file + with open(tmp_path / "py_multiple_varints.out", "wb") as stream: + betterproto.dump_varint(single_byte[0], stream) + betterproto.dump_varint(multi_byte[0], stream) + betterproto.dump_varint(over32[0], stream) + + # Have Java read these varints and write them back + run_jar("multiple_varints", tmp_path) + + # Read varints from Java output file + with open(tmp_path / "java_multiple_varints.out", "rb") as stream: + returned_single 
= betterproto.load_varint(stream) + returned_multi = betterproto.load_varint(stream) + returned_over32 = betterproto.load_varint(stream) + with pytest.raises(EOFError): + betterproto.load_varint(stream) + + assert returned_single == single_byte + assert returned_multi == multi_byte + assert returned_over32 == over32 + + +def test_single_message(compile_jar, tmp_path): + # Write message to file + with open(tmp_path / "py_single_message.out", "wb") as stream: + oneof_example.dump(stream) + + # Have Java read and return the message + run_jar("single_message", tmp_path) + + # Read and check the returned message + with open(tmp_path / "java_single_message.out", "rb") as stream: + returned = oneof.Test().load(stream, len(bytes(oneof_example))) + assert stream.read() == b"" + + assert returned == oneof_example + + +def test_multiple_messages(compile_jar, tmp_path): + # Write delimited messages to file + with open(tmp_path / "py_multiple_messages.out", "wb") as stream: + oneof_example.dump(stream, betterproto.SIZE_DELIMITED) + nested_example.dump(stream, betterproto.SIZE_DELIMITED) + + # Have Java read and return the messages + run_jar("multiple_messages", tmp_path) + + # Read and check the returned messages + with open(tmp_path / "java_multiple_messages.out", "rb") as stream: + returned_oneof = oneof.Test().load(stream, betterproto.SIZE_DELIMITED) + returned_nested = nested.Test().load(stream, betterproto.SIZE_DELIMITED) + assert stream.read() == b"" + + assert returned_oneof == oneof_example + assert returned_nested == nested_example + + +def test_infinite_messages(compile_jar, tmp_path): + num_messages = 5 + + # Write delimited messages to file + with open(tmp_path / "py_infinite_messages.out", "wb") as stream: + for x in range(num_messages): + oneof_example.dump(stream, betterproto.SIZE_DELIMITED) + + # Have Java read and return the messages + run_jar("infinite_messages", tmp_path) + + # Read and check the returned messages + messages = [] + with open(tmp_path / 
"java_infinite_messages.out", "rb") as stream: + while True: + try: + messages.append(oneof.Test().load(stream, betterproto.SIZE_DELIMITED)) + except EOFError: + break + + assert len(messages) == num_messages diff --git a/tests/test_struct.py b/tests/test_struct.py new file mode 100644 index 000000000..6376ea45e --- /dev/null +++ b/tests/test_struct.py @@ -0,0 +1,36 @@ +import json + +from betterproto.lib.google.protobuf import Struct +from betterproto.lib.pydantic.google.protobuf import Struct as StructPydantic + + +def test_struct_roundtrip(): + data = { + "foo": "bar", + "baz": None, + "quux": 123, + "zap": [1, {"two": 3}, "four"], + } + data_json = json.dumps(data) + + struct_from_dict = Struct().from_dict(data) + assert struct_from_dict.fields == data + assert struct_from_dict.to_dict() == data + assert struct_from_dict.to_json() == data_json + + struct_from_json = Struct().from_json(data_json) + assert struct_from_json.fields == data + assert struct_from_json.to_dict() == data + assert struct_from_json == struct_from_dict + assert struct_from_json.to_json() == data_json + + struct_pyd_from_dict = StructPydantic(fields={}).from_dict(data) + assert struct_pyd_from_dict.fields == data + assert struct_pyd_from_dict.to_dict() == data + assert struct_pyd_from_dict.to_json() == data_json + + struct_pyd_from_dict = StructPydantic(fields={}).from_json(data_json) + assert struct_pyd_from_dict.fields == data + assert struct_pyd_from_dict.to_dict() == data + assert struct_pyd_from_dict == struct_pyd_from_dict + assert struct_pyd_from_dict.to_json() == data_json diff --git a/tests/test_timestamp.py b/tests/test_timestamp.py new file mode 100644 index 000000000..422738ffb --- /dev/null +++ b/tests/test_timestamp.py @@ -0,0 +1,27 @@ +from datetime import ( + datetime, + timezone, +) + +import pytest + +from betterproto import _Timestamp + + +@pytest.mark.parametrize( + "dt", + [ + datetime(2023, 10, 11, 9, 41, 12, tzinfo=timezone.utc), + datetime.now(timezone.utc), + # 
potential issue with floating point precision: + datetime(2242, 12, 31, 23, 0, 0, 1, tzinfo=timezone.utc), + # potential issue with negative timestamps: + datetime(1969, 12, 31, 23, 0, 0, 1, tzinfo=timezone.utc), + ], +) +def test_timestamp_to_datetime_and_back(dt: datetime): + """ + Make sure converting a datetime to a protobuf timestamp message + and then back again ends up with the same datetime. + """ + assert _Timestamp.from_datetime(dt).to_datetime() == dt diff --git a/tests/test_typing_compiler.py b/tests/test_typing_compiler.py new file mode 100644 index 000000000..ee17449b5 --- /dev/null +++ b/tests/test_typing_compiler.py @@ -0,0 +1,78 @@ +import pytest + +from betterproto.plugin.typing_compiler import ( + DirectImportTypingCompiler, + NoTyping310TypingCompiler, + TypingImportTypingCompiler, +) + + +def test_direct_import_typing_compiler(): + compiler = DirectImportTypingCompiler() + assert compiler.imports() == {} + assert compiler.optional("str") == "Optional[str]" + assert compiler.imports() == {"typing": {"Optional"}} + assert compiler.list("str") == "List[str]" + assert compiler.imports() == {"typing": {"Optional", "List"}} + assert compiler.dict("str", "int") == "Dict[str, int]" + assert compiler.imports() == {"typing": {"Optional", "List", "Dict"}} + assert compiler.union("str", "int") == "Union[str, int]" + assert compiler.imports() == {"typing": {"Optional", "List", "Dict", "Union"}} + assert compiler.iterable("str") == "Iterable[str]" + assert compiler.imports() == { + "typing": {"Optional", "List", "Dict", "Union", "Iterable"} + } + assert compiler.async_iterable("str") == "AsyncIterable[str]" + assert compiler.imports() == { + "typing": {"Optional", "List", "Dict", "Union", "Iterable", "AsyncIterable"} + } + assert compiler.async_iterator("str") == "AsyncIterator[str]" + assert compiler.imports() == { + "typing": { + "Optional", + "List", + "Dict", + "Union", + "Iterable", + "AsyncIterable", + "AsyncIterator", + } + } + + +def 
test_typing_import_typing_compiler(): + compiler = TypingImportTypingCompiler() + assert compiler.imports() == {} + assert compiler.optional("str") == "typing.Optional[str]" + assert compiler.imports() == {"typing": None} + assert compiler.list("str") == "typing.List[str]" + assert compiler.imports() == {"typing": None} + assert compiler.dict("str", "int") == "typing.Dict[str, int]" + assert compiler.imports() == {"typing": None} + assert compiler.union("str", "int") == "typing.Union[str, int]" + assert compiler.imports() == {"typing": None} + assert compiler.iterable("str") == "typing.Iterable[str]" + assert compiler.imports() == {"typing": None} + assert compiler.async_iterable("str") == "typing.AsyncIterable[str]" + assert compiler.imports() == {"typing": None} + assert compiler.async_iterator("str") == "typing.AsyncIterator[str]" + assert compiler.imports() == {"typing": None} + + +def test_no_typing_311_typing_compiler(): + compiler = NoTyping310TypingCompiler() + assert compiler.imports() == {} + assert compiler.optional("str") == '"str | None"' + assert compiler.imports() == {} + assert compiler.list("str") == '"list[str]"' + assert compiler.imports() == {} + assert compiler.dict("str", "int") == '"dict[str, int]"' + assert compiler.imports() == {} + assert compiler.union("str", "int") == '"str | int"' + assert compiler.imports() == {} + assert compiler.iterable("str") == '"Iterable[str]"' + assert compiler.async_iterable("str") == '"AsyncIterable[str]"' + assert compiler.async_iterator("str") == '"AsyncIterator[str]"' + assert compiler.imports() == { + "collections.abc": {"Iterable", "AsyncIterable", "AsyncIterator"} + } diff --git a/tests/test_version.py b/tests/test_version.py index 05fe79efe..09bc115e7 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -1,13 +1,16 @@ -from betterproto import __version__ from pathlib import Path + import tomlkit +from betterproto import __version__ + + PROJECT_TOML = Path(__file__).joinpath("..", "..", 
"pyproject.toml").resolve() def test_version(): with PROJECT_TOML.open() as toml_file: project_config = tomlkit.loads(toml_file.read()) - assert ( - __version__ == project_config["tool"]["poetry"]["version"] - ), "Project version should match in package and package config" + assert __version__ == project_config["project"]["version"], ( + "Project version should match in package and package config" + ) diff --git a/tests/util.py b/tests/util.py index 950cf7af7..22c4f9012 100644 --- a/tests/util.py +++ b/tests/util.py @@ -1,11 +1,23 @@ import asyncio -from dataclasses import dataclass +import atexit import importlib import os -from pathlib import Path +import platform import sys +import tempfile +from dataclasses import dataclass +from pathlib import Path from types import ModuleType -from typing import Callable, Dict, Generator, List, Optional, Tuple, Union +from typing import ( + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Union, +) + os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python" @@ -13,6 +25,7 @@ inputs_path = root_path.joinpath("inputs") output_path_reference = root_path.joinpath("output_reference") output_path_betterproto = root_path.joinpath("output_betterproto") +output_path_betterproto_pydantic = root_path.joinpath("output_betterproto_pydantic") def get_files(path, suffix: str) -> Generator[str, None, None]: @@ -27,19 +40,56 @@ def get_directories(path): async def protoc( - path: Union[str, Path], output_dir: Union[str, Path], reference: bool = False + path: Union[str, Path], + output_dir: Union[str, Path], + reference: bool = False, + pydantic_dataclasses: bool = False, ): path: Path = Path(path).resolve() output_dir: Path = Path(output_dir).resolve() python_out_option: str = "python_betterproto_out" if not reference else "python_out" - command = [ - sys.executable, - "-m", - "grpc.tools.protoc", - f"--proto_path={path.as_posix()}", - f"--{python_out_option}={output_dir.as_posix()}", - *[p.as_posix() for p in 
path.glob("*.proto")], - ] + + if pydantic_dataclasses: + plugin_path = Path("src/betterproto/plugin/main.py") + + if "Win" in platform.system(): + with tempfile.NamedTemporaryFile( + "w", encoding="UTF-8", suffix=".bat", delete=False + ) as tf: + # See https://stackoverflow.com/a/42622705 + tf.writelines( + [ + "@echo off", + f"\nchdir {os.getcwd()}", + f"\n{sys.executable} -u {plugin_path.as_posix()}", + ] + ) + + tf.flush() + + plugin_path = Path(tf.name) + atexit.register(os.remove, plugin_path) + + command = [ + sys.executable, + "-m", + "grpc.tools.protoc", + f"--plugin=protoc-gen-custom={plugin_path.as_posix()}", + "--experimental_allow_proto3_optional", + "--custom_opt=pydantic_dataclasses", + f"--proto_path={path.as_posix()}", + f"--custom_out={output_dir.as_posix()}", + *[p.as_posix() for p in path.glob("*.proto")], + ] + else: + command = [ + sys.executable, + "-m", + "grpc.tools.protoc", + f"--proto_path={path.as_posix()}", + f"--{python_out_option}={output_dir.as_posix()}", + *[p.as_posix() for p in path.glob("*.proto")], + ] proc = await asyncio.create_subprocess_exec( *command, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE )