diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index f1a6ae47..107bf9e7 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -7,9 +7,9 @@ jobs:
lint:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: '3.11'
cache: 'pip'
@@ -22,13 +22,13 @@ jobs:
test:
strategy:
matrix:
- python: ['3.7', '3.8', '3.9', '3.10', '3.11']
+ python: ['3.8', '3.9', '3.10', '3.11']
os: [ubuntu-latest, windows-latest, macos-latest]
runs-on: ${{ matrix.os }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Setup Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python }}
cache: 'pip'
diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml
index 56bbf66a..2b1dbf0c 100644
--- a/.github/workflows/pypi-release.yml
+++ b/.github/workflows/pypi-release.yml
@@ -12,15 +12,16 @@ jobs:
name: Build source distribution
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Build SDist and wheel
run: pipx run build
- - uses: actions/upload-artifact@v3
+ - uses: actions/upload-artifact@v4
with:
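+          # give the artifact an explicit name so that download-artifact@v4
+          # below unpacks it straight into dist/ without an extra parent dir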
+ name: artifact
path: dist/*
- name: Check metadata
@@ -30,7 +31,7 @@ jobs:
if: github.event_name == 'push'
needs: [ build_dist ]
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
@@ -40,7 +41,7 @@ jobs:
cache: 'pip'
- name: Install build dependencies
run: pip install -U setuptools wheel build
- - uses: actions/download-artifact@v3
+ - uses: actions/download-artifact@v4
with:
# unpacks default artifact into dist/
# if `name: artifact` is omitted, the action will create extra parent dir
@@ -51,6 +52,7 @@ jobs:
with:
user: __token__
password: ${{ secrets.pypi_password }}
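+          # PyPI attestations require Trusted Publishing; this job
+          # authenticates with an API token, so they are disabled explicitly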
+ attestations: false
- name: Install GitPython and cloudevents for pypi_packaging
run: pip install -U -r requirements/publish.txt
- name: Create Tag
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 091a3557..75ad2ef1 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,22 +1,22 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.4.0
+ rev: v4.5.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-toml
- repo: https://github.com/pycqa/isort
- rev: 5.12.0
+ rev: 5.13.2
hooks:
- id: isort
args: [ "--profile", "black", "--filter-files" ]
- repo: https://github.com/psf/black
- rev: 23.7.0
+ rev: 24.4.2
hooks:
- id: black
- language_version: python3.10
+ language_version: python3.11
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: "v1.5.1"
+ rev: v1.10.0
hooks:
- id: mypy
files: ^(cloudevents/)
@@ -24,4 +24,4 @@ repos:
types: [ python ]
args: [ ]
additional_dependencies:
- - 'pydantic'
+ - "pydantic~=2.7"
diff --git a/CHANGELOG.md b/CHANGELOG.md
index c025b6bf..458a1dd7 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,6 +6,32 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
+## [1.11.1]
+
+### Fixed
+- Kafka `conversion` marshaller and unmarshaller typings ([#240])
+
+## [1.11.0]
+
+### Fixed
+- Pydantic v2 `examples` keyword usage and improved typings handling ([#235])
+- Kafka `to_binary` check for invalid `content-type` attribute ([#232])
+
+### Changed
+
+- Dropped Python 3.7 from CI since it reached end of life. ([#236])
+
+## [1.10.1]
+
+### Fixed
+- Fixed Pydantic v2 `to_json` (and `to_structured`) conversion ([#229])
+
+## [1.10.0] — 2023-09-25
+### Added
+- Pydantic v2 support. ([#219])
+- Pydantic v2 to v1 compatibility layer. ([#218])
+- Governance docs per main CE discussions. ([#221])
+
## [1.9.0] — 2023-01-04
### Added
- Added typings to the codebase. ([#207])
@@ -179,6 +205,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Initial release
+[1.11.1]: https://github.com/cloudevents/sdk-python/compare/1.11.0...1.11.1
+[1.11.0]: https://github.com/cloudevents/sdk-python/compare/1.10.1...1.11.0
+[1.10.1]: https://github.com/cloudevents/sdk-python/compare/1.10.0...1.10.1
+[1.10.0]: https://github.com/cloudevents/sdk-python/compare/1.9.0...1.10.0
[1.9.0]: https://github.com/cloudevents/sdk-python/compare/1.8.0...1.9.0
[1.8.0]: https://github.com/cloudevents/sdk-python/compare/1.7.0...1.8.0
[1.7.1]: https://github.com/cloudevents/sdk-python/compare/1.7.0...1.7.1
@@ -256,3 +285,11 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
[#207]: https://github.com/cloudevents/sdk-python/pull/207
[#208]: https://github.com/cloudevents/sdk-python/pull/208
[#209]: https://github.com/cloudevents/sdk-python/pull/209
+[#218]: https://github.com/cloudevents/sdk-python/pull/218
+[#219]: https://github.com/cloudevents/sdk-python/pull/219
+[#221]: https://github.com/cloudevents/sdk-python/pull/221
+[#229]: https://github.com/cloudevents/sdk-python/pull/229
+[#232]: https://github.com/cloudevents/sdk-python/pull/232
+[#235]: https://github.com/cloudevents/sdk-python/pull/235
+[#236]: https://github.com/cloudevents/sdk-python/pull/236
+[#240]: https://github.com/cloudevents/sdk-python/pull/240
diff --git a/MAINTAINERS.md b/MAINTAINERS.md
new file mode 100644
index 00000000..619a34c5
--- /dev/null
+++ b/MAINTAINERS.md
@@ -0,0 +1,9 @@
+# Maintainers
+
+Current active maintainers of this SDK:
+
+- [Grant Timmerman](https://github.com/grant)
+- [Denys Makogon](https://github.com/denismakogon)
+- [Curtis Mason](https://github.com/cumason123)
+- [Claudio Canales](https://github.com/Klaudioz)
+- [Yurii Serhiichuk](https://github.com/xSAVIKx)
diff --git a/OWNERS b/OWNERS
deleted file mode 100644
index 6d9a2c48..00000000
--- a/OWNERS
+++ /dev/null
@@ -1,6 +0,0 @@
-admins:
- - grant
- - denismakogon
- - cumason123
- - Klaudioz
- - xSAVIKx
diff --git a/README.md b/README.md
index 1103468e..abcf5cbf 100644
--- a/README.md
+++ b/README.md
@@ -149,6 +149,17 @@ for how PR reviews and approval, and our
[Code of Conduct](https://github.com/cloudevents/spec/blob/main/docs/GOVERNANCE.md#additional-information)
information.
+If there is a security concern with one of the CloudEvents specifications, or
+with one of the project's SDKs, please send an email to
+[cncf-cloudevents-security@lists.cncf.io](mailto:cncf-cloudevents-security@lists.cncf.io).
+
+## Additional SDK Resources
+
+- [List of current active maintainers](MAINTAINERS.md)
+- [How to contribute to the project](CONTRIBUTING.md)
+- [SDK's License](LICENSE)
+- [SDK's Release process](RELEASING.md)
+
## Maintenance
We use [black][black] and [isort][isort] for autoformatting. We set up a [tox][tox]
diff --git a/cloudevents/__init__.py b/cloudevents/__init__.py
index 3b98aa8b..d332910d 100644
--- a/cloudevents/__init__.py
+++ b/cloudevents/__init__.py
@@ -12,4 +12,4 @@
# License for the specific language governing permissions and limitations
# under the License.
-__version__ = "1.9.0"
+__version__ = "1.11.1"
diff --git a/cloudevents/kafka/conversion.py b/cloudevents/kafka/conversion.py
index 832594d1..bfddca61 100644
--- a/cloudevents/kafka/conversion.py
+++ b/cloudevents/kafka/conversion.py
@@ -21,9 +21,14 @@
from cloudevents.kafka.exceptions import KeyMapperError
from cloudevents.sdk import types
-DEFAULT_MARSHALLER: types.MarshallerType = json.dumps
-DEFAULT_UNMARSHALLER: types.MarshallerType = json.loads
-DEFAULT_EMBEDDED_DATA_MARSHALLER: types.MarshallerType = lambda x: x
+JSON_MARSHALLER: types.MarshallerType = json.dumps
+JSON_UNMARSHALLER: types.UnmarshallerType = json.loads
+IDENTITY_MARSHALLER = IDENTITY_UNMARSHALLER = lambda x: x
+
+DEFAULT_MARSHALLER: types.MarshallerType = JSON_MARSHALLER
+DEFAULT_UNMARSHALLER: types.UnmarshallerType = JSON_UNMARSHALLER
+DEFAULT_EMBEDDED_DATA_MARSHALLER: types.MarshallerType = IDENTITY_MARSHALLER
+DEFAULT_EMBEDDED_DATA_UNMARSHALLER: types.UnmarshallerType = IDENTITY_UNMARSHALLER
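+
+# A sketch of overriding these defaults: to_structured(event) implicitly
+# marshals the envelope with json.dumps, and from_structured(message,
+# envelope_unmarshaller=json.loads) is the implicit inverse; custom callables
+# may be passed for non-JSON envelopes or pre-encoded embedded data.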
class KafkaMessage(typing.NamedTuple):
@@ -87,10 +92,10 @@ def to_binary(
)
headers = {}
- if event["content-type"]:
- headers["content-type"] = event["content-type"].encode("utf-8")
+ if event["datacontenttype"]:
+ headers["content-type"] = event["datacontenttype"].encode("utf-8")
for attr, value in event.get_attributes().items():
- if attr not in ["data", "partitionkey", "content-type"]:
+ if attr not in ["data", "partitionkey", "datacontenttype"]:
if value is not None:
headers["ce_{0}".format(attr)] = value.encode("utf-8")
@@ -109,7 +114,7 @@ def to_binary(
def from_binary(
message: KafkaMessage,
event_type: typing.Optional[typing.Type[AnyCloudEvent]] = None,
- data_unmarshaller: typing.Optional[types.MarshallerType] = None,
+ data_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
) -> AnyCloudEvent:
"""
Returns a CloudEvent from a KafkaMessage in binary format.
@@ -126,7 +131,7 @@ def from_binary(
for header, value in message.headers.items():
header = header.lower()
if header == "content-type":
- attributes["content-type"] = value.decode()
+ attributes["datacontenttype"] = value.decode()
elif header.startswith("ce_"):
attributes[header[3:]] = value.decode()
@@ -189,8 +194,8 @@ def to_structured(
attrs["data"] = data
headers = {}
- if "content-type" in attrs:
- headers["content-type"] = attrs.pop("content-type").encode("utf-8")
+ if "datacontenttype" in attrs:
+ headers["content-type"] = attrs.pop("datacontenttype").encode("utf-8")
try:
value = envelope_marshaller(attrs)
@@ -208,7 +213,7 @@ def to_structured(
def from_structured(
message: KafkaMessage,
event_type: typing.Optional[typing.Type[AnyCloudEvent]] = None,
- data_unmarshaller: typing.Optional[types.MarshallerType] = None,
+ data_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
envelope_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
) -> AnyCloudEvent:
"""
@@ -222,7 +227,7 @@ def from_structured(
:returns: CloudEvent
"""
- data_unmarshaller = data_unmarshaller or DEFAULT_EMBEDDED_DATA_MARSHALLER
+ data_unmarshaller = data_unmarshaller or DEFAULT_EMBEDDED_DATA_UNMARSHALLER
envelope_unmarshaller = envelope_unmarshaller or DEFAULT_UNMARSHALLER
try:
structure = envelope_unmarshaller(message.value)
@@ -255,7 +260,10 @@ def from_structured(
attributes[name] = decoded_value
for header, val in message.headers.items():
- attributes[header.lower()] = val.decode()
+ if header.lower() == "content-type":
+ attributes["datacontenttype"] = val.decode()
+ else:
+ attributes[header.lower()] = val.decode()
if event_type:
result = event_type.create(attributes, data)
else:
diff --git a/cloudevents/pydantic/__init__.py b/cloudevents/pydantic/__init__.py
index e1dd9b5b..f8556ca1 100644
--- a/cloudevents/pydantic/__init__.py
+++ b/cloudevents/pydantic/__init__.py
@@ -11,7 +11,37 @@
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
-from cloudevents.pydantic.conversion import from_dict, from_http, from_json
-from cloudevents.pydantic.event import CloudEvent
+
+from typing import TYPE_CHECKING
+
+from cloudevents.exceptions import PydanticFeatureNotInstalled
+
+try:
+ if TYPE_CHECKING:
+ from cloudevents.pydantic.v2 import CloudEvent, from_dict, from_http, from_json
+ else:
+ from pydantic import VERSION as PYDANTIC_VERSION
+
+ pydantic_major_version = PYDANTIC_VERSION.split(".")[0]
+ if pydantic_major_version == "1":
+ from cloudevents.pydantic.v1 import (
+ CloudEvent,
+ from_dict,
+ from_http,
+ from_json,
+ )
+ else:
+ from cloudevents.pydantic.v2 import (
+ CloudEvent,
+ from_dict,
+ from_http,
+ from_json,
+ )
+
+except ImportError: # pragma: no cover # hard to test
+ raise PydanticFeatureNotInstalled(
+ "CloudEvents pydantic feature is not installed. "
+ "Install it using pip install cloudevents[pydantic]"
+ )
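+
+# Note: the version dispatch above runs once at import time, so
+# `from cloudevents.pydantic import CloudEvent` resolves to the v2 model when
+# pydantic 2.x is installed, while `cloudevents.pydantic.v1` remains
+# importable directly as the compatibility layer.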
__all__ = ["CloudEvent", "from_json", "from_dict", "from_http"]
diff --git a/cloudevents/pydantic/fields_docs.py b/cloudevents/pydantic/fields_docs.py
new file mode 100644
index 00000000..00ed0bd3
--- /dev/null
+++ b/cloudevents/pydantic/fields_docs.py
@@ -0,0 +1,142 @@
+# Copyright 2018-Present The CloudEvents Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cloudevents.sdk.event import attribute
+
+FIELD_DESCRIPTIONS = {
+ "data": {
+ "title": "Event Data",
+ "description": (
+ "CloudEvents MAY include domain-specific information about the occurrence."
+ " When present, this information will be encapsulated within data.It is"
+ " encoded into a media format which is specified by the datacontenttype"
+ " attribute (e.g. application/json), and adheres to the dataschema format"
+ " when those respective attributes are present."
+ ),
+ },
+ "source": {
+ "title": "Event Source",
+ "description": (
+ "Identifies the context in which an event happened. Often this will include"
+ " information such as the type of the event source, the organization"
+ " publishing the event or the process that produced the event. The exact"
+ " syntax and semantics behind the data encoded in the URI is defined by the"
+ " event producer.\n"
+ "\n"
+ "Producers MUST ensure that source + id is unique for"
+ " each distinct event.\n"
+ "\n"
+ "An application MAY assign a unique source to each"
+ " distinct producer, which makes it easy to produce unique IDs since no"
+ " other producer will have the same source. The application MAY use UUIDs,"
+ " URNs, DNS authorities or an application-specific scheme to create unique"
+ " source identifiers.\n"
+ "\n"
+ "A source MAY include more than one producer. In"
+ " that case the producers MUST collaborate to ensure that source + id is"
+ " unique for each distinct event."
+ ),
+ "example": "https://github.com/cloudevents",
+ },
+ "id": {
+ "title": "Event ID",
+ "description": (
+ "Identifies the event. Producers MUST ensure that source + id is unique for"
+ " each distinct event. If a duplicate event is re-sent (e.g. due to a"
+ " network error) it MAY have the same id. Consumers MAY assume that Events"
+ " with identical source and id are duplicates. MUST be unique within the"
+ " scope of the producer"
+ ),
+ "example": "A234-1234-1234",
+ },
+ "type": {
+ "title": "Event Type",
+ "description": (
+ "This attribute contains a value describing the type of event related to"
+ " the originating occurrence. Often this attribute is used for routing,"
+ " observability, policy enforcement, etc. The format of this is producer"
+ " defined and might include information such as the version of the type"
+ ),
+ "example": "com.github.pull_request.opened",
+ },
+ "specversion": {
+ "title": "Specification Version",
+ "description": (
+ "The version of the CloudEvents specification which the event uses. This"
+ " enables the interpretation of the context.\n"
+ "\n"
+ "Currently, this attribute will only have the 'major'"
+ " and 'minor' version numbers included in it. This allows for 'patch'"
+ " changes to the specification to be made without changing this property's"
+ " value in the serialization."
+ ),
+ "example": attribute.DEFAULT_SPECVERSION,
+ },
+ "time": {
+ "title": "Occurrence Time",
+ "description": (
+ " Timestamp of when the occurrence happened. If the time of the occurrence"
+ " cannot be determined then this attribute MAY be set to some other time"
+ " (such as the current time) by the CloudEvents producer, however all"
+ " producers for the same source MUST be consistent in this respect. In"
+ " other words, either they all use the actual time of the occurrence or"
+ " they all use the same algorithm to determine the value used."
+ ),
+ "example": "2018-04-05T17:31:00Z",
+ },
+ "subject": {
+ "title": "Event Subject",
+ "description": (
+ "This describes the subject of the event in the context of the event"
+ " producer (identified by source). In publish-subscribe scenarios, a"
+ " subscriber will typically subscribe to events emitted by a source, but"
+ " the source identifier alone might not be sufficient as a qualifier for"
+ " any specific event if the source context has internal"
+ " sub-structure.\n"
+ "\n"
+ "Identifying the subject of the event in context"
+ " metadata (opposed to only in the data payload) is particularly helpful in"
+ " generic subscription filtering scenarios where middleware is unable to"
+ " interpret the data content. In the above example, the subscriber might"
+ " only be interested in blobs with names ending with '.jpg' or '.jpeg' and"
+ " the subject attribute allows for constructing a simple and efficient"
+ " string-suffix filter for that subset of events."
+ ),
+ "example": "123",
+ },
+ "datacontenttype": {
+ "title": "Event Data Content Type",
+ "description": (
+ "Content type of data value. This attribute enables data to carry any type"
+ " of content, whereby format and encoding might differ from that of the"
+ " chosen event format."
+ ),
+ "example": "text/xml",
+ },
+ "dataschema": {
+ "title": "Event Data Schema",
+ "description": (
+ "Identifies the schema that data adheres to. "
+ "Incompatible changes to the schema SHOULD be reflected by a different URI"
+ ),
+ },
+}
+
+"""
+The dictionary above contains title, description, example and other
+NON-FUNCTIONAL data for pydantic fields. It could potentially be
+used across the whole SDK.
+Functional field configurations (e.g. defaults) are still defined
+in the pydantic model classes.
+"""
diff --git a/cloudevents/pydantic/v1/__init__.py b/cloudevents/pydantic/v1/__init__.py
new file mode 100644
index 00000000..e17151a4
--- /dev/null
+++ b/cloudevents/pydantic/v1/__init__.py
@@ -0,0 +1,18 @@
+# Copyright 2018-Present The CloudEvents Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cloudevents.pydantic.v1.conversion import from_dict, from_http, from_json
+from cloudevents.pydantic.v1.event import CloudEvent
+
+__all__ = ["CloudEvent", "from_json", "from_dict", "from_http"]
diff --git a/cloudevents/pydantic/conversion.py b/cloudevents/pydantic/v1/conversion.py
similarity index 98%
rename from cloudevents/pydantic/conversion.py
rename to cloudevents/pydantic/v1/conversion.py
index d67010ed..dcf0b7db 100644
--- a/cloudevents/pydantic/conversion.py
+++ b/cloudevents/pydantic/v1/conversion.py
@@ -16,7 +16,7 @@
from cloudevents.conversion import from_dict as _abstract_from_dict
from cloudevents.conversion import from_http as _abstract_from_http
from cloudevents.conversion import from_json as _abstract_from_json
-from cloudevents.pydantic.event import CloudEvent
+from cloudevents.pydantic.v1.event import CloudEvent
from cloudevents.sdk import types
diff --git a/cloudevents/pydantic/event.py b/cloudevents/pydantic/v1/event.py
similarity index 58%
rename from cloudevents/pydantic/event.py
rename to cloudevents/pydantic/v1/event.py
index 0855ee7e..d18736a4 100644
--- a/cloudevents/pydantic/event.py
+++ b/cloudevents/pydantic/v1/event.py
@@ -16,6 +16,7 @@
import typing
from cloudevents.exceptions import PydanticFeatureNotInstalled
+from cloudevents.pydantic.fields_docs import FIELD_DESCRIPTIONS
try:
from pydantic import VERSION as PYDANTIC_VERSION
@@ -72,7 +73,7 @@ def _ce_json_dumps( # type: ignore[no-untyped-def]
def _ce_json_loads( # type: ignore[no-untyped-def]
data: typing.AnyStr, *args, **kwargs # noqa
) -> typing.Dict[typing.Any, typing.Any]:
- """Perforns Pydantic-specific deserialization of the event.
+ """Performs Pydantic-specific deserialization of the event.
Needed by the pydantic base-model to de-serialize the event correctly from json.
Without this function the data will be incorrectly de-serialized.
@@ -104,125 +105,52 @@ def create(
return cls(attributes, data)
data: typing.Optional[typing.Any] = Field(
- title="Event Data",
- description=(
- "CloudEvents MAY include domain-specific information about the occurrence."
- " When present, this information will be encapsulated within data.It is"
- " encoded into a media format which is specified by the datacontenttype"
- " attribute (e.g. application/json), and adheres to the dataschema format"
- " when those respective attributes are present."
- ),
+ title=FIELD_DESCRIPTIONS["data"].get("title"),
+ description=FIELD_DESCRIPTIONS["data"].get("description"),
+ example=FIELD_DESCRIPTIONS["data"].get("example"),
)
source: str = Field(
- title="Event Source",
- description=(
- "Identifies the context in which an event happened. Often this will include"
- " information such as the type of the event source, the organization"
- " publishing the event or the process that produced the event. The exact"
- " syntax and semantics behind the data encoded in the URI is defined by the"
- " event producer.\n"
- "\n"
- "Producers MUST ensure that source + id is unique for"
- " each distinct event.\n"
- "\n"
- "An application MAY assign a unique source to each"
- " distinct producer, which makes it easy to produce unique IDs since no"
- " other producer will have the same source. The application MAY use UUIDs,"
- " URNs, DNS authorities or an application-specific scheme to create unique"
- " source identifiers.\n"
- "\n"
- "A source MAY include more than one producer. In"
- " that case the producers MUST collaborate to ensure that source + id is"
- " unique for each distinct event."
- ),
- example="https://github.com/cloudevents",
+ title=FIELD_DESCRIPTIONS["source"].get("title"),
+ description=FIELD_DESCRIPTIONS["source"].get("description"),
+ example=FIELD_DESCRIPTIONS["source"].get("example"),
)
-
id: str = Field(
+ title=FIELD_DESCRIPTIONS["id"].get("title"),
+ description=FIELD_DESCRIPTIONS["id"].get("description"),
+ example=FIELD_DESCRIPTIONS["id"].get("example"),
default_factory=attribute.default_id_selection_algorithm,
- title="Event ID",
- description=(
- "Identifies the event. Producers MUST ensure that source + id is unique for"
- " each distinct event. If a duplicate event is re-sent (e.g. due to a"
- " network error) it MAY have the same id. Consumers MAY assume that Events"
- " with identical source and id are duplicates. MUST be unique within the"
- " scope of the producer"
- ),
- example="A234-1234-1234",
)
type: str = Field(
- title="Event Type",
- description=(
- "This attribute contains a value describing the type of event related to"
- " the originating occurrence. Often this attribute is used for routing,"
- " observability, policy enforcement, etc. The format of this is producer"
- " defined and might include information such as the version of the type"
- ),
- example="com.github.pull_request.opened",
+ title=FIELD_DESCRIPTIONS["type"].get("title"),
+ description=FIELD_DESCRIPTIONS["type"].get("description"),
+ example=FIELD_DESCRIPTIONS["type"].get("example"),
)
specversion: attribute.SpecVersion = Field(
+ title=FIELD_DESCRIPTIONS["specversion"].get("title"),
+ description=FIELD_DESCRIPTIONS["specversion"].get("description"),
+ example=FIELD_DESCRIPTIONS["specversion"].get("example"),
default=attribute.DEFAULT_SPECVERSION,
- title="Specification Version",
- description=(
- "The version of the CloudEvents specification which the event uses. This"
- " enables the interpretation of the context.\n"
- "\n"
- "Currently, this attribute will only have the 'major'"
- " and 'minor' version numbers included in it. This allows for 'patch'"
- " changes to the specification to be made without changing this property's"
- " value in the serialization."
- ),
- example=attribute.DEFAULT_SPECVERSION,
)
time: typing.Optional[datetime.datetime] = Field(
+ title=FIELD_DESCRIPTIONS["time"].get("title"),
+ description=FIELD_DESCRIPTIONS["time"].get("description"),
+ example=FIELD_DESCRIPTIONS["time"].get("example"),
default_factory=attribute.default_time_selection_algorithm,
- title="Occurrence Time",
- description=(
- " Timestamp of when the occurrence happened. If the time of the occurrence"
- " cannot be determined then this attribute MAY be set to some other time"
- " (such as the current time) by the CloudEvents producer, however all"
- " producers for the same source MUST be consistent in this respect. In"
- " other words, either they all use the actual time of the occurrence or"
- " they all use the same algorithm to determine the value used."
- ),
- example="2018-04-05T17:31:00Z",
)
-
subject: typing.Optional[str] = Field(
- title="Event Subject",
- description=(
- "This describes the subject of the event in the context of the event"
- " producer (identified by source). In publish-subscribe scenarios, a"
- " subscriber will typically subscribe to events emitted by a source, but"
- " the source identifier alone might not be sufficient as a qualifier for"
- " any specific event if the source context has internal"
- " sub-structure.\n"
- "\n"
- "Identifying the subject of the event in context"
- " metadata (opposed to only in the data payload) is particularly helpful in"
- " generic subscription filtering scenarios where middleware is unable to"
- " interpret the data content. In the above example, the subscriber might"
- " only be interested in blobs with names ending with '.jpg' or '.jpeg' and"
- " the subject attribute allows for constructing a simple and efficient"
- " string-suffix filter for that subset of events."
- ),
- example="123",
+ title=FIELD_DESCRIPTIONS["subject"].get("title"),
+ description=FIELD_DESCRIPTIONS["subject"].get("description"),
+ example=FIELD_DESCRIPTIONS["subject"].get("example"),
)
datacontenttype: typing.Optional[str] = Field(
- title="Event Data Content Type",
- description=(
- "Content type of data value. This attribute enables data to carry any type"
- " of content, whereby format and encoding might differ from that of the"
- " chosen event format."
- ),
- example="text/xml",
+ title=FIELD_DESCRIPTIONS["datacontenttype"].get("title"),
+ description=FIELD_DESCRIPTIONS["datacontenttype"].get("description"),
+ example=FIELD_DESCRIPTIONS["datacontenttype"].get("example"),
)
dataschema: typing.Optional[str] = Field(
- title="Event Data Schema",
- description=(
- "Identifies the schema that data adheres to. "
- "Incompatible changes to the schema SHOULD be reflected by a different URI"
- ),
+ title=FIELD_DESCRIPTIONS["dataschema"].get("title"),
+ description=FIELD_DESCRIPTIONS["dataschema"].get("description"),
+ example=FIELD_DESCRIPTIONS["dataschema"].get("example"),
)
def __init__( # type: ignore[no-untyped-def]
@@ -258,7 +186,7 @@ def __init__( # type: ignore[no-untyped-def]
)
attributes = {k.lower(): v for k, v in attributes.items()}
kwargs.update(attributes)
- super(CloudEvent, self).__init__(data=data, **kwargs)
+ super().__init__(data=data, **kwargs)
class Config:
extra: str = "allow" # this is the way we implement extensions
diff --git a/cloudevents/pydantic/v2/__init__.py b/cloudevents/pydantic/v2/__init__.py
new file mode 100644
index 00000000..55d2a7fd
--- /dev/null
+++ b/cloudevents/pydantic/v2/__init__.py
@@ -0,0 +1,18 @@
+# Copyright 2018-Present The CloudEvents Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from cloudevents.pydantic.v2.conversion import from_dict, from_http, from_json
+from cloudevents.pydantic.v2.event import CloudEvent
+
+__all__ = ["CloudEvent", "from_json", "from_dict", "from_http"]
diff --git a/cloudevents/pydantic/v2/conversion.py b/cloudevents/pydantic/v2/conversion.py
new file mode 100644
index 00000000..65108544
--- /dev/null
+++ b/cloudevents/pydantic/v2/conversion.py
@@ -0,0 +1,75 @@
+# Copyright 2018-Present The CloudEvents Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import typing
+
+from cloudevents.conversion import from_dict as _abstract_from_dict
+from cloudevents.conversion import from_http as _abstract_from_http
+from cloudevents.conversion import from_json as _abstract_from_json
+from cloudevents.pydantic.v2.event import CloudEvent
+from cloudevents.sdk import types
+
+
+def from_http(
+ headers: typing.Dict[str, str],
+ data: typing.Optional[typing.AnyStr],
+ data_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
+) -> CloudEvent:
+ """
+ Parses CloudEvent `data` and `headers` into a CloudEvent.
+
+ The method supports both binary and structured representations.
+
+ :param headers: The HTTP request headers.
+ :param data: The HTTP request body. If set to None, "" or b'', the returned
+ event's `data` field will be set to None.
+ :param data_unmarshaller: Callable function to map data to a python object
+ e.g. lambda x: x or lambda x: json.loads(x)
+ :returns: A CloudEvent parsed from the passed HTTP parameters
+ """
+ return _abstract_from_http(
+ headers=headers,
+ data=data,
+ data_unmarshaller=data_unmarshaller,
+ event_type=CloudEvent,
+ )
+
+
+def from_json(
+ data: typing.AnyStr,
+ data_unmarshaller: typing.Optional[types.UnmarshallerType] = None,
+) -> CloudEvent:
+ """
+ Parses JSON string `data` into a CloudEvent.
+
+ :param data: JSON string representation of a CloudEvent.
+ :param data_unmarshaller: Callable function that casts `data` to a
+ Python object.
+ :returns: A CloudEvent parsed from the given JSON representation.
+ """
+ return _abstract_from_json(
+ data=data, data_unmarshaller=data_unmarshaller, event_type=CloudEvent
+ )
+
+
+def from_dict(
+ event: typing.Dict[str, typing.Any],
+) -> CloudEvent:
+ """
+    Construct a CloudEvent from a dict `event` representation.
+
+ :param event: The event represented as a dict.
+ :returns: A CloudEvent parsed from the given dict representation.
+ """
+ return _abstract_from_dict(CloudEvent, event)
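+
+
+# Illustrative round-trip (a sketch): `from_dict(conversion.to_dict(event))`,
+# where `conversion` is `cloudevents.conversion`, reconstructs an equivalent
+# CloudEvent from its dict representation.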
diff --git a/cloudevents/pydantic/v2/event.py b/cloudevents/pydantic/v2/event.py
new file mode 100644
index 00000000..643794c1
--- /dev/null
+++ b/cloudevents/pydantic/v2/event.py
@@ -0,0 +1,244 @@
+# Copyright 2018-Present The CloudEvents Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import datetime
+import json
+import typing
+from typing import Any
+
+from pydantic.deprecated import parse as _deprecated_parse
+
+from cloudevents.exceptions import PydanticFeatureNotInstalled
+from cloudevents.pydantic.fields_docs import FIELD_DESCRIPTIONS
+
+try:
+ from pydantic import BaseModel, ConfigDict, Field, model_serializer
+except ImportError: # pragma: no cover # hard to test
+ raise PydanticFeatureNotInstalled(
+ "CloudEvents pydantic feature is not installed. "
+ "Install it using pip install cloudevents[pydantic]"
+ )
+
+from cloudevents import abstract, conversion
+from cloudevents.exceptions import IncompatibleArgumentsError
+from cloudevents.sdk.event import attribute
+
+
+class CloudEvent(abstract.CloudEvent, BaseModel): # type: ignore
+ """
+ A Python-friendly CloudEvent representation backed by Pydantic-modeled fields.
+
+ Supports both binary and structured modes of the CloudEvents v1 specification.
+ """
+
+ @classmethod
+ def create(
+ cls, attributes: typing.Dict[str, typing.Any], data: typing.Optional[typing.Any]
+ ) -> "CloudEvent":
+ return cls(attributes, data)
+
+ data: typing.Optional[typing.Any] = Field(
+ title=FIELD_DESCRIPTIONS["data"].get("title"),
+ description=FIELD_DESCRIPTIONS["data"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["data"].get("example")],
+ default=None,
+ )
+ source: str = Field(
+ title=FIELD_DESCRIPTIONS["source"].get("title"),
+ description=FIELD_DESCRIPTIONS["source"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["source"].get("example")],
+ )
+ id: str = Field(
+ title=FIELD_DESCRIPTIONS["id"].get("title"),
+ description=FIELD_DESCRIPTIONS["id"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["id"].get("example")],
+ default_factory=attribute.default_id_selection_algorithm,
+ )
+ type: str = Field(
+ title=FIELD_DESCRIPTIONS["type"].get("title"),
+ description=FIELD_DESCRIPTIONS["type"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["type"].get("example")],
+ )
+ specversion: attribute.SpecVersion = Field(
+ title=FIELD_DESCRIPTIONS["specversion"].get("title"),
+ description=FIELD_DESCRIPTIONS["specversion"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["specversion"].get("example")],
+ default=attribute.DEFAULT_SPECVERSION,
+ )
+ time: typing.Optional[datetime.datetime] = Field(
+ title=FIELD_DESCRIPTIONS["time"].get("title"),
+ description=FIELD_DESCRIPTIONS["time"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["time"].get("example")],
+ default_factory=attribute.default_time_selection_algorithm,
+ )
+ subject: typing.Optional[str] = Field(
+ title=FIELD_DESCRIPTIONS["subject"].get("title"),
+ description=FIELD_DESCRIPTIONS["subject"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["subject"].get("example")],
+ default=None,
+ )
+ datacontenttype: typing.Optional[str] = Field(
+ title=FIELD_DESCRIPTIONS["datacontenttype"].get("title"),
+ description=FIELD_DESCRIPTIONS["datacontenttype"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["datacontenttype"].get("example")],
+ default=None,
+ )
+ dataschema: typing.Optional[str] = Field(
+ title=FIELD_DESCRIPTIONS["dataschema"].get("title"),
+ description=FIELD_DESCRIPTIONS["dataschema"].get("description"),
+ examples=[FIELD_DESCRIPTIONS["dataschema"].get("example")],
+ default=None,
+ )
+
+ def __init__( # type: ignore[no-untyped-def]
+ self,
+ attributes: typing.Optional[typing.Dict[str, typing.Any]] = None,
+ data: typing.Optional[typing.Any] = None,
+ **kwargs,
+ ):
+ """
+ :param attributes: A dict with CloudEvent attributes.
+            Minimally expects the attributes 'type' and 'source'. If the
+            attributes 'specversion', 'id' or 'time' are not given, they are
+            created with default values.
+
+            If no attributes are given, the class MUST use the kwargs as the attributes.
+
+ Example Attributes:
+ {
+ "specversion": "1.0",
+ "type": "com.github.pull_request.opened",
+ "source": "https://github.com/cloudevents/spec/pull",
+ "id": "A234-1234-1234",
+ "time": "2018-04-05T17:31:00Z",
+ }
+
+ :param data: Domain-specific information about the occurrence.
+ """
+ if attributes:
+ if len(kwargs) != 0:
+ # To prevent API complexity and confusion.
+ raise IncompatibleArgumentsError(
+ "Attributes dict and kwargs are incompatible."
+ )
+ attributes = {k.lower(): v for k, v in attributes.items()}
+ kwargs.update(attributes)
+ super().__init__(data=data, **kwargs)
+
+ model_config = ConfigDict(
+ extra="allow", # this is the way we implement extensions
+ json_schema_extra={
+ "example": {
+ "specversion": "1.0",
+ "type": "com.github.pull_request.opened",
+ "source": "https://github.com/cloudevents/spec/pull",
+ "subject": "123",
+ "id": "A234-1234-1234",
+ "time": "2018-04-05T17:31:00Z",
+ "comexampleextension1": "value",
+ "comexampleothervalue": 5,
+ "datacontenttype": "text/xml",
+ "data": '',
+ }
+ },
+ )
+
+ """
+ We should use a @model_validator decorator to handle JSON deserialisation,
+ however it's not possible to completely bypass the internal pydantic logic
+ and still use the CloudEvents shared conversion logic.
+
+    The same issue applies to the multiple from/to JSON conversions in the
+    @model_serializer implemented below: `.model_dump_json()` serializes the
+    event with conversion.to_json, parses the result back into a dict, and
+    lets pydantic serialize that dict to JSON again.
+
+    Removing this double from/to JSON transformation would require a major
+    refactor of the SDK conversion logic.
+ """
+
+ @classmethod
+ def model_validate_json(
+ cls,
+ json_data: typing.Union[str, bytes, bytearray],
+ *,
+ strict: typing.Optional[bool] = None,
+ context: typing.Optional[typing.Dict[str, Any]] = None,
+ ) -> "CloudEvent":
+ return conversion.from_json(cls, json_data)
+
+ @classmethod
+ def parse_raw(
+ cls,
+ b: typing.Union[str, bytes],
+ *,
+ content_type: typing.Optional[str] = None,
+ encoding: str = "utf8",
+ proto: typing.Optional[_deprecated_parse.Protocol] = None,
+ allow_pickle: bool = False,
+ ) -> "CloudEvent":
+ return conversion.from_json(cls, b)
+
+ @model_serializer(when_used="json")
+ def _ce_json_dumps(self) -> typing.Dict[str, typing.Any]:
+ """Performs Pydantic-specific serialization of the event when
+ serializing the model using `.model_dump_json()` method.
+
+ Needed by the pydantic base-model to serialize the event correctly to json.
+ Without this function the data will be incorrectly serialized.
+
+ :param self: CloudEvent.
+
+ :return: Event serialized as a standard CloudEvent dict with user specific
+ parameters.
+ """
+ # Here mypy complains about json.loads returning Any
+ # which is incompatible with this method return type
+ # but we know it's always a dictionary in this case
+ return json.loads(conversion.to_json(self)) # type: ignore
+
+ def _get_attributes(self) -> typing.Dict[str, typing.Any]:
+ return {
+ key: conversion.best_effort_encode_attribute_value(value)
+ for key, value in dict(BaseModel.__iter__(self)).items()
+ if key not in ["data"]
+ }
+
+ def get_data(self) -> typing.Optional[typing.Any]:
+ return self.data
+
+ def __setitem__(self, key: str, value: typing.Any) -> None:
+ """
+ Set event attribute value
+
+ MUST NOT set event data with this method, use `.data` member instead
+
+ Method SHOULD mimic `cloudevents.http.event.CloudEvent` interface
+
+ :param key: Event attribute name
+ :param value: New event attribute value
+ """
+ if key != "data": # to mirror the behaviour of the http event
+ setattr(self, key, value)
+ else:
+ pass # It is de-facto ignored by the http event
+
+ def __delitem__(self, key: str) -> None:
+ """
+ SHOULD raise `KeyError` if no event attribute for the given key exists.
+
+ Method SHOULD mimic `cloudevents.http.event.CloudEvent` interface
+ :param key: The event attribute name.
+ """
+ if key == "data":
+ raise KeyError(key) # to mirror the behaviour of the http event
+ delattr(self, key)
diff --git a/cloudevents/tests/test_kafka_conversions.py b/cloudevents/tests/test_kafka_conversions.py
index 696e75cb..5580773a 100644
--- a/cloudevents/tests/test_kafka_conversions.py
+++ b/cloudevents/tests/test_kafka_conversions.py
@@ -59,7 +59,7 @@ def source_event(self) -> CloudEvent:
"source": "pytest",
"type": "com.pytest.test",
"time": datetime.datetime(2000, 1, 1, 6, 42, 33).isoformat(),
- "content-type": "foo",
+ "datacontenttype": "foo",
"partitionkey": "test_key_123",
},
data=self.expected_data,
@@ -123,7 +123,7 @@ def test_sets_headers(self, source_event):
assert result.headers["ce_source"] == source_event["source"].encode("utf-8")
assert result.headers["ce_type"] == source_event["type"].encode("utf-8")
assert result.headers["ce_time"] == source_event["time"].encode("utf-8")
- assert result.headers["content-type"] == source_event["content-type"].encode(
+ assert result.headers["content-type"] == source_event["datacontenttype"].encode(
"utf-8"
)
assert "data" not in result.headers
@@ -163,7 +163,7 @@ def source_binary_bytes_message(self) -> KafkaMessage:
"ce_time": datetime.datetime(2000, 1, 1, 6, 42, 33)
.isoformat()
.encode("utf-8"),
- "content-type": "foo".encode("utf-8"),
+ "datacontenttype": "foo".encode("utf-8"),
},
value=simple_serialize(self.expected_data),
key="test_key_123",
@@ -205,7 +205,7 @@ def test_sets_attrs_from_headers(self, source_binary_json_message):
assert result["type"] == source_binary_json_message.headers["ce_type"].decode()
assert result["time"] == source_binary_json_message.headers["ce_time"].decode()
assert (
- result["content-type"]
+ result["datacontenttype"]
== source_binary_json_message.headers["content-type"].decode()
)
@@ -328,7 +328,7 @@ def test_no_key(self, source_event):
def test_sets_headers(self, source_event):
result = to_structured(source_event)
assert len(result.headers) == 1
- assert result.headers["content-type"] == source_event["content-type"].encode(
+ assert result.headers["content-type"] == source_event["datacontenttype"].encode(
"utf-8"
)
@@ -474,7 +474,7 @@ def test_sets_content_type_default_envelope_unmarshaller(
):
result = from_structured(source_structured_json_message)
assert (
- result["content-type"]
+ result["datacontenttype"]
== source_structured_json_message.headers["content-type"].decode()
)
@@ -487,7 +487,7 @@ def test_sets_content_type_custom_envelope_unmarshaller(
envelope_unmarshaller=custom_unmarshaller,
)
assert (
- result["content-type"]
+ result["datacontenttype"]
== source_structured_bytes_bytes_message.headers["content-type"].decode()
)
diff --git a/cloudevents/tests/test_pydantic_cloudevent.py b/cloudevents/tests/test_pydantic_cloudevent.py
index eef8e91a..87ac5507 100644
--- a/cloudevents/tests/test_pydantic_cloudevent.py
+++ b/cloudevents/tests/test_pydantic_cloudevent.py
@@ -15,19 +15,15 @@
from json import loads
import pytest
-from pydantic import VERSION as PYDANTIC_VERSION
+from pydantic import ValidationError as PydanticV2ValidationError
+from pydantic.v1 import ValidationError as PydanticV1ValidationError
from cloudevents.conversion import _json_or_string
from cloudevents.exceptions import IncompatibleArgumentsError
-from cloudevents.pydantic import CloudEvent
+from cloudevents.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent
+from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent
from cloudevents.sdk.event.attribute import SpecVersion
-pydantic_major_version = PYDANTIC_VERSION.split(".")[0]
-if pydantic_major_version == "2":
- from pydantic.v1 import ValidationError
-else:
- from pydantic import ValidationError
-
_DUMMY_SOURCE = "dummy:source"
_DUMMY_TYPE = "tests.cloudevents.override"
_DUMMY_TIME = "2022-07-16T11:20:34.284130+00:00"
@@ -39,6 +35,25 @@ def specversion(request):
return request.param
+_pydantic_implementation = {
+ "v1": {
+ "event": PydanticV1CloudEvent,
+ "validation_error": PydanticV1ValidationError,
+ "pydantic_version": "v1",
+ },
+ "v2": {
+ "event": PydanticV2CloudEvent,
+ "validation_error": PydanticV2ValidationError,
+ "pydantic_version": "v2",
+ },
+}
+
+
+@pytest.fixture(params=["v1", "v2"])
+def cloudevents_implementation(request):
+ return _pydantic_implementation[request.param]
+
+
@pytest.fixture()
def dummy_attributes(specversion):
return {
@@ -64,8 +79,10 @@ def your_dummy_data():
@pytest.fixture()
-def dummy_event(dummy_attributes, my_dummy_data):
- return CloudEvent(attributes=dummy_attributes, data=my_dummy_data)
+def dummy_event(dummy_attributes, my_dummy_data, cloudevents_implementation):
+ return cloudevents_implementation["event"](
+ attributes=dummy_attributes, data=my_dummy_data
+ )
@pytest.fixture()
@@ -75,10 +92,12 @@ def non_exiting_attribute_name(dummy_event):
return result
-def test_pydantic_cloudevent_equality(dummy_attributes, my_dummy_data, your_dummy_data):
+def test_pydantic_cloudevent_equality(
+ dummy_attributes, my_dummy_data, your_dummy_data, cloudevents_implementation
+):
data = my_dummy_data
- event1 = CloudEvent(dummy_attributes, data)
- event2 = CloudEvent(dummy_attributes, data)
+ event1 = cloudevents_implementation["event"](dummy_attributes, data)
+ event2 = cloudevents_implementation["event"](dummy_attributes, data)
assert event1 == event2
# Test different attributes
for key in dummy_attributes:
@@ -86,15 +105,15 @@ def test_pydantic_cloudevent_equality(dummy_attributes, my_dummy_data, your_dumm
continue
else:
dummy_attributes[key] = f"noise-{key}"
- event3 = CloudEvent(dummy_attributes, data)
- event2 = CloudEvent(dummy_attributes, data)
+ event3 = cloudevents_implementation["event"](dummy_attributes, data)
+ event2 = cloudevents_implementation["event"](dummy_attributes, data)
assert event2 == event3
assert event1 != event2 and event3 != event1
# Test different data
data = your_dummy_data
- event3 = CloudEvent(dummy_attributes, data)
- event2 = CloudEvent(dummy_attributes, data)
+ event3 = cloudevents_implementation["event"](dummy_attributes, data)
+ event2 = cloudevents_implementation["event"](dummy_attributes, data)
assert event2 == event3
assert event1 != event2 and event3 != event1
@@ -115,12 +134,12 @@ def test_http_cloudevent_must_not_equal_to_non_cloudevent_value(
def test_http_cloudevent_mutates_equality(
- dummy_attributes, my_dummy_data, your_dummy_data
+ dummy_attributes, my_dummy_data, your_dummy_data, cloudevents_implementation
):
data = my_dummy_data
- event1 = CloudEvent(dummy_attributes, data)
- event2 = CloudEvent(dummy_attributes, data)
- event3 = CloudEvent(dummy_attributes, data)
+ event1 = cloudevents_implementation["event"](dummy_attributes, data)
+ event2 = cloudevents_implementation["event"](dummy_attributes, data)
+ event3 = cloudevents_implementation["event"](dummy_attributes, data)
assert event1 == event2
# Test different attributes
@@ -140,29 +159,40 @@ def test_http_cloudevent_mutates_equality(
assert event1 != event2 and event3 != event1
-def test_cloudevent_missing_specversion():
+def test_cloudevent_missing_specversion(cloudevents_implementation):
+ errors = {
+ "v1": "value is not a valid enumeration member; permitted: '0.3', '1.0'",
+ "v2": "Input should be '0.3' or '1.0'",
+ }
attributes = {"specversion": "0.2", "source": "s", "type": "t"}
- with pytest.raises(ValidationError) as e:
- _ = CloudEvent(attributes, None)
- assert "value is not a valid enumeration member; permitted: '0.3', '1.0'" in str(
- e.value
- )
+ with pytest.raises(cloudevents_implementation["validation_error"]) as e:
+ _ = cloudevents_implementation["event"](attributes, None)
+ assert errors[cloudevents_implementation["pydantic_version"]] in str(e.value)
-def test_cloudevent_missing_minimal_required_fields():
+def test_cloudevent_missing_minimal_required_fields(cloudevents_implementation):
attributes = {"type": "t"}
- with pytest.raises(ValidationError) as e:
- _ = CloudEvent(attributes, None)
- assert "\nsource\n field required " in str(e.value)
+ errors = {
+ "v1": "\nsource\n field required ",
+ "v2": "\nsource\n Field required ",
+ }
+
+ with pytest.raises(cloudevents_implementation["validation_error"]) as e:
+ _ = cloudevents_implementation["event"](attributes, None)
+ assert errors[cloudevents_implementation["pydantic_version"]] in str(e.value)
attributes = {"source": "s"}
- with pytest.raises(ValidationError) as e:
- _ = CloudEvent(attributes, None)
- assert "\ntype\n field required " in str(e.value)
+ errors = {
+ "v1": "\ntype\n field required ",
+ "v2": "\ntype\n Field required ",
+ }
+ with pytest.raises(cloudevents_implementation["validation_error"]) as e:
+ _ = cloudevents_implementation["event"](attributes, None)
+ assert errors[cloudevents_implementation["pydantic_version"]] in str(e.value)
-def test_cloudevent_general_overrides():
- event = CloudEvent(
+def test_cloudevent_general_overrides(cloudevents_implementation):
+ event = cloudevents_implementation["event"](
{
"source": "my-source",
"type": "com.test.overrides",
@@ -223,9 +253,9 @@ def test_get_operation_on_non_existing_attribute_should_not_copy_default_value(
@pytest.mark.xfail() # https://github.com/cloudevents/sdk-python/issues/185
-def test_json_data_serialization_without_explicit_type():
+def test_json_data_serialization_without_explicit_type(cloudevents_implementation):
assert loads(
- CloudEvent(
+ cloudevents_implementation["event"](
source=_DUMMY_SOURCE, type=_DUMMY_TYPE, data='{"hello": "world"}'
).json()
)["data"] == {"hello": "world"}
@@ -242,17 +272,15 @@ def test_json_data_serialization_without_explicit_type():
],
)
def test_json_data_serialization_with_explicit_json_content_type(
- dummy_attributes, json_content_type
+ dummy_attributes, json_content_type, cloudevents_implementation
):
dummy_attributes["datacontenttype"] = json_content_type
assert loads(
- CloudEvent(
+ cloudevents_implementation["event"](
dummy_attributes,
data='{"hello": "world"}',
).json()
- )[
- "data"
- ] == {"hello": "world"}
+ )["data"] == {"hello": "world"}
_NON_JSON_CONTENT_TYPES = [
@@ -275,10 +303,10 @@ def test_json_data_serialization_with_explicit_json_content_type(
@pytest.mark.parametrize("datacontenttype", _NON_JSON_CONTENT_TYPES)
def test_json_data_serialization_with_explicit_non_json_content_type(
- dummy_attributes, datacontenttype
+ dummy_attributes, datacontenttype, cloudevents_implementation
):
dummy_attributes["datacontenttype"] = datacontenttype
- event = CloudEvent(
+ event = cloudevents_implementation["event"](
dummy_attributes,
data='{"hello": "world"}',
).json()
@@ -286,18 +314,20 @@ def test_json_data_serialization_with_explicit_non_json_content_type(
@pytest.mark.parametrize("datacontenttype", _NON_JSON_CONTENT_TYPES)
-def test_binary_data_serialization(dummy_attributes, datacontenttype):
+def test_binary_data_serialization(
+ dummy_attributes, datacontenttype, cloudevents_implementation
+):
dummy_attributes["datacontenttype"] = datacontenttype
- event = CloudEvent(
+ event = cloudevents_implementation["event"](
dummy_attributes,
data=b"\x00\x00\x11Hello World",
).json()
result_json = loads(event)
assert result_json["data_base64"] == "AAARSGVsbG8gV29ybGQ="
- assert "daata" not in result_json
+ assert "data" not in result_json
-def test_binary_data_deserialization():
+def test_binary_data_deserialization(cloudevents_implementation):
given = (
b'{"source": "dummy:source", "id": "11775cb2-fd00-4487-a18b-30c3600eaa5f",'
b' "type": "dummy.type", "specversion": "1.0", "time":'
@@ -318,7 +348,12 @@ def test_binary_data_deserialization():
),
"type": "dummy.type",
}
- assert CloudEvent.parse_raw(given).dict() == expected
+ assert cloudevents_implementation["event"].parse_raw(given).dict() == expected
+ if cloudevents_implementation["pydantic_version"] == "v2":
+ assert (
+ cloudevents_implementation["event"].model_validate_json(given).dict()
+ == expected
+ )
def test_access_data_event_attribute_should_raise_key_error(dummy_event):
@@ -355,6 +390,6 @@ def test_data_must_never_exist_as_an_attribute_name(dummy_event):
assert "data" not in dummy_event
-def test_attributes_and_kwards_are_incompatible():
+def test_attributes_and_kwards_are_incompatible(cloudevents_implementation):
with pytest.raises(IncompatibleArgumentsError):
- CloudEvent({"a": "b"}, other="hello world")
+ cloudevents_implementation["event"]({"a": "b"}, other="hello world")
diff --git a/cloudevents/tests/test_pydantic_conversions.py b/cloudevents/tests/test_pydantic_conversions.py
index 91ab0151..801b76bd 100644
--- a/cloudevents/tests/test_pydantic_conversions.py
+++ b/cloudevents/tests/test_pydantic_conversions.py
@@ -17,21 +17,51 @@
import json
import pytest
+from pydantic import ValidationError as PydanticV2ValidationError
+from pydantic.v1 import ValidationError as PydanticV1ValidationError
from cloudevents.conversion import to_json
-from cloudevents.pydantic import CloudEvent, from_dict, from_json
+from cloudevents.pydantic.v1.conversion import from_dict as pydantic_v1_from_dict
+from cloudevents.pydantic.v1.conversion import from_json as pydantic_v1_from_json
+from cloudevents.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent
+from cloudevents.pydantic.v2.conversion import from_dict as pydantic_v2_from_dict
+from cloudevents.pydantic.v2.conversion import from_json as pydantic_v2_from_json
+from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent
from cloudevents.sdk.event.attribute import SpecVersion
test_data = json.dumps({"data-key": "val"})
test_attributes = {
"type": "com.example.string",
"source": "https://example.com/event-producer",
+ "extension-attribute": "extension-attribute-test-value",
}
+_pydantic_implementation = {
+ "v1": {
+ "event": PydanticV1CloudEvent,
+ "validation_error": PydanticV1ValidationError,
+ "from_dict": pydantic_v1_from_dict,
+ "from_json": pydantic_v1_from_json,
+ "pydantic_version": "v1",
+ },
+ "v2": {
+ "event": PydanticV2CloudEvent,
+ "validation_error": PydanticV2ValidationError,
+ "from_dict": pydantic_v2_from_dict,
+ "from_json": pydantic_v2_from_json,
+ "pydantic_version": "v2",
+ },
+}
+
+
+@pytest.fixture(params=["v1", "v2"])
+def cloudevents_implementation(request):
+ return _pydantic_implementation[request.param]
+
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_to_json(specversion):
- event = CloudEvent(test_attributes, test_data)
+def test_to_json(specversion, cloudevents_implementation):
+ event = cloudevents_implementation["event"](test_attributes, test_data)
event_json = to_json(event)
event_dict = json.loads(event_json)
@@ -42,10 +72,10 @@ def test_to_json(specversion):
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_to_json_base64(specversion):
+def test_to_json_base64(specversion, cloudevents_implementation):
data = b"test123"
- event = CloudEvent(test_attributes, data)
+ event = cloudevents_implementation["event"](test_attributes, data)
event_json = to_json(event)
event_dict = json.loads(event_json)
@@ -60,7 +90,7 @@ def test_to_json_base64(specversion):
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_from_json(specversion):
+def test_from_json(specversion, cloudevents_implementation):
payload = {
"type": "com.example.string",
"source": "https://example.com/event-producer",
@@ -68,7 +98,7 @@ def test_from_json(specversion):
"specversion": specversion,
"data": {"data-key": "val"},
}
- event = from_json(json.dumps(payload))
+ event = cloudevents_implementation["from_json"](json.dumps(payload))
for key, val in payload.items():
if key == "data":
@@ -78,7 +108,7 @@ def test_from_json(specversion):
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_from_json_base64(specversion):
+def test_from_json_base64(specversion, cloudevents_implementation):
# Create base64 encoded data
raw_data = {"data-key": "val"}
data = json.dumps(raw_data).encode()
@@ -95,7 +125,7 @@ def test_from_json_base64(specversion):
payload_json = json.dumps(payload)
# Create event
- event = from_json(payload_json)
+ event = cloudevents_implementation["from_json"](payload_json)
# Test fields were marshalled properly
for key, val in payload.items():
@@ -107,11 +137,11 @@ def test_from_json_base64(specversion):
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_json_can_talk_to_itself(specversion):
- event = CloudEvent(test_attributes, test_data)
+def test_json_can_talk_to_itself(specversion, cloudevents_implementation):
+ event = cloudevents_implementation["event"](test_attributes, test_data)
event_json = to_json(event)
- event = from_json(event_json)
+ event = cloudevents_implementation["from_json"](event_json)
for key, val in test_attributes.items():
assert event[key] == val
@@ -119,20 +149,20 @@ def test_json_can_talk_to_itself(specversion):
@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_json_can_talk_to_itself_base64(specversion):
+def test_json_can_talk_to_itself_base64(specversion, cloudevents_implementation):
data = b"test123"
- event = CloudEvent(test_attributes, data)
+ event = cloudevents_implementation["event"](test_attributes, data)
event_json = to_json(event)
- event = from_json(event_json)
+ event = cloudevents_implementation["from_json"](event_json)
for key, val in test_attributes.items():
assert event[key] == val
assert event.data == data
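
For context on the two base64 round-trips above: the CloudEvents JSON format
carries binary payloads under data_base64 rather than data, so to_json and
from_json must encode and decode bytes transparently. Schematically (a hedged
sketch of the envelope shape, not the SDK's serializer):

    import base64
    import json

    data = b"test123"
    # Shape of the JSON envelope for a bytes payload:
    envelope = json.dumps({
        "specversion": "1.0",
        "data_base64": base64.b64encode(data).decode(),
    })
    assert base64.b64decode(json.loads(envelope)["data_base64"]) == data
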
-def test_from_dict():
+def test_from_dict(cloudevents_implementation):
given = {
"data": b"\x00\x00\x11Hello World",
"datacontenttype": "application/octet-stream",
@@ -146,12 +176,4 @@ def test_from_dict():
),
"type": "dummy.type",
}
- assert from_dict(given).dict() == given
-
-
-@pytest.mark.parametrize("specversion", ["0.3", "1.0"])
-def test_pydantic_json_function_parameters_must_affect_output(specversion):
- event = CloudEvent(test_attributes, test_data)
- v1 = event.json(indent=2, sort_keys=True)
- v2 = event.json(indent=4, sort_keys=True)
- assert v1 != v2
+ assert cloudevents_implementation["from_dict"](given).dict() == given
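
Two things happen in this last hunk: the from_dict round-trip is routed
through the fixture, and the json-keyword test is dropped, since Pydantic
v2's serializer no longer accepts v1's sort_keys argument. For reference,
the nearest v2 spelling of the removed call, shown against plain Pydantic
rather than the SDK wrapper (a hedged sketch):

    from pydantic import BaseModel

    class Example(BaseModel):
        source: str
        type: str

    e = Example(source="s", type="t")
    # v1 spelling (the deleted test): e.json(indent=2, sort_keys=True)
    # v2 spelling: indent survives, sort_keys does not
    print(e.model_dump_json(indent=2))
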
diff --git a/cloudevents/tests/test_pydantic_events.py b/cloudevents/tests/test_pydantic_events.py
index 4195fdb6..3e536f05 100644
--- a/cloudevents/tests/test_pydantic_events.py
+++ b/cloudevents/tests/test_pydantic_events.py
@@ -18,11 +18,16 @@
import typing
import pytest
+from pydantic import ValidationError as PydanticV2ValidationError
+from pydantic.v1 import ValidationError as PydanticV1ValidationError
from sanic import Sanic, response
import cloudevents.exceptions as cloud_exceptions
from cloudevents.conversion import to_binary, to_structured
-from cloudevents.pydantic import CloudEvent, from_http
+from cloudevents.pydantic.v1.conversion import from_http as pydantic_v1_from_http
+from cloudevents.pydantic.v1.event import CloudEvent as PydanticV1CloudEvent
+from cloudevents.pydantic.v2.conversion import from_http as pydantic_v2_from_http
+from cloudevents.pydantic.v2.event import CloudEvent as PydanticV2CloudEvent
from cloudevents.sdk import converters
from cloudevents.sdk.converters.binary import is_binary
from cloudevents.sdk.converters.structured import is_structured
@@ -65,13 +70,35 @@
app = Sanic("test_pydantic_http_events")
+_pydantic_implementation = {
+ "v1": {
+ "event": PydanticV1CloudEvent,
+ "validation_error": PydanticV1ValidationError,
+ "from_http": pydantic_v1_from_http,
+ "pydantic_version": "v1",
+ },
+ "v2": {
+ "event": PydanticV2CloudEvent,
+ "validation_error": PydanticV2ValidationError,
+ "from_http": pydantic_v2_from_http,
+ "pydantic_version": "v2",
+ },
+}
+
+
+@pytest.fixture(params=["v1", "v2"])
+def cloudevents_implementation(request):
+ return _pydantic_implementation[request.param]
-@app.route("/event", ["POST"])
-async def echo(request):
+
+@app.route("/event/", ["POST"])
+async def echo(request, pydantic_version):
decoder = None
if "binary-payload" in request.headers:
decoder = lambda x: x
- event = from_http(dict(request.headers), request.body, data_unmarshaller=decoder)
+ event = _pydantic_implementation[pydantic_version]["from_http"](
+ dict(request.headers), request.body, data_unmarshaller=decoder
+ )
data = (
event.data
if isinstance(event.data, (bytes, bytearray, memoryview))
@@ -81,28 +108,28 @@ async def echo(request):
@pytest.mark.parametrize("body", invalid_cloudevent_request_body)
-def test_missing_required_fields_structured(body):
+def test_missing_required_fields_structured(body, cloudevents_implementation):
with pytest.raises(cloud_exceptions.MissingRequiredFields):
- _ = from_http(
+ _ = cloudevents_implementation["from_http"](
{"Content-Type": "application/cloudevents+json"}, json.dumps(body)
)
@pytest.mark.parametrize("headers", invalid_test_headers)
-def test_missing_required_fields_binary(headers):
+def test_missing_required_fields_binary(headers, cloudevents_implementation):
with pytest.raises(cloud_exceptions.MissingRequiredFields):
- _ = from_http(headers, json.dumps(test_data))
+ _ = cloudevents_implementation["from_http"](headers, json.dumps(test_data))
@pytest.mark.parametrize("headers", invalid_test_headers)
-def test_missing_required_fields_empty_data_binary(headers):
+def test_missing_required_fields_empty_data_binary(headers, cloudevents_implementation):
# Test for issue #115
with pytest.raises(cloud_exceptions.MissingRequiredFields):
- _ = from_http(headers, None)
+ _ = cloudevents_implementation["from_http"](headers, None)
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_emit_binary_event(specversion):
+def test_emit_binary_event(specversion, cloudevents_implementation):
headers = {
"ce-id": "my-id",
"ce-source": "",
@@ -111,7 +138,11 @@ def test_emit_binary_event(specversion):
"Content-Type": "text/plain",
}
data = json.dumps(test_data)
- _, r = app.test_client.post("/event", headers=headers, data=data)
+ _, r = app.test_client.post(
+ f"/event/{cloudevents_implementation['pydantic_version']}",
+ headers=headers,
+ data=data,
+ )
# Convert byte array to dict
# e.g. r.body = b'{"payload-content": "Hello World!"}'
@@ -128,7 +159,7 @@ def test_emit_binary_event(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_emit_structured_event(specversion):
+def test_emit_structured_event(specversion, cloudevents_implementation):
headers = {"Content-Type": "application/cloudevents+json"}
body = {
"id": "my-id",
@@ -137,7 +168,11 @@ def test_emit_structured_event(specversion):
"specversion": specversion,
"data": test_data,
}
- _, r = app.test_client.post("/event", headers=headers, data=json.dumps(body))
+ _, r = app.test_client.post(
+ f"/event/{cloudevents_implementation['pydantic_version']}",
+ headers=headers,
+ data=json.dumps(body),
+ )
# Convert byte array to dict
# e.g. r.body = b'{"payload-content": "Hello World!"}'
@@ -153,7 +188,7 @@ def test_emit_structured_event(specversion):
"converter", [converters.TypeBinary, converters.TypeStructured]
)
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_roundtrip_non_json_event(converter, specversion):
+def test_roundtrip_non_json_event(converter, specversion, cloudevents_implementation):
input_data = io.BytesIO()
for _ in range(100):
for j in range(20):
@@ -161,7 +196,7 @@ def test_roundtrip_non_json_event(converter, specversion):
compressed_data = bz2.compress(input_data.getvalue())
attrs = {"source": "test", "type": "t"}
- event = CloudEvent(attrs, compressed_data)
+ event = cloudevents_implementation["event"](attrs, compressed_data)
if converter == converters.TypeStructured:
headers, data = to_structured(event, data_marshaller=lambda x: x)
@@ -169,7 +204,11 @@ def test_roundtrip_non_json_event(converter, specversion):
headers, data = to_binary(event, data_marshaller=lambda x: x)
headers["binary-payload"] = "true" # Decoding hint for server
- _, r = app.test_client.post("/event", headers=headers, data=data)
+ _, r = app.test_client.post(
+ f"/event/{cloudevents_implementation['pydantic_version']}",
+ headers=headers,
+ data=data,
+ )
assert r.status_code == 200
for key in attrs:
@@ -178,7 +217,7 @@ def test_roundtrip_non_json_event(converter, specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_missing_ce_prefix_binary_event(specversion):
+def test_missing_ce_prefix_binary_event(specversion, cloudevents_implementation):
prefixed_headers = {}
headers = {
"ce-id": "my-id",
@@ -195,11 +234,13 @@ def test_missing_ce_prefix_binary_event(specversion):
# and NotImplementedError because structured calls aren't
# implemented. In this instance one of the required keys has the
# prefix e-id instead of ce-id, so it should throw
- _ = from_http(prefixed_headers, json.dumps(test_data))
+ _ = cloudevents_implementation["from_http"](
+ prefixed_headers, json.dumps(test_data)
+ )
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_valid_binary_events(specversion):
+def test_valid_binary_events(specversion, cloudevents_implementation):
# Test creating multiple cloud events
events_queue = []
headers = {}
@@ -212,7 +253,9 @@ def test_valid_binary_events(specversion):
"ce-specversion": specversion,
}
data = {"payload": f"payload-{i}"}
- events_queue.append(from_http(headers, json.dumps(data)))
+ events_queue.append(
+ cloudevents_implementation["from_http"](headers, json.dumps(data))
+ )
for i, event in enumerate(events_queue):
data = event.data
@@ -223,7 +266,7 @@ def test_valid_binary_events(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_structured_to_request(specversion):
+def test_structured_to_request(specversion, cloudevents_implementation):
attributes = {
"specversion": specversion,
"type": "word.found.name",
@@ -232,7 +275,7 @@ def test_structured_to_request(specversion):
}
data = {"message": "Hello World!"}
- event = CloudEvent(attributes, data)
+ event = cloudevents_implementation["event"](attributes, data)
headers, body_bytes = to_structured(event)
assert isinstance(body_bytes, bytes)
body = json.loads(body_bytes)
@@ -244,7 +287,7 @@ def test_structured_to_request(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_attributes_view_accessor(specversion: str):
+def test_attributes_view_accessor(specversion: str, cloudevents_implementation):
attributes: dict[str, typing.Any] = {
"specversion": specversion,
"type": "word.found.name",
@@ -253,7 +296,9 @@ def test_attributes_view_accessor(specversion: str):
}
data = {"message": "Hello World!"}
- event: CloudEvent = CloudEvent(attributes, data)
+ event: cloudevents_implementation["event"] = cloudevents_implementation["event"](
+ attributes, data
+ )
event_attributes: typing.Mapping[str, typing.Any] = event.get_attributes()
assert event_attributes["specversion"] == attributes["specversion"]
assert event_attributes["type"] == attributes["type"]
@@ -263,7 +308,7 @@ def test_attributes_view_accessor(specversion: str):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_binary_to_request(specversion):
+def test_binary_to_request(specversion, cloudevents_implementation):
attributes = {
"specversion": specversion,
"type": "word.found.name",
@@ -271,7 +316,7 @@ def test_binary_to_request(specversion):
"source": "pytest",
}
data = {"message": "Hello World!"}
- event = CloudEvent(attributes, data)
+ event = cloudevents_implementation["event"](attributes, data)
headers, body_bytes = to_binary(event)
body = json.loads(body_bytes)
@@ -282,7 +327,7 @@ def test_binary_to_request(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_empty_data_structured_event(specversion):
+def test_empty_data_structured_event(specversion, cloudevents_implementation):
# Testing if cloudevent breaks when no structured data field present
attributes = {
"specversion": specversion,
@@ -293,21 +338,21 @@ def test_empty_data_structured_event(specversion):
"source": "",
}
- event = from_http(
+ event = cloudevents_implementation["from_http"](
{"content-type": "application/cloudevents+json"}, json.dumps(attributes)
)
assert event.data is None
attributes["data"] = ""
# Data of empty string will be marshalled into None
- event = from_http(
+ event = cloudevents_implementation["from_http"](
{"content-type": "application/cloudevents+json"}, json.dumps(attributes)
)
assert event.data is None
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_empty_data_binary_event(specversion):
+def test_empty_data_binary_event(specversion, cloudevents_implementation):
# Testing if cloudevent breaks when no structured data field present
headers = {
"Content-Type": "application/octet-stream",
@@ -317,17 +362,17 @@ def test_empty_data_binary_event(specversion):
"ce-time": "2018-10-23T12:28:22.4579346Z",
"ce-source": "",
}
- event = from_http(headers, None)
+ event = cloudevents_implementation["from_http"](headers, None)
assert event.data is None
data = ""
# Data of empty string will be marshalled into None
- event = from_http(headers, data)
+ event = cloudevents_implementation["from_http"](headers, data)
assert event.data is None
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_valid_structured_events(specversion):
+def test_valid_structured_events(specversion, cloudevents_implementation):
# Test creating multiple cloud events
events_queue = []
num_cloudevents = 30
@@ -340,7 +385,7 @@ def test_valid_structured_events(specversion):
"data": {"payload": f"payload-{i}"},
}
events_queue.append(
- from_http(
+ cloudevents_implementation["from_http"](
{"content-type": "application/cloudevents+json"},
json.dumps(event),
)
@@ -354,7 +399,7 @@ def test_valid_structured_events(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_structured_no_content_type(specversion):
+def test_structured_no_content_type(specversion, cloudevents_implementation):
# Test creating multiple cloud events
data = {
"id": "id",
@@ -363,7 +408,7 @@ def test_structured_no_content_type(specversion):
"specversion": specversion,
"data": test_data,
}
- event = from_http({}, json.dumps(data))
+ event = cloudevents_implementation["from_http"]({}, json.dumps(data))
assert event["id"] == "id"
assert event["source"] == "source.com.test"
@@ -392,7 +437,7 @@ def test_is_binary():
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_cloudevent_repr(specversion):
+def test_cloudevent_repr(specversion, cloudevents_implementation):
headers = {
"Content-Type": "application/octet-stream",
"ce-specversion": specversion,
@@ -401,7 +446,7 @@ def test_cloudevent_repr(specversion):
"ce-time": "2018-10-23T12:28:22.4579346Z",
"ce-source": "",
}
- event = from_http(headers, "")
+ event = cloudevents_implementation["from_http"](headers, "")
# Testing to make sure event is printable. I could run event.__repr__() but
# we had issues in the past where event.__repr__() could run but
# print(event) would fail.
@@ -409,8 +454,8 @@ def test_cloudevent_repr(specversion):
@pytest.mark.parametrize("specversion", ["1.0", "0.3"])
-def test_none_data_cloudevent(specversion):
- event = CloudEvent(
+def test_none_data_cloudevent(specversion, cloudevents_implementation):
+ event = cloudevents_implementation["event"](
{
"source": "",
"type": "issue.example",
@@ -421,7 +466,7 @@ def test_none_data_cloudevent(specversion):
to_structured(event)
-def test_wrong_specversion():
+def test_wrong_specversion(cloudevents_implementation):
headers = {"Content-Type": "application/cloudevents+json"}
data = json.dumps(
{
@@ -432,20 +477,20 @@ def test_wrong_specversion():
}
)
with pytest.raises(cloud_exceptions.InvalidRequiredFields) as e:
- from_http(headers, data)
+ cloudevents_implementation["from_http"](headers, data)
assert "Found invalid specversion 0.2" in str(e.value)
-def test_invalid_data_format_structured_from_http():
+def test_invalid_data_format_structured_from_http(cloudevents_implementation):
headers = {"Content-Type": "application/cloudevents+json"}
data = 20
with pytest.raises(cloud_exceptions.InvalidStructuredJSON) as e:
- from_http(headers, data)
+ cloudevents_implementation["from_http"](headers, data)
assert "Expected json of type (str, bytes, bytearray)" in str(e.value)
-def test_wrong_specversion_to_request():
- event = CloudEvent({"source": "s", "type": "t"}, None)
+def test_wrong_specversion_to_request(cloudevents_implementation):
+ event = cloudevents_implementation["event"]({"source": "s", "type": "t"}, None)
with pytest.raises(cloud_exceptions.InvalidRequiredFields) as e:
event["specversion"] = "0.2"
to_binary(event)
@@ -468,22 +513,22 @@ def test_is_structured():
assert not is_structured(headers)
-def test_empty_json_structured():
+def test_empty_json_structured(cloudevents_implementation):
headers = {"Content-Type": "application/cloudevents+json"}
data = ""
with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
- from_http(headers, data)
+ cloudevents_implementation["from_http"](headers, data)
assert "Failed to read specversion from both headers and data" in str(e.value)
-def test_uppercase_headers_with_none_data_binary():
+def test_uppercase_headers_with_none_data_binary(cloudevents_implementation):
headers = {
"Ce-Id": "my-id",
"Ce-Source": "",
"Ce-Type": "cloudevent.event.type",
"Ce-Specversion": "1.0",
}
- event = from_http(headers, None)
+ event = cloudevents_implementation["from_http"](headers, None)
for key in headers:
assert event[key.lower()[3:]] == headers[key]
@@ -493,7 +538,7 @@ def test_uppercase_headers_with_none_data_binary():
assert new_data is None
-def test_generic_exception():
+def test_generic_exception(cloudevents_implementation):
headers = {"Content-Type": "application/cloudevents+json"}
data = json.dumps(
{
@@ -505,28 +550,30 @@ def test_generic_exception():
}
)
with pytest.raises(cloud_exceptions.GenericException) as e:
- from_http({}, None)
+ cloudevents_implementation["from_http"]({}, None)
e.errisinstance(cloud_exceptions.MissingRequiredFields)
with pytest.raises(cloud_exceptions.GenericException) as e:
- from_http({}, 123)
+ cloudevents_implementation["from_http"]({}, 123)
e.errisinstance(cloud_exceptions.InvalidStructuredJSON)
with pytest.raises(cloud_exceptions.GenericException) as e:
- from_http(headers, data, data_unmarshaller=lambda x: 1 / 0)
+ cloudevents_implementation["from_http"](
+ headers, data, data_unmarshaller=lambda x: 1 / 0
+ )
e.errisinstance(cloud_exceptions.DataUnmarshallerError)
with pytest.raises(cloud_exceptions.GenericException) as e:
- event = from_http(headers, data)
+ event = cloudevents_implementation["from_http"](headers, data)
to_binary(event, data_marshaller=lambda x: 1 / 0)
e.errisinstance(cloud_exceptions.DataMarshallerError)
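
All four cases above lean on cloud_exceptions.GenericException being the
shared base class of the SDK's error types: a single
pytest.raises(GenericException) guard catches any of them, and
ExceptionInfo.errisinstance() then narrows the concrete subtype. Note that
errisinstance() returns a bool rather than asserting, so a stricter variant
would assert its result. A minimal illustration with a hypothetical
hierarchy:

    import pytest

    class GenericError(Exception): ...
    class MissingFields(GenericError): ...

    def boom():
        raise MissingFields("no specversion")

    def test_narrowing():
        with pytest.raises(GenericError) as e:
            boom()
        # errisinstance() reports the concrete type without loosening the guard
        assert e.errisinstance(MissingFields)
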
-def test_non_dict_data_no_headers_bug():
+def test_non_dict_data_no_headers_bug(cloudevents_implementation):
# Test for issue #116
headers = {"Content-Type": "application/cloudevents+json"}
data = "123"
with pytest.raises(cloud_exceptions.MissingRequiredFields) as e:
- from_http(headers, data)
+ cloudevents_implementation["from_http"](headers, data)
assert "Failed to read specversion from both headers and data" in str(e.value)
assert "The following deserialized data has no 'get' method" in str(e.value)
diff --git a/mypy.ini b/mypy.ini
index 39426375..d8fb9cc0 100644
--- a/mypy.ini
+++ b/mypy.ini
@@ -1,6 +1,6 @@
[mypy]
plugins = pydantic.mypy
-python_version = 3.7
+python_version = 3.8
pretty = True
show_error_context = True
diff --git a/requirements/test.txt b/requirements/test.txt
index 0e9ff4b4..3e32e4a8 100644
--- a/requirements/test.txt
+++ b/requirements/test.txt
@@ -10,4 +10,4 @@ aiohttp
Pillow
requests
flask
-pydantic>=1.0.0,<3.0
+pydantic>=2.0.0,<3.0
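
Raising the test floor to pydantic>=2.0.0 does not strand the v1 half of the
matrix: Pydantic 2 bundles its legacy API under the pydantic.v1 namespace,
which is exactly what the imports at the top of the test files above rely
on. In short:

    # With pydantic>=2,<3 installed, both namespaces resolve from one package:
    from pydantic import ValidationError                          # v2 API
    from pydantic.v1 import ValidationError as V1ValidationError  # bundled v1 compat

    assert ValidationError is not V1ValidationError
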
diff --git a/setup.py b/setup.py
index 95ccf97c..a4e4befc 100644
--- a/setup.py
+++ b/setup.py
@@ -65,7 +65,6 @@ def get_version(rel_path):
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
diff --git a/tox.ini b/tox.ini
index a5cbdfa7..0436a1be 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
[tox]
-envlist = py{37,38,39,310,311},lint
+envlist = py{38,39,310,311,312},lint
skipsdist = True
[testenv]