diff --git a/.github/workflows/poetry.yml b/.github/workflows/poetry.yml
index a7f04fd..a7a2da2 100644
--- a/.github/workflows/poetry.yml
+++ b/.github/workflows/poetry.yml
@@ -27,12 +27,28 @@ jobs:
         if: always()
         run: python -m poetry run python -m pytest -v tests --alluredir=allure-results
 
-      - name: Allure results
+      - name: Get Allure history
+        uses: actions/checkout@v2
+        if: always()
+        continue-on-error: true
+        with:
+          ref: gh-pages
+          path: gh-pages
+
+      - name: Allure Report action from marketplace
         uses: simple-elf/allure-report-action@master
         if: always()
         id: allure-report
         with:
-          allure_results: allure-results
-          allure_report: allure-report
-          path: ./artifacts/*.*
-          allure_history: allure-history
+          allure_results: allure-results
+          gh_pages: gh-pages
+          allure_report: allure-report
+          allure_history: allure-history
+
+      - name: Deploy report to Github Pages
+        if: always()
+        uses: peaceiris/actions-gh-pages@v2
+        env:
+          PERSONAL_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          PUBLISH_BRANCH: gh-pages
+          PUBLISH_DIR: allure-history
diff --git a/README.md b/README.md
index 0104053..385b6f7 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,8 @@
+![.github/workflows/poetry.yml](https://github.com/77ripdrive/Python_sandbox/workflows/.github/workflows/poetry.yml/badge.svg)
+[![PYTEST](https://img.shields.io/badge/pytest-v%206.2.1-green)](https://img.shields.io/badge/pytest-v%206.2.1-green)
+
+#### The test report can be found here: [Allure report](https://77ripdrive.github.io/Python_sandbox.github.io/)
+
 ## An example of initializing a Python project from scratch
 
 ### Dependency management with [Poetry-Python](https://python-poetry.org/docs/)
diff --git a/config.py b/config.py
new file mode 100644
index 0000000..679ca7d
--- /dev/null
+++ b/config.py
@@ -0,0 +1,2 @@
+BASE_URL_ZIPPOPO = "http://api.zippopotam.us"
+DUCKDUCKGO_API = "https://api.duckduckgo.com/"
diff --git a/poetry.lock b/poetry.lock
index 1ea6ab7..8b7c8d6 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -24,6 +24,14 @@ attrs = ">=16.0.0"
 pluggy = ">=0.4.0"
 six = ">=1.9.0"
 
+[[package]]
+name = "assertpy"
+version = "1.1"
+description = "Simple assertion library for unit testing in python with a fluent API"
+category = "main"
+optional = false
+python-versions = "*"
+
 [[package]]
 name = "atomicwrites"
 version = "1.4.0"
@@ -46,6 +54,14 @@ docs = ["furo", "sphinx", "zope.interface"]
 tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"]
 tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"]
 
+[[package]]
+name = "cerberus"
+version = "1.3.2"
+description = "Lightweight, extensible schema and data validation tool for Python dictionaries."
+category = "main"
+optional = false
+python-versions = ">=2.7"
+
 [[package]]
 name = "certifi"
 version = "2020.12.5"
@@ -311,7 +327,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"]
 [metadata]
 lock-version = "1.1"
 python-versions = "^3.9"
-content-hash = "8720fc81b4bf7430c6dd52c0d1d00e7579d790c9239e86ef35a9619d139d6a45"
+content-hash = "0ec99c07802eefdfb1ce706b6bc2eaddb810f5482796f8fe858b52f150b6047d"
 
 [metadata.files]
 allure-pytest = [
@@ -322,6 +338,9 @@ allure-python-commons = [
     {file = "allure-python-commons-2.8.29.tar.gz", hash = "sha256:b74c03e793bd4628d8c7644e49737c6ebdc1d883692da34d7547d1ef7ee7c863"},
     {file = "allure_python_commons-2.8.29-py3-none-any.whl", hash = "sha256:e9831bd01f25049a54cf47d7c17ad4b65506d5a35d70571cfaf8c4aa82a30f90"},
 ]
+assertpy = [
+    {file = "assertpy-1.1.tar.gz", hash = "sha256:acc64329934ad71a3221de185517a43af33e373bb44dc05b5a9b174394ef4833"},
+]
 atomicwrites = [
     {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"},
     {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"},
 ]
@@ -330,6 +349,9 @@ attrs = [
     {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"},
     {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"},
 ]
+cerberus = [
+    {file = "Cerberus-1.3.2.tar.gz", hash = "sha256:302e6694f206dd85cb63f13fd5025b31ab6d38c99c50c6d769f8fa0b0f299589"},
+]
 certifi = [
     {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"},
     {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"},
diff --git a/pyproject.toml b/pyproject.toml
index 7d707eb..9deddd3 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -13,6 +13,8 @@ pytest-bdd = "^4.0.2"
 jsonpath = "^0.82"
 allure-pytest = "^2.8.29"
 pytest-github-actions-annotate-failures = "^0.1.1"
+Cerberus = "^1.3.2"
+assertpy = "^1.1"
 
 [tool.poetry.dev-dependencies]
diff --git a/tests/conftest.py b/tests/conftest.py
index 7ebd4fb..2584ed2 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,30 +1,9 @@
-import csv
-import json
-import os
-
 import pytest
 
-path_json_schema_file = os.getcwd() + "/asset/schema_first.json"
-
-
-def read_test_data_from_csv(path):
-    test_data = []
-    with open(path, newline="") as csvfile:
-        data = csv.reader(csvfile, delimiter=",")
-        next(data)
-        for row in data:
-            test_data.append(row)
-    return test_data
-
-
-def get_data_from_json_schema(path):
-    with open(path, "r") as schema_file:
-        return json.load(schema_file)
-
-
-# Hooks
+from tests.utils.utils import read_data_from_json_file
 
+
+# bdd hooks
 def pytest_bdd_step_error(request, feature, scenario, step, step_func, step_func_args, exception):
     print(f"Step failed: {step}")
     print(f"Request failed: {request}")
@@ -33,10 +12,10 @@
 @pytest.fixture(scope="session")
 def set_base_payload():
     payload = {"productId": 1, "productName": "A green door", "productPrice": 12.50, "tags": ["home", "green"]}
-    return payload
+    yield payload
 
 
 @pytest.fixture(scope="session")
 def set_schema():
-    schema = get_data_from_json_schema(path_json_schema_file)
-    return schema
+    schema = read_data_from_json_file("schema_first.json")
+    yield schema
diff --git a/tests/features/api.feature b/tests/features/api.feature
index 91c75b4..3f47506 100644
--- a/tests/features/api.feature
+++ b/tests/features/api.feature
@@ -1,4 +1,4 @@
-@pytest.mark.duck's_search
+@pytest.mark.duckduck_search
 Feature: DuckDuckGo Instant Answer API
     As an application developer,
     I want to get instant answers for search terms via a REST API,
diff --git a/tests/steps_defs/test_api_steps.py b/tests/steps_defs/test_api_steps.py
index 58f8541..50ffccc 100644
--- a/tests/steps_defs/test_api_steps.py
+++ b/tests/steps_defs/test_api_steps.py
@@ -5,11 +5,11 @@ from pytest_bdd import then
 
 
 # Shared Variables
-
-DUCKDUCKGO_API = "https://api.duckduckgo.com/"
+from config import DUCKDUCKGO_API
 
 # Scenarios
+
 scenarios("../features/api.feature", example_converters=dict(phrase=str))
@@ -19,7 +19,7 @@
 @given('the DuckDuckGo API is queried with "<phrase>"', target_fixture="ddg_response")
 def ddg_response(phrase):
     params = {"q": phrase, "format": "json"}
-    response = requests.get(DUCKDUCKGO_API, params=params)
+    response = requests.get(f"{DUCKDUCKGO_API}", params=params)
     return response
diff --git a/tests/steps_defs/test_zippo_steps.py b/tests/steps_defs/test_zippo_steps.py
index e3372a0..4df1e01 100644
--- a/tests/steps_defs/test_zippo_steps.py
+++ b/tests/steps_defs/test_zippo_steps.py
@@ -5,6 +5,8 @@ from pytest_bdd import scenarios
 from pytest_bdd import then
 
+from config import BASE_URL_ZIPPOPO
+
 CONVERTERS = {"country_code": str, "zip_code": int, "place_name": str}
 
 scenarios("../features/zippopo.feature", example_converters=CONVERTERS)
@@ -12,8 +14,7 @@
 @given('the Zippopotam API is queried with "<country_code>" and "<zip_code>"', target_fixture="ddg_response")
 def ddg_response(country_code, zip_code):
-    url = "http://api.zippopotam.us/{}/{}".format(country_code, zip_code)
-    response = requests.get(url)
+    response = requests.get(f"{BASE_URL_ZIPPOPO}/{country_code}/{zip_code}")
     return response
diff --git a/tests/test_cerberus_assertpy.py b/tests/test_cerberus_assertpy.py
new file mode 100644
index 0000000..315555b
--- /dev/null
+++ b/tests/test_cerberus_assertpy.py
@@ -0,0 +1,35 @@
+from dataclasses import dataclass
+
+import pytest
+from assertpy import assert_that
+from cerberus import Validator
+
+
+@dataclass
+class Person:
+    name: str
+    age: int
+
+
+class PersonValidator(Validator):
+    def validate_person(self, obj):
+        return self.validate(obj.__dict__)
+
+
+schema = {"name": {"type": "string", "minlength": 2}, "age": {"type": "integer", "min": 18, "max": 65}}
+
+
+@pytest.mark.json_validation_cerberus
+def test_cerberus_validator_base(set_schema, set_base_payload):
+    v = PersonValidator(schema)
+    person = Person("John Doe", 20)
+    result = v.validate_person(person)
+    assert_that(result, description=v.errors).is_true()
+
+
+@pytest.mark.json_validation_cerberus
+def test_cerberus_validator_negative(set_schema, set_base_payload):
+    v = PersonValidator(schema)
+    children = Person("Zoe Doe", 2)
+    result = v.validate_person(children)
+    assert_that(result, description=v.errors).is_false()
diff --git a/tests/test_json_schema_base.py b/tests/test_json_schema_base.py
index c2b9e7f..e48a045 100644
--- a/tests/test_json_schema_base.py
+++ b/tests/test_json_schema_base.py
@@ -1,12 +1,12 @@
-import logging
-
+import allure
 import pytest
 from jsonschema import Draft7Validator
 
-LOGGER = logging.getLogger(__name__)
-
 
+@allure.feature("Jsonschema validation")
+@allure.story("Validation with correct payload value")
 @pytest.mark.jsonValidation
 def test_using_csv_with_different_fields(set_schema, set_base_payload):
-    result = Draft7Validator(set_schema).is_valid(set_base_payload)
-    assert result == True, LOGGER.info("Schema is valid")
+    with allure.step("Validation with correct JsonSchema"):
+        result = Draft7Validator(set_schema).is_valid(set_base_payload)
+        assert result == True
diff --git a/tests/utils/__init__.py b/tests/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/tests/utils/utils.py b/tests/utils/utils.py
new file mode 100644
index 0000000..00fb96e
--- /dev/null
+++ b/tests/utils/utils.py
@@ -0,0 +1,67 @@
+import csv
+import json
+import os
+from pathlib import Path
+
+import jsonpath as jsonpath
+
+BASE_PATH = Path.cwd().joinpath("asset")
+test_data = []
+
+
+def read_data_from_json_file(file_name):
+    path = get_path(file_name)
+    with path.open(mode="r") as schema_file:
+        return json.load(schema_file)
+
+
+def read_data_from_csv_file(file_name):
+
+    path = get_path(file_name)
+    with path.open(mode="r") as csvfile:
+        data = csv.reader(csvfile, delimiter=",")
+        next(data)
+        for row in data:
+            test_data.append(row)
+    return test_data
+
+
+def get_path(file_name):
+    if ".json" in file_name:
+        path = BASE_PATH.joinpath(file_name)
+    elif ".csv" in file_name:
+        path = BASE_PATH.joinpath(file_name)
+    return path
+
+
+def read_data(file):
+    """Read a CSV file into a list of dicts (one JSON-like record per row)."""
+    with open(f'{file}', "r", encoding='utf-8') as f:
+        dict_reader = csv.DictReader(f, delimiter=";")
+        for row in dict_reader:
+            dict_from_csv = dict(row)
+            test_data.append(dict_from_csv)
+
+    return test_data
+
+
+def write_scv_to_list_dict_file(file, name):
+    """Read a CSV file from the project root and write it out as a list of JSON objects."""
+    dicts = read_data(file)
+    file_name = os.getcwd() + f"/assets/assets_{name}.py"
+    with open(file_name, "a+", encoding="utf-8") as f:
+        json.dump(dicts, f, ensure_ascii=False, sort_keys=True, indent=4)
+
+
+def search_data_from_json_to_list_of_jsons(base_file: json, jsonpath_expression: str):
+    """Get the list of required fields from an OpenAPI document,
+    depending on the request endpoint and the response status."""
+    list_jsons = jsonpath.jsonpath(base_file, jsonpath_expression)
+    return list_jsons
+
+
+def write_json_file(base_name, json_object: json):
+    """TODO: write a JSON object to a file in the assets directory."""
+    file_name = os.getcwd() + f"/assets/{base_name}.json"
+    with open(file_name, "a+", encoding="utf-8") as f:
+        json.dump(json_object, f, ensure_ascii=False, sort_keys=True, indent=4)
\ No newline at end of file
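
The sketch below is not part of the diff; it is a minimal illustration of how the new config.py constants and the read_data_from_csv_file helper from tests/utils/utils.py might be combined in a parametrized test. The data file name zip_codes.csv, its column order (country_code, zip_code, place_name), and the test name are assumptions, not code from this change.

import pytest
import requests

from config import BASE_URL_ZIPPOPO
from tests.utils.utils import read_data_from_csv_file


# Hypothetical data file asset/zip_codes.csv with rows like: us,90210,Beverly Hills
@pytest.mark.parametrize("country_code,zip_code,place_name", read_data_from_csv_file("zip_codes.csv"))
def test_place_name_matches_zip_code(country_code, zip_code, place_name):
    # Query the Zippopotam API using the shared base URL from config.py
    response = requests.get(f"{BASE_URL_ZIPPOPO}/{country_code}/{zip_code}")
    assert response.status_code == 200
    # Zippopotam returns the locality under the "place name" key of the first place entry
    assert response.json()["places"][0]["place name"] == place_name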