diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..72624dc3 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,32 @@ +**Description** + +- [ ] I have read the [contribution guidelines](https://github.com/NLeSC/python-template/blob/main/CONTRIBUTING.md) +- [ ] This update is in line with what is recommended in the [Python chapter of the Guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python) +- [ ] All user facing changes have been added to CHANGELOG.md + + + + + +**Instructions to review the pull request** + + +Create a `python-template-test` repo on GitHub (will be overwritten if existing) +``` +cd $(mktemp -d --tmpdir py-tmpl-XXXXXX) +cookiecutter -c https://github.com//python-template +# Fill with python-template-test info +cd python-template-test +git init +git add --all +git commit -m "First commit" +git remote add origin https://github.com//python-template-test +git push -u origin main -f +python -m venv env +source env/bin/activate +python -m pip install --upgrade pip setuptools +python -m pip install '.[dev,publishing]' +``` diff --git a/.github/workflows/cffconvert.yml b/.github/workflows/cffconvert.yml index 0dba0b1c..71443637 100644 --- a/.github/workflows/cffconvert.yml +++ b/.github/workflows/cffconvert.yml @@ -1,15 +1,19 @@ name: cffconvert -on: push +on: + push: + paths: + - CITATION.cff jobs: - - cffconvert: - name: Verify citation metadata consistency + validate: + name: "validate" runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - name: Check out a copy of the repository + - name: Check out a copy of the repository + uses: actions/checkout@v4 - - uses: citation-file-format/cffconvert-github-action@main - name: Check whether the citation metadata from CITATION.cff is equivalent to that in .zenodo.json + - name: Check whether the citation metadata from CITATION.cff is valid + uses: citation-file-format/cffconvert-github-action@2.0.0 + with: + args: "--validate" diff --git a/.github/workflows/markdown-link-check.yml b/.github/workflows/markdown-link-check.yml index 0afe69eb..e27e10b3 100644 --- a/.github/workflows/markdown-link-check.yml +++ b/.github/workflows/markdown-link-check.yml @@ -1,6 +1,12 @@ name: markdown-link-check -on: [push, pull_request] +on: + push: + branches: + - main + pull_request: + branches: + - main jobs: @@ -8,7 +14,8 @@ jobs: name: Check markdown links runs-on: ubuntu-latest steps: - - uses: actions/checkout@main + - uses: actions/checkout@v4 - uses: gaurav-nelson/github-action-markdown-link-check@v1 with: + use-quiet-mode: 'yes' config-file: '.mlc-config.json' diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index d13da9a5..f709f3d0 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,9 +2,11 @@ name: tests on: push: + branches: + - main pull_request: - types: [opened, synchronize, reopened] - + branches: + - main jobs: tests: @@ -14,22 +16,22 @@ jobs: fail-fast: false matrix: os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] - python-version: ['3.6', '3.7', '3.8', '3.9'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Python info - shell: bash -l {0} + shell: bash -e {0} run: | - which python3 - python3 --version + which python + 
python --version - name: Install dependencies run: | - python3 -m pip install --upgrade pip setuptools - python3 -m pip install .[dev] + python -m pip install --upgrade pip setuptools + python -m pip install .[dev] - name: Run pytest run: | - pytest -v + python -m pytest -v --durations=0 diff --git a/.mlc-config.json b/.mlc-config.json index 7b0dfb57..95443f7b 100644 --- a/.mlc-config.json +++ b/.mlc-config.json @@ -15,9 +15,13 @@ }, { "pattern": "^.github/workflows/sonarcloud.yml$" + }, + { + "pattern": "^https://readthedocs.org/dashboard/import.*" } ], "replacementPatterns": [ ], + "retryOn429": true, "timeout": "20s" } diff --git a/.zenodo.json b/.zenodo.json deleted file mode 100644 index e76a8287..00000000 --- a/.zenodo.json +++ /dev/null @@ -1,77 +0,0 @@ -{ - "creators": [ - { - "affiliation": "Netherlands eScience Center", - "name": "van der Zwaan, Janneke", - "orcid": "0000-0002-8329-7000" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "van Werkhoven, Ben", - "orcid": "0000-0002-7508-3272" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Andela, Bouwe", - "orcid": "0000-0001-9005-8940" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Bos, Patrick", - "orcid": "0000-0002-6033-960X" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Attema, Jisk", - "orcid": "0000-0002-0948-1176" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Bakker, Tom" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Spaaks, Jurriaan H.", - "orcid": "0000-0002-7064-4069" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "van Kuppevelt, Dafne", - "orcid": "0000-0002-2662-1994" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Veen, Lourens", - "orcid": "0000-0002-6311-1168" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Rol, Evert", - "orcid": "0000-0001-8357-4453" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Verhoeven, Stefan", - "orcid": "0000-0002-5821-2060" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Diblen, Faruk", - "orcid": "0000-0002-0989-929X" - }, - { - "affiliation": "Netherlands eScience Center", - "name": "Tjong Kim Sang, Erik", - "orcid": "0000-0002-8431-081X" - } - ], - "keywords": [ - "cookiecutter", - "template", - "Python" - ], - "license": { - "id": "Apache-2.0" - }, - "title": "Netherlands eScience Center Python Template" -} diff --git a/CHANGELOG.md b/CHANGELOG.md index 9ed2a8cf..8978bd94 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,14 +2,50 @@ ## Unreleased +### Added + +* Added Python 3.12 support [#356](https://github.com/NLeSC/python-template/issues/356) +* Template unit tests for documentation generation, linting and version bumping +* Docstring for function +* Intersphinx to documentation +* Coverage and doctest commands for documentation [#97](https://github.com/NLeSC/python-template/issues/97) +* Added new 'docs' section in extra dependencies [#317](https://github.com/NLeSC/python-template/issues/317) + +### Changed + +* Moved to src/ based layout for generated packages +* Moved from setup.cfg/.py to pyproject.toml [#351](https://github.com/NLeSC/python-template/issues/351) +* Moved from prospector to ruff [#336](https://github.com/NLeSC/python-template/issues/336) +* Renamed `project_name` to `directory_name` in cookiecutter questionnaire +* Initial linting is error free [#227](https://github.com/NLeSC/python-template/issues/227) +* Consolidated test/lint/build/docs 
into single matrix workflow [#270](https://github.com/NLeSC/python-template/issues/276) +* Enforce isort configuration +* Default for `package_short_description` in cookiecutter questionnaire +* Link checker ignores GH private pages and test pypi site [#288](https://github.com/NLeSC/python-template/issues/288) +* In CI build workflow make prospector die if there are errors [#275](https://github.com/NLeSC/python-template/issues/275) +* All example tests make use of example function +* Use bumpversion for version in Sphinx config [#44](https://github.com/NLeSC/python-template/issues/44) +* Regenerated docs/conf.py with sphinx-quickstart v3.5.4 + enabled built-in extensions [#44](https://github.com/NLeSC/python-template/issues/44) +* Generate api rst files with extension instead of custom function [#95](https://github.com/NLeSC/python-template/issues/95) +* Change from bump2version (unmaintained) to bump-my-version. +* Set markdown link checker to quiet mode: only report broken links [#262](https://github.com/NLeSC/python-template/issues/262) + +### Removed + +* Removed Python 3.7 support [#343](https://github.com/NLeSC/python-template/issues/343) +* `.pylintrc` file, was too strict, too soon [#267](https://github.com/NLeSC/python-template/issues/267) +* Unused development dependencies [#167](https://github.com/NLeSC/python-template/issues/167) +* Statements in project_setup.md already mentioned in README.dev.md +* .zenodo.json is no longer necessary, CITATION.cff also works with Zenodo. + ## 0.4.0 ### Added * Instructions to add your existing code to directory generated by the NLeSC Python template [#202](https://github.com/NLeSC/python-template/issues/202) -* Keywords to questionaire [#270](https://github.com/NLeSC/python-template/issues/270) +* Keywords to questionnaire [#270](https://github.com/NLeSC/python-template/issues/270) * Next step issue generation workflow [#228](https://github.com/NLeSC/python-template/issues/228) -* Next step issue for SonarCloud integration [#234](https://github.com/NLeSC/python-template/issues/234) +* Next step issue for SonarCloud integration [#234](https://github.com/NLeSC/python-template/issues/234) * Next step issue for Zenodo integration [#235](https://github.com/NLeSC/python-template/issues/235) * Next step issue for Read the Docs [#236](https://github.com/NLeSC/python-template/issues/236) * Next step issue for citation data [#237](https://github.com/NLeSC/python-template/issues/237) @@ -20,7 +56,7 @@ * CI Tests on Windows [#140](https://github.com/NLeSC/python-template/issues/140) [#223](https://github.com/NLeSC/python-template/issues/223) * `.pylintrc` file * Valid license name and first author name in `CITATION.cff` -* SonarCloud integration for code quality and coverage [#89](https://github.com/NLeSC/python-template/issues/89) +* SonarCloud integration for code quality and coverage [#89](https://github.com/NLeSC/python-template/issues/89) * Read the Docs [#78](https://github.com/NLeSC/python-template/issues/78) ### Changed @@ -67,7 +103,7 @@ * Dropped appveyor [#160](https://github.com/NLeSC/python-template/issues/160) * Dropped everything Conda related * Drop Python 3.5 support -* Removed unit tests doing the linting +* Removed unit tests doing the linting ## 0.2.0 diff --git a/CITATION.cff b/CITATION.cff index 40b191cc..045e0e43 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -68,7 +68,7 @@ authors: family-names: Tjong Kim Sang given-names: Erik orcid: "https://orcid.org/0000-0002-8431-081X" -cff-version: "1.1.0" +cff-version: "1.2.0" 
date-released: 2018-07-17 doi: 10.5281/zenodo.1310751 keywords: diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 70f2da13..5b19d271 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -27,14 +27,31 @@ The sections below outline the steps in each case. ## You want to make some kind of change to the code base +1. (**important**) this repository implements the best practices we recommend in the Python chapter of the [Guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python). Check that your planned contribution is +in line with what is recommended there. If not, please [contribute to the guide](https://github.com/NLeSC/guide/blob/main/CONTRIBUTING.md) instead / as well, or at least create an issue there. 1. (**important**) announce your plan to the rest of the community _before you start working_. This announcement should be in the form of a (new) issue; 1. (**important**) wait until some kind of consensus is reached about your idea being a good idea; 1. if needed, fork the repository to your own Github profile and create your own feature branch off of the latest main commit. While working on your feature branch, make sure to stay up to date with the main branch by pulling in changes, possibly from the 'upstream' repository (follow the instructions [here](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and [here](https://help.github.com/articles/syncing-a-fork/)); -1. Install dependencies with `pip3 install -r requirements.txt`; -1. make sure the existing tests still work by running ``pytest``. If project tests fails use ``pytest --keep-baked-projects`` to keep generated project in /tmp/pytest-* and investigate; +1. install dependencies (see the [development documentation](README.dev.md#create-a-virtual-environment)); +1. make sure the existing tests still work by running ``pytest``. If project tests fail use ``pytest --keep-baked-projects`` to keep generated project files in `/tmp/pytest-*` and investigate; 1. add your own tests (if necessary); 1. update or expand the documentation; -1. push your feature branch to (your fork of) the Python Template repository on GitHub; +1. update the `CHANGELOG.md` file with your change; +1. [push](http://rogerdudler.github.io/git-guide/) your feature branch to (your fork of) the Python Template repository on GitHub; 1. create the pull request, e.g. following the instructions [here](https://help.github.com/articles/creating-a-pull-request/). In case you feel like you've made a valuable contribution, but you don't know how to write or run tests for it, or how to generate the documentation: don't let this discourage you from making the pull request; we can help you! Just go ahead and submit the pull request, but keep in mind that you might be asked to append additional commits to your pull request. + +## You want to make a new release of the code base + +To create a release you need write permission on the repository. + +1. Check the author list in [`CITATION.cff`](CITATION.cff) +1. Update the version number in setup.cfg and CITATION.cff +1. Update the `CHANGELOG.md` to include changes made +1. Go to the [GitHub release page](https://github.com/nlesc/python-template/releases) +1. Press draft a new release button +1. Fill version, title and description field +1. Press the Publish Release button + +Also a Zenodo entry will be made for the release with its own DOI. 
\ No newline at end of file diff --git a/README.dev.md b/README.dev.md index f2debd1c..1039a9a9 100644 --- a/README.dev.md +++ b/README.dev.md @@ -10,7 +10,7 @@ We recommend installing `cookiecutter` in user space as per `cookiecutter`'s ins install `cookiecutter` for every new project. ```shell -python3 -m pip install --user --upgrade cookiecutter +python -m pip install --user --upgrade cookiecutter ``` ### Get your own copy of the repository @@ -30,17 +30,17 @@ run the tests later. ```shell # Create a virtual environment, e.g. with -python3 -m venv env +python -m venv env # activate virtual environment source env/bin/activate # make sure to have a recent version of pip and setuptools -python3 -m pip install --upgrade pip setuptools +python -m pip install --upgrade pip setuptools # (from the project root directory) # install development dependencies -python3 -m pip install --no-cache-dir .[dev] +python -m pip install --no-cache-dir .[dev] ``` ## Running the tests @@ -82,7 +82,7 @@ In addition to the information in `my-python-project/project_setup.md`, the deve 1. generating `my-python-project`'s documentation locally 1. running `my-python-project`'s tests locally 1. running `my-python-project`'s linters locally -1. verifying that the `my-python-project`'s version can be updated using `bumpversion` +1. verifying that the `my-python-project`'s version can be updated using `bump-my-version` 1. making a release of `my-python-project` on https://test.pypi.org/ Follow the instructions from `my-python-project/README.dev.md` and make sure that everything works. @@ -92,7 +92,7 @@ Follow the instructions from `my-python-project/README.dev.md` and make sure tha ### Preparation 1. Make sure the `CHANGELOG.md` has been updated -2. Verify that the information in `CITATION.cff` is correct, and that `.zenodo.json` contains equivalent data +2. Verify that the information in `CITATION.cff` is correct. 3. Make sure that `version` in [setup.cfg](setup.cfg) and `version` in [CITATION.cff](CITATION.cff) have been bumped to the to-be-released version of the template 4. Run the unit tests with `pytest tests/` 5. Go through the steps outlined above for [generating a new package from the command line](#using-cookiecutter-to-generate-a-new-package-from-the-command-line), and verify that the generated package works as it should. diff --git a/README.md b/README.md index 2a2f84be..e23cebc9 100644 --- a/README.md +++ b/README.md @@ -8,23 +8,23 @@ Use this [Cookiecutter](https://cookiecutter.readthedocs.io) template to generat an empty Python package. 
Features include: - Boilerplate unit tests and documentation, -- [Python static setup configuration]({{cookiecutter.project_name}}/setup.cfg), +- [Python static setup configuration]({{cookiecutter.directory_name}}/pyproject.toml), - Open source software license, -- Continuous integration with [GitHub action workflows]({{cookiecutter.project_name}}/.github/workflows) for building, testing, link checking and linting, -- Code style checking with [prospector](https://pypi.org/project/prospector/), -- [Editorconfig]({{cookiecutter.project_name}}/.editorconfig), +- Continuous integration with [GitHub action workflows]({{cookiecutter.directory_name}}/.github/workflows) for building, testing, link checking and linting, +- Code style checking with [ruff](https://beta.ruff.rs/), +- [Editorconfig]({{cookiecutter.directory_name}}/.editorconfig), - Usage and contribution documents: - - [README.md]({{cookiecutter.project_name}}/README.md) for package users, - - [README.dev.md]({{cookiecutter.project_name}}/README.dev.md) for package developer, - - [project_setup.md]({{cookiecutter.project_name}}/project_setup.md) with extensive documentation about project setup, - - [Change log]({{cookiecutter.project_name}}/CHANGELOG.md), - - [Code of Conduct]({{cookiecutter.project_name}}/CODE_OF_CONDUCT.md), - - [Contributing guidelines]({{cookiecutter.project_name}}/CONTRIBUTING.md), + - [README.md]({{cookiecutter.directory_name}}/README.md) for package users, + - [README.dev.md]({{cookiecutter.directory_name}}/README.dev.md) for package developer, + - [project_setup.md]({{cookiecutter.directory_name}}/project_setup.md) with extensive documentation about project setup, + - [Change log]({{cookiecutter.directory_name}}/CHANGELOG.md), + - [Code of Conduct]({{cookiecutter.directory_name}}/CODE_OF_CONDUCT.md), + - [Contributing guidelines]({{cookiecutter.directory_name}}/CONTRIBUTING.md), - Continuous code quality and code coverage reporting using [Sonarcloud](https://sonarcloud.io/), -- Automatic creation of [issues]({{cookiecutter.project_name}}/.github/next_steps) with instructions how to pass all GitHub action workflows and integrate with services like Zenodo and Read the Docs, -- Instructions how to make package [citable]({{cookiecutter.project_name}}/.github/next_steps/04_citation.md) +- Automatic creation of [issues]({{cookiecutter.directory_name}}/.github/next_steps) with instructions how to pass all GitHub action workflows and integrate with services like Zenodo and Read the Docs, +- Instructions how to make package [citable]({{cookiecutter.directory_name}}/.github/next_steps/02_citation.md) - FAIR software recommendation badge, -- Optional [pre commit hook](https://github.com/NLeSC/python-template/blob/main/%7B%7Bcookiecutter.project_name%7D%7D/README.dev.md#running-linters-locally) to catch lint errors early +- Optional [pre commit hook]({{cookiecutter.directory_name}}/README.dev.md#running-linters-locally) to catch lint errors early ## Badges @@ -51,7 +51,7 @@ We recommend installing `cookiecutter` in user space as per `cookiecutter`'s ins install `cookiecutter` for every new project. 
```shell -python3 -m pip install --user --upgrade cookiecutter +python -m pip install --user --upgrade cookiecutter ``` ### Step 2/3: Generate the files and directory structure @@ -67,21 +67,21 @@ cookiecutter https://github.com/nlesc/python-template.git | Name | Default value | Explanation | | ------------------------- | ------------- | ----------- | +| directory_name | my-python-project | Name of the directory that contains the package. Avoid using spaces or uppercase letters for the best experience across operating systems. To get an impression of what will be generated, see the directory tree [below](https://github.com/NLeSC/python-template#step-33-read-about-what-was-just-generated) | | package_name | my_python_package | Name of the package. Avoid using spaces, dashes, or uppercase letters for the best experience across operating systems. | -| project_name | my-python-project | Name of the project that contains the package. Avoid using spaces or uppercase letters for the best experience across operating systems. | -| package_short_description |   | The information that you enter here will end up in the README, documentation, license, and setup.cfg, so it may be a good idea to prepare something in advance. | +| package_short_description | Short description of package | The information that you enter here will end up in the README, documentation, license, and pyproject.toml, so it may be a good idea to prepare something in advance. | | keyword1 | keyword1 | A term that describes your package. | | keyword2 | keyword2 | Another term that describes your package. | | version | 0.1.0 |   | | github_organization | <my-github-organization> | GitHub organization that will contain this project's repository. This can also be your GitHub user name. | | license | Apache Software License 2.0 | The software license under which the code is made available. | -| full_name | John Smith | Your full name, e.g. _John Smith_. | +| full_name | Jane Smith | Your full name, e.g. _Jane Smith_. | | email | yourname@esciencecenter.nl | Your (work) email address. | | copyright_holder | Netherlands eScience Center | Name(s) of the organization(s) or person(s) who hold the copyright of the software. | | code_of_conduct_email | yourname@esciencecenter.nl | Email address of the person who should be contacted in case of violations of the Code of Conduct. | Once the project files have been generated, follow the steps outlined in -[{{cookiecutter.project_name}}/next_steps.md]({{cookiecutter.project_name}}/next_steps.md). +[{{cookiecutter.directory_name}}/next_steps.md]({{cookiecutter.directory_name}}/next_steps.md). ### Step 3/3: Read about what was just generated @@ -89,20 +89,6 @@ Good job! 
You have now generated the skeleton for your package: ```text my-python-project/ -├── .bumpversion.cfg -├── CHANGELOG.md -├── CITATION.cff -├── CODE_OF_CONDUCT.md -├── CONTRIBUTING.md -├── docs -│ ├── conf.py -│ ├── index.rst -│ ├── make.bat -│ ├── Makefile -│ ├── _static -│ │ └── theme_overrides.css -│ └── _templates -│ └── .gitignore ├── .editorconfig ├── .githooks │ └── pre-commit @@ -116,35 +102,43 @@ my-python-project/ │ └── workflows │ ├── build.yml │ ├── cffconvert.yml -│ ├── lint.yml +│ ├── documentation.yml │ ├── markdown-link-check.yml │ ├── next_steps.yml │ └── sonarcloud.yml ├── .gitignore +├── .mlc-config.json +├── .readthedocs.yaml +├── CHANGELOG.md +├── CITATION.cff +├── CODE_OF_CONDUCT.md +├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in -├── .mlc-config.json -├── my_python_package -│ ├── __init__.py -│ ├── my_module.py -│ └── __version__.py -├── next_steps.md ├── NOTICE -├── project_setup.md -├── .prospector.yml -├── .pylintrc -├── pyproject.toml ├── README.dev.md ├── README.md -├── setup.cfg -├── setup.py +├── docs +│ ├── Makefile +│ ├── _templates +│ │ └── .gitignore +│ ├── conf.py +│ ├── index.rst +│ └── make.bat +├── next_steps.md +├── project_setup.md +├── pyproject.toml ├── sonar-project.properties +├── src +│ └── my_python_package +│ ├── __init__.py +│ └── my_module.py └── tests ├── __init__.py └── test_my_module.py ``` -For an explanation of what's there, read on in the [project_setup.md]({{cookiecutter.project_name}}/project_setup.md) file. +For an explanation of what's there, read on in the [project_setup.md]({{cookiecutter.directory_name}}/project_setup.md) file. There are also instructions on how to [apply the template to an existing Python package](ADD_TO_EXISTING_PACKAGE.md). ## Examples @@ -162,7 +156,7 @@ list below: 1. pycff: [https://github.com/citation-file-format/pycff](https://github.com/citation-file-format/pycff) 1. spec2vec: [https://github.com/iomega/spec2vec](https://github.com/iomega/spec2vec) 1. yatiml: [https://github.com/yatiml/yatiml](https://github.com/yatiml/yatiml) -1. _... And many more. Make a PR to add your project here!_ +1. _... And many more (see [this discussion](https://github.com/NLeSC/python-template/issues/48)). 
Make a PR to add your project here, or simply ping us in an issue!_ ## How to contribute diff --git a/cookiecutter.json b/cookiecutter.json index bdd74d59..60d0b3f3 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -1,14 +1,13 @@ { + "directory_name": "my-python-project", "package_name": "my_python_package", - "project_name": "my-python-project", - "_copy_without_render": [".github/workflows/*"], - "package_short_description": "", + "package_short_description": "Short description of package", "keyword1": "keyword1", "keyword2": "keyword2", "version": "0.1.0", "github_organization": "", - "license": ["Apache Software License 2.0", "MIT license", "BSD license", "ISC license", "GNU General Public License v3 or later", "Not open source"], - "full_name": "John Smith", + "license": ["Apache Software License 2.0", "MIT license", "BSD license", "ISC license", "GNU General Public License v3 or later", "GNU Lesser General Public License v3 or later", "Not open source"], + "full_name": "Jane Smith", "email": "yourname@esciencecenter.nl", "copyright_holder": "Netherlands eScience Center", "code_of_conduct_email": "{{ cookiecutter.email }}" diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 39960c13..f7ceaa80 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -1 +1 @@ -print("\nProject was successfully generated. For next steps, refer to file {{ cookiecutter.project_name }}/next_steps.md.\n") +print("\nProject was successfully generated. For next steps, refer to file {{ cookiecutter.directory_name }}/next_steps.md.\n") diff --git a/hooks/pre_gen_project.py b/hooks/pre_gen_project.py index c0e56fa3..665cbe50 100644 --- a/hooks/pre_gen_project.py +++ b/hooks/pre_gen_project.py @@ -1,10 +1,11 @@ # Note: cookiecutter first makes the main level directory using -# project_name from cookiecutter.json before running this hook +# directory_name from cookiecutter.json before running this hook {{ cookiecutter.update({ "package_name": cookiecutter.package_name.lower().replace(" ", "_").replace("-", "_"), - "project_name": cookiecutter.project_name.lower().replace(" ", "-"), + "directory_name": cookiecutter.directory_name.lower().replace(" ", "-"), "full_name": cookiecutter.full_name.replace('\"', '\\\"'), - "repository": "https://github.com/" + cookiecutter.github_organization + "/" + cookiecutter.project_name.lower().replace(" ", "-"), + "repository": "git@github.com:" + cookiecutter.github_organization + "/" + cookiecutter.directory_name.lower().replace(" ", "-"), + "repository_url": "https://github.com/" + cookiecutter.github_organization + "/" + cookiecutter.directory_name.lower().replace(" ", "-"), "package_short_description": cookiecutter.package_short_description.replace('\"', '\\\"') }) }} diff --git a/setup.cfg b/setup.cfg index 809449c4..5aef0661 100644 --- a/setup.cfg +++ b/setup.cfg @@ -12,10 +12,11 @@ classifiers = License :: OSI Approved :: Apache Software License Natural Language :: English Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 + Programming Language :: Python :: 3.10 + Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 description = Cookiecutter template to initialize Python projects in accordance with Netherlands eScience Center best practices long_description = file: README.md long_description_content_type = text/markdown @@ -29,6 +30,7 @@ version = 0.4.0 [options] 
zip_safe = False include_package_data = True +python_requires = >=3.8 packages = install_requires = cookiecutter==1.7.2 @@ -41,10 +43,11 @@ install_requires = [options.extras_require] dev = - pytest<5.0.0,>=3.3.0 + coverage [toml] + pytest pytest-cookies [tool:pytest] testpaths = tests -norecursedirs = .git .github hooks {{cookiecutter.project_name}} +norecursedirs = .git .github hooks {{cookiecutter.directory_name}} diff --git a/tests/test_project.py b/tests/test_project.py index 53f8cc8c..1eb97d46 100644 --- a/tests/test_project.py +++ b/tests/test_project.py @@ -1,14 +1,12 @@ import os import subprocess -from pathlib import Path -from shutil import which +import sys from sys import platform from typing import Sequence import pytest -IS_WINDOWS = platform.startswith("win") -IS_WINDOWS_CI = IS_WINDOWS and os.environ.get('CI', False) +IS_WINDOWS = platform.startswith('win') def test_project_folder(cookies): @@ -16,8 +14,8 @@ def test_project_folder(cookies): assert project.exit_code == 0 assert project.exception is None - assert project.project.basename == 'my-python-project' - assert project.project.isdir() + assert project.project_path.name == 'my-python-project' + assert project.project_path.is_dir() def run(args: Sequence[str], dirpath: os.PathLike) -> subprocess.CompletedProcess: @@ -25,63 +23,170 @@ def run(args: Sequence[str], dirpath: os.PathLike) -> subprocess.CompletedProces stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dirpath, - encoding="utf-8") + encoding='utf-8') print(completed_process.stdout) print(completed_process.stderr) return completed_process -@pytest.fixture -def baked_with_development_dependencies(cookies): - result = cookies.bake() - if IS_WINDOWS_CI: - # Creating virtualenv does not work on Windows CI, - # falling back to using current pip3 dir - pip = Path(which('pip3')) - bin_dir = str(pip.parent) + '\\' - else: - env_output = run(['python3', '-m', 'venv', 'env'], result.project) - assert env_output.returncode == 0 - bin_dir = 'env/Scripts/' if IS_WINDOWS else 'env/bin/' - latest_pip_output = run([f'{bin_dir}pip3', 'install', '--upgrade', 'pip', 'setuptools'], result.project) +@pytest.fixture(scope='session') +def project_env_bin_dir(tmp_path_factory): + tmp_path = tmp_path_factory.mktemp('venv') + env_output = run(['python', '-m', 'venv', 'env'], tmp_path) + assert env_output.returncode == 0 + bin_dir = str(tmp_path / 'env' / 'bin') + if IS_WINDOWS: + bin_dir = str(tmp_path / 'env' / 'Scripts') + return str(bin_dir) + os.sep + + +@pytest.fixture(scope='session') +def baked_with_development_dependencies(cookies_session, project_env_bin_dir): + result = cookies_session.bake() + assert result.exit_code == 0 + bin_dir = project_env_bin_dir + latest_pip_output = run([f'{bin_dir}python', '-m', 'pip', 'install', '--upgrade', 'pip', 'setuptools'], result.project_path) assert latest_pip_output.returncode == 0 - pip_output = run([f'{bin_dir}pip3', 'install', '--editable', '.[dev]'], result.project) + pip_output = run([f'{bin_dir}python', '-m', 'pip', 'install', '--editable', '.[dev]'], result.project_path) assert pip_output.returncode == 0 - return result.project, bin_dir + return result.project_path + + +def test_pytest(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + result = run([f'{bin_dir}python', '-m', 'pytest'], project_dir) + assert result.returncode == 0 + assert '== 3 passed in' in result.stdout + +def test_coverage(baked_with_development_dependencies, 
project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + result = run([f'{bin_dir}coverage', 'run', '-m', 'pytest'], project_dir) + assert result.returncode == 0 + assert '== 3 passed in' in result.stdout + assert (project_dir / '.coverage').exists() -def test_pytest(baked_with_development_dependencies): - project_dir, bin_dir = baked_with_development_dependencies - pytest_output = run([f'{bin_dir}pytest'], project_dir) - assert pytest_output.returncode == 0 - assert '== 4 passed in' in pytest_output.stdout - assert (project_dir / 'coverage.xml').exists() - assert (project_dir / 'htmlcov/index.html').exists() +def test_tox(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + result = run([f'{bin_dir}tox'], project_dir) + assert result.returncode == 0 + assert '== 3 passed in' in result.stdout + # assert (project_dir / '.tox' / 'dist' / 'my_python_package-0.1.0.zip').exists() + assert (project_dir / '.tox' / '.pkg' / 'dist'/ 'my_python_package-0.1.0.tar.gz').exists() -def test_subpackage(baked_with_development_dependencies): - project_dir, bin_dir = baked_with_development_dependencies - subpackage = (project_dir / 'my_python_package' / 'mysub') + +def test_subpackage(baked_with_development_dependencies, project_env_bin_dir): + """Test if subpackages end up in (wheel) distributions""" + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + subpackage = (project_dir / 'src' / 'my_python_package' / 'mysub') subpackage.mkdir() - (subpackage / '__init__.py').write_text('FOO = "bar"', encoding="utf-8") + (subpackage / '__init__.py').write_text('FOO = "bar"\n', encoding="utf-8") - subsubpackage = (project_dir / 'my_python_package' / 'mysub' / 'mysub2') + subsubpackage = (project_dir / 'src' / 'my_python_package' / 'mysub' / 'mysub2') subsubpackage.mkdir() - (subsubpackage / '__init__.py').write_text('FOO = "bar"', encoding="utf-8") - - if IS_WINDOWS_CI: - # On Windows CI python and pip executable are in different paths - bin_dir = '' - build_output = run([f'{bin_dir}python', 'setup.py', 'build'], project_dir) - assert build_output.returncode == 0 + (subsubpackage / '__init__.py').write_text('FOO = "bar"\n', encoding="utf-8") + + # Note: we pass --wheel explicitly, because wheel has a useful side-effect + # of leaving a build directory after building that we can check for its + # contents in the asserts below. However, be aware that this behavior is + # not guaranteed to stay and is in fact a known bug / PEP-violation! + # See https://github.com/pypa/wheel/issues/447. Also, by passing --wheel + # explicitly (although by default build already builds a wheel as well), + # we omit the sdist being built, saving some seconds. 
+ result = run([f'{bin_dir}python', '-m', 'build', '--wheel'], project_dir) + assert result.returncode == 0 assert (project_dir / 'build' / 'lib' / 'my_python_package' / 'mysub' / '__init__.py').exists() assert (project_dir / 'build' / 'lib' / 'my_python_package' / 'mysub' / 'mysub2' / '__init__.py').exists() -def test_generate_api_docs(baked_with_development_dependencies): - project_dir, bin_dir = baked_with_development_dependencies +def test_generate_api_docs(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir - build_output = run([f'{bin_dir}sphinx-build', '-b', 'html', 'docs', 'docs/_build/html'], project_dir) - assert build_output.returncode == 0 - assert 'build succeeded' in build_output.stdout + result = run([f'{bin_dir}sphinx-build', '-b', 'html', 'docs', 'docs/_build/html'], project_dir) + assert result.returncode == 0 + assert 'build succeeded' in result.stdout assert (project_dir / 'docs' / '_build' / 'html' / 'index.html').exists() + + +@pytest.mark.skipif(sys.version_info < (3, 9), reason= +"requires python 3.9 or higher, see https://github.com/NLeSC/python-template/pull/347#issuecomment-1710684574") +def test_coverage_api_docs(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + + result = run([f'{bin_dir}sphinx-build', '-b', 'coverage', 'docs', 'docs/_build/coverage'], project_dir) + assert result.returncode == 0 + assert 'build succeeded' in result.stdout + coverage_file = project_dir / 'docs' / '_build' / 'coverage' / 'python.txt' + coverage_file_lines = coverage_file.read_text('utf8').splitlines() + # Coverage file lines should look globally like: + # ['Undocumented Python objects', + # '===========================', + # '', + # 'Statistics', + # '----------', + # '', + # '+--------------------------------+----------+--------------+', + # '| Module | Coverage | Undocumented |', + # '+================================+==========+==============+', + # '| my_python_package.my_module | 100.00% | 0 |', + # '+--------------------------------+----------+--------------+', + # '| my_python_package.mysub.mysub2 | 100.00% | 0 |', + # '+--------------------------------+----------+--------------+', + # '| my_python_package | 100.00% | 0 |', + # '+--------------------------------+----------+--------------+', + # '| my_python_package.mysub | 100.00% | 0 |', + # '+--------------------------------+----------+--------------+', + # '| TOTAL | 100.00% | 0 |', + # '+--------------------------------+----------+--------------+', + # '' + # ] + # The package coverage lines change order between runs, so we test for each data row individually: + assert '| my_python_package | 100.00% | 0 |' in coverage_file_lines + assert '| my_python_package.my_module | 100.00% | 0 |' in coverage_file_lines + assert '| TOTAL | 100.00% | 0 |' in coverage_file_lines + + +def test_doctest_api_docs(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + + result = run([f'{bin_dir}sphinx-build', '-b', 'doctest', 'docs', 'docs/_build/doctest'], project_dir) + assert result.returncode == 0 + assert 'build succeeded' in result.stdout + assert (project_dir / 'docs' / '_build' / 'doctest' / 'output.txt').exists() + + +def test_ruff_check(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = 
project_env_bin_dir + + result = run([f'{bin_dir}ruff', 'check', '--fix'], project_dir) + assert result.returncode == 0 + assert '' in result.stdout + + +def test_bumpversion(baked_with_development_dependencies, project_env_bin_dir): + project_dir = baked_with_development_dependencies + bin_dir = project_env_bin_dir + + original_version = '0.1.0' + assert original_version in (project_dir / 'pyproject.toml').read_text('utf-8') + assert original_version in (project_dir / 'CITATION.cff').read_text('utf-8') + assert original_version in (project_dir / 'src' / 'my_python_package' / '__init__.py').read_text('utf-8') + assert original_version in (project_dir / 'docs' / 'conf.py').read_text('utf-8') + + result = run([f'{bin_dir}bump-my-version', 'bump', 'major'], project_dir) + assert result.returncode == 0 + assert '' in result.stdout + expected_version = '1.0.0' + assert expected_version in (project_dir / 'pyproject.toml').read_text('utf-8') + assert expected_version in (project_dir / 'CITATION.cff').read_text('utf-8') + assert expected_version in (project_dir / 'src' / 'my_python_package' / '__init__.py').read_text('utf-8') + assert expected_version in (project_dir / 'docs' / 'conf.py').read_text('utf-8') diff --git a/tests/test_values.py b/tests/test_values.py index 22001147..01d748a9 100644 --- a/tests/test_values.py +++ b/tests/test_values.py @@ -1,6 +1,3 @@ -import pytest - - def test_double_quotes_in_name_and_description(cookies): ctx = { "project_short_description": '"double quotes"', @@ -21,18 +18,18 @@ def test_single_quotes_in_name_and_description(cookies): assert project.exit_code == 0 -def test_dash_in_project_name(cookies): +def test_dash_in_directory_name(cookies): ctx = { - "project_name": "my-python-project" + "directory_name": "my-python-project" } project = cookies.bake(extra_context=ctx) assert project.exit_code == 0 -def test_space_in_project_name(cookies): +def test_space_in_directory_name(cookies): ctx = { - "project_name": "my python project" + "directory_name": "my python project" } project = cookies.bake(extra_context=ctx) diff --git a/{{cookiecutter.project_name}}/.editorconfig b/{{cookiecutter.directory_name}}/.editorconfig similarity index 100% rename from {{cookiecutter.project_name}}/.editorconfig rename to {{cookiecutter.directory_name}}/.editorconfig diff --git a/{{cookiecutter.directory_name}}/.githooks/pre-commit b/{{cookiecutter.directory_name}}/.githooks/pre-commit new file mode 100755 index 00000000..c4bc8dd3 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.githooks/pre-commit @@ -0,0 +1,20 @@ +#!/bin/sh + +echo "Script $0 triggered ..." + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +echo "Starting ruff analysis..." + +# quietly run ruff +ruff check --fix +ruff format + +# use return code to abort commit if necessary +if [ $? != "0" ]; then + echo "Commit aborted. Fix linter issues found by ruff before committing." + exit 1 +fi + +# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # +echo "Pre-commit checks completed successfully." 
+exit 0 diff --git a/{{cookiecutter.project_name}}/.github/next_steps/01_sonarcloud_integration.md b/{{cookiecutter.directory_name}}/.github/next_steps/01_sonarcloud_integration.md similarity index 78% rename from {{cookiecutter.project_name}}/.github/next_steps/01_sonarcloud_integration.md rename to {{cookiecutter.directory_name}}/.github/next_steps/01_sonarcloud_integration.md index 95c3a2c5..23761831 100644 --- a/{{cookiecutter.project_name}}/.github/next_steps/01_sonarcloud_integration.md +++ b/{{cookiecutter.directory_name}}/.github/next_steps/01_sonarcloud_integration.md @@ -4,13 +4,13 @@ title: 'Next step: Sonarcloud integration' Continuous code quality can be handled by [Sonarcloud](https://sonarcloud.io/). This repository is configured to use Sonarcloud to perform quality analysis and code coverage report on each push. -In order to configure Sonarcloud analysis [GitHub Action workflow](.github/workflows/sonarcloud.yml) you must follow the steps below: +In order to configure Sonarcloud analysis [GitHub Action workflow]({{cookiecutter.repository_url}}/blob/main/.github/workflows/sonarcloud.yml) you must follow the steps below: 1. go to [Sonarcloud](https://sonarcloud.io/projects/create) to create a new Sonarcloud project 1. login with your GitHub account 1. add Sonarcloud organization or reuse existing one 1. set up a repository -1. go to [new code definition administration page](https://sonarcloud.io/project/new_code?id={{cookiecutter.github_organization}}_{{cookiecutter.project_name}}) and select `Number of days` option +1. go to [new code definition administration page](https://sonarcloud.io/project/new_code?id={{cookiecutter.github_organization}}_{{cookiecutter.directory_name}}) and select `Number of days` option 1. To be able to run the analysis: 1. a token must be created at [Sonarcloud account](https://sonarcloud.io/account/security/) - 1. the created token must be added as `SONAR_TOKEN` to [secrets on GitHub](https://github.com/{{cookiecutter.github_organization}}/{{cookiecutter.project_name}}/settings/secrets/actions) + 1. the created token must be added as `SONAR_TOKEN` to [secrets on GitHub](https://github.com/{{cookiecutter.github_organization}}/{{cookiecutter.directory_name}}/settings/secrets/actions) diff --git a/{{cookiecutter.directory_name}}/.github/next_steps/02_citation.md b/{{cookiecutter.directory_name}}/.github/next_steps/02_citation.md new file mode 100644 index 00000000..7a3279d7 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.github/next_steps/02_citation.md @@ -0,0 +1,28 @@ +--- +title: 'Next step: Citation data' +--- + +It is likely that your `CITATION.cff` currently doesn't pass validation. The error messages you get from the [`cffconvert`]({{cookiecutter.repository_url}}/actions/workflows/cffconvert.yml) GitHub Action are unfortunately a bit cryptic, but doing the following helps: + +- [ ] Check if the `given-name` and `family-name` keys need updating. If your family name has a name particle like `von` or `van` or `de`, use the `name-particle` key; if your name has a suffix like `Sr` or `IV`, use `name-suffix`. For details, refer to the schema description: https://github.com/citation-file-format/citation-file-format +- [ ] Update the value of the `orcid` key. If you do not have an orcid yet, you can get one here [https://orcid.org/](https://orcid.org/). +- [ ] Add more authors if needed +- [ ] Update `date-released` using the YYYY-MM-DD format. 
+- [ ] Update the `doi` key with the conceptDOI for your repository (see [https://help.zenodo.org](https://help.zenodo.org/) for more information on what a conceptDOI is). If your project doesn't have a DOI yet, you can use the string `10.0000/FIXME` to pass validation. +- [ ] Verify that the `keywords` array accurately describes your project. + +Afterwards, the `cffconvert` GitHub Action should be green. + +To make sure services like [Zenodo](https://zenodo.org) and the [Research Software Directory](https://research-software-directory.org/) can keep your citation data up to date, the [`cffconvert`]({{cookiecutter.repository_url}}/actions/workflows/cffconvert.yml) GitHub Action checks the following: + +1. Whether your repository includes a `CITATION.cff` file. + + _By including this file, authors of the software can receive credit for the work they put in._ + +1. Whether your `CITATION.cff` is valid YAML. + + _Visit http://www.yamllint.com/ to see if the contents of your CITATION.cff are valid YAML._ + +1. Whether your `CITATION.cff` adheres to the schema (as listed in the `CITATION.cff` file itself under key `cff-version`). + + _The Citation File Format schema can be found [here](https://github.com/citation-file-format/citation-file-format), along with an explanation of all the keys. You're advised to use the latest available schema version._ diff --git a/{{cookiecutter.project_name}}/.github/next_steps/03_readthedocs.md b/{{cookiecutter.directory_name}}/.github/next_steps/03_readthedocs.md similarity index 75% rename from {{cookiecutter.project_name}}/.github/next_steps/03_readthedocs.md rename to {{cookiecutter.directory_name}}/.github/next_steps/03_readthedocs.md index 1ad6bf26..fb72b7f3 100644 --- a/{{cookiecutter.project_name}}/.github/next_steps/03_readthedocs.md +++ b/{{cookiecutter.directory_name}}/.github/next_steps/03_readthedocs.md @@ -9,12 +9,12 @@ To host the documentation of this repository please perform the following instru 1. go to [Read the Docs](https://readthedocs.org/dashboard/import/?) 1. log in with your GitHub account -1. find `{{ cookiecutter.github_organization }}/{{ cookiecutter.project_name }}` in list and press `+` button. +1. find `{{ cookiecutter.github_organization }}/{{ cookiecutter.directory_name }}` in list and press `+` button. * If repository is not listed, 1. go to [Read the Docs GitHub app](https://github.com/settings/connections/applications/fae83c942bc1d89609e2) 2. make sure {{ cookiecutter.github_organization }} has been granted access. 3. reload repository list on Read the Docs import page -1. wait for the first build to be completed at -1. check that the link of the documentation badge in the [README.md]({{ cookiecutter.repository }}) works +1. wait for the first build to be completed at +1. check that the link of the documentation badge in the [README.md]({{ cookiecutter.repository_url }}) works -See [README.dev.md#]({{cookiecutter.repository}}/blob/main/README.dev.md#generating-the-api-docs) how to build documentation site locally. +See [README.dev.md#]({{cookiecutter.repository_url}}/blob/main/README.dev.md#generating-the-api-docs) how to build documentation site locally. 
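For reference, the local documentation build mentioned above uses the same commands that the template's own tests and CI run; a rough sketch, assuming a virtual environment is active in the generated package and the `dev`/`docs` extras pull in Sphinx:

```shell
# install the generated package with its development extras (brings in Sphinx)
python -m pip install --editable '.[dev]'
# build the HTML documentation the same way the template's tests and Read the Docs do
sphinx-build -b html docs docs/_build/html
# inspect the result by opening docs/_build/html/index.html in a browser
```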
diff --git a/{{cookiecutter.project_name}}/.github/next_steps/04_zenodo_integration.md b/{{cookiecutter.directory_name}}/.github/next_steps/04_zenodo_integration.md similarity index 96% rename from {{cookiecutter.project_name}}/.github/next_steps/04_zenodo_integration.md rename to {{cookiecutter.directory_name}}/.github/next_steps/04_zenodo_integration.md index a0c81a8b..45ecb639 100644 --- a/{{cookiecutter.project_name}}/.github/next_steps/04_zenodo_integration.md +++ b/{{cookiecutter.directory_name}}/.github/next_steps/04_zenodo_integration.md @@ -8,7 +8,7 @@ To enable Zenodo integration: 1. Go to http://zenodo.org and login with your GitHub account. When you are redirected to GitHub, *Authorize application* to give permission to Zenodo to access your account. 1. Go to and enable Zenodo integration of your repository by clicking on `On` toggle button. -2. Your package will get a DOI only after you make a release. Create a new release as described in [README.dev.md]({{cookiecutter.repository}}/blob/main/README.dev.md#33-github) +2. Your package will get a DOI only after you make a release. Create a new release as described in [README.dev.md]({{cookiecutter.repository_url}}/blob/main/README.dev.md#33-github) 3. At this point you should have a DOI. To find out the DOI generated by Zenodo: 1. Visit https://zenodo.org/deposit and click on your repository link 2. You will find the latest DOI in the right column in Versions box in **Cite all versions?** section diff --git a/{{cookiecutter.directory_name}}/.github/next_steps/05_linting.md b/{{cookiecutter.directory_name}}/.github/next_steps/05_linting.md new file mode 100644 index 00000000..cc603833 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.github/next_steps/05_linting.md @@ -0,0 +1,17 @@ +--- +title: 'Next step: Linting' +--- + +Your repository has a [workflow]({{ cookiecutter.repository_url }}/blob/main/.github/workflows/build.yml) which [lints](https://en.wikipedia.org/wiki/Lint_(software)) your code after every push and when creating a pull request. + +Linter workflow may fail if `description` or `keywords` field in [pyproject.toml]({{ cookiecutter.repository_url }}/blob/main/pyproject.toml) is empty. Please update these fields. To validate your changes run: + +```shell +ruff . +``` + +Enabling [githook](https://git-scm.com/docs/githooks) will automatically lint your code in every commit. You can enable it by running the command below. 
+ +```shell +git config --local core.hooksPath .githooks +``` diff --git a/{{cookiecutter.directory_name}}/.github/workflows/build.yml b/{{cookiecutter.directory_name}}/.github/workflows/build.yml new file mode 100644 index 00000000..97c648cf --- /dev/null +++ b/{{cookiecutter.directory_name}}/.github/workflows/build.yml @@ -0,0 +1,64 @@ +name: Python package + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + + build: + name: Build for (${{ '{{ ' -}} matrix.python-version }}, ${{ '{{ ' -}} matrix.os }}) + runs-on: ${{ '{{ ' -}} matrix.os }} + strategy: + fail-fast: false + matrix: + os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ '{{ ' -}} matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ '{{ ' -}} matrix.python-version }} + - name: Python info + shell: bash -e {0} + run: | + which python + python --version + - name: Upgrade pip and install dependencies + run: | + python -m pip install --upgrade pip setuptools + python -m pip install .[dev,publishing] + - name: Run unit tests + run: python -m pytest -v + - name: Verify that we can build the package + run: python -m build + + lint: + name: Linting build + runs-on: ubuntu-latest + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + - name: Python info + shell: bash -e {0} + run: | + which python + python --version + - name: Upgrade pip and install dependencies + run: | + python -m pip install --upgrade pip setuptools + python -m pip install .[dev,publishing] + - name: Check style against standards using ruff + run: | + ruff check + ruff format --check diff --git a/{{cookiecutter.directory_name}}/.github/workflows/cffconvert.yml b/{{cookiecutter.directory_name}}/.github/workflows/cffconvert.yml new file mode 100644 index 00000000..04fb9ea0 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.github/workflows/cffconvert.yml @@ -0,0 +1,23 @@ +name: cffconvert + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + + verify: + name: "cffconvert" + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + name: Check out a copy of the repository + + - name: Check whether the citation metadata from CITATION.cff is valid + uses: citation-file-format/cffconvert-github-action@2.0.0 + with: + args: "--validate" diff --git a/{{cookiecutter.directory_name}}/.github/workflows/documentation.yml b/{{cookiecutter.directory_name}}/.github/workflows/documentation.yml new file mode 100644 index 00000000..f875b720 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.github/workflows/documentation.yml @@ -0,0 +1,36 @@ +name: documentation + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + build-documentation: + name: Build documentation + runs-on: ubuntu-latest + strategy: + fail-fast: false + steps: + - uses: actions/checkout@v4 + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + - name: Python info + shell: bash -e {0} + run: | + which python + python --version + - name: Upgrade pip and install dependencies + run: | + python -m pip install --upgrade pip setuptools + python -m pip install .[dev,publishing] + - name: Install pandoc using apt + run: sudo apt install pandoc + - name: Build documentation + run: make coverage doctest html + 
working-directory: docs diff --git a/{{cookiecutter.project_name}}/.github/workflows/markdown-link-check.yml b/{{cookiecutter.directory_name}}/.github/workflows/markdown-link-check.yml similarity index 63% rename from {{cookiecutter.project_name}}/.github/workflows/markdown-link-check.yml rename to {{cookiecutter.directory_name}}/.github/workflows/markdown-link-check.yml index 0afe69eb..e27e10b3 100644 --- a/{{cookiecutter.project_name}}/.github/workflows/markdown-link-check.yml +++ b/{{cookiecutter.directory_name}}/.github/workflows/markdown-link-check.yml @@ -1,6 +1,12 @@ name: markdown-link-check -on: [push, pull_request] +on: + push: + branches: + - main + pull_request: + branches: + - main jobs: @@ -8,7 +14,8 @@ jobs: name: Check markdown links runs-on: ubuntu-latest steps: - - uses: actions/checkout@main + - uses: actions/checkout@v4 - uses: gaurav-nelson/github-action-markdown-link-check@v1 with: + use-quiet-mode: 'yes' config-file: '.mlc-config.json' diff --git a/{{cookiecutter.project_name}}/.github/workflows/next_steps.yml b/{{cookiecutter.directory_name}}/.github/workflows/next_steps.yml similarity index 67% rename from {{cookiecutter.project_name}}/.github/workflows/next_steps.yml rename to {{cookiecutter.directory_name}}/.github/workflows/next_steps.yml index fbf0877b..e759c491 100644 --- a/{{cookiecutter.project_name}}/.github/workflows/next_steps.yml +++ b/{{cookiecutter.directory_name}}/.github/workflows/next_steps.yml @@ -1,14 +1,17 @@ on: [push] +permissions: + contents: write + issues: write name: Create issues for next steps jobs: next_steps: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Create Sonarcloud integration issue uses: JasonEtco/create-an-issue@v2 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ '{{ ' -}} secrets.GITHUB_TOKEN }} with: filename: .github/next_steps/01_sonarcloud_integration.md id: sonarcloud @@ -16,39 +19,39 @@ jobs: - name: Create citation data issue uses: JasonEtco/create-an-issue@v2 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ '{{ ' -}} secrets.GITHUB_TOKEN }} with: filename: .github/next_steps/02_citation.md id: citation - name: Create readthedocs issue uses: JasonEtco/create-an-issue@v2 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ '{{ ' -}} secrets.GITHUB_TOKEN }} with: filename: .github/next_steps/03_readthedocs.md id: readthedocs - name: Create Zenodo integration issue uses: JasonEtco/create-an-issue@v2 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ '{{ ' -}} secrets.GITHUB_TOKEN }} with: filename: .github/next_steps/04_zenodo_integration.md id: zenodo - name: Create linting issue uses: JasonEtco/create-an-issue@v2 env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + GITHUB_TOKEN: ${{ '{{ ' -}} secrets.GITHUB_TOKEN }} with: filename: .github/next_steps/05_linting.md id: linting - name: List created issues run: | echo 'Created issues that must be completed to have fully working Python package: - * Sonarcloud integration ${{ steps.sonarcloud.outputs.url }} - * Zenodo integration ${{ steps.zenodo.outputs.url }} - * Read the Docs instructions ${{ steps.readthedocs.outputs.url }} - * Citation data ${{ steps.citation.outputs.url }} - * Linting fixes ${{ steps.linting.outputs.url }}' + * Sonarcloud integration ${{ '{{ ' -}} steps.sonarcloud.outputs.url }} + * Zenodo integration ${{ '{{ ' -}} steps.zenodo.outputs.url }} + * Read the Docs instructions ${{ '{{ ' -}} steps.readthedocs.outputs.url }} + * Citation 
data ${{ '{{ ' -}} steps.citation.outputs.url }} + * Linting fixes ${{ '{{ ' -}} steps.linting.outputs.url }}' - name: Cleanup files needed to create next steps issues run: | git config --global user.name 'NLeSC Python template' diff --git a/{{cookiecutter.project_name}}/.github/workflows/sonarcloud.yml b/{{cookiecutter.directory_name}}/.github/workflows/sonarcloud.yml similarity index 56% rename from {{cookiecutter.project_name}}/.github/workflows/sonarcloud.yml rename to {{cookiecutter.directory_name}}/.github/workflows/sonarcloud.yml index 1fde7d64..1c7ec8b3 100644 --- a/{{cookiecutter.project_name}}/.github/workflows/sonarcloud.yml +++ b/{{cookiecutter.directory_name}}/.github/workflows/sonarcloud.yml @@ -2,8 +2,12 @@ name: sonarcloud on: push: + branches: + - main pull_request: types: [opened, synchronize, reopened] + branches: + - main jobs: @@ -11,26 +15,26 @@ jobs: name: SonarCloud runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis - name: Set up Python - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Python info - shell: bash -l {0} + shell: bash -e {0} run: | - which python3 - python3 --version + which python + python --version - name: Install dependencies - run: python3 -m pip install .[dev] + run: python -m pip install .[dev] - name: Run unit tests with coverage - run: pytest --cov --cov-report term --cov-report xml --junitxml=xunit-result.xml tests/ + run: python -m pytest --cov --cov-report term --cov-report xml --junitxml=xunit-result.xml tests/ - name: Correct coverage paths run: sed -i "s+$PWD/++g" coverage.xml - name: SonarCloud Scan uses: SonarSource/sonarcloud-github-action@master env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any - SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }} + GITHUB_TOKEN: ${{ '{{' -}} secrets.GITHUB_TOKEN }} # Needed to get PR information, if any + SONAR_TOKEN: ${{ '{{' -}} secrets.SONAR_TOKEN }} diff --git a/{{cookiecutter.project_name}}/.gitignore b/{{cookiecutter.directory_name}}/.gitignore similarity index 94% rename from {{cookiecutter.project_name}}/.gitignore rename to {{cookiecutter.directory_name}}/.gitignore index deeb37d8..57fb2ab9 100644 --- a/{{cookiecutter.project_name}}/.gitignore +++ b/{{cookiecutter.directory_name}}/.gitignore @@ -12,9 +12,9 @@ htmlcov .coverage coverage.xml .pytest_cache +.tox docs/_build -docs/apidocs # ide .idea diff --git a/{{cookiecutter.project_name}}/.mlc-config.json b/{{cookiecutter.directory_name}}/.mlc-config.json similarity index 60% rename from {{cookiecutter.project_name}}/.mlc-config.json rename to {{cookiecutter.directory_name}}/.mlc-config.json index 54d24dcb..1d388679 100644 --- a/{{cookiecutter.project_name}}/.mlc-config.json +++ b/{{cookiecutter.directory_name}}/.mlc-config.json @@ -7,11 +7,24 @@ { "pattern": "^https://doi.org/" }, + { + "pattern": "^https://github.com/.*/settings/secrets/actions$" + }, + { + "pattern": "^https://github.com/organizations/.*/repositories/new" + }, + { + "pattern": "^https://test.pypi.org" + }, { "pattern": "^https://bestpractices.coreinfrastructure.org/projects/" + }, + { + "pattern": "^https://readthedocs.org/dashboard/import.*" } ], "replacementPatterns": [ ], + "retryOn429": true, "timeout": "20s" } diff --git a/{{cookiecutter.directory_name}}/.readthedocs.yaml b/{{cookiecutter.directory_name}}/.readthedocs.yaml new file mode 100644 index 
00000000..b2a53689 --- /dev/null +++ b/{{cookiecutter.directory_name}}/.readthedocs.yaml @@ -0,0 +1,7 @@ +version: 2 +python: + install: + - method: pip + path: . + extra_requirements: + - docs diff --git a/{{cookiecutter.project_name}}/CHANGELOG.md b/{{cookiecutter.directory_name}}/CHANGELOG.md similarity index 100% rename from {{cookiecutter.project_name}}/CHANGELOG.md rename to {{cookiecutter.directory_name}}/CHANGELOG.md diff --git a/{{cookiecutter.project_name}}/CITATION.cff b/{{cookiecutter.directory_name}}/CITATION.cff similarity index 86% rename from {{cookiecutter.project_name}}/CITATION.cff rename to {{cookiecutter.directory_name}}/CITATION.cff index 15c28e34..88cb9c25 100644 --- a/{{cookiecutter.project_name}}/CITATION.cff +++ b/{{cookiecutter.directory_name}}/CITATION.cff @@ -1,7 +1,7 @@ # YAML 1.2 --- -cff-version: "1.1.0" -title: "{{ cookiecutter.project_name }}" +cff-version: "1.2.0" +title: "{{ cookiecutter.package_name }}" authors: - family-names: {{ cookiecutter.full_name.split(' ')[-1] }} @@ -10,7 +10,7 @@ authors: date-released: 20??-MM-DD doi: version: "{{ cookiecutter.version }}" -repository-code: "{{ cookiecutter.repository }}" +repository-code: "{{ cookiecutter.repository_url }}" keywords: - {{ cookiecutter.keyword1 }} - {{ cookiecutter.keyword2 }} diff --git a/{{cookiecutter.project_name}}/CODE_OF_CONDUCT.md b/{{cookiecutter.directory_name}}/CODE_OF_CONDUCT.md similarity index 100% rename from {{cookiecutter.project_name}}/CODE_OF_CONDUCT.md rename to {{cookiecutter.directory_name}}/CODE_OF_CONDUCT.md diff --git a/{{cookiecutter.directory_name}}/CONTRIBUTING.md b/{{cookiecutter.directory_name}}/CONTRIBUTING.md new file mode 100644 index 00000000..c3ec5d42 --- /dev/null +++ b/{{cookiecutter.directory_name}}/CONTRIBUTING.md @@ -0,0 +1,68 @@ +# Contributing guidelines + +We welcome any kind of contribution to our software, from simple comment or question to a full fledged [pull request](https://help.github.com/articles/about-pull-requests/). Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md). + +A contribution can be one of the following cases: + +1. you have a question; +1. you think you may have found a bug (including unexpected behavior); +1. you want to make some kind of change to the code base (e.g. to fix a bug, to add a new feature, to update documentation); +1. you want to make a new release of the code base. + +The sections below outline the steps in each case. + +## You have a question + +1. use the search functionality [here]({{cookiecutter.repository_url}}/issues) to see if someone already filed the same issue; +2. if your issue search did not yield any relevant results, make a new issue; +3. apply the "Question" label; apply other labels when relevant. + +## You think you may have found a bug + +1. use the search functionality [here]({{cookiecutter.repository_url}}/issues) to see if someone already filed the same issue; +1. if your issue search did not yield any relevant results, make a new issue, making sure to provide enough information to the rest of the community to understand the cause and context of the problem. Depending on the issue, you may want to include: + - the [SHA hashcode](https://help.github.com/articles/autolinked-references-and-urls/#commit-shas) of the commit that is causing your problem; + - some identifying information (name and version number) for dependencies you're using; + - information about the operating system; +1. apply relevant labels to the newly created issue. 
+ +## You want to make some kind of change to the code base + +1. (**important**) announce your plan to the rest of the community *before you start working*. This announcement should be in the form of a (new) issue; +1. (**important**) wait until some kind of consensus is reached about your idea being a good idea; +1. if needed, fork the repository to your own Github profile and create your own feature branch off of the latest main commit. While working on your feature branch, make sure to stay up to date with the main branch by pulling in changes, possibly from the 'upstream' repository (follow the instructions [here](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and [here](https://help.github.com/articles/syncing-a-fork/)); +1. install dependencies (see the [development documentation](README.dev.md#development_install)); +1. make sure the existing tests still work by running ``pytest``; +1. add your own tests (if necessary); +1. update or expand the documentation; +1. update the `CHANGELOG.md` file with your change; +1. [push](http://rogerdudler.github.io/git-guide/) your feature branch to (your fork of) the {{ cookiecutter.package_name }} repository on GitHub; +1. create the pull request, e.g. following the instructions [here](https://help.github.com/articles/creating-a-pull-request/). + +In case you feel like you've made a valuable contribution, but you don't know how to write or run tests for it, or how to generate the documentation: don't let this discourage you from making the pull request; we can help you! Just go ahead and submit the pull request, but keep in mind that you might be asked to append additional commits to your pull request. + +## You want to make a new release of the code base + +To create a release you need write permission on the repository. + +1. Check the author list in [`CITATION.cff`](CITATION.cff) +1. Bump the version using `bump-my-version bump `. For example, `bump-my-version bump major` will increase major version numbers everywhere it's needed (code, meta, etc.) in the repo. Alternatively the version can be manually changed in {{ cookiecutter.package_name }}/__init__.py, pyproject.toml, CITATION.cff and docs/conf.py (and other places it was possibly added). +1. Update the `CHANGELOG.md` to include changes made +1. Go to the [GitHub release page]({{ cookiecutter.repository_url }}/releases) +1. Press draft a new release button +1. Fill version, title and description field +1. Press the Publish Release button + + + +Also a Zenodo entry will be made for the release with its own DOI. \ No newline at end of file diff --git a/{{cookiecutter.project_name}}/LICENSE b/{{cookiecutter.directory_name}}/LICENSE similarity index 86% rename from {{cookiecutter.project_name}}/LICENSE rename to {{cookiecutter.directory_name}}/LICENSE index 00bfbf25..aa06a167 100644 --- a/{{cookiecutter.project_name}}/LICENSE +++ b/{{cookiecutter.directory_name}}/LICENSE @@ -924,4 +924,170 @@ may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . +{% elif cookiecutter.license == 'GNU Lesser General Public License v3 or later' %} + GNU LESSER GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. 
+ + + This version of the GNU Lesser General Public License incorporates +the terms and conditions of version 3 of the GNU General Public +License, supplemented by the additional permissions listed below. + + 0. Additional Definitions. + + As used herein, "this License" refers to version 3 of the GNU Lesser +General Public License, and the "GNU GPL" refers to version 3 of the GNU +General Public License. + + "The Library" refers to a covered work governed by this License, +other than an Application or a Combined Work as defined below. + + An "Application" is any work that makes use of an interface provided +by the Library, but which is not otherwise based on the Library. +Defining a subclass of a class defined by the Library is deemed a mode +of using an interface provided by the Library. + + A "Combined Work" is a work produced by combining or linking an +Application with the Library. The particular version of the Library +with which the Combined Work was made is also called the "Linked +Version". + + The "Minimal Corresponding Source" for a Combined Work means the +Corresponding Source for the Combined Work, excluding any source code +for portions of the Combined Work that, considered in isolation, are +based on the Application, and not on the Linked Version. + + The "Corresponding Application Code" for a Combined Work means the +object code and/or source code for the Application, including any data +and utility programs needed for reproducing the Combined Work from the +Application, but excluding the System Libraries of the Combined Work. + + 1. Exception to Section 3 of the GNU GPL. + + You may convey a covered work under sections 3 and 4 of this License +without being bound by section 3 of the GNU GPL. + + 2. Conveying Modified Versions. + + If you modify a copy of the Library, and, in your modifications, a +facility refers to a function or data to be supplied by an Application +that uses the facility (other than as an argument passed when the +facility is invoked), then you may convey a copy of the modified +version: + + a) under this License, provided that you make a good faith effort to + ensure that, in the event an Application does not supply the + function or data, the facility still operates, and performs + whatever part of its purpose remains meaningful, or + + b) under the GNU GPL, with none of the additional permissions of + this License applicable to that copy. + + 3. Object Code Incorporating Material from Library Header Files. + + The object code form of an Application may incorporate material from +a header file that is part of the Library. You may convey such object +code under terms of your choice, provided that, if the incorporated +material is not limited to numerical parameters, data structure +layouts and accessors, or small macros, inline functions and templates +(ten or fewer lines in length), you do both of the following: + + a) Give prominent notice with each copy of the object code that the + Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the object code with a copy of the GNU GPL and this license + document. + + 4. Combined Works. 
+ + You may convey a Combined Work under terms of your choice that, +taken together, effectively do not restrict modification of the +portions of the Library contained in the Combined Work and reverse +engineering for debugging such modifications, if you also do each of +the following: + + a) Give prominent notice with each copy of the Combined Work that + the Library is used in it and that the Library and its use are + covered by this License. + + b) Accompany the Combined Work with a copy of the GNU GPL and this license + document. + + c) For a Combined Work that displays copyright notices during + execution, include the copyright notice for the Library among + these notices, as well as a reference directing the user to the + copies of the GNU GPL and this license document. + + d) Do one of the following: + + 0) Convey the Minimal Corresponding Source under the terms of this + License, and the Corresponding Application Code in a form + suitable for, and under terms that permit, the user to + recombine or relink the Application with a modified version of + the Linked Version to produce a modified Combined Work, in the + manner specified by section 6 of the GNU GPL for conveying + Corresponding Source. + + 1) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (a) uses at run time + a copy of the Library already present on the user's computer + system, and (b) will operate properly with a modified version + of the Library that is interface-compatible with the Linked + Version. + + e) Provide Installation Information, but only if you would otherwise + be required to provide such information under section 6 of the + GNU GPL, and only to the extent that such information is + necessary to install and execute a modified version of the + Combined Work produced by recombining or relinking the + Application with a modified version of the Linked Version. (If + you use option 4d0, the Installation Information must accompany + the Minimal Corresponding Source and Corresponding Application + Code. If you use option 4d1, you must provide the Installation + Information in the manner specified by section 6 of the GNU GPL + for conveying Corresponding Source.) + + 5. Combined Libraries. + + You may place library facilities that are a work based on the +Library side by side in a single library together with other library +facilities that are not Applications and are not covered by this +License, and convey such a combined library under terms of your +choice, if you do both of the following: + + a) Accompany the combined library with a copy of the same work based + on the Library, uncombined with any other library facilities, + conveyed under the terms of this License. + + b) Give prominent notice with the combined library that part of it + is a work based on the Library, and explaining where to find the + accompanying uncombined form of the same work. + + 6. Revised Versions of the GNU Lesser General Public License. + + The Free Software Foundation may publish revised and/or new versions +of the GNU Lesser General Public License from time to time. Such new +versions will be similar in spirit to the present version, but may +differ in detail to address new problems or concerns. + + Each version is given a distinguishing version number. 
If the +Library as you received it specifies that a certain numbered version +of the GNU Lesser General Public License "or any later version" +applies to it, you have the option of following the terms and +conditions either of that published version or of any later version +published by the Free Software Foundation. If the Library as you +received it does not specify a version number of the GNU Lesser +General Public License, you may choose any version of the GNU Lesser +General Public License ever published by the Free Software Foundation. + + If the Library as you received it specifies that a proxy can decide +whether future versions of the GNU Lesser General Public License shall +apply, that proxy's public statement of acceptance of any version is +permanent authorization for you to choose that version for the +Library. {% endif %} diff --git a/{{cookiecutter.project_name}}/MANIFEST.in b/{{cookiecutter.directory_name}}/MANIFEST.in similarity index 100% rename from {{cookiecutter.project_name}}/MANIFEST.in rename to {{cookiecutter.directory_name}}/MANIFEST.in diff --git a/{{cookiecutter.project_name}}/NOTICE b/{{cookiecutter.directory_name}}/NOTICE similarity index 100% rename from {{cookiecutter.project_name}}/NOTICE rename to {{cookiecutter.directory_name}}/NOTICE diff --git a/{{cookiecutter.directory_name}}/README.dev.md b/{{cookiecutter.directory_name}}/README.dev.md new file mode 100644 index 00000000..b839f920 --- /dev/null +++ b/{{cookiecutter.directory_name}}/README.dev.md @@ -0,0 +1,196 @@ +# `{{ cookiecutter.package_name }}` developer documentation + +If you're looking for user documentation, go [here](README.md). + +## Development install + +```shell +# Create a virtual environment, e.g. with +python -m venv env + +# activate virtual environment +source env/bin/activate + +# make sure to have a recent version of pip and setuptools +python -m pip install --upgrade pip setuptools + +# (from the project root directory) +# install {{ cookiecutter.package_name }} as an editable package +python -m pip install --no-cache-dir --editable . +# install development dependencies +python -m pip install --no-cache-dir --editable .[dev] +# install documentation dependencies only +python -m pip install --no-cache-dir --editable .[docs] +``` + +Afterwards check that the install directory is present in the `PATH` environment variable. + +## Running the tests + +There are two ways to run tests. + +The first way requires an activated virtual environment with the development tools installed: + +```shell +pytest -v +``` + +The second is to use `tox`, which can be installed separately (e.g. with `pip install tox`), i.e. not necessarily inside the virtual environment you use for installing `{{ cookiecutter.package_name }}`, but then builds the necessary virtual environments itself by simply running: + +```shell +tox +``` + +Testing with `tox` allows for keeping the testing environment separate from your development environment. +The development environment will typically accumulate (old) packages during development that interfere with testing; this problem is avoided by testing with `tox`. + +### Test coverage + +In addition to just running the tests to see if they pass, they can be used for coverage statistics, i.e. to determine how much of the package's code is actually executed during tests. +In an activated virtual environment with the development tools installed, inside the package directory, run: + +```shell +coverage run +``` + +This runs tests and stores the result in a `.coverage` file. 
+To see the results on the command line, run + +```shell +coverage report +``` + +`coverage` can also generate output in HTML and other formats; see `coverage help` for more information. + +## Running linters locally + +For linting and sorting imports we will use [ruff](https://beta.ruff.rs/docs/). Running the linters requires an +activated virtual environment with the development tools installed. + +```shell +# linter +ruff . + +# linter with automatic fixing +ruff . --fix +``` + +To fix readability of your code style you can use [yapf](https://github.com/google/yapf). + +You can enable automatic linting with `ruff` on commit by enabling the git hook from `.githooks/pre-commit`, like so: + +```shell +git config --local core.hooksPath .githooks +``` + +## Generating the API docs + +```shell +cd docs +make html +``` + +The documentation will be in `docs/_build/html` + +If you do not have `make` use + +```shell +sphinx-build -b html docs docs/_build/html +``` + +To find undocumented Python objects run + +```shell +cd docs +make coverage +cat _build/coverage/python.txt +``` + +To [test snippets](https://www.sphinx-doc.org/en/master/usage/extensions/doctest.html) in documentation run + +```shell +cd docs +make doctest +``` + +## Versioning + +Bumping the version across all files is done with [bump-my-version](https://github.com/callowayproject/bump-my-version), e.g. + +```shell +bump-my-version bump major # bumps from e.g. 0.3.2 to 1.0.0 +bump-my-version bump minor # bumps from e.g. 0.3.2 to 0.4.0 +bump-my-version bump patch # bumps from e.g. 0.3.2 to 0.3.3 +``` + +## Making a release + +This section describes how to make a release in 3 parts: + +1. preparation +1. making a release on PyPI +1. making a release on GitHub + +### (1/3) Preparation + +1. Update the (don't forget to update links at bottom of page) +2. Verify that the information in [`CITATION.cff`](CITATION.cff) is correct. +3. Make sure the [version has been updated](#versioning). +4. Run the unit tests with `pytest -v` + +### (2/3) PyPI + +In a new terminal: + +```shell +# OPTIONAL: prepare a new directory with fresh git clone to ensure the release +# has the state of origin/main branch +cd $(mktemp -d {{ cookiecutter.package_name }}.XXXXXX) +git clone {{ cookiecutter.repository }} . + +# make sure to have a recent version of pip and the publishing dependencies +python -m pip install --upgrade pip +python -m pip install .[publishing] + +# create the source distribution and the wheel +python -m build + +# upload to test pypi instance (requires credentials) +python -m twine upload --repository testpypi dist/* +``` + +Visit +[https://test.pypi.org/project/{{cookiecutter.package_name}}](https://test.pypi.org/project/{{cookiecutter.package_name}}) +and verify that your package was uploaded successfully. Keep the terminal open, we'll need it later. + +In a new terminal, without an activated virtual environment or an env directory: + +```shell +cd $(mktemp -d {{ cookiecutter.package_name }}-test.XXXXXX) + +# prepare a clean virtual environment and activate it +python -m venv env +source env/bin/activate + +# make sure to have a recent version of pip and setuptools +python -m pip install --upgrade pip + +# install from test pypi instance: +python -m pip -v install --no-cache-dir \ +--index-url https://test.pypi.org/simple/ \ +--extra-index-url https://pypi.org/simple {{ cookiecutter.package_name }} +``` + +Check that the package works as it should when installed from pypitest. 
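
A quick way to check the installation from the test instance, before publishing for real, is a small import smoke test. A minimal sketch, assuming the template's example `my_module.hello` function and the `__version__` attribute are still present in your package:

```shell
# confirm the package imports and reports the expected version
python -c "import {{ cookiecutter.package_name }}; print({{ cookiecutter.package_name }}.__version__)"

# optionally exercise the example function shipped with the template
python -c "from {{ cookiecutter.package_name }}.my_module import hello; print(hello('pypitest'))"
```
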
+ +Then upload to pypi.org with: + +```shell +# Back to the first terminal, +# FINAL STEP: upload to PyPI (requires credentials) +python -m twine upload dist/* +``` + +### (3/3) GitHub + +Don't forget to also make a [release on GitHub]({{cookiecutter.repository_url}}/releases/new). If your repository uses the GitHub-Zenodo integration this will also trigger Zenodo into making a snapshot of your repository and sticking a DOI on it. diff --git a/{{cookiecutter.project_name}}/README.md b/{{cookiecutter.directory_name}}/README.md similarity index 60% rename from {{cookiecutter.project_name}}/README.md rename to {{cookiecutter.directory_name}}/README.md index 5f76a536..0e5e2918 100644 --- a/{{cookiecutter.project_name}}/README.md +++ b/{{cookiecutter.directory_name}}/README.md @@ -4,22 +4,21 @@ | fair-software.eu recommendations | | | :-- | :-- | -| (1/5) code repository | [![github repo badge](https://img.shields.io/badge/github-repo-000.svg?logo=github&labelColor=gray&color=blue)]({{cookiecutter.repository}}) | -| (2/5) license | [![github license badge](https://img.shields.io/github/license/{{cookiecutter.github_organization}}/{{cookiecutter.project_name}})]({{cookiecutter.repository}}) | -| (3/5) community registry | [![RSD](https://img.shields.io/badge/rsd-{{cookiecutter.project_name}}-00a3e3.svg)](https://www.research-software.nl/software/{{cookiecutter.project_name}}) [![workflow pypi badge](https://img.shields.io/pypi/v/{{cookiecutter.project_name}}.svg?colorB=blue)](https://pypi.python.org/project/{{cookiecutter.project_name}}/) | +| (1/5) code repository | [![github repo badge](https://img.shields.io/badge/github-repo-000.svg?logo=github&labelColor=gray&color=blue)]({{cookiecutter.repository_url}}) | +| (2/5) license | [![github license badge](https://img.shields.io/github/license/{{cookiecutter.github_organization}}/{{cookiecutter.directory_name}})]({{cookiecutter.repository_url}}) | +| (3/5) community registry | [![RSD](https://img.shields.io/badge/rsd-{{cookiecutter.package_name}}-00a3e3.svg)](https://www.research-software.nl/software/{{cookiecutter.package_name}}) [![workflow pypi badge](https://img.shields.io/pypi/v/{{cookiecutter.package_name}}.svg?colorB=blue)](https://pypi.python.org/project/{{cookiecutter.package_name}}/) | | (4/5) citation | [![DOI](https://zenodo.org/badge/DOI/.svg)](https://doi.org/) | | (5/5) checklist | [![workflow cii badge](https://bestpractices.coreinfrastructure.org/projects//badge)](https://bestpractices.coreinfrastructure.org/projects/) | | howfairis | [![fair-software badge](https://img.shields.io/badge/fair--software.eu-%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8F%20%20%E2%97%8B-yellow)](https://fair-software.eu) | | **Other best practices** |   | -| Static analysis | [![workflow scq badge](https://sonarcloud.io/api/project_badges/measure?project={{cookiecutter.github_organization}}_{{cookiecutter.project_name}}&metric=alert_status)](https://sonarcloud.io/dashboard?id={{cookiecutter.github_organization}}_{{cookiecutter.project_name}}) | -| Coverage | [![workflow scc badge](https://sonarcloud.io/api/project_badges/measure?project={{cookiecutter.github_organization}}_{{cookiecutter.project_name}}&metric=coverage)](https://sonarcloud.io/dashboard?id={{cookiecutter.github_organization}}_{{cookiecutter.project_name}}) | -| Documentation | [![Documentation Status](https://readthedocs.org/projects/{{cookiecutter.project_name}}/badge/?version=latest)](https://{{cookiecutter.project_name}}.readthedocs.io/en/latest/?badge=latest) | +| Static 
analysis | [![workflow scq badge](https://sonarcloud.io/api/project_badges/measure?project={{cookiecutter.github_organization}}_{{cookiecutter.directory_name}}&metric=alert_status)](https://sonarcloud.io/dashboard?id={{cookiecutter.github_organization}}_{{cookiecutter.directory_name}}) | +| Coverage | [![workflow scc badge](https://sonarcloud.io/api/project_badges/measure?project={{cookiecutter.github_organization}}_{{cookiecutter.directory_name}}&metric=coverage)](https://sonarcloud.io/dashboard?id={{cookiecutter.github_organization}}_{{cookiecutter.directory_name}}) | +| Documentation | [![Documentation Status](https://readthedocs.org/projects/{{cookiecutter.directory_name}}/badge/?version=latest)](https://{{cookiecutter.directory_name}}.readthedocs.io/en/latest/?badge=latest) | | **GitHub Actions** |   | -| Build | [![build]({{cookiecutter.repository}}/actions/workflows/build.yml/badge.svg)]({{cookiecutter.repository}}/actions/workflows/build.yml) | -| Metadata consistency | [![cffconvert]({{cookiecutter.repository}}/actions/workflows/cffconvert.yml/badge.svg)]({{cookiecutter.repository}}/actions/workflows/cffconvert.yml) | -| Lint | [![lint]({{cookiecutter.repository}}/actions/workflows/lint.yml/badge.svg)]({{cookiecutter.repository}}/actions/workflows/lint.yml) | -| SonarCloud | [![sonarcloud]({{cookiecutter.repository}}/actions/workflows/sonarcloud.yml/badge.svg)]({{cookiecutter.repository}}/actions/workflows/sonarcloud.yml) | -| MarkDown link checker | [![markdown-link-check]({{cookiecutter.repository}}/actions/workflows/markdown-link-check.yml/badge.svg)]({{cookiecutter.repository}}/actions/workflows/markdown-link-check.yml) | +| Build | [![build]({{cookiecutter.repository_url}}/actions/workflows/build.yml/badge.svg)]({{cookiecutter.repository_url}}/actions/workflows/build.yml) | +| Citation data consistency | [![cffconvert]({{cookiecutter.repository_url}}/actions/workflows/cffconvert.yml/badge.svg)]({{cookiecutter.repository_url}}/actions/workflows/cffconvert.yml) | +| SonarCloud | [![sonarcloud]({{cookiecutter.repository_url}}/actions/workflows/sonarcloud.yml/badge.svg)]({{cookiecutter.repository_url}}/actions/workflows/sonarcloud.yml) | +| MarkDown link checker | [![markdown-link-check]({{cookiecutter.repository_url}}/actions/workflows/markdown-link-check.yml/badge.svg)]({{cookiecutter.repository_url}}/actions/workflows/markdown-link-check.yml) | ## How to use {{ cookiecutter.package_name }} @@ -33,8 +32,8 @@ To install {{ cookiecutter.package_name }} from GitHub repository, do: ```console git clone {{ cookiecutter.repository }}.git -cd {{ cookiecutter.project_name }} -python3 -m pip install . +cd {{ cookiecutter.directory_name }} +python -m pip install . ``` ## Documentation @@ -43,7 +42,7 @@ Include a link to your project's full documentation here. ## Contributing -If you want to contribute to the development of {{ cookiecutter.project_name }}, +If you want to contribute to the development of {{ cookiecutter.package_name }}, have a look at the [contribution guidelines](CONTRIBUTING.md). 
## Credits diff --git a/{{cookiecutter.project_name}}/docs/Makefile b/{{cookiecutter.directory_name}}/docs/Makefile similarity index 100% rename from {{cookiecutter.project_name}}/docs/Makefile rename to {{cookiecutter.directory_name}}/docs/Makefile diff --git a/{{cookiecutter.project_name}}/docs/_templates/.gitignore b/{{cookiecutter.directory_name}}/docs/_templates/.gitignore similarity index 100% rename from {{cookiecutter.project_name}}/docs/_templates/.gitignore rename to {{cookiecutter.directory_name}}/docs/_templates/.gitignore diff --git a/{{cookiecutter.directory_name}}/docs/conf.py b/{{cookiecutter.directory_name}}/docs/conf.py new file mode 100644 index 00000000..c73e3d3b --- /dev/null +++ b/{{cookiecutter.directory_name}}/docs/conf.py @@ -0,0 +1,88 @@ +# Configuration file for the Sphinx documentation builder. +# +# This file only contains a selection of the most common options. For a full +# list see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html +# + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +# import os +# import sys +# sys.path.insert(0, os.path.abspath('.')) + +# -- Project information ----------------------------------------------------- + +project = u"{{ cookiecutter.package_name }}" +copyright = u"{% now "local", "%Y" %}, {{ cookiecutter.copyright_holder }}" +author = u"{{ cookiecutter.full_name.replace('\"', '\\\"') }}" + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = "0.1.0" +# The full version, including alpha/beta/rc tags. +release = version + +# -- General configuration ------------------------------------------------ + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named "sphinx.ext.*") or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.coverage", + "sphinx.ext.doctest", + "sphinx.ext.intersphinx", + "sphinx.ext.mathjax", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", + "autoapi.extension", + "myst_parser", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = False + +# -- Use autoapi.extension to run sphinx-apidoc ------- + +autoapi_dirs = ['../src/{{ cookiecutter.package_name }}'] + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+# +# html_theme_options = {} + +# -- Options for Intersphinx + +intersphinx_mapping = {'python': ('https://docs.python.org/3', None), + # Commonly used libraries, uncomment when used in package + # 'numpy': ('http://docs.scipy.org/doc/numpy/', None), + # 'scipy': ('http://docs.scipy.org/doc/scipy/reference/', None), + # 'scikit-learn': ('https://scikit-learn.org/stable/', None), + # 'matplotlib': ('https://matplotlib.org/stable/', None), + # 'pandas': ('http://pandas.pydata.org/docs/', None), + } diff --git a/{{cookiecutter.directory_name}}/docs/index.rst b/{{cookiecutter.directory_name}}/docs/index.rst new file mode 100644 index 00000000..d4af697b --- /dev/null +++ b/{{cookiecutter.directory_name}}/docs/index.rst @@ -0,0 +1,18 @@ +.. {{ cookiecutter.package_name }} documentation master file, created by + sphinx-quickstart on Wed May 5 22:45:36 2021. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to {{ cookiecutter.package_name }}'s documentation! +========================================================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/{{cookiecutter.project_name}}/docs/make.bat b/{{cookiecutter.directory_name}}/docs/make.bat similarity index 100% rename from {{cookiecutter.project_name}}/docs/make.bat rename to {{cookiecutter.directory_name}}/docs/make.bat diff --git a/{{cookiecutter.project_name}}/next_steps.md b/{{cookiecutter.directory_name}}/next_steps.md similarity index 52% rename from {{cookiecutter.project_name}}/next_steps.md rename to {{cookiecutter.directory_name}}/next_steps.md index dc10897b..c9710c54 100644 --- a/{{cookiecutter.project_name}}/next_steps.md +++ b/{{cookiecutter.directory_name}}/next_steps.md @@ -6,8 +6,15 @@ Once your Python package is created, put it under [version control](https://guide.esciencecenter.nl/#/best_practices/version_control) using [git](https://git-scm.com/) and [GitHub](https://github.com/). +Note that the next step assumes you have setup your connection to GitHub via SSH, +see [Connecting to GitHub with SSH](https://docs.github.com/en/github-ae@latest/authentication/connecting-to-github-with-ssh). + +Alternatively, you can also use a personal access token, see +[Creating a personal access token](https://docs.github.com/en/github-ae@latest/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token). If you choose this option, below you will have to replace +`git@github.com:` by `https://github.com/`. 
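
As an illustration of that substitution (hypothetical `my-org/my-package` names; the actual commands below use the `{{ cookiecutter.repository }}` value you entered), the remote URL would change like this:

```shell
# SSH form (used in the commands below)
git remote add origin git@github.com:my-org/my-package.git

# HTTPS form, for use with a personal access token
git remote add origin https://github.com/my-org/my-package.git
```
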
+ ```shell -cd {{ cookiecutter.project_name }} +cd {{ cookiecutter.directory_name }} git init git add --all git commit -m "first commit" @@ -19,7 +26,7 @@ git remote add origin {{ cookiecutter.repository }} Go to [https://github.com/organizations/{{cookiecutter.github_organization}}/repositories/new](https://github.com/organizations/{{cookiecutter.github_organization}}/repositories/new) -and create a new repository named `{{ cookiecutter.project_name }}` as an empty repository, then push your commits to GitHub: +and create a new repository named `{{ cookiecutter.directory_name }}` as an empty repository, then push your commits to GitHub: ```shell git push --set-upstream origin main @@ -28,9 +35,13 @@ git push --set-upstream origin main ## Check automatically generated issues A short while after you push your commits to GitHub for the first time, a few issues outlining next steps will added -automatically ([here]({{cookiecutter.repository}}/issues?q=author%3Aapp%2Fgithub-actions)). Resolve them to complete the +automatically ([here]({{cookiecutter.repository_url}}/issues?q=author%3Aapp%2Fgithub-actions)). Resolve them to complete the setup of your repository. +## Project development documentation + +The [README.dev.md](README.dev.md) contains developer documentation. + ## Project layout explained For an explanation of what files are there, and what each of these do, please refer to [project_setup.md](project_setup.md). diff --git a/{{cookiecutter.project_name}}/project_setup.md b/{{cookiecutter.directory_name}}/project_setup.md similarity index 80% rename from {{cookiecutter.project_name}}/project_setup.md rename to {{cookiecutter.directory_name}}/project_setup.md index d9d50dfb..3d1b196e 100644 --- a/{{cookiecutter.project_name}}/project_setup.md +++ b/{{cookiecutter.directory_name}}/project_setup.md @@ -11,10 +11,11 @@ checklist](https://guide.esciencecenter.nl/#/best_practices/checklist). This repository is set up with Python versions: -- 3.6 -- 3.7 - 3.8 - 3.9 +- 3.10 +- 3.11 +- 3.12 Add or remove Python versions based on project requirements. See [the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python) for more information about Python @@ -27,8 +28,8 @@ to use one or the other, as project requirements differ. For advice on what to u of the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=dependencies-and-package-management). -- Runtime dependencies should be added to `setup.cfg` in the `install_requires` list under `[options]`. -- Development dependencies should be added to `setup.cfg` in one of the lists under `[options.extras_require]`. +- Runtime dependencies should be added to `pyproject.toml` in the `dependencies` list under `[project]`. +- Development dependencies, such as for testing or documentation, should be added to `pyproject.toml` in one of the lists under `[project.optional-dependencies]`. ## Packaging/One command install @@ -42,10 +43,9 @@ help you decide which tool to use for packaging. 
- The `tests` folder contains: - Example tests that you should replace with your own meaningful tests (file: `test_my_module.py`) - The testing framework used is [PyTest](https://pytest.org) - - [PyTest introduction](http://pythontesting.net/framework/pytest/pytest-introduction/) - - PyTest is listed as a development dependency, and can thus be installed with `pip3 install --editable .[dev]` -- Tests can be run with `pytest` - - This is configured in `setup.cfg` + - [PyTest introduction](https://pythontest.com/pytest-book/) + - PyTest is listed as a development dependency + - This is configured in `pyproject.toml` - The project uses [GitHub action workflows](https://docs.github.com/en/actions) to automatically run tests on GitHub infrastructure against multiple Python versions - Workflows can be found in [`.github/workflows`](.github/workflows/) - [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=testing) @@ -54,18 +54,17 @@ help you decide which tool to use for packaging. - Documentation should be put in the [`docs/`](docs/) directory. The contents have been generated using `sphinx-quickstart` (Sphinx version 1.6.5). - We recommend writing the documentation using Restructured Text (reST) and Google style docstrings. - - [Restructured Text (reST) and Sphinx CheatSheet](http://openalea.gforge.inria.fr/doc/openalea/doc/_build/html/source/sphinx/rest_syntax.html) + - [Restructured Text (reST) and Sphinx CheatSheet](https://thomas-cokelaer.info/tutorials/sphinx/rest_syntax.html) - [Google style docstring examples](http://sphinxcontrib-napoleon.readthedocs.io/en/latest/example_google.html). - The documentation is set up with the ReadTheDocs Sphinx theme. - Check out its [configuration options](https://sphinx-rtd-theme.readthedocs.io/en/latest/). +- [AutoAPI](https://sphinx-autoapi.readthedocs.io/) is used to generate documentation for the package Python objects. +- `.readthedocs.yaml` is the ReadTheDocs configuration file. When ReadTheDocs is building the documentation this package and its development dependencies are installed so the API reference can be rendered. - [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=writingdocumentation) ## Coding style conventions and code quality -- Check your code style with `prospector` -- You may need run `pip install --editable .[dev]` first, to install the required dependencies -- You can use `yapf` to fix the readability of your code style and `isort` to format and group your imports -- [Relevant section in the guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=coding-style-conventions) +- [Relevant section in the NLeSC guide](https://guide.esciencecenter.nl/#/best_practices/language_guides/python?id=coding-style-conventions) and [README.dev.md](README.dev.md). ## Continuous code quality @@ -77,9 +76,7 @@ help you decide which tool to use for packaging. ## Package version number - We recommend using [semantic versioning](https://guide.esciencecenter.nl/#/best_practices/releases?id=semantic-versioning). -- For convenience, the package version is stored in a single place: `{{ cookiecutter.project_name }}/.bumpversion.cfg`. - For updating the version number, make sure the dev dependencies are installed and run `bumpversion patch`, - `bumpversion minor`, or `bumpversion major` as appropriate. 
+- For convenience, the package version is stored in a single place: `{{ cookiecutter.directory_name }}/pyproject.toml` under the `tool.bumpversion` header. - Don't forget to update the version number before [making a release](https://guide.esciencecenter.nl/#/best_practices/releases)! ## Logging diff --git a/{{cookiecutter.directory_name}}/pyproject.toml b/{{cookiecutter.directory_name}}/pyproject.toml new file mode 100644 index 00000000..67ec3ecd --- /dev/null +++ b/{{cookiecutter.directory_name}}/pyproject.toml @@ -0,0 +1,169 @@ +# see documentation, e.g. +# - https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata +# - https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html +# - https://www.python.org/dev/peps/pep-0621/ + +[build-system] +requires = ["setuptools>=64.0.0", "setuptools-scm", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +authors = [ + { name = "{{ cookiecutter.full_name }}", email = "{{ cookiecutter.email }}" } +] +classifiers = [ + "Development Status :: 2 - Pre-Alpha", + "Intended Audience :: Developers", + "Intended Audience :: Science/Research", + "{{ {'Apache Software License 2.0': 'License :: OSI Approved :: Apache Software License', + 'MIT license': 'License :: OSI Approved :: MIT License', + 'BSD license': 'License :: OSI Approved :: BSD License', + 'ISC license': 'License :: OSI Approved :: ISC License (ISCL)', + 'GNU General Public License v3 or later': 'License :: OSI Approved :: GNU General Public License', + 'Not open source': 'License :: Other/Proprietary License' + }[cookiecutter.license] }}", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", +] +dependencies = [] +description = "{{ cookiecutter.package_short_description }}" +keywords = [ + "{{ cookiecutter.keyword1 }}", + "{{ cookiecutter.keyword2 }}", +] +license = {file = "LICENSE"} +name = "{{ cookiecutter.package_name }}" +readme = {file = "README.md", content-type = "text/markdown"} +requires-python = ">=3.8" +version = "{{ cookiecutter.version }}" + +[project.optional-dependencies] +dev = [ + "build", # build is not only used in publishing (below), but also in the template's test suite + "bump-my-version", + "coverage [toml]", + "pytest", + "pytest-cov", + "ruff", + "sphinx", + "sphinx_rtd_theme", + "sphinx-autoapi", + "tox", + "myst_parser", +] +docs = [ + "sphinx", + "sphinx_rtd_theme", + "sphinx-autoapi", + "myst_parser", +] +publishing = [ + "build", + "twine", + "wheel", +] + +[project.urls] +Repository = "{{ cookiecutter.repository_url }}" +Issues = "{{ cookiecutter.repository_url }}/issues" +Changelog = "{{ cookiecutter.repository_url }}/CHANGELOG.md" + +[tool.pytest.ini_options] +testpaths = ["tests"] + +[tool.coverage.run] +branch = true +source = ["src/{{ cookiecutter.package_name }}"] +command_line = "-m pytest" + +[tool.tox] +legacy_tox_ini = """ +[tox] +envlist = py38,py39,py310,py311,py312 +skip_missing_interpreters = true +[testenv] +commands = pytest +extras = dev +""" + +[tool.ruff] +line-length = 120 +output-format = "concise" + +[tool.ruff.lint] +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +# Enable Pyflakes `E` and `F` codes by default. 
+select = ["ALL"] +ignore = [ + "ANN101", # Missing type annotation for `self` in method + "ANN102", # Missing type annotation for `cls` in classmethod + "ANN204", # Missing return type annotation for special (dunder) method + "FBT", # Using boolean function arguments + "TD", # TODOs + "FIX001", # Resolve FIXMEs + "FIX002", # Resolve TODOs + "B028", # No explicit `stacklevel` keyword argument found in warning + # No docstrings required in the following cases + "D100", # Missing module docstring + "D104", # Missing public package docstring + "D105", # Missing docstring in magic method + "D107", # Missing docstring in `__init__` +] +pydocstyle.convention = "google" + +# Allow autofix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = ["F401"] # unused imports (should not disappear while editing) +extend-safe-fixes = [ + "D415", # First line should end with a period, question mark, or exclamation point + "D300", # Use triple double quotes `"""` + "D200", # One-line docstring should fit on one line + "TCH", # Format type checking only imports + "ISC001", # Implicitly concatenated strings on a single line + "EM", # Exception message variables + "RUF013", # Implicit Optional + "B006", # Mutable default argument +] + +[tool.ruff.lint.per-file-ignores] +"tests/*" = [ + "S101", # Use of `assert` detected + "PT011", # pytest-raises-too-broad + "ANN001", # Missing function argument type + "ANN201", # Missing return type + "D103", # Missing function docstring + "ANN401", # Function arguments annotated with too generic `Any` type + "SLF001", # Private member access +] +"docs/conf.py" = [ + "INP001", # Add __init__.py to implicit namespace package + "ERA001", # Commented-out code + "A001", # Shadowing Python builtin name, specifically `copyright` +] + +[tool.ruff.lint.isort] +known-first-party = ["{{ cookiecutter.package_name }}"] +force-single-line = true +no-lines-before = ["future","standard-library","third-party","first-party","local-folder"] + +[tool.bumpversion] +current_version = "{{ cookiecutter.version }}" + +[[tool.bumpversion.files]] +filename = "src/{{ cookiecutter.package_name }}/__init__.py" + +[[tool.bumpversion.files]] +filename = "pyproject.toml" + +[[tool.bumpversion.files]] +filename = "CITATION.cff" + +[[tool.bumpversion.files]] +filename = "docs/conf.py" diff --git a/{{cookiecutter.project_name}}/sonar-project.properties b/{{cookiecutter.directory_name}}/sonar-project.properties similarity index 59% rename from {{cookiecutter.project_name}}/sonar-project.properties rename to {{cookiecutter.directory_name}}/sonar-project.properties index 5adf322d..512f6831 100644 --- a/{{cookiecutter.project_name}}/sonar-project.properties +++ b/{{cookiecutter.directory_name}}/sonar-project.properties @@ -1,12 +1,12 @@ sonar.organization={{ cookiecutter.github_organization }} -sonar.projectKey={{ cookiecutter.github_organization }}_{{ cookiecutter.project_name }} +sonar.projectKey={{ cookiecutter.github_organization }}_{{ cookiecutter.directory_name }} sonar.host.url=https://sonarcloud.io -sonar.sources={{ cookiecutter.package_name }}/ +sonar.sources=src/{{ cookiecutter.package_name }}/ sonar.tests=tests/ -sonar.links.homepage={{ cookiecutter.repository }} +sonar.links.homepage={{ cookiecutter.repository_url }} sonar.links.scm={{ cookiecutter.repository }} -sonar.links.issue={{ cookiecutter.repository }}/issues -sonar.links.ci={{ cookiecutter.repository }}/actions +sonar.links.issue={{ cookiecutter.repository_url }}/issues +sonar.links.ci={{ cookiecutter.repository_url 
}}/actions sonar.python.coverage.reportPaths=coverage.xml sonar.python.xunit.reportPath=xunit-result.xml sonar.python.pylint.reportPaths=pylint-report.txt diff --git a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__init__.py b/{{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/__init__.py similarity index 61% rename from {{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__init__.py rename to {{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/__init__.py index 418484ea..4a72a364 100644 --- a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__init__.py +++ b/{{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/__init__.py @@ -1,9 +1,8 @@ -"""Documentation about {{ cookiecutter.package_name }}""" +"""Documentation about {{ cookiecutter.package_name }}.""" import logging -from .__version__ import __version__ - logging.getLogger(__name__).addHandler(logging.NullHandler()) __author__ = "{{ cookiecutter.full_name }}" __email__ = "{{ cookiecutter.email }}" +__version__ = "{{ cookiecutter.version }}" diff --git a/{{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/my_module.py b/{{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/my_module.py new file mode 100644 index 00000000..1d4476d5 --- /dev/null +++ b/{{cookiecutter.directory_name}}/src/{{cookiecutter.package_name}}/my_module.py @@ -0,0 +1,29 @@ +"""Documentation about the {{ cookiecutter.package_name }} module.""" + + +# FIXME: put actual code here +def hello(name: str) -> str: + """Say hello. + + Function docstring using Google docstring style. + + Args: + name (str): Name to say hello to + + Returns: + str: Hello message + + Raises: + ValueError: If `name` is equal to `nobody` + + Example: + This function can be called with `Jane Smith` as argument using + + >>> from {{ cookiecutter.package_name }}.my_module import hello + >>> hello('Jane Smith') + 'Hello Jane Smith!' + + """ + if name == 'nobody': + raise ValueError('Can not say hello to nobody') + return f'Hello {name}!' diff --git a/{{cookiecutter.project_name}}/tests/__init__.py b/{{cookiecutter.directory_name}}/tests/__init__.py similarity index 100% rename from {{cookiecutter.project_name}}/tests/__init__.py rename to {{cookiecutter.directory_name}}/tests/__init__.py diff --git a/{{cookiecutter.directory_name}}/tests/test_my_module.py b/{{cookiecutter.directory_name}}/tests/test_my_module.py new file mode 100644 index 00000000..ad6481d6 --- /dev/null +++ b/{{cookiecutter.directory_name}}/tests/test_my_module.py @@ -0,0 +1,26 @@ +"""Tests for the {{ cookiecutter.package_name }}.my_module module.""" +import pytest +from {{ cookiecutter.package_name }}.my_module import hello + + +def test_hello(): + """Example using assert.""" + assert hello('nlesc') == 'Hello nlesc!' + + +def test_hello_with_error(): + """Example of testing for raised errors.""" + with pytest.raises(ValueError) as excinfo: + hello('nobody') + assert 'Can not say hello to nobody' in str(excinfo.value) + + +@pytest.fixture +def some_name(): + """Example fixture.""" + return 'Jane Smith' + + +def test_hello_with_fixture(some_name): + """Example using a fixture.""" + assert hello(some_name) == 'Hello Jane Smith!' 
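
As a usage note (not part of the generated files): the example tests above, and the doctest embedded in `my_module.hello`, can be run selectively. A sketch, assuming the development extras are installed:

```shell
# run only the example test module
python -m pytest -v tests/test_my_module.py

# additionally collect the doctest from the hello() docstring
python -m pytest --doctest-modules src/
```
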
diff --git a/{{cookiecutter.project_name}}/.bumpversion.cfg b/{{cookiecutter.project_name}}/.bumpversion.cfg deleted file mode 100644 index bb9563d4..00000000 --- a/{{cookiecutter.project_name}}/.bumpversion.cfg +++ /dev/null @@ -1,17 +0,0 @@ -[bumpversion] -current_version = {{ cookiecutter.version }} - -[comment] -comment = The contents of this file cannot be merged with that of setup.cfg until https://github.com/c4urself/bump2version/issues/185 is resolved - -[bumpversion:file:{{ cookiecutter.package_name }}/__version__.py] -search = __version__ = "{current_version}" -replace = __version__ = "{new_version}" - -[bumpversion:file:setup.cfg] -search = version = {current_version} -replace = version = {new_version} - -[bumpversion:file:CITATION.cff] -search = version: "{current_version}" -replace = version: "{new_version}" diff --git a/{{cookiecutter.project_name}}/.githooks/pre-commit b/{{cookiecutter.project_name}}/.githooks/pre-commit deleted file mode 100755 index 31ca5c16..00000000 --- a/{{cookiecutter.project_name}}/.githooks/pre-commit +++ /dev/null @@ -1,31 +0,0 @@ -#!/bin/sh - -echo "Script $0 triggered ..." - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -echo "Starting prospector analysis using configuration from .prospector.yml..." - -# quietly run prospector -prospector 1>/dev/null - -# use return code to abort commit if necessary -if [ $? != "0" ]; then - echo "Commit aborted. Run 'prospector' to see the errors." - exit 1 -fi - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # -echo "Starting isort analysis using configuration from setup.cfg..." - -# recursively run isort on {{ cookiecutter.package_name }}/ directory, don't try to automatically fix anything -isort --recursive --check-only {{ cookiecutter.package_name }} - -if [ $? != "0" ]; then - echo "Commit aborted." - echo " Run 'isort --recursive --check-only --diff {{ cookiecutter.package_name }}' to see what's wrong." - echo " Run 'isort --recursive {{ cookiecutter.package_name }}' to let isort fix problems automatically." - exit 1 -fi - -echo "Pre-commit checks completed successfully." -exit 0 diff --git a/{{cookiecutter.project_name}}/.github/next_steps/02_citation.md b/{{cookiecutter.project_name}}/.github/next_steps/02_citation.md deleted file mode 100644 index ea181384..00000000 --- a/{{cookiecutter.project_name}}/.github/next_steps/02_citation.md +++ /dev/null @@ -1,43 +0,0 @@ ---- -title: 'Next step: Citation data' ---- - -It is likely that your `CITATION.cff` currently doesn't pass validation. The error messages you get from the [`cffconvert`]({{cookiecutter.repository}}/actions/workflows/cffconvert.yml) GitHub Action are unfortunately a bit cryptic, but doing the following helps: - -- [ ] Check if the `given-name` and `family-name` keys need updating. If your family name has a name particle like `von` or `van` or `de`, use the `name-particle` key; if your name has a suffix like `Sr` or `IV`, use `name-suffix`. For details, refer to the schema description: https://github.com/citation-file-format/citation-file-format -- [ ] Update the value of the `orcid` key. If you do not have an orcid yet, you can get one here [https://orcid.org/](https://orcid.org/). -- [ ] Add more authors if needed -- [ ] Update `date-released` using the YYYY-MM-DD format. 
-- [ ] Update the `doi` key with the conceptDOI for your repository (see [https://help.zenodo.org](https://help.zenodo.org/) for more information on what a conceptDOI is). If your project doesn't have a DOI yet, you can use the string `10.0000/FIXME` to pass validation. -- [ ] Verify that the `keywords` array accurately describes your project. - -Once you do all the steps above, the `cffconvert` workflow will tell you what content it expected to see in `.zenodo.json`. Copy-paste from the GitHub Action log into a new file `.zenodo.json`. Afterwards, the `cffconvert` GitHub Action should be green. - - -To help you keep the citation metadata up to date and synchronized, the [`cffconvert`]({{cookiecutter.repository}}/actions/workflows/cffconvert.yml) GitHub Action checks the following 6 aspects: - -1. Whether your repository includes a `CITATION.cff` file. - - _By including this file, authors of the software can receive credit for the work they put in._ - -1. Whether your `CITATION.cff` is valid YAML. - - _Visit http://www.yamllint.com/ to see if the contents of your CITATION.cff are valid YAML._ - -1. Whether your `CITATION.cff` adheres to the schema (as listed in the `CITATION.cff` file itself under key `cff-version`). - - _The Citation File Format schema can be found [here](https://github.com/citation-file-format/citation-file-format), along with an explanation of all the keys. You're advised to use the latest available schema version._ - -1. Whether your repository includes a `.zenodo.json` file. - - _With this file, you can control what metadata should be associated with any future releases of your software on Zenodo: things like the author names, along with their affiliations and their ORCIDs, the license under which the software has been released, as well as the name of your software and a short description. If your repository doesn't have a .zenodo.json file, Zenodo will take a somewhat crude guess to assign these metadata._ - - _The `cffconvert` GitHub action will tell you what it expects to find in `.zenodo.json`, just copy and paste it to a new file named `.zenodo.json`. The suggested text ignores CITATION.cff's `version`, `commit`, and `date-released`. `cffconvert` considers these keys `suspect` in the sense that they are often out of date, and there is little purpose to telling Zenodo about these properties: Zenodo already knows._ - -1. Whether `.zenodo.json` is valid JSON. - - _Currently unimplemented, but you can check for yourself on [https://jsonlint.com/](https://jsonlint.com/)._ - -1. Whether `CITATION.cff` and `.zenodo.json` contain equivalent data. - - _This final check verifies that the two files are in sync. The check ignores CITATION.cff's `version`, `commit`, and `date-released`._ diff --git a/{{cookiecutter.project_name}}/.github/next_steps/05_linting.md b/{{cookiecutter.project_name}}/.github/next_steps/05_linting.md deleted file mode 100644 index 385fee66..00000000 --- a/{{cookiecutter.project_name}}/.github/next_steps/05_linting.md +++ /dev/null @@ -1,17 +0,0 @@ ---- -title: 'Next step: Linting' ---- - -Your repository has a [workflow]({{ cookiecutter.repository }}/blob/main/.github/workflows/lint.yml) which [lints](https://en.wikipedia.org/wiki/Lint_(software)) your code after every push and when creating a pull request. - -Linter workflow may fail if `description` or `keywords` field in [setup.cfg]({{ cookiecutter.repository }}/blob/main/setup.cfg) is empty. Please update these fields. 
To validate your changes run: - -```shell -prospector -``` - -Enabling [githook](https://git-scm.com/docs/githooks) will automatically lint your code in every commit. You can enable it by running the command below. - -```shell -git config --local core.hooksPath .githooks -``` diff --git a/{{cookiecutter.project_name}}/.github/workflows/build.yml b/{{cookiecutter.project_name}}/.github/workflows/build.yml deleted file mode 100644 index 3fdc9ebc..00000000 --- a/{{cookiecutter.project_name}}/.github/workflows/build.yml +++ /dev/null @@ -1,33 +0,0 @@ -name: build - -on: [push, pull_request] - -jobs: - - build: - name: Build for (${{ matrix.python-version }}, ${{ matrix.os }}) - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] - python-version: ['3.6', '3.7', '3.8', '3.9'] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Python info - shell: bash -l {0} - run: | - which python3 - python3 --version - - name: Upgrade pip and install dependencies - run: | - python3 -m pip install --upgrade pip setuptools - python3 -m pip install .[publishing] - - name: Verify that we can build the package - shell: bash -l {0} - run: | - python3 setup.py sdist bdist_wheel diff --git a/{{cookiecutter.project_name}}/.github/workflows/cffconvert.yml b/{{cookiecutter.project_name}}/.github/workflows/cffconvert.yml deleted file mode 100644 index 0d2ac633..00000000 --- a/{{cookiecutter.project_name}}/.github/workflows/cffconvert.yml +++ /dev/null @@ -1,15 +0,0 @@ -name: cffconvert - -on: [push, pull_request] - -jobs: - - verify: - name: "cffconvert" - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - name: Check out a copy of the repository - - - uses: citation-file-format/cffconvert-github-action@main - name: Check whether the citation metadata from CITATION.cff is equivalent to that in .zenodo.json diff --git a/{{cookiecutter.project_name}}/.github/workflows/lint.yml b/{{cookiecutter.project_name}}/.github/workflows/lint.yml deleted file mode 100644 index ab502e86..00000000 --- a/{{cookiecutter.project_name}}/.github/workflows/lint.yml +++ /dev/null @@ -1,32 +0,0 @@ -name: lint - -on: [push, pull_request] - -jobs: - - lint: - name: Lint for (${{ matrix.python-version }}, ${{ matrix.os }}) - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: ['ubuntu-latest', 'macos-latest', 'windows-latest'] - python-version: ['3.6', '3.7', '3.8', '3.9'] - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Python info - shell: bash -l {0} - run: | - which python3 - python3 --version - - name: Install package and its dependencies - run: | - python3 -m pip install --upgrade pip setuptools - python3 -m pip install .[dev] - - name: Check style against standards using prospector - shell: bash -l {0} - run: prospector --zero-exit --output-format grouped --output-format pylint:pylint-report.txt diff --git a/{{cookiecutter.project_name}}/.prospector.yml b/{{cookiecutter.project_name}}/.prospector.yml deleted file mode 100644 index f7902836..00000000 --- a/{{cookiecutter.project_name}}/.prospector.yml +++ /dev/null @@ -1,29 +0,0 @@ -# prospector configuration file - ---- - -output-format: grouped - -strictness: medium -doc-warnings: false -test-warnings: true 
-member-warnings: false - -ignore-paths: - - docs - -pyroma: - run: true - -pep8: - full: true - -pep257: - disable: [ - # Disable because not part of PEP257 official convention: - # see http://pep257.readthedocs.io/en/latest/error_codes.html - D203, # 1 blank line required before class docstring - D212, # Multi-line docstring summary should start at the first line - D213, # Multi-line docstring summary should start at the second line - D404, # First word of the docstring should not be This - ] diff --git a/{{cookiecutter.project_name}}/.pylintrc b/{{cookiecutter.project_name}}/.pylintrc deleted file mode 100644 index 659dd6e6..00000000 --- a/{{cookiecutter.project_name}}/.pylintrc +++ /dev/null @@ -1,597 +0,0 @@ -[MASTER] - -# A comma-separated list of package or module names from where C extensions may -# be loaded. Extensions are loading into the active Python interpreter and may -# run arbitrary code. -extension-pkg-whitelist= - -# Specify a score threshold to be exceeded before program exits with error. -fail-under=10 - -# Add files or directories to the blacklist. They should be base names, not -# paths. -ignore=CVS - -# Add files or directories matching the regex patterns to the blacklist. The -# regex matches against base names, not paths. -ignore-patterns= - -# Python code to execute, usually for sys.path manipulation such as -# pygtk.require(). -#init-hook= - -# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the -# number of processors available to use. -jobs=1 - -# Control the amount of potential inferred values when inferring a single -# object. This can help the performance when dealing with large functions or -# complex, nested conditions. -limit-inference-results=100 - -# List of plugins (as comma separated values of python module names) to load, -# usually to register additional checkers. -load-plugins= - -# Pickle collected data for later comparisons. -persistent=yes - -# When enabled, pylint would attempt to guess common misconfiguration and emit -# user-friendly hints instead of false-positive error messages. -suggestion-mode=yes - -# Allow loading of arbitrary C extensions. Extensions are imported into the -# active Python interpreter and may run arbitrary code. -unsafe-load-any-extension=no - - -[MESSAGES CONTROL] - -# Only show warnings with the listed confidence levels. Leave empty to show -# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. -confidence= - -# Disable the message, report, category or checker with the given id(s). You -# can either give multiple identifiers separated by comma (,) or put this -# option multiple times (only on the command line, not in the configuration -# file where it should appear only once). You can also use "--disable=all" to -# disable everything first and then reenable specific checks. For example, if -# you want to run only the similarities checker, you can use "--disable=all -# --enable=similarities". If you want to run only the classes checker, but have -# no Warning level messages displayed, use "--disable=all --enable=classes -# --disable=W". 
-disable=apply-builtin, - backtick, - bad-inline-option, - bad-python3-import, - basestring-builtin, - buffer-builtin, - cmp-builtin, - cmp-method, - coerce-builtin, - coerce-method, - comprehension-escape, - delslice-method, - deprecated-itertools-function, - deprecated-operator-function, - deprecated-pragma, - deprecated-str-translate-call, - deprecated-string-function, - deprecated-sys-function, - deprecated-types-field, - deprecated-urllib-function, - dict-items-not-iterating, - dict-iter-method, - dict-keys-not-iterating, - dict-values-not-iterating, - dict-view-method, - div-method, - duplicate-code, - empty-docstring, - eq-without-hash, - exception-escape, - exception-message-attribute, - execfile-builtin, - file-builtin, - file-ignored, - filter-builtin-not-iterating, - getslice-method, - hex-method, - idiv-method, - import-star-module-level, - indexing-exception, - input-builtin, - intern-builtin, - invalid-str-codec, - locally-disabled, - long-builtin, - long-suffix, - map-builtin-not-iterating, - metaclass-assignment, - missing-module-docstring, - next-method-called, - next-method-defined, - no-absolute-import, - non-ascii-bytes-literal, - nonzero-method, - oct-method, - old-division, - old-ne-operator, - old-octal-literal, - old-raise-syntax, - parameter-unpacking, - print-statement, - raising-string, - range-builtin-not-iterating, - raw_input-builtin, - raw-checker-failed, - rdiv-method, - reduce-builtin, - reload-builtin, - round-builtin, - setslice-method, - standarderror-builtin, - suppressed-message, - sys-max-int, - unichr-builtin, - unicode-builtin, - unpacking-in-except, - unsubscriptable-object, - use-symbolic-message-instead, - useless-suppression, - using-cmp-argument, - xrange-builtin, - xreadlines-attribute, - zip-builtin-not-iterating - -# Enable the message, report, category or checker with the given id(s). You can -# either give multiple identifier separated by comma (,) or put this option -# multiple time (only on the command line, not in the configuration file where -# it should appear only once). See also the "--disable" option for examples. -enable=c-extension-no-member - - -[REPORTS] - -# Python expression which should return a score less than or equal to 10. You -# have access to the variables 'error', 'warning', 'refactor', and 'convention' -# which contain the number of messages in each category, as well as 'statement' -# which is the total number of statements analyzed. This score is used by the -# global evaluation report (RP0004). -evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) - -# Template used to display messages. This is a python new-style format string -# used to format the message information. See doc for all details. -#msg-template= - -# Set the output format. Available formats are text, parseable, colorized, json -# and msvs (visual studio). You can also give a reporter class, e.g. -# mypackage.mymodule.MyReporterClass. -output-format=text - -# Tells whether to display a full report or only the messages. -reports=no - -# Activate the evaluation score. -score=yes - - -[REFACTORING] - -# Maximum number of nested blocks for function / method body -max-nested-blocks=5 - -# Complete name of functions that never returns. When checking for -# inconsistent-return-statements if a never returning function is called then -# it will be considered as an explicit return statement and no message will be -# printed. -never-returning-functions=sys.exit - - -[BASIC] - -# Naming style matching correct argument names. 
-argument-naming-style=snake_case - -# Regular expression matching correct argument names. Overrides argument- -# naming-style. -#argument-rgx= - -# Naming style matching correct attribute names. -attr-naming-style=snake_case - -# Regular expression matching correct attribute names. Overrides attr-naming- -# style. -#attr-rgx= - -# Bad variable names which should always be refused, separated by a comma. -bad-names=foo, - bar, - baz, - toto, - tutu, - tata - -# Bad variable names regexes, separated by a comma. If names match any regex, -# they will always be refused -bad-names-rgxs= - -# Naming style matching correct class attribute names. -class-attribute-naming-style=any - -# Regular expression matching correct class attribute names. Overrides class- -# attribute-naming-style. -#class-attribute-rgx= - -# Naming style matching correct class names. -class-naming-style=PascalCase - -# Regular expression matching correct class names. Overrides class-naming- -# style. -#class-rgx= - -# Naming style matching correct constant names. -const-naming-style=UPPER_CASE - -# Regular expression matching correct constant names. Overrides const-naming- -# style. -#const-rgx= - -# Minimum line length for functions/classes that require docstrings, shorter -# ones are exempt. -docstring-min-length=-1 - -# Naming style matching correct function names. -function-naming-style=snake_case - -# Regular expression matching correct function names. Overrides function- -# naming-style. -#function-rgx= - -# Good variable names which should always be accepted, separated by a comma. -good-names=i, - ex, - Run, - _ - -# Good variable names regexes, separated by a comma. If names match any regex, -# they will always be accepted -good-names-rgxs= - -# Include a hint for the correct naming format with invalid-name. -include-naming-hint=no - -# Naming style matching correct inline iteration names. -inlinevar-naming-style=any - -# Regular expression matching correct inline iteration names. Overrides -# inlinevar-naming-style. -#inlinevar-rgx= - -# Naming style matching correct method names. -method-naming-style=snake_case - -# Regular expression matching correct method names. Overrides method-naming- -# style. -#method-rgx= - -# Naming style matching correct module names. -module-naming-style=snake_case - -# Regular expression matching correct module names. Overrides module-naming- -# style. -#module-rgx= - -# Colon-delimited sets of names that determine each other's naming style when -# the name regexes allow several styles. -name-group= - -# Regular expression which should only match function or class names that do -# not require a docstring. -no-docstring-rgx=^_ - -# List of decorators that produce properties, such as abc.abstractproperty. Add -# to this list to register other decorators that produce valid properties. -# These decorators are taken in consideration only for invalid-name. -property-classes=abc.abstractproperty - -# Naming style matching correct variable names. -variable-naming-style=snake_case - -# Regular expression matching correct variable names. Overrides variable- -# naming-style. -#variable-rgx= - - -[SPELLING] - -# Limits count of emitted suggestions for spelling mistakes. -max-spelling-suggestions=4 - -# Spelling dictionary name. Available dictionaries: none. To make it work, -# install the python-enchant package. -spelling-dict= - -# List of comma separated words that should not be checked. -spelling-ignore-words= - -# A path to a file that contains the private dictionary; one word per line. 
-spelling-private-dict-file= - -# Tells whether to store unknown words to the private dictionary (see the -# --spelling-private-dict-file option) instead of raising a message. -spelling-store-unknown-words=no - - -[STRING] - -# This flag controls whether inconsistent-quotes generates a warning when the -# character used as a quote delimiter is used inconsistently within a module. -check-quote-consistency=yes - -# This flag controls whether the implicit-str-concat should generate a warning -# on implicit string concatenation in sequences defined over several lines. -check-str-concat-over-line-jumps=yes - - -[VARIABLES] - -# List of additional names supposed to be defined in builtins. Remember that -# you should avoid defining new builtins when possible. -additional-builtins= - -# Tells whether unused global variables should be treated as a violation. -allow-global-unused-variables=yes - -# List of strings which can identify a callback function by name. A callback -# name must start or end with one of those strings. -callbacks=cb_, - _cb - -# A regular expression matching the name of dummy variables (i.e. expected to -# not be used). -dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ - -# Argument names that match this expression will be ignored. Default to name -# with leading underscore. -ignored-argument-names=_.*|^ignored_|^unused_ - -# Tells whether we should check for unused import in __init__ files. -init-import=yes - -# List of qualified module names which can have objects that can redefine -# builtins. -redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io - - -[SIMILARITIES] - -# Ignore comments when computing similarities. -ignore-comments=yes - -# Ignore docstrings when computing similarities. -ignore-docstrings=yes - -# Ignore imports when computing similarities. -ignore-imports=no - -# Minimum lines number of a similarity. -min-similarity-lines=4 - - -[LOGGING] - -# The type of string formatting that logging methods do. `old` means using % -# formatting, `new` is for `{}` formatting. -logging-format-style=old - -# Logging modules to check that the string format arguments are in logging -# function parameter format. -logging-modules=logging - - -[MISCELLANEOUS] - -# List of note tags to take in consideration, separated by a comma. -notes=FIXME, - XXX, - TODO - -# Regular expression of note tags to take in consideration. -#notes-rgx= - - -[TYPECHECK] - -# List of decorators that produce context managers, such as -# contextlib.contextmanager. Add to this list to register other decorators that -# produce valid context managers. -contextmanager-decorators=contextlib.contextmanager - -# List of members which are set dynamically and missed by pylint inference -# system, and so shouldn't trigger E1101 when accessed. Python regular -# expressions are accepted. -generated-members= - -# Tells whether missing members accessed in mixin class should be ignored. A -# mixin class is detected if its name ends with "mixin" (case insensitive). -ignore-mixin-members=yes - -# Tells whether to warn about missing members when the owner of the attribute -# is inferred to be None. -ignore-none=yes - -# This flag controls whether pylint should warn about no-member and similar -# checks whenever an opaque object is returned when inferring. The inference -# can return multiple potential results while evaluating a Python object, but -# some branches might not be evaluated, which results in partial inference. 
In -# that case, it might be useful to still emit no-member and other checks for -# the rest of the inferred objects. -ignore-on-opaque-inference=yes - -# List of class names for which member attributes should not be checked (useful -# for classes with dynamically set attributes). This supports the use of -# qualified names. -ignored-classes=optparse.Values,thread._local,_thread._local - -# List of module names for which member attributes should not be checked -# (useful for modules/projects where namespaces are manipulated during runtime -# and thus existing member attributes cannot be deduced by static analysis). It -# supports qualified module names, as well as Unix pattern matching. -ignored-modules= - -# Show a hint with possible names when a member name was not found. The aspect -# of finding the hint is based on edit distance. -missing-member-hint=yes - -# The minimum edit distance a name should have in order to be considered a -# similar match for a missing member name. -missing-member-hint-distance=1 - -# The total number of similar names that should be taken in consideration when -# showing a hint for a missing member. -missing-member-max-choices=1 - -# List of decorators that change the signature of a decorated function. -signature-mutators= - - -[FORMAT] - -# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. -expected-line-ending-format= - -# Regexp for a line that is allowed to be longer than the limit. -ignore-long-lines=^\s*(# )??$ - -# Number of spaces of indent required inside a hanging or continued line. -indent-after-paren=4 - -# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 -# tab). -indent-string=' ' - -# Maximum number of characters on a single line. -max-line-length=120 - -# Maximum number of lines in a module. -max-module-lines=1000 - -# List of optional constructs for which whitespace checking is disabled. `dict- -# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. -# `trailing-comma` allows a space between comma and closing bracket: (a, ). -# `empty-line` allows space-only lines. -no-space-check=trailing-comma, - dict-separator - -# Allow the body of a class to be on the same line as the declaration if body -# contains single statement. -single-line-class-stmt=no - -# Allow the body of an if to be on the same line as the test if there is no -# else. -single-line-if-stmt=no - - -[IMPORTS] - -# List of modules that can be imported at any level, not just the top level -# one. -allow-any-import-level= - -# Allow wildcard imports from modules that define __all__. -allow-wildcard-with-all=no - -# Analyse import fallback blocks. This can be used to support both Python 2 and -# 3 compatible code, which means that the block might have code that exists -# only in one or another interpreter, leading to false positives when analysed. -analyse-fallback-blocks=no - -# Deprecated modules which should not be used, separated by a comma. -deprecated-modules=optparse,tkinter.tix - -# Create a graph of external dependencies in the given file (report RP0402 must -# not be disabled). -ext-import-graph= - -# Create a graph of every (i.e. internal and external) dependencies in the -# given file (report RP0402 must not be disabled). -import-graph= - -# Create a graph of internal dependencies in the given file (report RP0402 must -# not be disabled). -int-import-graph= - -# Force import order to recognize a module as part of the standard -# compatibility libraries. 
-known-standard-library= - -# Force import order to recognize a module as part of a third party library. -known-third-party=enchant - -# Couples of modules and preferred modules, separated by a comma. -preferred-modules= - - -[DESIGN] - -# Maximum number of arguments for function / method. -max-args=5 - -# Maximum number of attributes for a class (see R0902). -max-attributes=7 - -# Maximum number of boolean expressions in an if statement (see R0916). -max-bool-expr=5 - -# Maximum number of branch for function / method body. -max-branches=12 - -# Maximum number of locals for function / method body. -max-locals=15 - -# Maximum number of parents for a class (see R0901). -max-parents=7 - -# Maximum number of public methods for a class (see R0904). -max-public-methods=20 - -# Maximum number of return / yield for function / method body. -max-returns=6 - -# Maximum number of statements in function / method body. -max-statements=50 - -# Minimum number of public methods for a class (see R0903). -min-public-methods=2 - - -[CLASSES] - -# List of method names used to declare (i.e. assign) instance attributes. -defining-attr-methods=__init__, - __new__, - setUp, - __post_init__ - -# List of member names, which should be excluded from the protected access -# warning. -exclude-protected=_asdict, - _fields, - _replace, - _source, - _make - -# List of valid names for the first argument in a class method. -valid-classmethod-first-arg=cls - -# List of valid names for the first argument in a metaclass class method. -valid-metaclass-classmethod-first-arg=cls - - -[EXCEPTIONS] - -# Exceptions that will emit a warning when being caught. Defaults to -# "BaseException, Exception". -overgeneral-exceptions=BaseException, - Exception diff --git a/{{cookiecutter.project_name}}/CONTRIBUTING.md b/{{cookiecutter.project_name}}/CONTRIBUTING.md deleted file mode 100644 index 2cf3cdd1..00000000 --- a/{{cookiecutter.project_name}}/CONTRIBUTING.md +++ /dev/null @@ -1,41 +0,0 @@ -# Contributing guidelines - -We welcome any kind of contribution to our software, from simple comment or question to a full fledged [pull request](https://help.github.com/articles/about-pull-requests/). Please read and follow our [Code of Conduct](CODE_OF_CONDUCT.md). - -A contribution can be one of the following cases: - -1. you have a question; -1. you think you may have found a bug (including unexpected behavior); -1. you want to make some kind of change to the code base (e.g. to fix a bug, to add a new feature, to update documentation); -1. you want to make a new release of the code base. - -The sections below outline the steps in each case. - -## You have a question - -1. use the search functionality [here]({{cookiecutter.repository}}/issues) to see if someone already filed the same issue; -2. if your issue search did not yield any relevant results, make a new issue; -3. apply the "Question" label; apply other labels when relevant. - -## You think you may have found a bug - -1. use the search functionality [here]({{cookiecutter.repository}}/issues) to see if someone already filed the same issue; -1. if your issue search did not yield any relevant results, make a new issue, making sure to provide enough information to the rest of the community to understand the cause and context of the problem. 
Depending on the issue, you may want to include: - - the [SHA hashcode](https://help.github.com/articles/autolinked-references-and-urls/#commit-shas) of the commit that is causing your problem; - - some identifying information (name and version number) for dependencies you're using; - - information about the operating system; -1. apply relevant labels to the newly created issue. - -## You want to make some kind of change to the code base - -1. (**important**) announce your plan to the rest of the community *before you start working*. This announcement should be in the form of a (new) issue; -1. (**important**) wait until some kind of consensus is reached about your idea being a good idea; -1. if needed, fork the repository to your own Github profile and create your own feature branch off of the latest master commit. While working on your feature branch, make sure to stay up to date with the master branch by pulling in changes, possibly from the 'upstream' repository (follow the instructions [here](https://help.github.com/articles/configuring-a-remote-for-a-fork/) and [here](https://help.github.com/articles/syncing-a-fork/)); -1. make sure the existing tests still work by running ``pytest``; -1. add your own tests (if necessary); -1. update or expand the documentation; -1. update the `CHANGELOG.md` file with change; -1. push your feature branch to (your fork of) the {{ cookiecutter.package_name }} repository on GitHub; -1. create the pull request, e.g. following the instructions [here](https://help.github.com/articles/creating-a-pull-request/). - -In case you feel like you've made a valuable contribution, but you don't know how to write or run tests for it, or how to generate the documentation: don't let this discourage you from making the pull request; we can help you! Just go ahead and submit the pull request, but keep in mind that you might be asked to append additional commits to your pull request. diff --git a/{{cookiecutter.project_name}}/README.dev.md b/{{cookiecutter.project_name}}/README.dev.md deleted file mode 100644 index 61cf613d..00000000 --- a/{{cookiecutter.project_name}}/README.dev.md +++ /dev/null @@ -1,162 +0,0 @@ -# `{{ cookiecutter.package_name }}` developer documentation - -If you're looking for user documentation, go [here](README.md). - -## Development install - -```shell -# Create a virtual environment, e.g. with -python3 -m venv env - -# activate virtual environment -source env/bin/activate - -# make sure to have a recent version of pip and setuptools -python3 -m pip install --upgrade pip setuptools - -# (from the project root directory) -# install {{ cookiecutter.package_name }} as an editable package -python3 -m pip install --no-cache-dir --editable . -# install development dependencies -python3 -m pip install --no-cache-dir --editable .[dev] -``` - -Afterwards check that the install directory is present in the `PATH` environment variable. - -## Running the tests - -Running the tests requires an activated virtual environment with the development tools installed. - -```shell -pytest -v -``` - -## Running linters locally - -For linting we will use [prospector](https://pypi.org/project/prospector/) and to sort imports we will use -[isort](https://pycqa.github.io/isort/). Running the linters requires an activated virtual environment with the -development tools installed. 
- -```shell -# linter -prospector - -# recursively check import style for the {{ cookiecutter.package_name }} module only -isort --recursive --check-only {{ cookiecutter.package_name }} - -# recursively check import style for the {{ cookiecutter.package_name }} module only and show -# any proposed changes as a diff -isort --recursive --check-only --diff {{ cookiecutter.package_name }} - -# recursively fix import style for the {{ cookiecutter.package_name }} module only -isort --recursive {{ cookiecutter.package_name }} -``` - -You can enable automatic linting with `prospector` and `isort` on commit by enabling the git hook from `.githooks/pre-commit`, like so: - -```shell -git config --local core.hooksPath .githooks -``` - -## Generating the API docs - -```shell -cd docs -make html -``` - -The documentation will be in `docs/_build/` - -## Versioning - -Bumping the version across all files is done with bumpversion, e.g. - -```shell -bumpversion major -bumpversion minor -bumpversion patch -``` - -## Making a release - -This section describes how to make a release in 3 parts: - -1. preparation -1. making a release on PyPI -1. making a release on GitHub - -### (1/3) Preparation - -1. Update the `CHANGELOG.md` -2. Verify that the information in `CITATION.cff` is correct, and that `.zenodo.json` contains equivalent data -3. Make sure the version has been updated. -4. Run the unit tests with `pytest tests/` - -### (2/3) PyPI - -In a new terminal, without an activated virtual environment or an env directory: - -```shell -# prepare a new directory -cd $(mktemp -d --tmpdir {{ cookiecutter.package_name }}.XXXXXX) - -# fresh git clone ensures the release has the state of origin/main branch -git clone {{ cookiecutter.repository }} . - -# prepare a clean virtual environment and activate it -python3 -m venv env -source env/bin/activate - -# make sure to have a recent version of pip and setuptools -python3 -m pip install --upgrade pip setuptools - -# install runtime dependencies and publishing dependencies -python3 -m pip install --no-cache-dir . -python3 -m pip install --no-cache-dir .[publishing] - -# clean up any previously generated artefacts -rm -rf {{ cookiecutter.package_name }}.egg-info -rm -rf dist - -# create the source distribution and the wheel -python3 setup.py sdist bdist_wheel - -# upload to test pypi instance (requires credentials) -twine upload --repository-url https://test.pypi.org/legacy/ dist/* -``` - -Visit -[https://test.pypi.org/project/{{cookiecutter.package_name}}](https://test.pypi.org/project/{{cookiecutter.package_name}}) -and verify that your package was uploaded successfully. Keep the terminal open, we'll need it later. - -In a new terminal, without an activated virtual environment or an env directory: - -```shell -cd $(mktemp -d --tmpdir {{ cookiecutter.package_name }}-test.XXXXXX) - -# prepare a clean virtual environment and activate it -python3 -m venv env -source env/bin/activate - -# make sure to have a recent version of pip and setuptools -pip install --upgrade pip setuptools - -# install from test pypi instance: -python3 -m pip -v install --no-cache-dir \ ---index-url https://test.pypi.org/simple/ \ ---extra-index-url https://pypi.org/simple {{ cookiecutter.package_name }} -``` - -Check that the package works as it should when installed from pypitest. 
- -Then upload to pypi.org with: - -```shell -# Back to the first terminal, -# FINAL STEP: upload to PyPI (requires credentials) -twine upload dist/* -``` - -### (3/3) GitHub - -Don't forget to also make a release on GitHub. If your repository uses the GitHub-Zenodo integration this will also trigger Zenodo into making a snapshot of your repository and sticking a DOI on it. diff --git a/{{cookiecutter.project_name}}/docs/_static/theme_overrides.css b/{{cookiecutter.project_name}}/docs/_static/theme_overrides.css deleted file mode 100644 index 8194805c..00000000 --- a/{{cookiecutter.project_name}}/docs/_static/theme_overrides.css +++ /dev/null @@ -1,12 +0,0 @@ -/* override table width restrictions */ -@media screen and (min-width: 767px) { - .wy-table-responsive table td { - /* !important prevents the common CSS stylesheets from overriding - this as on RTD they are loaded after this stylesheet */ - white-space: normal !important; - } - - .wy-table-responsive { - overflow: visible !important; - } -} diff --git a/{{cookiecutter.project_name}}/docs/conf.py b/{{cookiecutter.project_name}}/docs/conf.py deleted file mode 100644 index d18ab35e..00000000 --- a/{{cookiecutter.project_name}}/docs/conf.py +++ /dev/null @@ -1,202 +0,0 @@ -# {{ cookiecutter.package_name }} documentation build configuration file, created by -# sphinx-quickstart on {% now "local", "%a %b %d %H:%M:%S %Y" %}. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -import os -import sys - -here = os.path.dirname(__file__) -sys.path.insert(0, os.path.abspath(os.path.join(here, ".."))) - -import {{ cookiecutter.package_name }} - - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = "1.0" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named "sphinx.ext.*") or your custom -# ones. -extensions = ["sphinx.ext.autodoc", "sphinx.ext.napoleon"] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = [".rst", ".md"] -source_suffix = ".rst" - -# The main toctree document. -main_doc = "index" - -# General information about the project. -project = u"{{ cookiecutter.package_name }}" -copyright = u"{% now "local", "%Y" %}, {{ cookiecutter.copyright_holder }}" -author = u"{{ cookiecutter.full_name.replace('\"', '\\\"') }}" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = {{ cookiecutter.package_name }}.__version__ -# The full version, including alpha/beta/rc tags. -release = {{ cookiecutter.package_name }}.__version__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - -# -- Run apidoc plug-in manually, as readthedocs doesn't support it ------- -# See https://github.com/rtfd/readthedocs.org/issues/1139 -def run_apidoc(_): - here = os.path.dirname(__file__) - out = os.path.abspath(os.path.join(here, "apidocs")) - src = os.path.abspath(os.path.join(here, "..", "{{ cookiecutter.package_name }}")) - - ignore_paths = [] - - argv = [ - "-f", - "-T", - "-e", - "-M", - "-o", out, - src - ] + ignore_paths - - try: - # Sphinx 1.7+ - from sphinx.ext import apidoc - apidoc.main(argv) - except ImportError: - # Sphinx 1.6 (and earlier) - from sphinx import apidoc - argv.insert(0, apidoc.__file__) - apidoc.main(argv) - - -def setup(app): - app.connect("builder-inited", run_apidoc) - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "sphinx_rtd_theme" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] -html_context = { - "css_files": [ - "_static/theme_overrides.css" # override wide tables in RTD theme - ] -} - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - "**": [ - "relations.html", # needs "show_related": True theme option to display - "searchbox.html" - ] -} - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = "{{ cookiecutter.package_name }}_doc" - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ("letterpaper" or "a4paper"). - # - # "papersize": "letterpaper", - - # The font size ("10pt", "11pt" or "12pt"). - # - # "pointsize": "10pt", - - # Additional stuff for the LaTeX preamble. - # - # "preamble": "", - - # Latex figure (float) alignment - # - # "figure_align": "htbp", -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (main_doc, "{{ cookiecutter.package_name }}.tex", u"{{ cookiecutter.package_name }} Documentation", - u"{{ cookiecutter.full_name }}", "manual") -] - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. 
List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (main_doc, "{{ cookiecutter.package_name }}", u"{{ cookiecutter.package_name }} Documentation", [author], 1) -] - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (main_doc, "{{ cookiecutter.package_name }}", u"{{ cookiecutter.package_name }} Documentation", - author, "{{ cookiecutter.package_name }}", "{{ cookiecutter.package_short_description }}", - "Miscellaneous") -] diff --git a/{{cookiecutter.project_name}}/docs/index.rst b/{{cookiecutter.project_name}}/docs/index.rst deleted file mode 100644 index d8ef571c..00000000 --- a/{{cookiecutter.project_name}}/docs/index.rst +++ /dev/null @@ -1,26 +0,0 @@ -.. {{ cookiecutter.package_name }} documentation main file, created by - sphinx-quickstart on Thu Jun 21 11:07:11 2018. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to {{ cookiecutter.package_name }}'s documentation! -========================================================== - -.. toctree:: - :maxdepth: 2 - :caption: Contents: - -API Reference -============= - -.. toctree:: - :maxdepth: 2 - - {{ cookiecutter.package_name }} - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/{{cookiecutter.project_name}}/pyproject.toml b/{{cookiecutter.project_name}}/pyproject.toml deleted file mode 100644 index 9787c3bd..00000000 --- a/{{cookiecutter.project_name}}/pyproject.toml +++ /dev/null @@ -1,3 +0,0 @@ -[build-system] -requires = ["setuptools", "wheel"] -build-backend = "setuptools.build_meta" diff --git a/{{cookiecutter.project_name}}/setup.cfg b/{{cookiecutter.project_name}}/setup.cfg deleted file mode 100644 index d957df47..00000000 --- a/{{cookiecutter.project_name}}/setup.cfg +++ /dev/null @@ -1,85 +0,0 @@ -# see documentation, e.g. 
-# - https://packaging.python.org/tutorials/packaging-projects/#configuring-metadata -# - https://setuptools.readthedocs.io/en/latest/userguide/declarative_config.html -# - https://www.python.org/dev/peps/pep-0314/ - -[metadata] -author = {{ cookiecutter.full_name }} -author_email = {{ cookiecutter.email }} -classifiers = - Development Status :: 2 - Pre-Alpha - Intended Audience :: Developers - {{ {"Apache Software License 2.0": "License :: OSI Approved :: Apache Software License", - "MIT license": "License :: OSI Approved :: MIT License", - "BSD license": "License :: OSI Approved :: BSD License", - "ISC license": "License :: OSI Approved :: ISC License (ISCL)", - "GNU General Public License v3 or later": "License :: OSI Approved :: GNU General Public License", - "Not open source": "License :: Other/Proprietary License" - }[cookiecutter.license] }} - Natural Language :: English - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 -description = {{ cookiecutter.package_short_description }} -keywords = - {{ cookiecutter.keyword1 }} - {{ cookiecutter.keyword2 }} -long_description = file: README.md -long_description_content_type = text/markdown -name = {{ cookiecutter.package_name }} -project_urls = - Bug Tracker = {{ cookiecutter.repository }}/issues -url = {{ cookiecutter.repository }} -version = {{ cookiecutter.version }} - -[options] -zip_safe = False -include_package_data = True -packages = find: -install_requires = - -[options.data_files] -# This section requires setuptools>=40.6.0 -# It remains empty for now -# Check if MANIFEST.in works for your purposes - -[options.extras_require] -dev = - bump2version - prospector[with_pyroma] - yapf - isort - pytest - pytest-cov - pycodestyle - docutils - pytest-runner - sphinx - sphinx_rtd_theme - recommonmark -publishing = - twine - wheel - -[options.packages.find] -include = {{ cookiecutter.package_name }}, {{ cookiecutter.package_name }}.* - -[coverage:run] -branch = True -source = {{ cookiecutter.package_name }} - -[tool:isort] -lines_after_imports = 2 -force_single_line = 1 -no_lines_before = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER -known_first_party = {{ cookiecutter.package_name }} -src_paths = {{ cookiecutter.package_name }},tests -line_length = 120 - -[tool:pytest] -testpaths = tests -# Note that visual debugger in some editors like pycharm gets confused by coverage calculation. -# As a workaround, configure the test configuration in pycharm et al with a --no-cov argument -addopts = --cov --cov-report xml --cov-report term --cov-report html diff --git a/{{cookiecutter.project_name}}/setup.py b/{{cookiecutter.project_name}}/setup.py deleted file mode 100644 index 64c3869b..00000000 --- a/{{cookiecutter.project_name}}/setup.py +++ /dev/null @@ -1,7 +0,0 @@ -#!/usr/bin/env python -import os -from setuptools import setup - - -# see setup.cfg -setup() diff --git a/{{cookiecutter.project_name}}/tests/test_my_module.py b/{{cookiecutter.project_name}}/tests/test_my_module.py deleted file mode 100644 index 76922037..00000000 --- a/{{cookiecutter.project_name}}/tests/test_my_module.py +++ /dev/null @@ -1,30 +0,0 @@ -#!/usr/bin/env python - -"""Tests for the {{ cookiecutter.package_name }}.my_module module. -""" -import pytest -from {{ cookiecutter.package_name }} import my_module - - -def test_hello(): - assert my_module.hello('nlesc') == 'Hello nlesc!' 
- - -def test_something(): - assert True - - -def test_with_error(): - with pytest.raises(ValueError): - # Do something that raises a ValueError - raise(ValueError) - - -# Fixture example -@pytest.fixture -def an_object(): - return {} - - -def test_my_module(an_object): - assert an_object == {} diff --git a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__version__.py b/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__version__.py deleted file mode 100644 index a955058c..00000000 --- a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/__version__.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = "{{ cookiecutter.version }}" diff --git a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/my_module.py b/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/my_module.py deleted file mode 100644 index 4ad555d4..00000000 --- a/{{cookiecutter.project_name}}/{{cookiecutter.package_name}}/my_module.py +++ /dev/null @@ -1,6 +0,0 @@ -"""Documentation about the {{ cookiecutter.package_name }} module.""" - - -# FIXME: put actual code here -def hello(name): - return f'Hello {name}!'
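Note: with the deletions above, generated packages no longer carry a separate `__version__.py` (nor the `.bumpversion.cfg` that rewrote it); as shown earlier in this diff, `__version__` is now assigned directly in the package `__init__.py`. A minimal sketch of reading it, again assuming a hypothetical generated package named `my_package`:

```python
# Sketch only: "my_package" is a placeholder for the generated package_name.
# The version string comes from the cookiecutter "version" answer.
import my_package

print(my_package.__version__)
```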