diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 00000000..bde39a55 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,68 @@ +name: 🐛 Bug report +description: Create a bug report +body: + - type: checkboxes + attributes: + label: Are you certain it's a bug? + description: If you're uncertain, please report at https://github.com/serverless/serverless-python-requirements/discussions instead + options: + - label: Yes, it looks like a bug + required: true + - type: checkboxes + attributes: + label: Are you using the latest plugin release? + description: Latest version can be checked at https://github.com/serverless/serverless-python-requirements/releases/latest + options: + - label: Yes, I'm using the latest plugin release + required: true + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Issue description + validations: + required: true + - type: textarea + attributes: + label: Service configuration (serverless.yml) content + description: | + Provide COMPLETE content of serverless.yml, ensuring that: + • It consistently reproduces described issue + • It's as minimal as possible + • Ideally with no other plugins involved + • Has sensitive parts masked out + + If not applicable, fill with "N/A" + render: yaml + validations: + required: true + - type: input + attributes: + label: Command name and used flags + description: | + Full command name with used flags (If not applicable, fill with "N/A") + placeholder: serverless [...flags] + validations: + required: true + - type: textarea + attributes: + label: Command output + description: | + COMPLETE command output. 
+ + If not applicable, fill with "N/A" + render: shell + validations: + required: true + - type: textarea + attributes: + label: Environment information + description: '"serverless --version" output + used version of the plugin' + render: shell + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..a7f83c6b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Question + url: https://github.com/serverless/serverless-python-requirements/discussions + about: Please ask and answer questions here diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 00000000..14907ec2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,21 @@ +name: 🎉 Feature request +description: Suggest an idea +body: + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Use case description + description: Describe the use case that needs to be addressed + validations: + required: true + - type: textarea + attributes: + label: Proposed solution (optional) + description: | + e.g. 
propose how the configuration and implementation of the new feature could look diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml new file mode 100644 index 00000000..01fb27a3 --- /dev/null +++ b/.github/workflows/integrate.yml @@ -0,0 +1,34 @@ +# master only + +name: Integrate + +on: + push: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + tagIfNewVersion: + name: Tag if new version + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # Ensure to have complete history of commits pushed with given push operation + # It's loose and imperfect assumption that no more than 30 commits will be pushed at once + fetch-depth: 30 + # Tag needs to be pushed with real user token, otherwise pushed tag won't trigger the actions workflow + # Hence we're passing 'serverless-ci' user authentication token + token: ${{ secrets.USER_GITHUB_TOKEN }} + + - name: Tag if new version + run: | + NEW_VERSION=`git diff -U0 ${{ github.event.before }} package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + git tag v$NEW_VERSION + git push --tags + fi diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 00000000..0e3dc867 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,52 @@ +# Version tags only + +name: Publish + +on: + push: + tags: + - v[0-9]+.[0-9]+.[0-9]+ + +jobs: + publish: + name: Publish + runs-on: ubuntu-latest + env: + # It'll work with secrets.GITHUB_TOKEN (which is provided by GitHub unconditionally) + # Still then release author would be "github-actions". 
It's better if it's dedicated repo bot + GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Retrieve node_modules from cache + id: cacheNodeModules + uses: actions/cache@v4 + with: + path: | + ~/.npm + node_modules + key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + + - name: Install Node.js and npm + uses: actions/setup-node@v4 + with: + node-version: 18.x + registry-url: https://registry.npmjs.org + + - name: Publish new version + env: + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npm publish + + # Note: No need to install dependencies as: + # 1. We have retrieved cached `node_modules` for very same `package.json` + # as stored with recent `master `build + # 2. If for some reason cache retrieval fails `npx` will download and install + # `github-release-from-cc-changelog` + + - name: Publish release notes + run: | + TEMP_ARRAY=($(echo $GITHUB_REF | tr "/" "\n")) + TAG=${TEMP_ARRAY[@]: -1} + npx github-release-from-cc-changelog $TAG diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml new file mode 100644 index 00000000..23e2d67f --- /dev/null +++ b/.github/workflows/validate.yml @@ -0,0 +1,92 @@ +# PR's only + +name: Validate + +on: + pull_request: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode18: + name: '[Linux] Node.js v18: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # For commitlint purpose ensure to have complete list of PR commits + # It's loose and imperfect assumption that PR has no more than 30 commits + fetch-depth: 30 + + - name: Retrieve last master commit (for `git diff` purposes) + run: | + git checkout -b pr + git fetch --prune --depth=30 origin +refs/heads/master:refs/remotes/origin/master + git checkout master + git 
checkout pr + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v4 + with: + path: | + ~/.npm + node_modules + key: npm-v18-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v18-${{ runner.os }}-${{ github.ref }}- + npm-v18-${{ runner.os }}-refs/heads/master- + + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + + - name: Install Node.js and npm + uses: actions/setup-node@v4 + with: + node-version: 18.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry && poetry self add poetry-plugin-export + + - name: Install serverless + run: npm install -g serverless@${{ matrix.sls-version }} + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Validate Prettier formatting + run: npm run prettier-check:updated + - name: Validate ESLint rules + run: npm run lint:updated + - name: Validate commit messages + if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id + run: npx commitlint -f master + - name: Validate changelog (if new version) + run: | + NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + npx dump-release-notes-from-cc-changelog $NEW_VERSION + fi + - name: Unit tests + run: npm test + env: + SERVERLESS_PLATFORM_STAGE: dev + SERVERLESS_LICENSE_KEY: ${{ secrets.SERVERLESS_LICENSE_KEY }} diff --git a/.gitignore b/.gitignore index 53d9d5b0..64bdbd6a 100644 --- a/.gitignore +++ b/.gitignore @@ -42,3 +42,40 @@ admin.env #PYTHON STUFF *.py[co] __pycache__ + +#NODE STUFF +package-lock.json +yarn.lock + +# Lockfiles +*.lock + +# Distribution / packaging 
+.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib64/ +parts/ +sdist/ +var/ +*.egg-info/ +.installed.cfg +*.egg + +# Serverless directories +.serverless +.requirements +.requirements.zip +unzip_requirements.py + +# Project ignores +puck/ +serverless.yml.bak + +# Generated packaging +*.tgz diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..bd28b9c5 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..42026cdc --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,103 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +### [6.1.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.1...v6.1.2) (2025-02-11) + +### Bug Fixes + +- Use absolute paths to ensure compatibility with v4 Compose ([#854](https://github.com/UnitedIncome/serverless-python-requirements/issues/854)) ([bceb737](https://github.com/UnitedIncome/serverless-python-requirements/commit/bceb7371dd64d59829377fe6fd16e17f631d0251)) + +### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03) + +## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) + +### Features + +- Support Scaleway provider ([#812](https://github.com/UnitedIncome/serverless-python-requirements/issues/812)) ([1b0faae](https://github.com/UnitedIncome/serverless-python-requirements/commit/1b0faaeb6aadd2bc4b1b53526e35298a98d00aca)) ([Andy Méry](https://github.com/cyclimse)) +- Improved pip failure logging ([#813](https://github.com/UnitedIncome/serverless-python-requirements/issues/813)) ([787b479](https://github.com/UnitedIncome/serverless-python-requirements/commit/787b4791306e9a3ded5f0177c304cfbce081c119)) ([Justin 
Lyons](https://github.com/babyhuey)) + +### Bug Fixes + +- Ensure proper support for mixed runtimes and architectures ([#815](https://github.com/UnitedIncome/serverless-python-requirements/issues/815)) ([27b70f4](https://github.com/UnitedIncome/serverless-python-requirements/commit/27b70f4d6a7e43fd0e9711bbb56752fee2762901)) ([Stijn IJzermans](https://github.com/stijzermans)) + +### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) + +### Bug Fixes + +- Add legacy `pipenv` backward compatability ([#742](https://github.com/UnitedIncome/serverless-python-requirements/issues/742)) ([22a1f83](https://github.com/UnitedIncome/serverless-python-requirements/commit/22a1f832ac8051f0963328743f9e768f8e66649e)) ([Randy Westergren](https://github.com/rwestergren)) +- Not crash when runtime is not `python` ([#773](https://github.com/UnitedIncome/serverless-python-requirements/issues/773)) ([c1f5ca1](https://github.com/UnitedIncome/serverless-python-requirements/commit/c1f5ca114de815ca19ad213a79e250b5b81f29b3)) ([Jim Kirkbride](https://github.com/jameskbride)) +- Remove outdated Pipenv requirements flag ([#780](https://github.com/UnitedIncome/serverless-python-requirements/issues/780)) ([ad40278](https://github.com/UnitedIncome/serverless-python-requirements/commit/ad40278629c63f4d0971637214b4d9bc20dbd288)) ([Jeff Gordon](https://github.com/jfgordon2)) + +### Maintenance Improvements + +- Fix integration test matrix configuration ([#755](https://github.com/UnitedIncome/serverless-python-requirements/issues/755)) ([e8b2e51](https://github.com/UnitedIncome/serverless-python-requirements/commit/e8b2e51c265792046bacc3946f22f7bd842c60e6)) ([Randy Westergren](https://github.com/rwestergren)) + +## [6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) + +### ⚠ BREAKING CHANGES + +- Changes default `dockerImage` used for building dependencies (now uses images from 
`public.ecr.aws/sam` repository) +- Requires `pipenv` in version `2022-04-08` or higher + +### Features + +- Introduce `requirePoetryLockFile` flag ([#728](https://github.com/serverless/serverless-python-requirements/pull/728)) ([e81d9e1](https://github.com/UnitedIncome/serverless-python-requirements/commit/e81d9e1824c135f110b4deccae2c26b0cbb26778)) ([François-Michel L'Heureux](https://github.com/FinchPowers)) +- Switch to official AWS docker images by default ([#724](https://github.com/UnitedIncome/serverless-python-requirements/issues/724)) ([4ba3bbe](https://github.com/UnitedIncome/serverless-python-requirements/commit/4ba3bbeb9296b4844feb476de695f33ee2a30056)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Bug Fixes + +- Adapt to support latest `pipenv` version ([#718](https://github.com/UnitedIncome/serverless-python-requirements/issues/718)) ([853da8d](https://github.com/UnitedIncome/serverless-python-requirements/commit/853da8d39921dc83a23d59fd825b2180814f87ff)) ([Anders Steiner](https://github.com/andidev) & [Randy Westergren](https://github.com/rwestergren) & [Piotr Grzesik](https://github.com/pgrzesik)) +- Properly recognize individual function ([#725](https://github.com/UnitedIncome/serverless-python-requirements/issues/725)) ([78795be](https://github.com/UnitedIncome/serverless-python-requirements/commit/78795be24eb08dc78acd7566778b3960c28b263c)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Maintenance Improvements + +- Improve error message for docker failures ([#723](https://github.com/serverless/serverless-python-requirements/pull/723))([cc146d0](https://github.com/UnitedIncome/serverless-python-requirements/commit/cc146d088d362187641dd5ae3e9d0129a14c60e2)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +## [5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) + +### Features + +- Support `dockerPrivateKey` to specify path to SSH key 
([#674](https://github.com/UnitedIncome/serverless-python-requirements/issues/674)) ([915bcad](https://github.com/UnitedIncome/serverless-python-requirements/commit/915bcadad2f8a3be5434d6e42771bc835271baf8)) ([Marcin Szleszynski](https://github.com/martinezpl)) +- Support individual packaging with `poetry` ([#682](https://github.com/UnitedIncome/serverless-python-requirements/issues/682)) ([ebd12cb](https://github.com/UnitedIncome/serverless-python-requirements/commit/ebd12cb14ea352fb08c0957f213bda7dcce800df)) ([Brandon White](https://github.com/BrandonLWhite)) + +### Maintenance Improvements + +- Log child process command output on error ([#679](https://github.com/UnitedIncome/serverless-python-requirements/issues/679)) ([ff11497](https://github.com/UnitedIncome/serverless-python-requirements/commit/ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7)) ([Andrei Zhemaituk](https://github.com/zhemaituk)) +- Replace `lodash.set` with `set-value` ([#676](https://github.com/UnitedIncome/serverless-python-requirements/issues/676)) ([3edf0e0](https://github.com/UnitedIncome/serverless-python-requirements/commit/3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16)) ([Marc Hassan](https://github.com/mhassan1)) + +### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) + +### Bug Fixes + +- Address unknown path format error in `wsl2` ([#667](https://github.com/UnitedIncome/serverless-python-requirements/issues/667)) ([b16c82d](https://github.com/UnitedIncome/serverless-python-requirements/commit/b16c82dbdd31ca7f61093bb6b8ed50be31908a24)) ([Shinichi Makino](https://github.com/snicmakino)) + +## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) + +### Features + +- Support requirements layer caching ([#644](https://github.com/UnitedIncome/serverless-python-requirements/issues/644)) 
([406f6ba](https://github.com/UnitedIncome/serverless-python-requirements/commit/406f6bac1ca934a34387048b5c00242aff3f581b)) ([Maciej Wilczyński](https://github.com/mLupine)) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) + +### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([#656](https://github.com/serverless/serverless-python-requirements/pull/656)) ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) + +### Maintenance Improvements + +- Adapt plugin to modern logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([8ff97e6](https://github.com/UnitedIncome/serverless-python-requirements/commit/8ff97e6b7c279334e417dbdb65e64d0de2656986)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt to `async` version of `spawn` ([#648](https://github.com/serverless/serverless-python-requirements/pull/648)) ([50c2850](https://github.com/UnitedIncome/serverless-python-requirements/commit/50c2850874ded795fd50ae377f1db817a0212e7d)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt v3 log writing interfaces ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([a79899a](https://github.com/UnitedIncome/serverless-python-requirements/commit/a79899ae5f6f66aa0c65e7fda8e0186d38ff446e)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Ensure proper verbose progress logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) 
([44b9591](https://github.com/UnitedIncome/serverless-python-requirements/commit/44b9591f01157a1811e3ca8b43e21265a155a976)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Use `ServerlessError` ([#649](https://github.com/serverless/serverless-python-requirements/pull/649)) ([cdb7111](https://github.com/UnitedIncome/serverless-python-requirements/commit/cdb71110bc9c69b5087b6e18fb353d65962afe4a)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..9d7afa9c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,75 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting our team at **hello@serverless.com**. As an alternative +feel free to reach out to any of us personally. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..900a425b --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,105 @@ +# Contributing Guidelines + +We are always looking to promote good contributors to be maintainers and provide them a front-row seat to serverless innovation. + +If you would like to be a maintainer for the [Serverless Framework](https://github.com/serverless/serverless) or any of our plugins, please get started with making code contributions and engaging with open issues/PRs. Also, please reach out to any of [Serverless organization](https://github.com/serverless) members to express your interest. + +We'd love to collaborate closely with amazing developers as we drive the development of this open technology into the future. + +Welcome, and thanks in advance for your help! 
+ +# How to contribute to `serverless-python-requirements` + +## Setup + +Pre-Reqs: + +- Python 3.9 +- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.9) +- Perl (used in the tests) +- Node v14 or v16 + +Then, to begin development: + +1. fork the repository +2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) +3. run `npm install` in its root folder +4. run the tests via `npm run test` + +## Getting started + +A good first step is to search for open [issues](https://github.com/serverless/serverless-python-requirements/issues). Issues are labeled, and some good issues to start with are labeled: [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) and [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted). + +## When you propose a new feature or bug fix + +Please make sure there is an open issue discussing your contribution before jumping into a Pull Request! +There are just a few situations (listed below) in which it is fine to submit PR without a corresponding issue: + +- Documentation update +- Obvious bug fix +- Maintenance improvement + +In all other cases please check if there's an open issue discussing the given proposal; if there is not, create an issue respecting all its template remarks. + +In non-trivial cases please propose and let us review an implementation spec (in the corresponding issue) before jumping into implementation. + +Do not submit draft PRs. Submit only finalized work which is ready for merge. If you have any doubts related to implementation work please discuss in the corresponding issue. + +Once a PR has been reviewed and some changes are suggested, please ensure to **re-request review** after all new changes are pushed. 
It's the best and quietest way to inform maintainers that your work is ready to be checked again. + +## When you want to work on an existing issue + +**Note:** Please write a quick comment in the corresponding issue and ask if the feature is still relevant and that you want to jump into the implementation. + +Check out our [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted) or [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) labels to find issues we want to move forward with your help. + +We will do our best to respond/review/merge your PR according to priority. We hope that you stay engaged with us during this period to ensure QA. Please note that the PR will be closed if there hasn't been any activity for a long time (~ 30 days) to keep us focused and keep the repo clean. + +## Reviewing Pull Requests + +Another really useful way to contribute is to review other people's Pull Requests. Having feedback from multiple people is helpful and reduces the overall time to make a final decision about the Pull Request. + +## Providing support + +The easiest thing you can do to help us move forward and make an impact on our progress is to simply provide support to other people having difficulties with their projects. + +You can do that by replying to [issues on GitHub](https://github.com/serverless/serverless-python-requirements/issues), chatting with other community members in [our Community Slack](https://www.serverless.com/slack), or [GitHub Discussions](https://github.com/serverless/serverless-python-requirements/discussions). + +--- + +# Code Style + +We aim for a clean, consistent code style. We're using [Prettier](https://prettier.io/) to confirm one code formatting style and [ESlint](https://eslint.org/) helps us to stay away from obvious issues that can be picked via static analysis. 
+ +Ideally, you should have Prettier and ESlint integrated into your code editor, which will help you not think about specific rules and be sure you submit the code that follows guidelines. + +## Verifying prettier formatting + +``` +npm run prettier-check +``` + +## Verifying linting style + +``` +npm run lint +``` + +## Other guidelines + +- Minimize [lodash](https://lodash.com/) usage - resort to it, only if given part of logic cannot be expressed easily with native language constructs +- When writing asynchronous code, ensure to take advantage of [async functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) and native `Promise` API. Do not rely on [Bluebird](http://bluebirdjs.com) even though still large parts of old code rely on it. We're looking forward to dropping this dependency in the near future. + +# Testing + +When proposing a new feature or fixing a bug, it is recommended to also provide sufficient test coverage. All tests live in `./test.js` module. + +# Our Code of Conduct + +Finally, to make sure you have a pleasant experience while being in our welcoming community, please read our [code of conduct](CODE_OF_CONDUCT.md). It outlines our core values and beliefs and will make working together a happier experience. + +Thanks again for being a contributor to the Serverless Community :tada:! 
+ +Cheers, + +The :zap: [Serverless](http://www.serverless.com) Team diff --git a/README.md b/README.md index bb2fc843..d9127adb 100644 --- a/README.md +++ b/README.md @@ -1,87 +1,189 @@ # Serverless Python Requirements [![serverless](http://public.serverless.com/badges/v3.svg)](http://www.serverless.com) -[![CircleCI](https://circleci.com/gh/UnitedIncome/serverless-python-requirements.svg?style=shield)](https://circleci.com/gh/UnitedIncome/serverless-python-requirements) -[![appveyor](https://ci.appveyor.com/api/projects/status/biel93xc535nxvi2?svg=true)](https://ci.appveyor.com/project/dschep/serverless-python-requirements) +![Github Actions](https://github.com/UnitedIncome/serverless-python-requirements/workflows/Test/badge.svg) [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from -`requirements.txt` and make them available in your `PYTHONPATH`. +A Serverless Framework plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. -**Requires Serverless >= v1.12** +--- + +_Originally developed by [**Capital One**](https://www.capitalone.com/tech/open-source/), now maintained in scope of Serverless, Inc_ + +_Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. 
Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ + +--- ## Install -``` +```shell sls plugin install -n serverless-python-requirements ``` -[:apple::beer::snake: Mac Brew installed Python notes](#applebeersnake-mac-brew-installed-python-notes) +This will automatically add the plugin to your project's `package.json` and the plugins section of its +`serverless.yml`. That's all that's needed for basic use! The plugin will now bundle your python +dependencies specified in your `requirements.txt` or `Pipfile` when you run `sls deploy`. + +For a more in depth introduction on how to use this plugin, check out +[this post on the Serverless Blog](https://serverless.com/blog/serverless-python-packaging/) + +If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-python-notes) about using python installed by brew. + +## Cross compiling -## Cross compiling! Compiling non-pure-Python modules or fetching their manylinux wheels is -supported on non-linux OSs via the use of Docker and the -[docker-lambda](https://github.com/lambci/docker-lambda) image. +supported on non-linux OSs via the use of Docker and [official AWS build](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-image-repositories.html) images. To enable docker usage, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerizePip: true ``` + The dockerizePip option supports a special case in addition to booleans of `'non-linux'` which makes it dockerize only on non-linux environments. - To utilize your own Docker container instead of the default, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerImage: :tag ``` + This must be the full image name and tag to use, including the runtime specific tag if applicable. 
Alternatively, you can define your Docker image in your own Dockerfile and add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerFile: ./path/to/Dockerfile ``` + With `Dockerfile` the path to the Dockerfile that must be in the current folder (or a subfolder). Please note the `dockerImage` and the `dockerFile` are mutually exclusive. To install requirements from private git repositories, add the following to your `serverless.yml`: + ```yaml custom: pythonRequirements: dockerizePip: true dockerSsh: true ``` + The `dockerSsh` option will mount your `$HOME/.ssh/id_rsa` and `$HOME/.ssh/known_hosts` as a -volume in the docker container. If your SSH key is password protected, you can use `ssh-agent` -because `$SSH_AUTH_SOCK` is also mounted & the env var set. +volume in the docker container. + +In case you want to use a different key, you can specify the path (absolute) to it through `dockerPrivateKey` option: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerSsh: true + dockerPrivateKey: /home/.ssh/id_ed25519 +``` + +If your SSH key is password protected, you can use `ssh-agent` +because `$SSH_AUTH_SOCK` is also mounted & the env var is set. It is important that the host of your private repositories has already been added in your `$HOME/.ssh/known_hosts` file, as the install process will fail otherwise due to host authenticity failure. +You can also pass environment variables to docker by specifying them in `dockerEnv` +option: + +```yaml +custom: + pythonRequirements: + dockerEnv: + - https_proxy +``` + [:checkered_flag: Windows notes](#checkered_flag-windows-dockerizepip-notes) -## Pipenv support :sparkles::cake::sparkles: -If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use -`pipenv lock --r` to generate them. 
It is fully compatible with all options such as `zip` and +## :sparkles::cake::sparkles: Pipenv support + +Requires `pipenv` in version `2022-04-08` or higher. + +If you include a `Pipfile` and have `pipenv` installed, this will use `pipenv` to generate requirements instead of a `requirements.txt`. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: + ```yaml custom: pythonRequirements: usePipenv: false ``` +## :sparkles::pencil::sparkles: Poetry support + +If you include a `pyproject.toml` and have `poetry` installed instead of a `requirements.txt` this will use +`poetry export --without-hashes -f requirements.txt -o requirements.txt --with-credentials` to generate them. It is fully compatible with all options such as `zip` and +`dockerizePip`. If you don't want this plugin to generate it for you, set the following option: + +```yaml +custom: + pythonRequirements: + usePoetry: false +``` + +Be aware that if no `poetry.lock` file is present, a new one will be generated on the fly. To help having predictable builds, +you can set the `requirePoetryLockFile` flag to true to throw an error when `poetry.lock` is missing. + +```yaml +custom: + pythonRequirements: + requirePoetryLockFile: false +``` + +If your Poetry configuration includes custom dependency groups, they will not be installed automatically. To include them in the deployment package, use the `poetryWithGroups`, `poetryWithoutGroups` and `poetryOnlyGroups` options which wrap `poetry export`'s `--with`, `--without` and `--only` parameters. + +```yaml +custom: + pythonRequirements: + poetryWithGroups: + - internal_dependencies + - lambda_dependencies +``` + +### Poetry with git dependencies + +Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter +(used to install all requirements in a specific folder). 
In order to fix that we remove all `-e` from the generated file but, +for that to work you need to add the git dependencies in a specific way. + +Instead of: + +```toml +[tool.poetry.dependencies] +bottle = {git = "git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` + +Use: + +```toml +[tool.poetry.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` + +Or, if you have an SSH key configured: + +```toml +[tool.poetry.dependencies] +bottle = {git = "ssh://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +``` ## Dealing with Lambda's size limitations + To help deal with potentially large dependencies (for example: `numpy`, `scipy` and `scikit-learn`) there is support for compressing the libraries. This does -require a minor change to your code to decompress them. To enable this add the -following to your `serverless.yml`: +require a minor change to your code to decompress them. To enable this add the +following to your `serverless.yml`: + ```yaml custom: pythonRequirements: @@ -89,6 +191,7 @@ custom: ``` and add this to your handler module before any code that imports your deps: + ```python try: import unzip_requirements @@ -96,11 +199,105 @@ except ImportError: pass ``` -## Omitting Packages +### Slim Package + +_Works on non 'win32' environments: Docker, WSL are included_ +To remove the tests, information and caches from the installed packages, +enable the `slim` option. This will: `strip` the `.so` files, remove `__pycache__` +and `dist-info` directories as well as `.pyc` and `.pyo` files. + +```yaml +custom: + pythonRequirements: + slim: true +``` + +#### Custom Removal Patterns + +To specify additional directories to remove from the installed packages, +define a list of patterns in the serverless config using the `slimPatterns` +option and glob syntax. These patterns will be added to the default ones (`**/*.py[c|o]`, `**/__pycache__*`, `**/*.dist-info*`). 
+Note, the glob syntax matches against whole paths, so to match a file in any +directory, start your pattern with `**/`. + +```yaml +custom: + pythonRequirements: + slim: true + slimPatterns: + - '**/*.egg-info*' +``` + +To overwrite the default patterns set the option `slimPatternsAppendDefaults` to `false` (`true` by default). + +```yaml +custom: + pythonRequirements: + slim: true + slimPatternsAppendDefaults: false + slimPatterns: + - '**/*.egg-info*' +``` + +This will remove all folders within the installed requirements that match +the names in `slimPatterns` + +#### Option not to strip binaries + +In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with: + +```yaml +custom: + pythonRequirements: + slim: true + strip: false +``` + +### Lambda Layer + +Another method for dealing with large dependencies is to put them into a +[Lambda Layer](https://docs.aws.amazon.com/lambda/latest/dg/configuration-layers.html). +Simply add the `layer` option to the configuration. + +```yaml +custom: + pythonRequirements: + layer: true +``` + +The requirements will be zipped up and a layer will be created automatically. +Now just add the reference to the functions that will use the layer. + +```yaml +functions: + hello: + handler: handler.hello + layers: + - Ref: PythonRequirementsLambdaLayer +``` + +If the layer requires additional or custom configuration, add them onto the `layer` option. + +```yaml +custom: + pythonRequirements: + layer: + name: ${self:provider.stage}-layerName + description: Python requirements lambda layer + compatibleRuntimes: + - python3.7 + licenseInfo: GPLv3 + allowedAccounts: + - '*' +``` + +## Omitting Packages + You can omit a package from deployment with the `noDeploy` option. Note that dependencies of omitted packages must explicitly be omitted too. 
-By default, this will not install the AWS SDKs that are already installed on -Lambda. This example makes it instead omit pytest: + +This example makes it instead omit pytest: + ```yaml custom: pythonRequirements: @@ -109,26 +306,67 @@ custom: ``` ## Extra Config Options -### extra pip arguments -You can specify extra arguments to be passed to pip like this: + +### Caching + +You can enable two kinds of caching with this plugin which are currently both ENABLED by default. +First, a download cache that will cache downloads that pip needs to compile the packages. +And second, what we call "static caching" which caches output of pip after compiling everything for your requirements file. +Since generally `requirements.txt` files rarely change, you will often see large amounts of speed improvements when enabling the static cache feature. +These caches will be shared between all your projects if no custom `cacheLocation` is specified (see below). + +_**Please note:** This has replaced the previously recommended usage of "--cache-dir" in the pipCmdExtraArgs_ + ```yaml custom: pythonRequirements: - dockerizePip: true - pipCmdExtraArgs: - - --cache-dir - - .requirements-cache + useDownloadCache: true + useStaticCache: true ``` -When using `--cache-dir` don't forget to also exclude it from the package. +### Other caching options + +There are two additional options related to caching. +You can specify where in your system that this plugin caches with the `cacheLocation` option. +By default it will figure out automatically where based on your username and your OS to store the cache via the [appdirectory](https://www.npmjs.com/package/appdirectory) module. +Additionally, you can specify how many max static caches to store with `staticCacheMaxVersions`, as a simple attempt to limit disk space usage for caching. +This is DISABLED (set to 0) by default. 
+Example: ```yaml -package: - exclude: - - .requirements-cache/** +custom: + pythonRequirements: + useStaticCache: true + useDownloadCache: true + cacheLocation: '/home/user/.my_cache_goes_here' + staticCacheMaxVersions: 10 +``` + +### Extra pip arguments + +You can specify extra arguments [supported by pip](https://pip.pypa.io/en/stable/reference/pip_install/#options) to be passed to pip like this: + +```yaml +custom: + pythonRequirements: + pipCmdExtraArgs: + - --compile +``` + +### Extra Docker arguments + +You can specify extra arguments to be passed to [docker build](https://docs.docker.com/engine/reference/commandline/build/) during the build step, and [docker run](https://docs.docker.com/engine/reference/run/) during the dockerized pip install step: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerBuildCmdExtraArgs: ['--build-arg', 'MY_GREAT_ARG=123'] + dockerRunCmdExtraArgs: ['-v', '${env:PWD}:/my-app'] ``` ### Customize requirements file name + [Some `pip` workflows involve using requirements files not named `requirements.txt`](https://www.kennethreitz.org/essays/a-better-pip-workflow). 
To support these, this plugin has the following option: @@ -140,9 +378,14 @@ custom: ``` ### Per-function requirements + +**Note: this feature does not work with Pipenv/Poetry, it requires `requirements.txt` +files for your Python modules.** + If you have different python functions, with different sets of requirements, you can avoid including all the unecessary dependencies of your functions by using the following structure: -``` + +```shell ├── serverless.yml ├── function1 │ ├── requirements.txt @@ -151,7 +394,9 @@ including all the unecessary dependencies of your functions by using the followi ├── requirements.txt └── index.py ``` + With the content of your `serverless.yml` containing: + ```yml package: individually: true @@ -164,19 +409,23 @@ functions: handler: index.handler module: function2 ``` + The result is 2 zip archives, with only the requirements for function1 in the first one, and only the requirements for function2 in the second one. Quick notes on the config file: - * The `module` field must be used to tell the plugin where to find the `requirements.txt` file for -each function. - * The `handler` field must not be prefixed by the folder name (already known through `module`) as -the root of the zip artifact is already the path to your function. + +- The `module` field must be used to tell the plugin where to find the `requirements.txt` file for + each function. +- The `handler` field must not be prefixed by the folder name (already known through `module`) as + the root of the zip artifact is already the path to your function. ### Customize Python executable + Sometimes your Python executable isn't available on your `$PATH` as `python2.7` or `python3.6` (for example, windows or using pyenv). To support this, this plugin has the following option: + ```yaml custom: pythonRequirements: @@ -184,11 +433,13 @@ custom: ``` ### Vendor library directory + For certain libraries, default packaging produces too large an installation, even when zipping. 
In those cases it may be necessary to tailor make a version of the module. In that case you can store them in a directory and use the `vendor` option, and the plugin will copy them along with all the other dependencies to install: + ```yaml custom: pythonRequirements: @@ -199,15 +450,27 @@ functions: vendor: ./hello-vendor # The option is also available at the function level ``` +## Manual invocation +The `.requirements` and `requirements.zip` (if using zip support) files are left +behind to speed things up on subsequent deploys. To clean them up, run: +```plaintext +sls requirements clean +``` -## Manual invocations +You can also create them (and `unzip_requirements` if +using zip support) manually with: -The `.requirements` and `requirements.zip`(if using zip support) files are left -behind to speed things up on subsequent deploys. To clean them up, run -`sls requirements clean`. You can also create them (and `unzip_requirements` if -using zip support) manually with `sls requirements install`. +```plaintext +sls requirements install +``` + +The pip download/static cache is outside the serverless folder, and should be manually cleaned when i.e. changing python versions: + +```plaintext +sls requirements cleanCache +``` ## Invalidate requirements caches on package @@ -215,54 +478,166 @@ If you are using your own Python library, you have to cleanup `.requirements` on any update. You can use the following option to cleanup `.requirements` everytime you package. -``` +```yaml custom: pythonRequirements: invalidateCaches: true ``` ## :apple::beer::snake: Mac Brew installed Python notes + [Brew wilfully breaks the `--target` option with no seeming intention to fix it](https://github.com/Homebrew/brew/pull/821) which causes issues since this uses that option. There are a few easy workarounds for this: -* Install Python from [python.org](https://wwwpython.org/downloads/) and specify it with the -[`pythonBin` option](#customize-python-executable). 
+ +- Install Python from [python.org](https://www.python.org/downloads/) and specify it with the + [`pythonBin` option](#customize-python-executable). OR -* Create a virtualenv and activate it while using serverless. +- Create a virtualenv and activate it while using serverless. OR -* [Install Docker](https://docs.docker.com/docker-for-mac/install/) and use the [`dockerizePip` option](#cross-compiling). +- [Install Docker](https://docs.docker.com/docker-for-mac/install/) and use the [`dockerizePip` option](#cross-compiling). Also, [brew seems to cause issues with pipenv](https://github.com/dschep/lambda-decorators/issues/4#event-1418928080), so make sure you install pipenv using pip. ## :checkered_flag: Windows `dockerizePip` notes + For usage of `dockerizePip` on Windows do Step 1 only if running serverless on windows, or do both Step 1 & 2 if running serverless inside WSL. 1. [Enabling shared volume in Windows Docker Taskbar settings](https://forums.docker.com/t/docker-data-volumes-and-windows-mounts/31499/2) 1. [Installing the Docker client on Windows Subsystem for Linux (Ubuntu)](https://medium.com/@sebagomez/installing-the-docker-client-on-ubuntus-windows-subsystem-for-linux-612b392a44c4) +## Native Code Dependencies During Build + +Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image with a `Dockerfile` like: + +```dockerfile +FROM public.ecr.aws/sam/build-python3.9 + +# Install your dependencies +RUN yum -y install mysql-devel +``` + +Then update your `serverless.yml`: + +```yaml +custom: + pythonRequirements: + dockerFile: Dockerfile +``` + +## Native Code Dependencies During Runtime + +Some Python packages require extra OS libraries (`*.so` files) at runtime. You need to manually include these files in the root directory of your Serverless package. The simplest way to do this is to use the `dockerExtraFiles` option. 
+ +For instance, the `mysqlclient` package requires `libmysqlclient.so.1020`. If you use the Dockerfile from the previous section, add an item to the `dockerExtraFiles` option in your `serverless.yml`: + +```yaml +custom: + pythonRequirements: + dockerExtraFiles: + - /usr/lib64/mysql57/libmysqlclient.so.1020 +``` + +Then verify the library gets included in your package: + +```bash +sls package +zipinfo .serverless/xxx.zip +``` + +If you can't see the library, you might need to adjust your package include/exclude configuration in `serverless.yml`. + +## Optimising packaging time + +If you wish to exclude most of the files in your project, and only include the source files of your lambdas and their dependencies you may well use an approach like this: + +```yaml +package: + individually: false + include: + - './src/lambda_one/**' + - './src/lambda_two/**' + exclude: + - '**' +``` + +This will be very slow. Serverless adds a default `"**"` include. If you are using the `cacheLocation` parameter to this plugin, this will result in all of the cached files' names being loaded and then subsequently discarded because of the exclude pattern. To avoid this happening you can add a negated include pattern, as is observed in the example below. + +Use this approach instead: + +```yaml +package: + individually: false + include: + - '!./**' + - './src/lambda_one/**' + - './src/lambda_two/**' + exclude: + - '**' +``` + +## Custom Provider Support + +### Scaleway + +This plugin is compatible with the [Scaleway Serverless Framework Plugin](https://github.com/scaleway/serverless-scaleway-functions) to package dependencies for Python functions deployed on [Scaleway](https://www.scaleway.com/en/serverless-functions/). 
To use it, add the following to your `serverless.yml`: + +```yaml +provider: + name: scaleway + runtime: python311 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions +``` + +To handle native dependencies, it's recommended to use the Docker builder with the image provided by Scaleway: + +```yaml +custom: + pythonRequirements: + # Can use any Python version supported by Scaleway + dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.11 +``` ## Contributors - * [@dschep](https://github.com/dschep) - Lead developer & maintainer - * [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - * [@abetomo](https://github.com/abetomo) - style & linting - * [@angstwad](https://github.com/angstwad) - `deploy --function` support - * [@mather](https://github.com/mather) - the cache invalidation option - * [@rmax](https://github.com/rmax) - the extra pip args option - * [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support - * [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux - * [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug - * [@Tethik](https://github.com/Tethik) - adding usePipenv option - * [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option - * [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` - * [@heri16](https://github.com/heri16) - fixing Docker support in Windows - * [@ryansb](https://github.com/ryansb) - package individually support - * [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option - to build a custom docker image, real per-function requirements, and the - `vendor` option - * [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, - switched to adding files straight to zip instead of creating symlinks, and - improved pip chache support when using docker. 
+ +- [@dschep](https://github.com/dschep) - Original developer +- [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes +- [@abetomo](https://github.com/abetomo) - style & linting +- [@angstwad](https://github.com/angstwad) - `deploy --function` support +- [@mather](https://github.com/mather) - the cache invalidation option +- [@rmax](https://github.com/rmax) - the extra pip args option +- [@bsamuel-ui](https://github.com/bsamuel-ui) - Python 3 support, current maintainer +- [@suxor42](https://github.com/suxor42) - fixing permission issues with Docker on Linux +- [@mbeltran213](https://github.com/mbeltran213) - fixing docker linux -u option bug +- [@Tethik](https://github.com/Tethik) - adding usePipenv option +- [@miketheman](https://github.com/miketheman) - fixing bug with includes when using zip option, update eslint, +- [@wattdave](https://github.com/wattdave) - fixing bug when using `deploymentBucket` +- [@heri16](https://github.com/heri16) - fixing Docker support in Windows +- [@ryansb](https://github.com/ryansb) - package individually support +- [@cgrimal](https://github.com/cgrimal) - Private SSH Repo access in Docker, `dockerFile` option + to build a custom docker image, real per-function requirements, and the `vendor` option +- [@kichik](https://github.com/kichik) - Imposed windows & `noDeploy` support, + switched to adding files straight to zip instead of creating symlinks, and + improved pip cache support when using docker. 
+- [@dee-me-tree-or-love](https://github.com/dee-me-tree-or-love) - the `slim` package option +- [@alexjurkiewicz](https://github.com/alexjurkiewicz) - [docs about docker workflows](#native-code-dependencies-during-build) +- [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching +- [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! +- Poetry support + - [@squaresurf](https://github.com/squaresurf) + - [@drice](https://github.com/drice) + - [@ofercaspi](https://github.com/ofercaspi) + - [@tpansino](https://github.com/tpansino) +- [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support +- [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD +- [@jacksgt](https://github.com/jacksgt) - Fixing pip issues +- [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment +- [@rileypriddle](https://github.com/rileypriddle) - Introducing schema validation for `module` property +- [@martinezpl](https://github.com/martinezpl) - Fixing test issues, adding `dockerPrivateKey` option diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index 7431e08b..00000000 --- a/appveyor.yml +++ /dev/null @@ -1,14 +0,0 @@ -version: '{build}' -init: -- ps: >- - Install-Product node 6 - - npm i -g serverless -build: off -test_script: -- cmd: >- - cd example - - npm i - - sls package --pythonBin=c:/python36/python.exe \ No newline at end of file diff --git a/circle.yml b/circle.yml deleted file mode 100644 index cef66fcb..00000000 --- a/circle.yml +++ /dev/null @@ -1,39 +0,0 @@ -version: 2 -jobs: - build: - working_directory: ~/sls-py-req - machine: - image: circleci/classic - steps: - - checkout - # Build python3.6. gross. 
(based on my gist here: https://git.io/vxMFG ) - - run: | - sudo apt-get update - sudo apt-get install build-essential tk-dev libncurses5-dev libncursesw5-dev libreadline6-dev libdb5.3-dev libgdbm-dev libsqlite3-dev libssl-dev libbz2-dev libexpat1-dev liblzma-dev zlib1g-dev - wget https://www.python.org/ftp/python/3.6.5/Python-3.6.5.tar.xz - tar xf Python-3.6.5.tar.xz - cd Python-3.6.5 - ./configure - make - sudo make altinstall - # install bats - - run: | - git clone https://github.com/sstephenson/bats.git - cd bats - sudo ./install.sh /usr/local - # other deps - - run: sudo apt -y update && sudo apt -y install python-pip python2.7 curl unzip - # instal pipenv - - run: sudo python3.6 -m pip install pipenv - # install nodejs - - run: curl -sL https://deb.nodesource.com/setup_6.x | sudo bash - && sudo apt -y install nodejs - # install serverless & depcheck - - run: npm install -g serverless depcheck - # install deps - - run: npm i - # depcheck - - run: depcheck . - # lint: - - run: npm run lint - # test! 
- - run: npm run test diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..d23a0d6b --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = { + rules: { + 'body-leading-blank': [2, 'always'], + 'footer-leading-blank': [2, 'always'], + 'header-max-length': [2, 'always', 72], + 'scope-enum': [2, 'always', ['', 'Config', 'Log']], + 'subject-case': [2, 'always', 'sentence-case'], + 'subject-empty': [2, 'never'], + 'subject-full-stop': [2, 'never', '.'], + 'type-case': [2, 'always', 'lower-case'], + 'type-empty': [2, 'never'], + 'type-enum': [ + 2, + 'always', + [ + 'build', + 'chore', + 'ci', + 'docs', + 'feat', + 'fix', + 'perf', + 'refactor', + 'style', + 'test', + ], + ], + }, +}; diff --git a/example/.gitignore b/example/.gitignore deleted file mode 100644 index 213a542c..00000000 --- a/example/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements - -# Project ignores -puck/ -serverless.yml.bak diff --git a/example/serverless.yml b/example/serverless.yml index 9b58ead1..e5c4c924 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/example_native_deps/.gitignore b/example_native_deps/.gitignore deleted file mode 100644 index abe9b1af..00000000 --- a/example_native_deps/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -# npm install -node_modules -package-lock.json - -# serverless -.serverless -.requirements.zip \ No newline at end of file diff --git a/example_native_deps/README.md b/example_native_deps/README.md index 6d3aae9c..016ee11e 100644 --- a/example_native_deps/README.md +++ b/example_native_deps/README.md @@ 
-1,10 +1,12 @@ -### native compilation example +# Native compilation example + Uses `dockerizePip` to deploy numpy-scipy-sklearn demo. -### test +## Test + As in other examples, use node version >= 6. -``` +```bash cd example_native_deps npm install --prefix . serverless-python-requirements sls deploy --verbose @@ -13,10 +15,10 @@ sls invoke -f hello --verbose --log ...expected result: -``` +```json { - "numpy": "1.13.3", - "scipy": "1.0.0", - "sklearn": "0.19.1" + "numpy": "1.13.3", + "scipy": "1.0.0", + "sklearn": "0.19.1" } ``` diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml index 0f4e632a..cfbd4913 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/index.js b/index.js index dda2fe8a..44906956 100644 --- a/index.js +++ b/index.js @@ -3,16 +3,17 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); +const values = require('lodash.values'); const { addVendorHelper, removeVendorHelper, - packRequirements + packRequirements, } = require('./lib/zip'); const { injectAllRequirements } = require('./lib/inject'); +const { layerRequirements } = require('./lib/layer'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); -const { cleanup } = require('./lib/clean'); - +const { cleanup, cleanupCache } = require('./lib/clean'); BbPromise.promisifyAll(fse); /** @@ -26,45 +27,79 @@ class ServerlessPythonRequirements { get options() { const options = Object.assign( { + slim: false, + slimPatterns: false, + slimPatternsAppendDefaults: true, zip: false, + layer: false, cleanupZipHelper: true, invalidateCaches: false, fileName: 'requirements.txt', usePipenv: true, - pythonBin: this.serverless.service.provider.runtime || 'python', + usePoetry: true, + pythonBin: + 
process.platform === 'win32' + ? 'python.exe' + : this.serverless.service.provider.runtime || 'python', dockerizePip: false, dockerSsh: false, + dockerPrivateKey: null, dockerImage: null, dockerFile: null, + dockerEnv: false, + dockerBuildCmdExtraArgs: [], + dockerRunCmdExtraArgs: [], + dockerExtraFiles: [], + dockerRootless: false, + useStaticCache: true, + useDownloadCache: true, + cacheLocation: false, + staticCacheMaxVersions: 0, pipCmdExtraArgs: [], - noDeploy: [ - 'boto3', - 'botocore', - 'docutils', - 'jmespath', - 'python-dateutil', - 's3transfer', - 'six', - 'pip', - 'setuptools' - ], - vendor: '' + noDeploy: [], + vendor: '', + requirePoetryLockFile: false, + poetryWithGroups: [], + poetryWithoutGroups: [], + poetryOnlyGroups: [], }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || {} ); + if ( + options.pythonBin === this.serverless.service.provider.runtime && + !options.pythonBin.startsWith('python') + ) { + options.pythonBin = 'python'; + } + if (/python3[0-9]+/.test(options.pythonBin)) { + // "google" and "scaleway" providers' runtimes use python3XX + options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1'); + } if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } + if (options.dockerizePip && process.platform === 'win32') { + options.pythonBin = 'python'; + } if ( !options.dockerizePip && - (options.dockerSsh || options.dockerImage || options.dockerFile) + (options.dockerSsh || + options.dockerImage || + options.dockerFile || + options.dockerPrivateKey) ) { if (!this.warningLogged) { - this.serverless.cli.log( - 'WARNING: You provided a docker related option but dockerizePip is set to false.' - ); + if (this.log) { + this.log.warning( + 'You provided a docker related option but dockerizePip is set to false.' + ); + } else { + this.serverless.cli.log( + 'WARNING: You provided a docker related option but dockerizePip is set to false.' 
+ ); + } this.warningLogged = true; } } @@ -72,79 +107,168 @@ class ServerlessPythonRequirements { throw new Error( 'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.' ); - } else if (!options.dockerFile) { - // If no dockerFile is provided, use default image - const defaultImage = `lambci/lambda:build-${ - this.serverless.service.provider.runtime - }`; - options.dockerImage = options.dockerImage || defaultImage; + } + + if (options.layer) { + // If layer was set as a boolean, set it to an empty object to use the layer defaults. + if (options.layer === true) { + options.layer = {}; + } } return options; } + get targetFuncs() { + let inputOpt = this.serverless.processedInput.options; + return inputOpt.function + ? [this.serverless.service.functions[inputOpt.function]] + : values(this.serverless.service.functions).filter((func) => !func.image); + } + /** * The plugin constructor * @param {Object} serverless * @param {Object} options + * @param {Object} v3Utils * @return {undefined} */ - constructor(serverless) { + constructor(serverless, cliOptions, v3Utils) { this.serverless = serverless; this.servicePath = this.serverless.config.servicePath; this.warningLogged = false; + if ( + this.serverless.configSchemaHandler && + this.serverless.configSchemaHandler.defineFunctionProperties + ) { + this.serverless.configSchemaHandler.defineFunctionProperties('aws', { + properties: { + module: { + type: 'string', + }, + }, + }); + } + + if (v3Utils) { + this.log = v3Utils.log; + this.progress = v3Utils.progress; + this.writeText = v3Utils.writeText; + } this.commands = { requirements: { commands: { clean: { usage: 'Remove .requirements and requirements.zip', - lifecycleEvents: ['clean'] + lifecycleEvents: ['clean'], }, install: { usage: 'install requirements manually', - lifecycleEvents: ['install'] - } + lifecycleEvents: ['install'], + }, + cleanCache: { + usage: + 'Removes all items in the pip download/static cache (if present)', + 
lifecycleEvents: ['cleanCache'], + }, + }, + }, + }; + + if (this.serverless.cli.generateCommandsHelp) { + Object.assign(this.commands.requirements, { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'], + }); + } else { + this.commands.requirements.type = 'container'; + } + + this.dockerImageForFunction = (funcOptions) => { + const runtime = + funcOptions.runtime || this.serverless.service.provider.runtime; + + const architecture = + funcOptions.architecture || + this.serverless.service.provider.architecture || + 'x86_64'; + const defaultImage = `public.ecr.aws/sam/build-${runtime}:latest-${architecture}`; + return this.options.dockerImage || defaultImage; + }; + + const isFunctionRuntimePython = (args) => { + // If functionObj.runtime is undefined, python. + if (!args[1].functionObj || !args[1].functionObj.runtime) { + return true; + } + return args[1].functionObj.runtime.startsWith('python'); + }; + + const clean = () => + BbPromise.bind(this).then(cleanup).then(removeVendorHelper); + + const setupArtifactPathCapturing = () => { + // Reference: + // https://github.com/serverless/serverless/blob/9591d5a232c641155613d23b0f88ca05ea51b436/lib/plugins/package/lib/packageService.js#L139 + // The packageService#packageFunction does set artifact path back to the function config. + // As long as the function config's "package" attribute wasn't undefined, we can still use it + // later to access the artifact path. 
+ for (const functionName in this.serverless.service.functions) { + if (!serverless.service.functions[functionName].package) { + serverless.service.functions[functionName].package = {}; } } }; - const before = () => - BbPromise.bind(this) + const before = () => { + if (!isFunctionRuntimePython(arguments)) { + return; + } + return BbPromise.bind(this) .then(pipfileToRequirements) .then(addVendorHelper) .then(installAllRequirements) - .then(packRequirements); + .then(packRequirements) + .then(setupArtifactPathCapturing); + }; - const after = () => - BbPromise.bind(this) + const after = () => { + if (!isFunctionRuntimePython(arguments)) { + return; + } + return BbPromise.bind(this) .then(removeVendorHelper) - .then(injectAllRequirements); + .then(layerRequirements) + .then(() => + injectAllRequirements.bind(this)( + arguments[1].functionObj && + arguments[1].functionObj.package.artifact + ) + ); + }; const invalidateCaches = () => { if (this.options.invalidateCaches) { - return BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper); + return clean; } return BbPromise.resolve(); }; + const cleanCache = () => BbPromise.bind(this).then(cleanupCache); + this.hooks = { 'after:package:cleanup': invalidateCaches, 'before:package:createDeploymentArtifacts': before, 'after:package:createDeploymentArtifacts': after, 'before:deploy:function:packageFunction': before, 'after:deploy:function:packageFunction': after, - 'requirements:install:install': () => - BbPromise.bind(this) - .then(pipfileToRequirements) - .then(addVendorHelper) - .then(installAllRequirements) - .then(packRequirements), - 'requirements:clean:clean': () => - BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper) + 'requirements:requirements': () => { + this.serverless.cli.generateCommandsHelp(['requirements']); + return BbPromise.resolve(); + }, + 'requirements:install:install': before, + 'requirements:clean:clean': clean, + 'requirements:cleanCache:cleanCache': cleanCache, }; } } diff 
--git a/lib/clean.js b/lib/clean.js index f3c4fbef..8aaf331e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -1,7 +1,8 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const path = require('path'); -const values = require('lodash.values'); +const glob = require('glob-all'); +const { getUserCachePath } = require('./shared'); BbPromise.promisifyAll(fse); @@ -13,7 +14,7 @@ function cleanup() { const artifacts = ['.requirements']; if (this.options.zip) { if (this.serverless.service.package.individually) { - values(this.serverless.service.functions).forEach(f => { + this.targetFuncs.forEach((f) => { artifacts.push(path.join(f.module, '.requirements.zip')); artifacts.push(path.join(f.module, 'unzip_requirements.py')); }); @@ -24,10 +25,51 @@ function cleanup() { } return BbPromise.all( - artifacts.map(artifact => + artifacts.map((artifact) => fse.removeAsync(path.join(this.servicePath, artifact)) ) ); } -module.exports = { cleanup }; +/** + * Clean up static cache, remove all items in there + * @return {Promise} + */ +function cleanupCache() { + const cacheLocation = getUserCachePath(this.options); + if (fse.existsSync(cacheLocation)) { + let cleanupProgress; + if (this.serverless) { + if (this.log) { + cleanupProgress = this.progress.get('python-cleanup-cache'); + cleanupProgress.notice('Removing static caches'); + this.log.info(`Removing static caches at: ${cacheLocation}`); + } else { + this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + } + } + + // Only remove cache folders that we added, just incase someone accidentally puts a weird + // static cache location so we don't remove a bunch of personal stuff + const promises = []; + glob + .sync([path.join(cacheLocation, '*slspyc/')], { mark: true, dot: false }) + .forEach((file) => { + promises.push(fse.removeAsync(file)); + }); + return BbPromise.all(promises).finally( + () => cleanupProgress && cleanupProgress.remove() + ); + } else { + if (this.serverless) { + if 
(this.log) { + this.log.info(`No static cache found`); + } else { + this.serverless.cli.log(`No static cache found`); + } + } + return BbPromise.resolve(); + } +} + +module.exports = { cleanup, cleanupCache }; diff --git a/lib/docker.js b/lib/docker.js index 4b914942..68cf935b 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -1,43 +1,89 @@ -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const isWsl = require('is-wsl'); +const fse = require('fs-extra'); +const path = require('path'); +const os = require('os'); /** * Helper function to run a docker command * @param {string[]} options * @return {Object} */ -function dockerCommand(options) { +async function dockerCommand(options, pluginInstance) { const cmd = 'docker'; - const ps = spawnSync(cmd, options, { encoding: 'utf-8' }); - if (ps.error) { - if (ps.error.code === 'ENOENT') { - throw new Error('docker not found! Please install it.'); + try { + return await spawn(cmd, options, { encoding: 'utf-8' }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new pluginInstance.serverless.classes.Error( + 'docker not found! Please install it.', + 'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND' + ); } - throw new Error(ps.error); - } else if (ps.status !== 0) { - throw new Error(ps.stderr); + throw e; } - return ps; } /** * Build the custom Docker image * @param {string} dockerFile + * @param {string[]} extraArgs * @return {string} The name of the built docker image. 
*/ -function buildImage(dockerFile) { +async function buildImage(dockerFile, extraArgs, pluginInstance) { const imageName = 'sls-py-reqs-custom'; - const options = ['build', '-f', dockerFile, '-t', imageName, '.']; - dockerCommand(options); + const options = ['build', '-f', dockerFile, '-t', imageName]; + + if (Array.isArray(extraArgs)) { + options.push(...extraArgs); + } else { + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); + } + + options.push('.'); + + await dockerCommand(options, pluginInstance); return imageName; } +/** + * Find a file that exists on all projects so we can test if Docker can see it too + * @param {string} servicePath + * @return {string} file name + */ +function findTestFile(servicePath, pluginInstance) { + if (fse.pathExistsSync(path.join(servicePath, 'serverless.yml'))) { + return 'serverless.yml'; + } + if (fse.pathExistsSync(path.join(servicePath, 'serverless.yaml'))) { + return 'serverless.yaml'; + } + if (fse.pathExistsSync(path.join(servicePath, 'serverless.json'))) { + return 'serverless.json'; + } + if (fse.pathExistsSync(path.join(servicePath, 'requirements.txt'))) { + return 'requirements.txt'; + } + throw new pluginInstance.serverless.classes.Error( + 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()', + 'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE' + ); +} + /** * Test bind path to make sure it's working * @param {string} bindPath * @return {boolean} */ -function tryBindPath(bindPath) { +async function tryBindPath(bindPath, testFile, pluginInstance) { + const { serverless, log } = pluginInstance; + const debug = process.env.SLS_DEBUG; const options = [ 'run', '--rm', @@ -45,29 +91,52 @@ function tryBindPath(bindPath) { `${bindPath}:/test`, 'alpine', 'ls', - '/test/serverless.yml' + `/test/${testFile}`, ]; try { - const ps = dockerCommand(options); - return ps.stdout.trim() === 
'/test/serverless.yml'; + if (debug) { + if (log) { + log.debug(`Trying bindPath ${bindPath} (${options})`); + } else { + serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + } + } + const ps = await dockerCommand(options, pluginInstance); + if (debug) { + if (log) { + log.debug(ps.stdoutBuffer.toString().trim()); + } else { + serverless.cli.log(ps.stdoutBuffer.toString().trim()); + } + } + return ps.stdoutBuffer.toString().trim() === `/test/${testFile}`; } catch (err) { + if (debug) { + if (log) { + log.debug(`Finding bindPath failed with ${err}`); + } else { + serverless.cli.log(`Finding bindPath failed with ${err}`); + } + } return false; } } /** * Get bind path depending on os platform + * @param {object} serverless * @param {string} servicePath * @return {string} The bind path. */ -function getBindPath(servicePath) { +async function getBindPath(servicePath, pluginInstance) { // Determine bind path - if (process.platform !== 'win32' && !isWsl) { + let isWsl1 = isWsl && !os.release().includes('microsoft-standard'); + if (process.platform !== 'win32' && !isWsl1) { return servicePath; } // test docker is available - dockerCommand(['version']); + await dockerCommand(['version'], pluginInstance); // find good bind path for Windows let bindPaths = []; @@ -92,16 +161,19 @@ function getBindPath(servicePath) { throw new Error(`Unknown path format ${baseBindPath.substr(10)}...`); } - bindPaths.push(`/${drive.toLowerCase()}/${path}`); + bindPaths.push(`/${drive.toLowerCase()}/${path}`); // Docker Toolbox (seems like Docker for Windows can support this too) + bindPaths.push(`${drive.toLowerCase()}:/${path}`); // Docker for Windows + // other options just in case bindPaths.push(`/${drive.toUpperCase()}/${path}`); bindPaths.push(`/mnt/${drive.toLowerCase()}/${path}`); bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); - bindPaths.push(`${drive.toLowerCase()}:/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); + const testFile = 
findTestFile(servicePath, pluginInstance); + for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (tryBindPath(bindPath)) { + if (await tryBindPath(bindPath, testFile, pluginInstance)) { return bindPath; } } @@ -114,7 +186,7 @@ function getBindPath(servicePath) { * @param {string} bindPath * @return {boolean} */ -function getDockerUid(bindPath) { +async function getDockerUid(bindPath, pluginInstance) { const options = [ 'run', '--rm', @@ -124,10 +196,10 @@ function getDockerUid(bindPath) { 'stat', '-c', '%u', - '/test/.serverless' + '/bin/sh', ]; - const ps = dockerCommand(options); - return ps.stdout.trim(); + const ps = await dockerCommand(options, pluginInstance); + return ps.stdoutBuffer.toString().trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; diff --git a/lib/inject.js b/lib/inject.js index e862e769..f4acde9d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -1,134 +1,179 @@ +const BbPromise = require('bluebird'); const fse = require('fs-extra'); const glob = require('glob-all'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const path = require('path'); -const values = require('lodash.values'); -const zipper = require('zip-local'); const JSZip = require('jszip'); +const { writeZip, zipFile } = require('./zipTree'); -/** - * write zip contents to a file - * @param {Object} zip - * @param {string} path - */ -function writeZip(zip, path) { - const buff = zip.generate({ - type: 'nodebuffer', - compression: 'DEFLATE' - }); - - fse.writeFileSync(path, buff); -} - -/** - * add a new file to a zip file from a buffer - * @param {Object} zip - * @param {string} path path to put in zip - * @param {string} buffer file contents - */ -function zipFile(zip, path, buffer) { - zip.file(path, buffer, { - date: new Date(0) // necessary to get the same hash when zipping the same content - }); -} +BbPromise.promisifyAll(fse); /** - * inject requirements into packaged 
application + * Inject requirements into packaged application. * @param {string} requirementsPath requirements folder path * @param {string} packagePath target package path + * @param {string} injectionRelativePath installation directory in target package * @param {Object} options our options object + * @return {Promise} the JSZip object constructed. */ -function injectRequirements(requirementsPath, packagePath, options) { +function injectRequirements( + requirementsPath, + packagePath, + injectionRelativePath, + options +) { const noDeploy = new Set(options.noDeploy || []); - const zip = zipper.sync.unzip(packagePath).lowLevel(); - - glob - .sync([path.join(requirementsPath, '**')], { mark: true, dot: true }) - .forEach(file => { - if (file.endsWith('/')) { - return; - } - - const relativeFile = path.relative(requirementsPath, file); - - if (relativeFile.match(/^__pycache__[\\/]/)) { - return; - } - if (noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0])) { - return; - } - - zipFile(zip, relativeFile, fse.readFileSync(file)); - }); - - writeZip(zip, packagePath); + return fse + .readFileAsync(packagePath) + .then((buffer) => JSZip.loadAsync(buffer)) + .then((zip) => + BbPromise.resolve( + glob.sync([path.join(requirementsPath, '**')], { + mark: true, + dot: true, + }) + ) + .map((file) => [ + file, + path.join( + injectionRelativePath, + path.relative(requirementsPath, file) + ), + ]) + .filter( + ([file, relativeFile]) => + !file.endsWith('/') && + !relativeFile.match(/^__pycache__[\\/]/) && + !noDeploy.has(relativeFile.split(/([-\\/]|\.py$|\.pyc$)/, 1)[0]) + ) + .map(([file, relativeFile]) => + Promise.all([file, relativeFile, fse.statAsync(file)]) + ) + .mapSeries(([file, relativeFile, fileStat]) => + zipFile(zip, relativeFile, fse.readFileAsync(file), { + unixPermissions: fileStat.mode, + createFolders: false, + }) + ) + .then(() => writeZip(zip, packagePath)) + ); } /** - * remove all modules but the selected module from a package - * @param 
{string} source original package - * @param {string} target result package + * Remove all modules but the selected module from a package. + * @param {string} source path to original package + * @param {string} target path to result package * @param {string} module module to keep + * @return {Promise} the JSZip object written out. */ function moveModuleUp(source, target, module) { - const sourceZip = zipper.sync.unzip(source).memory(); - const targetZip = JSZip.make(); - - sourceZip.contents().forEach(file => { - if (!file.startsWith(module + '/')) { - return; - } - zipFile( - targetZip, - file.replace(module + '/', ''), - sourceZip.read(file, 'buffer') - ); - }); - - writeZip(targetZip, target); + const targetZip = new JSZip(); + + return fse + .readFileAsync(source) + .then((buffer) => JSZip.loadAsync(buffer)) + .then((sourceZip) => + sourceZip.filter( + (file) => + file.startsWith(module + '/') || + file.startsWith('serverless_sdk/') || + file.match(/^s_.*\.py/) !== null + ) + ) + .map((srcZipObj) => + zipFile( + targetZip, + srcZipObj.name.startsWith(module + '/') + ? srcZipObj.name.replace(module + '/', '') + : srcZipObj.name, + srcZipObj.async('nodebuffer') + ) + ) + .then(() => writeZip(targetZip, target)); } /** - * inject requirements into packaged application + * Inject requirements into packaged application. + * @return {Promise} the combined promise for requirements injection. 
*/ -function injectAllRequirements() { - this.serverless.cli.log('Injecting required Python packages to package...'); +async function injectAllRequirements(funcArtifact) { + if (this.options.layer) { + // The requirements will be placed in a Layer, so just resolve + return BbPromise.resolve(); + } + + let injectProgress; + if (this.progress && this.log) { + injectProgress = this.progress.get('python-inject-requirements'); + injectProgress.update('Injecting required Python packages to package'); + this.log.info('Injecting required Python packages to package'); + } else { + this.serverless.cli.log('Injecting required Python packages to package...'); + } - if (this.options.zip) { - return; + let injectionRelativePath = '.'; + if (this.serverless.service.provider.name == 'scaleway') { + injectionRelativePath = 'package'; } - if (this.serverless.service.package.individually) { - values(this.serverless.service.functions).forEach(f => { - if ( - !(f.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ + try { + if (this.serverless.service.package.individually) { + await BbPromise.resolve(this.targetFuncs) + .filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) ) - ) { - return; - } - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (f.module !== '.') { - const artifactPath = path.join('.serverless', `${f.module}.zip`); - moveModuleUp(f.package.artifact, artifactPath, f.module); - f.package.artifact = artifactPath; - } - injectRequirements( - path.join('.serverless', f.module, 'requirements'), - f.package.artifact, + .map((func) => { + if (!get(func, 'module')) { + set(func, ['module'], '.'); + } + return func; + }) + .map((func) => { + if (func.module !== '.') { + const artifact = func.package + ? 
func.package.artifact + : funcArtifact; + const newArtifact = path.join( + '.serverless', + `${func.module}-${func.name}.zip` + ); + func.package.artifact = newArtifact; + return moveModuleUp(artifact, newArtifact, func.module).then( + () => func + ); + } else { + return func; + } + }) + .map((func) => { + return this.options.zip + ? func + : injectRequirements( + path.join( + this.serverless.serviceDir, + '.serverless', + func.module, + 'requirements' + ), + func.package.artifact, + injectionRelativePath, + this.options + ); + }); + } else if (!this.options.zip) { + await injectRequirements( + path.join(this.serverless.serviceDir, '.serverless', 'requirements'), + this.serverless.service.package.artifact || funcArtifact, + injectionRelativePath, this.options ); - }); - } else { - injectRequirements( - path.join('.serverless', 'requirements'), - this.serverless.service.package.artifact, - this.options - ); + } + } finally { + injectProgress && injectProgress.remove(); } } diff --git a/lib/layer.js b/lib/layer.js new file mode 100644 index 00000000..6fe9ca4c --- /dev/null +++ b/lib/layer.js @@ -0,0 +1,110 @@ +const BbPromise = require('bluebird'); +const fse = require('fs-extra'); +const path = require('path'); +const JSZip = require('jszip'); +const { writeZip, addTree } = require('./zipTree'); +const { sha256Path, getRequirementsLayerPath } = require('./shared'); + +BbPromise.promisifyAll(fse); + +/** + * Zip up requirements to be used as layer package. + * @return {Promise} the JSZip object constructed. 
+ */ +function zipRequirements() { + const src = path.join('.serverless', 'requirements'); + const reqChecksum = sha256Path(path.join('.serverless', 'requirements.txt')); + const targetZipPath = path.join('.serverless', 'pythonRequirements.zip'); + const zipCachePath = getRequirementsLayerPath( + reqChecksum, + targetZipPath, + this.options, + this.serverless + ); + + const promises = []; + if (fse.existsSync(zipCachePath)) { + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update( + 'Using cached Python Requirements Lambda Layer file' + ); + this.log.info('Found cached Python Requirements Lambda Layer file'); + } else { + this.serverless.cli.log( + 'Found cached Python Requirements Lambda Layer file' + ); + } + } else { + const rootZip = new JSZip(); + const runtimepath = 'python'; + + promises.push( + addTree(rootZip.folder(runtimepath), src).then(() => + writeZip(rootZip, zipCachePath) + ) + ); + } + return BbPromise.all(promises).then(() => { + if (zipCachePath !== targetZipPath) { + if (process.platform === 'win32') { + fse.copySync(zipCachePath, targetZipPath); + } else { + fse.symlink(zipCachePath, targetZipPath, 'file'); + } + } + }); +} + +/** + * Creates a layer on the serverless service for the requirements zip. 
+ * @return {Promise} empty promise + */ +function createLayers() { + if (!this.serverless.service.layers) { + this.serverless.service.layers = {}; + } + this.serverless.service.layers['pythonRequirements'] = Object.assign( + { + artifact: path.join('.serverless', 'pythonRequirements.zip'), + name: `${ + this.serverless.service.service + }-${this.serverless.providers.aws.getStage()}-python-requirements`, + description: + 'Python requirements generated by serverless-python-requirements.', + compatibleRuntimes: [this.serverless.service.provider.runtime], + }, + this.options.layer + ); + + return BbPromise.resolve(); +} + +/** + * Creates a layer from the installed requirements. + * @return {Promise} the combined promise for requirements layer. + */ +function layerRequirements() { + if (!this.options.layer) { + return BbPromise.resolve(); + } + + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update('Packaging Python Requirements Lambda Layer'); + this.log.info('Packaging Python Requirements Lambda Layer'); + } else { + this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + } + + return BbPromise.bind(this) + .then(zipRequirements) + .then(createLayers) + .finally(() => layerProgress && layerProgress.remove()); +} + +module.exports = { + layerRequirements, +}; diff --git a/lib/pip.js b/lib/pip.js index a5647d84..40140d36 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -2,170 +2,547 @@ const fse = require('fs-extra'); const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); -const { spawnSync } = require('child_process'); -const values = require('lodash.values'); +const set = require('set-value'); +const spawn = require('child-process-ext/spawn'); +const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); +const { getStripCommand, 
getStripMode, deleteFiles } = require('./slim'); +const { isPoetryProject, pyprojectTomlToRequirements } = require('./poetry'); +const { + checkForAndDeleteMaxCacheVersions, + sha256Path, + getRequirementsWorkingPath, + getUserCachePath, +} = require('./shared'); /** - * Install requirements described in requirementsPath to targetFolder + * Omit empty commands. + * In this context, a "command" is a list of arguments. An empty list or falsy value is ommitted. + * @param {string[][]} many commands to merge. + * @return {string[][]} a list of valid commands. + */ +function filterCommands(commands) { + return commands.filter((cmd) => Boolean(cmd) && cmd.length > 0); +} + +/** + * Render zero or more commands as a single command for a Unix environment. + * In this context, a "command" is a list of arguments. An empty list or falsy value is ommitted. + * + * @param {string[][]} many commands to merge. + * @return {string[]} a single list of words. + */ +function mergeCommands(commands) { + const cmds = filterCommands(commands); + if (cmds.length === 0) { + throw new Error('Expected at least one non-empty command'); + } else if (cmds.length === 1) { + return cmds[0]; + } else { + // Quote the arguments in each command and join them all using &&. 
+ const script = cmds.map(quote).join(' && '); + return ['/bin/sh', '-c', script]; + } +} + +/** + * Just generate the requirements file in the .serverless folder * @param {string} requirementsPath - * @param {string} targetFolder + * @param {string} targetFile * @param {Object} serverless * @param {string} servicePath * @param {Object} options * @return {undefined} */ -function installRequirements( +function generateRequirementsFile( requirementsPath, - targetFolder, - serverless, - servicePath, - options + targetFile, + pluginInstance ) { - // Create target folder if it does not exist - const targetRequirementsFolder = path.join(targetFolder, 'requirements'); - fse.ensureDirSync(targetRequirementsFolder); + const { serverless, servicePath, options, log } = pluginInstance; + const modulePath = path.dirname(requirementsPath); + if (options.usePoetry && isPoetryProject(modulePath)) { + filterRequirementsFile(targetFile, targetFile, pluginInstance); + if (log) { + log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + ); + } + } else if ( + options.usePipenv && + fse.existsSync(path.join(servicePath, 'Pipfile')) + ) { + filterRequirementsFile( + path.join(servicePath, '.serverless/requirements.txt'), + targetFile, + pluginInstance + ); + if (log) { + log.info(`Parsed requirements.txt from Pipfile in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from Pipfile in ${targetFile}...` + ); + } + } else { + filterRequirementsFile(requirementsPath, targetFile, pluginInstance); + if (log) { + log.info( + `Generated requirements from ${requirementsPath} in ${targetFile}` + ); + } else { + serverless.cli.log( + `Generated requirements from ${requirementsPath} in ${targetFile}...` + ); + } + } +} - const dotSlsReqs = path.join(targetFolder, 'requirements.txt'); - if (options.usePipenv && fse.existsSync(path.join(servicePath, 
'Pipfile'))) { - generateRequirementsFile(dotSlsReqs, dotSlsReqs, options); +async function pipAcceptsSystem(pythonBin, pluginInstance) { + // Check if pip has Debian's --system option and set it if so + try { + const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']); + return ( + pipTestRes.stdoutBuffer && + pipTestRes.stdoutBuffer.toString().indexOf('--system') >= 0 + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new pluginInstance.serverless.classes.Error( + `${pythonBin} not found! Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND' + ); + } + throw e; + } +} + +/** + * Install requirements described from requirements in the targetFolder into that same targetFolder + * @param {string} targetFolder + * @param {Object} pluginInstance + * @param {Object} funcOptions + * @return {undefined} + */ +async function installRequirements(targetFolder, pluginInstance, funcOptions) { + const { options, serverless, log, progress, dockerImageForFunction } = + pluginInstance; + const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); + + let installProgress; + if (progress) { + log.info(`Installing requirements from "${targetRequirementsTxt}"`); + installProgress = progress.get('python-install'); + installProgress.update('Installing requirements'); } else { - generateRequirementsFile(requirementsPath, dotSlsReqs, options); + serverless.cli.log( + `Installing requirements from ${targetRequirementsTxt} ...` + ); } - serverless.cli.log( - `Installing requirements of ${requirementsPath} in ${targetFolder}...` - ); + try { + const dockerCmd = []; + const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - let cmd; - let cmdOptions; - let pipCmd = [ - options.pythonBin, - '-m', - 'pip', - 'install', - '-t', - dockerPathForWin(options, targetRequirementsFolder), - '-r', - dockerPathForWin(options, dotSlsReqs), - ...options.pipCmdExtraArgs - 
]; - if (!options.dockerizePip) { - // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(options.pythonBin, [ - '-m', - 'pip', - 'help', - 'install' - ]); - if (pipTestRes.error) { - if (pipTestRes.error.code === 'ENOENT') { - throw new Error( - `${options.pythonBin} not found! ` + 'Try the pythonBin option.' + if ( + Array.isArray(options.pipCmdExtraArgs) && + options.pipCmdExtraArgs.length > 0 + ) { + options.pipCmdExtraArgs.forEach((cmd) => { + const parts = cmd.split(/\s+/, 2); + pipCmd.push(...parts); + }); + } + + const pipCmds = [pipCmd]; + const postCmds = []; + // Check if we're using the legacy --cache-dir command... + if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { + if (options.dockerizePip) { + throw new pluginInstance.serverless.classes.Error( + 'You cannot use --cache-dir with Docker any more, please use the new option useDownloadCache instead. Please see: https://github.com/UnitedIncome/serverless-python-requirements#caching for more details.', + 'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID' ); + } else { + if (log) { + log.warning( + 'You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + serverless.cli.log( + '==================================================' + ); + serverless.cli.log( + 'Warning: You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. 
Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + serverless.cli.log( + '==================================================' + ); + } } - throw pipTestRes.error; - } - if (pipTestRes.stdout.toString().indexOf('--system') >= 0) { - pipCmd.push('--system'); } - } - if (options.dockerizePip) { - cmd = 'docker'; - // Build docker image if required - let dockerImage; - if (options.dockerFile) { - serverless.cli.log( - `Building custom docker image from ${options.dockerFile}...` + if (!options.dockerizePip) { + // Push our local OS-specific paths for requirements and target directory + pipCmd.push( + '-t', + dockerPathForWin(targetFolder), + '-r', + dockerPathForWin(targetRequirementsTxt) ); - dockerImage = buildImage(options.dockerFile); - } else { - dockerImage = options.dockerImage; + // If we want a download cache... + if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + pipCmd.push('--cache-dir', downloadCacheDir); + } + + if (await pipAcceptsSystem(options.pythonBin, pluginInstance)) { + pipCmd.push('--system'); + } } - serverless.cli.log(`Docker Image: ${dockerImage}`); - // Prepare bind path depending on os platform - const bindPath = getBindPath(servicePath); + // If we are dockerizing pip + if (options.dockerizePip) { + // Push docker-specific paths for requirements and target directory + pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); - cmdOptions = ['run', '--rm', '-v', `${bindPath}:/var/task:z`]; - if (options.dockerSsh) { - // Mount necessary ssh files to work with private repos - cmdOptions.push( - '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z` - ); - cmdOptions.push( - '-v', - 
`${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z` + // Build docker image if required + let dockerImage; + if (options.dockerFile) { + let buildDockerImageProgress; + if (progress) { + buildDockerImageProgress = progress.get( + 'python-install-build-docker' + ); + buildDockerImageProgress.update( + `Building custom docker image from ${options.dockerFile}` + ); + } else { + serverless.cli.log( + `Building custom docker image from ${options.dockerFile}...` + ); + } + try { + dockerImage = await buildImage( + options.dockerFile, + options.dockerBuildCmdExtraArgs, + pluginInstance + ); + } finally { + buildDockerImageProgress && buildDockerImageProgress.remove(); + } + } else { + dockerImage = dockerImageForFunction(funcOptions); + } + if (log) { + log.info(`Docker Image: ${dockerImage}`); + } else { + serverless.cli.log(`Docker Image: ${dockerImage}`); + } + + // Prepare bind path depending on os platform + const bindPath = dockerPathForWin( + await getBindPath(targetFolder, pluginInstance) ); - cmdOptions.push('-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`); - cmdOptions.push('-e', 'SSH_AUTH_SOCK=/tmp/ssh_sock'); - } - if (process.platform === 'linux') { - // Use same user so requirements folder is not root and so --cache-dir works - cmdOptions.push('-u', `${process.getuid()}`); - // const stripCmd = quote([ - // 'find', targetRequirementsFolder, - // '-name', '"*.so"', - // '-exec', 'strip', '{}', '\;', - // ]); - // pipCmd = ['/bin/bash', '-c', '"' + pipCmd + ' && ' + stripCmd + ' && ' + chownCmd + '"']; + + dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); + if (options.dockerSsh) { + const homePath = require('os').homedir(); + const sshKeyPath = + options.dockerPrivateKey || `${homePath}/.ssh/id_rsa`; + + // Mount necessary ssh files to work with private repos + dockerCmd.push( + '-v', + `${sshKeyPath}:/root/.ssh/${sshKeyPath.split('/').splice(-1)[0]}:z`, + '-v', + 
`${homePath}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', + `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', + 'SSH_AUTH_SOCK=/tmp/ssh_sock' + ); + } + + // If we want a download cache... + const dockerDownloadCacheDir = '/var/useDownloadCache'; + if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + // This little hack is necessary because getBindPath requires something inside of it to test... + // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use + // its own temp file) + fse.closeSync( + fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + ); + const windowsized = await getBindPath(downloadCacheDir, pluginInstance); + // And now push it to a volume mount and to pip... 
+ dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); + } + + if (options.dockerEnv) { + // Add environment variables to docker run cmd + options.dockerEnv.forEach(function (item) { + dockerCmd.push('-e', item); + }); + } + + if (process.platform === 'linux') { + // Use same user so requirements folder is not root and so --cache-dir works + if (options.useDownloadCache) { + // Set the ownership of the download cache dir to root + pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + // Install requirements with pip + // Set the ownership of the current folder to user + // If you use docker-rootless, you don't need to set the ownership + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task', + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', '/var/task']); + } + } else { + // Use same user so --cache-dir works + dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance)); + } + + for (let path of options.dockerExtraFiles) { + pipCmds.push(['cp', path, '/var/task/']); + } + + if (process.platform === 'linux') { + if (options.useDownloadCache) { + // Set the ownership of the download cache dir back to user + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + } + } + + if (Array.isArray(options.dockerRunCmdExtraArgs)) { + dockerCmd.push(...options.dockerRunCmdExtraArgs); + } else { + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); + } + + dockerCmd.push(dockerImage); + } + + // If enabled slimming, strip so files + switch (getStripMode(options)) { + case 'docker': + pipCmds.push(getStripCommand(options, 
'/var/task')); + break; + case 'direct': + postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); + break; + } + + let spawnArgs = { shell: true }; + if (process.env.SLS_DEBUG) { + spawnArgs.stdio = 'inherit'; + } + let mainCmds = []; + if (dockerCmd.length) { + dockerCmd.push(...mergeCommands(pipCmds)); + mainCmds = [dockerCmd]; } else { - // Use same user so --cache-dir works - cmdOptions.push('-u', getDockerUid(bindPath)); + mainCmds = pipCmds; } - cmdOptions.push(dockerImage); - cmdOptions.push(...pipCmd); - } else { - cmd = pipCmd[0]; - cmdOptions = pipCmd.slice(1); - } - const res = spawnSync(cmd, cmdOptions, { cwd: servicePath, shell: true }); - if (res.error) { - if (res.error.code === 'ENOENT') { - if (options.dockerizePip) { - throw new Error('docker not found! Please install it.'); + mainCmds.push(...postCmds); + + if (log) { + log.info(`Running ${quote(dockerCmd)}...`); + } else { + serverless.cli.log(`Running ${quote(dockerCmd)}...`); + } + + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + const advice = + cmd.indexOf('python') > -1 + ? 'Try the pythonBin option' + : 'Please install it'; + throw new pluginInstance.serverless.classes.Error( + `${cmd} not found! ${advice}`, + 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' + ); + } + + if (cmd === 'docker' && e.stderrBuffer) { + throw new pluginInstance.serverless.classes.Error( + `Running "${cmd} ${args.join(' ')}" failed with: "${e.stderrBuffer + .toString() + .trim()}"`, + 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' + ); + } + + if (log) { + log.error(`Stdout: ${e.stdoutBuffer}`); + log.error(`Stderr: ${e.stderrBuffer}`); + } else { + serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); + serverless.cli.log(`Stderr: ${e.stderrBuffer}`); + } + throw e; } - throw new Error( - `${options.pythonBin} not found! 
Try the pythonBin option.` - ); } - throw res.error; - } - if (res.status !== 0) { - throw new Error(res.stderr); + // If enabled slimming, delete files in slimPatterns + if (options.slim === true || options.slim === 'true') { + deleteFiles(options, targetFolder); + } + } finally { + installProgress && installProgress.remove(); } } /** - * convert path from Windows style to Linux style, if needed - * @param {Object} options + * Convert path from Windows style to Linux style, if needed. * @param {string} path * @return {string} */ -function dockerPathForWin(options, path) { - if (process.platform === 'win32' && options.dockerizePip) { - return path.replace('\\', '/'); +function dockerPathForWin(path) { + if (process.platform === 'win32') { + return path.replace(/\\/g, '/'); + } else { + return path; } - return path; +} + +/** + * get requirements from requirements.txt + * @param {string} source + * @return {string[]} + */ +function getRequirements(source) { + const requirements = fse + .readFileSync(source, { encoding: 'utf-8' }) + .replace(/\\\n/g, ' ') + .split(/\r?\n/); + + return requirements.reduce((acc, req) => { + req = req.trim(); + if (!req.startsWith('-r')) { + return [...acc, req]; + } + source = path.join(path.dirname(source), req.replace(/^-r\s+/, '')); + return [...acc, ...getRequirements(source)]; + }, []); } /** create a filtered requirements.txt without anything from noDeploy + * then remove all comments and empty lines, and sort the list which + * assist with matching the static cache. 
The sorting will skip any + * lines starting with -- as those are typically ordered at the + * start of a file ( eg: --index-url / --extra-index-url ) or any + * lines that start with -c, -e, -f, -i or -r, Please see: + * https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format * @param {string} source requirements * @param {string} target requirements where results are written * @param {Object} options */ -function generateRequirementsFile(source, target, options) { +function filterRequirementsFile(source, target, { options, serverless, log }) { const noDeploy = new Set(options.noDeploy || []); - const requirements = fse - .readFileSync(source, { encoding: 'utf-8' }) - .split(/\r?\n/); - const filteredRequirements = requirements.filter(req => { + const requirements = getRequirements(source); + var prepend = []; + const filteredRequirements = requirements.filter((req) => { + req = req.trim(); + if (req.startsWith('#')) { + // Skip comments + return false; + } else if ( + req.startsWith('--') || + req.startsWith('-c') || + req.startsWith('-e') || + req.startsWith('-f') || + req.startsWith('-i') || + req.startsWith('-r') + ) { + if (req.startsWith('-e')) { + // strip out editable flags + // not required inside final archive and avoids pip bugs + // see https://github.com/UnitedIncome/serverless-python-requirements/issues/240 + req = req.split('-e')[1].trim(); + if (log) { + log.warning(`Stripping -e flag from requirement ${req}`); + } else { + serverless.cli.log( + `Warning: Stripping -e flag from requirement ${req}` + ); + } + } + + // Keep options for later + prepend.push(req); + return false; + } else if (req === '') { + return false; + } return !noDeploy.has(req.split(/[=<> \t]/)[0].trim()); }); - fse.writeFileSync(target, filteredRequirements.join('\n')); + filteredRequirements.sort(); // Sort remaining alphabetically + // Then prepend any options from above in the same order + for (let item of prepend.reverse()) { + if (item && item.length 
> 0) { + filteredRequirements.unshift(item); + } + } + fse.writeFileSync(target, filteredRequirements.join('\n') + '\n'); } /** @@ -175,17 +552,23 @@ function generateRequirementsFile(source, target, options) { * @param {Object} serverless * @return {undefined} */ -function copyVendors(vendorFolder, targetFolder, serverless) { +function copyVendors(vendorFolder, targetFolder, { serverless, log }) { // Create target folder if it does not exist - const targetRequirementsFolder = path.join(targetFolder, 'requirements'); + fse.ensureDirSync(targetFolder); - serverless.cli.log( - `Copying vendor libraries from ${vendorFolder} to ${targetRequirementsFolder}...` - ); + if (log) { + log.info( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}` + ); + } else { + serverless.cli.log( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` + ); + } - fse.readdirSync(vendorFolder).map(file => { + fse.readdirSync(vendorFolder).map((file) => { let source = path.join(vendorFolder, file); - let dest = path.join(targetRequirementsFolder, file); + let dest = path.join(targetFolder, file); if (fse.existsSync(dest)) { rimraf.sync(dest); } @@ -194,47 +577,227 @@ function copyVendors(vendorFolder, targetFolder, serverless) { } /** - * pip install the requirements to the .serverless/requirements directory + * This checks if requirements file exists. + * @param {string} servicePath + * @param {Object} options + * @param {string} fileName + */ +function requirementsFileExists(servicePath, options, fileName) { + if (options.usePoetry && isPoetryProject(path.dirname(fileName))) { + return true; + } + + if (options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile'))) { + return true; + } + + if (fse.existsSync(fileName)) { + return true; + } + + return false; +} + +/** + * This evaluates if requirements are actually needed to be installed, but fails + * gracefully if no req file is found intentionally. 
It also assists with code + * re-use for this logic pertaining to individually packaged functions + * @param {string} servicePath + * @param {string} modulePath + * @param {Object} options + * @param {Object} funcOptions + * @param {Object} serverless + * @return {string} + */ +async function installRequirementsIfNeeded( + modulePath, + funcOptions, + pluginInstance +) { + const { servicePath, options, serverless } = pluginInstance; + // Our source requirements, under our service path, and our module path (if specified) + const fileName = path.join(servicePath, modulePath, options.fileName); + + await pyprojectTomlToRequirements(modulePath, pluginInstance); + + // Skip requirements generation, if requirements file doesn't exist + if (!requirementsFileExists(servicePath, options, fileName)) { + return false; + } + + let requirementsTxtDirectory; + // Copy our requirements to another path in .serverless (incase of individually packaged) + if (modulePath && modulePath !== '.') { + requirementsTxtDirectory = path.join( + servicePath, + '.serverless', + modulePath + ); + } else { + requirementsTxtDirectory = path.join(servicePath, '.serverless'); + } + fse.ensureDirSync(requirementsTxtDirectory); + const slsReqsTxt = path.join(requirementsTxtDirectory, 'requirements.txt'); + + generateRequirementsFile(fileName, slsReqsTxt, pluginInstance); + + // If no requirements file or an empty requirements file, then do nothing + if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) { + if (pluginInstance.log) { + pluginInstance.log.info( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } else { + serverless.cli.log( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } + return false; + } + + // Then generate our MD5 Sum of this requirements file to determine where it should "go" to and/or pull cache from + const reqChecksum = sha256Path(slsReqsTxt); + + // Then figure out where this cache should be, if we're caching, 
if we're in a module, etc + const workingReqsFolder = getRequirementsWorkingPath( + reqChecksum, + requirementsTxtDirectory, + options, + serverless + ); + + // Check if our static cache is present and is valid + if (fse.existsSync(workingReqsFolder)) { + if ( + fse.existsSync(path.join(workingReqsFolder, '.completed_requirements')) && + workingReqsFolder.endsWith('_slspyc') + ) { + if (pluginInstance.log) { + pluginInstance.log.info( + `Using static cache of requirements found at ${workingReqsFolder}` + ); + } else { + serverless.cli.log( + `Using static cache of requirements found at ${workingReqsFolder} ...` + ); + } + // We'll "touch" the folder, as to bring it to the start of the FIFO cache + fse.utimesSync(workingReqsFolder, new Date(), new Date()); + return workingReqsFolder; + } + // Remove our old folder if it didn't complete properly, but _just incase_ only remove it if named properly... + if ( + workingReqsFolder.endsWith('_slspyc') || + workingReqsFolder.endsWith('.requirements') + ) { + rimraf.sync(workingReqsFolder); + } + } + + // Ensuring the working reqs folder exists + fse.ensureDirSync(workingReqsFolder); + + // Copy our requirements.txt into our working folder... 
+ fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); + + // Then install our requirements from this folder + await installRequirements(workingReqsFolder, pluginInstance, funcOptions); + + // Copy vendor libraries to requirements folder + if (options.vendor) { + copyVendors(options.vendor, workingReqsFolder, pluginInstance); + } + if (funcOptions.vendor) { + copyVendors(funcOptions.vendor, workingReqsFolder, pluginInstance); + } + + // Then touch our ".completed_requirements" file so we know we can use this for static cache + if (options.useStaticCache) { + fse.closeSync( + fse.openSync(path.join(workingReqsFolder, '.completed_requirements'), 'w') + ); + } + return workingReqsFolder; +} + +/** + * pip install the requirements to the requirements directory * @return {undefined} */ -function installAllRequirements() { - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); +async function installAllRequirements() { + // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + // First, check and delete cache versions, if enabled + checkForAndDeleteMaxCacheVersions(this); + + // Then if we're going to package functions individually... 
if (this.serverless.service.package.individually) { let doneModules = []; - values(this.serverless.service.functions).forEach(f => { + const filteredFuncs = this.targetFuncs.filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) + ); + + for (const f of filteredFuncs) { if (!get(f, 'module')) { set(f, ['module'], '.'); } + + // If we didn't already process a module (functions can re-use modules) if (!doneModules.includes(f.module)) { - installRequirements( - path.join(f.module, this.options.fileName), - path.join('.serverless', f.module), - this.serverless, + const reqsInstalledAt = await installRequirementsIfNeeded( + f.module, + f, + this + ); + // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are + let modulePath = path.join( this.servicePath, - this.options + '.serverless', + `${f.module}`, + 'requirements' ); - if (f.vendor) { - // copy vendor libraries to requirements folder - copyVendors( - f.vendor, - path.join('.serverless', f.module), - this.serverless - ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(modulePath) && + reqsInstalledAt != modulePath + ) { + if (this.options.useStaticCache) { + // Windows can't symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, modulePath); + } else { + fse.symlink(reqsInstalledAt, modulePath); + } + } else { + fse.rename(reqsInstalledAt, modulePath); + } } doneModules.push(f.module); } - }); + } } else { - installRequirements( - this.options.fileName, - '.serverless', - this.serverless, + const reqsInstalledAt = await installRequirementsIfNeeded('', {}, this); + // Add symlinks into .serverless for so it's easier for injecting and for users to see where reqs are + let symlinkPath = path.join( this.servicePath, - this.options + '.serverless', + `requirements` ); - if 
(this.options.vendor) { - // copy vendor libraries to requirements folder - copyVendors(this.options.vendor, '.serverless', this.serverless); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(symlinkPath) && + reqsInstalledAt != symlinkPath + ) { + // Windows can't symlink so we have to use junction on Windows + if (process.platform == 'win32') { + fse.symlink(reqsInstalledAt, symlinkPath, 'junction'); + } else { + fse.symlink(reqsInstalledAt, symlinkPath); + } } } } diff --git a/lib/pipenv.js b/lib/pipenv.js index 4fd82a90..1099b651 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -1,11 +1,49 @@ const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); +const { EOL } = require('os'); +const semver = require('semver'); + +const LEGACY_PIPENV_VERSION = '2022.8.5'; + +async function getPipenvVersion() { + try { + const res = await spawn('pipenv', ['--version'], { + cwd: this.servicePath, + }); + + const stdoutBuffer = + (res.stdoutBuffer && res.stdoutBuffer.toString().trim()) || ''; + + const version = stdoutBuffer.split(' ')[2]; + + if (semver.valid(version)) { + return version; + } else { + throw new this.serverless.classes.Error( + `Unable to parse pipenv version!`, + 'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR' + ); + } + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('command not found')) { + throw new this.serverless.classes.Error( + `pipenv not found! 
Install it according to the pipenv docs.`, + 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' + ); + } else { + throw e; + } + } +} /** * pipenv install */ -function pipfileToRequirements() { +async function pipfileToRequirements() { if ( !this.options.usePipenv || !fse.existsSync(path.join(this.servicePath, 'Pipfile')) @@ -13,27 +51,97 @@ function pipfileToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + let generateRequirementsProgress; + if (this.progress && this.log) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-pipfile' + ); + generateRequirementsProgress.update( + 'Generating requirements.txt from Pipfile' + ); + this.log.info('Generating requirements.txt from Pipfile'); + } else { + this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + } + + try { + // Get and validate pipenv version + if (this.log) { + this.log.info('Getting pipenv version'); + } else { + this.serverless.cli.log('Getting pipenv version'); + } + + const pipenvVersion = await getPipenvVersion(); + let res; - const res = spawnSync('pipenv', ['lock', '--requirements'], { - cwd: this.servicePath - }); - if (res.error) { - if (res.error.code === 'ENOENT') { - throw new Error( - `pipenv not found! Install it with 'pip install pipenv'.` + if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { + // Using new pipenv syntax ( >= 2022.8.13) + // Generate requirements from existing lock file. + // See: https://pipenv.pypa.io/en/latest/advanced/#generating-a-requirements-txt + try { + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + if (stderrBufferContent.includes('FileNotFoundError')) { + // No previous Pipfile.lock, we will try to generate it here + if (this.log) { + this.log.warning( + 'No Pipfile.lock found! 
Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } else { + this.serverless.cli.log( + 'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } else { + throw e; + } + } + } else { + // Falling back to legacy pipenv syntax + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } ); } - throw new Error(res.error); + + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.writeFileSync( + path.join(this.servicePath, '.serverless/requirements.txt'), + removeEditableFlagFromRequirementsString(res.stdoutBuffer) + ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - if (res.status !== 0) { - throw new Error(res.stderr); +} + +/** + * + * @param requirementBuffer + * @returns Buffer with editable flags remove + */ +function removeEditableFlagFromRequirementsString(requirementBuffer) { + const flagStr = '-e '; + const lines = requirementBuffer.toString('utf8').split(EOL); + for (let i = 0; i < lines.length; i++) { + if (lines[i].startsWith(flagStr)) { + lines[i] = lines[i].substring(flagStr.length); + } } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.writeFileSync( - path.join(this.servicePath, '.serverless/requirements.txt'), - res.stdout - ); + return Buffer.from(lines.join(EOL)); } module.exports = { pipfileToRequirements }; diff --git a/lib/poetry.js b/lib/poetry.js new file mode 100644 index 00000000..17e3268f --- /dev/null +++ b/lib/poetry.js @@ -0,0 +1,143 @@ +const fs = require('fs'); +const fse = require('fs-extra'); +const path = require('path'); + +const spawn = require('child-process-ext/spawn'); +const tomlParse = require('@iarna/toml/parse-string'); + +/** + * poetry install + */ +async 
function pyprojectTomlToRequirements(modulePath, pluginInstance) { + const { serverless, servicePath, options, log, progress } = pluginInstance; + + const moduleProjectPath = path.join(servicePath, modulePath); + if (!options.usePoetry || !isPoetryProject(moduleProjectPath)) { + return; + } + + let generateRequirementsProgress; + if (progress && log) { + generateRequirementsProgress = progress.get( + 'python-generate-requirements-toml' + ); + } + + const emitMsg = (msg) => { + if (generateRequirementsProgress) { + generateRequirementsProgress.update(msg); + log.info(msg); + } else { + serverless.cli.log(msg); + } + }; + + if (fs.existsSync('poetry.lock')) { + emitMsg('Generating requirements.txt from poetry.lock'); + } else { + if (options.requirePoetryLockFile) { + throw new serverless.classes.Error( + 'poetry.lock file not found - set requirePoetryLockFile to false to ' + + 'disable this error', + 'MISSING_REQUIRED_POETRY_LOCK' + ); + } + emitMsg('Generating poetry.lock and requirements.txt from pyproject.toml'); + } + + try { + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ...(options.poetryWithGroups.length + ? [`--with=${options.poetryWithGroups.join(',')}`] + : []), + ...(options.poetryWithoutGroups.length + ? [`--without=${options.poetryWithoutGroups.join(',')}`] + : []), + ...(options.poetryOnlyGroups.length + ? [`--only=${options.poetryOnlyGroups.join(',')}`] + : []), + ], + { + cwd: moduleProjectPath, + } + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new serverless.classes.Error( + `poetry not found! 
Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND' + ); + } + throw e; + } + + const editableFlag = new RegExp(/^-e /gm); + const sourceRequirements = path.join(moduleProjectPath, 'requirements.txt'); + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: 'utf-8', + }); + + if (requirementsContents.match(editableFlag)) { + if (log) { + log.info('The generated file contains -e flags, removing them'); + } else { + serverless.cli.log( + 'The generated file contains -e flags, removing them...' + ); + } + fse.writeFileSync( + sourceRequirements, + requirementsContents.replace(editableFlag, '') + ); + } + + fse.ensureDirSync(path.join(servicePath, '.serverless')); + fse.moveSync( + sourceRequirements, + path.join(servicePath, '.serverless', modulePath, 'requirements.txt'), + { overwrite: true } + ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); + } +} + +/** + * Check if pyproject.toml file exists and is a poetry project. 
+ */ +function isPoetryProject(servicePath) { + const pyprojectPath = path.join(servicePath, 'pyproject.toml'); + + if (!fse.existsSync(pyprojectPath)) { + return false; + } + + const pyprojectToml = fs.readFileSync(pyprojectPath); + const pyproject = tomlParse(pyprojectToml); + + const buildSystemReqs = + (pyproject['build-system'] && pyproject['build-system']['requires']) || []; + + for (var i = 0; i < buildSystemReqs.length; i++) { + if (buildSystemReqs[i].startsWith('poetry')) { + return true; + } + } + + return false; +} + +module.exports = { pyprojectTomlToRequirements, isPoetryProject }; diff --git a/lib/shared.js b/lib/shared.js new file mode 100644 index 00000000..bebb3f09 --- /dev/null +++ b/lib/shared.js @@ -0,0 +1,143 @@ +const Appdir = require('appdirectory'); +const rimraf = require('rimraf'); +const glob = require('glob-all'); +const path = require('path'); +const fse = require('fs-extra'); +const sha256File = require('sha256-file'); + +/** + * This helper will check if we're using static cache and have max + * versions enabled and will delete older versions in a fifo fashion + * @param {Object} options + * @param {Object} serverless + * @return {undefined} + */ +function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { + // If we're using the static cache, and we have static cache max versions enabled + if ( + options.useStaticCache && + options.staticCacheMaxVersions && + parseInt(options.staticCacheMaxVersions) > 0 + ) { + // Get the list of our cache files + const files = glob.sync( + [path.join(getUserCachePath(options), '*_slspyc/')], + { mark: true } + ); + // Check if we have too many + if (files.length >= options.staticCacheMaxVersions) { + // Sort by modified time + files.sort(function (a, b) { + return ( + fse.statSync(a).mtime.getTime() - fse.statSync(b).mtime.getTime() + ); + }); + // Remove the older files... 
+ var items = 0; + for ( + var i = 0; + i < files.length - options.staticCacheMaxVersions + 1; + i++ + ) { + rimraf.sync(files[i]); + items++; + } + + // Log the number of cache files flushed + if (log) { + log.info( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } else { + serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } + } + } +} + +/** + * The working path that all requirements will be compiled into + * @param {string} subfolder + * @param {string} servicePath + * @param {Object} options + * @param {Object} serverless + * @return {string} + */ +function getRequirementsWorkingPath( + subfolder, + requirementsTxtDirectory, + options, + serverless +) { + // If we want to use the static cache + if (options && options.useStaticCache) { + if (subfolder) { + const architecture = serverless.service.provider.architecture || 'x86_64'; + subfolder = `${subfolder}_${architecture}_slspyc`; + } + // If we have max number of cache items... 
+ + return path.join(getUserCachePath(options), subfolder); + } + + // If we don't want to use the static cache, then fallback to the way things used to work + return path.join(requirementsTxtDirectory, 'requirements'); +} + +/** + * Path of a cached requirements layer archive file + * @param {string} subfolder + * @param {string} fallback + * @param {Object} options + * @param {Object} serverless + * @return {string} + */ +function getRequirementsLayerPath(hash, fallback, options, serverless) { + // If we want to use the static cache + if (hash && options && options.useStaticCache) { + const architecture = serverless.service.provider.architecture || 'x86_64'; + hash = `${hash}_${architecture}_slspyc.zip`; + return path.join(getUserCachePath(options), hash); + } + + // If we don't want to use the static cache, then fallback to requirements file in .serverless directory + return fallback; +} + +/** + * The static cache path that will be used for this system + options, used if static cache is enabled + * @param {Object} options + * @return {string} + */ +function getUserCachePath(options) { + // If we've manually set the static cache location + if (options && options.cacheLocation) { + return path.resolve(options.cacheLocation); + } + + // Otherwise, find/use the python-ey appdirs cache location + const dirs = new Appdir({ + appName: 'serverless-python-requirements', + appAuthor: 'UnitedIncome', + }); + return dirs.userCache(); +} + +/** + * Helper to get the md5 a a file's contents to determine if a requirements has a static cache + * @param {string} fullpath + * @return {string} + */ +function sha256Path(fullpath) { + return sha256File(fullpath); +} + +module.exports = { + checkForAndDeleteMaxCacheVersions, + getRequirementsWorkingPath, + getRequirementsLayerPath, + getUserCachePath, + sha256Path, +}; diff --git a/lib/slim.js b/lib/slim.js new file mode 100644 index 00000000..8ead7fcc --- /dev/null +++ b/lib/slim.js @@ -0,0 +1,59 @@ +const isWsl = 
require('is-wsl'); +const glob = require('glob-all'); +const fse = require('fs-extra'); + +const getStripMode = (options) => { + if ( + options.strip === false || + options.strip === 'false' || + options.slim === false || + options.slim === 'false' + ) { + return 'skip'; + } else if (options.dockerizePip) { + return 'docker'; + } else if ( + (!isWsl && process.platform === 'win32') || + process.platform === 'darwin' + ) { + return 'skip'; + } else { + return 'direct'; + } +}; + +const getStripCommand = (options, folderPath) => [ + 'find', + folderPath, + '-name', + '*.so', + '-exec', + 'strip', + '{}', + ';', +]; + +const deleteFiles = (options, folderPath) => { + let patterns = ['**/*.py[c|o]', '**/__pycache__*', '**/*.dist-info*']; + if (options.slimPatterns) { + if ( + options.slimPatternsAppendDefaults === false || + options.slimPatternsAppendDefaults == 'false' + ) { + patterns = options.slimPatterns; + } else { + patterns = patterns.concat(options.slimPatterns); + } + } + for (const pattern of patterns) { + for (const file of glob.sync(`${folderPath}/${pattern}`)) { + fse.removeSync(file); + } + } +}; + +module.exports = { + getStripMode, + getStripCommand, + deleteFiles, +}; diff --git a/lib/zip.js b/lib/zip.js index 3dd21a51..3c21bbbf 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -1,56 +1,60 @@ const fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); -const zipper = require('zip-local'); +const set = require('set-value'); +const uniqBy = require('lodash.uniqby'); const BbPromise = require('bluebird'); -const values = require('lodash.values'); +const JSZip = require('jszip'); +const { addTree, writeZip } = require('./zipTree'); BbPromise.promisifyAll(fse); /** - * add the vendor helper to the current service tree + * Add the vendor helper to the current service tree. 
* @return {Promise} */ function addVendorHelper() { if (this.options.zip) { if (this.serverless.service.package.individually) { - let promises = []; - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'package.include')) { - set(f, ['package', 'include'], []); - } - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } + return BbPromise.resolve(this.targetFuncs) + .map((f) => { + if (!get(f, 'package.patterns')) { + set(f, ['package', 'patterns'], []); + } + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } - f.package.include.push('unzip_requirements.py'); + f.package.patterns.push('unzip_requirements.py'); + return f; + }) + .then((functions) => uniqBy(functions, (func) => func.module)) + .map((f) => { + if (this.log) { + this.log.info(`Adding Python requirements helper to ${f.module}`); + } else { + this.serverless.cli.log( + `Adding Python requirements helper to ${f.module}...` + ); + } - if (!doneModules.includes(f.module)) { - this.serverless.cli.log( - `Adding Python requirements helper to ${f.module}...` + return fse.copyAsync( + path.resolve(__dirname, '../unzip_requirements.py'), + path.join(this.servicePath, f.module, 'unzip_requirements.py') ); - - promises.push( - fse.copyAsync( - path.resolve(__dirname, '../unzip_requirements.py'), - path.join(this.servicePath, f.module, 'unzip_requirements.py') - ) - ); - - doneModules.push(f.module); - } - }); - return BbPromise.all(promises); + }); } else { - this.serverless.cli.log('Adding Python requirements helper...'); + if (this.log) { + this.log.info('Adding Python requirements helper'); + } else { + this.serverless.cli.log('Adding Python requirements helper...'); + } - if (!get(this.serverless.service, 'package.include')) { - set(this.serverless.service, ['package', 'include'], []); + if (!get(this.serverless.service, 'package.patterns')) { + set(this.serverless.service, ['package', 'patterns'], []); } - 
this.serverless.service.package.include.push('unzip_requirements.py'); + this.serverless.service.package.patterns.push('unzip_requirements.py'); return fse.copyAsync( path.resolve(__dirname, '../unzip_requirements.py'), @@ -61,33 +65,40 @@ function addVendorHelper() { } /** - * remove the vendor helper from the current service tree - * @return {Promise} + * Remove the vendor helper from the current service tree. + * @return {Promise} the promise to remove the vendor helper. */ function removeVendorHelper() { if (this.options.zip && this.options.cleanupZipHelper) { if (this.serverless.service.package.individually) { - let promises = []; - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (!doneModules.includes(f.module)) { - this.serverless.cli.log( - `Removing Python requirements helper from ${f.module}...` + return BbPromise.resolve(this.targetFuncs) + .map((f) => { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + return f; + }) + .then((funcs) => uniqBy(funcs, (f) => f.module)) + .map((f) => { + if (this.log) { + this.log.info( + `Removing Python requirements helper from ${f.module}` + ); + } else { + this.serverless.cli.log( + `Removing Python requirements helper from ${f.module}...` + ); + } + return fse.removeAsync( + path.join(this.servicePath, f.module, 'unzip_requirements.py') ); - promises.push( - fse.removeAsync( - path.join(this.servicePath, f.module, 'unzip_requirements.py') - ) - ); - doneModules.push(f.module); - } - }); - return BbPromise.all(promises); + }); } else { - this.serverless.cli.log('Removing Python requirements helper...'); + if (this.log) { + this.log.info('Removing Python requirements helper'); + } else { + this.serverless.cli.log('Removing Python requirements helper...'); + } return fse.removeAsync( path.join(this.servicePath, 'unzip_requirements.py') ); @@ -96,35 +107,58 @@ function removeVendorHelper() { } /** - * zip up 
.serverless/requirements + * Zip up .serverless/requirements or .serverless/[MODULE]/requirements. + * @return {Promise} the promise to pack requirements. */ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { - let doneModules = []; - values(this.serverless.service.functions).forEach(f => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - if (!doneModules.includes(f.module)) { - this.serverless.cli.log( - `Zipping required Python packages for ${f.module}...` - ); - f.package.include.push(`${f.module}/.requirements.zip`); - zipper.sync - .zip(`.serverless/${f.module}/requirements`) - .compress() - .save(`${f.module}/.requirements.zip`); - doneModules.push(f.module); - } - }); + return BbPromise.resolve(this.targetFuncs) + .filter((func) => { + return ( + func.runtime || this.serverless.service.provider.runtime + ).match(/^python.*/); + }) + .map((f) => { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + return f; + }) + .then((funcs) => uniqBy(funcs, (f) => f.module)) + .map((f) => { + let packProgress; + if (this.progress && this.log) { + packProgress = this.progress.get( + `python-pack-requirements-${f.module}` + ); + packProgress.update( + `Zipping required Python packages for ${f.module}` + ); + this.log.info(`Zipping required Python packages for ${f.module}`); + } else { + this.serverless.cli.log( + `Zipping required Python packages for ${f.module}...` + ); + } + f.package.patterns.push(`${f.module}/.requirements.zip`); + return addTree(new JSZip(), `.serverless/${f.module}/requirements`) + .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) + .finally(() => packProgress && packProgress.remove()); + }); } else { - this.serverless.cli.log('Zipping required Python packages...'); - this.serverless.service.package.include.push('.requirements.zip'); - zipper.sync - .zip(path.join(this.servicePath, '.serverless/requirements')) - .compress() - .save(path.join(this.servicePath, 
'.requirements.zip')); + let packProgress; + if (this.progress) { + packProgress = this.progress.get(`python-pack-requirements`); + } else { + this.serverless.cli.log('Zipping required Python packages...'); + } + this.serverless.service.package.patterns.push('.requirements.zip'); + return addTree(new JSZip(), '.serverless/requirements') + .then((zip) => + writeZip(zip, path.join(this.servicePath, '.requirements.zip')) + ) + .finally(() => packProgress && packProgress.remove()); } } } diff --git a/lib/zipTree.js b/lib/zipTree.js new file mode 100644 index 00000000..1654f665 --- /dev/null +++ b/lib/zipTree.js @@ -0,0 +1,84 @@ +const BbPromise = require('bluebird'); +const fse = require('fs-extra'); +const path = require('path'); + +BbPromise.promisifyAll(fse); + +/** + * Add a directory recursively to a zip file. Files in src will be added to the top folder of zip. + * @param {JSZip} zip a zip object in the folder you want to add files to. + * @param {string} src the source folder. + * @return {Promise} a promise offering the original JSZip object. + */ +function addTree(zip, src) { + const srcN = path.normalize(src); + + return fse + .readdirAsync(srcN) + .map((name) => { + const srcPath = path.join(srcN, name); + + return fse.statAsync(srcPath).then((stat) => { + if (stat.isDirectory()) { + return addTree(zip.folder(name), srcPath); + } else { + const opts = { date: stat.mtime, unixPermissions: stat.mode }; + return fse + .readFileAsync(srcPath) + .then((data) => zip.file(name, data, opts)); + } + }); + }) + .then(() => zip); // Original zip for chaining. +} + +/** + * Write zip contents to a file. + * @param {JSZip} zip the zip object + * @param {string} targetPath path to write the zip file to. + * @return {Promise} a promise resolving to null. + */ +function writeZip(zip, targetPath) { + const opts = { + platform: process.platform == 'win32' ? 
'DOS' : 'UNIX', + compression: 'DEFLATE', + compressionOptions: { + level: 9, + }, + }; + return new BbPromise((resolve) => + zip + .generateNodeStream(opts) + .pipe(fse.createWriteStream(targetPath)) + .on('finish', resolve) + ).then(() => null); +} + +/** + * Add a new file to a zip file from a buffer. + * @param {JSZip} zip the zip object to add the file to. + * @param {string} zipPath the target path in the zip. + * @param {Promise} bufferPromise a promise providing a nodebuffer. + * @return {Promise} a promise providing the JSZip object. + * @param {object} fileOpts an object with the opts to save for the file in the zip. + */ +function zipFile(zip, zipPath, bufferPromise, fileOpts) { + return bufferPromise + .then((buffer) => + zip.file( + zipPath, + buffer, + Object.assign( + {}, + { + // necessary to get the same hash when zipping the same content + date: new Date(0), + }, + fileOpts + ) + ) + ) + .then(() => zip); +} + +module.exports = { addTree, writeZip, zipFile }; diff --git a/package.json b/package.json index 3601635a..55ab4989 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { "name": "serverless-python-requirements", - "version": "4.0.0", + "version": "6.1.2", "engines": { - "node": ">=6.0" + "node": ">=12.0" }, "description": "Serverless Python Requirements Plugin", "author": "United Income ", @@ -38,33 +38,91 @@ "main": "index.js", "bin": {}, "scripts": { - "test": "bats test.bats", - "lint": "eslint *.js lib/*.js && prettier -l index.js lib/*.js || (echo need formatting ; exit 1)", - "format": "prettier --write index.js lib/*.js" + "commitlint": "commitlint -f HEAD@{15}", + "lint": "eslint .", + "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prepare-release": "standard-version && prettier --write CHANGELOG.md", + "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml 
-- prettier -c", + "prettify": "prettier --write --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettify:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier --write", + "test": "node test.js" }, "devDependencies": { - "eslint": "*", - "prettier": "*" + "cross-spawn": "*", + "eslint": "^8.57.0", + "git-list-updated": "^1.2.1", + "github-release-from-cc-changelog": "^2.3.0", + "lodash": "^4.17.21", + "prettier": "^2", + "standard-version": "^9.5.0", + "tape": "*", + "tape-promise": "*" }, "dependencies": { - "bluebird": "^3.0.6", - "fs-extra": "^5.0.0", - "glob-all": "^3.1.0", - "is-wsl": "^1.1.0", - "jszip": "^2.5.0", + "@iarna/toml": "^2.2.5", + "appdirectory": "^0.1.0", + "bluebird": "^3.7.2", + "child-process-ext": "^2.1.1", + "fs-extra": "^10.1.0", + "glob-all": "^3.3.1", + "is-wsl": "^2.2.0", + "jszip": "^3.10.1", "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", + "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", - "rimraf": "^2.6.2", - "zip-local": "^0.3.4" + "rimraf": "^3.0.2", + "semver": "^7.6.0", + "set-value": "^4.1.0", + "sha256-file": "1.0.0", + "shell-quote": "^1.8.1" + }, + "lint-staged": { + "*.js": [ + "eslint" + ], + "*.{css,html,js,json,md,yaml,yml}": [ + "prettier -c" + ] }, "eslintConfig": { "extends": "eslint:recommended", "env": { + "commonjs": true, "node": true, "es6": true + }, + "parserOptions": { + "ecmaVersion": 2018 + }, + "rules": { + "no-console": "off" } }, + "standard-version": { + "skip": { + "commit": true, + "tag": true + }, + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "refactor", + "section": "Maintenance Improvements" + } + ] + }, "prettier": { "semi": true, "singleQuote": true diff --git a/test.bats b/test.bats deleted file mode 100755 index adb24e73..00000000 --- a/test.bats +++ /dev/null @@ 
-1,219 +0,0 @@ -#!/usr/bin/env bats - - -setup() { - export SLS_DEBUG=t - if ! [ -z "$CI" ]; then - export LC_ALL=C.UTF-8 - export LANG=C.UTF-8 - fi -} - -teardown() { - rm -rf puck puck2 puck3 node_modules .serverless .requirements.zip .requirements-cache - if [ -f serverless.yml.bak ]; then mv serverless.yml.bak serverless.yml; fi -} - -@test "py3.6 can package flask with default options" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 can package flask with zip option" { - cd tests/base - npm i $(npm pack ../..) - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py - ! ls puck/flask -} - -@test "py3.6 doesn't package boto3 by default" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "py3.6 doesn't package bottle with noDeploy option" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py - ! ls puck/__pycache__/bottle.cpython-36.pyc -} - -@test "py3.6 can package flask with zip & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py3.6 can package flask with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py3.6 uses cache with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n pipCmdExtraArgs: ["--cache-dir", ".requirements-cache"]/' serverless.yml - sls --dockerizePip=true package - ls .requirements-cache/http -} - -@test "py2.7 can package flask with default options" { - cd tests/base - npm i $(npm pack ../..) - sls --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "py2.7 can package flask with zip option" { - cd tests/base - npm i $(npm pack ../..) - sls --runtime=python2.7 --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py2.7 doesn't package boto3 by default" { - cd tests/base - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "py2.7 doesn't package bottle with noDeploy option" { - cd tests/base - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py -} - -@test "py2.7 can package flask with zip & dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - ! 
uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --runtime=python2.7 --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py2.7 can package flask with dockerizePip option" { - cd tests/base - npm i $(npm pack ../..) - ! uname -sm|grep Linux || groups|grep docker || id -u|egrep '^0$' || skip "can't dockerize on linux if not root & not in docker group" - sls --dockerizePip=true --runtime=python2.7 package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "pipenv py3.6 can package flask with default options" { - cd tests/pipenv - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask -} - -@test "pipenv py3.6 can package flask with zip option" { - cd tests/pipenv - npm i $(npm pack ../..) - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "pipenv py3.6 doesn't package boto3 by default" { - cd tests/pipenv - npm i $(npm pack ../..) - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/boto3 -} - -@test "pipenv py3.6 doesn't package bottle with noDeploy option" { - cd tests/pipenv - npm i $(npm pack ../..) - perl -p -i'.bak' -e 's/(pythonRequirements:$)/\1\n noDeploy: [bottle]/' serverless.yml - sls package - unzip .serverless/sls-py-req-test.zip -d puck - ! ls puck/bottle.py -} - -@test "py3.6 can package flask with zip option and no explicit include" { - cd tests/base - npm i $(npm pack ../..) - sed -i'.bak' -e 's/include://' -e 's/^.*handler.py//' serverless.yml - sls --zip=true package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/.requirements.zip puck/unzip_requirements.py -} - -@test "py3.6 can package flask with package individually option" { - cd tests/base - npm i $(npm pack ../..) 
- sls --individually=true package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask -} - -@test "py2.7 can package flask with package individually option" { - cd tests/base - npm i $(npm pack ../..) - sls --individually=true --runtime=python2.7 package - unzip .serverless/hello.zip -d puck - unzip .serverless/hello2.zip -d puck2 - unzip .serverless/hello3.zip -d puck3 - ls puck/flask - ls puck2/flask - ! ls puck3/flask -} - -@test "py3.6 can package only requirements of module" { - cd tests/individually - npm i $(npm pack ../..) - sls package - unzip .serverless/module1.zip -d puck - unzip .serverless/module2.zip -d puck2 - ls puck/handler1.py - ls puck2/handler2.py - ls puck/pyaml - ls puck2/flask - ! ls puck/handler2.py - ! ls puck2/handler1.py - ! ls puck/flask - ! ls puck2/pyaml -} - -@test "py3.6 can package lambda-decorators using vendor option" { - cd tests/base - npm i $(npm pack ../..) 
- sls --vendor=./vendor package - unzip .serverless/sls-py-req-test.zip -d puck - ls puck/flask - ls puck/lambda_decorators.py -} diff --git a/test.js b/test.js new file mode 100644 index 00000000..1967330b --- /dev/null +++ b/test.js @@ -0,0 +1,1838 @@ +const crossSpawn = require('cross-spawn'); +const glob = require('glob-all'); +const JSZip = require('jszip'); +const sha256File = require('sha256-file'); +const tape = require('tape-promise/tape'); + +const { + chmodSync, + removeSync, + readFile, + copySync, + writeFileSync, + statSync, + pathExistsSync, +} = require('fs-extra'); +const { quote } = require('shell-quote'); +const { sep } = require('path'); + +const { getUserCachePath, sha256Path } = require('./lib/shared'); + +const initialWorkingDir = process.cwd(); + +const mkCommand = + (cmd) => + (args, options = {}) => { + options['env'] = Object.assign( + { SLS_DEBUG: 'true' }, + process.env, + options['env'] + ); + const { error, stdout, stderr, status } = crossSpawn.sync( + cmd, + args, + options + ); + if (error && !options['noThrow']) { + console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console + throw error; + } + if (status && !options['noThrow']) { + console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console + console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console + throw new Error( + `${quote([cmd, ...args])} failed with status code ${status}` + ); + } + return { + stdout: stdout && stdout.toString().trim(), + stderr: stderr && stderr.toString().trim(), + }; + }; + +const sls = mkCommand('sls'); +const git = mkCommand('git'); +const npm = mkCommand('npm'); +const perl = mkCommand('perl'); + +const setup = () => { + removeSync(getUserCachePath()); + process.chdir(initialWorkingDir); +}; + +const teardown = () => { + const cwd = process.cwd(); + if (!cwd.startsWith(initialWorkingDir)) { + throw new Error(`Somehow cd'd into ${cwd}`); + } + if (cwd != initialWorkingDir) { + 
[ + 'puck', + 'puck2', + 'puck3', + 'node_modules', + '.serverless', + '.requirements.zip', + '.requirements-cache', + 'foobar', + 'package-lock.json', + 'slimPatterns.yml', + 'serverless.yml.bak', + 'module1/foobar', + getUserCachePath(), + ...glob.sync('serverless-python-requirements-*.tgz'), + ].map((path) => removeSync(path)); + if (!cwd.endsWith('base with a space')) { + try { + git(['checkout', 'serverless.yml']); + } catch (err) { + console.error( + `At ${cwd} failed to checkout 'serverless.yml' with ${err}.` + ); + throw err; + } + } + process.chdir(initialWorkingDir); + } + removeSync('tests/base with a space'); +}; + +const testFilter = (() => { + const elems = process.argv.slice(2); // skip ['node', 'test.js'] + if (elems.length) { + return (desc) => + elems.some((text) => desc.search(text) != -1) + ? tape.test + : tape.test.skip; + } else { + return () => tape.test; + } +})(); + +const test = (desc, func, opts = {}) => + testFilter(desc)(desc, opts, async (t) => { + setup(); + let ended = false; + try { + await func(t); + ended = true; + } catch (err) { + t.fail(err); + } finally { + try { + teardown(); + } catch (err) { + t.fail(err); + } + if (!ended) t.end(); + } + }); + +const availablePythons = (() => { + const binaries = []; + const mapping = {}; + if (process.env.USE_PYTHON) { + binaries.push( + ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim()) + ); + } else { + // For running outside of CI + binaries.push('python'); + } + const exe = process.platform === 'win32' ? 
'.exe' : ''; + for (const bin of binaries) { + const python = `${bin}${exe}`; + const { stdout, status } = crossSpawn.sync(python, [ + '-c', + 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))', + ]); + const ver = stdout && stdout.toString().trim(); + if (!status && ver) { + for (const recommend of [ver, ver.split('.')[0]]) { + if (!mapping[recommend]) { + mapping[recommend] = python; + } + } + } + } + if (!Object.entries(mapping).length) { + throw new Error('No pythons found'); + } + return mapping; +})(); + +const getPythonBin = (version) => { + const bin = availablePythons[String(version)]; + if (!bin) throw new Error(`No python version ${version} available`); + return bin; +}; + +const listZipFiles = async function (filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + return Object.keys(zip.files); +}; + +const listZipFilesWithMetaData = async function (filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + return Object(zip.files); +}; + +const listRequirementsZipFiles = async function (filename) { + const file = await readFile(filename); + const zip = await new JSZip().loadAsync(file); + const reqsBuffer = await zip.file('.requirements.zip').async('nodebuffer'); + const reqsZip = await new JSZip().loadAsync(reqsBuffer); + return Object.keys(reqsZip.files); +}; + +const canUseDocker = () => { + let result; + try { + result = crossSpawn.sync('docker', ['ps']); + } catch (e) { + return false; + } + return result.status === 0; +}; + +// Skip if running on these platforms. 
+const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; + +test( + 'dockerPrivateKey option correctly resolves docker command', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const { stdout } = sls(['package'], { + noThrow: true, + env: { + dockerizePip: true, + dockerSsh: true, + dockerPrivateKey: `${__dirname}${sep}tests${sep}base${sep}custom_ssh`, + dockerImage: 'break the build to log the command', + }, + }); + t.true( + stdout.includes( + `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` + ), + 'docker command properly resolved' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('default pythonBin can package flask with default options', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 packages have the same hash', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const fileHash = sha256File('.serverless/sls-py-req-test.zip'); + sls(['package'], { env: {} }); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); + t.end(); +}); + +test('py3.9 can package flask with default options', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is 
packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test( + 'py3.9 can package flask with hashes', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-hashes.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.end(); + }, + { skip: brokenOn('win32') } +); + +test('py3.9 can package flask with nested', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 can package flask with zip option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test("py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.9 can package boto3 with editable', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`botocore${sep}__init__.py`), + 'botocore is packaged' + ); + t.end(); +}); + +test( + 'py3.9 can package flask with dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test( + 'py3.9 can package flask with slim & dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + '*.pyc files are NOT packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, + '__main__.py files are packaged' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test( + 'py3.9 can package flask with slim & dockerizePip & slimPatterns options', + async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + 
zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + '*.pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test( + 'py3.9 can package flask with zip & dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test( + 'py3.9 can package flask with zip & slim & dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true( + zipfiles.includes(`unzip_requirements.py`), + 'unzip util is packaged' + ); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the 
.requirements.zip file' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('pipenv py3.9 can package flask with default options', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/pipenv'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + 
+test('pipenv py3.9 can package flask with zip option', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('non build pyproject.toml uses requirements.txt', async (t) => { + process.chdir('tests/non_build_pyproject'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { + process.chdir('tests/non_poetry_pyproject'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + 
t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); +}); + +test('poetry py3.9 can package flask with default options', async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with zip option', 
async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.9 can package flask with zip option and no explicit include', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); + sls(['package'], { env: { zip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.9 can package lambda-decorators using vendor option', async (t) => { + process.chdir('tests/base'); + const { 
stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { vendor: './vendor' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.end(); +}); + +test( + "Don't nuke execute perms", + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + const perm = '755'; + + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml', + ]); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); + sls(['package'], { env: { vendor: './vendor' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); + + const zipfiles_with_metadata = await listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + + t.end(); + }, + { skip: process.platform === 'win32' } +); + +test('py3.9 can package flask in a project with a space in it', async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const { stdout: path } = 
npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test( + 'py3.9 can package flask in a project with a space in it with docker', + async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('py3.9 supports custom file name with fileName option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['package'], { env: { fileName: 'puck' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test("py3.9 doesn't package bottle with zip option", async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const 
zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test( + 'py3.9 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + dockerizePip: 'true', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => 
filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with package individually option', async (t) => { + process.chdir('tests/poetry_individually'); + const { stdout: path 
} = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 can package flask with package individually option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + 
zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + + t.end(); +}); + +test('py3.9 can package flask with package individually & slim option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', slim: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function 
hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); + + t.end(); +}); + +test('py3.9 can package only requirements of module', async (t) => { + process.chdir('tests/individually'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles_hello = await listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.true( + zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); + t.false( + 
zipfiles_hello.includes(`flask${sep}__init__.py`),
+    'flask is NOT packaged in function hello1'
+  );
+
+  const zipfiles_hello2 = await listZipFiles(
+    '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip'
+  );
+  t.true(
+    zipfiles_hello2.includes('handler2.py'),
+    'handler2.py is packaged at root level in function hello2'
+  );
+  t.false(
+    zipfiles_hello2.includes('handler1.py'),
+    'handler1.py is NOT packaged at root level in function hello2'
+  );
+  t.false(
+    zipfiles_hello2.includes(`pyaml${sep}__init__.py`),
+    'pyaml is NOT packaged in function hello2'
+  );
+  t.false(
+    zipfiles_hello2.includes(`boto3${sep}__init__.py`),
+    'boto3 is NOT packaged in function hello2'
+  );
+  t.true(
+    zipfiles_hello2.includes(`flask${sep}__init__.py`),
+    'flask is packaged in function hello2'
+  );
+
+  t.end();
+});
+
+test('py3.9 can package lambda-decorators using vendor and individually option', async (t) => {
+  process.chdir('tests/base');
+  const { stdout: path } = npm(['pack', '../..']);
+  npm(['i', path]);
+  sls(['package'], { env: { individually: 'true', vendor: './vendor' } });
+  const zipfiles_hello = await listZipFiles('.serverless/hello.zip');
+  t.true(
+    zipfiles_hello.includes('handler.py'),
+    'handler.py is packaged at root level in function hello'
+  );
+  t.true(
+    zipfiles_hello.includes(`flask${sep}__init__.py`),
+    'flask is packaged in function hello'
+  );
+  t.true(
+    zipfiles_hello.includes(`lambda_decorators.py`),
+    'lambda_decorators.py is packaged in function hello'
+  );
+  t.false(
+    zipfiles_hello.includes(`dataclasses.py`),
+    'dataclasses is NOT packaged in function hello'
+  );
+
+  const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip');
+  t.true(
+    zipfiles_hello2.includes('handler.py'),
+    'handler.py is packaged at root level in function hello2'
+  );
+  t.true(
+    zipfiles_hello2.includes(`flask${sep}__init__.py`),
+    'flask is packaged in function hello2'
+  );
+  t.true(
+    zipfiles_hello2.includes(`lambda_decorators.py`),
+
'lambda_decorators.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged at root level in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.end(); +}); + +test( + "Don't nuke execute perms when using individually", + async (t) => { + process.chdir('tests/individually'); + const { stdout: path } = npm(['pack', '../..']); + const perm = '755'; + writeFileSync(`module1${sep}foobar`, ''); + chmodSync(`module1${sep}foobar`, perm); + + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles_hello1 = await listZipFilesWithMetaData( + '.serverless/hello1.zip' + ); + + t.true( + zipfiles_hello1['module1/foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const zipfiles_hello2 = await listZipFilesWithMetaData( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; + + t.true( 
+ zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + + t.end(); + }, + { skip: process.platform === 'win32' } +); + +test( + "Don't nuke execute perms when using individually w/docker", + async (t) => { + process.chdir('tests/individually'); + const { stdout: path } = npm(['pack', '../..']); + const perm = '755'; + writeFileSync(`module1${sep}foobar`, '', { mode: perm }); + chmodSync(`module1${sep}foobar`, perm); + + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true' } }); + const zipfiles_hello = await listZipFilesWithMetaData( + '.serverless/hello1.zip' + ); + + t.true( + zipfiles_hello['module1/foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const zipfiles_hello2 = await listZipFilesWithMetaData( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; + + t.true( + zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' + ); + + t.end(); + }, + { skip: !canUseDocker() || process.platform === 'win32' } +); + +test( + 'py3.9 can package flask running in docker with module runtime & architecture of function', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { dockerizePip: 'true' }, + }); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + }, + { + skip: !canUseDocker() || process.platform === 'win32', + } +); + 
+test(
+  'py3.9 can package flask successfully when using mixed architecture, docker and zipping',
+  async (t) => {
+    process.chdir('tests/individually_mixed_runtime');
+    const { stdout: path } = npm(['pack', '../..']);
+
+    npm(['i', path]);
+    sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } });
+
+    const zipfiles_hello = await listZipFiles('.serverless/hello1.zip');
+    t.true(
+      zipfiles_hello.includes(`module1${sep}handler1.ts`),
+      'handler1.ts is packaged in module dir for hello1'
+    );
+    t.false(
+      zipfiles_hello.includes('handler2.py'),
+      'handler2.py is NOT packaged at root level in function hello1'
+    );
+    t.false(
+      zipfiles_hello.includes(`flask${sep}__init__.py`),
+      'flask is NOT packaged in function hello1'
+    );
+
+    const zipfiles_hello2 = await listZipFiles(
+      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip'
+    );
+    const zippedReqs = await listRequirementsZipFiles(
+      '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip'
+    );
+    t.true(
+      zipfiles_hello2.includes('handler2.py'),
+      'handler2.py is packaged at root level in function hello2'
+    );
+    t.false(
+      zipfiles_hello2.includes(`module1${sep}handler1.ts`),
+      'handler1.ts is NOT included at module1 level in hello2'
+    );
+    t.false(
+      zipfiles_hello2.includes(`pyaml${sep}__init__.py`),
+      'pyaml is NOT packaged in function hello2'
+    );
+    t.false(
+      zipfiles_hello2.includes(`boto3${sep}__init__.py`),
+      'boto3 is NOT included in zipfile'
+    );
+    t.true(
+      zippedReqs.includes(`flask${sep}__init__.py`),
+      'flask is packaged in function hello2 in requirements.zip'
+    );
+
+    t.end();
+  },
+  { skip: !canUseDocker() || process.platform === 'win32' }
+);
+
+test(
+  'py3.9 uses download cache by default option',
+  async (t) => {
+    process.chdir('tests/base');
+    const { stdout: path } = npm(['pack', '../..']);
+    npm(['i', path]);
+    sls(['package'], { env: {} });
+    const cachepath = getUserCachePath();
+    t.true(
+
pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); + +test( + 'py3.9 uses download cache by default', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); + +test( + 'py3.9 uses download cache with dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true' } }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } +); + +test( + 'py3.9 uses download cache with dockerizePip by default option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' }, + }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } +); + +test( + 'py3.9 uses static and download cache', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + 
`${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: true } +); + +test( + 'py3.9 uses static and download cache with dockerizePip option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true' } }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('py3.9 uses static cache', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.9 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + + t.end(); +}); + +test('py3.9 
uses static cache with cacheLocation option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls(['package'], { env: { cacheLocation: cachepath } }); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); +}); + +test( + 'py3.9 uses static cache with dockerizePip & slim option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.9 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc 
files are packaged' + ); + + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test( + 'py3.9 uses download cache with dockerizePip & slim option', + async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files are packaged' + ); + + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('py3.9 can ignore functions defined with `image`', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged'); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + + t.end(); +}); + +test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { + copySync('tests/poetry', 'tests/base with a space'); + process.chdir('tests/base with a space'); + removeSync('poetry.lock'); + + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const { stdout } = sls(['package'], { + env: { requirePoetryLockFile: 'true', slim: 'true' }, + noThrow: true, + }); 
+ t.true( + stdout.includes( + 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' + ), + 'flag works and error is properly reported' + ); + t.end(); +}); + +test('works with provider.runtime not being python', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { runtime: 'nodejs20.x' } }); + t.true( + pathExistsSync('.serverless/sls-py-req-test.zip'), + 'sls-py-req-test is packaged' + ); + t.end(); +}); + +test('poetry py3.9 packages additional optional packages', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + poetryWithoutGroups: 'poetryWithoutGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + 
env: { + poetryOnlyGroups: 'poetryOnlyGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test( + 'py3.7 injects dependencies into `package` folder when using scaleway provider', + async (t) => { + process.chdir('tests/scaleway_provider'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`package${sep}flask${sep}__init__.py`), + 'flask is packaged' + ); + t.true( + zipfiles.includes(`package${sep}boto3${sep}__init__.py`), + 'boto3 is packaged' + ); + t.end(); + }, + { skip: true } // sls v4 supports aws provider only +); diff --git a/tests/base/.gitignore b/tests/base/.gitignore deleted file mode 100644 index 213a542c..00000000 --- a/tests/base/.gitignore +++ /dev/null @@ -1,25 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements - -# Project ignores -puck/ -serverless.yml.bak diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/base/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/base/custom_ssh b/tests/base/custom_ssh new file mode 100644 index 00000000..8a7c4203 --- /dev/null +++ b/tests/base/custom_ssh @@ -0,0 +1 @@ +SOME KEY diff --git a/tests/base/fn2/__init__.py b/tests/base/fn2/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/base/fn2/fn2_handler.py b/tests/base/fn2/fn2_handler.py 
new file mode 100644 index 00000000..e69de29b diff --git a/tests/base/fn2/requirements.txt b/tests/base/fn2/requirements.txt new file mode 100644 index 00000000..eea18113 --- /dev/null +++ b/tests/base/fn2/requirements.txt @@ -0,0 +1 @@ +dataclasses \ No newline at end of file diff --git a/tests/base/package.json b/tests/base/package.json index c53d13ee..b07744c9 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/base/requirements-common.txt b/tests/base/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/base/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/base/requirements-w-editable.txt b/tests/base/requirements-w-editable.txt new file mode 100644 index 00000000..a7c63986 --- /dev/null +++ b/tests/base/requirements-w-editable.txt @@ -0,0 +1 @@ +-e git+https://github.com/boto/boto3.git#egg=boto3 diff --git a/tests/base/requirements-w-hashes.txt b/tests/base/requirements-w-hashes.txt new file mode 100644 index 00000000..428e8528 --- /dev/null +++ b/tests/base/requirements-w-hashes.txt @@ -0,0 +1,90 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --generate-hashes --output-file requirements-w-hashes.txt requirements.txt +# +boto3==1.9.50 \ + --hash=sha256:177e9dd53db5028bb43050da20cc7956287889fc172e5e6275a634e42a10beeb \ + --hash=sha256:8c63e616b91907037ab19236afbcf0057efb31411faf38b46f4590e634dc17ea +botocore==1.12.50 \ + --hash=sha256:07fae5a2b8cfb5a92c1dbee3f2feb4da7c471bcead7e18ce735babe5f39e270f \ + --hash=sha256:eeaa190f50ee05a56225ee78c64cb8bf0c3bf090ec605ca6c2f325aa3826a347 \ + # via boto3, s3transfer +bottle==0.12.19 \ + 
--hash=sha256:f6b8a34fe9aa406f9813c02990db72ca69ce6a158b5b156d2c41f345016a723d \ + --hash=sha256:a9d73ffcbc6a1345ca2d7949638db46349f5b2b77dac65d6494d45c23628da2c +click==7.0 \ + --hash=sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13 \ + --hash=sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7 \ + # via flask +docutils==0.14 \ + --hash=sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6 \ + --hash=sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274 \ + --hash=sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6 \ + # via botocore +flask==1.0.2 \ + --hash=sha256:2271c0070dbcb5275fad4a82e29f23ab92682dc45f9dfbc22c02ba9b9322ce48 \ + --hash=sha256:a080b744b7e345ccfcbc77954861cb05b3c63786e93f2b3875e0913d44b43f05 +itsdangerous==1.1.0 \ + --hash=sha256:321b033d07f2a4136d3ec762eac9f16a10ccd60f53c0c91af90217ace7ba1f19 \ + --hash=sha256:b12271b2047cb23eeb98c8b5622e2e5c5e9abd9784a153e9d8ef9cb4dd09d749 \ + # via flask +jinja2==2.11.3 \ + --hash=sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419 \ + --hash=sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6 \ + # via flask +jmespath==0.9.3 \ + --hash=sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64 \ + --hash=sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63 \ + # via boto3, botocore +markupsafe==1.1.0 \ + --hash=sha256:048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432 \ + --hash=sha256:130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b \ + --hash=sha256:19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9 \ + --hash=sha256:1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af \ + --hash=sha256:1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834 \ + --hash=sha256:1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd \ + 
--hash=sha256:1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d \ + --hash=sha256:31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7 \ + --hash=sha256:3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b \ + --hash=sha256:4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3 \ + --hash=sha256:525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c \ + --hash=sha256:52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2 \ + --hash=sha256:52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7 \ + --hash=sha256:5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36 \ + --hash=sha256:5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1 \ + --hash=sha256:5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e \ + --hash=sha256:7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1 \ + --hash=sha256:83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c \ + --hash=sha256:857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856 \ + --hash=sha256:98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550 \ + --hash=sha256:bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492 \ + --hash=sha256:d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672 \ + --hash=sha256:e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401 \ + --hash=sha256:edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6 \ + --hash=sha256:efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6 \ + --hash=sha256:f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c \ + --hash=sha256:f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd \ + --hash=sha256:fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1 \ + # via jinja2 +python-dateutil==2.7.5 \ + --hash=sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93 \ + 
--hash=sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02 \ + # via botocore +s3transfer==0.1.13 \ + --hash=sha256:90dc18e028989c609146e241ea153250be451e05ecc0c2832565231dacdf59c1 \ + --hash=sha256:c7a9ec356982d5e9ab2d4b46391a7d6a950e2b04c472419f5fdec70cc0ada72f \ + # via boto3 +six==1.11.0 \ + --hash=sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9 \ + --hash=sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb \ + # via python-dateutil +urllib3==1.24.2 \ + --hash=sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0 \ + --hash=sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3 \ + # via botocore +werkzeug==0.15.3 \ + --hash=sha256:97660b282aa7e29f94f3fe378e5c7162d7ab9d601a8dbb1cbb2ffc8f0e54607d \ + --hash=sha256:cfd1281b1748288e59762c0e174d64d8bcb2b70e7c57bc4a1203c8825af24ac3 \ + # via flask diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt new file mode 100644 index 00000000..b09aa52a --- /dev/null +++ b/tests/base/requirements-w-nested.txt @@ -0,0 +1,3 @@ +flask==2.0.3 +bottle +-r requirements-common.txt diff --git a/tests/base/requirements.txt b/tests/base/requirements.txt index 24a42e66..23bfb7a6 100644 --- a/tests/base/requirements.txt +++ b/tests/base/requirements.txt @@ -1,3 +1,3 @@ -flask +flask==0.12.5 bottle boto3 diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index ea22c46e..87423210 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,26 +2,43 @@ service: sls-py-req-test provider: name: aws - runtime: ${opt:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.9'} plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} - vendor: ${opt:vendor, ''} + zip: ${env:zip, self:custom.defaults.zip} + dockerizePip: ${env:dockerizePip, 
self:custom.defaults.dockerizePip} + dockerSsh: ${env:dockerSsh, self:custom.defaults.dockerSsh} + dockerPrivateKey: ${env:dockerPrivateKey, self:custom.defaults.dockerPrivateKey} + dockerImage: ${env:dockerImage, self:custom.defaults.dockerImage} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + vendor: ${env:vendor, ''} + fileName: ${env:fileName, 'requirements.txt'} + useStaticCache: ${env:useStaticCache, self:custom.defaults.useStaticCache} + useDownloadCache: ${env:useDownloadCache, self:custom.defaults.useDownloadCache} + cacheLocation: ${env:cacheLocation, ''} defaults: + slim: false + slimPatterns: false + slimPatternsAppendDefaults: true zip: false dockerizePip: false + dockerSsh: false + dockerPrivateKey: '' + dockerImage: '' individually: false + useStaticCache: true + useDownloadCache: true package: - individually: ${opt:individually, self:custom.defaults.individually} - exclude: - - '**/*' - include: - - handler.py + individually: ${env:individually, self:custom.defaults.individually} + patterns: + - '!**/*' + - 'handler.py' functions: hello: @@ -30,4 +47,12 @@ functions: handler: handler.hello hello3: handler: handler.hello - runtime: nodejs6.10 + runtime: nodejs14.x + hello4: + handler: fn2_handler.hello + module: fn2 + package: + patterns: + - 'fn2/**' + hello5: + image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 diff --git a/tests/individually/module1/handler1.py b/tests/individually/module1/handler1.py index 970b0c01..369835cd 100644 --- a/tests/individually/module1/handler1.py +++ b/tests/individually/module1/handler1.py @@ -1,6 +1,9 @@ import boto3 + def hello(event, context): - return { - 'status': 200, - } + return {"status": 200} + + +def 
hello2(event, context): + return {"status": 200} diff --git a/tests/individually/module1/requirements.txt b/tests/individually/module1/requirements.txt index 2e64be1f..9b7a216a 100644 --- a/tests/individually/module1/requirements.txt +++ b/tests/individually/module1/requirements.txt @@ -1 +1,2 @@ +-r ../requirements-common.txt pyaml diff --git a/tests/individually/module2/requirements.txt b/tests/individually/module2/requirements.txt index 7e106024..c09d0264 100644 --- a/tests/individually/module2/requirements.txt +++ b/tests/individually/module2/requirements.txt @@ -1 +1 @@ -flask +flask==2.0.3 diff --git a/tests/individually/package.json b/tests/individually/package.json index c53d13ee..b07744c9 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually/requirements-common.txt b/tests/individually/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 427dba75..6409532b 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,10 +2,17 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.6 + runtime: python3.9 package: individually: true + patterns: + - '!node_modules/**' +custom: + pythonRequirements: + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + dockerizePip: false functions: hello1: diff --git a/tests/individually_mixed_runtime/module1/handler1.ts b/tests/individually_mixed_runtime/module1/handler1.ts new file mode 100644 index 00000000..b8062f8b --- /dev/null +++ 
b/tests/individually_mixed_runtime/module1/handler1.ts @@ -0,0 +1,3 @@ +function hello() { + return "hello" +} diff --git a/tests/individually_mixed_runtime/module2/handler2.py b/tests/individually_mixed_runtime/module2/handler2.py new file mode 100644 index 00000000..d9f5c465 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/handler2.py @@ -0,0 +1,6 @@ +import flask + +def hello(event, context): + return { + 'status': 200, + } diff --git a/tests/individually_mixed_runtime/module2/requirements.txt b/tests/individually_mixed_runtime/module2/requirements.txt new file mode 100644 index 00000000..c09d0264 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/requirements.txt @@ -0,0 +1 @@ +flask==2.0.3 diff --git a/tests/individually_mixed_runtime/package.json b/tests/individually_mixed_runtime/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/individually_mixed_runtime/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/individually_mixed_runtime/requirements-common.txt b/tests/individually_mixed_runtime/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually_mixed_runtime/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/individually_mixed_runtime/serverless.yml b/tests/individually_mixed_runtime/serverless.yml new file mode 100644 index 00000000..7c602239 --- /dev/null +++ b/tests/individually_mixed_runtime/serverless.yml @@ -0,0 +1,39 @@ +service: sls-py-req-test-indiv-mixed-runtime + +provider: + name: aws + runtime: nodejs18.x + architecture: arm64 + +package: + individually: true + +custom: + pythonRequirements: + dockerizePip: 
${env:dockerizePip, self:custom.defaults.dockerizePip} + zip: ${env:zip, self:custom.defaults.zip} + defaults: + dockerizePip: false + zip: false + +functions: + hello1: + handler: handler1.hello + architecture: x86_64 + package: + patterns: + - '!**' + - 'module1/**' + + hello2: + handler: handler2.hello + module: module2 + runtime: python3.9 + architecture: x86_64 + package: + patterns: + - '!**' + - 'module2/**' + +plugins: + - serverless-python-requirements diff --git a/tests/non_build_pyproject/handler.py b/tests/non_build_pyproject/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/non_build_pyproject/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/non_build_pyproject/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/non_build_pyproject/pyproject.toml b/tests/non_build_pyproject/pyproject.toml new file mode 100644 index 00000000..41932632 --- /dev/null +++ b/tests/non_build_pyproject/pyproject.toml @@ -0,0 +1,10 @@ +[tool.black] +line-length = 79 +py36 = true +skip-string-normalization = true +exclude = ''' +/( + \.serverless + | node_modules +)/ +''' diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt new file mode 100644 index 00000000..09764fc3 --- /dev/null +++ b/tests/non_build_pyproject/requirements.txt @@ -0,0 +1,2 @@ +flask==2.0.3 +boto3 diff --git a/tests/non_build_pyproject/serverless.yml 
b/tests/non_build_pyproject/serverless.yml new file mode 100644 index 00000000..d1bbaee6 --- /dev/null +++ b/tests/non_build_pyproject/serverless.yml @@ -0,0 +1,20 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + usePoetry: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/tests/non_poetry_pyproject/handler.py b/tests/non_poetry_pyproject/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/non_poetry_pyproject/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/non_poetry_pyproject/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/non_poetry_pyproject/pyproject.toml b/tests/non_poetry_pyproject/pyproject.toml new file mode 100644 index 00000000..41932632 --- /dev/null +++ b/tests/non_poetry_pyproject/pyproject.toml @@ -0,0 +1,10 @@ +[tool.black] +line-length = 79 +py36 = true +skip-string-normalization = true +exclude = ''' +/( + \.serverless + | node_modules +)/ +''' diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml new file mode 100644 index 00000000..7338b10b --- /dev/null +++ b/tests/non_poetry_pyproject/serverless.yml @@ -0,0 +1,17 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements + 
+package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/tests/pipenv/.gitignore b/tests/pipenv/.gitignore deleted file mode 100644 index cf9dab3c..00000000 --- a/tests/pipenv/.gitignore +++ /dev/null @@ -1,21 +0,0 @@ -# Distribution / packaging -.Python -env/ -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -*.egg-info/ -.installed.cfg -*.egg - -# Serverless directories -.serverless -.requirements diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 3798645f..30e51dda 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -1,8 +1,12 @@ [[source]] -url = "https://pypi.python.org/simple" +url = "https://pypi.org/simple" verify_ssl = true +name = "pypi" [packages] -Flask = "*" +Flask = "==2.0.3" bottle = "*" boto3 = "*" + +[dev-packages] +pytest = "*" diff --git a/tests/pipenv/Pipfile.lock b/tests/pipenv/Pipfile.lock deleted file mode 100644 index 84bde3cd..00000000 --- a/tests/pipenv/Pipfile.lock +++ /dev/null @@ -1,127 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "ef2bf8ae3e097071390b1bceee7f9b5944c959aea100e9f0ee6a53df3c57275b" - }, - "host-environment-markers": { - "implementation_name": "cpython", - "implementation_version": "3.6.3", - "os_name": "posix", - "platform_machine": "x86_64", - "platform_python_implementation": "CPython", - "platform_release": "4.13.0-32-generic", - "platform_system": "Linux", - "platform_version": "#35-Ubuntu SMP Thu Jan 25 09:13:46 UTC 2018", - "python_full_version": "3.6.3", - "python_version": "3.6", - "sys_platform": "linux" - }, - "pipfile-spec": 6, - "requires": {}, - "sources": [ - { - "url": "https://pypi.python.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "boto3": { - "hashes": [ - "sha256:49bda3ac6e69c2d0a34c37fc4ec47efd73e5f5cf86e34524b1918857aa74d797", - "sha256:5430b5cd532fe56ccc9eaf1ed433ac74805811b931ae1e44eb896af98a1297f0" - ], - "version": "==1.5.22" - }, - "botocore": { - 
"hashes": [ - "sha256:a91430f0bfbf7c13edc474c3f0d46449108aaebcd6d8e82a5bf9aebe17b42258", - "sha256:b2c9e0fd6d14910f759a33c19f8315dddedbb3c5569472b7be7ceed4f001a675" - ], - "version": "==1.8.36" - }, - "bottle": { - "hashes": [ - "sha256:39b751aee0b167be8dffb63ca81b735bbf1dd0905b3bc42761efedee8f123355" - ], - "version": "==0.12.13" - }, - "click": { - "hashes": [ - "sha256:29f99fc6125fbc931b758dc053b3114e55c77a6e4c6c3a2674a2dc986016381d", - "sha256:f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b" - ], - "version": "==6.7" - }, - "docutils": { - "hashes": [ - "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6", - "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", - "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274" - ], - "version": "==0.14" - }, - "flask": { - "hashes": [ - "sha256:0749df235e3ff61ac108f69ac178c9770caeaccad2509cb762ce1f65570a8856", - "sha256:49f44461237b69ecd901cc7ce66feea0319b9158743dd27a2899962ab214dac1" - ], - "version": "==0.12.2" - }, - "itsdangerous": { - "hashes": [ - "sha256:cbb3fcf8d3e33df861709ecaf89d9e6629cff0a217bc2848f1b41cd30d360519" - ], - "version": "==0.24" - }, - "jinja2": { - "hashes": [ - "sha256:74c935a1b8bb9a3947c50a54766a969d4846290e1e788ea44c1392163723c3bd", - "sha256:f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4" - ], - "version": "==2.10" - }, - "jmespath": { - "hashes": [ - "sha256:f11b4461f425740a1d908e9a3f7365c3d2e569f6ca68a2ff8bc5bcd9676edd63", - "sha256:6a81d4c9aa62caf061cb517b4d9ad1dd300374cd4706997aff9cd6aedd61fc64" - ], - "version": "==0.9.3" - }, - "markupsafe": { - "hashes": [ - "sha256:a6be69091dac236ea9c6bc7d012beab42010fa914c459791d627dad4910eb665" - ], - "version": "==1.0" - }, - "python-dateutil": { - "hashes": [ - "sha256:95511bae634d69bc7329ba55e646499a842bc4ec342ad54a8cdb65645a0aad3c", - "sha256:891c38b2a02f5bb1be3e4793866c8df49c7d19baabf9c1bad62547e0b4866aca" - ], - "version": "==2.6.1" - }, - 
"s3transfer": { - "hashes": [ - "sha256:23c156ca4d64b022476c92c44bf938bef71af9ce0dcd8fd6585e7bce52f66e47", - "sha256:10891b246296e0049071d56c32953af05cea614dca425a601e4c0be35990121e" - ], - "version": "==0.1.12" - }, - "six": { - "hashes": [ - "sha256:832dc0e10feb1aa2c68dcc57dbb658f1c7e65b9b61af69048abc87a2db00a0eb", - "sha256:70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9" - ], - "version": "==1.11.0" - }, - "werkzeug": { - "hashes": [ - "sha256:d5da73735293558eb1651ee2fddc4d0dedcfa06538b8813a2e20011583c9e49b", - "sha256:c3fd7a7d41976d9f44db327260e263132466836cef6f91512889ed60ad26557c" - ], - "version": "==0.14.1" - } - }, - "develop": {} -} diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/pipenv/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index c53d13ee..b07744c9 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-4.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index b8ebb38b..2b471526 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,23 +2,28 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: 
${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false dockerizePip: false package: - exclude: - - '**/*' - include: - - handler.py + patterns: + - '!**/*' + - 'handler.py' functions: hello: diff --git a/tests/poetry/_slimPatterns.yml b/tests/poetry/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/poetry/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/poetry/handler.py b/tests/poetry/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry/package.json b/tests/poetry/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/poetry/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml new file mode 100644 index 00000000..896b48e7 --- /dev/null +++ b/tests/poetry/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Flask = "2.0" +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +boto3 = "1.29.6" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml new file 
mode 100644 index 00000000..d10c4997 --- /dev/null +++ b/tests/poetry/serverless.yml @@ -0,0 +1,31 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/tests/poetry_individually/module1/handler.py b/tests/poetry_individually/module1/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_individually/module1/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml new file mode 100644 index 00000000..896b48e7 --- /dev/null +++ b/tests/poetry_individually/module1/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Flask = "2.0" +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +boto3 = "1.29.6" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json new file mode 100644 index 00000000..b07744c9 --- 
/dev/null +++ b/tests/poetry_individually/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml new file mode 100644 index 00000000..86dbb547 --- /dev/null +++ b/tests/poetry_individually/serverless.yml @@ -0,0 +1,32 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + individually: true + +functions: + hello: + handler: handler.hello + module: module1 + package: + patterns: + - 'module1/**' diff --git a/tests/poetry_packages/_poetryGroups.yml b/tests/poetry_packages/_poetryGroups.yml new file mode 100644 index 00000000..25abd07a --- /dev/null +++ b/tests/poetry_packages/_poetryGroups.yml @@ -0,0 +1,8 @@ +empty: [] +poetryWithGroups: + - custom1 + - custom2 +poetryWithoutGroups: + - custom1 +poetryOnlyGroups: + - custom2 diff --git a/tests/poetry_packages/_slimPatterns.yml b/tests/poetry_packages/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/poetry_packages/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git 
a/tests/poetry_packages/handler.py b/tests/poetry_packages/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_packages/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/poetry_packages/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml new file mode 100644 index 00000000..0f9fc705 --- /dev/null +++ b/tests/poetry_packages/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Flask = "2.0" + +[tool.poetry.group.custom1.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} + +[tool.poetry.group.custom2.dependencies] +boto3 = "1.29.6" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml new file mode 100644 index 00000000..c6972ede --- /dev/null +++ b/tests/poetry_packages/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + 
slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} + poetryWithGroups: ${file(./_poetryGroups.yml):${env:poetryWithGroups, "empty"}} + poetryWithoutGroups: ${file(./_poetryGroups.yml):${env:poetryWithoutGroups, "empty"}} + poetryOnlyGroups: ${file(./_poetryGroups.yml):${env:poetryOnlyGroups, "empty"}} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/tests/scaleway_provider/_slimPatterns.yml b/tests/scaleway_provider/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/scaleway_provider/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/scaleway_provider/handler.py b/tests/scaleway_provider/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/scaleway_provider/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/scaleway_provider/package.json b/tests/scaleway_provider/package.json new file mode 100644 index 00000000..d54b88e0 --- /dev/null +++ b/tests/scaleway_provider/package.json @@ -0,0 +1,15 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz", + "serverless-scaleway-functions": "^0.4.8" + } +} diff --git a/tests/scaleway_provider/requirements.txt b/tests/scaleway_provider/requirements.txt new file mode 100644 index 00000000..23bfb7a6 --- 
/dev/null +++ b/tests/scaleway_provider/requirements.txt @@ -0,0 +1,3 @@ +flask==0.12.5 +bottle +boto3 diff --git a/tests/scaleway_provider/serverless.yml b/tests/scaleway_provider/serverless.yml new file mode 100644 index 00000000..5d827bdf --- /dev/null +++ b/tests/scaleway_provider/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +configValidationMode: off + +provider: + name: scaleway + runtime: python39 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions + +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/unzip_requirements.py b/unzip_requirements.py index 4e3b9e51..68f907fa 100644 --- a/unzip_requirements.py +++ b/unzip_requirements.py @@ -6,15 +6,17 @@ pkgdir = '/tmp/sls-py-req' -sys.path.append(pkgdir) +# We want our path to look like [working_dir, serverless_requirements, ...] 
+sys.path.insert(1, pkgdir) if not os.path.exists(pkgdir): tempdir = '/tmp/_temp-sls-py-req' if os.path.exists(tempdir): shutil.rmtree(tempdir) - zip_requirements = os.path.join( - os.environ.get('LAMBDA_TASK_ROOT', os.getcwd()), '.requirements.zip') + default_lambda_task_root = os.environ.get('LAMBDA_TASK_ROOT', os.getcwd()) + lambda_task_root = os.getcwd() if os.environ.get('IS_LOCAL') == 'true' else default_lambda_task_root + zip_requirements = os.path.join(lambda_task_root, '.requirements.zip') zipfile.ZipFile(zip_requirements, 'r').extractall(tempdir) os.rename(tempdir, pkgdir) # Atomic