diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 00000000..bde39a55 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,68 @@ +name: 🐛 Bug report +description: Create a bug report +body: + - type: checkboxes + attributes: + label: Are you certain it's a bug? + description: If you're uncertain, please report at https://github.com/serverless/serverless-python-requirements/discussions instead + options: + - label: Yes, it looks like a bug + required: true + - type: checkboxes + attributes: + label: Are you using the latest plugin release? + description: Latest version can be checked at https://github.com/serverless/serverless-python-requirements/releases/latest + options: + - label: Yes, I'm using the latest plugin release + required: true + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Issue description + validations: + required: true + - type: textarea + attributes: + label: Service configuration (serverless.yml) content + description: | + Provide COMPLETE content of serverless.yml, ensuring that: + • It consistently reproduces described issue + • It's as minimal as possible + • Ideally with no other plugins involved + • Has sensitive parts masked out + + If not applicable, fill with "N/A" + render: yaml + validations: + required: true + - type: input + attributes: + label: Command name and used flags + description: | + Full command name with used flags (If not applicable, fill with "N/A") + placeholder: serverless [...flags] + validations: + required: true + - type: textarea + attributes: + label: Command output + description: | + COMPLETE command output. 
+ + If not applicable, fill with "N/A" + render: shell + validations: + required: true + - type: textarea + attributes: + label: Environment information + description: '"serverless --version" output + used version of the plugin' + render: shell + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..a7f83c6b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Question + url: https://github.com/serverless/serverless-python-requirements/discussions + about: Please ask and answer questions here diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 00000000..14907ec2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,21 @@ +name: 🎉 Feature request +description: Suggest an idea +body: + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Use case description + description: Describe the use case that needs to be addressed + validations: + required: true + - type: textarea + attributes: + label: Proposed solution (optional) + description: | + e.g. 
propose how the configuration and implementation of the new feature could look diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index ac29398e..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: 2 -updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - time: "10:00" - open-pull-requests-limit: 10 - ignore: - - dependency-name: eslint - versions: - - "> 7.22.0" diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml new file mode 100644 index 00000000..01fb27a3 --- /dev/null +++ b/.github/workflows/integrate.yml @@ -0,0 +1,34 @@ +# master only + +name: Integrate + +on: + push: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + tagIfNewVersion: + name: Tag if new version + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + # Ensure to have complete history of commits pushed with given push operation + # It's loose and imperfect assumption that no more than 30 commits will be pushed at once + fetch-depth: 30 + # Tag needs to be pushed with real user token, otherwise pushed tag won't trigger the actions workflow + # Hence we're passing 'serverless-ci' user authentication token + token: ${{ secrets.USER_GITHUB_TOKEN }} + + - name: Tag if new version + run: | + NEW_VERSION=`git diff -U0 ${{ github.event.before }} package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + git tag v$NEW_VERSION + git push --tags + fi diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1e6b9ee8..00000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Lint - -on: [push, pull_request] - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - name: Set up Node ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: 
Install deps - run: npm install - - - name: Lint - run: npm run ci:lint diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6a1e7d26..0e3dc867 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,18 +1,52 @@ +# Version tags only + name: Publish -on: [release] +on: + push: + tags: + - v[0-9]+.[0-9]+.[0-9]+ jobs: - publish-npm: + publish: + name: Publish runs-on: ubuntu-latest + env: + # It'll work with secrets.GITHUB_TOKEN (which is provided by GitHub unconditionally) + # Still then release author would be "github-actions". It's better if it's dedicated repo bot + GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} steps: - - uses: actions/checkout@v2 + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Retrieve node_modules from cache + id: cacheNodeModules + uses: actions/cache@v4 + with: + path: | + ~/.npm + node_modules + key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - - uses: actions/setup-node@v2 + - name: Install Node.js and npm + uses: actions/setup-node@v4 with: - version: 14 - registry-url: https://registry.npmjs.org/ + node-version: 18.x + registry-url: https://registry.npmjs.org - - run: npm publish + - name: Publish new version env: - NODE_AUTH_TOKEN: ${{secrets.npm_token}} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npm publish + + # Note: No need to install dependencies as: + # 1. We have retrieved cached `node_modules` for very same `package.json` + # as stored with recent `master `build + # 2. 
If for some reason cache retrieval fails `npx` will download and install + # `github-release-from-cc-changelog` + + - name: Publish release notes + run: | + TEMP_ARRAY=($(echo $GITHUB_REF | tr "/" "\n")) + TAG=${TEMP_ARRAY[@]: -1} + npx github-release-from-cc-changelog $TAG diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index f89486bf..00000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Test - -on: [push, pull_request] - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [2.7, 3.6] - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Set up Node - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install deps - run: npm install - - - name: Test - run: npm run test - env: - LC_ALL: C.UTF-8 - LANG: C.UTF-8 - if: matrix.os != 'macOS-latest' - - - name: Test (Mac) - run: npm run test - env: - LC_ALL: en_US.UTF-8 - LANG: en_US.UTF-8 - if: matrix.os == 'macOS-latest' diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml new file mode 100644 index 00000000..23e2d67f --- /dev/null +++ b/.github/workflows/validate.yml @@ -0,0 +1,92 @@ +# PR's only + +name: Validate + +on: + pull_request: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode18: + name: '[Linux] Node.js v18: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] + steps: + - 
name: Checkout repository + uses: actions/checkout@v4 + with: + # For commitlint purpose ensure to have complete list of PR commits + # It's loose and imperfect assumption that PR has no more than 30 commits + fetch-depth: 30 + + - name: Retrieve last master commit (for `git diff` purposes) + run: | + git checkout -b pr + git fetch --prune --depth=30 origin +refs/heads/master:refs/remotes/origin/master + git checkout master + git checkout pr + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v4 + with: + path: | + ~/.npm + node_modules + key: npm-v18-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v18-${{ runner.os }}-${{ github.ref }}- + npm-v18-${{ runner.os }}-refs/heads/master- + + - name: Set up Python 3.9 + uses: actions/setup-python@v5 + with: + python-version: 3.9 + + - name: Install Node.js and npm + uses: actions/setup-node@v4 + with: + node-version: 18.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry && poetry self add poetry-plugin-export + + - name: Install serverless + run: npm install -g serverless@${{ matrix.sls-version }} + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Validate Prettier formatting + run: npm run prettier-check:updated + - name: Validate ESLint rules + run: npm run lint:updated + - name: Validate commit messages + if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id + run: npx commitlint -f master + - name: Validate changelog (if new version) + run: | + NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + npx 
dump-release-notes-from-cc-changelog $NEW_VERSION + fi + - name: Unit tests + run: npm test + env: + SERVERLESS_PLATFORM_STAGE: dev + SERVERLESS_LICENSE_KEY: ${{ secrets.SERVERLESS_LICENSE_KEY }} diff --git a/.gitignore b/.gitignore index ab0317f3..64bdbd6a 100644 --- a/.gitignore +++ b/.gitignore @@ -59,7 +59,6 @@ dist/ downloads/ eggs/ .eggs/ -lib/ lib64/ parts/ sdist/ @@ -77,3 +76,6 @@ unzip_requirements.py # Project ignores puck/ serverless.yml.bak + +# Generated packaging +*.tgz diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..bd28b9c5 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.9 diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..42026cdc --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,103 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. + +### [6.1.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.1...v6.1.2) (2025-02-11) + +### Bug Fixes + +- Use absolute paths to ensure compatibility with v4 Compose ([#854](https://github.com/UnitedIncome/serverless-python-requirements/issues/854)) ([bceb737](https://github.com/UnitedIncome/serverless-python-requirements/commit/bceb7371dd64d59829377fe6fd16e17f631d0251)) + +### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03) + +## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) + +### Features + +- Support Scaleway provider ([#812](https://github.com/UnitedIncome/serverless-python-requirements/issues/812)) ([1b0faae](https://github.com/UnitedIncome/serverless-python-requirements/commit/1b0faaeb6aadd2bc4b1b53526e35298a98d00aca)) ([Andy Méry](https://github.com/cyclimse)) +- Improved pip failure logging 
([#813](https://github.com/UnitedIncome/serverless-python-requirements/issues/813)) ([787b479](https://github.com/UnitedIncome/serverless-python-requirements/commit/787b4791306e9a3ded5f0177c304cfbce081c119)) ([Justin Lyons](https://github.com/babyhuey)) + +### Bug Fixes + +- Ensure proper support for mixed runtimes and architectures ([#815](https://github.com/UnitedIncome/serverless-python-requirements/issues/815)) ([27b70f4](https://github.com/UnitedIncome/serverless-python-requirements/commit/27b70f4d6a7e43fd0e9711bbb56752fee2762901)) ([Stijn IJzermans](https://github.com/stijzermans)) + +### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) + +### Bug Fixes + +- Add legacy `pipenv` backward compatability ([#742](https://github.com/UnitedIncome/serverless-python-requirements/issues/742)) ([22a1f83](https://github.com/UnitedIncome/serverless-python-requirements/commit/22a1f832ac8051f0963328743f9e768f8e66649e)) ([Randy Westergren](https://github.com/rwestergren)) +- Not crash when runtime is not `python` ([#773](https://github.com/UnitedIncome/serverless-python-requirements/issues/773)) ([c1f5ca1](https://github.com/UnitedIncome/serverless-python-requirements/commit/c1f5ca114de815ca19ad213a79e250b5b81f29b3)) ([Jim Kirkbride](https://github.com/jameskbride)) +- Remove outdated Pipenv requirements flag ([#780](https://github.com/UnitedIncome/serverless-python-requirements/issues/780)) ([ad40278](https://github.com/UnitedIncome/serverless-python-requirements/commit/ad40278629c63f4d0971637214b4d9bc20dbd288)) ([Jeff Gordon](https://github.com/jfgordon2)) + +### Maintenance Improvements + +- Fix integration test matrix configuration ([#755](https://github.com/UnitedIncome/serverless-python-requirements/issues/755)) ([e8b2e51](https://github.com/UnitedIncome/serverless-python-requirements/commit/e8b2e51c265792046bacc3946f22f7bd842c60e6)) ([Randy Westergren](https://github.com/rwestergren)) + +## 
[6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) + +### ⚠ BREAKING CHANGES + +- Changes default `dockerImage` used for building dependencies (now uses images from `public.ecr.aws/sam` repository) +- Requires `pipenv` in version `2022-04-08` or higher + +### Features + +- Introduce `requirePoetryLockFile` flag ([#728](https://github.com/serverless/serverless-python-requirements/pull/728)) ([e81d9e1](https://github.com/UnitedIncome/serverless-python-requirements/commit/e81d9e1824c135f110b4deccae2c26b0cbb26778)) ([François-Michel L'Heureux](https://github.com/FinchPowers)) +- Switch to official AWS docker images by default ([#724](https://github.com/UnitedIncome/serverless-python-requirements/issues/724)) ([4ba3bbe](https://github.com/UnitedIncome/serverless-python-requirements/commit/4ba3bbeb9296b4844feb476de695f33ee2a30056)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Bug Fixes + +- Adapt to support latest `pipenv` version ([#718](https://github.com/UnitedIncome/serverless-python-requirements/issues/718)) ([853da8d](https://github.com/UnitedIncome/serverless-python-requirements/commit/853da8d39921dc83a23d59fd825b2180814f87ff)) ([Anders Steiner](https://github.com/andidev) & [Randy Westergren](https://github.com/rwestergren) & [Piotr Grzesik](https://github.com/pgrzesik)) +- Properly recognize individual function ([#725](https://github.com/UnitedIncome/serverless-python-requirements/issues/725)) ([78795be](https://github.com/UnitedIncome/serverless-python-requirements/commit/78795be24eb08dc78acd7566778b3960c28b263c)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Maintenance Improvements + +- Improve error message for docker failures ([#723](https://github.com/serverless/serverless-python-requirements/pull/723))([cc146d0](https://github.com/UnitedIncome/serverless-python-requirements/commit/cc146d088d362187641dd5ae3e9d0129a14c60e2)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +## 
[5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) + +### Features + +- Support `dockerPrivateKey` to specify path to SSH key ([#674](https://github.com/UnitedIncome/serverless-python-requirements/issues/674)) ([915bcad](https://github.com/UnitedIncome/serverless-python-requirements/commit/915bcadad2f8a3be5434d6e42771bc835271baf8)) ([Marcin Szleszynski](https://github.com/martinezpl)) +- Support individual packaging with `poetry` ([#682](https://github.com/UnitedIncome/serverless-python-requirements/issues/682)) ([ebd12cb](https://github.com/UnitedIncome/serverless-python-requirements/commit/ebd12cb14ea352fb08c0957f213bda7dcce800df)) ([Brandon White](https://github.com/BrandonLWhite)) + +### Maintenance Improvements + +- Log child process command output on error ([#679](https://github.com/UnitedIncome/serverless-python-requirements/issues/679)) ([ff11497](https://github.com/UnitedIncome/serverless-python-requirements/commit/ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7)) ([Andrei Zhemaituk](https://github.com/zhemaituk)) +- Replace `lodash.set` with `set-value` ([#676](https://github.com/UnitedIncome/serverless-python-requirements/issues/676)) ([3edf0e0](https://github.com/UnitedIncome/serverless-python-requirements/commit/3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16)) ([Marc Hassan](https://github.com/mhassan1)) + +### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) + +### Bug Fixes + +- Address unknown path format error in `wsl2` ([#667](https://github.com/UnitedIncome/serverless-python-requirements/issues/667)) ([b16c82d](https://github.com/UnitedIncome/serverless-python-requirements/commit/b16c82dbdd31ca7f61093bb6b8ed50be31908a24)) ([Shinichi Makino](https://github.com/snicmakino)) + +## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) + +### Features + +- Support requirements layer caching 
([#644](https://github.com/UnitedIncome/serverless-python-requirements/issues/644)) ([406f6ba](https://github.com/UnitedIncome/serverless-python-requirements/commit/406f6bac1ca934a34387048b5c00242aff3f581b)) ([Maciej Wilczyński](https://github.com/mLupine)) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) + +### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([#656](https://github.com/serverless/serverless-python-requirements/pull/656)) ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) + +### Maintenance Improvements + +- Adapt plugin to modern logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([8ff97e6](https://github.com/UnitedIncome/serverless-python-requirements/commit/8ff97e6b7c279334e417dbdb65e64d0de2656986)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt to `async` version of `spawn` ([#648](https://github.com/serverless/serverless-python-requirements/pull/648)) ([50c2850](https://github.com/UnitedIncome/serverless-python-requirements/commit/50c2850874ded795fd50ae377f1db817a0212e7d)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt v3 log writing interfaces ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([a79899a](https://github.com/UnitedIncome/serverless-python-requirements/commit/a79899ae5f6f66aa0c65e7fda8e0186d38ff446e)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Ensure proper verbose progress logs 
([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([44b9591](https://github.com/UnitedIncome/serverless-python-requirements/commit/44b9591f01157a1811e3ca8b43e21265a155a976)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Use `ServerlessError` ([#649](https://github.com/serverless/serverless-python-requirements/pull/649)) ([cdb7111](https://github.com/UnitedIncome/serverless-python-requirements/commit/cdb71110bc9c69b5087b6e18fb353d65962afe4a)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..9d7afa9c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,75 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting our team at **hello@serverless.com**. As an alternative +feel free to reach out to any of us personally. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..900a425b --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,105 @@ +# Contributing Guidelines + +We are always looking to promote good contributors to be maintainers and provide them a front-row seat to serverless innovation. + +If you would like to be a maintainer for the [Serverless Framework](https://github.com/serverless/serverless) or any of our plugins, please get started with making code contributions and engaging with open issues/PRs. Also, please reach out to any of [Serverless organization](https://github.com/serverless) members to express your interest. + +We'd love to collaborate closely with amazing developers as we drive the development of this open technology into the future. + +Welcome, and thanks in advance for your help! 
+ +# How to contribute to `serverless-python-requirements` + +## Setup + +Pre-Reqs: + +- Python 3.9 +- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.9) +- Perl (used in the tests) +- Node v14 or v16 + +Then, to begin development: + +1. fork the repository +2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) +3. run `npm install` in its root folder +4. run the tests via `npm run test` + +## Getting started + +A good first step is to search for open [issues](https://github.com/serverless/serverless-python-requirements/issues). Issues are labeled, and some good issues to start with are labeled: [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) and [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted). + +## When you propose a new feature or bug fix + +Please make sure there is an open issue discussing your contribution before jumping into a Pull Request! +There are just a few situations (listed below) in which it is fine to submit PR without a corresponding issue: + +- Documentation update +- Obvious bug fix +- Maintenance improvement + +In all other cases please check if there's an open issue discussing the given proposal, if there is not, create an issue respecting all its template remarks. + +In non-trivial cases please propose and let us review an implementation spec (in the corresponding issue) before jumping into implementation. + +Do not submit draft PRs. Submit only finalized work which is ready for merge. If you have any doubts related to implementation work please discuss in the corresponding issue. + +Once a PR has been reviewed and some changes are suggested, please ensure to **re-request review** after all new changes are pushed. 
It's the best and quietest way to inform maintainers that your work is ready to be checked again. + +## When you want to work on an existing issue + +**Note:** Please write a quick comment in the corresponding issue and ask if the feature is still relevant and that you want to jump into the implementation. + +Check out our [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted) or [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) labels to find issues we want to move forward with your help. + +We will do our best to respond/review/merge your PR according to priority. We hope that you stay engaged with us during this period to ensure QA. Please note that the PR will be closed if there hasn't been any activity for a long time (~ 30 days) to keep us focused and keep the repo clean. + +## Reviewing Pull Requests + +Another really useful way to contribute is to review other people's Pull Requests. Having feedback from multiple people is helpful and reduces the overall time to make a final decision about the Pull Request. + +## Providing support + +The easiest thing you can do to help us move forward and make an impact on our progress is to simply provide support to other people having difficulties with their projects. + +You can do that by replying to [issues on GitHub](https://github.com/serverless/serverless-python-requirements/issues), chatting with other community members in [our Community Slack](https://www.serverless.com/slack), or [GitHub Discussions](https://github.com/serverless/serverless-python-requirements/discussions). + +--- + +# Code Style + +We aim for a clean, consistent code style. We're using [Prettier](https://prettier.io/) to confirm one code formatting style and [ESlint](https://eslint.org/) helps us to stay away from obvious issues that can be picked via static analysis. 
+ +Ideally, you should have Prettier and ESlint integrated into your code editor, which will help you not think about specific rules and be sure you submit the code that follows guidelines. + +## Verifying prettier formatting + +``` +npm run prettier-check +``` + +## Verifying linting style + +``` +npm run lint +``` + +## Other guidelines + +- Minimize [lodash](https://lodash.com/) usage - resort to it, only if given part of logic cannot be expressed easily with native language constructs +- When writing asynchronous code, ensure to take advantage of [async functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) and native `Promise` API. Do not rely on [Bluebird](http://bluebirdjs.com) even though still large parts of old code rely on it. We're looking forward to drop this dependency in the near future. + +# Testing + +When proposing a new feature or fixing a bug, it is recommended to also provide sufficient test coverage. All tests live in `./test.js` module. + +# Our Code of Conduct + +Finally, to make sure you have a pleasant experience while being in our welcoming community, please read our [code of conduct](CODE_OF_CONDUCT.md). It outlines our core values and beliefs and will make working together a happier experience. + +Thanks again for being a contributor to the Serverless Community :tada:! + +Cheers, + +The :zap: [Serverless](http://www.serverless.com) Team diff --git a/README.md b/README.md index 518d5ce1..d9127adb 100644 --- a/README.md +++ b/README.md @@ -5,15 +5,15 @@ [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. 
+A Serverless Framework plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. ---- +--- _Originally developed by [**Capital One**](https://www.capitalone.com/tech/open-source/), now maintained in scope of Serverless, Inc_ _Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ ---- +--- ## Install @@ -33,8 +33,7 @@ If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-p ## Cross compiling Compiling non-pure-Python modules or fetching their manylinux wheels is -supported on non-linux OSs via the use of Docker and the -[docker-lambda](https://github.com/lambci/docker-lambda) image. +supported on non-linux OSs via the use of Docker and [official AWS build](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-image-repositories.html) images. To enable docker usage, add the following to your `serverless.yml`: ```yaml @@ -77,8 +76,20 @@ custom: ``` The `dockerSsh` option will mount your `$HOME/.ssh/id_rsa` and `$HOME/.ssh/known_hosts` as a -volume in the docker container. If your SSH key is password protected, you can use `ssh-agent` -because `$SSH_AUTH_SOCK` is also mounted & the env var set. +volume in the docker container. 
+ +In case you want to use a different key, you can specify the path (absolute) to it through `dockerPrivateKey` option: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerSsh: true + dockerPrivateKey: /home/.ssh/id_ed25519 +``` + +If your SSH key is password protected, you can use `ssh-agent` +because `$SSH_AUTH_SOCK` is also mounted & the env var is set. It is important that the host of your private repositories has already been added in your `$HOME/.ssh/known_hosts` file, as the install process will fail otherwise due to host authenticity failure. @@ -97,8 +108,9 @@ custom: ## :sparkles::cake::sparkles: Pipenv support -If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use -`pipenv lock -r` to generate them. It is fully compatible with all options such as `zip` and +Requires `pipenv` in version `2022-04-08` or higher. + +If you include a `Pipfile` and have `pipenv` installed, this will use `pipenv` to generate requirements instead of a `requirements.txt`. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: ```yaml @@ -119,6 +131,25 @@ custom: usePoetry: false ``` +Be aware that if no `poetry.lock` file is present, a new one will be generated on the fly. To help having predictable builds, +you can set the `requirePoetryLockFile` flag to true to throw an error when `poetry.lock` is missing. + +```yaml +custom: + pythonRequirements: + requirePoetryLockFile: false +``` + +If your Poetry configuration includes custom dependency groups, they will not be installed automatically. To include them in the deployment package, use the `poetryWithGroups`, `poetryWithoutGroups` and `poetryOnlyGroups` options which wrap `poetry export`'s `--with`, `--without` and `--only` parameters. 
+ +```yaml +custom: + pythonRequirements: + poetryWithGroups: + - internal_dependencies + - lambda_dependencies +``` + ### Poetry with git dependencies Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter @@ -213,7 +244,7 @@ the names in `slimPatterns` #### Option not to strip binaries -In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with +In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with: ```yaml custom: @@ -348,6 +379,9 @@ custom: ### Per-function requirements +**Note: this feature does not work with Pipenv/Poetry, it requires `requirements.txt` +files for your Python modules.** + If you have different python functions, with different sets of requirements, you can avoid including all the unecessary dependencies of your functions by using the following structure: @@ -416,12 +450,27 @@ functions: vendor: ./hello-vendor # The option is also available at the function level ``` -## Manual invocations +## Manual invocation + +The `.requirements` and `requirements.zip` (if using zip support) files are left +behind to speed things up on subsequent deploys. To clean them up, run: + +```plaintext +sls requirements clean +``` + +You can also create them (and `unzip_requirements` if +using zip support) manually with: -The `.requirements` and `requirements.zip`(if using zip support) files are left -behind to speed things up on subsequent deploys. To clean them up, run -`sls requirements clean`. You can also create them (and `unzip_requirements` if -using zip support) manually with `sls requirements install`. 
+```plaintext
+sls requirements install
+```
+
+The pip download/static cache is outside the serverless folder, and should be manually cleaned when, e.g., changing Python versions:
+
+```plaintext
+sls requirements cleanCache
+```
 
 ## Invalidate requirements caches on package
 
@@ -463,10 +512,10 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w
 
 ## Native Code Dependencies During Build
 
-Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like:
+Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image with a `Dockerfile` like:
 
 ```dockerfile
-FROM lambci/lambda:build-python3.6
+FROM public.ecr.aws/sam/build-python3.9
 
 # Install your dependencies
 RUN yum -y install mysql-devel
@@ -531,9 +580,34 @@ package:
     - '**'
 ```
 
+## Custom Provider Support
+
+### Scaleway
+
+This plugin is compatible with the [Scaleway Serverless Framework Plugin](https://github.com/scaleway/serverless-scaleway-functions) to package dependencies for Python functions deployed on [Scaleway](https://www.scaleway.com/en/serverless-functions/).
To use it, add the following to your `serverless.yml`: + +```yaml +provider: + name: scaleway + runtime: python311 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions +``` + +To handle native dependencies, it's recommended to use the Docker builder with the image provided by Scaleway: + +```yaml +custom: + pythonRequirements: + # Can use any Python version supported by Scaleway + dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.11 +``` + ## Contributors -- [@dschep](https://github.com/dschep) - Lead developer & original maintainer +- [@dschep](https://github.com/dschep) - Original developer - [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - [@abetomo](https://github.com/abetomo) - style & linting - [@angstwad](https://github.com/angstwad) - `deploy --function` support @@ -557,12 +631,13 @@ package: - [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
- Poetry support - - [@squaresurf](https://github.com/squaresurf) - - [@drice](https://github.com/drice) - - [@ofercaspi](https://github.com/ofercaspi) - - [@tpansino](https://github.com/tpansino) + - [@squaresurf](https://github.com/squaresurf) + - [@drice](https://github.com/drice) + - [@ofercaspi](https://github.com/ofercaspi) + - [@tpansino](https://github.com/tpansino) - [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support - [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues - [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment - [@rileypriddle](https://github.com/rileypriddle) - Introducing schema validation for `module` property +- [@martinezpl](https://github.com/martinezpl) - Fixing test issues, adding `dockerPrivateKey` option diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..d23a0d6b --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = { + rules: { + 'body-leading-blank': [2, 'always'], + 'footer-leading-blank': [2, 'always'], + 'header-max-length': [2, 'always', 72], + 'scope-enum': [2, 'always', ['', 'Config', 'Log']], + 'subject-case': [2, 'always', 'sentence-case'], + 'subject-empty': [2, 'never'], + 'subject-full-stop': [2, 'never', '.'], + 'type-case': [2, 'always', 'lower-case'], + 'type-empty': [2, 'never'], + 'type-enum': [ + 2, + 'always', + [ + 'build', + 'chore', + 'ci', + 'docs', + 'feat', + 'fix', + 'perf', + 'refactor', + 'style', + 'test', + ], + ], + }, +}; diff --git a/example/serverless.yml b/example/serverless.yml index 9b58ead1..e5c4c924 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/example_native_deps/serverless.yml 
b/example_native_deps/serverless.yml index 0f4e632a..cfbd4913 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/index.js b/index.js index 2072bbc1..44906956 100644 --- a/index.js +++ b/index.js @@ -7,15 +7,13 @@ const values = require('lodash.values'); const { addVendorHelper, removeVendorHelper, - packRequirements + packRequirements, } = require('./lib/zip'); const { injectAllRequirements } = require('./lib/inject'); const { layerRequirements } = require('./lib/layer'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); -const { pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); - BbPromise.promisifyAll(fse); /** @@ -45,24 +43,40 @@ class ServerlessPythonRequirements { : this.serverless.service.provider.runtime || 'python', dockerizePip: false, dockerSsh: false, + dockerPrivateKey: null, dockerImage: null, dockerFile: null, dockerEnv: false, dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], dockerExtraFiles: [], + dockerRootless: false, useStaticCache: true, useDownloadCache: true, cacheLocation: false, staticCacheMaxVersions: 0, pipCmdExtraArgs: [], noDeploy: [], - vendor: '' + vendor: '', + requirePoetryLockFile: false, + poetryWithGroups: [], + poetryWithoutGroups: [], + poetryOnlyGroups: [], }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || {} ); + if ( + options.pythonBin === this.serverless.service.provider.runtime && + !options.pythonBin.startsWith('python') + ) { + options.pythonBin = 'python'; + } + if (/python3[0-9]+/.test(options.pythonBin)) { + // "google" and "scaleway" providers' runtimes use python3XX + options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1'); + } if 
(options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } @@ -71,12 +85,21 @@ class ServerlessPythonRequirements { } if ( !options.dockerizePip && - (options.dockerSsh || options.dockerImage || options.dockerFile) + (options.dockerSsh || + options.dockerImage || + options.dockerFile || + options.dockerPrivateKey) ) { if (!this.warningLogged) { - this.serverless.cli.log( - 'WARNING: You provided a docker related option but dockerizePip is set to false.' - ); + if (this.log) { + this.log.warning( + 'You provided a docker related option but dockerizePip is set to false.' + ); + } else { + this.serverless.cli.log( + 'WARNING: You provided a docker related option but dockerizePip is set to false.' + ); + } this.warningLogged = true; } } @@ -84,11 +107,8 @@ class ServerlessPythonRequirements { throw new Error( 'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.' ); - } else if (!options.dockerFile) { - // If no dockerFile is provided, use default image - const defaultImage = `lambci/lambda:build-${this.serverless.service.provider.runtime}`; - options.dockerImage = options.dockerImage || defaultImage; } + if (options.layer) { // If layer was set as a boolean, set it to an empty object to use the layer defaults. if (options.layer === true) { @@ -101,7 +121,7 @@ class ServerlessPythonRequirements { get targetFuncs() { let inputOpt = this.serverless.processedInput.options; return inputOpt.function - ? [inputOpt.functionObj] + ? 
[this.serverless.service.functions[inputOpt.function]] : values(this.serverless.service.functions).filter((func) => !func.image); } @@ -109,9 +129,10 @@ class ServerlessPythonRequirements { * The plugin constructor * @param {Object} serverless * @param {Object} options + * @param {Object} v3Utils * @return {undefined} */ - constructor(serverless) { + constructor(serverless, cliOptions, v3Utils) { this.serverless = serverless; this.servicePath = this.serverless.config.servicePath; this.warningLogged = false; @@ -127,36 +148,55 @@ class ServerlessPythonRequirements { }, }); } + + if (v3Utils) { + this.log = v3Utils.log; + this.progress = v3Utils.progress; + this.writeText = v3Utils.writeText; + } + this.commands = { requirements: { commands: { clean: { usage: 'Remove .requirements and requirements.zip', - lifecycleEvents: ['clean'] + lifecycleEvents: ['clean'], }, install: { usage: 'install requirements manually', - lifecycleEvents: ['install'] + lifecycleEvents: ['install'], }, cleanCache: { usage: 'Removes all items in the pip download/static cache (if present)', - lifecycleEvents: ['cleanCache'] - } - } - } + lifecycleEvents: ['cleanCache'], + }, + }, + }, }; - if (this.serverless.cli.generateCommandsHelp) { - Object.assign(this.commands.requirements, { - usage: 'Serverless plugin to bundle Python packages', - lifecycleEvents: ['requirements'] - }); - } else { - this.commands.requirements.type = 'container'; - } + if (this.serverless.cli.generateCommandsHelp) { + Object.assign(this.commands.requirements, { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'], + }); + } else { + this.commands.requirements.type = 'container'; + } + + this.dockerImageForFunction = (funcOptions) => { + const runtime = + funcOptions.runtime || this.serverless.service.provider.runtime; + + const architecture = + funcOptions.architecture || + this.serverless.service.provider.architecture || + 'x86_64'; + const defaultImage = 
`public.ecr.aws/sam/build-${runtime}:latest-${architecture}`; + return this.options.dockerImage || defaultImage; + }; - const isFunctionRuntimePython = args => { + const isFunctionRuntimePython = (args) => { // If functionObj.runtime is undefined, python. if (!args[1].functionObj || !args[1].functionObj.runtime) { return true; @@ -165,9 +205,7 @@ class ServerlessPythonRequirements { }; const clean = () => - BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper); + BbPromise.bind(this).then(cleanup).then(removeVendorHelper); const setupArtifactPathCapturing = () => { // Reference: @@ -188,7 +226,6 @@ class ServerlessPythonRequirements { } return BbPromise.bind(this) .then(pipfileToRequirements) - .then(pyprojectTomlToRequirements) .then(addVendorHelper) .then(installAllRequirements) .then(packRequirements) @@ -231,7 +268,7 @@ class ServerlessPythonRequirements { }, 'requirements:install:install': before, 'requirements:clean:clean': clean, - 'requirements:cleanCache:cleanCache': cleanCache + 'requirements:cleanCache:cleanCache': cleanCache, }; } } diff --git a/lib/clean.js b/lib/clean.js index e0bff238..8aaf331e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -38,8 +38,15 @@ function cleanup() { function cleanupCache() { const cacheLocation = getUserCachePath(this.options); if (fse.existsSync(cacheLocation)) { + let cleanupProgress; if (this.serverless) { - this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + if (this.log) { + cleanupProgress = this.progress.get('python-cleanup-cache'); + cleanupProgress.notice('Removing static caches'); + this.log.info(`Removing static caches at: ${cacheLocation}`); + } else { + this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + } } // Only remove cache folders that we added, just incase someone accidentally puts a weird @@ -50,10 +57,16 @@ function cleanupCache() { .forEach((file) => { promises.push(fse.removeAsync(file)); }); - return BbPromise.all(promises); + return 
BbPromise.all(promises).finally( + () => cleanupProgress && cleanupProgress.remove() + ); } else { if (this.serverless) { - this.serverless.cli.log(`No static cache found`); + if (this.log) { + this.log.info(`No static cache found`); + } else { + this.serverless.cli.log(`No static cache found`); + } } return BbPromise.resolve(); } diff --git a/lib/docker.js b/lib/docker.js index 328e3088..68cf935b 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -1,25 +1,30 @@ -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const isWsl = require('is-wsl'); const fse = require('fs-extra'); const path = require('path'); +const os = require('os'); /** * Helper function to run a docker command * @param {string[]} options * @return {Object} */ -function dockerCommand(options) { +async function dockerCommand(options, pluginInstance) { const cmd = 'docker'; - const ps = spawnSync(cmd, options, { encoding: 'utf-8' }); - if (ps.error) { - if (ps.error.code === 'ENOENT') { - throw new Error('docker not found! Please install it.'); + try { + return await spawn(cmd, options, { encoding: 'utf-8' }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new pluginInstance.serverless.classes.Error( + 'docker not found! Please install it.', + 'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND' + ); } - throw new Error(ps.error); - } else if (ps.status !== 0) { - throw new Error(ps.stderr); + throw e; } - return ps; } /** @@ -28,19 +33,22 @@ function dockerCommand(options) { * @param {string[]} extraArgs * @return {string} The name of the built docker image. 
*/ -function buildImage(dockerFile, extraArgs) { +async function buildImage(dockerFile, extraArgs, pluginInstance) { const imageName = 'sls-py-reqs-custom'; const options = ['build', '-f', dockerFile, '-t', imageName]; if (Array.isArray(extraArgs)) { options.push(...extraArgs); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); } options.push('.'); - dockerCommand(options); + await dockerCommand(options, pluginInstance); return imageName; } @@ -49,7 +57,7 @@ function buildImage(dockerFile, extraArgs) { * @param {string} servicePath * @return {string} file name */ -function findTestFile(servicePath) { +function findTestFile(servicePath, pluginInstance) { if (fse.pathExistsSync(path.join(servicePath, 'serverless.yml'))) { return 'serverless.yml'; } @@ -62,8 +70,9 @@ function findTestFile(servicePath) { if (fse.pathExistsSync(path.join(servicePath, 'requirements.txt'))) { return 'requirements.txt'; } - throw new Error( - 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()' + throw new pluginInstance.serverless.classes.Error( + 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()', + 'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE' ); } @@ -72,7 +81,8 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -function tryBindPath(serverless, bindPath, testFile) { +async function tryBindPath(bindPath, testFile, pluginInstance) { + const { serverless, log } = pluginInstance; const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -84,12 +94,30 @@ function tryBindPath(serverless, bindPath, testFile) { `/test/${testFile}`, ]; try { - if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); - const ps = dockerCommand(options); - if (debug) serverless.cli.log(ps.stdout.trim()); - 
return ps.stdout.trim() === `/test/${testFile}`; + if (debug) { + if (log) { + log.debug(`Trying bindPath ${bindPath} (${options})`); + } else { + serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + } + } + const ps = await dockerCommand(options, pluginInstance); + if (debug) { + if (log) { + log.debug(ps.stdoutBuffer.toString().trim()); + } else { + serverless.cli.log(ps.stdoutBuffer.toString().trim()); + } + } + return ps.stdoutBuffer.toString().trim() === `/test/${testFile}`; } catch (err) { - if (debug) serverless.cli.log(`Finding bindPath failed with ${err}`); + if (debug) { + if (log) { + log.debug(`Finding bindPath failed with ${err}`); + } else { + serverless.cli.log(`Finding bindPath failed with ${err}`); + } + } return false; } } @@ -100,14 +128,15 @@ function tryBindPath(serverless, bindPath, testFile) { * @param {string} servicePath * @return {string} The bind path. */ -function getBindPath(serverless, servicePath) { +async function getBindPath(servicePath, pluginInstance) { // Determine bind path - if (process.platform !== 'win32' && !isWsl) { + let isWsl1 = isWsl && !os.release().includes('microsoft-standard'); + if (process.platform !== 'win32' && !isWsl1) { return servicePath; } // test docker is available - dockerCommand(['version']); + await dockerCommand(['version'], pluginInstance); // find good bind path for Windows let bindPaths = []; @@ -140,11 +169,11 @@ function getBindPath(serverless, servicePath) { bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); - const testFile = findTestFile(servicePath); + const testFile = findTestFile(servicePath, pluginInstance); for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (tryBindPath(serverless, bindPath, testFile)) { + if (await tryBindPath(bindPath, testFile, pluginInstance)) { return bindPath; } } @@ -157,7 +186,7 @@ function getBindPath(serverless, servicePath) { * @param {string} bindPath * @return 
{boolean} */ -function getDockerUid(bindPath) { +async function getDockerUid(bindPath, pluginInstance) { const options = [ 'run', '--rm', @@ -169,8 +198,8 @@ function getDockerUid(bindPath) { '%u', '/bin/sh', ]; - const ps = dockerCommand(options); - return ps.stdout.trim(); + const ps = await dockerCommand(options, pluginInstance); + return ps.stdoutBuffer.toString().trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; diff --git a/lib/inject.js b/lib/inject.js index 3cad758d..f4acde9d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -2,7 +2,7 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const glob = require('glob-all'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, zipFile } = require('./zipTree'); @@ -13,10 +13,16 @@ BbPromise.promisifyAll(fse); * Inject requirements into packaged application. * @param {string} requirementsPath requirements folder path * @param {string} packagePath target package path + * @param {string} injectionRelativePath installation directory in target package * @param {Object} options our options object * @return {Promise} the JSZip object constructed. */ -function injectRequirements(requirementsPath, packagePath, options) { +function injectRequirements( + requirementsPath, + packagePath, + injectionRelativePath, + options +) { const noDeploy = new Set(options.noDeploy || []); return fse @@ -29,7 +35,13 @@ function injectRequirements(requirementsPath, packagePath, options) { dot: true, }) ) - .map((file) => [file, path.relative(requirementsPath, file)]) + .map((file) => [ + file, + path.join( + injectionRelativePath, + path.relative(requirementsPath, file) + ), + ]) .filter( ([file, relativeFile]) => !file.endsWith('/') && @@ -86,57 +98,82 @@ function moveModuleUp(source, target, module) { * Inject requirements into packaged application. 
* @return {Promise} the combined promise for requirements injection. */ -function injectAllRequirements(funcArtifact) { +async function injectAllRequirements(funcArtifact) { if (this.options.layer) { // The requirements will be placed in a Layer, so just resolve return BbPromise.resolve(); } - this.serverless.cli.log('Injecting required Python packages to package...'); + let injectProgress; + if (this.progress && this.log) { + injectProgress = this.progress.get('python-inject-requirements'); + injectProgress.update('Injecting required Python packages to package'); + this.log.info('Injecting required Python packages to package'); + } else { + this.serverless.cli.log('Injecting required Python packages to package...'); + } + + let injectionRelativePath = '.'; + if (this.serverless.service.provider.name == 'scaleway') { + injectionRelativePath = 'package'; + } - if (this.serverless.service.package.individually) { - return BbPromise.resolve(this.targetFuncs) - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ + try { + if (this.serverless.service.package.individually) { + await BbPromise.resolve(this.targetFuncs) + .filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) ) - ) - .map((func) => { - if (!get(func, 'module')) { - set(func, ['module'], '.'); - } - return func; - }) - .map((func) => { - if (func.module !== '.') { - const artifact = func.package ? func.package.artifact : funcArtifact; - const newArtifact = path.join( - '.serverless', - `${func.module}-${func.name}.zip` - ); - func.package.artifact = newArtifact; - return moveModuleUp(artifact, newArtifact, func.module).then( - () => func - ); - } else { + .map((func) => { + if (!get(func, 'module')) { + set(func, ['module'], '.'); + } return func; - } - }) - .map((func) => { - return this.options.zip - ? 
func - : injectRequirements( - path.join('.serverless', func.module, 'requirements'), - func.package.artifact, - this.options + }) + .map((func) => { + if (func.module !== '.') { + const artifact = func.package + ? func.package.artifact + : funcArtifact; + const newArtifact = path.join( + '.serverless', + `${func.module}-${func.name}.zip` ); - }); - } else if (!this.options.zip) { - return injectRequirements( - path.join('.serverless', 'requirements'), - this.serverless.service.package.artifact || funcArtifact, - this.options - ); + func.package.artifact = newArtifact; + return moveModuleUp(artifact, newArtifact, func.module).then( + () => func + ); + } else { + return func; + } + }) + .map((func) => { + return this.options.zip + ? func + : injectRequirements( + path.join( + this.serverless.serviceDir, + '.serverless', + func.module, + 'requirements' + ), + func.package.artifact, + injectionRelativePath, + this.options + ); + }); + } else if (!this.options.zip) { + await injectRequirements( + path.join(this.serverless.serviceDir, '.serverless', 'requirements'), + this.serverless.service.package.artifact || funcArtifact, + injectionRelativePath, + this.options + ); + } + } finally { + injectProgress && injectProgress.remove(); } } diff --git a/lib/layer.js b/lib/layer.js index 12d338ec..6fe9ca4c 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -3,6 +3,7 @@ const fse = require('fs-extra'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, addTree } = require('./zipTree'); +const { sha256Path, getRequirementsLayerPath } = require('./shared'); BbPromise.promisifyAll(fse); @@ -11,13 +12,49 @@ BbPromise.promisifyAll(fse); * @return {Promise} the JSZip object constructed. 
*/ function zipRequirements() { - const rootZip = new JSZip(); const src = path.join('.serverless', 'requirements'); - const runtimepath = 'python'; - - return addTree(rootZip.folder(runtimepath), src).then(() => - writeZip(rootZip, path.join('.serverless', 'pythonRequirements.zip')) + const reqChecksum = sha256Path(path.join('.serverless', 'requirements.txt')); + const targetZipPath = path.join('.serverless', 'pythonRequirements.zip'); + const zipCachePath = getRequirementsLayerPath( + reqChecksum, + targetZipPath, + this.options, + this.serverless ); + + const promises = []; + if (fse.existsSync(zipCachePath)) { + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update( + 'Using cached Python Requirements Lambda Layer file' + ); + this.log.info('Found cached Python Requirements Lambda Layer file'); + } else { + this.serverless.cli.log( + 'Found cached Python Requirements Lambda Layer file' + ); + } + } else { + const rootZip = new JSZip(); + const runtimepath = 'python'; + + promises.push( + addTree(rootZip.folder(runtimepath), src).then(() => + writeZip(rootZip, zipCachePath) + ) + ); + } + return BbPromise.all(promises).then(() => { + if (zipCachePath !== targetZipPath) { + if (process.platform === 'win32') { + fse.copySync(zipCachePath, targetZipPath); + } else { + fse.symlink(zipCachePath, targetZipPath, 'file'); + } + } + }); } /** @@ -53,9 +90,19 @@ function layerRequirements() { return BbPromise.resolve(); } - this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update('Packaging Python Requirements Lambda Layer'); + this.log.info('Packaging Python Requirements Lambda Layer'); + } else { + this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + } - return 
BbPromise.bind(this).then(zipRequirements).then(createLayers); + return BbPromise.bind(this) + .then(zipRequirements) + .then(createLayers) + .finally(() => layerProgress && layerProgress.remove()); } module.exports = { diff --git a/lib/pip.js b/lib/pip.js index 244010c8..40140d36 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -2,12 +2,12 @@ const fse = require('fs-extra'); const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); -const { spawnSync } = require('child_process'); +const set = require('set-value'); +const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); -const { isPoetryProject } = require('./poetry'); +const { isPoetryProject, pyprojectTomlToRequirements } = require('./poetry'); const { checkForAndDeleteMaxCacheVersions, sha256Path, @@ -57,24 +57,19 @@ function mergeCommands(commands) { function generateRequirementsFile( requirementsPath, targetFile, - serverless, - servicePath, - options + pluginInstance ) { - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { - filterRequirementsFile( - path.join(servicePath, '.serverless/requirements.txt'), - targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from pyproject.toml in ${targetFile}...` - ); + const { serverless, servicePath, options, log } = pluginInstance; + const modulePath = path.dirname(requirementsPath); + if (options.usePoetry && isPoetryProject(modulePath)) { + filterRequirementsFile(targetFile, targetFile, pluginInstance); + if (log) { + log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + ); + } } else if 
( options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile')) @@ -82,258 +77,377 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from Pipfile in ${targetFile}...` + pluginInstance ); + if (log) { + log.info(`Parsed requirements.txt from Pipfile in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from Pipfile in ${targetFile}...` + ); + } } else { - filterRequirementsFile(requirementsPath, targetFile, options, serverless); - serverless.cli.log( - `Generated requirements from ${requirementsPath} in ${targetFile}...` - ); + filterRequirementsFile(requirementsPath, targetFile, pluginInstance); + if (log) { + log.info( + `Generated requirements from ${requirementsPath} in ${targetFile}` + ); + } else { + serverless.cli.log( + `Generated requirements from ${requirementsPath} in ${targetFile}...` + ); + } } } -function pipAcceptsSystem(pythonBin) { +async function pipAcceptsSystem(pythonBin, pluginInstance) { // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(pythonBin, ['-m', 'pip', 'help', 'install']); - if (pipTestRes.error) { - if (pipTestRes.error.code === 'ENOENT') { - throw new Error(`${pythonBin} not found! Try the pythonBin option.`); + try { + const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']); + return ( + pipTestRes.stdoutBuffer && + pipTestRes.stdoutBuffer.toString().indexOf('--system') >= 0 + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new pluginInstance.serverless.classes.Error( + `${pythonBin} not found! 
Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND' + ); } - throw pipTestRes.error; + throw e; } - return pipTestRes.stdout.toString().indexOf('--system') >= 0; } /** * Install requirements described from requirements in the targetFolder into that same targetFolder * @param {string} targetFolder - * @param {Object} serverless - * @param {Object} options + * @param {Object} pluginInstance + * @param {Object} funcOptions * @return {undefined} */ -function installRequirements(targetFolder, serverless, options) { +async function installRequirements(targetFolder, pluginInstance, funcOptions) { + const { options, serverless, log, progress, dockerImageForFunction } = + pluginInstance; const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); - serverless.cli.log( - `Installing requirements from ${targetRequirementsTxt} ...` - ); - - const dockerCmd = []; - const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - - if ( - Array.isArray(options.pipCmdExtraArgs) && - options.pipCmdExtraArgs.length > 0 - ) { - options.pipCmdExtraArgs.forEach((cmd) => { - const parts = cmd.split(/\s+/, 2); - pipCmd.push(...parts); - }); + let installProgress; + if (progress) { + log.info(`Installing requirements from "${targetRequirementsTxt}"`); + installProgress = progress.get('python-install'); + installProgress.update('Installing requirements'); + } else { + serverless.cli.log( + `Installing requirements from ${targetRequirementsTxt} ...` + ); } - const pipCmds = [pipCmd]; - const postCmds = []; - // Check if we're using the legacy --cache-dir command... - if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { - if (options.dockerizePip) { - throw ( - 'Error: You can not use --cache-dir with Docker any more, please\n' + - ' use the new option useDownloadCache instead. 
Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - } else { - serverless.cli.log('=================================================='); - serverless.cli.log( - 'Warning: You are using a deprecated --cache-dir inside\n' + - ' your pipCmdExtraArgs which may not work properly, please use the\n' + - ' useDownloadCache option instead. Please see: \n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - serverless.cli.log('=================================================='); - } - } + try { + const dockerCmd = []; + const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - if (!options.dockerizePip) { - // Push our local OS-specific paths for requirements and target directory - pipCmd.push( - '-t', - dockerPathForWin(targetFolder), - '-r', - dockerPathForWin(targetRequirementsTxt) - ); - // If we want a download cache... - if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - pipCmd.push('--cache-dir', downloadCacheDir); + if ( + Array.isArray(options.pipCmdExtraArgs) && + options.pipCmdExtraArgs.length > 0 + ) { + options.pipCmdExtraArgs.forEach((cmd) => { + const parts = cmd.split(/\s+/, 2); + pipCmd.push(...parts); + }); } - if (pipAcceptsSystem(options.pythonBin)) { - pipCmd.push('--system'); - } - } - // If we are dockerizing pip - if (options.dockerizePip) { - // Push docker-specific paths for requirements and target directory - pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); - - // Build docker image if required - let dockerImage; - if (options.dockerFile) { - serverless.cli.log( - `Building custom docker image from ${options.dockerFile}...` - ); - dockerImage = buildImage( - options.dockerFile, - options.dockerBuildCmdExtraArgs - ); - } else { - dockerImage = 
options.dockerImage; + const pipCmds = [pipCmd]; + const postCmds = []; + // Check if we're using the legacy --cache-dir command... + if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { + if (options.dockerizePip) { + throw new pluginInstance.serverless.classes.Error( + 'You cannot use --cache-dir with Docker any more, please use the new option useDownloadCache instead. Please see: https://github.com/UnitedIncome/serverless-python-requirements#caching for more details.', + 'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID' + ); + } else { + if (log) { + log.warning( + 'You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + serverless.cli.log( + '==================================================' + ); + serverless.cli.log( + 'Warning: You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. 
Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + serverless.cli.log( + '==================================================' + ); + } + } } - serverless.cli.log(`Docker Image: ${dockerImage}`); - - // Prepare bind path depending on os platform - const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); - - dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); - if (options.dockerSsh) { - // Mount necessary ssh files to work with private repos - dockerCmd.push( - '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, - '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, - '-v', - `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, - '-e', - 'SSH_AUTH_SOCK=/tmp/ssh_sock' + + if (!options.dockerizePip) { + // Push our local OS-specific paths for requirements and target directory + pipCmd.push( + '-t', + dockerPathForWin(targetFolder), + '-r', + dockerPathForWin(targetRequirementsTxt) ); + // If we want a download cache... + if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + pipCmd.push('--cache-dir', downloadCacheDir); + } + + if (await pipAcceptsSystem(options.pythonBin, pluginInstance)) { + pipCmd.push('--system'); + } } - // If we want a download cache... - const dockerDownloadCacheDir = '/var/useDownloadCache'; - if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - // This little hack is necessary because getBindPath requires something inside of it to test... 
- // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use - // its own temp file) - fse.closeSync( - fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + // If we are dockerizing pip + if (options.dockerizePip) { + // Push docker-specific paths for requirements and target directory + pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); + + // Build docker image if required + let dockerImage; + if (options.dockerFile) { + let buildDockerImageProgress; + if (progress) { + buildDockerImageProgress = progress.get( + 'python-install-build-docker' + ); + buildDockerImageProgress.update( + `Building custom docker image from ${options.dockerFile}` + ); + } else { + serverless.cli.log( + `Building custom docker image from ${options.dockerFile}...` + ); + } + try { + dockerImage = await buildImage( + options.dockerFile, + options.dockerBuildCmdExtraArgs, + pluginInstance + ); + } finally { + buildDockerImageProgress && buildDockerImageProgress.remove(); + } + } else { + dockerImage = dockerImageForFunction(funcOptions); + } + if (log) { + log.info(`Docker Image: ${dockerImage}`); + } else { + serverless.cli.log(`Docker Image: ${dockerImage}`); + } + + // Prepare bind path depending on os platform + const bindPath = dockerPathForWin( + await getBindPath(targetFolder, pluginInstance) ); - const windowsized = getBindPath(serverless, downloadCacheDir); - // And now push it to a volume mount and to pip... 
- dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); - pipCmd.push('--cache-dir', dockerDownloadCacheDir); - } - if (options.dockerEnv) { - // Add environment variables to docker run cmd - options.dockerEnv.forEach(function (item) { - dockerCmd.push('-e', item); - }); - } + dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); + if (options.dockerSsh) { + const homePath = require('os').homedir(); + const sshKeyPath = + options.dockerPrivateKey || `${homePath}/.ssh/id_rsa`; + + // Mount necessary ssh files to work with private repos + dockerCmd.push( + '-v', + `${sshKeyPath}:/root/.ssh/${sshKeyPath.split('/').splice(-1)[0]}:z`, + '-v', + `${homePath}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', + `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', + 'SSH_AUTH_SOCK=/tmp/ssh_sock' + ); + } - if (process.platform === 'linux') { - // Use same user so requirements folder is not root and so --cache-dir works + // If we want a download cache... + const dockerDownloadCacheDir = '/var/useDownloadCache'; if (options.useDownloadCache) { - // Set the ownership of the download cache dir to root - pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + // This little hack is necessary because getBindPath requires something inside of it to test... + // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use + // its own temp file) + fse.closeSync( + fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + ); + const windowsized = await getBindPath(downloadCacheDir, pluginInstance); + // And now push it to a volume mount and to pip... 
+ dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); } - // Install requirements with pip - // Set the ownership of the current folder to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task', - ]); - } else { - // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); + + if (options.dockerEnv) { + // Add environment variables to docker run cmd + options.dockerEnv.forEach(function (item) { + dockerCmd.push('-e', item); + }); + } + + if (process.platform === 'linux') { + // Use same user so requirements folder is not root and so --cache-dir works + if (options.useDownloadCache) { + // Set the ownership of the download cache dir to root + pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + // Install requirements with pip + // Set the ownership of the current folder to user + // If you use docker-rootless, you don't need to set the ownership + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task', + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', '/var/task']); + } + } else { + // Use same user so --cache-dir works + dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance)); + } + + for (let path of options.dockerExtraFiles) { + pipCmds.push(['cp', path, '/var/task/']); + } + + if (process.platform === 'linux') { + if (options.useDownloadCache) { + // Set the ownership of the download cache dir back to user + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + } + } + + if (Array.isArray(options.dockerRunCmdExtraArgs)) { + dockerCmd.push(...options.dockerRunCmdExtraArgs); + } else { + throw new pluginInstance.serverless.classes.Error( 
+ 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); + } + + dockerCmd.push(dockerImage); } - for (let path of options.dockerExtraFiles) { - pipCmds.push(['cp', path, '/var/task/']); + // If enabled slimming, strip so files + switch (getStripMode(options)) { + case 'docker': + pipCmds.push(getStripCommand(options, '/var/task')); + break; + case 'direct': + postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); + break; } - if (process.platform === 'linux') { - if (options.useDownloadCache) { - // Set the ownership of the download cache dir back to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir, - ]); - } + let spawnArgs = { shell: true }; + if (process.env.SLS_DEBUG) { + spawnArgs.stdio = 'inherit'; + } + let mainCmds = []; + if (dockerCmd.length) { + dockerCmd.push(...mergeCommands(pipCmds)); + mainCmds = [dockerCmd]; + } else { + mainCmds = pipCmds; } + mainCmds.push(...postCmds); - if (Array.isArray(options.dockerRunCmdExtraArgs)) { - dockerCmd.push(...options.dockerRunCmdExtraArgs); + if (log) { + log.info(`Running ${quote(dockerCmd)}...`); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + serverless.cli.log(`Running ${quote(dockerCmd)}...`); } - dockerCmd.push(dockerImage); - } + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + const advice = + cmd.indexOf('python') > -1 + ? 'Try the pythonBin option' + : 'Please install it'; + throw new pluginInstance.serverless.classes.Error( + `${cmd} not found! 
${advice}`, + 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' + ); + } - // If enabled slimming, strip so files - switch (getStripMode(options)) { - case 'docker': - pipCmds.push(getStripCommand(options, '/var/task')); - break; - case 'direct': - postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); - break; - } + if (cmd === 'docker' && e.stderrBuffer) { + throw new pluginInstance.serverless.classes.Error( + `Running "${cmd} ${args.join(' ')}" failed with: "${e.stderrBuffer + .toString() + .trim()}"`, + 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' + ); + } - let spawnArgs = { shell: true }; - if (process.env.SLS_DEBUG) { - spawnArgs.stdio = 'inherit'; - } - let mainCmds = []; - if (dockerCmd.length) { - dockerCmd.push(...mergeCommands(pipCmds)); - mainCmds = [dockerCmd]; - } else { - mainCmds = pipCmds; - } - mainCmds.push(...postCmds); - - serverless.cli.log(`Running ${quote(dockerCmd)}...`); - - filterCommands(mainCmds).forEach(([cmd, ...args]) => { - const res = spawnSync(cmd, args); - if (res.error) { - if (res.error.code === 'ENOENT') { - const advice = - cmd.indexOf('python') > -1 - ? 'Try the pythonBin option' - : 'Please install it'; - throw new Error(`${cmd} not found! 
${advice}`); + if (log) { + log.error(`Stdout: ${e.stdoutBuffer}`); + log.error(`Stderr: ${e.stderrBuffer}`); + } else { + serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); + serverless.cli.log(`Stderr: ${e.stderrBuffer}`); + } + throw e; } - throw res.error; } - if (res.status !== 0) { - throw new Error(`STDOUT: ${res.stdout}\n\nSTDERR: ${res.stderr}`); + // If enabled slimming, delete files in slimPatterns + if (options.slim === true || options.slim === 'true') { + deleteFiles(options, targetFolder); } - }); - // If enabled slimming, delete files in slimPatterns - if (options.slim === true || options.slim === 'true') { - deleteFiles(options, targetFolder); + } finally { + installProgress && installProgress.remove(); } } @@ -382,7 +496,7 @@ function getRequirements(source) { * @param {string} target requirements where results are written * @param {Object} options */ -function filterRequirementsFile(source, target, options, serverless) { +function filterRequirementsFile(source, target, { options, serverless, log }) { const noDeploy = new Set(options.noDeploy || []); const requirements = getRequirements(source); var prepend = []; @@ -404,9 +518,13 @@ function filterRequirementsFile(source, target, options, serverless) { // not required inside final archive and avoids pip bugs // see https://github.com/UnitedIncome/serverless-python-requirements/issues/240 req = req.split('-e')[1].trim(); - serverless.cli.log( - `Warning: Stripping -e flag from requirement ${req}` - ); + if (log) { + log.warning(`Stripping -e flag from requirement ${req}`); + } else { + serverless.cli.log( + `Warning: Stripping -e flag from requirement ${req}` + ); + } } // Keep options for later @@ -434,13 +552,19 @@ function filterRequirementsFile(source, target, options, serverless) { * @param {Object} serverless * @return {undefined} */ -function copyVendors(vendorFolder, targetFolder, serverless) { +function copyVendors(vendorFolder, targetFolder, { serverless, log }) { // Create target folder if 
it does not exist fse.ensureDirSync(targetFolder); - serverless.cli.log( - `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` - ); + if (log) { + log.info( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}` + ); + } else { + serverless.cli.log( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` + ); + } fse.readdirSync(vendorFolder).map((file) => { let source = path.join(vendorFolder, file); @@ -459,11 +583,7 @@ function copyVendors(vendorFolder, targetFolder, serverless) { * @param {string} fileName */ function requirementsFileExists(servicePath, options, fileName) { - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { + if (options.usePoetry && isPoetryProject(path.dirname(fileName))) { return true; } @@ -489,16 +609,17 @@ function requirementsFileExists(servicePath, options, fileName) { * @param {Object} serverless * @return {string} */ -function installRequirementsIfNeeded( - servicePath, +async function installRequirementsIfNeeded( modulePath, - options, funcOptions, - serverless + pluginInstance ) { + const { servicePath, options, serverless } = pluginInstance; // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); + await pyprojectTomlToRequirements(modulePath, pluginInstance); + // Skip requirements generation, if requirements file doesn't exist if (!requirementsFileExists(servicePath, options, fileName)) { return false; @@ -518,19 +639,19 @@ function installRequirementsIfNeeded( fse.ensureDirSync(requirementsTxtDirectory); const slsReqsTxt = path.join(requirementsTxtDirectory, 'requirements.txt'); - generateRequirementsFile( - fileName, - slsReqsTxt, - serverless, - servicePath, - options - ); + generateRequirementsFile(fileName, slsReqsTxt, pluginInstance); // If no requirements file or an empty requirements file, then do 
nothing if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) { - serverless.cli.log( - `Skipping empty output requirements.txt file from ${slsReqsTxt}` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } else { + serverless.cli.log( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } return false; } @@ -541,7 +662,8 @@ function installRequirementsIfNeeded( const workingReqsFolder = getRequirementsWorkingPath( reqChecksum, requirementsTxtDirectory, - options + options, + serverless ); // Check if our static cache is present and is valid @@ -550,9 +672,15 @@ function installRequirementsIfNeeded( fse.existsSync(path.join(workingReqsFolder, '.completed_requirements')) && workingReqsFolder.endsWith('_slspyc') ) { - serverless.cli.log( - `Using static cache of requirements found at ${workingReqsFolder} ...` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Using static cache of requirements found at ${workingReqsFolder}` + ); + } else { + serverless.cli.log( + `Using static cache of requirements found at ${workingReqsFolder} ...` + ); + } // We'll "touch" the folder, as to bring it to the start of the FIFO cache fse.utimesSync(workingReqsFolder, new Date(), new Date()); return workingReqsFolder; @@ -573,14 +701,14 @@ function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - installRequirements(workingReqsFolder, serverless, options); + await installRequirements(workingReqsFolder, pluginInstance, funcOptions); // Copy vendor libraries to requirements folder if (options.vendor) { - copyVendors(options.vendor, workingReqsFolder, serverless); + copyVendors(options.vendor, workingReqsFolder, pluginInstance); } if (funcOptions.vendor) { - copyVendors(funcOptions.vendor, workingReqsFolder, serverless); + copyVendors(funcOptions.vendor, 
workingReqsFolder, pluginInstance); } // Then touch our ".completed_requirements" file so we know we can use this for static cache @@ -596,69 +724,62 @@ function installRequirementsIfNeeded( * pip install the requirements to the requirements directory * @return {undefined} */ -function installAllRequirements() { +async function installAllRequirements() { // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); // First, check and delete cache versions, if enabled - checkForAndDeleteMaxCacheVersions(this.options, this.serverless); + checkForAndDeleteMaxCacheVersions(this); // Then if we're going to package functions individually... if (this.serverless.service.package.individually) { let doneModules = []; - this.targetFuncs - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ - ) + const filteredFuncs = this.targetFuncs.filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ ) - .map((f) => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - // If we didn't already process a module (functions can re-use modules) - if (!doneModules.includes(f.module)) { - const reqsInstalledAt = installRequirementsIfNeeded( - this.servicePath, - f.module, - this.options, - f, - this.serverless - ); - // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are - let modulePath = path.join( - this.servicePath, - '.serverless', - `${f.module}`, - 'requirements' - ); - // Only do if we didn't already do it - if ( - reqsInstalledAt && - !fse.existsSync(modulePath) && - reqsInstalledAt != modulePath - ) { - if (this.options.useStaticCache) { - // Windows can't symlink so we have to copy on Windows, - // it's not as fast, but at least it works - if (process.platform == 'win32') { - fse.copySync(reqsInstalledAt, modulePath); - } else { - fse.symlink(reqsInstalledAt, modulePath); - } + ); + + for (const f of filteredFuncs) { + if 
(!get(f, 'module')) { + set(f, ['module'], '.'); + } + + // If we didn't already process a module (functions can re-use modules) + if (!doneModules.includes(f.module)) { + const reqsInstalledAt = await installRequirementsIfNeeded( + f.module, + f, + this + ); + // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are + let modulePath = path.join( + this.servicePath, + '.serverless', + `${f.module}`, + 'requirements' + ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(modulePath) && + reqsInstalledAt != modulePath + ) { + if (this.options.useStaticCache) { + // Windows can't symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, modulePath); } else { - fse.rename(reqsInstalledAt, modulePath); + fse.symlink(reqsInstalledAt, modulePath); } + } else { + fse.rename(reqsInstalledAt, modulePath); } - doneModules.push(f.module); } - }); + doneModules.push(f.module); + } + } } else { - const reqsInstalledAt = installRequirementsIfNeeded( - this.servicePath, - '', - this.options, - {}, - this.serverless - ); + const reqsInstalledAt = await installRequirementsIfNeeded('', {}, this); // Add symlinks into .serverless for so it's easier for injecting and for users to see where reqs are let symlinkPath = path.join( this.servicePath, diff --git a/lib/pipenv.js b/lib/pipenv.js index 063fb5d8..1099b651 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -1,12 +1,49 @@ const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const { EOL } = require('os'); +const semver = require('semver'); + +const LEGACY_PIPENV_VERSION = '2022.8.5'; + +async function getPipenvVersion() { + try { + const res = await spawn('pipenv', ['--version'], { + cwd: this.servicePath, + }); + + const stdoutBuffer = + 
(res.stdoutBuffer && res.stdoutBuffer.toString().trim()) || ''; + + const version = stdoutBuffer.split(' ')[2]; + + if (semver.valid(version)) { + return version; + } else { + throw new this.serverless.classes.Error( + `Unable to parse pipenv version!`, + 'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR' + ); + } + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('command not found')) { + throw new this.serverless.classes.Error( + `pipenv not found! Install it according to the pipenv docs.`, + 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' + ); + } else { + throw e; + } + } +} /** * pipenv install */ -function pipfileToRequirements() { +async function pipfileToRequirements() { if ( !this.options.usePipenv || !fse.existsSync(path.join(this.servicePath, 'Pipfile')) @@ -14,31 +51,81 @@ function pipfileToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + let generateRequirementsProgress; + if (this.progress && this.log) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-pipfile' + ); + generateRequirementsProgress.update( + 'Generating requirements.txt from Pipfile' + ); + this.log.info('Generating requirements.txt from Pipfile'); + } else { + this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + } - const res = spawnSync( - 'pipenv', - ['lock', '--requirements', '--keep-outdated'], - { - cwd: this.servicePath, + try { + // Get and validate pipenv version + if (this.log) { + this.log.info('Getting pipenv version'); + } else { + this.serverless.cli.log('Getting pipenv version'); } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { - throw new Error( - `pipenv not found! 
Install it with 'pip install pipenv'.` + + const pipenvVersion = await getPipenvVersion(); + let res; + + if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { + // Using new pipenv syntax ( >= 2022.8.13) + // Generate requirements from existing lock file. + // See: https://pipenv.pypa.io/en/latest/advanced/#generating-a-requirements-txt + try { + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + if (stderrBufferContent.includes('FileNotFoundError')) { + // No previous Pipfile.lock, we will try to generate it here + if (this.log) { + this.log.warning( + 'No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } else { + this.serverless.cli.log( + 'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } else { + throw e; + } + } + } else { + // Falling back to legacy pipenv syntax + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.writeFileSync( + path.join(this.servicePath, '.serverless/requirements.txt'), + removeEditableFlagFromRequirementsString(res.stdoutBuffer) + ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.writeFileSync( - path.join(this.servicePath, '.serverless/requirements.txt'), - removeEditableFlagFromRequirementsString(res.stdout) - ); } /** diff --git a/lib/poetry.js b/lib/poetry.js index 553a1392..17e3268f 100644 --- 
a/lib/poetry.js +++ b/lib/poetry.js @@ -1,68 +1,118 @@ const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); + +const spawn = require('child-process-ext/spawn'); const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ -function pyprojectTomlToRequirements() { - if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { +async function pyprojectTomlToRequirements(modulePath, pluginInstance) { + const { serverless, servicePath, options, log, progress } = pluginInstance; + + const moduleProjectPath = path.join(servicePath, modulePath); + if (!options.usePoetry || !isPoetryProject(moduleProjectPath)) { return; } - this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); - - const res = spawnSync( - 'poetry', - [ - 'export', - '--without-hashes', - '-f', - 'requirements.txt', - '-o', - 'requirements.txt', - '--with-credentials', - ], - { - cwd: this.servicePath, + let generateRequirementsProgress; + if (progress && log) { + generateRequirementsProgress = progress.get( + 'python-generate-requirements-toml' + ); + } + + const emitMsg = (msg) => { + if (generateRequirementsProgress) { + generateRequirementsProgress.update(msg); + log.info(msg); + } else { + serverless.cli.log(msg); } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { - throw new Error( - `poetry not found! 
Install it according to the poetry docs.` + }; + + if (fs.existsSync('poetry.lock')) { + emitMsg('Generating requirements.txt from poetry.lock'); + } else { + if (options.requirePoetryLockFile) { + throw new serverless.classes.Error( + 'poetry.lock file not found - set requirePoetryLockFile to false to ' + + 'disable this error', + 'MISSING_REQUIRED_POETRY_LOCK' ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + emitMsg('Generating poetry.lock and requirements.txt from pyproject.toml'); } - const editableFlag = new RegExp(/^-e /gm); - const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { - encoding: 'utf-8', - }); + try { + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ...(options.poetryWithGroups.length + ? [`--with=${options.poetryWithGroups.join(',')}`] + : []), + ...(options.poetryWithoutGroups.length + ? [`--without=${options.poetryWithoutGroups.join(',')}`] + : []), + ...(options.poetryOnlyGroups.length + ? [`--only=${options.poetryOnlyGroups.join(',')}`] + : []), + ], + { + cwd: moduleProjectPath, + } + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new serverless.classes.Error( + `poetry not found! Install it according to the poetry docs.`, + 'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND' + ); + } + throw e; + } - if (requirementsContents.match(editableFlag)) { - this.serverless.cli.log( - 'The generated file contains -e flags, removing them...' 
- ); - fse.writeFileSync( + const editableFlag = new RegExp(/^-e /gm); + const sourceRequirements = path.join(moduleProjectPath, 'requirements.txt'); + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: 'utf-8', + }); + + if (requirementsContents.match(editableFlag)) { + if (log) { + log.info('The generated file contains -e flags, removing them'); + } else { + serverless.cli.log( + 'The generated file contains -e flags, removing them...' + ); + } + fse.writeFileSync( + sourceRequirements, + requirementsContents.replace(editableFlag, '') + ); + } + + fse.ensureDirSync(path.join(servicePath, '.serverless')); + fse.moveSync( sourceRequirements, - requirementsContents.replace(editableFlag, '') + path.join(servicePath, '.serverless', modulePath, 'requirements.txt'), + { overwrite: true } ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.moveSync( - sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt'), - { overwrite: true } - ); } /** diff --git a/lib/shared.js b/lib/shared.js index 79b60cef..bebb3f09 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -12,7 +12,7 @@ const sha256File = require('sha256-file'); * @param {Object} serverless * @return {undefined} */ -function checkForAndDeleteMaxCacheVersions(options, serverless) { +function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { // If we're using the static cache, and we have static cache max versions enabled if ( options.useStaticCache && @@ -42,10 +42,17 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { rimraf.sync(files[i]); items++; } + // Log the number of cache files flushed - serverless.cli.log( - `Removed ${items} items from cache because of staticCacheMaxVersions` - ); + if (log) { + log.info( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } else { + 
serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } } } } @@ -55,17 +62,20 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { * @param {string} subfolder * @param {string} servicePath * @param {Object} options + * @param {Object} serverless * @return {string} */ function getRequirementsWorkingPath( subfolder, requirementsTxtDirectory, - options + options, + serverless ) { // If we want to use the static cache if (options && options.useStaticCache) { if (subfolder) { - subfolder = subfolder + '_slspyc'; + const architecture = serverless.service.provider.architecture || 'x86_64'; + subfolder = `${subfolder}_${architecture}_slspyc`; } // If we have max number of cache items... @@ -76,6 +86,26 @@ function getRequirementsWorkingPath( return path.join(requirementsTxtDirectory, 'requirements'); } +/** + * Path of a cached requirements layer archive file + * @param {string} subfolder + * @param {string} fallback + * @param {Object} options + * @param {Object} serverless + * @return {string} + */ +function getRequirementsLayerPath(hash, fallback, options, serverless) { + // If we want to use the static cache + if (hash && options && options.useStaticCache) { + const architecture = serverless.service.provider.architecture || 'x86_64'; + hash = `${hash}_${architecture}_slspyc.zip`; + return path.join(getUserCachePath(options), hash); + } + + // If we don't want to use the static cache, then fallback to requirements file in .serverless directory + return fallback; +} + /** * The static cache path that will be used for this system + options, used if static cache is enabled * @param {Object} options @@ -107,6 +137,7 @@ function sha256Path(fullpath) { module.exports = { checkForAndDeleteMaxCacheVersions, getRequirementsWorkingPath, + getRequirementsLayerPath, getUserCachePath, sha256Path, }; diff --git a/lib/zip.js b/lib/zip.js index 2e872aa9..3c21bbbf 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -1,7 +1,7 @@ const 
fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const uniqBy = require('lodash.uniqby'); const BbPromise = require('bluebird'); const JSZip = require('jszip'); @@ -30,9 +30,13 @@ function addVendorHelper() { }) .then((functions) => uniqBy(functions, (func) => func.module)) .map((f) => { - this.serverless.cli.log( - `Adding Python requirements helper to ${f.module}...` - ); + if (this.log) { + this.log.info(`Adding Python requirements helper to ${f.module}`); + } else { + this.serverless.cli.log( + `Adding Python requirements helper to ${f.module}...` + ); + } return fse.copyAsync( path.resolve(__dirname, '../unzip_requirements.py'), @@ -40,7 +44,11 @@ function addVendorHelper() { ); }); } else { - this.serverless.cli.log('Adding Python requirements helper...'); + if (this.log) { + this.log.info('Adding Python requirements helper'); + } else { + this.serverless.cli.log('Adding Python requirements helper...'); + } if (!get(this.serverless.service, 'package.patterns')) { set(this.serverless.service, ['package', 'patterns'], []); @@ -72,15 +80,25 @@ function removeVendorHelper() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Removing Python requirements helper from ${f.module}...` - ); + if (this.log) { + this.log.info( + `Removing Python requirements helper from ${f.module}` + ); + } else { + this.serverless.cli.log( + `Removing Python requirements helper from ${f.module}...` + ); + } return fse.removeAsync( path.join(this.servicePath, f.module, 'unzip_requirements.py') ); }); } else { - this.serverless.cli.log('Removing Python requirements helper...'); + if (this.log) { + this.log.info('Removing Python requirements helper'); + } else { + this.serverless.cli.log('Removing Python requirements helper...'); + } return fse.removeAsync( path.join(this.servicePath, 'unzip_requirements.py') ); @@ -96,6 +114,11 
@@ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) + .filter((func) => { + return ( + func.runtime || this.serverless.service.provider.runtime + ).match(/^python.*/); + }) .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); @@ -104,21 +127,38 @@ function packRequirements() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Zipping required Python packages for ${f.module}...` - ); + let packProgress; + if (this.progress && this.log) { + packProgress = this.progress.get( + `python-pack-requirements-${f.module}` + ); + packProgress.update( + `Zipping required Python packages for ${f.module}` + ); + this.log.info(`Zipping required Python packages for ${f.module}`); + } else { + this.serverless.cli.log( + `Zipping required Python packages for ${f.module}...` + ); + } f.package.patterns.push(`${f.module}/.requirements.zip`); - return addTree( - new JSZip(), - `.serverless/${f.module}/requirements` - ).then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)); + return addTree(new JSZip(), `.serverless/${f.module}/requirements`) + .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) + .finally(() => packProgress && packProgress.remove()); }); } else { - this.serverless.cli.log('Zipping required Python packages...'); + let packProgress; + if (this.progress) { + packProgress = this.progress.get(`python-pack-requirements`); + } else { + this.serverless.cli.log('Zipping required Python packages...'); + } this.serverless.service.package.patterns.push('.requirements.zip'); - return addTree(new JSZip(), '.serverless/requirements').then((zip) => - writeZip(zip, path.join(this.servicePath, '.requirements.zip')) - ); + return addTree(new JSZip(), '.serverless/requirements') + .then((zip) => + writeZip(zip, path.join(this.servicePath, '.requirements.zip')) + ) + .finally(() => packProgress && 
packProgress.remove()); } } } diff --git a/package.json b/package.json index 1fed4c39..55ab4989 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.0", + "version": "6.1.2", "engines": { "node": ">=12.0" }, @@ -38,16 +38,24 @@ "main": "index.js", "bin": {}, "scripts": { - "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c '{.,lib}/*.{js,md}'", - "test": "node test.js", - "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", - "format": "prettier --write '{.,lib}/*.{js,md}'" + "commitlint": "commitlint -f HEAD@{15}", + "lint": "eslint .", + "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prepare-release": "standard-version && prettier --write CHANGELOG.md", + "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier -c", + "prettify": "prettier --write --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettify:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier --write", + "test": "node test.js" }, "devDependencies": { "cross-spawn": "*", - "eslint": "^7.32.0", + "eslint": "^8.57.0", + "git-list-updated": "^1.2.1", + "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", "prettier": "^2", + "standard-version": "^9.5.0", "tape": "*", "tape-promise": "*" }, @@ -55,20 +63,27 @@ "@iarna/toml": "^2.2.5", "appdirectory": "^0.1.0", "bluebird": "^3.7.2", - "fs-extra": "^9.1.0", - "glob-all": "^3.2.1", + "child-process-ext": "^2.1.1", + "fs-extra": "^10.1.0", + "glob-all": "^3.3.1", "is-wsl": "^2.2.0", - "jszip": "^3.7.1", + "jszip": "^3.10.1", "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", + "semver": 
"^7.6.0", + "set-value": "^4.1.0", "sha256-file": "1.0.0", - "shell-quote": "^1.7.3" + "shell-quote": "^1.8.1" }, - "peerDependencies": { - "serverless": "^2.32" + "lint-staged": { + "*.js": [ + "eslint" + ], + "*.{css,html,js,json,md,yaml,yml}": [ + "prettier -c" + ] }, "eslintConfig": { "extends": "eslint:recommended", @@ -84,6 +99,30 @@ "no-console": "off" } }, + "standard-version": { + "skip": { + "commit": true, + "tag": true + }, + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "refactor", + "section": "Maintenance Improvements" + } + ] + }, "prettier": { "semi": true, "singleQuote": true diff --git a/test.js b/test.js index 0322ab91..1967330b 100644 --- a/test.js +++ b/test.js @@ -3,6 +3,7 @@ const glob = require('glob-all'); const JSZip = require('jszip'); const sha256File = require('sha256-file'); const tape = require('tape-promise/tape'); + const { chmodSync, removeSync, @@ -10,7 +11,7 @@ const { copySync, writeFileSync, statSync, - pathExistsSync + pathExistsSync, } = require('fs-extra'); const { quote } = require('shell-quote'); const { sep } = require('path'); @@ -19,30 +20,36 @@ const { getUserCachePath, sha256Path } = require('./lib/shared'); const initialWorkingDir = process.cwd(); -const mkCommand = cmd => (args, options = {}) => { - const { error, stdout, stderr, status } = crossSpawn.sync( - cmd, - args, - Object.assign( - { - env: Object.assign({}, process.env, { SLS_DEBUG: 't' }) - }, +const mkCommand = + (cmd) => + (args, options = {}) => { + options['env'] = Object.assign( + { SLS_DEBUG: 'true' }, + process.env, + options['env'] + ); + const { error, stdout, stderr, status } = crossSpawn.sync( + cmd, + args, options - ) - ); - if (error) { - console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console - throw error; - } - if (status) { - console.error('STDOUT: ', 
stdout.toString()); // eslint-disable-line no-console - console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console - throw new Error( - `${quote([cmd, ...args])} failed with status code ${status}` ); - } - return stdout && stdout.toString().trim(); -}; + if (error && !options['noThrow']) { + console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console + throw error; + } + if (status && !options['noThrow']) { + console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console + console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console + throw new Error( + `${quote([cmd, ...args])} failed with status code ${status}` + ); + } + return { + stdout: stdout && stdout.toString().trim(), + stderr: stderr && stderr.toString().trim(), + }; + }; + const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); @@ -73,8 +80,8 @@ const teardown = () => { 'serverless.yml.bak', 'module1/foobar', getUserCachePath(), - ...glob.sync('serverless-python-requirements-*.tgz') - ].map(path => removeSync(path)); + ...glob.sync('serverless-python-requirements-*.tgz'), + ].map((path) => removeSync(path)); if (!cwd.endsWith('base with a space')) { try { git(['checkout', 'serverless.yml']); @@ -93,15 +100,17 @@ const teardown = () => { const testFilter = (() => { const elems = process.argv.slice(2); // skip ['node', 'test.js'] if (elems.length) { - return desc => - elems.some(text => desc.search(text) != -1) ? tape.test : tape.test.skip; + return (desc) => + elems.some((text) => desc.search(text) != -1) + ? 
tape.test + : tape.test.skip; } else { return () => tape.test; } })(); const test = (desc, func, opts = {}) => - testFilter(desc)(desc, opts, async t => { + testFilter(desc)(desc, opts, async (t) => { setup(); let ended = false; try { @@ -124,7 +133,7 @@ const availablePythons = (() => { const mapping = {}; if (process.env.USE_PYTHON) { binaries.push( - ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) + ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim()) ); } else { // For running outside of CI @@ -135,7 +144,7 @@ const availablePythons = (() => { const python = `${bin}${exe}`; const { stdout, status } = crossSpawn.sync(python, [ '-c', - 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))' + 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))', ]); const ver = stdout && stdout.toString().trim(); if (!status && ver) { @@ -152,29 +161,25 @@ const availablePythons = (() => { return mapping; })(); -const getPythonBin = version => { +const getPythonBin = (version) => { const bin = availablePythons[String(version)]; if (!bin) throw new Error(`No python version ${version} available`); return bin; }; -const hasPython = version => { - return Boolean(availablePythons[String(version)]); -}; - -const listZipFiles = async function(filename) { +const listZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object.keys(zip.files); }; -const listZipFilesWithMetaData = async function(filename) { +const listZipFilesWithMetaData = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object(zip.files); }; -const listRequirementsZipFiles = async function(filename) { +const listRequirementsZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); const reqsBuffer = await 
zip.file('.requirements.zip').async('nodebuffer'); @@ -196,280 +201,269 @@ const canUseDocker = () => { const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; test( - 'default pythonBin can package flask with default options', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 packages have the same hash', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package']); - t.equal( - sha256File('.serverless/sls-py-req-test.zip'), - fileHash, - 'packages have the same hash' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package flask with default options', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.6 can package flask with hashes', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-hashes.txt', - 'package' - ]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.end(); - }, - { skip: 
!hasPython(3) || brokenOn('win32') } -); - -test( - 'py3.6 can package flask with nested', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-nested.txt', - 'package' - ]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.6 can package flask with zip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.6 can package flask with slim option', - async t => { + 'dockerPrivateKey option correctly resolves docker command', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const { stdout } = sls(['package'], { + noThrow: true, + env: { + dockerizePip: true, + dockerSsh: true, + dockerPrivateKey: 
`${__dirname}${sep}tests${sep}base${sep}custom_ssh`, + dockerImage: 'break the build to log the command', + }, + }); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.6 can package flask with slim & slimPatterns options', - async t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - "py3.6 doesn't package bottle with noDeploy option", - async t => { + stdout.includes( + `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` + ), + 'docker command properly resolved' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + +test('default pythonBin can package flask with default options', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 packages have the same hash', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const fileHash = 
sha256File('.serverless/sls-py-req-test.zip'); + sls(['package'], { env: {} }); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); + t.end(); +}); + +test('py3.9 can package flask with default options', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test( + 'py3.9 can package flask with hashes', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + sls(['package'], { + env: { + fileName: 'requirements-w-hashes.txt', + pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: !hasPython(3) } + { skip: brokenOn('win32') } ); -test( - 'py3.6 can package boto3 with editable', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-editable.txt', - 'package' - ]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`botocore${sep}__init__.py`), - 'botocore is packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } 
-); +test('py3.9 can package flask with nested', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 can package flask with zip option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + 
npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test("py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.9 can package boto3 with editable', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`botocore${sep}__init__.py`), + 'botocore is packaged' + ); + t.end(); +}); test( - 'py3.6 can package flask with dockerizePip option', - async t => { + 'py3.9 can package flask with dockerizePip option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const 
zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip option', - async t => { + 'py3.9 can package flask with slim & dockerizePip option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are NOT packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + 'py3.9 can package flask with slim & dockerizePip & slimPatterns options', + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & dockerizePip option', - async t => { + 'py3.9 can package flask with zip & dockerizePip option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -492,17 +486,18 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & slim & dockerizePip option', - async t => { + 'py3.9 can package flask with zip & slim & dockerizePip option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', '--slim=true', 'package']); - + sls(['package'], { + env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -525,1441 +520,795 @@ test( ); t.end(); }, - { skip: 
!canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py2.7 can package flask with default options', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(2) } -); +test('pipenv py3.9 can package flask with default options', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/pipenv'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { 
stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.9 can package flask with zip option', async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/pipenv'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('non build pyproject.toml uses requirements.txt', async (t) => { + process.chdir('tests/non_build_pyproject'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { + process.chdir('tests/non_poetry_pyproject'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); +}); + +test('poetry py3.9 can package flask with default options', async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 can package flask with slim option', async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', 
'../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.9 can package flask with zip option', async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/poetry'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.9 can package flask with zip option and no explicit include', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 
'serverless.yml']); + sls(['package'], { env: { zip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.9 can package lambda-decorators using vendor option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { vendor: './vendor' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.end(); +}); test( - 'py2.7 can package flask with slim option', - async t => { + "Don't nuke execute perms", + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); + const perm = '755'; + npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--slim=true', - 'package' + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml', ]); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is 
packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); + + const zipfiles_with_metadata = await listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' ); + t.end(); }, - { skip: !hasPython(2) } + { skip: process.platform === 'win32' } ); +test('py3.9 can package flask in a project with a space in it', async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + test( - 'py2.7 can package flask with zip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); + 'py3.9 can package flask in a project with a space in it with docker', + async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--zip=true', - 'package' - ]); + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - 
t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(2) } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('py3.9 supports custom file name with fileName option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['package'], { env: { fileName: 'puck' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test("py3.9 doesn't package bottle with zip option", async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + 
zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + 'py3.9 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + async (t) => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package' - ]); + sls(['package'], { + env: { + dockerizePip: 'true', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + 
zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - "py2.7 doesn't package bottle with noDeploy option", - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(2) } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py2.7 can package flask with zip & dockerizePip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - 'package' - ]); +test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip 
file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with zip & slim & dockerizePip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - '--slim=true', - 'package' - ]); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with dockerizePip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - 'package' - ]); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package' - ]); - const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - '*.pyc files are NOT packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { - process.chdir('tests/base'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package' - ]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'pipenv py3.6 can package flask with default options', - async t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.false( - zipfiles.includes(`pytest${sep}__init__.py`), - 'dev-package pytest is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'pipenv py3.6 can package flask with slim option', - async t => { - process.chdir('tests/pipenv'); - const 
path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'pipenv py3.6 can package flask with slim & slimPatterns options', - async t => { - process.chdir('tests/pipenv'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'pipenv py3.6 can package flask with zip option', - async t => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - "pipenv py3.6 doesn't package bottle with noDeploy option", - async t => { - process.chdir('tests/pipenv'); - const 
path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'non build pyproject.toml uses requirements.txt', - async t => { - process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'non poetry pyproject.toml without requirements.txt packages handler only', - async t => { - process.chdir('tests/non_poetry_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'poetry py3.6 can package flask with default options', - async t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'poetry py3.6 can package flask with slim option', - async t => { - process.chdir('tests/poetry'); - const 
path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'poetry py3.6 can package flask with slim & slimPatterns options', - async t => { - process.chdir('tests/poetry'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'poetry py3.6 can package flask with zip option', - async t => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - "poetry py3.6 doesn't package bottle with noDeploy option", - async t => { - process.chdir('tests/poetry'); - const 
path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package flask with zip option and no explicit include', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); - perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['--zip=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package lambda-decorators using vendor option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([`--vendor=./vendor`, 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - "Don't nuke execute perms", - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - const perm = '755'; - - npm(['i', path]); - perl([ - '-p', - 
'-i.bak', - '-e', - 's/(handler.py.*$)/$1\n - foobar/', - 'serverless.yml' - ]); - writeFileSync(`foobar`, ''); - chmodSync(`foobar`, perm); - sls(['--vendor=./vendor', 'package']); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); - - const zipfiles_with_metadata = await listZipFilesWithMetaData( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles_with_metadata['foobar'].unixPermissions - .toString(8) - .slice(3, 6) === perm, - 'foobar has retained its executable file permissions' - ); - - const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; - t.true( - zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, - 'bin/flask has retained its executable file permissions' - ); - - t.end(); - }, - { skip: process.platform === 'win32' || !hasPython(3.6) } -); - -test( - 'py3.6 can package flask in a project with a space in it', - async t => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package flask in a project with a space in it with docker', - async t => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } -); - -test( - 'py3.6 supports custom file name with fileName option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - writeFileSync('puck', 'requests'); - npm(['i', path]); - sls(['--fileName=puck', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes(`requests${sep}__init__.py`), - 'requests is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged' - ); - t.false( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - "py3.6 doesn't package bottle with zip option", - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' - ]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.false( - zippedReqs.includes(`bottle.py`), - 'bottle is NOT packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - 
-test( - 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', - async t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - '--dockerizePip=true', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package' - ]); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', 
- 'package' - ]); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - '--dockerizePip=true', - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package' - ]); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { - process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - 
zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { - process.chdir('tests/poetry'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py3.6 can package flask with package individually option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', 'package']); - - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.false( - zipfiles_hello.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.false( - zipfiles_hello2.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.false( - 
zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.false( - zipfiles_hello3.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello3' - ); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.false( - zipfiles_hello4.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello4' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.6) } -); - - - -test( - 'py3.6 can package flask with package individually & slim option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--slim=true', 'package']); - - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - 
t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.deepEqual( - zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.6) } -); - -test( - 'py2.7 can package flask with package individually option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); - - const zipfiles_hello = await 
listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py2.7 can package flask with package individually & slim option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls([ - '--individually=true', - '--runtime=python2.7', - '--slim=true', - 'package' - ]); + sls(['package'], { + env: { slim: 'true', 
slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); +test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); +test('poetry py3.9 can package flask with package individually option', async (t) => { + process.chdir('tests/poetry_individually'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.9 can package flask with package individually option', async (t) => { + 
process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); - t.end(); - }, - { skip: !hasPython(2.7) } -); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); -test( - 'py2.7 can ignore functions defined with `image`', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); - t.true( - 
pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' - ); + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); - t.end(); - }, - { skip: !hasPython(2.7) } -); + t.end(); +}); + +test('py3.9 can package flask with package individually & slim option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', slim: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); -test( - 'py3.6 can package only requirements of module', - async t => { - process.chdir('tests/individually'); - const path = npm(['pack', 
'../..']); - npm(['i', path]); - sls(['package']); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); - const zipfiles_hello = await listZipFiles( - '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' - ); - t.true( - zipfiles_hello.includes('handler1.py'), - 'handler1.py is packaged at root level in function hello1' - ); - t.false( - zipfiles_hello.includes('handler2.py'), - 'handler2.py is NOT packaged at root level in function hello1' - ); - t.true( - zipfiles_hello.includes(`pyaml${sep}__init__.py`), - 'pyaml is packaged in function hello1' - ); - t.true( - zipfiles_hello.includes(`boto3${sep}__init__.py`), - 'boto3 is packaged in function hello1' - ); - t.false( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello1' - ); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); - const zipfiles_hello2 = await listZipFiles( - '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' - ); - t.true( - zipfiles_hello2.includes('handler2.py'), - 'handler2.py is packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes('handler1.py'), - 'handler1.py 
is NOT packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes(`pyaml${sep}__init__.py`), - 'pyaml is NOT packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); - t.end(); - }, - { skip: !hasPython(3.6) } -); + t.end(); +}); -test( - 'py3.6 can package lambda-decorators using vendor and invidiually option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', '--vendor=./vendor', 'package']); +test('py3.9 can package only requirements of module', async (t) => { + process.chdir('tests/individually'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles_hello = await listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.true( + 
zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged at root level in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes('handler1.py'), + 'handler1.py is NOT packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged at root level in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); + t.end(); +}); + +test('py3.9 
can package lambda-decorators using vendor and invidiually option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged at root level in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged at root level in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`lambda_decorators.py`), - 'lambda_decorators.py is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - 
zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.end(); - }, - { skip: !hasPython(3.6) } -); + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged at root level in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.end(); +}); test( "Don't nuke execute perms when using individually", - async t => { + async (t) => { process.chdir('tests/individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, ''); chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles_hello1 = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -1974,8 +1323,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( 
'.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -1984,21 +1334,20 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.6) } + { skip: process.platform === 'win32' } ); test( "Don't nuke execute perms when using individually w/docker", - async t => { + async (t) => { process.chdir('tests/individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, '', { mode: perm }); chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles_hello = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -2013,8 +1362,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -2023,49 +1373,99 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.6) } + { skip: !canUseDocker() || process.platform === 'win32' } ); test( - 'py3.6 uses download cache by default option', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); + 'py3.9 can package flask running in docker with module runtime & architecture of function', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); - const cachepath = getUserCachePath(); + + 
sls(['package'], { + env: { dockerizePip: 'true' }, + }); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' ); - t.end(); }, - { skip: !hasPython(3.6) } + { + skip: !canUseDocker() || process.platform === 'win32', + } ); test( - 'py3.6 uses download cache by default', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); + 'py3.9 can package flask succesfully when using mixed architecture, docker and zipping', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); - sls(['--cacheLocation=.requirements-cache', 'package']); + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); + + const zipfiles_hello = await listZipFiles('.serverless/hello1.zip'); t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' + zipfiles_hello.includes(`module1${sep}handler1.ts`), + 'handler1.ts is packaged in module dir for hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in 
function hello2' + ); + t.false( + zipfiles_hello2.includes(`module1${sep}handler1.ts`), + 'handler1.ts is NOT included at module1 level in hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT included in zipfile' + ); + t.true( + zippedReqs.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2 in requirements.zip' + ); + t.end(); }, - { skip: !hasPython(3.6) } + { skip: !canUseDocker() || process.platform === 'win32' } ); test( - 'py3.6 uses download cache with dockerizePip option', - async t => { + 'py3.9 uses download cache by default option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2073,185 +1473,223 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: true } ); test( - 'py3.6 uses download cache with dockerizePip by default option', - async t => { + 'py3.9 uses download cache by default', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--cacheLocation=.requirements-cache', - 'package' - ]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: true } ); test( - 'py3.6 uses static and download cache', - async t => { + 'py3.9 uses download cache with dockerizePip option', + async (t) => 
{ process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' + 'cache directory exists' ); t.end(); }, - { skip: !hasPython(3.6) } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); test( - 'py3.6 uses static and download cache with dockerizePip option', - async t => { + 'py3.9 uses download cache with dockerizePip by default option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); + sls(['package'], { + env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' }, + }); t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); test( - 'py3.6 uses static cache', - async t => { + 'py3.9 uses static and download cache', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - 
sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` ), - '.completed_requirements exists in static-cache' - ); - - // py3.6 checking that static cache actually pulls from cache (by poisoning it) - writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, - 'injected new file into static cache folder' - ); - sls(['package']); - - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('injected_file_is_bad_form'), - "static cache is really used when running 'sls package' again" + 'flask exists in static-cache' ); - t.end(); }, - { skip: !hasPython(3.6) } + { skip: true } ); test( - 'py3.6 uses static cache with cacheLocation option', - async t => { + 'py3.9 uses static and download cache with dockerizePip option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const cachepath = '.requirements-cache'; - sls([`--cacheLocation=${cachepath}`, 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); + const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), - 'flask exists in static-cache' + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' ); t.true( 
pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` ), - '.completed_requirements exists in static-cache' + 'flask exists in static-cache' ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('py3.9 uses static cache', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.9 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + + t.end(); +}); + +test('py3.9 uses static cache with cacheLocation option', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls(['package'], { env: { cacheLocation: cachepath } }); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + 
pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); +}); + test( - 'py3.6 uses static cache with dockerizePip & slim option', - async t => { + 'py3.9 uses static cache with dockerizePip & slim option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) + // py3.9 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['--dockerizePip=true', '--slim=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), "static cache is really used when running 'sls package' again" ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + 
zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( - 'py3.6 uses download cache with dockerizePip & slim option', - async t => { + 'py3.9 uses download cache with dockerizePip & slim option', + async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2261,47 +1699,140 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.6 can ignore functions defined with `image`', - async t => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['--individually=true', 'package']); +test('py3.9 can ignore functions defined with `image`', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged'); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 
'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + t.end(); +}); +test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { + copySync('tests/poetry', 'tests/base with a space'); + process.chdir('tests/base with a space'); + removeSync('poetry.lock'); + + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + const { stdout } = sls(['package'], { + env: { requirePoetryLockFile: 'true', slim: 'true' }, + noThrow: true, + }); + t.true( + stdout.includes( + 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' + ), + 'flag works and error is properly reported' + ); + t.end(); +}); + +test('works with provider.runtime not being python', async (t) => { + process.chdir('tests/base'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { runtime: 'nodejs20.x' } }); + t.true( + pathExistsSync('.serverless/sls-py-req-test.zip'), + 'sls-py-req-test is packaged' + ); + t.end(); +}); + +test('poetry py3.9 packages additional optional packages', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + 
poetryWithoutGroups: 'poetryWithoutGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryOnlyGroups: 'poetryOnlyGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test( + 'py3.7 injects dependencies into `package` folder when using scaleway provider', + async (t) => { + process.chdir('tests/scaleway_provider'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( - pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' + zipfiles.includes(`package${sep}flask${sep}__init__.py`), + 'flask is packaged' ); t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' + zipfiles.includes(`package${sep}boto3${sep}__init__.py`), + 'boto3 is packaged' ); - t.end(); }, - { skip: !hasPython(3.6) } + { skip: true } // sls v4 supports aws 
provider only ); diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/base/_slimPatterns.yml +++ b/tests/base/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/base/custom_ssh b/tests/base/custom_ssh new file mode 100644 index 00000000..8a7c4203 --- /dev/null +++ b/tests/base/custom_ssh @@ -0,0 +1 @@ +SOME KEY diff --git a/tests/base/package.json b/tests/base/package.json index 43ce4eee..b07744c9 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt index 4d73c837..b09aa52a 100644 --- a/tests/base/requirements-w-nested.txt +++ b/tests/base/requirements-w-nested.txt @@ -1,3 +1,3 @@ -flask +flask==2.0.3 bottle -r requirements-common.txt diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 6bb1f322..87423210 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,34 +2,40 @@ service: sls-py-req-test provider: name: aws - runtime: ${opt:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.9'} plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + dockerSsh: ${env:dockerSsh, self:custom.defaults.dockerSsh} + dockerPrivateKey: ${env:dockerPrivateKey, self:custom.defaults.dockerPrivateKey} + dockerImage: ${env:dockerImage, self:custom.defaults.dockerImage} + slim: ${env:slim, 
self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - vendor: ${opt:vendor, ''} - fileName: ${opt:fileName, 'requirements.txt'} - useStaticCache: ${opt:useStaticCache, self:custom.defaults.useStaticCache} - useDownloadCache: ${opt:useDownloadCache, self:custom.defaults.useDownloadCache} - cacheLocation: ${opt:cacheLocation, ''} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + vendor: ${env:vendor, ''} + fileName: ${env:fileName, 'requirements.txt'} + useStaticCache: ${env:useStaticCache, self:custom.defaults.useStaticCache} + useDownloadCache: ${env:useDownloadCache, self:custom.defaults.useDownloadCache} + cacheLocation: ${env:cacheLocation, ''} defaults: slim: false slimPatterns: false slimPatternsAppendDefaults: true zip: false dockerizePip: false + dockerSsh: false + dockerPrivateKey: '' + dockerImage: '' individually: false useStaticCache: true useDownloadCache: true package: - individually: ${opt:individually, self:custom.defaults.individually} + individually: ${env:individually, self:custom.defaults.individually} patterns: - '!**/*' - 'handler.py' @@ -41,7 +47,7 @@ functions: handler: handler.hello hello3: handler: handler.hello - runtime: nodejs8.10 + runtime: nodejs14.x hello4: handler: fn2_handler.hello module: fn2 @@ -50,6 +56,3 @@ functions: - 'fn2/**' hello5: image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 - - - diff --git a/tests/individually/module2/requirements.txt b/tests/individually/module2/requirements.txt index 7e106024..c09d0264 100644 --- a/tests/individually/module2/requirements.txt +++ b/tests/individually/module2/requirements.txt @@ -1 +1 @@ -flask +flask==2.0.3 diff --git a/tests/individually/package.json 
b/tests/individually/package.json index 43ce4eee..b07744c9 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 121bd89d..6409532b 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.6 + runtime: python3.9 package: individually: true @@ -10,7 +10,7 @@ package: - '!node_modules/**' custom: pythonRequirements: - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: dockerizePip: false diff --git a/tests/individually_mixed_runtime/module1/handler1.ts b/tests/individually_mixed_runtime/module1/handler1.ts new file mode 100644 index 00000000..b8062f8b --- /dev/null +++ b/tests/individually_mixed_runtime/module1/handler1.ts @@ -0,0 +1,3 @@ +function hello() { + return "hello" +} diff --git a/tests/individually_mixed_runtime/module2/handler2.py b/tests/individually_mixed_runtime/module2/handler2.py new file mode 100644 index 00000000..d9f5c465 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/handler2.py @@ -0,0 +1,6 @@ +import flask + +def hello(event, context): + return { + 'status': 200, + } diff --git a/tests/individually_mixed_runtime/module2/requirements.txt b/tests/individually_mixed_runtime/module2/requirements.txt new file mode 100644 index 00000000..c09d0264 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/requirements.txt @@ -0,0 +1 @@ +flask==2.0.3 diff --git a/tests/individually_mixed_runtime/package.json b/tests/individually_mixed_runtime/package.json new file mode 100644 
index 00000000..b07744c9 --- /dev/null +++ b/tests/individually_mixed_runtime/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/individually_mixed_runtime/requirements-common.txt b/tests/individually_mixed_runtime/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually_mixed_runtime/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/individually_mixed_runtime/serverless.yml b/tests/individually_mixed_runtime/serverless.yml new file mode 100644 index 00000000..7c602239 --- /dev/null +++ b/tests/individually_mixed_runtime/serverless.yml @@ -0,0 +1,39 @@ +service: sls-py-req-test-indiv-mixed-runtime + +provider: + name: aws + runtime: nodejs18.x + architecture: arm64 + +package: + individually: true + +custom: + pythonRequirements: + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + zip: ${env:zip, self:custom.defaults.zip} + defaults: + dockerizePip: false + zip: false + +functions: + hello1: + handler: handler1.hello + architecture: x86_64 + package: + patterns: + - '!**' + - 'module1/**' + + hello2: + handler: handler2.hello + module: module2 + runtime: python3.9 + architecture: x86_64 + package: + patterns: + - '!**' + - 'module2/**' + +plugins: + - serverless-python-requirements diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 43ce4eee..b07744c9 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": 
"file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt index aa55d989..09764fc3 100644 --- a/tests/non_build_pyproject/requirements.txt +++ b/tests/non_build_pyproject/requirements.txt @@ -1,2 +1,2 @@ -flask +flask==2.0.3 boto3 diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index 02e5a1f3..d1bbaee6 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 43ce4eee..b07744c9 100644 --- a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 3d872a87..7338b10b 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 0d65eb75..30e51dda 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -1,9 +1,10 @@ [[source]] -url = "https://pypi.python.org/simple" +url = "https://pypi.org/simple" verify_ssl = true +name = "pypi" [packages] -Flask = "*" +Flask = "==2.0.3" bottle = "*" boto3 = "*" diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- 
a/tests/pipenv/_slimPatterns.yml +++ b/tests/pipenv/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 43ce4eee..b07744c9 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index dd93e290..2b471526 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,17 +2,17 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false diff --git a/tests/poetry/_slimPatterns.yml b/tests/poetry/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/poetry/_slimPatterns.yml +++ b/tests/poetry/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 43ce4eee..b07744c9 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 
+9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml index b813968a..896b48e7 100644 --- a/tests/poetry/pyproject.toml +++ b/tests/poetry/pyproject.toml @@ -5,13 +5,13 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} -boto3 = "^1.9" +boto3 = "1.29.6" [tool.poetry.dev-dependencies] [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry"] build-backend = "poetry.masonry.api" diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index dd93e290..d10c4997 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -2,17 +2,18 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.9 plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} defaults: zip: false slimPatterns: false diff --git a/tests/poetry_individually/module1/handler.py b/tests/poetry_individually/module1/handler.py new 
file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_individually/module1/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml new file mode 100644 index 00000000..896b48e7 --- /dev/null +++ b/tests/poetry_individually/module1/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Flask = "2.0" +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +boto3 = "1.29.6" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/poetry_individually/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml new file mode 100644 index 00000000..86dbb547 --- /dev/null +++ b/tests/poetry_individually/serverless.yml @@ -0,0 +1,32 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, 
self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + individually: true + +functions: + hello: + handler: handler.hello + module: module1 + package: + patterns: + - 'module1/**' diff --git a/tests/poetry_packages/_poetryGroups.yml b/tests/poetry_packages/_poetryGroups.yml new file mode 100644 index 00000000..25abd07a --- /dev/null +++ b/tests/poetry_packages/_poetryGroups.yml @@ -0,0 +1,8 @@ +empty: [] +poetryWithGroups: + - custom1 + - custom2 +poetryWithoutGroups: + - custom1 +poetryOnlyGroups: + - custom2 diff --git a/tests/poetry_packages/_slimPatterns.yml b/tests/poetry_packages/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/poetry_packages/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/poetry_packages/handler.py b/tests/poetry_packages/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_packages/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/poetry_packages/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml new file mode 100644 index 00000000..0f9fc705 --- /dev/null +++ b/tests/poetry_packages/pyproject.toml @@ -0,0 +1,19 @@ 
+[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.7" +Flask = "2.0" + +[tool.poetry.group.custom1.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} + +[tool.poetry.group.custom2.dependencies] +boto3 = "1.29.6" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml new file mode 100644 index 00000000..c6972ede --- /dev/null +++ b/tests/poetry_packages/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.9 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} + poetryWithGroups: ${file(./_poetryGroups.yml):${env:poetryWithGroups, "empty"}} + poetryWithoutGroups: ${file(./_poetryGroups.yml):${env:poetryWithoutGroups, "empty"}} + poetryOnlyGroups: ${file(./_poetryGroups.yml):${env:poetryOnlyGroups, "empty"}} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello diff --git a/tests/scaleway_provider/_slimPatterns.yml b/tests/scaleway_provider/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/scaleway_provider/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/scaleway_provider/handler.py 
b/tests/scaleway_provider/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/scaleway_provider/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/scaleway_provider/package.json b/tests/scaleway_provider/package.json new file mode 100644 index 00000000..d54b88e0 --- /dev/null +++ b/tests/scaleway_provider/package.json @@ -0,0 +1,15 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz", + "serverless-scaleway-functions": "^0.4.8" + } +} diff --git a/tests/scaleway_provider/requirements.txt b/tests/scaleway_provider/requirements.txt new file mode 100644 index 00000000..23bfb7a6 --- /dev/null +++ b/tests/scaleway_provider/requirements.txt @@ -0,0 +1,3 @@ +flask==0.12.5 +bottle +boto3 diff --git a/tests/scaleway_provider/serverless.yml b/tests/scaleway_provider/serverless.yml new file mode 100644 index 00000000..5d827bdf --- /dev/null +++ b/tests/scaleway_provider/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +configValidationMode: off + +provider: + name: scaleway + runtime: python39 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions + +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + 
+package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello