From 50c2850874ded795fd50ae377f1db817a0212e7d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 24 Nov 2021 11:42:35 +0100 Subject: [PATCH 01/90] refactor: Adapt to `async` version of `spawn` --- lib/docker.js | 37 ++++++------- lib/pip.js | 147 +++++++++++++++++++++++++++----------------------- lib/pipenv.js | 28 +++++----- lib/poetry.js | 47 ++++++++-------- package.json | 1 + 5 files changed, 136 insertions(+), 124 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 328e3088..94229b21 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -1,4 +1,4 @@ -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const isWsl = require('is-wsl'); const fse = require('fs-extra'); const path = require('path'); @@ -8,18 +8,19 @@ const path = require('path'); * @param {string[]} options * @return {Object} */ -function dockerCommand(options) { +async function dockerCommand(options) { const cmd = 'docker'; - const ps = spawnSync(cmd, options, { encoding: 'utf-8' }); - if (ps.error) { - if (ps.error.code === 'ENOENT') { + try { + return await spawn(cmd, options, { encoding: 'utf-8' }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error('docker not found! Please install it.'); } - throw new Error(ps.error); - } else if (ps.status !== 0) { - throw new Error(ps.stderr); + throw e; } - return ps; } /** @@ -28,7 +29,7 @@ function dockerCommand(options) { * @param {string[]} extraArgs * @return {string} The name of the built docker image. */ -function buildImage(dockerFile, extraArgs) { +async function buildImage(dockerFile, extraArgs) { const imageName = 'sls-py-reqs-custom'; const options = ['build', '-f', dockerFile, '-t', imageName]; @@ -40,7 +41,7 @@ function buildImage(dockerFile, extraArgs) { options.push('.'); - dockerCommand(options); + await dockerCommand(options); return imageName; } @@ -72,7 +73,7 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -function tryBindPath(serverless, bindPath, testFile) { +async function tryBindPath(serverless, bindPath, testFile) { const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -85,7 +86,7 @@ function tryBindPath(serverless, bindPath, testFile) { ]; try { if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); - const ps = dockerCommand(options); + const ps = await dockerCommand(options); if (debug) serverless.cli.log(ps.stdout.trim()); return ps.stdout.trim() === `/test/${testFile}`; } catch (err) { @@ -100,14 +101,14 @@ function tryBindPath(serverless, bindPath, testFile) { * @param {string} servicePath * @return {string} The bind path. 
*/ -function getBindPath(serverless, servicePath) { +async function getBindPath(serverless, servicePath) { // Determine bind path if (process.platform !== 'win32' && !isWsl) { return servicePath; } // test docker is available - dockerCommand(['version']); + await dockerCommand(['version']); // find good bind path for Windows let bindPaths = []; @@ -144,7 +145,7 @@ function getBindPath(serverless, servicePath) { for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (tryBindPath(serverless, bindPath, testFile)) { + if (await tryBindPath(serverless, bindPath, testFile)) { return bindPath; } } @@ -157,7 +158,7 @@ function getBindPath(serverless, servicePath) { * @param {string} bindPath * @return {boolean} */ -function getDockerUid(bindPath) { +async function getDockerUid(bindPath) { const options = [ 'run', '--rm', @@ -169,7 +170,7 @@ function getDockerUid(bindPath) { '%u', '/bin/sh', ]; - const ps = dockerCommand(options); + const ps = await dockerCommand(options); return ps.stdout.trim(); } diff --git a/lib/pip.js b/lib/pip.js index 244010c8..78af2e20 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -3,7 +3,7 @@ const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); const set = require('lodash.set'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); @@ -96,16 +96,23 @@ function generateRequirementsFile( } } -function pipAcceptsSystem(pythonBin) { +async function pipAcceptsSystem(pythonBin) { // Check if pip has Debian's --system option and set it if so - const pipTestRes = spawnSync(pythonBin, ['-m', 'pip', 'help', 'install']); - if (pipTestRes.error) { - if (pipTestRes.error.code === 'ENOENT') { + try { + const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']); + return ( + pipTestRes.stdoutBuffer && + pipTestRes.stdoutBuffer.toString().indexOf('--system') >= 0 + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error(`${pythonBin} not found! 
Try the pythonBin option.`); } - throw pipTestRes.error; + throw e; } - return pipTestRes.stdout.toString().indexOf('--system') >= 0; } /** @@ -115,7 +122,7 @@ function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -function installRequirements(targetFolder, serverless, options) { +async function installRequirements(targetFolder, serverless, options) { const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); serverless.cli.log( @@ -176,7 +183,7 @@ function installRequirements(targetFolder, serverless, options) { pipCmd.push('--cache-dir', downloadCacheDir); } - if (pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin)) { pipCmd.push('--system'); } } @@ -191,7 +198,7 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log( `Building custom docker image from ${options.dockerFile}...` ); - dockerImage = buildImage( + dockerImage = await buildImage( options.dockerFile, options.dockerBuildCmdExtraArgs ); @@ -201,7 +208,9 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Docker Image: ${dockerImage}`); // Prepare bind path depending on os platform - const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + const bindPath = dockerPathForWin( + await getBindPath(serverless, targetFolder) + ); dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { @@ -233,7 +242,7 @@ function installRequirements(targetFolder, serverless, options) { fse.closeSync( fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') ); - const windowsized = getBindPath(serverless, downloadCacheDir); + const windowsized = await getBindPath(serverless, downloadCacheDir); // And now push it to a volume mount and to pip... dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); pipCmd.push('--cache-dir', dockerDownloadCacheDir); @@ -262,7 +271,7 @@ function installRequirements(targetFolder, serverless, options) { ]); } else { // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); + dockerCmd.push('-u', await getDockerUid(bindPath)); } for (let path of options.dockerExtraFiles) { @@ -315,22 +324,23 @@ function installRequirements(targetFolder, serverless, options) { serverless.cli.log(`Running ${quote(dockerCmd)}...`); - filterCommands(mainCmds).forEach(([cmd, ...args]) => { - const res = spawnSync(cmd, args); - if (res.error) { - if (res.error.code === 'ENOENT') { + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { const advice = cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; throw new Error(`${cmd} not found! 
${advice}`); } - throw res.error; - } - if (res.status !== 0) { - throw new Error(`STDOUT: ${res.stdout}\n\nSTDERR: ${res.stderr}`); + throw e; } - }); + } // If enabled slimming, delete files in slimPatterns if (options.slim === true || options.slim === 'true') { deleteFiles(options, targetFolder); @@ -489,7 +499,7 @@ function requirementsFileExists(servicePath, options, fileName) { * @param {Object} serverless * @return {string} */ -function installRequirementsIfNeeded( +async function installRequirementsIfNeeded( servicePath, modulePath, options, @@ -573,7 +583,7 @@ function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - installRequirements(workingReqsFolder, serverless, options); + await installRequirements(workingReqsFolder, serverless, options); // Copy vendor libraries to requirements folder if (options.vendor) { @@ -596,7 +606,7 @@ function installRequirementsIfNeeded( * pip install the requirements to the requirements directory * @return {undefined} */ -function installAllRequirements() { +async function installAllRequirements() { // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); // First, check and delete cache versions, if enabled checkForAndDeleteMaxCacheVersions(this.options, this.serverless); @@ -604,55 +614,56 @@ function installAllRequirements() { // Then if we're going to package functions individually... if (this.serverless.service.package.individually) { let doneModules = []; - this.targetFuncs - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ - ) + const filteredFuncs = this.targetFuncs.filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ ) - .map((f) => { - if (!get(f, 'module')) { - set(f, ['module'], '.'); - } - // If we didn't already process a module (functions can re-use modules) - if (!doneModules.includes(f.module)) { - const reqsInstalledAt = installRequirementsIfNeeded( - this.servicePath, - f.module, - this.options, - f, - this.serverless - ); - // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are - let modulePath = path.join( - this.servicePath, - '.serverless', - `${f.module}`, - 'requirements' - ); - // Only do if we didn't already do it - if ( - reqsInstalledAt && - !fse.existsSync(modulePath) && - reqsInstalledAt != modulePath - ) { - if (this.options.useStaticCache) { - // Windows can't symlink so we have to copy on Windows, - // it's not as fast, but at least it works - if (process.platform == 'win32') { - fse.copySync(reqsInstalledAt, modulePath); - } else { - fse.symlink(reqsInstalledAt, modulePath); - } + ); + + for (const f of filteredFuncs) { + if (!get(f, 'module')) { + set(f, ['module'], '.'); + } + + // If we didn't already process a module (functions can re-use modules) + if (!doneModules.includes(f.module)) { + const reqsInstalledAt = await installRequirementsIfNeeded( + this.servicePath, + f.module, + this.options, + f, + this.serverless + ); + // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are + let modulePath = path.join( + this.servicePath, + '.serverless', + `${f.module}`, + 'requirements' + ); + // Only do if we didn't already do it + if ( + reqsInstalledAt && + !fse.existsSync(modulePath) && + reqsInstalledAt != modulePath + ) { + if (this.options.useStaticCache) { + // Windows can't 
symlink so we have to copy on Windows, + // it's not as fast, but at least it works + if (process.platform == 'win32') { + fse.copySync(reqsInstalledAt, modulePath); } else { - fse.rename(reqsInstalledAt, modulePath); + fse.symlink(reqsInstalledAt, modulePath); } + } else { + fse.rename(reqsInstalledAt, modulePath); } - doneModules.push(f.module); } - }); + doneModules.push(f.module); + } + } } else { - const reqsInstalledAt = installRequirementsIfNeeded( + const reqsInstalledAt = await installRequirementsIfNeeded( this.servicePath, '', this.options, diff --git a/lib/pipenv.js b/lib/pipenv.js index 063fb5d8..e5731aaf 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -1,12 +1,12 @@ const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const { EOL } = require('os'); /** * pipenv install */ -function pipfileToRequirements() { +async function pipfileToRequirements() { if ( !this.options.usePipenv || !fse.existsSync(path.join(this.servicePath, 'Pipfile')) @@ -16,28 +16,26 @@ function pipfileToRequirements() { this.serverless.cli.log('Generating requirements.txt from Pipfile...'); - const res = spawnSync( - 'pipenv', - ['lock', '--requirements', '--keep-outdated'], - { + let res; + try { + res = await spawn('pipenv', ['lock', '--requirements', '--keep-outdated'], { cwd: this.servicePath, - } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { + }); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error( `pipenv not found! Install it with 'pip install pipenv'.` ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + throw e; } fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( path.join(this.servicePath, '.serverless/requirements.txt'), - removeEditableFlagFromRequirementsString(res.stdout) + removeEditableFlagFromRequirementsString(res.stdoutBuffer) ); } diff --git a/lib/poetry.js b/lib/poetry.js index 553a1392..55f83289 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -1,44 +1,45 @@ const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); -const { spawnSync } = require('child_process'); +const spawn = require('child-process-ext/spawn'); const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ -function pyprojectTomlToRequirements() { +async function pyprojectTomlToRequirements() { if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { return; } this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); - const res = spawnSync( - 'poetry', - [ - 'export', - '--without-hashes', - '-f', - 'requirements.txt', - '-o', - 'requirements.txt', - '--with-credentials', - ], - { - cwd: this.servicePath, - } - ); - if (res.error) { - if (res.error.code === 'ENOENT') { + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ], + { + cwd: this.servicePath, + } + ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { throw new Error( `poetry not found! 
Install it according to the poetry docs.` ); } - throw new Error(res.error); - } - if (res.status !== 0) { - throw new Error(res.stderr); + throw e; } const editableFlag = new RegExp(/^-e /gm); diff --git a/package.json b/package.json index 1fed4c39..c9d247a3 100644 --- a/package.json +++ b/package.json @@ -55,6 +55,7 @@ "@iarna/toml": "^2.2.5", "appdirectory": "^0.1.0", "bluebird": "^3.7.2", + "child-process-ext": "^2.1.1", "fs-extra": "^9.1.0", "glob-all": "^3.2.1", "is-wsl": "^2.2.0", From a79899ae5f6f66aa0c65e7fda8e0186d38ff446e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 10:57:15 +0100 Subject: [PATCH 02/90] refactor: Adapt v3 log writing interfaces --- index.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 2072bbc1..975ff750 100644 --- a/index.js +++ b/index.js @@ -109,9 +109,10 @@ class ServerlessPythonRequirements { * The plugin constructor * @param {Object} serverless * @param {Object} options + * @param {Object} v3Utils * @return {undefined} */ - constructor(serverless) { + constructor(serverless, cliOptions, v3Utils) { this.serverless = serverless; this.servicePath = this.serverless.config.servicePath; this.warningLogged = false; @@ -127,6 +128,13 @@ class ServerlessPythonRequirements { }, }); } + + if (v3Utils) { + this.log = v3Utils.log; + this.progress = v3Utils.progress; + this.writeText = v3Utils.writeText; + }; + this.commands = { requirements: { commands: { From 9e952df5e91abb98679ce9ea700a0c5409198205 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:09:29 +0100 Subject: [PATCH 03/90] refactor: Adapt `poetry` for modern logs --- lib/poetry.js | 104 +++++++++++++++++++++++++++++--------------------- 1 file changed, 61 insertions(+), 43 deletions(-) diff --git a/lib/poetry.js b/lib/poetry.js index 55f83289..65970cc4 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -1,6 +1,7 @@ const fs = require('fs'); const fse = require('fs-extra'); const path = require('path'); + const spawn = require('child-process-ext/spawn'); const tomlParse = require('@iarna/toml/parse-string'); @@ -12,58 +13,75 @@ async function pyprojectTomlToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from pyproject.toml...'); + let generateRequirementsProgress; + if (this.progress) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-toml' + ); + } else { + this.serverless.cli.log( + 'Generating requirements.txt from pyproject.toml...' + ); + } try { - await spawn( - 'poetry', - [ - 'export', - '--without-hashes', - '-f', - 'requirements.txt', - '-o', - 'requirements.txt', - '--with-credentials', - ], - { - cwd: this.servicePath, - } - ); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - throw new Error( - `poetry not found! Install it according to the poetry docs.` + try { + await spawn( + 'poetry', + [ + 'export', + '--without-hashes', + '-f', + 'requirements.txt', + '-o', + 'requirements.txt', + '--with-credentials', + ], + { + cwd: this.servicePath, + } ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new Error( + `poetry not found! 
Install it according to the poetry docs.` + ); + } + throw e; } - throw e; - } - const editableFlag = new RegExp(/^-e /gm); - const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); - const requirementsContents = fse.readFileSync(sourceRequirements, { - encoding: 'utf-8', - }); + const editableFlag = new RegExp(/^-e /gm); + const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); + const requirementsContents = fse.readFileSync(sourceRequirements, { + encoding: 'utf-8', + }); - if (requirementsContents.match(editableFlag)) { - this.serverless.cli.log( - 'The generated file contains -e flags, removing them...' - ); - fse.writeFileSync( + if (requirementsContents.match(editableFlag)) { + if (this.log) { + this.log.info('The generated file contains -e flags, removing them'); + } else { + this.serverless.cli.log( + 'The generated file contains -e flags, removing them...' + ); + } + fse.writeFileSync( + sourceRequirements, + requirementsContents.replace(editableFlag, '') + ); + } + + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.moveSync( sourceRequirements, - requirementsContents.replace(editableFlag, '') + path.join(this.servicePath, '.serverless', 'requirements.txt'), + { overwrite: true } ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.moveSync( - sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt'), - { overwrite: true } - ); } /** From e3afe7bf4162b0be77764874641a5e024be5a91a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:09:46 +0100 Subject: [PATCH 04/90] refactor: Adapt `pipenv` to modern logs --- lib/pipenv.js | 55 ++++++++++++++++++++++++++++++++++----------------- lib/poetry.js | 4 ++++ 2 files changed, 41 insertions(+), 18 deletions(-) diff --git a/lib/pipenv.js b/lib/pipenv.js index e5731aaf..5100a810 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -14,29 +14,48 @@ async function pipfileToRequirements() { return; } - this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + let generateRequirementsProgress; + if (this.progress) { + generateRequirementsProgress = this.progress.get( + 'python-generate-requirements-pipfile' + ); + generateRequirementsProgress.update( + 'Generating requirements.txt from Pipfile', + { isMainEvent: true } + ); + } else { + this.serverless.cli.log('Generating requirements.txt from Pipfile...'); + } - let res; try { - res = await spawn('pipenv', ['lock', '--requirements', '--keep-outdated'], { - cwd: this.servicePath, - }); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - throw new Error( - `pipenv not found! Install it with 'pip install pipenv'.` + let res; + try { + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } ); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + throw new Error( + `pipenv not found! 
Install it with 'pip install pipenv'.` + ); + } + throw e; } - throw e; + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.writeFileSync( + path.join(this.servicePath, '.serverless/requirements.txt'), + removeEditableFlagFromRequirementsString(res.stdoutBuffer) + ); + } finally { + generateRequirementsProgress && generateRequirementsProgress.remove(); } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); - fse.writeFileSync( - path.join(this.servicePath, '.serverless/requirements.txt'), - removeEditableFlagFromRequirementsString(res.stdoutBuffer) - ); } /** diff --git a/lib/poetry.js b/lib/poetry.js index 65970cc4..81988742 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -18,6 +18,10 @@ async function pyprojectTomlToRequirements() { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-toml' ); + generateRequirementsProgress.update( + 'Generating requirements.txt from "pyproject.toml"', + { isMainEvent: true } + ); } else { this.serverless.cli.log( 'Generating requirements.txt from pyproject.toml...' From 1c8f911ef57c91b3efd5fd3c030df9548aa6ceae Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:12:29 +0100 Subject: [PATCH 05/90] refactor: Adapt `clean` to modern logs --- lib/clean.js | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index e0bff238..88d7d03e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -38,8 +38,16 @@ function cleanup() { function cleanupCache() { const cacheLocation = getUserCachePath(this.options); if (fse.existsSync(cacheLocation)) { + let cleanupProgress; if (this.serverless) { - this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + if (this.progress) { + cleanupProgress = this.progress.get('python-cleanup-cache'); + cleanupProgress.notice(`Removing static caches at: ${cacheLocation}`, { + isMainEvent: true, + }); + } else { + this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); + } } // Only remove cache folders that we added, just incase someone accidentally puts a weird @@ -50,10 +58,19 @@ function cleanupCache() { .forEach((file) => { promises.push(fse.removeAsync(file)); }); - return BbPromise.all(promises); + return BbPromise.all(promises) + .then(() => cleanupProgress && cleanupProgress.remove()) + .catch((e) => { + cleanupProgress && cleanupProgress.remove(); + throw e; + }); } else { if (this.serverless) { - this.serverless.cli.log(`No static cache found`); + if (this.log) { + this.log.info(`No static cache found`); + } else { + this.serverless.cli.log(`No static cache found`); + } } return BbPromise.resolve(); } From 8ff97e6b7c279334e417dbdb65e64d0de2656986 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:12:44 +0100 Subject: [PATCH 06/90] refactor: Adapt `shared` to modern logs --- lib/shared.js | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/lib/shared.js b/lib/shared.js index 79b60cef..7baee58b 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -12,7 +12,7 @@ const sha256File = require('sha256-file'); * @param {Object} serverless * @return {undefined} */ -function checkForAndDeleteMaxCacheVersions(options, serverless) { +function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { // If we're using the static cache, and we have static cache max versions enabled if ( options.useStaticCache && @@ -42,10 +42,17 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { rimraf.sync(files[i]); 
items++; } + // Log the number of cache files flushed - serverless.cli.log( - `Removed ${items} items from cache because of staticCacheMaxVersions` - ); + if (log) { + log.info( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } else { + serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } } } } From 1162275d6eb95a756d174f87b40b9cfecd892bc7 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 12:13:23 +0100 Subject: [PATCH 07/90] refactor: Adapt `zip` to modern logs --- lib/zip.js | 81 +++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 62 insertions(+), 19 deletions(-) diff --git a/lib/zip.js b/lib/zip.js index 2e872aa9..de61ce0f 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -30,9 +30,13 @@ function addVendorHelper() { }) .then((functions) => uniqBy(functions, (func) => func.module)) .map((f) => { - this.serverless.cli.log( - `Adding Python requirements helper to ${f.module}...` - ); + if (this.log) { + this.log.info(`Adding Python requirements helper to ${f.module}`); + } else { + this.serverless.cli.log( + `Adding Python requirements helper to ${f.module}...` + ); + } return fse.copyAsync( path.resolve(__dirname, '../unzip_requirements.py'), @@ -40,7 +44,11 @@ function addVendorHelper() { ); }); } else { - this.serverless.cli.log('Adding Python requirements helper...'); + if (this.log) { + this.log.info('Adding Python requirements helper'); + } else { + this.serverless.cli.log('Adding Python requirements helper...'); + } if (!get(this.serverless.service, 'package.patterns')) { set(this.serverless.service, ['package', 'patterns'], []); @@ -72,15 +80,25 @@ function removeVendorHelper() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Removing Python requirements helper from ${f.module}...` - ); + if (this.log) { + this.log.info( + `Removing Python requirements helper from ${f.module}` + ); + } else { + this.serverless.cli.log( + `Removing Python requirements helper from ${f.module}...` + ); + } return fse.removeAsync( path.join(this.servicePath, f.module, 'unzip_requirements.py') ); }); } else { - this.serverless.cli.log('Removing Python requirements helper...'); + if (this.log) { + this.log.info('Removing Python requirements helper'); + } else { + this.serverless.cli.log('Removing Python requirements helper...'); + } return fse.removeAsync( path.join(this.servicePath, 'unzip_requirements.py') ); @@ -104,21 +122,46 @@ function packRequirements() { }) .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { - this.serverless.cli.log( - `Zipping required Python packages for ${f.module}...` - ); + let packProgress; + if (this.progress) { + packProgress = this.progress.get( + `python-pack-requirements-${f.module}` + ); + packProgress.update( + `Zipping required Python packages for ${f.module}`, + { isMainEvent: true } + ); + } else { + this.serverless.cli.log( + `Zipping required Python packages for ${f.module}...` + ); + } f.package.patterns.push(`${f.module}/.requirements.zip`); - return addTree( - new JSZip(), - `.serverless/${f.module}/requirements` - ).then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)); + return addTree(new JSZip(), `.serverless/${f.module}/requirements`) + .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) + .then(() => packProgress && packProgress.remove()) + .catch((e) => { + packProgress && packProgress.remove(); + throw e; + }); }); } else { - this.serverless.cli.log('Zipping 
required Python packages...'); + let packProgress; + if (this.progress) { + packProgress = this.progress.get(`python-pack-requirements`); + } else { + this.serverless.cli.log('Zipping required Python packages...'); + } this.serverless.service.package.patterns.push('.requirements.zip'); - return addTree(new JSZip(), '.serverless/requirements').then((zip) => - writeZip(zip, path.join(this.servicePath, '.requirements.zip')) - ); + return addTree(new JSZip(), '.serverless/requirements') + .then((zip) => + writeZip(zip, path.join(this.servicePath, '.requirements.zip')) + ) + .then(() => packProgress && packProgress.remove()) + .catch((e) => { + packProgress && packProgress.remove(); + throw e; + }); } } } From b7902aa047bd91359e41d65b42efca324ec15997 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:14:35 +0100 Subject: [PATCH 08/90] refactor: Adapt `pip` to modern logs --- index.js | 10 +- lib/pip.js | 566 ++++++++++++++++++++++++++++++----------------------- 2 files changed, 331 insertions(+), 245 deletions(-) diff --git a/index.js b/index.js index 975ff750..26616295 100644 --- a/index.js +++ b/index.js @@ -74,9 +74,13 @@ class ServerlessPythonRequirements { (options.dockerSsh || options.dockerImage || options.dockerFile) ) { if (!this.warningLogged) { - this.serverless.cli.log( - 'WARNING: You provided a docker related option but dockerizePip is set to false.' - ); + if (this.log) { + this.log.warning('You provided a docker related option but dockerizePip is set to false.'); + } else { + this.serverless.cli.log( + 'WARNING: You provided a docker related option but dockerizePip is set to false.' + ); + } this.warningLogged = true; } } diff --git a/lib/pip.js b/lib/pip.js index 78af2e20..89e8e74a 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -57,10 +57,9 @@ function mergeCommands(commands) { function generateRequirementsFile( requirementsPath, targetFile, - serverless, - servicePath, - options + pluginInstance ) { + const { serverless, servicePath, options, log } = pluginInstance; if ( options.usePoetry && fse.existsSync(path.join(servicePath, 'pyproject.toml')) && @@ -69,12 +68,15 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + pluginInstance ); + if (log) { + log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from pyproject.toml in ${targetFile}...` + ); + } } else if ( options.usePipenv && fse.existsSync(path.join(servicePath, 'Pipfile')) @@ -82,17 +84,26 @@ function generateRequirementsFile( filterRequirementsFile( path.join(servicePath, '.serverless/requirements.txt'), targetFile, - options, - serverless - ); - serverless.cli.log( - `Parsed requirements.txt from Pipfile in ${targetFile}...` + pluginInstance ); + if (log) { + log.info(`Parsed requirements.txt from Pipfile in ${targetFile}`); + } else { + serverless.cli.log( + `Parsed requirements.txt from Pipfile in ${targetFile}...` + ); + } } else { - filterRequirementsFile(requirementsPath, targetFile, options, serverless); - serverless.cli.log( - `Generated requirements from ${requirementsPath} in ${targetFile}...` - ); + filterRequirementsFile(requirementsPath, targetFile, pluginInstance); + if (log) { + log.info( + `Generated requirements from ${requirementsPath} in ${targetFile}` + ); + } else { + serverless.cli.log( + `Generated 
requirements from ${requirementsPath} in ${targetFile}...` + ); + } } } @@ -122,228 +133,290 @@ async function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -async function installRequirements(targetFolder, serverless, options) { +async function installRequirements( + targetFolder, + { options, serverless, log, progress } +) { const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); - serverless.cli.log( - `Installing requirements from ${targetRequirementsTxt} ...` - ); - - const dockerCmd = []; - const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - - if ( - Array.isArray(options.pipCmdExtraArgs) && - options.pipCmdExtraArgs.length > 0 - ) { - options.pipCmdExtraArgs.forEach((cmd) => { - const parts = cmd.split(/\s+/, 2); - pipCmd.push(...parts); - }); + let installProgress; + if (progress) { + installProgress = progress.get('python-install'); + installProgress.update( + `Installing requirements from "${targetRequirementsTxt}"`, + { isMainEvent: true } + ); + } else { + serverless.cli.log( + `Installing requirements from ${targetRequirementsTxt} ...` + ); } - const pipCmds = [pipCmd]; - const postCmds = []; - // Check if we're using the legacy --cache-dir command... - if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { - if (options.dockerizePip) { - throw ( - 'Error: You can not use --cache-dir with Docker any more, please\n' + - ' use the new option useDownloadCache instead. Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - } else { - serverless.cli.log('=================================================='); - serverless.cli.log( - 'Warning: You are using a deprecated --cache-dir inside\n' + - ' your pipCmdExtraArgs which may not work properly, please use the\n' + - ' useDownloadCache option instead. Please see: \n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' - ); - serverless.cli.log('=================================================='); - } - } + try { + const dockerCmd = []; + const pipCmd = [options.pythonBin, '-m', 'pip', 'install']; - if (!options.dockerizePip) { - // Push our local OS-specific paths for requirements and target directory - pipCmd.push( - '-t', - dockerPathForWin(targetFolder), - '-r', - dockerPathForWin(targetRequirementsTxt) - ); - // If we want a download cache... 
- if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - pipCmd.push('--cache-dir', downloadCacheDir); + if ( + Array.isArray(options.pipCmdExtraArgs) && + options.pipCmdExtraArgs.length > 0 + ) { + options.pipCmdExtraArgs.forEach((cmd) => { + const parts = cmd.split(/\s+/, 2); + pipCmd.push(...parts); + }); } - if (await pipAcceptsSystem(options.pythonBin)) { - pipCmd.push('--system'); - } - } - // If we are dockerizing pip - if (options.dockerizePip) { - // Push docker-specific paths for requirements and target directory - pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); - - // Build docker image if required - let dockerImage; - if (options.dockerFile) { - serverless.cli.log( - `Building custom docker image from ${options.dockerFile}...` - ); - dockerImage = await buildImage( - options.dockerFile, - options.dockerBuildCmdExtraArgs - ); - } else { - dockerImage = options.dockerImage; + const pipCmds = [pipCmd]; + const postCmds = []; + // Check if we're using the legacy --cache-dir command... + if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) { + if (options.dockerizePip) { + throw ( + 'Error: You can not use --cache-dir with Docker any more, please\n' + + ' use the new option useDownloadCache instead. Please see:\n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + if (log) { + log.warning( + 'You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + } else { + serverless.cli.log( + '==================================================' + ); + serverless.cli.log( + 'Warning: You are using a deprecated --cache-dir inside\n' + + ' your pipCmdExtraArgs which may not work properly, please use the\n' + + ' useDownloadCache option instead. Please see: \n' + + ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + ); + serverless.cli.log( + '==================================================' + ); + } + } } - serverless.cli.log(`Docker Image: ${dockerImage}`); - - // Prepare bind path depending on os platform - const bindPath = dockerPathForWin( - await getBindPath(serverless, targetFolder) - ); - dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); - if (options.dockerSsh) { - // Mount necessary ssh files to work with private repos - dockerCmd.push( - '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, - '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, - '-v', - `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, - '-e', - 'SSH_AUTH_SOCK=/tmp/ssh_sock' + if (!options.dockerizePip) { + // Push our local OS-specific paths for requirements and target directory + pipCmd.push( + '-t', + dockerPathForWin(targetFolder), + '-r', + dockerPathForWin(targetRequirementsTxt) ); - } + // If we want a download cache... 
+ if (options.useDownloadCache) { + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + pipCmd.push('--cache-dir', downloadCacheDir); + } - // If we want a download cache... - const dockerDownloadCacheDir = '/var/useDownloadCache'; - if (options.useDownloadCache) { - const downloadCacheDir = path.join( - getUserCachePath(options), - 'downloadCacheslspyc' - ); - serverless.cli.log(`Using download cache directory ${downloadCacheDir}`); - fse.ensureDirSync(downloadCacheDir); - // This little hack is necessary because getBindPath requires something inside of it to test... - // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use - // its own temp file) - fse.closeSync( - fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') - ); - const windowsized = await getBindPath(serverless, downloadCacheDir); - // And now push it to a volume mount and to pip... - dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); - pipCmd.push('--cache-dir', dockerDownloadCacheDir); + if (pipAcceptsSystem(options.pythonBin)) { + pipCmd.push('--system'); + } } - if (options.dockerEnv) { - // Add environment variables to docker run cmd - options.dockerEnv.forEach(function (item) { - dockerCmd.push('-e', item); - }); - } + // If we are dockerizing pip + if (options.dockerizePip) { + // Push docker-specific paths for requirements and target directory + pipCmd.push('-t', '/var/task/', '-r', '/var/task/requirements.txt'); + + // Build docker image if required + let dockerImage; + if (options.dockerFile) { + let buildDockerImageProgress; + if (progress) { + buildDockerImageProgress = progress.get( + 'python-install-build-docker' + ); + buildDockerImageProgress.update( + `Building custom docker image from ${options.dockerFile}` + ); + } else { + serverless.cli.log( + `Building custom docker image from ${options.dockerFile}...` + ); + } + try { + dockerImage = buildImage( + options.dockerFile, + options.dockerBuildCmdExtraArgs + ); + } finally { + buildDockerImageProgress && buildDockerImageProgress.remove(); + } + } else { + dockerImage = options.dockerImage; + } + if (log) { + log.info(`Docker Image: ${dockerImage}`); + } else { + serverless.cli.log(`Docker Image: ${dockerImage}`); + } - if (process.platform === 'linux') { - // Use same user so requirements folder is not root and so --cache-dir works + // Prepare bind path depending on os platform + const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + + dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); + if (options.dockerSsh) { + // Mount necessary ssh files to work with private repos + dockerCmd.push( + '-v', + `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + '-v', + `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + '-v', + `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, + '-e', + 'SSH_AUTH_SOCK=/tmp/ssh_sock' + ); + } + + // If we want a download cache... 
+ const dockerDownloadCacheDir = '/var/useDownloadCache'; if (options.useDownloadCache) { - // Set the ownership of the download cache dir to root - pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + const downloadCacheDir = path.join( + getUserCachePath(options), + 'downloadCacheslspyc' + ); + if (log) { + log.info(`Using download cache directory ${downloadCacheDir}`); + } else { + serverless.cli.log( + `Using download cache directory ${downloadCacheDir}` + ); + } + fse.ensureDirSync(downloadCacheDir); + // This little hack is necessary because getBindPath requires something inside of it to test... + // Ugh, this is so ugly, but someone has to fix getBindPath in some other way (eg: make it use + // its own temp file) + fse.closeSync( + fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') + ); + const windowsized = getBindPath(serverless, downloadCacheDir); + // And now push it to a volume mount and to pip... + dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); + pipCmd.push('--cache-dir', dockerDownloadCacheDir); } - // Install requirements with pip - // Set the ownership of the current folder to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task', - ]); - } else { - // Use same user so --cache-dir works - dockerCmd.push('-u', await getDockerUid(bindPath)); - } - for (let path of options.dockerExtraFiles) { - pipCmds.push(['cp', path, '/var/task/']); - } + if (options.dockerEnv) { + // Add environment variables to docker run cmd + options.dockerEnv.forEach(function (item) { + dockerCmd.push('-e', item); + }); + } - if (process.platform === 'linux') { - if (options.useDownloadCache) { - // Set the ownership of the download cache dir back to user + if (process.platform === 'linux') { + // Use same user so requirements folder is not root and so --cache-dir works + if (options.useDownloadCache) { + // Set the ownership of the download cache dir to root + pipCmds.unshift(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } + // Install requirements with pip + // Set the ownership of the current folder to user pipCmds.push([ 'chown', '-R', `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir, + '/var/task', ]); + } else { + // Use same user so --cache-dir works + dockerCmd.push('-u', getDockerUid(bindPath)); + } + + for (let path of options.dockerExtraFiles) { + pipCmds.push(['cp', path, '/var/task/']); } + + if (process.platform === 'linux') { + if (options.useDownloadCache) { + // Set the ownership of the download cache dir back to user + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } + } + + if (Array.isArray(options.dockerRunCmdExtraArgs)) { + dockerCmd.push(...options.dockerRunCmdExtraArgs); + } else { + throw new Error('dockerRunCmdExtraArgs option must be an array'); + } + + dockerCmd.push(dockerImage); } - if (Array.isArray(options.dockerRunCmdExtraArgs)) { - dockerCmd.push(...options.dockerRunCmdExtraArgs); - } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + // If enabled slimming, strip so files + switch (getStripMode(options)) { + case 'docker': + pipCmds.push(getStripCommand(options, '/var/task')); + break; + case 'direct': + postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); + break; } - dockerCmd.push(dockerImage); - } + let spawnArgs = { shell: true }; + if (process.env.SLS_DEBUG) { + spawnArgs.stdio = 'inherit'; + } + let mainCmds = []; + if 
(dockerCmd.length) { + dockerCmd.push(...mergeCommands(pipCmds)); + mainCmds = [dockerCmd]; + } else { + mainCmds = pipCmds; + } + mainCmds.push(...postCmds); - // If enabled slimming, strip so files - switch (getStripMode(options)) { - case 'docker': - pipCmds.push(getStripCommand(options, '/var/task')); - break; - case 'direct': - postCmds.push(getStripCommand(options, dockerPathForWin(targetFolder))); - break; - } + if (log) { + log.info(`Running ${quote(dockerCmd)}...`); + } else { + serverless.cli.log(`Running ${quote(dockerCmd)}...`); + } - let spawnArgs = { shell: true }; - if (process.env.SLS_DEBUG) { - spawnArgs.stdio = 'inherit'; - } - let mainCmds = []; - if (dockerCmd.length) { - dockerCmd.push(...mergeCommands(pipCmds)); - mainCmds = [dockerCmd]; - } else { - mainCmds = pipCmds; - } - mainCmds.push(...postCmds); - - serverless.cli.log(`Running ${quote(dockerCmd)}...`); - - for (const [cmd, ...args] of mainCmds) { - try { - await spawn(cmd, args); - } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { - const advice = - cmd.indexOf('python') > -1 - ? 'Try the pythonBin option' - : 'Please install it'; - throw new Error(`${cmd} not found! ${advice}`); + for (const [cmd, ...args] of mainCmds) { + try { + await spawn(cmd, args); + } catch (e) { + if ( + e.stderrBuffer && + e.stderrBuffer.toString().includes('command not found') + ) { + const advice = + cmd.indexOf('python') > -1 + ? 'Try the pythonBin option' + : 'Please install it'; + throw new Error(`${cmd} not found! ${advice}`); + } + throw e; } - throw e; } - } - // If enabled slimming, delete files in slimPatterns - if (options.slim === true || options.slim === 'true') { - deleteFiles(options, targetFolder); + // If enabled slimming, delete files in slimPatterns + if (options.slim === true || options.slim === 'true') { + deleteFiles(options, targetFolder); + } + } finally { + installProgress && installProgress.remove(); } } @@ -392,7 +465,7 @@ function getRequirements(source) { * @param {string} target requirements where results are written * @param {Object} options */ -function filterRequirementsFile(source, target, options, serverless) { +function filterRequirementsFile(source, target, { options, serverless, log }) { const noDeploy = new Set(options.noDeploy || []); const requirements = getRequirements(source); var prepend = []; @@ -414,9 +487,13 @@ function filterRequirementsFile(source, target, options, serverless) { // not required inside final archive and avoids pip bugs // see https://github.com/UnitedIncome/serverless-python-requirements/issues/240 req = req.split('-e')[1].trim(); - serverless.cli.log( - `Warning: Stripping -e flag from requirement ${req}` - ); + if (log) { + log.warning(`Stripping -e flag from requirement ${req}`); + } else { + serverless.cli.log( + `Warning: Stripping -e flag from requirement ${req}` + ); + } } // Keep options for later @@ -444,13 +521,19 @@ function filterRequirementsFile(source, target, options, serverless) { * @param {Object} serverless * @return {undefined} */ -function copyVendors(vendorFolder, targetFolder, serverless) { +function copyVendors(vendorFolder, targetFolder, { serverless, log }) { // Create target folder if it does not exist fse.ensureDirSync(targetFolder); - serverless.cli.log( - `Copying vendor libraries from ${vendorFolder} to ${targetFolder}...` - ); + if (log) { + log.info( + `Copying vendor libraries from ${vendorFolder} to ${targetFolder}` + ); + } else { + serverless.cli.log( + `Copying vendor libraries 
from ${vendorFolder} to ${targetFolder}...` + ); + } fse.readdirSync(vendorFolder).map((file) => { let source = path.join(vendorFolder, file); @@ -500,12 +583,11 @@ function requirementsFileExists(servicePath, options, fileName) { * @return {string} */ async function installRequirementsIfNeeded( - servicePath, modulePath, - options, funcOptions, - serverless + pluginInstance ) { + const { servicePath, options, serverless } = pluginInstance; // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); @@ -528,19 +610,19 @@ async function installRequirementsIfNeeded( fse.ensureDirSync(requirementsTxtDirectory); const slsReqsTxt = path.join(requirementsTxtDirectory, 'requirements.txt'); - generateRequirementsFile( - fileName, - slsReqsTxt, - serverless, - servicePath, - options - ); + generateRequirementsFile(fileName, slsReqsTxt, pluginInstance); // If no requirements file or an empty requirements file, then do nothing if (!fse.existsSync(slsReqsTxt) || fse.statSync(slsReqsTxt).size == 0) { - serverless.cli.log( - `Skipping empty output requirements.txt file from ${slsReqsTxt}` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } else { + serverless.cli.log( + `Skipping empty output requirements.txt file from ${slsReqsTxt}` + ); + } return false; } @@ -560,9 +642,15 @@ async function installRequirementsIfNeeded( fse.existsSync(path.join(workingReqsFolder, '.completed_requirements')) && workingReqsFolder.endsWith('_slspyc') ) { - serverless.cli.log( - `Using static cache of requirements found at ${workingReqsFolder} ...` - ); + if (pluginInstance.log) { + pluginInstance.log.info( + `Using static cache of requirements found at ${workingReqsFolder}` + ); + } else { + serverless.cli.log( + `Using static cache of requirements found at ${workingReqsFolder} ...` + ); + } // We'll "touch" the folder, as to bring it to the start of the FIFO cache fse.utimesSync(workingReqsFolder, new Date(), new Date()); return workingReqsFolder; @@ -583,14 +671,14 @@ async function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - await installRequirements(workingReqsFolder, serverless, options); + await installRequirements(workingReqsFolder, pluginInstance); // Copy vendor libraries to requirements folder if (options.vendor) { - copyVendors(options.vendor, workingReqsFolder, serverless); + copyVendors(options.vendor, workingReqsFolder, pluginInstance); } if (funcOptions.vendor) { - copyVendors(funcOptions.vendor, workingReqsFolder, serverless); + copyVendors(funcOptions.vendor, workingReqsFolder, pluginInstance); } // Then touch our ".completed_requirements" file so we know we can use this for static cache @@ -609,7 +697,7 @@ async function installRequirementsIfNeeded( async function installAllRequirements() { // fse.ensureDirSync(path.join(this.servicePath, '.serverless')); // First, check and delete cache versions, if enabled - checkForAndDeleteMaxCacheVersions(this.options, this.serverless); + checkForAndDeleteMaxCacheVersions(this); // Then if we're going to package functions individually... 
if (this.serverless.service.package.individually) { @@ -663,13 +751,7 @@ async function installAllRequirements() { } } } else { - const reqsInstalledAt = await installRequirementsIfNeeded( - this.servicePath, - '', - this.options, - {}, - this.serverless - ); + const reqsInstalledAt = await installRequirementsIfNeeded('', {}, this); // Add symlinks into .serverless for so it's easier for injecting and for users to see where reqs are let symlinkPath = path.join( this.servicePath, From f43acea2a5ee207874f4b5120a95a6b0164bd405 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:23:49 +0100 Subject: [PATCH 09/90] refactor: Adapt `layer` to modern logs --- lib/layer.js | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index 12d338ec..ddc90f6a 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -53,9 +53,24 @@ function layerRequirements() { return BbPromise.resolve(); } - this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + let layerProgress; + if (this.progress) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update('Packaging Python Requirements Lambda Layer', { + isMainEvent: true, + }); + } else { + this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); + } - return BbPromise.bind(this).then(zipRequirements).then(createLayers); + return BbPromise.bind(this) + .then(zipRequirements) + .then(createLayers) + .then(() => layerProgress && layerProgress.remove()) + .catch((e) => { + layerProgress && layerProgress.remove(); + throw e; + }); } module.exports = { From cbd7e9c4ecb335457ed59b6d4942636b0639a53a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:34:52 +0100 Subject: [PATCH 10/90] refactor: Adapt `inject` to modern logs --- lib/inject.js | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index 3cad758d..2d1bdc2b 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -92,10 +92,19 @@ function injectAllRequirements(funcArtifact) { return BbPromise.resolve(); } - this.serverless.cli.log('Injecting required Python packages to package...'); + let injectProgress; + if (this.progress) { + injectProgress = this.progress.get('python-inject-requirements'); + injectProgress.update('Injecting required Python packages to package', { + isMainEvent: true, + }); + } else { + this.serverless.cli.log('Injecting required Python packages to package...'); + } + let returnPromise; if (this.serverless.service.package.individually) { - return BbPromise.resolve(this.targetFuncs) + returnPromise = BbPromise.resolve(this.targetFuncs) .filter((func) => (func.runtime || this.serverless.service.provider.runtime).match( /^python.*/ @@ -132,12 +141,19 @@ function injectAllRequirements(funcArtifact) { ); }); } else if (!this.options.zip) { - return injectRequirements( + returnPromise = injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, this.options ); } + + return returnPromise + .then(() => injectProgress && injectProgress.remove()) + .catch((e) => { + injectProgress && injectProgress.remove(); + throw e; + }); } module.exports = { injectAllRequirements }; From d70ca215eb8d0644697aed3d9515755c89c701e1 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 23 Nov 2021 13:43:51 +0100 Subject: [PATCH 11/90] refactor: Adapt `docker` for modern logs --- index.js | 2 +- lib/docker.js | 34 
++++++++++++++++++++++++++-------- lib/inject.js | 15 +++++++++------ lib/pip.js | 26 +++++++++++--------------- 4 files changed, 47 insertions(+), 30 deletions(-) diff --git a/index.js b/index.js index 26616295..cf2af38e 100644 --- a/index.js +++ b/index.js @@ -137,7 +137,7 @@ class ServerlessPythonRequirements { this.log = v3Utils.log; this.progress = v3Utils.progress; this.writeText = v3Utils.writeText; - }; + } this.commands = { requirements: { diff --git a/lib/docker.js b/lib/docker.js index 94229b21..9da6da6a 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -73,7 +73,7 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -async function tryBindPath(serverless, bindPath, testFile) { +async function tryBindPath(bindPath, testFile, { serverless, log }) { const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -85,12 +85,30 @@ async function tryBindPath(serverless, bindPath, testFile) { `/test/${testFile}`, ]; try { - if (debug) serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + if (debug) { + if (log) { + log.debug(`Trying bindPath ${bindPath} (${options})`); + } else { + serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); + } + } const ps = await dockerCommand(options); - if (debug) serverless.cli.log(ps.stdout.trim()); - return ps.stdout.trim() === `/test/${testFile}`; + if (debug) { + if (log) { + log.debug(ps.stdoutBuffer.trim()); + } else { + serverless.cli.log(ps.stdoutBuffer.trim()); + } + } + return ps.stdoutBuffer.trim() === `/test/${testFile}`; } catch (err) { - if (debug) serverless.cli.log(`Finding bindPath failed with ${err}`); + if (debug) { + if (log) { + log.debug(`Finding bindPath failed with ${err}`); + } else { + serverless.cli.log(`Finding bindPath failed with ${err}`); + } + } return false; } } @@ -101,7 +119,7 @@ async function tryBindPath(serverless, bindPath, testFile) { * @param {string} servicePath * @return {string} The bind path. 
*/ -async function getBindPath(serverless, servicePath) { +async function getBindPath(servicePath, pluginInstance) { // Determine bind path if (process.platform !== 'win32' && !isWsl) { return servicePath; @@ -145,7 +163,7 @@ async function getBindPath(serverless, servicePath) { for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; - if (await tryBindPath(serverless, bindPath, testFile)) { + if (await tryBindPath(bindPath, testFile, pluginInstance)) { return bindPath; } } @@ -171,7 +189,7 @@ async function getDockerUid(bindPath) { '/bin/sh', ]; const ps = await dockerCommand(options); - return ps.stdout.trim(); + return ps.stdoutBuffer.trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; diff --git a/lib/inject.js b/lib/inject.js index 2d1bdc2b..85bdf597 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -148,12 +148,15 @@ function injectAllRequirements(funcArtifact) { ); } - return returnPromise - .then(() => injectProgress && injectProgress.remove()) - .catch((e) => { - injectProgress && injectProgress.remove(); - throw e; - }); + return ( + returnPromise && + returnPromise + .then(() => injectProgress && injectProgress.remove()) + .catch((e) => { + injectProgress && injectProgress.remove(); + throw e; + }) + ); } module.exports = { injectAllRequirements }; diff --git a/lib/pip.js b/lib/pip.js index 89e8e74a..d88f5c7d 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -133,19 +133,15 @@ async function pipAcceptsSystem(pythonBin) { * @param {Object} options * @return {undefined} */ -async function installRequirements( - targetFolder, - { options, serverless, log, progress } -) { +async function installRequirements(targetFolder, pluginInstance) { + const { options, serverless, log, progress } = pluginInstance; const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); let installProgress; if (progress) { + log.info(`Installing requirements from "${targetRequirementsTxt}"`); installProgress = progress.get('python-install'); - installProgress.update( - `Installing requirements from "${targetRequirementsTxt}"`, - { isMainEvent: true } - ); + installProgress.update('Installing requirements'); } else { serverless.cli.log( `Installing requirements from ${targetRequirementsTxt} ...` @@ -226,7 +222,7 @@ async function installRequirements( pipCmd.push('--cache-dir', downloadCacheDir); } - if (pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin)) { pipCmd.push('--system'); } } @@ -253,7 +249,7 @@ async function installRequirements( ); } try { - dockerImage = buildImage( + dockerImage = await buildImage( options.dockerFile, options.dockerBuildCmdExtraArgs ); @@ -270,7 +266,9 @@ async function installRequirements( } // Prepare bind path depending on os platform - const bindPath = dockerPathForWin(getBindPath(serverless, targetFolder)); + const bindPath = dockerPathForWin( + await getBindPath(targetFolder, pluginInstance) + ); dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { @@ -308,7 +306,7 @@ async function installRequirements( fse.closeSync( fse.openSync(path.join(downloadCacheDir, 'requirements.txt'), 'w') ); - const windowsized = getBindPath(serverless, downloadCacheDir); + const windowsized = await getBindPath(downloadCacheDir, pluginInstance); // And now push it to a volume mount and to pip... 
dockerCmd.push('-v', `${windowsized}:${dockerDownloadCacheDir}:z`); pipCmd.push('--cache-dir', dockerDownloadCacheDir); @@ -337,7 +335,7 @@ async function installRequirements( ]); } else { // Use same user so --cache-dir works - dockerCmd.push('-u', getDockerUid(bindPath)); + dockerCmd.push('-u', await getDockerUid(bindPath)); } for (let path of options.dockerExtraFiles) { @@ -716,9 +714,7 @@ async function installAllRequirements() { // If we didn't already process a module (functions can re-use modules) if (!doneModules.includes(f.module)) { const reqsInstalledAt = await installRequirementsIfNeeded( - this.servicePath, f.module, - this.options, f, this.serverless ); From 44b9591f01157a1811e3ca8b43e21265a155a976 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 24 Nov 2021 21:44:40 +0100 Subject: [PATCH 12/90] refactor: Ensure proper verbose progress logs --- lib/clean.js | 7 +++---- lib/inject.js | 7 +++---- lib/layer.js | 7 +++---- lib/pip.js | 2 +- lib/pipenv.js | 6 +++--- lib/poetry.js | 6 +++--- lib/zip.js | 6 +++--- 7 files changed, 19 insertions(+), 22 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index 88d7d03e..e972f567 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -40,11 +40,10 @@ function cleanupCache() { if (fse.existsSync(cacheLocation)) { let cleanupProgress; if (this.serverless) { - if (this.progress) { + if (this.progress && this.log) { cleanupProgress = this.progress.get('python-cleanup-cache'); - cleanupProgress.notice(`Removing static caches at: ${cacheLocation}`, { - isMainEvent: true, - }); + cleanupProgress.notice('Removing static caches'); + this.log.info(`Removing static caches at: ${cacheLocation}`); } else { this.serverless.cli.log(`Removing static caches at: ${cacheLocation}`); } diff --git a/lib/inject.js b/lib/inject.js index 85bdf597..9f3ad77a 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -93,11 +93,10 @@ function injectAllRequirements(funcArtifact) { } let injectProgress; - if (this.progress) { + if (this.progress && this.log) { injectProgress = this.progress.get('python-inject-requirements'); - injectProgress.update('Injecting required Python packages to package', { - isMainEvent: true, - }); + injectProgress.update('Injecting required Python packages to package'); + this.log.info('Injecting required Python packages to package'); } else { this.serverless.cli.log('Injecting required Python packages to package...'); } diff --git a/lib/layer.js b/lib/layer.js index ddc90f6a..141d1fd7 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -54,11 +54,10 @@ function layerRequirements() { } let layerProgress; - if (this.progress) { + if (this.progress && this.log) { layerProgress = this.progress.get('python-layer-requirements'); - layerProgress.update('Packaging Python Requirements Lambda Layer', { - isMainEvent: true, - }); + layerProgress.update('Packaging Python Requirements Lambda Layer'); + this.log.info('Packaging Python Requirements Lambda Layer'); } else { this.serverless.cli.log('Packaging Python Requirements Lambda Layer...'); } diff --git a/lib/pip.js b/lib/pip.js index d88f5c7d..2f6d4571 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -716,7 +716,7 @@ async function installAllRequirements() { const reqsInstalledAt = await installRequirementsIfNeeded( f.module, f, - this.serverless + this ); // Add modulePath into .serverless for each module so it's easier for injecting and for users to see where reqs are let modulePath = path.join( diff --git a/lib/pipenv.js b/lib/pipenv.js index 5100a810..4949e924 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js 
@@ -15,14 +15,14 @@ async function pipfileToRequirements() { } let generateRequirementsProgress; - if (this.progress) { + if (this.progress && this.log) { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-pipfile' ); generateRequirementsProgress.update( - 'Generating requirements.txt from Pipfile', - { isMainEvent: true } + 'Generating requirements.txt from Pipfile' ); + this.log.info('Generating requirements.txt from Pipfile'); } else { this.serverless.cli.log('Generating requirements.txt from Pipfile...'); } diff --git a/lib/poetry.js b/lib/poetry.js index 81988742..12904fd9 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -14,14 +14,14 @@ async function pyprojectTomlToRequirements() { } let generateRequirementsProgress; - if (this.progress) { + if (this.progress && this.log) { generateRequirementsProgress = this.progress.get( 'python-generate-requirements-toml' ); generateRequirementsProgress.update( - 'Generating requirements.txt from "pyproject.toml"', - { isMainEvent: true } + 'Generating requirements.txt from "pyproject.toml"' ); + this.log.info('Generating requirements.txt from "pyproject.toml"'); } else { this.serverless.cli.log( 'Generating requirements.txt from pyproject.toml...' diff --git a/lib/zip.js b/lib/zip.js index de61ce0f..aabb2333 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -123,14 +123,14 @@ function packRequirements() { .then((funcs) => uniqBy(funcs, (f) => f.module)) .map((f) => { let packProgress; - if (this.progress) { + if (this.progress && this.log) { packProgress = this.progress.get( `python-pack-requirements-${f.module}` ); packProgress.update( - `Zipping required Python packages for ${f.module}`, - { isMainEvent: true } + `Zipping required Python packages for ${f.module}` ); + this.log.info(`Zipping required Python packages for ${f.module}`); } else { this.serverless.cli.log( `Zipping required Python packages for ${f.module}...` From 9479a90b1d262f55a6808a9d12c478f220258da9 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 25 Nov 2021 11:49:11 +0100 Subject: [PATCH 13/90] refactor: Cleanup and use `finally` for code simplification --- lib/clean.js | 11 +++--- lib/inject.js | 99 ++++++++++++++++++++++++--------------------------- lib/layer.js | 6 +--- lib/zip.js | 12 ++----- 4 files changed, 54 insertions(+), 74 deletions(-) diff --git a/lib/clean.js b/lib/clean.js index e972f567..8aaf331e 100644 --- a/lib/clean.js +++ b/lib/clean.js @@ -40,7 +40,7 @@ function cleanupCache() { if (fse.existsSync(cacheLocation)) { let cleanupProgress; if (this.serverless) { - if (this.progress && this.log) { + if (this.log) { cleanupProgress = this.progress.get('python-cleanup-cache'); cleanupProgress.notice('Removing static caches'); this.log.info(`Removing static caches at: ${cacheLocation}`); @@ -57,12 +57,9 @@ function cleanupCache() { .forEach((file) => { promises.push(fse.removeAsync(file)); }); - return BbPromise.all(promises) - .then(() => cleanupProgress && cleanupProgress.remove()) - .catch((e) => { - cleanupProgress && cleanupProgress.remove(); - throw e; - }); + return BbPromise.all(promises).finally( + () => cleanupProgress && cleanupProgress.remove() + ); } else { if (this.serverless) { if (this.log) { diff --git a/lib/inject.js b/lib/inject.js index 9f3ad77a..f32c9d46 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -86,7 +86,7 @@ function moveModuleUp(source, target, module) { * Inject requirements into packaged application. * @return {Promise} the combined promise for requirements injection. 
*/ -function injectAllRequirements(funcArtifact) { +async function injectAllRequirements(funcArtifact) { if (this.options.layer) { // The requirements will be placed in a Layer, so just resolve return BbPromise.resolve(); @@ -101,61 +101,56 @@ function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); } - let returnPromise; - if (this.serverless.service.package.individually) { - returnPromise = BbPromise.resolve(this.targetFuncs) - .filter((func) => - (func.runtime || this.serverless.service.provider.runtime).match( - /^python.*/ + try { + if (this.serverless.service.package.individually) { + await BbPromise.resolve(this.targetFuncs) + .filter((func) => + (func.runtime || this.serverless.service.provider.runtime).match( + /^python.*/ + ) ) - ) - .map((func) => { - if (!get(func, 'module')) { - set(func, ['module'], '.'); - } - return func; - }) - .map((func) => { - if (func.module !== '.') { - const artifact = func.package ? func.package.artifact : funcArtifact; - const newArtifact = path.join( - '.serverless', - `${func.module}-${func.name}.zip` - ); - func.package.artifact = newArtifact; - return moveModuleUp(artifact, newArtifact, func.module).then( - () => func - ); - } else { + .map((func) => { + if (!get(func, 'module')) { + set(func, ['module'], '.'); + } return func; - } - }) - .map((func) => { - return this.options.zip - ? func - : injectRequirements( - path.join('.serverless', func.module, 'requirements'), - func.package.artifact, - this.options + }) + .map((func) => { + if (func.module !== '.') { + const artifact = func.package + ? func.package.artifact + : funcArtifact; + const newArtifact = path.join( + '.serverless', + `${func.module}-${func.name}.zip` ); - }); - } else if (!this.options.zip) { - returnPromise = injectRequirements( - path.join('.serverless', 'requirements'), - this.serverless.service.package.artifact || funcArtifact, - this.options - ); + func.package.artifact = newArtifact; + return moveModuleUp(artifact, newArtifact, func.module).then( + () => func + ); + } else { + return func; + } + }) + .map((func) => { + return this.options.zip + ? 
func + : injectRequirements( + path.join('.serverless', func.module, 'requirements'), + func.package.artifact, + this.options + ); + }); + } else if (!this.options.zip) { + await injectRequirements( + path.join('.serverless', 'requirements'), + this.serverless.service.package.artifact || funcArtifact, + this.options + ); + } + } finally { + injectProgress && injectProgress.remove(); } - - return ( - returnPromise && - returnPromise - .then(() => injectProgress && injectProgress.remove()) - .catch((e) => { - injectProgress && injectProgress.remove(); - throw e; - }) - ); } module.exports = { injectAllRequirements }; diff --git a/lib/layer.js b/lib/layer.js index 141d1fd7..fe2a4a00 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -65,11 +65,7 @@ function layerRequirements() { return BbPromise.bind(this) .then(zipRequirements) .then(createLayers) - .then(() => layerProgress && layerProgress.remove()) - .catch((e) => { - layerProgress && layerProgress.remove(); - throw e; - }); + .finally(() => layerProgress && layerProgress.remove()); } module.exports = { diff --git a/lib/zip.js b/lib/zip.js index aabb2333..cba29450 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -139,11 +139,7 @@ function packRequirements() { f.package.patterns.push(`${f.module}/.requirements.zip`); return addTree(new JSZip(), `.serverless/${f.module}/requirements`) .then((zip) => writeZip(zip, `${f.module}/.requirements.zip`)) - .then(() => packProgress && packProgress.remove()) - .catch((e) => { - packProgress && packProgress.remove(); - throw e; - }); + .finally(() => packProgress && packProgress.remove()); }); } else { let packProgress; @@ -157,11 +153,7 @@ function packRequirements() { .then((zip) => writeZip(zip, path.join(this.servicePath, '.requirements.zip')) ) - .then(() => packProgress && packProgress.remove()) - .catch((e) => { - packProgress && packProgress.remove(); - throw e; - }); + .finally(() => packProgress && packProgress.remove()); } } } From cdb71110bc9c69b5087b6e18fb353d65962afe4a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 12:16:27 +0100 Subject: [PATCH 14/90] refactor: Use `ServerlessError` in `docker` --- lib/docker.js | 36 ++++++++++++++++++++++-------------- lib/pip.js | 5 +++-- 2 files changed, 25 insertions(+), 16 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 9da6da6a..5157803f 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -8,7 +8,7 @@ const path = require('path'); * @param {string[]} options * @return {Object} */ -async function dockerCommand(options) { +async function dockerCommand(options, pluginInstance) { const cmd = 'docker'; try { return await spawn(cmd, options, { encoding: 'utf-8' }); @@ -17,7 +17,10 @@ async function dockerCommand(options) { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new Error('docker not found! Please install it.'); + throw new pluginInstance.serverless.classes.Error( + 'docker not found! Please install it.', + 'PYTHON_REQUIREMENTS_DOCKER_NOT_FOUND' + ); } throw e; } @@ -29,19 +32,22 @@ async function dockerCommand(options) { * @param {string[]} extraArgs * @return {string} The name of the built docker image. 
*/ -async function buildImage(dockerFile, extraArgs) { +async function buildImage(dockerFile, extraArgs, pluginInstance) { const imageName = 'sls-py-reqs-custom'; const options = ['build', '-f', dockerFile, '-t', imageName]; if (Array.isArray(extraArgs)) { options.push(...extraArgs); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); } options.push('.'); - await dockerCommand(options); + await dockerCommand(options, pluginInstance); return imageName; } @@ -50,7 +56,7 @@ async function buildImage(dockerFile, extraArgs) { * @param {string} servicePath * @return {string} file name */ -function findTestFile(servicePath) { +function findTestFile(servicePath, pluginInstance) { if (fse.pathExistsSync(path.join(servicePath, 'serverless.yml'))) { return 'serverless.yml'; } @@ -63,8 +69,9 @@ function findTestFile(servicePath) { if (fse.pathExistsSync(path.join(servicePath, 'requirements.txt'))) { return 'requirements.txt'; } - throw new Error( - 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()' + throw new pluginInstance.serverless.classes.Error( + 'Unable to find serverless.{yml|yaml|json} or requirements.txt for getBindPath()', + 'PYTHON_REQUIREMENTS_MISSING_GET_BIND_PATH_FILE' ); } @@ -73,7 +80,8 @@ function findTestFile(servicePath) { * @param {string} bindPath * @return {boolean} */ -async function tryBindPath(bindPath, testFile, { serverless, log }) { +async function tryBindPath(bindPath, testFile, pluginInstance) { + const { serverless, log } = pluginInstance; const debug = process.env.SLS_DEBUG; const options = [ 'run', @@ -92,7 +100,7 @@ async function tryBindPath(bindPath, testFile, { serverless, log }) { serverless.cli.log(`Trying bindPath ${bindPath} (${options})`); } } - const ps = await dockerCommand(options); + const ps = await dockerCommand(options, pluginInstance); if (debug) { if (log) { log.debug(ps.stdoutBuffer.trim()); @@ -126,7 +134,7 @@ async function getBindPath(servicePath, pluginInstance) { } // test docker is available - await dockerCommand(['version']); + await dockerCommand(['version'], pluginInstance); // find good bind path for Windows let bindPaths = []; @@ -159,7 +167,7 @@ async function getBindPath(servicePath, pluginInstance) { bindPaths.push(`/mnt/${drive.toUpperCase()}/${path}`); bindPaths.push(`${drive.toUpperCase()}:/${path}`); - const testFile = findTestFile(servicePath); + const testFile = findTestFile(servicePath, pluginInstance); for (let i = 0; i < bindPaths.length; i++) { const bindPath = bindPaths[i]; @@ -176,7 +184,7 @@ async function getBindPath(servicePath, pluginInstance) { * @param {string} bindPath * @return {boolean} */ -async function getDockerUid(bindPath) { +async function getDockerUid(bindPath, pluginInstance) { const options = [ 'run', '--rm', @@ -188,7 +196,7 @@ async function getDockerUid(bindPath) { '%u', '/bin/sh', ]; - const ps = await dockerCommand(options); + const ps = await dockerCommand(options, pluginInstance); return ps.stdoutBuffer.trim(); } diff --git a/lib/pip.js b/lib/pip.js index 2f6d4571..24d4c55f 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -251,7 +251,8 @@ async function installRequirements(targetFolder, pluginInstance) { try { dockerImage = await buildImage( options.dockerFile, - options.dockerBuildCmdExtraArgs + options.dockerBuildCmdExtraArgs, + pluginInstance ); } finally { buildDockerImageProgress && 
buildDockerImageProgress.remove();
@@ -335,7 +336,7 @@ async function installRequirements(targetFolder, pluginInstance) {
       ]);
     } else {
       // Use same user so --cache-dir works
-      dockerCmd.push('-u', await getDockerUid(bindPath));
+      dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance));
     }
     for (let path of options.dockerExtraFiles) {

From 395082761ae574c2664f1c272ea4970cfa3fd1f7 Mon Sep 17 00:00:00 2001
From: Piotr Grzesik
Date: Fri, 26 Nov 2021 12:22:22 +0100
Subject: [PATCH 15/90] refactor: Use `ServerlessError` in `poetry`

---
 lib/poetry.js | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/lib/poetry.js b/lib/poetry.js
index 12904fd9..23f43dc0 100644
--- a/lib/poetry.js
+++ b/lib/poetry.js
@@ -50,8 +50,9 @@ async function pyprojectTomlToRequirements() {
         e.stderrBuffer &&
         e.stderrBuffer.toString().includes('command not found')
       ) {
-        throw new Error(
-          `poetry not found! Install it according to the poetry docs.`
+        throw new this.serverless.classes.Error(
+          `poetry not found! Install it according to the poetry docs.`,
+          'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND'
         );
       }
       throw e;

From 618ef76c830349f34f5f8414dc6e381b167cfa43 Mon Sep 17 00:00:00 2001
From: Piotr Grzesik
Date: Fri, 26 Nov 2021 12:23:04 +0100
Subject: [PATCH 16/90] refactor: Use `ServerlessError` in `pipenv`

---
 lib/pipenv.js | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/lib/pipenv.js b/lib/pipenv.js
index 4949e924..5856d47b 100644
--- a/lib/pipenv.js
+++ b/lib/pipenv.js
@@ -42,8 +42,9 @@ async function pipfileToRequirements() {
         e.stderrBuffer &&
         e.stderrBuffer.toString().includes('command not found')
       ) {
-        throw new Error(
-          `pipenv not found! Install it with 'pip install pipenv'.`
+        throw new this.serverless.classes.Error(
+          `pipenv not found! Install it with 'pip install pipenv'.`,
+          'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND'
         );
       }
       throw e;

From 8a4bc83025b41acd7f6e88982cd72af24f8d9967 Mon Sep 17 00:00:00 2001
From: Piotr Grzesik
Date: Fri, 26 Nov 2021 12:28:02 +0100
Subject: [PATCH 17/90] refactor: Use `ServerlessError` in `pip`

---
 lib/pip.js | 26 +++++++++++++++++---------
 1 file changed, 17 insertions(+), 9 deletions(-)

diff --git a/lib/pip.js b/lib/pip.js
index 24d4c55f..ce348532 100644
--- a/lib/pip.js
+++ b/lib/pip.js
@@ -107,7 +107,7 @@ function generateRequirementsFile(
   }
 }
 
-async function pipAcceptsSystem(pythonBin) {
+async function pipAcceptsSystem(pythonBin, pluginInstance) {
   // Check if pip has Debian's --system option and set it if so
   try {
     const pipTestRes = await spawn(pythonBin, ['-m', 'pip', 'help', 'install']);
@@ -120,7 +120,10 @@
       e.stderrBuffer &&
       e.stderrBuffer.toString().includes('command not found')
     ) {
-      throw new Error(`${pythonBin} not found! Try the pythonBin option.`);
+      throw new pluginInstance.serverless.classes.Error(
+        `${pythonBin} not found! Try the pythonBin option.`,
+        'PYTHON_REQUIREMENTS_PYTHON_NOT_FOUND'
+      );
     }
     throw e;
   }
@@ -167,10 +170,9 @@ async function installRequirements(targetFolder, pluginInstance) {
   // Check if we're using the legacy --cache-dir command...
   if (options.pipCmdExtraArgs.indexOf('--cache-dir') > -1) {
     if (options.dockerizePip) {
-      throw (
-        'Error: You can not use --cache-dir with Docker any more, please\n' +
-        ' use the new option useDownloadCache instead. 
Please see:\n' + - ' https://github.com/UnitedIncome/serverless-python-requirements#caching' + throw new pluginInstance.serverless.classes.Error( + 'You cannot use --cache-dir with Docker any more, please use the new option useDownloadCache instead. Please see: https://github.com/UnitedIncome/serverless-python-requirements#caching for more details.', + 'PYTHON_REQUIREMENTS_CACHE_DIR_DOCKER_INVALID' ); } else { if (log) { @@ -222,7 +224,7 @@ async function installRequirements(targetFolder, pluginInstance) { pipCmd.push('--cache-dir', downloadCacheDir); } - if (await pipAcceptsSystem(options.pythonBin)) { + if (await pipAcceptsSystem(options.pythonBin, pluginInstance)) { pipCmd.push('--system'); } } @@ -358,7 +360,10 @@ async function installRequirements(targetFolder, pluginInstance) { if (Array.isArray(options.dockerRunCmdExtraArgs)) { dockerCmd.push(...options.dockerRunCmdExtraArgs); } else { - throw new Error('dockerRunCmdExtraArgs option must be an array'); + throw new pluginInstance.serverless.classes.Error( + 'dockerRunCmdExtraArgs option must be an array', + 'PYTHON_REQUIREMENTS_INVALID_DOCKER_EXTRA_ARGS' + ); } dockerCmd.push(dockerImage); @@ -405,7 +410,10 @@ async function installRequirements(targetFolder, pluginInstance) { cmd.indexOf('python') > -1 ? 'Try the pythonBin option' : 'Please install it'; - throw new Error(`${cmd} not found! ${advice}`); + throw new pluginInstance.serverless.classes.Error( + `${cmd} not found! ${advice}`, + 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' + ); } throw e; } From 1f0804cfa95e85375b5a075188b1fee1fcdcb42e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 13:05:20 +0100 Subject: [PATCH 18/90] ci: Update validate CI workflow --- .github/workflows/lint.yml | 20 --- .github/workflows/test.yml | 54 -------- .github/workflows/validate.yml | 240 +++++++++++++++++++++++++++++++++ package.json | 11 +- 4 files changed, 248 insertions(+), 77 deletions(-) delete mode 100644 .github/workflows/lint.yml delete mode 100644 .github/workflows/test.yml create mode 100644 .github/workflows/validate.yml diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml deleted file mode 100644 index 1e6b9ee8..00000000 --- a/.github/workflows/lint.yml +++ /dev/null @@ -1,20 +0,0 @@ -name: Lint - -on: [push, pull_request] - -jobs: - build: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - - name: Set up Node ${{ matrix.node-version }} - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: Install deps - run: npm install - - - name: Lint - run: npm run ci:lint diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml deleted file mode 100644 index f89486bf..00000000 --- a/.github/workflows/test.yml +++ /dev/null @@ -1,54 +0,0 @@ -name: Test - -on: [push, pull_request] - -jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - os: [ubuntu-latest, windows-latest, macOS-latest] - python-version: [2.7, 3.6] - steps: - - uses: actions/checkout@v2 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Set up Node - uses: actions/setup-node@v1 - with: - node-version: 14 - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install deps - run: npm 
install - - - name: Test - run: npm run test - env: - LC_ALL: C.UTF-8 - LANG: C.UTF-8 - if: matrix.os != 'macOS-latest' - - - name: Test (Mac) - run: npm run test - env: - LC_ALL: en_US.UTF-8 - LANG: en_US.UTF-8 - if: matrix.os == 'macOS-latest' diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml new file mode 100644 index 00000000..3a9af5c4 --- /dev/null +++ b/.github/workflows/validate.yml @@ -0,0 +1,240 @@ +# PR's only + +name: Validate + +on: + pull_request: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode16: + name: '[Linux] Node.js v16: Lint, Formatting & Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # For commitlint purpose ensure to have complete list of PR commits + # It's loose and imperfect assumption that PR has no more than 30 commits + fetch-depth: 30 + + - name: Retrieve last master commit (for `git diff` purposes) + run: | + git checkout -b pr + git fetch --prune --depth=30 origin +refs/heads/master:refs/remotes/origin/master + git checkout master + git checkout pr + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v16-${{ runner.os }}-${{ github.ref }}- + npm-v16-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Validate Prettier formatting + run: npm run prettier-check:updated + - name: Validate ESLint rules + run: npm run lint:updated + - name: Unit tests + run: script -e -c "npm test" + + windowsNode16: + name: '[Windows] Node.js v16: Unit tests' + runs-on: windows-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v16-${{ runner.os }}-${{ github.ref }}- + npm-v16-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - 
name: Unit tests + run: npm test + + linuxNode14: + name: '[Linux] Node.js 14: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v14-${{ runner.os }}-${{ github.ref }}- + npm-v14-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 14.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + # Some tests depend on TTY support, which is missing in GA runner + # Workaround taken from https://github.com/actions/runner/issues/241#issuecomment-577360161 + run: script -e -c "npm test" + + linuxNode12: + name: '[Linux] Node.js v12: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: | + npm-v12-${{ runner.os }}-${{ github.ref }}- + npm-v12-${{ runner.os }}-refs/heads/master- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 12.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" diff --git a/package.json b/package.json index c9d247a3..9d1f5852 100644 --- a/package.json +++ b/package.json @@ -38,14 +38,19 @@ "main": "index.js", "bin": {}, "scripts": { - "ci:lint": "eslint *.js lib/*.js --format junit --output-file ~/reports/eslint.xml && prettier -c '{.,lib}/*.{js,md}'", - "test": "node test.js", + "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", - "format": "prettier --write '{.,lib}/*.{js,md}'" + "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier -c", + "prettify": "prettier --write --ignore-path .gitignore 
\"**/*.{css,html,js,json,md,yaml,yml}\"", + "prettify:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier --write", + "test": "node test.js" }, "devDependencies": { "cross-spawn": "*", "eslint": "^7.32.0", + "git-list-updated": "^1.2.1", "lodash": "^4.17.21", "prettier": "^2", "tape": "*", From 080b0ba4e834e4ba1d60db243d5dc9962fa9c9f0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 13:57:45 +0100 Subject: [PATCH 19/90] ci: Introduce integrate CI workflow --- .github/workflows/integrate.yml | 239 ++++++++++++++++++++++++++++++++ 1 file changed, 239 insertions(+) create mode 100644 .github/workflows/integrate.yml diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml new file mode 100644 index 00000000..d241971c --- /dev/null +++ b/.github/workflows/integrate.yml @@ -0,0 +1,239 @@ +# master only + +name: Integrate + +on: + push: + branches: [master] + +env: + FORCE_COLOR: 1 + +jobs: + linuxNode16: + name: '[Linux] Node.js v16: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + windowsNode16: + name: '[Windows] Node.js v16: Unit tests' + runs-on: windows-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 16.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: npm test + + linuxNode14: + name: '[Linux] Node.js 14: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout 
repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 14.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + linuxNode12: + name: '[Linux] Node.js v12: Unit tests' + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [2.7, 3.6] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve dependencies from cache + id: cacheNpm + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v1 + with: + python-version: ${{ matrix.python-version }} + + - name: Install Node.js and npm + uses: actions/setup-node@v1 + with: + node-version: 12.x + + - name: Check python version + run: | + python --version + + - name: Install setuptools + run: python -m pip install --force setuptools wheel + + - name: Install pipenv / poetry + run: python -m pip install pipenv poetry + + - name: Install serverless + run: npm install -g serverless@2 + + - name: Install dependencies + if: steps.cacheNpm.outputs.cache-hit != 'true' + run: | + npm update --no-save + npm update --save-dev --no-save + - name: Unit tests + run: script -e -c "npm test" + + tagIfNewVersion: + name: Tag if new version + runs-on: ubuntu-latest + needs: [linuxNode14, windowsNode14, linuxNode16, linuxNode12] + steps: + - name: Checkout repository + uses: actions/checkout@v2 + with: + # Ensure to have complete history of commits pushed with given push operation + # It's loose and imperfect assumption that no more than 30 commits will be pushed at once + fetch-depth: 30 + # Tag needs to be pushed with real user token, otherwise pushed tag won't trigger the actions workflow + # Hence we're passing 'serverless-ci' user authentication token + token: ${{ secrets.USER_GITHUB_TOKEN }} + + - name: Tag if new version + run: | + NEW_VERSION=`git diff -U0 ${{ github.event.before }} package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + git tag v$NEW_VERSION + git push --tags + fi From f4d87b459a82422347758f647774c77542525774 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:05:10 +0100 Subject: [PATCH 20/90] ci: Introduce new CI publish workflow --- .github/workflows/publish.yml | 50 +++++++++++++++++++++++++++++------ CHANGELOG.md | 4 +++ package.json | 2 ++ 3 files changed, 48 insertions(+), 8 deletions(-) create mode 100644 CHANGELOG.md diff --git a/.github/workflows/publish.yml 
b/.github/workflows/publish.yml index 6a1e7d26..6eee5b45 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -1,18 +1,52 @@ +# Version tags only + name: Publish -on: [release] +on: + push: + tags: + - v[0-9]+.[0-9]+.[0-9]+ jobs: - publish-npm: + publish: + name: Publish runs-on: ubuntu-latest + env: + # It'll work with secrets.GITHUB_TOKEN (which is provided by GitHub unconditionally) + # Still then release author would be "github-actions". It's better if it's dedicated repo bot + GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} steps: - - uses: actions/checkout@v2 + - name: Checkout repository + uses: actions/checkout@v2 + + - name: Retrieve node_modules from cache + id: cacheNodeModules + uses: actions/cache@v2 + with: + path: | + ~/.npm + node_modules + key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - - uses: actions/setup-node@v2 + - name: Install Node.js and npm + uses: actions/setup-node@v1 with: - version: 14 - registry-url: https://registry.npmjs.org/ + node-version: 14.x + registry-url: https://registry.npmjs.org - - run: npm publish + - name: Publish new version env: - NODE_AUTH_TOKEN: ${{secrets.npm_token}} + NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }} + run: npm publish + + # Note: No need to install dependencies as: + # 1. We have retrieved cached `node_modules` for very same `package.json` + # as stored with recent `master `build + # 2. If for some reason cache retrieval fails `npx` will download and install + # `github-release-from-cc-changelog` + + - name: Publish release notes + run: | + TEMP_ARRAY=($(echo $GITHUB_REF | tr "/" "\n")) + TAG=${TEMP_ARRAY[@]: -1} + npx github-release-from-cc-changelog $TAG diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..cda73dee --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,4 @@ +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ diff --git a/package.json b/package.json index 9d1f5852..ec73734b 100644 --- a/package.json +++ b/package.json @@ -41,6 +41,7 @@ "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", "lint:updated": "pipe-git-updated --ext=js -- eslint", + "prepare-release": "standard-version && prettier --write CHANGELOG.md", "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", "prettier-check:updated": "pipe-git-updated --ext=css --ext=html --ext=js --ext=json --ext=md --ext=yaml --ext=yml -- prettier -c", "prettify": "prettier --write --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", @@ -51,6 +52,7 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", + "github-release-from-cc-changelog": "^2.2.0", "lodash": "^4.17.21", "prettier": "^2", "tape": "*", From 274b8e52d9499612afbf096c023d73e75e0dd71a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:07:57 +0100 Subject: [PATCH 21/90] ci: Add commitlint job to CI --- .github/workflows/validate.yml | 12 +++++++++++- CHANGELOG.md | 1 - commitlint.config.js | 31 +++++++++++++++++++++++++++++++ package.json | 1 + 4 files changed, 43 insertions(+), 2 deletions(-) create mode 100644 commitlint.config.js diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 3a9af5c4..9f546619 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -11,7 +11,7 @@ env: jobs: linuxNode16: - name: '[Linux] Node.js v16: Lint, Formatting & Unit tests' + name: '[Linux] Node.js v16: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -75,6 +75,16 @@ jobs: run: npm run prettier-check:updated - name: Validate ESLint rules run: npm run lint:updated + - name: Validate commit messages + if: github.event.pull_request.base.repo.id == github.event.pull_request.head.repo.id + run: npx commitlint -f master + - name: Validate changelog (if new version) + run: | + NEW_VERSION=`git diff -U0 master package.json | grep '"version": "' | tail -n 1 | grep -oE "[0-9]+\.[0-9]+\.[0-9]+"` || : + if [ -n "$NEW_VERSION" ]; + then + npx dump-release-notes-from-cc-changelog $NEW_VERSION + fi - name: Unit tests run: script -e -c "npm test" diff --git a/CHANGELOG.md b/CHANGELOG.md index cda73dee..5c79a3c3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,4 +1,3 @@ # Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
- diff --git a/commitlint.config.js b/commitlint.config.js new file mode 100644 index 00000000..d23a0d6b --- /dev/null +++ b/commitlint.config.js @@ -0,0 +1,31 @@ +'use strict'; + +module.exports = { + rules: { + 'body-leading-blank': [2, 'always'], + 'footer-leading-blank': [2, 'always'], + 'header-max-length': [2, 'always', 72], + 'scope-enum': [2, 'always', ['', 'Config', 'Log']], + 'subject-case': [2, 'always', 'sentence-case'], + 'subject-empty': [2, 'never'], + 'subject-full-stop': [2, 'never', '.'], + 'type-case': [2, 'always', 'lower-case'], + 'type-empty': [2, 'never'], + 'type-enum': [ + 2, + 'always', + [ + 'build', + 'chore', + 'ci', + 'docs', + 'feat', + 'fix', + 'perf', + 'refactor', + 'style', + 'test', + ], + ], + }, +}; diff --git a/package.json b/package.json index ec73734b..be7bc48d 100644 --- a/package.json +++ b/package.json @@ -38,6 +38,7 @@ "main": "index.js", "bin": {}, "scripts": { + "commitlint": "commitlint -f HEAD@{15}", "format": "prettier --write '{.,lib}/*.{js,md}'", "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", "lint:updated": "pipe-git-updated --ext=js -- eslint", From 29f957dd04505174e7f23f20945a2fb8307a7942 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 14:09:09 +0100 Subject: [PATCH 22/90] chore: Reformat with eslint & prettier --- .github/dependabot.yml | 20 +- .github/workflows/integrate.yml | 8 +- .github/workflows/publish.yml | 4 +- .github/workflows/validate.yml | 8 +- README.md | 12 +- index.js | 44 ++-- package.json | 11 +- test.js | 371 ++++++++++++++++---------------- tests/base/_slimPatterns.yml | 2 +- tests/base/serverless.yml | 3 - tests/pipenv/_slimPatterns.yml | 2 +- tests/poetry/_slimPatterns.yml | 2 +- 12 files changed, 249 insertions(+), 238 deletions(-) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index ac29398e..ab487438 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -1,12 +1,12 @@ version: 2 updates: -- package-ecosystem: npm - directory: "/" - schedule: - interval: daily - time: "10:00" - open-pull-requests-limit: 10 - ignore: - - dependency-name: eslint - versions: - - "> 7.22.0" + - package-ecosystem: npm + directory: '/' + schedule: + interval: daily + time: '10:00' + open-pull-requests-limit: 10 + ignore: + - dependency-name: eslint + versions: + - '> 7.22.0' diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index d241971c..64396542 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -59,7 +59,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test windowsNode16: name: '[Windows] Node.js v16: Unit tests' @@ -161,7 +161,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test linuxNode12: name: '[Linux] Node.js v12: Unit tests' @@ -212,12 +212,12 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode14, windowsNode14, linuxNode16, linuxNode12] + needs: [linuxNode16, windowsNode16, linuxNode14, linuxNode12] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6eee5b45..b44da770 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm 
node_modules - key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v16-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 16.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 9f546619..d541cec7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -86,7 +86,7 @@ jobs: npx dump-release-notes-from-cc-changelog $NEW_VERSION fi - name: Unit tests - run: script -e -c "npm test" + run: npm test windowsNode16: name: '[Windows] Node.js v16: Unit tests' @@ -192,9 +192,7 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - # Some tests depend on TTY support, which is missing in GA runner - # Workaround taken from https://github.com/actions/runner/issues/241#issuecomment-577360161 - run: script -e -c "npm test" + run: npm test linuxNode12: name: '[Linux] Node.js v12: Unit tests' @@ -247,4 +245,4 @@ jobs: npm update --no-save npm update --save-dev --no-save - name: Unit tests - run: script -e -c "npm test" + run: npm test diff --git a/README.md b/README.md index 518d5ce1..abe6a175 100644 --- a/README.md +++ b/README.md @@ -7,13 +7,13 @@ A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. ---- +--- _Originally developed by [**Capital One**](https://www.capitalone.com/tech/open-source/), now maintained in scope of Serverless, Inc_ _Capital One considers itself the bank a technology company would build. It's delivering best-in-class innovation so that its millions of customers can manage their finances with ease. Capital One is all-in on the cloud and is a leader in the adoption of open source, RESTful APIs, microservices and containers. We build our own products and release them with a speed and agility that allows us to get new customer experiences to market quickly. Our engineers use artificial intelligence and machine learning to transform real-time data, software and algorithms into the future of finance, reimagined._ ---- +--- ## Install @@ -557,10 +557,10 @@ package: - [@andrewfarley](https://github.com/andrewfarley) - Implemented download caching and static caching - [@bweigel](https://github.com/bweigel) - adding the `slimPatternsAppendDefaults` option & fixing per-function packaging when some functions don't have requirements & Porting tests from bats to js! 
- Poetry support - - [@squaresurf](https://github.com/squaresurf) - - [@drice](https://github.com/drice) - - [@ofercaspi](https://github.com/ofercaspi) - - [@tpansino](https://github.com/tpansino) + - [@squaresurf](https://github.com/squaresurf) + - [@drice](https://github.com/drice) + - [@ofercaspi](https://github.com/ofercaspi) + - [@tpansino](https://github.com/tpansino) - [@david-mk-lawrence](https://github.com/david-mk-lawrence) - added Lambda Layer support - [@bryantbriggs](https://github.com/bryantbiggs) - Fixing CI/CD - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues diff --git a/index.js b/index.js index cf2af38e..7741a7f8 100644 --- a/index.js +++ b/index.js @@ -7,7 +7,7 @@ const values = require('lodash.values'); const { addVendorHelper, removeVendorHelper, - packRequirements + packRequirements, } = require('./lib/zip'); const { injectAllRequirements } = require('./lib/inject'); const { layerRequirements } = require('./lib/layer'); @@ -57,7 +57,7 @@ class ServerlessPythonRequirements { staticCacheMaxVersions: 0, pipCmdExtraArgs: [], noDeploy: [], - vendor: '' + vendor: '', }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || @@ -75,7 +75,9 @@ class ServerlessPythonRequirements { ) { if (!this.warningLogged) { if (this.log) { - this.log.warning('You provided a docker related option but dockerizePip is set to false.'); + this.log.warning( + 'You provided a docker related option but dockerizePip is set to false.' + ); } else { this.serverless.cli.log( 'WARNING: You provided a docker related option but dockerizePip is set to false.' @@ -144,31 +146,31 @@ class ServerlessPythonRequirements { commands: { clean: { usage: 'Remove .requirements and requirements.zip', - lifecycleEvents: ['clean'] + lifecycleEvents: ['clean'], }, install: { usage: 'install requirements manually', - lifecycleEvents: ['install'] + lifecycleEvents: ['install'], }, cleanCache: { usage: 'Removes all items in the pip download/static cache (if present)', - lifecycleEvents: ['cleanCache'] - } - } - } + lifecycleEvents: ['cleanCache'], + }, + }, + }, }; - if (this.serverless.cli.generateCommandsHelp) { - Object.assign(this.commands.requirements, { - usage: 'Serverless plugin to bundle Python packages', - lifecycleEvents: ['requirements'] - }); - } else { - this.commands.requirements.type = 'container'; - } + if (this.serverless.cli.generateCommandsHelp) { + Object.assign(this.commands.requirements, { + usage: 'Serverless plugin to bundle Python packages', + lifecycleEvents: ['requirements'], + }); + } else { + this.commands.requirements.type = 'container'; + } - const isFunctionRuntimePython = args => { + const isFunctionRuntimePython = (args) => { // If functionObj.runtime is undefined, python. 
if (!args[1].functionObj || !args[1].functionObj.runtime) { return true; @@ -177,9 +179,7 @@ class ServerlessPythonRequirements { }; const clean = () => - BbPromise.bind(this) - .then(cleanup) - .then(removeVendorHelper); + BbPromise.bind(this).then(cleanup).then(removeVendorHelper); const setupArtifactPathCapturing = () => { // Reference: @@ -243,7 +243,7 @@ class ServerlessPythonRequirements { }, 'requirements:install:install': before, 'requirements:clean:clean': clean, - 'requirements:cleanCache:cleanCache': cleanCache + 'requirements:cleanCache:cleanCache': cleanCache, }; } } diff --git a/package.json b/package.json index be7bc48d..4db9497a 100644 --- a/package.json +++ b/package.json @@ -39,8 +39,7 @@ "bin": {}, "scripts": { "commitlint": "commitlint -f HEAD@{15}", - "format": "prettier --write '{.,lib}/*.{js,md}'", - "lint": "eslint *.js lib/*.js && prettier -c '{.,lib}/*.{js,md}'", + "lint": "eslint .", "lint:updated": "pipe-git-updated --ext=js -- eslint", "prepare-release": "standard-version && prettier --write CHANGELOG.md", "prettier-check": "prettier -c --ignore-path .gitignore \"**/*.{css,html,js,json,md,yaml,yml}\"", @@ -79,6 +78,14 @@ "peerDependencies": { "serverless": "^2.32" }, + "lint-staged": { + "*.js": [ + "eslint" + ], + "*.{css,html,js,json,md,yaml,yml}": [ + "prettier -c" + ] + }, "eslintConfig": { "extends": "eslint:recommended", "env": { diff --git a/test.js b/test.js index 0322ab91..f234229a 100644 --- a/test.js +++ b/test.js @@ -10,7 +10,7 @@ const { copySync, writeFileSync, statSync, - pathExistsSync + pathExistsSync, } = require('fs-extra'); const { quote } = require('shell-quote'); const { sep } = require('path'); @@ -19,30 +19,32 @@ const { getUserCachePath, sha256Path } = require('./lib/shared'); const initialWorkingDir = process.cwd(); -const mkCommand = cmd => (args, options = {}) => { - const { error, stdout, stderr, status } = crossSpawn.sync( - cmd, - args, - Object.assign( - { - env: Object.assign({}, process.env, { SLS_DEBUG: 't' }) - }, - options - ) - ); - if (error) { - console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console - throw error; - } - if (status) { - console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console - console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console - throw new Error( - `${quote([cmd, ...args])} failed with status code ${status}` - ); - } - return stdout && stdout.toString().trim(); -}; +const mkCommand = + (cmd) => + (args, options = {}) => { + const { error, stdout, stderr, status } = crossSpawn.sync( + cmd, + args, + Object.assign( + { + env: Object.assign({}, process.env, { SLS_DEBUG: 't' }), + }, + options + ) + ); + if (error) { + console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console + throw error; + } + if (status) { + console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console + console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console + throw new Error( + `${quote([cmd, ...args])} failed with status code ${status}` + ); + } + return stdout && stdout.toString().trim(); + }; const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); @@ -73,8 +75,8 @@ const teardown = () => { 'serverless.yml.bak', 'module1/foobar', getUserCachePath(), - ...glob.sync('serverless-python-requirements-*.tgz') - ].map(path => removeSync(path)); + ...glob.sync('serverless-python-requirements-*.tgz'), + ].map((path) => removeSync(path)); if 
(!cwd.endsWith('base with a space')) { try { git(['checkout', 'serverless.yml']); @@ -93,15 +95,17 @@ const teardown = () => { const testFilter = (() => { const elems = process.argv.slice(2); // skip ['node', 'test.js'] if (elems.length) { - return desc => - elems.some(text => desc.search(text) != -1) ? tape.test : tape.test.skip; + return (desc) => + elems.some((text) => desc.search(text) != -1) + ? tape.test + : tape.test.skip; } else { return () => tape.test; } })(); const test = (desc, func, opts = {}) => - testFilter(desc)(desc, opts, async t => { + testFilter(desc)(desc, opts, async (t) => { setup(); let ended = false; try { @@ -124,7 +128,7 @@ const availablePythons = (() => { const mapping = {}; if (process.env.USE_PYTHON) { binaries.push( - ...process.env.USE_PYTHON.split(',').map(v => v.toString().trim()) + ...process.env.USE_PYTHON.split(',').map((v) => v.toString().trim()) ); } else { // For running outside of CI @@ -135,7 +139,7 @@ const availablePythons = (() => { const python = `${bin}${exe}`; const { stdout, status } = crossSpawn.sync(python, [ '-c', - 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))' + 'import sys; sys.stdout.write(".".join(map(str, sys.version_info[:2])))', ]); const ver = stdout && stdout.toString().trim(); if (!status && ver) { @@ -152,29 +156,29 @@ const availablePythons = (() => { return mapping; })(); -const getPythonBin = version => { +const getPythonBin = (version) => { const bin = availablePythons[String(version)]; if (!bin) throw new Error(`No python version ${version} available`); return bin; }; -const hasPython = version => { +const hasPython = (version) => { return Boolean(availablePythons[String(version)]); }; -const listZipFiles = async function(filename) { +const listZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object.keys(zip.files); }; -const listZipFilesWithMetaData = async function(filename) { +const listZipFilesWithMetaData = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); return Object(zip.files); }; -const listRequirementsZipFiles = async function(filename) { +const listRequirementsZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); const reqsBuffer = await zip.file('.requirements.zip').async('nodebuffer'); @@ -197,7 +201,7 @@ const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; test( 'default pythonBin can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -212,7 +216,7 @@ test( test( 'py3.6 packages have the same hash', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -231,7 +235,7 @@ test( test( 'py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -246,14 +250,14 @@ test( test( 'py3.6 can package flask with hashes', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-hashes.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is 
packaged'); @@ -264,14 +268,14 @@ test( test( 'py3.6 can package flask with nested', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-nested.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); @@ -283,7 +287,7 @@ test( test( 'py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -308,7 +312,7 @@ test( test( 'py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -316,12 +320,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -331,7 +336,7 @@ test( test( 'py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -340,12 +345,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -356,7 +361,7 @@ test( test( "py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -365,7 +370,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(3)}`, 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -378,14 +383,14 @@ test( test( 'py3.6 can package boto3 with editable', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ `--pythonBin=${getPythonBin(3)}`, '--fileName=requirements-w-editable.txt', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -400,7 +405,7 @@ test( test( 'py3.6 can package flask with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -416,7 +421,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -424,12 +429,13 @@ test( const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are NOT packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -439,7 +445,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -448,12 +454,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -464,7 +470,7 @@ test( test( 'py3.6 can package flask with zip & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -497,7 +503,7 @@ test( test( 'py3.6 can package flask with zip & slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -530,7 +536,7 @@ test( test( 'py2.7 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -545,7 +551,7 @@ test( test( 'py2.7 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -553,17 +559,18 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -573,7 +580,7 @@ test( test( 'py2.7 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -581,7 +588,7 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--zip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( @@ -603,7 +610,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -614,17 +621,17 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -635,7 +642,7 @@ test( test( "py2.7 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -644,7 +651,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -657,7 +664,7 @@ test( test( 'py2.7 can package flask with zip & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -666,7 +673,7 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--zip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -696,7 +703,7 @@ test( test( 'py2.7 can package flask with zip & slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -706,7 +713,7 @@ test( '--dockerizePip=true', '--zip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -736,7 +743,7 @@ test( test( 'py2.7 can package flask with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -744,7 +751,7 @@ test( `--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', '--dockerizePip=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -757,7 +764,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -766,17 +773,18 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], '*.pyc files are NOT packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -786,7 +794,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -797,17 +805,17 @@ test( '--runtime=python2.7', '--dockerizePip=true', '--slim=true', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => 
filename.endsWith('.pyc')), [], '*.pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -818,7 +826,7 @@ test( test( 'pipenv py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -837,7 +845,7 @@ test( test( 'pipenv py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -845,12 +853,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -860,7 +869,7 @@ test( test( 'pipenv py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -870,12 +879,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -886,7 +895,7 @@ test( test( 'pipenv py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -911,7 +920,7 @@ test( test( "pipenv py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -920,7 +929,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls(['package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -933,7 +942,7 @@ test( test( 'non build pyproject.toml uses requirements.txt', - async t => { + async (t) => { process.chdir('tests/non_build_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -948,7 +957,7 @@ test( test( 'non poetry pyproject.toml without requirements.txt packages handler only', - async t => { + async (t) => { process.chdir('tests/non_poetry_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -962,7 +971,7 @@ test( test( 'poetry py3.6 can package flask with default options', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -978,7 +987,7 @@ test( test( 'poetry py3.6 can package flask with slim option', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -986,12 +995,13 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.true( - zipfiles.filter(filename => filename.endsWith('__main__.py')).length > 0, + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > + 0, '__main__.py files are packaged' ); t.end(); @@ -1001,7 +1011,7 @@ test( test( 'poetry py3.6 can package flask with slim & slimPatterns options', - async t => { + async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1011,12 +1021,12 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1027,7 +1037,7 @@ test( test( 'poetry py3.6 can package flask with zip option', - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1052,7 +1062,7 @@ test( test( "poetry py3.6 doesn't package bottle with noDeploy option", - async t => { + async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1061,7 +1071,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls(['package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -1074,7 +1084,7 @@ test( test( 'py3.6 can package flask with zip option and no explicit include', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1101,7 +1111,7 @@ test( test( 'py3.6 can package lambda-decorators using vendor option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1120,7 +1130,7 @@ test( test( "Don't nuke execute perms", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); const perm = '755'; @@ -1131,7 +1141,7 @@ test( '-i.bak', '-e', 's/(handler.py.*$)/$1\n - foobar/', - 'serverless.yml' + 'serverless.yml', ]); writeFileSync(`foobar`, ''); chmodSync(`foobar`, perm); @@ -1169,7 +1179,7 @@ test( test( 'py3.6 can package flask in a project with a space in it', - async t => { + async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -1185,7 +1195,7 @@ test( test( 'py3.6 can package flask in a project with a space in it with docker', - async t => { + async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -1201,7 +1211,7 @@ test( test( 'py3.6 supports custom file name with fileName option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); @@ -1227,7 +1237,7 @@ test( test( "py3.6 doesn't package bottle with zip option", - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', 
path]); @@ -1236,7 +1246,7 @@ test( '-i.bak', '-e', 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml' + 'serverless.yml', ]); sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -1270,7 +1280,7 @@ test( test( 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1280,11 +1290,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1295,7 +1305,7 @@ test( test( 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1304,17 +1314,17 @@ test( '--dockerizePip=true', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1325,7 +1335,7 @@ test( test( 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1334,17 +1344,17 @@ test( '--runtime=python2.7', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1355,7 +1365,7 @@ test( test( 'py2.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async t => { + async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1365,16 +1375,16 @@ test( '--runtime=python2.7', '--slim=true', '--slimPatternsAppendDefaults=false', - 'package' + 'package', ]); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); 
t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1385,7 +1395,7 @@ test( test( 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { + async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1395,11 +1405,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1410,7 +1420,7 @@ test( test( 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async t => { + async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -1420,11 +1430,11 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( - zipfiles.filter(filename => filename.endsWith('.pyc')).length >= 1, + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, 'pyc files are packaged' ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('__main__.py')), + zipfiles.filter((filename) => filename.endsWith('__main__.py')), [], '__main__.py files are NOT packaged' ); @@ -1435,7 +1445,7 @@ test( test( 'py3.6 can package flask with package individually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1520,11 +1530,9 @@ test( { skip: !hasPython(3.6) } ); - - test( 'py3.6 can package flask with package individually & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1536,7 +1544,7 @@ test( 'handler.py is packaged in function hello' ); t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello' ); @@ -1555,7 +1563,7 @@ test( 'handler.py is packaged in function hello2' ); t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello2' ); @@ -1574,7 +1582,7 @@ test( 'handler.py is packaged in function hello3' ); t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello3' ); @@ -1599,7 +1607,7 @@ test( 'flask is NOT packaged in function hello4' ); t.deepEqual( - zipfiles_hello4.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function 
hello4' ); @@ -1611,7 +1619,7 @@ test( test( 'py2.7 can package flask with package individually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1682,7 +1690,7 @@ test( test( 'py2.7 can package flask with package individually & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1690,7 +1698,7 @@ test( '--individually=true', '--runtime=python2.7', '--slim=true', - 'package' + 'package', ]); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); @@ -1699,7 +1707,7 @@ test( 'handler.py is packaged in function hello' ); t.deepEqual( - zipfiles_hello.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello' ); @@ -1718,7 +1726,7 @@ test( 'handler.py is packaged in function hello2' ); t.deepEqual( - zipfiles_hello2.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello2' ); @@ -1737,7 +1745,7 @@ test( 'handler.py is packaged in function hello3' ); t.deepEqual( - zipfiles_hello3.filter(filename => filename.endsWith('.pyc')), + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files packaged in function hello3' ); @@ -1773,7 +1781,7 @@ test( test( 'py2.7 can ignore functions defined with `image`', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1807,7 +1815,7 @@ test( test( 'py3.6 can package only requirements of module', - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1868,7 +1876,7 @@ test( test( 'py3.6 can package lambda-decorators using vendor and invidiually option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1950,7 +1958,7 @@ test( test( "Don't nuke execute perms when using individually", - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; @@ -1974,8 +1982,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -1989,7 +1998,7 @@ test( test( "Don't nuke execute perms when using individually w/docker", - async t => { + async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; @@ -2013,8 +2022,9 @@ test( const zipfiles_hello2 = await listZipFilesWithMetaData( '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' ); - const flaskPerm = statSync('.serverless/module2/requirements/bin/flask') - .mode; + const flaskPerm = statSync( + '.serverless/module2/requirements/bin/flask' + ).mode; t.true( zipfiles_hello2['bin/flask'].unixPermissions === flaskPerm, @@ -2028,7 +2038,7 @@ test( test( 'py3.6 uses download cache by default option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2045,7 +2055,7 @@ test( test( 'py3.6 uses download cache by default', - async t => { + 
async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2061,7 +2071,7 @@ test( test( 'py3.6 uses download cache with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2078,14 +2088,14 @@ test( test( 'py3.6 uses download cache with dockerizePip by default option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls([ '--dockerizePip=true', '--cacheLocation=.requirements-cache', - 'package' + 'package', ]); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), @@ -2098,7 +2108,7 @@ test( test( 'py3.6 uses static and download cache', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2120,7 +2130,7 @@ test( test( 'py3.6 uses static and download cache with dockerizePip option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2142,7 +2152,7 @@ test( test( 'py3.6 uses static cache', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2180,7 +2190,7 @@ test( test( 'py3.6 uses static cache with cacheLocation option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2204,7 +2214,7 @@ test( test( 'py3.6 uses static cache with dockerizePip & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2235,7 +2245,7 @@ test( "static cache is really used when running 'sls package' again" ); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); @@ -2247,7 +2257,7 @@ test( test( 'py3.6 uses download cache with dockerizePip & slim option', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -2261,7 +2271,7 @@ test( const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( - zipfiles.filter(filename => filename.endsWith('.pyc')), + zipfiles.filter((filename) => filename.endsWith('.pyc')), [], 'no pyc files are packaged' ); @@ -2273,13 +2283,12 @@ test( test( 'py3.6 can ignore functions defined with `image`', - async t => { + async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); sls(['--individually=true', 'package']); - t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' diff --git a/tests/base/_slimPatterns.yml b/tests/base/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/base/_slimPatterns.yml +++ b/tests/base/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 6bb1f322..0b360e9b 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -50,6 +50,3 @@ functions: - 'fn2/**' hello5: image: 000000000000.dkr.ecr.sa-east-1.amazonaws.com/test-lambda-docker@sha256:6bb600b4d6e1d7cf521097177dd0c4e9ea373edb91984a505333be8ac9455d38 - - - diff --git a/tests/pipenv/_slimPatterns.yml b/tests/pipenv/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- 
a/tests/pipenv/_slimPatterns.yml +++ b/tests/pipenv/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' diff --git a/tests/poetry/_slimPatterns.yml b/tests/poetry/_slimPatterns.yml index 02c631b4..443af9a0 100644 --- a/tests/poetry/_slimPatterns.yml +++ b/tests/poetry/_slimPatterns.yml @@ -1,2 +1,2 @@ slimPatterns: - - "**/__main__.py" + - '**/__main__.py' From 9b84abf826d95a5ad152d6a70de7c722770b467b Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 26 Nov 2021 17:04:17 +0100 Subject: [PATCH 23/90] chore: Remove dependabot --- .github/dependabot.yml | 12 ------------ 1 file changed, 12 deletions(-) delete mode 100644 .github/dependabot.yml diff --git a/.github/dependabot.yml b/.github/dependabot.yml deleted file mode 100644 index ab487438..00000000 --- a/.github/dependabot.yml +++ /dev/null @@ -1,12 +0,0 @@ -version: 2 -updates: - - package-ecosystem: npm - directory: '/' - schedule: - interval: daily - time: '10:00' - open-pull-requests-limit: 10 - ignore: - - dependency-name: eslint - versions: - - '> 7.22.0' From 89b3bababd48d496e159c9694546b8ab18b2955b Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 29 Nov 2021 15:03:46 +0100 Subject: [PATCH 24/90] chore: Remove Node16 tests --- .github/workflows/integrate.yml | 63 +++--------------------------- .github/workflows/publish.yml | 4 +- .github/workflows/validate.yml | 69 ++++----------------------------- 3 files changed, 16 insertions(+), 120 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 64396542..953951df 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,59 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode16: - name: '[Linux] Node.js v16: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.6] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 16.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - - windowsNode16: - name: '[Windows] Node.js v16: Unit tests' + windowsNode14: + name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest strategy: matrix: @@ -78,8 +27,8 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v16-${{ runner.os }}-${{ github.ref }}- + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -89,7 +38,7 @@ jobs: - name: Install Node.js and npm uses: 
actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x - name: Check python version run: | @@ -217,7 +166,7 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode16, windowsNode16, linuxNode14, linuxNode12] + needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b44da770..6eee5b45 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index d541cec7..9215eee1 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -10,8 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode16: - name: '[Linux] Node.js v16: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + linuxNode14: + name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -38,10 +38,10 @@ jobs: path: | ~/.npm node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: | - npm-v16-${{ runner.os }}-${{ github.ref }}- - npm-v16-${{ runner.os }}-refs/heads/master- + npm-v14-${{ runner.os }}-${{ github.ref }}- + npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v1 @@ -51,7 +51,7 @@ jobs: - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 16.x + node-version: 14.x - name: Check python version run: | @@ -88,62 +88,9 @@ jobs: - name: Unit tests run: npm test - windowsNode16: - name: '[Windows] Node.js v16: Unit tests' + windowsNode14: + name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.6] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v16-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v16-${{ runner.os }}-${{ github.ref }}- - npm-v16-${{ runner.os }}-refs/heads/master- - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 16.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@2 - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - - linuxNode14: - name: '[Linux] Node.js 14: Unit tests' - runs-on: 
ubuntu-latest strategy: matrix: python-version: [2.7, 3.6] From 328cb016e58231d3c72399918553c4b10d4aa6d5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Wed, 1 Dec 2021 20:13:59 +0100 Subject: [PATCH 25/90] feat: Add architecture to requirements cache directory name (#645) --- lib/pip.js | 3 ++- lib/shared.js | 7 +++++-- test.js | 35 +++++++++++++++++++++++++---------- 3 files changed, 32 insertions(+), 13 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index ce348532..7a0a0ceb 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -640,7 +640,8 @@ async function installRequirementsIfNeeded( const workingReqsFolder = getRequirementsWorkingPath( reqChecksum, requirementsTxtDirectory, - options + options, + serverless ); // Check if our static cache is present and is valid diff --git a/lib/shared.js b/lib/shared.js index 7baee58b..426d6c50 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -62,17 +62,20 @@ function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { * @param {string} subfolder * @param {string} servicePath * @param {Object} options + * @param {Object} serverless * @return {string} */ function getRequirementsWorkingPath( subfolder, requirementsTxtDirectory, - options + options, + serverless ) { // If we want to use the static cache if (options && options.useStaticCache) { if (subfolder) { - subfolder = subfolder + '_slspyc'; + const architecture = serverless.service.provider.architecture || 'x86_64'; + subfolder = `${subfolder}_${architecture}_slspyc`; } // If we have max number of cache items... diff --git a/test.js b/test.js index f234229a..ccd1920c 100644 --- a/test.js +++ b/test.js @@ -2115,12 +2115,15 @@ test( sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' ); t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.end(); @@ -2137,12 +2140,15 @@ test( sls(['--dockerizePip=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'http exists in download-cache' ); t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.end(); @@ -2159,20 +2165,23 @@ test( sls(['package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); // py3.6 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + 
`${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); sls(['package']); @@ -2197,13 +2206,16 @@ test( const cachepath = '.requirements-cache'; sls([`--cacheLocation=${cachepath}`, 'package']); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); @@ -2221,20 +2233,23 @@ test( sls(['--dockerizePip=true', '--slim=true', 'package']); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; t.true( - pathExistsSync(`${cachepath}${sep}${cacheFolderHash}_slspyc${sep}flask`), + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), 'flask exists in static-cache' ); t.true( pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}.completed_requirements` + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` ), '.completed_requirements exists in static-cache' ); // py3.6 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_slspyc${sep}injected_file_is_bad_form`, + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); sls(['--dockerizePip=true', '--slim=true', 'package']); From 347245cc5a7d97c56dc32f0da4285e0be59cf535 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 30 Nov 2021 16:57:55 +0100 Subject: [PATCH 26/90] chore: Add `standard-version` config --- package.json | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/package.json b/package.json index 4db9497a..6a9923c4 100644 --- a/package.json +++ b/package.json @@ -55,6 +55,7 @@ "github-release-from-cc-changelog": "^2.2.0", "lodash": "^4.17.21", "prettier": "^2", + "standard-version": "^9.3.2", "tape": "*", "tape-promise": "*" }, @@ -100,6 +101,30 @@ "no-console": "off" } }, + "standard-version": { + "skip": { + "commit": true, + "tag": true + }, + "types": [ + { + "type": "feat", + "section": "Features" + }, + { + "type": "fix", + "section": "Bug Fixes" + }, + { + "type": "perf", + "section": "Performance Improvements" + }, + { + "type": "refactor", + "section": "Maintenance Improvements" + } + ] + }, "prettier": { "semi": true, "singleQuote": true From e3d9ebcdd3ec72cc2e3301d33e10dd10ef6a594d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 30 Nov 2021 17:01:53 +0100 Subject: [PATCH 27/90] chore: Release v5.2.1 --- CHANGELOG.md | 14 ++++++++++++++ package.json | 2 +- 2 files changed, 15 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c79a3c3..bd87ce72 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,3 +1,17 @@ # Changelog All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+ +### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) + +### Maintenance Improvements + +- Adapt plugin to modern logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([8ff97e6](https://github.com/UnitedIncome/serverless-python-requirements/commit/8ff97e6b7c279334e417dbdb65e64d0de2656986)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt to `async` version of `spawn` ([#648](https://github.com/serverless/serverless-python-requirements/pull/648)) ([50c2850](https://github.com/UnitedIncome/serverless-python-requirements/commit/50c2850874ded795fd50ae377f1db817a0212e7d)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Adapt v3 log writing interfaces ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([a79899a](https://github.com/UnitedIncome/serverless-python-requirements/commit/a79899ae5f6f66aa0c65e7fda8e0186d38ff446e)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Ensure proper verbose progress logs ([#646](https://github.com/serverless/serverless-python-requirements/pull/646)) ([44b9591](https://github.com/UnitedIncome/serverless-python-requirements/commit/44b9591f01157a1811e3ca8b43e21265a155a976)) ([Piotr Grzesik](https://github.com/pgrzesik)) +- Use `ServerlessError` ([#649](https://github.com/serverless/serverless-python-requirements/pull/649)) ([cdb7111](https://github.com/UnitedIncome/serverless-python-requirements/commit/cdb71110bc9c69b5087b6e18fb353d65962afe4a)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +# Changelog + +All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. diff --git a/package.json b/package.json index 6a9923c4..c6a1f5e6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.0", + "version": "5.2.1", "engines": { "node": ">=12.0" }, From f60eed1225f091c090f9c253771a12b33fafcab0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 2 Dec 2021 20:57:51 +0100 Subject: [PATCH 28/90] fix: Ensure cast `toString` before `trim` on buffer --- lib/docker.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/docker.js b/lib/docker.js index 5157803f..a3079ff7 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -103,12 +103,12 @@ async function tryBindPath(bindPath, testFile, pluginInstance) { const ps = await dockerCommand(options, pluginInstance); if (debug) { if (log) { - log.debug(ps.stdoutBuffer.trim()); + log.debug(ps.stdoutBuffer.toString().trim()); } else { - serverless.cli.log(ps.stdoutBuffer.trim()); + serverless.cli.log(ps.stdoutBuffer.toString().trim()); } } - return ps.stdoutBuffer.trim() === `/test/${testFile}`; + return ps.stdoutBuffer.toString().trim() === `/test/${testFile}`; } catch (err) { if (debug) { if (log) { @@ -197,7 +197,7 @@ async function getDockerUid(bindPath, pluginInstance) { '/bin/sh', ]; const ps = await dockerCommand(options, pluginInstance); - return ps.stdoutBuffer.trim(); + return ps.stdoutBuffer.toString().trim(); } module.exports = { buildImage, getBindPath, getDockerUid }; From c4808770713f451dcaa907fb40fcd34414bf7de7 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 3 Dec 2021 13:49:37 +0100 Subject: [PATCH 29/90] chore: Release v5.2.2 --- CHANGELOG.md | 6 ++++++ package.json | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
bd87ce72..47570012 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([#656](https://github.com/serverless/serverless-python-requirements/pull/656)) ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) ([Piotr Grzesik](https://github.com/pgrzesik)) + ### [5.2.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.0...v5.2.1) (2021-11-30) ### Maintenance Improvements diff --git a/package.json b/package.json index c6a1f5e6..44554659 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.1", + "version": "5.2.2", "engines": { "node": ">=12.0" }, From 406f6bac1ca934a34387048b5c00242aff3f581b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Mon, 20 Dec 2021 18:06:23 +0100 Subject: [PATCH 30/90] feat: Support requirements layer caching (#644) --- lib/layer.js | 47 ++++++++++++++++++++++++++++++++++++++++++----- lib/shared.js | 21 +++++++++++++++++++++ 2 files changed, 63 insertions(+), 5 deletions(-) diff --git a/lib/layer.js b/lib/layer.js index fe2a4a00..6fe9ca4c 100644 --- a/lib/layer.js +++ b/lib/layer.js @@ -3,6 +3,7 @@ const fse = require('fs-extra'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, addTree } = require('./zipTree'); +const { sha256Path, getRequirementsLayerPath } = require('./shared'); BbPromise.promisifyAll(fse); @@ -11,13 +12,49 @@ BbPromise.promisifyAll(fse); * @return {Promise} the JSZip object constructed. 
*/ function zipRequirements() { - const rootZip = new JSZip(); const src = path.join('.serverless', 'requirements'); - const runtimepath = 'python'; - - return addTree(rootZip.folder(runtimepath), src).then(() => - writeZip(rootZip, path.join('.serverless', 'pythonRequirements.zip')) + const reqChecksum = sha256Path(path.join('.serverless', 'requirements.txt')); + const targetZipPath = path.join('.serverless', 'pythonRequirements.zip'); + const zipCachePath = getRequirementsLayerPath( + reqChecksum, + targetZipPath, + this.options, + this.serverless ); + + const promises = []; + if (fse.existsSync(zipCachePath)) { + let layerProgress; + if (this.progress && this.log) { + layerProgress = this.progress.get('python-layer-requirements'); + layerProgress.update( + 'Using cached Python Requirements Lambda Layer file' + ); + this.log.info('Found cached Python Requirements Lambda Layer file'); + } else { + this.serverless.cli.log( + 'Found cached Python Requirements Lambda Layer file' + ); + } + } else { + const rootZip = new JSZip(); + const runtimepath = 'python'; + + promises.push( + addTree(rootZip.folder(runtimepath), src).then(() => + writeZip(rootZip, zipCachePath) + ) + ); + } + return BbPromise.all(promises).then(() => { + if (zipCachePath !== targetZipPath) { + if (process.platform === 'win32') { + fse.copySync(zipCachePath, targetZipPath); + } else { + fse.symlink(zipCachePath, targetZipPath, 'file'); + } + } + }); } /** diff --git a/lib/shared.js b/lib/shared.js index 426d6c50..bebb3f09 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -86,6 +86,26 @@ function getRequirementsWorkingPath( return path.join(requirementsTxtDirectory, 'requirements'); } +/** + * Path of a cached requirements layer archive file + * @param {string} subfolder + * @param {string} fallback + * @param {Object} options + * @param {Object} serverless + * @return {string} + */ +function getRequirementsLayerPath(hash, fallback, options, serverless) { + // If we want to use the static cache + if (hash && options && options.useStaticCache) { + const architecture = serverless.service.provider.architecture || 'x86_64'; + hash = `${hash}_${architecture}_slspyc.zip`; + return path.join(getUserCachePath(options), hash); + } + + // If we don't want to use the static cache, then fallback to requirements file in .serverless directory + return fallback; +} + /** * The static cache path that will be used for this system + options, used if static cache is enabled * @param {Object} options @@ -117,6 +137,7 @@ function sha256Path(fullpath) { module.exports = { checkForAndDeleteMaxCacheVersions, getRequirementsWorkingPath, + getRequirementsLayerPath, getUserCachePath, sha256Path, }; From 6730d7e383ab2d0a2bb55b42c4713fdf12877690 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 21 Dec 2021 14:37:27 +0100 Subject: [PATCH 31/90] chore: Update `serverless` peer dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 44554659..39afe62e 100644 --- a/package.json +++ b/package.json @@ -77,7 +77,7 @@ "shell-quote": "^1.7.3" }, "peerDependencies": { - "serverless": "^2.32" + "serverless": "^2.32 || 3" }, "lint-staged": { "*.js": [ From 95c694f9e64dfb4f59ac6e6da4dec83c1d572c5c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 21 Dec 2021 14:38:59 +0100 Subject: [PATCH 32/90] chore: Release v5.3.0 --- CHANGELOG.md | 10 ++++++++++ package.json | 2 +- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
47570012..805a42e5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,16 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) + +### Features + +- Support requirements layer caching ([#644](https://github.com/UnitedIncome/serverless-python-requirements/issues/644)) ([406f6ba](https://github.com/UnitedIncome/serverless-python-requirements/commit/406f6bac1ca934a34387048b5c00242aff3f581b)) ([Maciej Wilczyński](https://github.com/mLupine)) + +### Bug Fixes + +- Ensure cast `toString` before `trim` on buffer ([f60eed1](https://github.com/UnitedIncome/serverless-python-requirements/commit/f60eed1225f091c090f9c253771a12b33fafcab0)) + ### [5.2.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.2.2) (2021-12-03) ### Bug Fixes diff --git a/package.json b/package.json index 39afe62e..a7e2b2a1 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.2.2", + "version": "5.3.0", "engines": { "node": ">=12.0" }, From b16c82dbdd31ca7f61093bb6b8ed50be31908a24 Mon Sep 17 00:00:00 2001 From: Shinichi Makino Date: Wed, 26 Jan 2022 19:13:04 +0900 Subject: [PATCH 33/90] fix: Address unknown path format error in `wsl2` (#667) --- lib/docker.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/docker.js b/lib/docker.js index a3079ff7..68cf935b 100644 --- a/lib/docker.js +++ b/lib/docker.js @@ -2,6 +2,7 @@ const spawn = require('child-process-ext/spawn'); const isWsl = require('is-wsl'); const fse = require('fs-extra'); const path = require('path'); +const os = require('os'); /** * Helper function to run a docker command @@ -129,7 +130,8 @@ async function tryBindPath(bindPath, testFile, pluginInstance) { */ async function getBindPath(servicePath, pluginInstance) { // Determine bind path - if (process.platform !== 'win32' && !isWsl) { + let isWsl1 = isWsl && !os.release().includes('microsoft-standard'); + if (process.platform !== 'win32' && !isWsl1) { return servicePath; } From 1668cbc2a91f9e4b84024a9609877631aa3d71aa Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 28 Jan 2022 15:12:40 +0100 Subject: [PATCH 34/90] chore: Release v5.3.1 --- CHANGELOG.md | 6 ++++++ package.json | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 805a42e5..bbe979bf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) + +### Bug Fixes + +- Address unknown path format error in `wsl2` ([#667](https://github.com/UnitedIncome/serverless-python-requirements/issues/667)) ([b16c82d](https://github.com/UnitedIncome/serverless-python-requirements/commit/b16c82dbdd31ca7f61093bb6b8ed50be31908a24)) ([Shinichi Makino](https://github.com/snicmakino)) + ## [5.3.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.2.1...v5.3.0) (2021-12-21) ### Features diff --git a/package.json b/package.json index a7e2b2a1..50ef2246 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.3.0", + "version": "5.3.1", "engines": { "node": ">=12.0" }, From ec84948747a2fe578f7f112b443a3710e6202b42 Mon Sep 17 00:00:00 2001 From: Marcin Szleszynski <64603095+martinezpl@users.noreply.github.com> Date: Sun, 13 Feb 2022 15:30:22 +0100 Subject: [PATCH 35/90] test: Refactor tests to use `env` instead of `opt` vars (#672) --- test.js | 360 +++++++++++++++--------------- tests/base/serverless.yml | 22 +- tests/individually/serverless.yml | 2 +- tests/pipenv/serverless.yml | 8 +- tests/poetry/serverless.yml | 8 +- 5 files changed, 196 insertions(+), 204 deletions(-) diff --git a/test.js b/test.js index ccd1920c..11a7cce5 100644 --- a/test.js +++ b/test.js @@ -22,15 +22,15 @@ const initialWorkingDir = process.cwd(); const mkCommand = (cmd) => (args, options = {}) => { + options['env'] = Object.assign( + { SLS_DEBUG: 't' }, + process.env, + options['env'] + ); const { error, stdout, stderr, status } = crossSpawn.sync( cmd, args, - Object.assign( - { - env: Object.assign({}, process.env, { SLS_DEBUG: 't' }), - }, - options - ) + options ); if (error) { console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console @@ -45,6 +45,7 @@ const mkCommand = } return stdout && stdout.toString().trim(); }; + const sls = mkCommand('sls'); const git = mkCommand('git'); const npm = mkCommand('npm'); @@ -205,7 +206,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -220,9 +221,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package']); + sls(['package'], { env: {} }); t.equal( sha256File('.serverless/sls-py-req-test.zip'), fileHash, @@ -239,7 +240,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -254,11 +255,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-hashes.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-hashes.txt', + 
pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); @@ -272,11 +274,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-nested.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -291,7 +294,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -316,7 +319,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--slim=true', 'package']); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -341,7 +344,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -372,7 +375,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(3)}`, 'package']); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -387,11 +390,12 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(3)}`, - '--fileName=requirements-w-editable.txt', - 'package', - ]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.true( @@ -409,8 +413,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -425,7 +428,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -450,7 +453,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -474,8 +477,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -507,8 +509,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--zip=true', '--slim=true', 'package']); - + sls(['package'], { + env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -540,7 +543,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); + sls(['package'], { + env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -555,12 +560,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { runtime: 'python2.7', slim: 'true', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -584,12 +586,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--zip=true', - 'package', - ]); + sls(['package'], { + env: { runtime: 'python2.7', zip: 'true', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -616,13 +615,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -653,7 +653,9 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(2)}`, '--runtime=python2.7', 'package']); + sls(['package'], { + env: { 
runtime: 'python2.7', pythonBin: getPythonBin(2) }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -668,14 +670,14 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + zip: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -707,15 +709,15 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--zip=true', - '--slim=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + zip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -747,13 +749,13 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -768,13 +770,14 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -800,13 +803,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - `--pythonBin=${getPythonBin(2)}`, - '--runtime=python2.7', - '--dockerizePip=true', - '--slim=true', - 'package', - ]); + sls(['package'], { + env: { + runtime: 'python2.7', + dockerizePip: 'true', + slim: 'true', + pythonBin: getPythonBin(2), + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -830,7 +834,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -849,7 +853,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 
'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -875,7 +879,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -899,7 +903,7 @@ test( process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -931,7 +935,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -946,7 +950,7 @@ test( process.chdir('tests/non_build_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -961,7 +965,7 @@ test( process.chdir('tests/non_poetry_pyproject'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); t.end(); @@ -975,7 +979,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); @@ -991,7 +995,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -1017,7 +1021,7 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', 'package']); + sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.deepEqual( @@ -1041,7 +1045,7 @@ test( process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -1073,7 +1077,7 
@@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); @@ -1090,7 +1094,7 @@ test( npm(['i', path]); perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['--zip=true', 'package']); + sls(['package'], { env: { zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('.requirements.zip'), @@ -1115,7 +1119,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([`--vendor=./vendor`, 'package']); + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1145,8 +1149,7 @@ test( ]); writeFileSync(`foobar`, ''); chmodSync(`foobar`, perm); - sls(['--vendor=./vendor', 'package']); - + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1184,7 +1187,7 @@ test( process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1200,7 +1203,7 @@ test( process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); @@ -1216,7 +1219,7 @@ test( const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); npm(['i', path]); - sls(['--fileName=puck', 'package']); + sls(['package'], { env: { fileName: 'puck' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes(`requests${sep}__init__.py`), @@ -1248,7 +1251,7 @@ test( 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', 'serverless.yml', ]); - sls([`--pythonBin=${getPythonBin(3)}`, '--zip=true', 'package']); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); const zippedReqs = await listRequirementsZipFiles( '.serverless/sls-py-req-test.zip' @@ -1285,8 +1288,9 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); - + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1310,13 +1314,13 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); - + sls(['package'], { + env: { + dockerizePip: 'true', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1340,13 +1344,13 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); - + sls(['package'], { + env: { + runtime: 'python2.7', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1370,13 +1374,14 @@ test( copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--runtime=python2.7', - '--slim=true', - '--slimPatternsAppendDefaults=false', - 'package', - ]); + sls(['package'], { + env: { + dockerizePip: 'true', + runtime: 'python2.7', + slim: 'true', + slimPatternsAppendDefaults: 'false', + }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1401,7 +1406,9 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1426,7 +1433,9 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--slim=true', '--slimPatternsAppendDefaults=false', 'package']); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true( @@ -1449,8 +1458,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', 'package']); - + sls(['package'], { env: { individually: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.false( zipfiles_hello.includes(`fn2${sep}__init__.py`), @@ -1536,8 +1544,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--slim=true', 'package']); - + sls(['package'], { env: { individually: 'true', slim: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1623,8 +1630,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); - + sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( 
zipfiles_hello.includes('handler.py'), @@ -1694,13 +1700,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--individually=true', - '--runtime=python2.7', - '--slim=true', - 'package', - ]); - + sls(['package'], { + env: { individually: 'true', runtime: 'python2.7', slim: 'true' }, + }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1785,8 +1787,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--runtime=python2.7', 'package']); - + sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' @@ -1819,8 +1820,7 @@ test( process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles_hello = await listZipFiles( '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' ); @@ -1880,8 +1880,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', '--vendor=./vendor', 'package']); - + sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); t.true( zipfiles_hello.includes('handler.py'), @@ -1966,8 +1965,7 @@ test( chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles_hello1 = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -2006,8 +2004,7 @@ test( chmodSync(`module1${sep}foobar`, perm); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles_hello = await listZipFilesWithMetaData( '.serverless/hello1.zip' ); @@ -2042,7 +2039,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2059,7 +2056,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--cacheLocation=.requirements-cache', 'package']); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' @@ -2075,7 +2072,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2092,11 +2089,9 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls([ - '--dockerizePip=true', - '--cacheLocation=.requirements-cache', - 'package', - ]); + sls(['package'], { + env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' }, + }); t.true( pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' @@ -2112,7 +2107,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); const cacheFolderHash = 
sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2137,7 +2132,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2162,7 +2157,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package']); + sls(['package'], { env: {} }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2184,8 +2179,7 @@ test( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['package']); - + sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), @@ -2204,7 +2198,7 @@ test( const path = npm(['pack', '../..']); npm(['i', path]); const cachepath = '.requirements-cache'; - sls([`--cacheLocation=${cachepath}`, 'package']); + sls(['package'], { env: { cacheLocation: cachepath } }); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; t.true( @@ -2230,7 +2224,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); const cacheFolderHash = sha256Path('.serverless/requirements.txt'); const arch = 'x86_64'; @@ -2252,8 +2246,7 @@ test( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' ); - sls(['--dockerizePip=true', '--slim=true', 'package']); - + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true( zipfiles.includes('injected_file_is_bad_form'), @@ -2276,7 +2269,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--dockerizePip=true', '--slim=true', 'package']); + sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); t.true( pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), @@ -2302,8 +2295,7 @@ test( process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['--individually=true', 'package']); - + sls(['package'], { env: { individually: 'true' } }); t.true( pathExistsSync('.serverless/hello.zip'), 'function hello is packaged' diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 0b360e9b..37238158 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,22 +2,22 @@ service: sls-py-req-test provider: name: aws - runtime: ${opt:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.6'} plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: 
${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - vendor: ${opt:vendor, ''} - fileName: ${opt:fileName, 'requirements.txt'} - useStaticCache: ${opt:useStaticCache, self:custom.defaults.useStaticCache} - useDownloadCache: ${opt:useDownloadCache, self:custom.defaults.useDownloadCache} - cacheLocation: ${opt:cacheLocation, ''} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + vendor: ${env:vendor, ''} + fileName: ${env:fileName, 'requirements.txt'} + useStaticCache: ${env:useStaticCache, self:custom.defaults.useStaticCache} + useDownloadCache: ${env:useDownloadCache, self:custom.defaults.useDownloadCache} + cacheLocation: ${env:cacheLocation, ''} defaults: slim: false slimPatterns: false @@ -29,7 +29,7 @@ custom: useDownloadCache: true package: - individually: ${opt:individually, self:custom.defaults.individually} + individually: ${env:individually, self:custom.defaults.individually} patterns: - '!**/*' - 'handler.py' diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index 121bd89d..a83ac7e0 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -10,7 +10,7 @@ package: - '!node_modules/**' custom: pythonRequirements: - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: dockerizePip: false diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index dd93e290..4b343bfc 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -8,11 +8,11 @@ plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index dd93e290..4b343bfc 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -8,11 +8,11 @@ plugins: - serverless-python-requirements custom: pythonRequirements: - zip: ${opt:zip, self:custom.defaults.zip} - slim: ${opt:slim, self:custom.defaults.slim} + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} - slimPatternsAppendDefaults: ${opt:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} - dockerizePip: ${opt:dockerizePip, self:custom.defaults.dockerizePip} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} defaults: zip: false slimPatterns: false From 769bc820eed1f65c8ae41ccfa74749c650560e6a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: 
Sun, 27 Feb 2022 22:26:21 +0100 Subject: [PATCH 36/90] ci: Upgrade `setup-python` github action --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 953951df..0d77acfc 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -31,7 +31,7 @@ jobs: restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -82,7 +82,7 @@ jobs: restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -133,7 +133,7 @@ jobs: restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 9215eee1..b4c245f5 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -44,7 +44,7 @@ jobs: npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -111,7 +111,7 @@ jobs: npm-v14-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} @@ -164,7 +164,7 @@ jobs: npm-v12-${{ runner.os }}-refs/heads/master- - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} From ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7 Mon Sep 17 00:00:00 2001 From: Andrei Zhemaituk Date: Mon, 28 Feb 2022 07:10:15 -0500 Subject: [PATCH 37/90] refactor: Log child process command output on error (#679) --- lib/pip.js | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index 7a0a0ceb..79dec42a 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -415,6 +415,13 @@ async function installRequirements(targetFolder, pluginInstance) { 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' ); } + if (log) { + log.info(`Stdout: ${e.stdoutBuffer}`); + log.info(`Stderr: ${e.stderrBuffer}`); + } else { + serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); + serverless.cli.log(`Stderr: ${e.stderrBuffer}`); + } throw e; } } From 3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16 Mon Sep 17 00:00:00 2001 From: Marc Hassan Date: Wed, 2 Mar 2022 07:11:36 -0500 Subject: [PATCH 38/90] refactor: Replace `lodash.set` with `set-value` (#676) --- lib/inject.js | 2 +- lib/pip.js | 2 +- lib/zip.js | 2 +- package.json | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/inject.js b/lib/inject.js index f32c9d46..ea20e58d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -2,7 +2,7 @@ const BbPromise = require('bluebird'); const fse = require('fs-extra'); const glob = require('glob-all'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const path = require('path'); const JSZip = require('jszip'); const { writeZip, zipFile 
} = require('./zipTree'); diff --git a/lib/pip.js b/lib/pip.js index 79dec42a..9e7c592e 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -2,7 +2,7 @@ const fse = require('fs-extra'); const rimraf = require('rimraf'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); diff --git a/lib/zip.js b/lib/zip.js index cba29450..4b652f98 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -1,7 +1,7 @@ const fse = require('fs-extra'); const path = require('path'); const get = require('lodash.get'); -const set = require('lodash.set'); +const set = require('set-value'); const uniqBy = require('lodash.uniqby'); const BbPromise = require('bluebird'); const JSZip = require('jszip'); diff --git a/package.json b/package.json index 50ef2246..d9422de5 100644 --- a/package.json +++ b/package.json @@ -69,10 +69,10 @@ "is-wsl": "^2.2.0", "jszip": "^3.7.1", "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", + "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.7.3" }, From 915bcadad2f8a3be5434d6e42771bc835271baf8 Mon Sep 17 00:00:00 2001 From: Marcin Szleszynski <64603095+martinezpl@users.noreply.github.com> Date: Tue, 8 Mar 2022 13:00:56 +0100 Subject: [PATCH 39/90] feat: Support `dockerPrivateKey` to specify path to SSH key (#674) --- README.md | 19 ++++++++++++++++--- index.js | 7 +++++-- lib/pip.js | 8 ++++++-- test.js | 33 ++++++++++++++++++++++++++++++--- tests/base/custom_ssh | 1 + tests/base/serverless.yml | 6 ++++++ 6 files changed, 64 insertions(+), 10 deletions(-) create mode 100644 tests/base/custom_ssh diff --git a/README.md b/README.md index abe6a175..63b1a32a 100644 --- a/README.md +++ b/README.md @@ -77,8 +77,20 @@ custom: ``` The `dockerSsh` option will mount your `$HOME/.ssh/id_rsa` and `$HOME/.ssh/known_hosts` as a -volume in the docker container. If your SSH key is password protected, you can use `ssh-agent` -because `$SSH_AUTH_SOCK` is also mounted & the env var set. +volume in the docker container. + +In case you want to use a different key, you can specify the path (absolute) to it through `dockerPrivateKey` option: + +```yaml +custom: + pythonRequirements: + dockerizePip: true + dockerSsh: true + dockerPrivateKey: /home/.ssh/id_ed25519 +``` + +If your SSH key is password protected, you can use `ssh-agent` +because `$SSH_AUTH_SOCK` is also mounted & the env var is set. It is important that the host of your private repositories has already been added in your `$HOME/.ssh/known_hosts` file, as the install process will fail otherwise due to host authenticity failure. @@ -213,7 +225,7 @@ the names in `slimPatterns` #### Option not to strip binaries -In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. You can still slim down the package without `*.so` files with +In some cases, stripping binaries leads to problems like "ELF load command address/offset not properly aligned", even when done in the Docker environment. 
You can still slim down the package without `*.so` files with: ```yaml custom: @@ -566,3 +578,4 @@ package: - [@jacksgt](https://github.com/jacksgt) - Fixing pip issues - [@lephuongbg](https://github.com/lephuongbg) - Fixing single function deployment - [@rileypriddle](https://github.com/rileypriddle) - Introducing schema validation for `module` property +- [@martinezpl](https://github.com/martinezpl) - Fixing test issues, adding `dockerPrivateKey` option diff --git a/index.js b/index.js index 7741a7f8..c6577fe0 100644 --- a/index.js +++ b/index.js @@ -15,7 +15,6 @@ const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); const { pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); - BbPromise.promisifyAll(fse); /** @@ -45,6 +44,7 @@ class ServerlessPythonRequirements { : this.serverless.service.provider.runtime || 'python', dockerizePip: false, dockerSsh: false, + dockerPrivateKey: null, dockerImage: null, dockerFile: null, dockerEnv: false, @@ -71,7 +71,10 @@ class ServerlessPythonRequirements { } if ( !options.dockerizePip && - (options.dockerSsh || options.dockerImage || options.dockerFile) + (options.dockerSsh || + options.dockerImage || + options.dockerFile || + options.dockerPrivateKey) ) { if (!this.warningLogged) { if (this.log) { diff --git a/lib/pip.js b/lib/pip.js index 9e7c592e..9f950664 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -275,12 +275,16 @@ async function installRequirements(targetFolder, pluginInstance) { dockerCmd.push('docker', 'run', '--rm', '-v', `${bindPath}:/var/task:z`); if (options.dockerSsh) { + const homePath = require('os').homedir(); + const sshKeyPath = + options.dockerPrivateKey || `${homePath}/.ssh/id_rsa`; + // Mount necessary ssh files to work with private repos dockerCmd.push( '-v', - `${process.env.HOME}/.ssh/id_rsa:/root/.ssh/id_rsa:z`, + `${sshKeyPath}:/root/.ssh/${sshKeyPath.split('/').splice(-1)[0]}:z`, '-v', - `${process.env.HOME}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, + `${homePath}/.ssh/known_hosts:/root/.ssh/known_hosts:z`, '-v', `${process.env.SSH_AUTH_SOCK}:/tmp/ssh_sock:z`, '-e', diff --git a/test.js b/test.js index 11a7cce5..b228805e 100644 --- a/test.js +++ b/test.js @@ -3,6 +3,7 @@ const glob = require('glob-all'); const JSZip = require('jszip'); const sha256File = require('sha256-file'); const tape = require('tape-promise/tape'); + const { chmodSync, removeSync, @@ -23,7 +24,7 @@ const mkCommand = (cmd) => (args, options = {}) => { options['env'] = Object.assign( - { SLS_DEBUG: 't' }, + { SLS_DEBUG: 'true' }, process.env, options['env'] ); @@ -32,11 +33,11 @@ const mkCommand = args, options ); - if (error) { + if (error && !options['noThrow']) { console.error(`Error running: ${quote([cmd, ...args])}`); // eslint-disable-line no-console throw error; } - if (status) { + if (status && !options['noThrow']) { console.error('STDOUT: ', stdout.toString()); // eslint-disable-line no-console console.error('STDERR: ', stderr.toString()); // eslint-disable-line no-console throw new Error( @@ -200,6 +201,32 @@ const canUseDocker = () => { // Skip if running on these platforms. 
const brokenOn = (...platforms) => platforms.indexOf(process.platform) != -1; +test( + 'dockerPrivateKey option correctly resolves docker command', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const stdout = sls(['package'], { + noThrow: true, + env: { + dockerizePip: true, + dockerSsh: true, + dockerPrivateKey: `${__dirname}${sep}tests${sep}base${sep}custom_ssh`, + dockerImage: 'break the build to log the command', + }, + }); + t.true( + stdout.includes( + `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` + ), + 'docker command properly resolved' + ); + t.end(); + }, + { skip: !canUseDocker() || brokenOn('win32') } +); + test( 'default pythonBin can package flask with default options', async (t) => { diff --git a/tests/base/custom_ssh b/tests/base/custom_ssh new file mode 100644 index 00000000..8a7c4203 --- /dev/null +++ b/tests/base/custom_ssh @@ -0,0 +1 @@ +SOME KEY diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 37238158..6526246c 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -10,6 +10,9 @@ custom: pythonRequirements: zip: ${env:zip, self:custom.defaults.zip} dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + dockerSsh: ${env:dockerSsh, self:custom.defaults.dockerSsh} + dockerPrivateKey: ${env:dockerPrivateKey, self:custom.defaults.dockerPrivateKey} + dockerImage: ${env:dockerImage, self:custom.defaults.dockerImage} slim: ${env:slim, self:custom.defaults.slim} slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} @@ -24,6 +27,9 @@ custom: slimPatternsAppendDefaults: true zip: false dockerizePip: false + dockerSsh: false + dockerPrivateKey: '' + dockerImage: '' individually: false useStaticCache: true useDownloadCache: true From ebd12cb14ea352fb08c0957f213bda7dcce800df Mon Sep 17 00:00:00 2001 From: Brandon White Date: Mon, 14 Mar 2022 11:50:12 -0500 Subject: [PATCH 40/90] feat: Support individual packaging with `poetry` (#682) --- .gitignore | 1 - index.js | 2 -- lib/pip.js | 23 ++++--------- lib/poetry.js | 33 ++++++++++--------- test.js | 19 +++++++++++ tests/base/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/module1/handler.py | 5 +++ .../module1/pyproject.toml | 17 ++++++++++ tests/poetry_individually/package.json | 14 ++++++++ tests/poetry_individually/serverless.yml | 32 ++++++++++++++++++ 14 files changed, 116 insertions(+), 40 deletions(-) create mode 100644 tests/poetry_individually/module1/handler.py create mode 100644 tests/poetry_individually/module1/pyproject.toml create mode 100644 tests/poetry_individually/package.json create mode 100644 tests/poetry_individually/serverless.yml diff --git a/.gitignore b/.gitignore index ab0317f3..3707ff1e 100644 --- a/.gitignore +++ b/.gitignore @@ -59,7 +59,6 @@ dist/ downloads/ eggs/ .eggs/ -lib/ lib64/ parts/ sdist/ diff --git a/index.js b/index.js index c6577fe0..ebfc4017 100644 --- a/index.js +++ b/index.js @@ -13,7 +13,6 @@ const { injectAllRequirements } = require('./lib/inject'); const { layerRequirements } = require('./lib/layer'); const { installAllRequirements } = require('./lib/pip'); const { pipfileToRequirements } = require('./lib/pipenv'); -const { 
pyprojectTomlToRequirements } = require('./lib/poetry'); const { cleanup, cleanupCache } = require('./lib/clean'); BbPromise.promisifyAll(fse); @@ -203,7 +202,6 @@ class ServerlessPythonRequirements { } return BbPromise.bind(this) .then(pipfileToRequirements) - .then(pyprojectTomlToRequirements) .then(addVendorHelper) .then(installAllRequirements) .then(packRequirements) diff --git a/lib/pip.js b/lib/pip.js index 9f950664..ccb809c3 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -7,7 +7,7 @@ const spawn = require('child-process-ext/spawn'); const { quote } = require('shell-quote'); const { buildImage, getBindPath, getDockerUid } = require('./docker'); const { getStripCommand, getStripMode, deleteFiles } = require('./slim'); -const { isPoetryProject } = require('./poetry'); +const { isPoetryProject, pyprojectTomlToRequirements } = require('./poetry'); const { checkForAndDeleteMaxCacheVersions, sha256Path, @@ -60,16 +60,9 @@ function generateRequirementsFile( pluginInstance ) { const { serverless, servicePath, options, log } = pluginInstance; - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { - filterRequirementsFile( - path.join(servicePath, '.serverless/requirements.txt'), - targetFile, - pluginInstance - ); + const modulePath = path.dirname(requirementsPath); + if (options.usePoetry && isPoetryProject(modulePath)) { + filterRequirementsFile(targetFile, targetFile, pluginInstance); if (log) { log.info(`Parsed requirements.txt from pyproject.toml in ${targetFile}`); } else { @@ -570,11 +563,7 @@ function copyVendors(vendorFolder, targetFolder, { serverless, log }) { * @param {string} fileName */ function requirementsFileExists(servicePath, options, fileName) { - if ( - options.usePoetry && - fse.existsSync(path.join(servicePath, 'pyproject.toml')) && - isPoetryProject(servicePath) - ) { + if (options.usePoetry && isPoetryProject(path.dirname(fileName))) { return true; } @@ -609,6 +598,8 @@ async function installRequirementsIfNeeded( // Our source requirements, under our service path, and our module path (if specified) const fileName = path.join(servicePath, modulePath, options.fileName); + await pyprojectTomlToRequirements(modulePath, pluginInstance); + // Skip requirements generation, if requirements file doesn't exist if (!requirementsFileExists(servicePath, options, fileName)) { return false; diff --git a/lib/poetry.js b/lib/poetry.js index 23f43dc0..4003c1df 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -8,24 +8,25 @@ const tomlParse = require('@iarna/toml/parse-string'); /** * poetry install */ -async function pyprojectTomlToRequirements() { - if (!this.options.usePoetry || !isPoetryProject(this.servicePath)) { +async function pyprojectTomlToRequirements(modulePath, pluginInstance) { + const { serverless, servicePath, options, log, progress } = pluginInstance; + + const moduleProjectPath = path.join(servicePath, modulePath); + if (!options.usePoetry || !isPoetryProject(moduleProjectPath)) { return; } let generateRequirementsProgress; - if (this.progress && this.log) { - generateRequirementsProgress = this.progress.get( + if (progress && log) { + generateRequirementsProgress = progress.get( 'python-generate-requirements-toml' ); generateRequirementsProgress.update( 'Generating requirements.txt from "pyproject.toml"' ); - this.log.info('Generating requirements.txt from "pyproject.toml"'); + log.info('Generating requirements.txt from "pyproject.toml"'); } else { - this.serverless.cli.log( - 'Generating 
requirements.txt from pyproject.toml...' - ); + serverless.cli.log('Generating requirements.txt from pyproject.toml...'); } try { @@ -42,7 +43,7 @@ async function pyprojectTomlToRequirements() { '--with-credentials', ], { - cwd: this.servicePath, + cwd: moduleProjectPath, } ); } catch (e) { @@ -50,7 +51,7 @@ async function pyprojectTomlToRequirements() { e.stderrBuffer && e.stderrBuffer.toString().includes('command not found') ) { - throw new this.serverless.classes.Error( + throw new serverless.classes.Error( `poetry not found! Install it according to the poetry docs.`, 'PYTHON_REQUIREMENTS_POETRY_NOT_FOUND' ); @@ -59,16 +60,16 @@ async function pyprojectTomlToRequirements() { } const editableFlag = new RegExp(/^-e /gm); - const sourceRequirements = path.join(this.servicePath, 'requirements.txt'); + const sourceRequirements = path.join(moduleProjectPath, 'requirements.txt'); const requirementsContents = fse.readFileSync(sourceRequirements, { encoding: 'utf-8', }); if (requirementsContents.match(editableFlag)) { - if (this.log) { - this.log.info('The generated file contains -e flags, removing them'); + if (log) { + log.info('The generated file contains -e flags, removing them'); } else { - this.serverless.cli.log( + serverless.cli.log( 'The generated file contains -e flags, removing them...' ); } @@ -78,10 +79,10 @@ async function pyprojectTomlToRequirements() { ); } - fse.ensureDirSync(path.join(this.servicePath, '.serverless')); + fse.ensureDirSync(path.join(servicePath, '.serverless')); fse.moveSync( sourceRequirements, - path.join(this.servicePath, '.serverless', 'requirements.txt'), + path.join(servicePath, '.serverless', modulePath, 'requirements.txt'), { overwrite: true } ); } finally { diff --git a/test.js b/test.js index b228805e..e2bbdc2c 100644 --- a/test.js +++ b/test.js @@ -1479,6 +1479,25 @@ test( { skip: !hasPython(3.6) } ); +test( + 'poetry py3.6 can package flask with package individually option', + async (t) => { + process.chdir('tests/poetry_individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); + }, + { skip: !hasPython(3.6) } +); + test( 'py3.6 can package flask with package individually option', async (t) => { diff --git a/tests/base/package.json b/tests/base/package.json index 43ce4eee..38630491 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 43ce4eee..38630491 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 43ce4eee..38630491 100644 --- 
a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 43ce4eee..38630491 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 43ce4eee..38630491 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" } } diff --git a/tests/poetry_individually/module1/handler.py b/tests/poetry_individually/module1/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_individually/module1/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml new file mode 100644 index 00000000..b813968a --- /dev/null +++ b/tests/poetry_individually/module1/pyproject.toml @@ -0,0 +1,17 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.6" +Flask = "^1.0" +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} +boto3 = "^1.9" + +[tool.poetry.dev-dependencies] + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json new file mode 100644 index 00000000..38630491 --- /dev/null +++ b/tests/poetry_individually/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + } +} diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml new file mode 100644 index 00000000..2cb2d160 --- /dev/null +++ b/tests/poetry_individually/serverless.yml @@ -0,0 +1,32 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.6 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + individually: true + +functions: + hello: + handler: handler.hello + 
module: module1 + package: + patterns: + - 'module1/**' From 33f5d5a0dc5fd166086b9d548615e1dfdb0cbd12 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 14 Mar 2022 18:01:56 +0100 Subject: [PATCH 41/90] chore: Bump dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index d9422de5..c40b9cbf 100644 --- a/package.json +++ b/package.json @@ -52,7 +52,7 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", - "github-release-from-cc-changelog": "^2.2.0", + "github-release-from-cc-changelog": "^2.2.1", "lodash": "^4.17.21", "prettier": "^2", "standard-version": "^9.3.2", @@ -65,7 +65,7 @@ "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", "fs-extra": "^9.1.0", - "glob-all": "^3.2.1", + "glob-all": "^3.3.0", "is-wsl": "^2.2.0", "jszip": "^3.7.1", "lodash.get": "^4.4.2", From 3a898e5e707658c76f6063f44938366935b41812 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 14 Mar 2022 18:04:22 +0100 Subject: [PATCH 42/90] chore: Release v5.4.0 --- CHANGELOG.md | 12 ++++++++++++ package.json | 2 +- 2 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index bbe979bf..41041fd3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) + +### Features + +- Support `dockerPrivateKey` to specify path to SSH key ([#674](https://github.com/UnitedIncome/serverless-python-requirements/issues/674)) ([915bcad](https://github.com/UnitedIncome/serverless-python-requirements/commit/915bcadad2f8a3be5434d6e42771bc835271baf8)) ([Marcin Szleszynski](https://github.com/martinezpl)) +- Support individual packaging with `poetry` ([#682](https://github.com/UnitedIncome/serverless-python-requirements/issues/682)) ([ebd12cb](https://github.com/UnitedIncome/serverless-python-requirements/commit/ebd12cb14ea352fb08c0957f213bda7dcce800df)) ([Brandon White](https://github.com/BrandonLWhite)) + +### Maintenance Improvements + +- Log child process command output on error ([#679](https://github.com/UnitedIncome/serverless-python-requirements/issues/679)) ([ff11497](https://github.com/UnitedIncome/serverless-python-requirements/commit/ff11497cbcf42fe7f7d73fb2e8e2642c542dd8d7)) ([Andrei Zhemaituk](https://github.com/zhemaituk)) +- Replace `lodash.set` with `set-value` ([#676](https://github.com/UnitedIncome/serverless-python-requirements/issues/676)) ([3edf0e0](https://github.com/UnitedIncome/serverless-python-requirements/commit/3edf0e0cabeeb11ffadd9dcac6f198f22aee4a16)) ([Marc Hassan](https://github.com/mhassan1)) + ### [5.3.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.0...v5.3.1) (2022-01-28) ### Bug Fixes diff --git a/package.json b/package.json index c40b9cbf..7985cb60 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.3.1", + "version": "5.4.0", "engines": { "node": ">=12.0" }, From 48234a79068c0ff51b9e2944e8dedb8ef88a191d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 11 Apr 2022 14:41:58 +0200 Subject: [PATCH 43/90] test: Pin Flask to ensure support for Python 3.6 --- tests/base/requirements-w-nested.txt | 2 +- tests/individually/module2/requirements.txt | 2 +- 
tests/non_build_pyproject/requirements.txt | 2 +- tests/pipenv/Pipfile | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tests/base/requirements-w-nested.txt b/tests/base/requirements-w-nested.txt index 4d73c837..b09aa52a 100644 --- a/tests/base/requirements-w-nested.txt +++ b/tests/base/requirements-w-nested.txt @@ -1,3 +1,3 @@ -flask +flask==2.0.3 bottle -r requirements-common.txt diff --git a/tests/individually/module2/requirements.txt b/tests/individually/module2/requirements.txt index 7e106024..c09d0264 100644 --- a/tests/individually/module2/requirements.txt +++ b/tests/individually/module2/requirements.txt @@ -1 +1 @@ -flask +flask==2.0.3 diff --git a/tests/non_build_pyproject/requirements.txt b/tests/non_build_pyproject/requirements.txt index aa55d989..09764fc3 100644 --- a/tests/non_build_pyproject/requirements.txt +++ b/tests/non_build_pyproject/requirements.txt @@ -1,2 +1,2 @@ -flask +flask==2.0.3 boto3 diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 0d65eb75..6770a12a 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -3,7 +3,7 @@ url = "https://pypi.python.org/simple" verify_ssl = true [packages] -Flask = "*" +Flask = "==2.0.3" bottle = "*" boto3 = "*" From a4cd36b1145b3cb45c44eaaff0653461472e9a3c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 20 Jan 2022 10:42:16 +0100 Subject: [PATCH 44/90] docs: Remove reference to `v1` from docs --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 63b1a32a..7c09a4de 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![npm](https://img.shields.io/npm/v/serverless-python-requirements.svg)](https://www.npmjs.com/package/serverless-python-requirements) [![code style: prettier](https://img.shields.io/badge/code_style-prettier-ff69b4.svg)](https://github.com/prettier/prettier) -A Serverless v1.x plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. +A Serverless Framework plugin to automatically bundle dependencies from `requirements.txt` and make them available in your `PYTHONPATH`. 
--- From 29a9f5735d941ed8b4b85c0a409b758b02e2d607 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 20:11:02 +0200 Subject: [PATCH 45/90] ci: Run tests against python3.7 instead of python3.6 --- .github/workflows/integrate.yml | 6 +- .github/workflows/validate.yml | 6 +- example/serverless.yml | 2 +- example_native_deps/serverless.yml | 2 +- test.js | 198 +++++++++++----------- tests/base/serverless.yml | 2 +- tests/individually/serverless.yml | 2 +- tests/non_build_pyproject/serverless.yml | 2 +- tests/non_poetry_pyproject/serverless.yml | 2 +- tests/pipenv/serverless.yml | 2 +- tests/poetry/serverless.yml | 2 +- tests/poetry_individually/serverless.yml | 2 +- 12 files changed, 114 insertions(+), 114 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 0d77acfc..cca8b6be 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -15,7 +15,7 @@ jobs: runs-on: windows-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -66,7 +66,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -117,7 +117,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index b4c245f5..905bee55 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -93,7 +93,7 @@ jobs: runs-on: windows-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -146,7 +146,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [2.7, 3.6] + python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/example/serverless.yml b/example/serverless.yml index 9b58ead1..349cdcb8 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml index 0f4e632a..4deed44a 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/test.js b/test.js index e2bbdc2c..0ecd361d 100644 --- a/test.js +++ b/test.js @@ -239,11 +239,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 packages have the same hash', + 'py3.7 packages have the same hash', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -258,11 +258,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with default options', + 'py3.7 can package flask with default options', async (t) => { process.chdir('tests/base'); const path 
= npm(['pack', '../..']); @@ -277,7 +277,7 @@ test( ); test( - 'py3.6 can package flask with hashes', + 'py3.7 can package flask with hashes', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -296,7 +296,7 @@ test( ); test( - 'py3.6 can package flask with nested', + 'py3.7 can package flask with nested', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -316,7 +316,7 @@ test( ); test( - 'py3.6 can package flask with zip option', + 'py3.7 can package flask with zip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -341,7 +341,7 @@ test( ); test( - 'py3.6 can package flask with slim option', + 'py3.7 can package flask with slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -365,7 +365,7 @@ test( ); test( - 'py3.6 can package flask with slim & slimPatterns options', + 'py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -386,11 +386,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "py3.6 doesn't package bottle with noDeploy option", + "py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -412,7 +412,7 @@ test( ); test( - 'py3.6 can package boto3 with editable', + 'py3.7 can package boto3 with editable', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -435,7 +435,7 @@ test( ); test( - 'py3.6 can package flask with dockerizePip option', + 'py3.7 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -446,11 +446,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip option', + 'py3.7 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -470,11 +470,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -495,11 +495,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & dockerizePip option', + 'py3.7 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -527,11 +527,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can package flask with zip & slim & dockerizePip option', + 'py3.7 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -561,7 +561,7 @@ test( ); t.end(); }, - { 
skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( @@ -856,7 +856,7 @@ test( ); test( - 'pipenv py3.6 can package flask with default options', + 'pipenv py3.7 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -871,11 +871,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with slim option', + 'pipenv py3.7 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -895,11 +895,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with slim & slimPatterns options', + 'pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/pipenv'); @@ -921,11 +921,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'pipenv py3.6 can package flask with zip option', + 'pipenv py3.7 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -946,11 +946,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "pipenv py3.6 doesn't package bottle with noDeploy option", + "pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); @@ -968,7 +968,7 @@ test( t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -983,7 +983,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -997,11 +997,11 @@ test( t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with default options', + 'poetry py3.7 can package flask with default options', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1013,11 +1013,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim option', + 'poetry py3.7 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1037,11 +1037,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim & slimPatterns options', + 'poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/poetry'); @@ -1063,11 +1063,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with zip option', + 'poetry py3.7 can package flask with zip option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1092,7 +1092,7 @@ test( ); test( - "poetry py3.6 doesn't package bottle with noDeploy option", + "poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); @@ -1110,11 +1110,11 @@ test( 
t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with zip option and no explicit include', + 'py3.7 can package flask with zip option and no explicit include', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1137,11 +1137,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package lambda-decorators using vendor option', + 'py3.7 can package lambda-decorators using vendor option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1156,7 +1156,7 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -1204,11 +1204,11 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.6) } + { skip: process.platform === 'win32' || !hasPython(3.7) } ); test( - 'py3.6 can package flask in a project with a space in it', + 'py3.7 can package flask in a project with a space in it', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -1220,11 +1220,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask in a project with a space in it with docker', + 'py3.7 can package flask in a project with a space in it with docker', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -1236,11 +1236,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 supports custom file name with fileName option', + 'py3.7 supports custom file name with fileName option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1262,11 +1262,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - "py3.6 doesn't package bottle with zip option", + "py3.7 doesn't package bottle with zip option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1309,7 +1309,7 @@ test( ); test( - 'py3.6 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1331,11 +1331,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1361,7 +1361,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( @@ -1426,7 +1426,7 @@ test( ); test( - 'pipenv py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + 'pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', 
async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1449,11 +1449,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', + 'poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -1476,11 +1476,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'poetry py3.6 can package flask with package individually option', + 'poetry py3.7 can package flask with package individually option', async (t) => { process.chdir('tests/poetry_individually'); const path = npm(['pack', '../..']); @@ -1495,11 +1495,11 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with package individually option', + 'py3.7 can package flask with package individually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1581,11 +1581,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package flask with package individually & slim option', + 'py3.7 can package flask with package individually & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1667,7 +1667,7 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -1861,7 +1861,7 @@ test( ); test( - 'py3.6 can package only requirements of module', + 'py3.7 can package only requirements of module', async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); @@ -1917,11 +1917,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 can package lambda-decorators using vendor and invidiually option', + 'py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1998,7 +1998,7 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( @@ -2037,7 +2037,7 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.6) } + { skip: process.platform === 'win32' || !hasPython(3.7) } ); test( @@ -2076,11 +2076,11 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.6) } + { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.7) } ); test( - 'py3.6 uses download cache by default option', + 'py3.7 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2093,11 +2093,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses download cache by default', + 'py3.7 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2109,11 +2109,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses download cache with dockerizePip option', + 'py3.7 uses download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2126,11 +2126,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() 
|| !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses download cache with dockerizePip by default option', + 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2144,11 +2144,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses static and download cache', + 'py3.7 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2169,11 +2169,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static and download cache with dockerizePip option', + 'py3.7 uses static and download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2194,11 +2194,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses static cache', + 'py3.7 uses static cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2220,7 +2220,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) + // py3.7 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -2234,11 +2234,11 @@ test( t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static cache with cacheLocation option', + 'py3.7 uses static cache with cacheLocation option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2261,11 +2261,11 @@ test( ); t.end(); }, - { skip: !hasPython(3.6) } + { skip: !hasPython(3.7) } ); test( - 'py3.6 uses static cache with dockerizePip & slim option', + 'py3.7 uses static cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2287,7 +2287,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.6 checking that static cache actually pulls from cache (by poisoning it) + // py3.7 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -2306,11 +2306,11 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 uses download cache with dockerizePip & slim option', + 'py3.7 uses download cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2332,11 +2332,11 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.6) || brokenOn('win32') } + { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } ); test( - 'py3.6 can ignore functions defined with `image`', + 'py3.7 can ignore functions defined with `image`', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -2365,5 +2365,5 @@ test( t.end(); }, - { skip: 
!hasPython(3.6) } + { skip: !hasPython(3.7) } ); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index 6526246c..ef48e901 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: ${env:runtime, 'python3.6'} + runtime: ${env:runtime, 'python3.7'} plugins: - serverless-python-requirements diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index a83ac7e0..d73d613a 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.6 + runtime: python3.7 package: individually: true diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index 02e5a1f3..b0436e61 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 3d872a87..2b16790c 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index 4b343bfc..315f6741 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 4b343bfc..315f6741 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml index 2cb2d160..527a2846 100644 --- a/tests/poetry_individually/serverless.yml +++ b/tests/poetry_individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.6 + runtime: python3.7 plugins: - serverless-python-requirements From 332096484b0bf554ae60619a34a068d0bb36c5f5 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 20:40:35 +0200 Subject: [PATCH 46/90] test: Freeze `pipenv` version to `2021.11.5` --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index cca8b6be..211a75cf 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -48,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -99,7 +99,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -150,7 +150,7 @@ jobs: run: python 
-m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 905bee55..0efc0ea7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -61,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -128,7 +128,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 @@ -181,7 +181,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless run: npm install -g serverless@2 From 7c6e4855c1afeb9fa55170c8f8df4f62e42867bc Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 28 Sep 2022 20:59:44 +0200 Subject: [PATCH 47/90] test: Remove tests against python2.7 --- .github/workflows/integrate.yml | 21 +- .github/workflows/validate.yml | 21 +- test.js | 2759 ++++++++++++------------------- 3 files changed, 1025 insertions(+), 1776 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 211a75cf..f4cfe708 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -13,9 +13,6 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -30,10 +27,10 @@ jobs: key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -64,9 +61,6 @@ jobs: linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -81,10 +75,10 @@ jobs: key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -115,9 +109,6 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -132,10 +123,10 @@ jobs: key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ 
matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 0efc0ea7..dd68b2f6 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -13,9 +13,6 @@ jobs: linuxNode14: name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -43,10 +40,10 @@ jobs: npm-v14-${{ runner.os }}-${{ github.ref }}- npm-v14-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -91,9 +88,6 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -110,10 +104,10 @@ jobs: npm-v14-${{ runner.os }}-${{ github.ref }}- npm-v14-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 @@ -144,9 +138,6 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest - strategy: - matrix: - python-version: [2.7, 3.7] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -163,10 +154,10 @@ jobs: npm-v12-${{ runner.os }}-${{ github.ref }}- npm-v12-${{ runner.os }}-refs/heads/master- - - name: Set up Python ${{ matrix.python-version }} + - name: Set up Python 3.7 uses: actions/setup-python@v2 with: - python-version: ${{ matrix.python-version }} + python-version: 3.7 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/test.js b/test.js index 0ecd361d..c20817a5 100644 --- a/test.js +++ b/test.js @@ -164,10 +164,6 @@ const getPythonBin = (version) => { return bin; }; -const hasPython = (version) => { - return Boolean(availablePythons[String(version)]); -}; - const listZipFiles = async function (filename) { const file = await readFile(filename); const zip = await new JSZip().loadAsync(file); @@ -227,54 +223,42 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'default pythonBin can package flask with default options', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 packages have the same hash', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const fileHash = sha256File('.serverless/sls-py-req-test.zip'); - sls(['package'], { env: {} }); - t.equal( - sha256File('.serverless/sls-py-req-test.zip'), - fileHash, - 'packages have the same hash' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with default options', - async (t) => { - 
process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); +test('default pythonBin can package flask with default options', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 packages have the same hash', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const fileHash = sha256File('.serverless/sls-py-req-test.zip'); + sls(['package'], { env: {} }); + t.equal( + sha256File('.serverless/sls-py-req-test.zip'), + fileHash, + 'packages have the same hash' + ); + t.end(); +}); + +test('py3.7 can package flask with default options', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); test( 'py3.7 can package flask with hashes', @@ -292,147 +276,119 @@ test( t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.end(); }, - { skip: !hasPython(3) || brokenOn('win32') } -); - -test( - 'py3.7 can package flask with nested', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - fileName: 'requirements-w-nested.txt', - pythonBin: getPythonBin(3), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - 
zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim & slimPatterns options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: { pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3) } + { skip: brokenOn('win32') } ); -test( - 'py3.7 can package boto3 with editable', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - fileName: 'requirements-w-editable.txt', - pythonBin: getPythonBin(3), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`botocore${sep}__init__.py`), - 'botocore is packaged' - ); - t.end(); - }, - { skip: !hasPython(3) } -); +test('py3.7 can package flask with nested', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-nested.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await 
listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test("py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.7 can package boto3 with editable', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + fileName: 'requirements-w-editable.txt', + pythonBin: getPythonBin(3), + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`botocore${sep}__init__.py`), + 'botocore is packaged' + ); + t.end(); +}); test( 'py3.7 can package flask with dockerizePip option', @@ -446,7 +402,7 @@ test( t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -470,7 +426,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -495,7 +451,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -527,7 +483,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -561,101 +517,415 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('pipenv py3.7 can package flask with default options', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is 
packaged'); + t.false( + zipfiles.includes(`pytest${sep}__init__.py`), + 'dev-package pytest is NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/pipenv'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('pipenv py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/pipenv'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('non build pyproject.toml uses requirements.txt', async (t) => { + process.chdir('tests/non_build_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { + process.chdir('tests/non_poetry_pyproject'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); + t.end(); +}); + +test('poetry py3.7 can package flask with default options', async (t) => { + 
process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 can package flask with slim option', async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.true( + zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > 0, + '__main__.py files are packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { + process.chdir('tests/poetry'); + + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { slim: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with zip option', async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { + process.chdir('tests/poetry'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.end(); +}); + +test('py3.7 can package flask with zip option and no explicit include', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); + perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); + sls(['package'], { env: { zip: 'true' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + 
zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.end(); +}); + +test('py3.7 can package lambda-decorators using vendor option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { vendor: './vendor' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.end(); +}); + test( - 'py2.7 can package flask with default options', + "Don't nuke execute perms", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); + const perm = '755'; + npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, - }); + perl([ + '-p', + '-i.bak', + '-e', + 's/(handler.py.*$)/$1\n - foobar/', + 'serverless.yml', + ]); + writeFileSync(`foobar`, ''); + chmodSync(`foobar`, perm); + sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(2) } -); + t.true( + zipfiles.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged' + ); + t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); -test( - 'py2.7 can package flask with slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', slim: 'true', pythonBin: getPythonBin(2) }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' + const zipfiles_with_metadata = await listZipFilesWithMetaData( + '.serverless/sls-py-req-test.zip' ); t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' + zipfiles_with_metadata['foobar'].unixPermissions + .toString(8) + .slice(3, 6) === perm, + 'foobar has retained its executable file permissions' + ); + + const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; + t.true( + zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, + 'bin/flask has retained its executable file permissions' ); + t.end(); }, - { skip: !hasPython(2) } + { skip: process.platform === 'win32' } ); +test('py3.7 can package flask in a project with a space in it', async (t) => { + copySync('tests/base', 'tests/base with a space'); + process.chdir('tests/base with a space'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + test( - 'py2.7 can package flask with zip option', + 'py3.7 can package flask in a project with a space in it with docker', async (t) => { - process.chdir('tests/base'); + copySync('tests/base', 'tests/base with a space'); + 
process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); npm(['i', path]); - sls(['package'], { - env: { runtime: 'python2.7', zip: 'true', pythonBin: getPythonBin(2) }, - }); + sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }, - { skip: !hasPython(2) } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('py3.7 supports custom file name with fileName option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + writeFileSync('puck', 'requests'); + npm(['i', path]); + sls(['package'], { env: { fileName: 'puck' } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`requests${sep}__init__.py`), + 'requests is packaged' + ); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is NOT packaged'); + t.end(); +}); + +test("py3.7 doesn't package bottle with zip option", async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + perl([ + '-p', + '-i.bak', + '-e', + 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', + 'serverless.yml', + ]); + sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/sls-py-req-test.zip' + ); + t.true( + zipfiles.includes('.requirements.zip'), + 'zipped requirements are packaged' + ); + t.true(zipfiles.includes(`unzip_requirements.py`), 'unzip util is packaged'); + t.false( + zipfiles.includes(`flask${sep}__init__.py`), + "flask isn't packaged on its own" + ); + t.true( + zippedReqs.includes(`flask/__init__.py`), + 'flask is packaged in the .requirements.zip file' + ); + t.false( + zippedReqs.includes(`bottle.py`), + 'bottle is NOT packaged in the .requirements.zip file' + ); + t.end(); +}); + +test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { + process.chdir('tests/base'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = 
npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { - runtime: 'python2.7', dockerizePip: 'true', slim: 'true', - pythonBin: getPythonBin(2), + slimPatternsAppendDefaults: 'false', }, }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' ); t.deepEqual( zipfiles.filter((filename) => filename.endsWith('__main__.py')), @@ -664,1347 +934,371 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); +test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/pipenv'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { + process.chdir('tests/poetry'); + copySync('_slimPatterns.yml', 'slimPatterns.yml'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true( + zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, + 'pyc files are packaged' + ); + t.deepEqual( + zipfiles.filter((filename) => filename.endsWith('__main__.py')), + [], + '__main__.py files are NOT packaged' + ); + t.end(); +}); + +test('poetry py3.7 can package flask with package individually option', async (t) => { + process.chdir('tests/poetry_individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles( + '.serverless/module1-sls-py-req-test-dev-hello.zip' + ); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('py3.7 can package flask with package individually option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.false( + zipfiles_hello.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello' + ); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + t.true( + 
zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.false( + zipfiles_hello2.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.false( + zipfiles_hello3.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello3' + ); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.false( + zipfiles_hello4.includes(`fn2${sep}__init__.py`), + 'fn2 is NOT packaged in function hello4' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + + t.end(); +}); + +test('py3.7 can package flask with package individually & slim option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', slim: 'true' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged in function hello' + ); + t.deepEqual( + zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged in function hello2' + ); + t.deepEqual( + zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), + 'handler.py is packaged in function hello3' + ); + t.deepEqual( + zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + 
t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.deepEqual( + zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), + [], + 'no pyc files packaged in function hello4' + ); + + t.end(); +}); + +test('py3.7 can package only requirements of module', async (t) => { + process.chdir('tests/individually'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles_hello = await listZipFiles( + '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' + ); + t.true( + zipfiles_hello.includes('handler1.py'), + 'handler1.py is packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 'handler2.py is NOT packaged at root level in function hello1' + ); + t.true( + zipfiles_hello.includes(`pyaml${sep}__init__.py`), + 'pyaml is packaged in function hello1' + ); + t.true( + zipfiles_hello.includes(`boto3${sep}__init__.py`), + 'boto3 is packaged in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes('handler1.py'), + 'handler1.py is NOT packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + + t.end(); +}); + +test('py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); + const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); + t.true( + zipfiles_hello.includes('handler.py'), + 'handler.py is packaged at root level in function hello' + ); + t.true( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello' + ); + t.true( + zipfiles_hello.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello' + ); + t.false( + zipfiles_hello.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello' + ); + + const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); + t.true( + zipfiles_hello2.includes('handler.py'), + 'handler.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + t.true( + zipfiles_hello2.includes(`lambda_decorators.py`), + 'lambda_decorators.py is packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello2' + ); + + const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); + t.true( + zipfiles_hello3.includes('handler.py'), 
+ 'handler.py is packaged at root level in function hello3' + ); + t.false( + zipfiles_hello3.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`lambda_decorators.py`), + 'lambda_decorators.py is NOT packaged in function hello3' + ); + t.false( + zipfiles_hello3.includes(`dataclasses.py`), + 'dataclasses is NOT packaged in function hello3' + ); + + const zipfiles_hello4 = await listZipFiles( + '.serverless/fn2-sls-py-req-test-dev-hello4.zip' + ); + t.true( + zipfiles_hello4.includes('fn2_handler.py'), + 'fn2_handler is packaged in the zip-root in function hello4' + ); + t.true( + zipfiles_hello4.includes(`dataclasses.py`), + 'dataclasses is packaged in function hello4' + ); + t.false( + zipfiles_hello4.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello4' + ); + t.end(); +}); + test( - "py2.7 doesn't package bottle with noDeploy option", + "Don't nuke execute perms when using individually", async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { - env: { runtime: 'python2.7', pythonBin: getPythonBin(2) }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(2) } -); - -test( - 'py2.7 can package flask with zip & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - zip: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with zip & slim & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - zip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask 
with dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are NOT packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & dockerizePip & slimPatterns options', - async (t) => { - process.chdir('tests/base'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - dockerizePip: 'true', - slim: 'true', - pythonBin: getPythonBin(2), - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - '*.pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2) || brokenOn('win32') } -); - -test( - 'pipenv py3.7 can package flask with default options', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.false( - zipfiles.includes(`pytest${sep}__init__.py`), - 'dev-package pytest is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with slim & slimPatterns options', - 
async (t) => { - process.chdir('tests/pipenv'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'pipenv py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "pipenv py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'non build pyproject.toml uses requirements.txt', - async (t) => { - process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'non poetry pyproject.toml without requirements.txt packages handler only', - async (t) => { - process.chdir('tests/non_poetry_pyproject'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`handler.py`), 'handler is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with default options', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim option', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], 
{ env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.true( - zipfiles.filter((filename) => filename.endsWith('__main__.py')).length > - 0, - '__main__.py files are packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim & slimPatterns options', - async (t) => { - process.chdir('tests/poetry'); - - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { slim: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with zip option', - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - "poetry py3.7 doesn't package bottle with noDeploy option", - async (t) => { - process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with zip option and no explicit include', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); - perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); - sls(['package'], { env: { zip: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package lambda-decorators using vendor option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { vendor: './vendor' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - 
t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "Don't nuke execute perms", - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - const perm = '755'; - - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(handler.py.*$)/$1\n - foobar/', - 'serverless.yml', - ]); - writeFileSync(`foobar`, ''); - chmodSync(`foobar`, perm); - sls(['package'], { env: { vendor: './vendor' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.true( - zipfiles.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged' - ); - t.true(zipfiles.includes(`foobar`), 'foobar is packaged'); - - const zipfiles_with_metadata = await listZipFilesWithMetaData( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles_with_metadata['foobar'].unixPermissions - .toString(8) - .slice(3, 6) === perm, - 'foobar has retained its executable file permissions' - ); - - const flaskPerm = statSync('.serverless/requirements/bin/flask').mode; - t.true( - zipfiles_with_metadata['bin/flask'].unixPermissions === flaskPerm, - 'bin/flask has retained its executable file permissions' - ); - - t.end(); - }, - { skip: process.platform === 'win32' || !hasPython(3.7) } -); - -test( - 'py3.7 can package flask in a project with a space in it', - async (t) => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask in a project with a space in it with docker', - async (t) => { - copySync('tests/base', 'tests/base with a space'); - process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { dockerizePip: 'true' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py3.7 supports custom file name with fileName option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - writeFileSync('puck', 'requests'); - npm(['i', path]); - sls(['package'], { env: { fileName: 'puck' } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes(`requests${sep}__init__.py`), - 'requests is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged' - ); - t.false( - zipfiles.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "py3.7 doesn't package bottle with zip option", - async (t) => { - 
process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - perl([ - '-p', - '-i.bak', - '-e', - 's/(pythonRequirements:$)/\\1\\n noDeploy: [bottle]/', - 'serverless.yml', - ]); - sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - const zippedReqs = await listRequirementsZipFiles( - '.serverless/sls-py-req-test.zip' - ); - t.true( - zipfiles.includes('.requirements.zip'), - 'zipped requirements are packaged' - ); - t.true( - zipfiles.includes(`unzip_requirements.py`), - 'unzip util is packaged' - ); - t.false( - zipfiles.includes(`flask${sep}__init__.py`), - "flask isn't packaged on its own" - ); - t.true( - zippedReqs.includes(`flask/__init__.py`), - 'flask is packaged in the .requirements.zip file' - ); - t.false( - zippedReqs.includes(`bottle.py`), - 'bottle is NOT packaged in the .requirements.zip file' - ); - t.end(); - }, - { skip: !hasPython(3) } -); - -test( - 'py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - dockerizePip: 'true', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py2.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - runtime: 'python2.7', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'py2.7 can 
package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', - async (t) => { - process.chdir('tests/base'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { - dockerizePip: 'true', - runtime: 'python2.7', - slim: 'true', - slimPatternsAppendDefaults: 'false', - }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !canUseDocker() || !hasPython(2.7) || brokenOn('win32') } -); - -test( - 'pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async (t) => { - process.chdir('tests/pipenv'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', - async (t) => { - process.chdir('tests/poetry'); - copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { - env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, - }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true( - zipfiles.filter((filename) => filename.endsWith('.pyc')).length >= 1, - 'pyc files are packaged' - ); - t.deepEqual( - zipfiles.filter((filename) => filename.endsWith('__main__.py')), - [], - '__main__.py files are NOT packaged' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'poetry py3.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/poetry_individually'); - const path = npm(['pack', '../..']); - npm(['i', path]); - - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles( - '.serverless/module1-sls-py-req-test-dev-hello.zip' - ); - t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); - t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); - t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.false( - zipfiles_hello.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello' - ); - t.true( - 
zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.false( - zipfiles_hello2.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.false( - zipfiles_hello3.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello3' - ); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.false( - zipfiles_hello4.includes(`fn2${sep}__init__.py`), - 'fn2 is NOT packaged in function hello4' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package flask with package individually & slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', slim: 'true' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged 
in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.deepEqual( - zipfiles_hello4.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py2.7 can package flask with package individually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py2.7 can package flask with package individually & slim option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { - env: { individually: 'true', runtime: 'python2.7', slim: 'true' }, - }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged in function hello' - ); - t.deepEqual( - zipfiles_hello.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const 
zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged in function hello2' - ); - t.deepEqual( - zipfiles_hello2.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged in function hello3' - ); - t.deepEqual( - zipfiles_hello3.filter((filename) => filename.endsWith('.pyc')), - [], - 'no pyc files packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py2.7 can ignore functions defined with `image`', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', runtime: 'python2.7' } }); - t.true( - pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' - ); - - t.end(); - }, - { skip: !hasPython(2.7) } -); - -test( - 'py3.7 can package only requirements of module', - async (t) => { - process.chdir('tests/individually'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles_hello = await listZipFiles( - '.serverless/module1-sls-py-req-test-indiv-dev-hello1.zip' - ); - t.true( - zipfiles_hello.includes('handler1.py'), - 'handler1.py is packaged at root level in function hello1' - ); - t.false( - zipfiles_hello.includes('handler2.py'), - 'handler2.py is NOT packaged at root level in function hello1' - ); - t.true( - zipfiles_hello.includes(`pyaml${sep}__init__.py`), - 'pyaml is packaged in function hello1' - ); - t.true( - zipfiles_hello.includes(`boto3${sep}__init__.py`), - 'boto3 is packaged in function hello1' - ); - t.false( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello1' - ); - - const zipfiles_hello2 = await listZipFiles( - '.serverless/module2-sls-py-req-test-indiv-dev-hello2.zip' - ); - t.true( - zipfiles_hello2.includes('handler2.py'), - 'handler2.py is packaged at root level in function hello2' - ); - t.false( - zipfiles_hello2.includes('handler1.py'), - 'handler1.py is NOT packaged at root level in 
function hello2' - ); - t.false( - zipfiles_hello2.includes(`pyaml${sep}__init__.py`), - 'pyaml is NOT packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`boto3${sep}__init__.py`), - 'boto3 is NOT packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - 'py3.7 can package lambda-decorators using vendor and invidiually option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); - const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); - t.true( - zipfiles_hello.includes('handler.py'), - 'handler.py is packaged at root level in function hello' - ); - t.true( - zipfiles_hello.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello' - ); - t.true( - zipfiles_hello.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello' - ); - t.false( - zipfiles_hello.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello' - ); - - const zipfiles_hello2 = await listZipFiles('.serverless/hello2.zip'); - t.true( - zipfiles_hello2.includes('handler.py'), - 'handler.py is packaged at root level in function hello2' - ); - t.true( - zipfiles_hello2.includes(`flask${sep}__init__.py`), - 'flask is packaged in function hello2' - ); - t.true( - zipfiles_hello2.includes(`lambda_decorators.py`), - 'lambda_decorators.py is packaged in function hello2' - ); - t.false( - zipfiles_hello2.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello2' - ); - - const zipfiles_hello3 = await listZipFiles('.serverless/hello3.zip'); - t.true( - zipfiles_hello3.includes('handler.py'), - 'handler.py is packaged at root level in function hello3' - ); - t.false( - zipfiles_hello3.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`lambda_decorators.py`), - 'lambda_decorators.py is NOT packaged in function hello3' - ); - t.false( - zipfiles_hello3.includes(`dataclasses.py`), - 'dataclasses is NOT packaged in function hello3' - ); - - const zipfiles_hello4 = await listZipFiles( - '.serverless/fn2-sls-py-req-test-dev-hello4.zip' - ); - t.true( - zipfiles_hello4.includes('fn2_handler.py'), - 'fn2_handler is packaged in the zip-root in function hello4' - ); - t.true( - zipfiles_hello4.includes(`dataclasses.py`), - 'dataclasses is packaged in function hello4' - ); - t.false( - zipfiles_hello4.includes(`flask${sep}__init__.py`), - 'flask is NOT packaged in function hello4' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); - -test( - "Don't nuke execute perms when using individually", - async (t) => { - process.chdir('tests/individually'); + process.chdir('tests/individually'); const path = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, ''); @@ -2037,7 +1331,7 @@ test( t.end(); }, - { skip: process.platform === 'win32' || !hasPython(3.7) } + { skip: process.platform === 'win32' } ); test( @@ -2076,41 +1370,33 @@ test( t.end(); }, - { skip: !canUseDocker() || process.platform === 'win32' || !hasPython(3.7) } + { skip: !canUseDocker() || process.platform === 'win32' } ); -test( - 'py3.7 uses download cache by default option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - 
sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses download cache by default option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); +}); -test( - 'py3.7 uses download cache by default', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); - t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses download cache by default', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); +}); test( 'py3.7 uses download cache with dockerizePip option', @@ -2126,7 +1412,7 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ -2144,33 +1430,29 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 uses static and download cache', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses static and download cache', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); +}); test( 'py3.7 uses static and download cache with dockerizePip option', @@ -2194,75 +1476,67 @@ test( ); t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } -); - -test( - 'py3.7 uses static cache', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 
'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); - - // py3.7 checking that static cache actually pulls from cache (by poisoning it) - writeFileSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, - 'injected new file into static cache folder' - ); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes('injected_file_is_bad_form'), - "static cache is really used when running 'sls package' again" - ); - - t.end(); - }, - { skip: !hasPython(3.7) } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 uses static cache with cacheLocation option', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - const cachepath = '.requirements-cache'; - sls(['package'], { env: { cacheLocation: cachepath } }); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` - ), - '.completed_requirements exists in static-cache' - ); - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 uses static cache', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + + // py3.7 checking that static cache actually pulls from cache (by poisoning it) + writeFileSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, + 'injected new file into static cache folder' + ); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes('injected_file_is_bad_form'), + "static cache is really used when running 'sls package' again" + ); + + t.end(); +}); + +test('py3.7 uses static cache with cacheLocation option', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + const cachepath = '.requirements-cache'; + sls(['package'], { env: { cacheLocation: cachepath } }); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}.completed_requirements` + ), + '.completed_requirements exists in static-cache' + ); + t.end(); +}); test( 'py3.7 uses static cache with dockerizePip & slim option', @@ -2306,7 +1580,7 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); test( @@ 
-2332,38 +1606,31 @@ test( t.end(); }, - { skip: !canUseDocker() || !hasPython(3.7) || brokenOn('win32') } + { skip: !canUseDocker() || brokenOn('win32') } ); -test( - 'py3.7 can ignore functions defined with `image`', - async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { individually: 'true' } }); - t.true( - pathExistsSync('.serverless/hello.zip'), - 'function hello is packaged' - ); - t.true( - pathExistsSync('.serverless/hello2.zip'), - 'function hello2 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello3.zip'), - 'function hello3 is packaged' - ); - t.true( - pathExistsSync('.serverless/hello4.zip'), - 'function hello4 is packaged' - ); - t.false( - pathExistsSync('.serverless/hello5.zip'), - 'function hello5 is not packaged' - ); - - t.end(); - }, - { skip: !hasPython(3.7) } -); +test('py3.7 can ignore functions defined with `image`', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { individually: 'true' } }); + t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged'); + t.true( + pathExistsSync('.serverless/hello2.zip'), + 'function hello2 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello3.zip'), + 'function hello3 is packaged' + ); + t.true( + pathExistsSync('.serverless/hello4.zip'), + 'function hello4 is packaged' + ); + t.false( + pathExistsSync('.serverless/hello5.zip'), + 'function hello5 is not packaged' + ); + + t.end(); +}); From cc146d088d362187641dd5ae3e9d0129a14c60e2 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 25 Sep 2022 21:53:40 +0200 Subject: [PATCH 48/90] refactor: Improve error message for docker failures --- lib/pip.js | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/lib/pip.js b/lib/pip.js index ccb809c3..20340ea2 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -412,6 +412,14 @@ async function installRequirements(targetFolder, pluginInstance) { 'PYTHON_REQUIREMENTS_COMMAND_NOT_FOUND' ); } + + if (cmd === 'docker' && e.stderrBuffer) { + throw new pluginInstance.serverless.classes.Error( + `Running ${cmd} failed with: "${e.stderrBuffer.toString().trim()}"`, + 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' + ); + } + if (log) { log.info(`Stdout: ${e.stdoutBuffer}`); log.info(`Stderr: ${e.stderrBuffer}`); From f0c41835df9af6a39d03cbb3fbc492deeca420bf Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 27 Sep 2022 19:02:37 +0200 Subject: [PATCH 49/90] test: Investigate issue on CI --- lib/pip.js | 4 +++- test.js | 1 + 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/pip.js b/lib/pip.js index 20340ea2..149c0285 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -415,7 +415,9 @@ async function installRequirements(targetFolder, pluginInstance) { if (cmd === 'docker' && e.stderrBuffer) { throw new pluginInstance.serverless.classes.Error( - `Running ${cmd} failed with: "${e.stderrBuffer.toString().trim()}"`, + `Running "${cmd} ${args.join(' ')}" failed with: "${e.stderrBuffer + .toString() + .trim()}"`, 'PYTHON_REQUIREMENTS_DOCKER_COMMAND_FAILED' ); } diff --git a/test.js b/test.js index c20817a5..3b8a242a 100644 --- a/test.js +++ b/test.js @@ -212,6 +212,7 @@ test( dockerImage: 'break the build to log the command', }, }); + console.log('STDOUT', stdout); t.true( stdout.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` From 2ce9d8e4b70fd34ab3628bfd28d8e7be857e7da2 Mon Sep 17 00:00:00 2001 From: 
Giuseppe Lumia Date: Thu, 29 Sep 2022 23:35:05 +0200 Subject: [PATCH 50/90] docs: Add a warning about individual packaging with Poetry/Pipenv --- README.md | 3 +++ 1 file changed, 3 insertions(+) diff --git a/README.md b/README.md index 7c09a4de..6b5bffd6 100644 --- a/README.md +++ b/README.md @@ -360,6 +360,9 @@ custom: ### Per-function requirements +**Note: this feature does not work with Pipenv/Poetry, it requires `requirements.txt` +files for your Python modules.** + If you have different python functions, with different sets of requirements, you can avoid including all the unecessary dependencies of your functions by using the following structure: From 78795be24eb08dc78acd7566778b3960c28b263c Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 30 Sep 2022 23:59:38 +0200 Subject: [PATCH 51/90] fix: Properly recognize individual function (#725) --- index.js | 2 +- test.js | 1 - 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/index.js b/index.js index ebfc4017..4c24bf8b 100644 --- a/index.js +++ b/index.js @@ -109,7 +109,7 @@ class ServerlessPythonRequirements { get targetFuncs() { let inputOpt = this.serverless.processedInput.options; return inputOpt.function - ? [inputOpt.functionObj] + ? [this.serverless.service.functions[inputOpt.function]] : values(this.serverless.service.functions).filter((func) => !func.image); } diff --git a/test.js b/test.js index 3b8a242a..c20817a5 100644 --- a/test.js +++ b/test.js @@ -212,7 +212,6 @@ test( dockerImage: 'break the build to log the command', }, }); - console.log('STDOUT', stdout); t.true( stdout.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` From 6fbdde1123e82a3ddb7d36aa14d23daa4654be86 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Tue, 4 Oct 2022 00:29:27 +0200 Subject: [PATCH 52/90] ci: Run CI tests against Serverless v3 --- .github/workflows/integrate.yml | 15 ++++++++++++--- .github/workflows/validate.yml | 15 ++++++++++++--- 2 files changed, 24 insertions(+), 6 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index f4cfe708..6d5f57ac 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -13,6 +13,9 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -48,7 +51,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -61,6 +64,9 @@ jobs: linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -96,7 +102,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -144,7 +150,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -157,6 +163,9 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest + strategy: + 
matrix: + sls-version: [2, 3] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index dd68b2f6..801b7194 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -13,6 +13,9 @@ jobs: linuxNode14: name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -61,7 +64,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -88,6 +91,9 @@ jobs: windowsNode14: name: '[Windows] Node.js v14: Unit tests' runs-on: windows-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -125,7 +131,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' @@ -138,6 +144,9 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -175,7 +184,7 @@ jobs: run: python -m pip install pipenv==2021.11.5 poetry - name: Install serverless - run: npm install -g serverless@2 + run: npm install -g serverless@${{ matrix.sls-version }} - name: Install dependencies if: steps.cacheNpm.outputs.cache-hit != 'true' From e81d9e1824c135f110b4deccae2c26b0cbb26778 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fran=C3=A7ois-Michel=20L=27Heureux?= Date: Tue, 18 Oct 2022 17:39:04 -0400 Subject: [PATCH 53/90] feat: Introduce `requirePoetryLockFile` flag --- README.md | 9 +++++++++ index.js | 1 + lib/poetry.js | 26 +++++++++++++++++++++----- test.js | 20 ++++++++++++++++++++ tests/poetry/serverless.yml | 1 + 5 files changed, 52 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 6b5bffd6..9563ff51 100644 --- a/README.md +++ b/README.md @@ -131,6 +131,15 @@ custom: usePoetry: false ``` +Be aware that if no `poetry.lock` file is present, a new one will be generated on the fly. To help having predictable builds, +you can set the `requirePoetryLockFile` flag to true to throw an error when `poetry.lock` is missing. 
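For illustration, a minimal `custom` block that opts into the strict behaviour is sketched below. Only the `requirePoetryLockFile` option and the `MISSING_REQUIRED_POETRY_LOCK` error code come from this change; the surrounding keys and values are placeholders.

```yaml
custom:
  pythonRequirements:
    usePoetry: true # Poetry handling stays enabled (this is already the default)
    # Fail packaging with MISSING_REQUIRED_POETRY_LOCK instead of generating
    # a fresh poetry.lock on the fly during the build:
    requirePoetryLockFile: true
```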
+ +```yaml +custom: + pythonRequirements: + requirePoetryLockFile: false +``` + ### Poetry with git dependencies Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter diff --git a/index.js b/index.js index 4c24bf8b..30803971 100644 --- a/index.js +++ b/index.js @@ -57,6 +57,7 @@ class ServerlessPythonRequirements { pipCmdExtraArgs: [], noDeploy: [], vendor: '', + requirePoetryLockFile: false, }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || diff --git a/lib/poetry.js b/lib/poetry.js index 4003c1df..d324784b 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -21,12 +21,28 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) { generateRequirementsProgress = progress.get( 'python-generate-requirements-toml' ); - generateRequirementsProgress.update( - 'Generating requirements.txt from "pyproject.toml"' - ); - log.info('Generating requirements.txt from "pyproject.toml"'); + } + + const emitMsg = (msg) => { + if (generateRequirementsProgress) { + generateRequirementsProgress.update(msg); + log.info(msg); + } else { + serverless.cli.log(msg); + } + }; + + if (fs.existsSync('poetry.lock')) { + emitMsg('Generating requirements.txt from poetry.lock'); } else { - serverless.cli.log('Generating requirements.txt from pyproject.toml...'); + if (options.requirePoetryLockFile) { + throw new serverless.classes.Error( + 'poetry.lock file not found - set requirePoetryLockFile to false to ' + + 'disable this error', + 'MISSING_REQUIRED_POETRY_LOCK' + ); + } + emitMsg('Generating poetry.lock and requirements.txt from pyproject.toml'); } try { diff --git a/test.js b/test.js index c20817a5..27db6884 100644 --- a/test.js +++ b/test.js @@ -1634,3 +1634,23 @@ test('py3.7 can ignore functions defined with `image`', async (t) => { t.end(); }); + +test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { + copySync('tests/poetry', 'tests/base with a space'); + process.chdir('tests/base with a space'); + removeSync('poetry.lock'); + + const path = npm(['pack', '../..']); + npm(['i', path]); + const stdout = sls(['package'], { + env: { requirePoetryLockFile: 'true', slim: 'true' }, + noThrow: true, + }); + t.true( + stdout.includes( + 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' + ), + 'flag works and error is properly reported' + ); + t.end(); +}); diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 315f6741..2d032acd 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -13,6 +13,7 @@ custom: slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} defaults: zip: false slimPatterns: false From 8969fb2aa403f2be14d8fc4fed21f12e4d8b9b47 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 17:10:05 +0200 Subject: [PATCH 54/90] chore: Add issue templates (#735) --- .github/ISSUE_TEMPLATE/bug-report.yml | 68 ++++++++++++++++++++++ .github/ISSUE_TEMPLATE/config.yml | 5 ++ .github/ISSUE_TEMPLATE/feature-request.yml | 21 +++++++ 3 files changed, 94 insertions(+) create mode 100644 .github/ISSUE_TEMPLATE/bug-report.yml create mode 100644 
.github/ISSUE_TEMPLATE/config.yml create mode 100644 .github/ISSUE_TEMPLATE/feature-request.yml diff --git a/.github/ISSUE_TEMPLATE/bug-report.yml b/.github/ISSUE_TEMPLATE/bug-report.yml new file mode 100644 index 00000000..bde39a55 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/bug-report.yml @@ -0,0 +1,68 @@ +name: 🐛 Bug report +description: Create a bug report +body: + - type: checkboxes + attributes: + label: Are you certain it's a bug? + description: If you're uncertain, please report at https://github.com/serverless/serverless-python-requirements/discussions instead + options: + - label: Yes, it looks like a bug + required: true + - type: checkboxes + attributes: + label: Are you using the latest plugin release? + description: Latest version can be checked at https://github.com/serverless/serverless-python-requirements/releases/latest + options: + - label: Yes, I'm using the latest plugin release + required: true + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Issue description + validations: + required: true + - type: textarea + attributes: + label: Service configuration (serverless.yml) content + description: | + Provide COMPLETE content of serverless.yml, ensuring that: + • It consistently reproduces described issue + • It's as minimal as possible + • Ideally with no other plugins involved + • Has sensitive parts masked out + + If not applicable, fill with "N/A" + render: yaml + validations: + required: true + - type: input + attributes: + label: Command name and used flags + description: | + Full command name with used flags (If not applicable, fill with "N/A") + placeholder: serverless [...flags] + validations: + required: true + - type: textarea + attributes: + label: Command output + description: | + COMPLETE command output. + + If not applicable, fill with "N/A" + render: shell + validations: + required: true + - type: textarea + attributes: + label: Environment information + description: '"serverless --version" output + used version of the plugin' + render: shell + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml new file mode 100644 index 00000000..a7f83c6b --- /dev/null +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -0,0 +1,5 @@ +blank_issues_enabled: false +contact_links: + - name: Question + url: https://github.com/serverless/serverless-python-requirements/discussions + about: Please ask and answer questions here diff --git a/.github/ISSUE_TEMPLATE/feature-request.yml b/.github/ISSUE_TEMPLATE/feature-request.yml new file mode 100644 index 00000000..14907ec2 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/feature-request.yml @@ -0,0 +1,21 @@ +name: 🎉 Feature request +description: Suggest an idea +body: + - type: checkboxes + attributes: + label: Is there an existing issue for this? + description: Please search to see if an issue already exists + options: + - label: I have searched existing issues, it hasn't been reported yet + required: true + - type: textarea + attributes: + label: Use case description + description: Describe the use case that needs to be addressed + validations: + required: true + - type: textarea + attributes: + label: Proposed solution (optional) + description: | + e.g. 
propose how the configuration and implementation of the new feature could look From 853da8d39921dc83a23d59fd825b2180814f87ff Mon Sep 17 00:00:00 2001 From: Anders Steiner Date: Sun, 23 Oct 2022 15:49:06 -0500 Subject: [PATCH 55/90] fix: Adapt to support latest `pipenv` version (#718) BREAKING CHANGE: Requires `pipenv` in version `2022-04-08` or higher Co-authored-by: Randy Westergren Co-authored-by: Piotr Grzesik --- .github/workflows/integrate.yml | 6 +++--- .github/workflows/validate.yml | 6 +++--- lib/pipenv.js | 31 ++++++++++++++++++------------- tests/pipenv/Pipfile | 3 ++- 4 files changed, 26 insertions(+), 20 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 6d5f57ac..1f979b0a 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -48,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -99,7 +99,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,7 +147,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 801b7194..31052279 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -61,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -128,7 +128,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -181,7 +181,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==2021.11.5 poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/lib/pipenv.js b/lib/pipenv.js index 5856d47b..11331ee3 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -28,27 +28,32 @@ async function pipfileToRequirements() { } try { - let res; try { - res = await spawn( - 'pipenv', - ['lock', '--requirements', '--keep-outdated'], - { - cwd: this.servicePath, - } - ); + await spawn('pipenv', ['lock', '--keep-outdated'], { + cwd: this.servicePath, + }); } catch (e) { - if ( - e.stderrBuffer && - e.stderrBuffer.toString().includes('command not found') - ) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('must exist to use')) { + // No previous Pipfile.lock, we will try to generate it here + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + } else if 
(stderrBufferContent.includes('command not found')) { throw new this.serverless.classes.Error( `pipenv not found! Install it according to the poetry docs.`, 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' ); + } else { + throw e; } - throw e; } + const res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( path.join(this.servicePath, '.serverless/requirements.txt'), diff --git a/tests/pipenv/Pipfile b/tests/pipenv/Pipfile index 6770a12a..30e51dda 100644 --- a/tests/pipenv/Pipfile +++ b/tests/pipenv/Pipfile @@ -1,6 +1,7 @@ [[source]] -url = "https://pypi.python.org/simple" +url = "https://pypi.org/simple" verify_ssl = true +name = "pypi" [packages] Flask = "==2.0.3" From 4ba3bbeb9296b4844feb476de695f33ee2a30056 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:50:04 +0200 Subject: [PATCH 56/90] feat: Switch to official AWS docker images by default (#724) BREAKING CHANGE: Changes default `dockerImage` used for building dependencies --- index.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/index.js b/index.js index 30803971..5c568cc4 100644 --- a/index.js +++ b/index.js @@ -95,7 +95,9 @@ class ServerlessPythonRequirements { ); } else if (!options.dockerFile) { // If no dockerFile is provided, use default image - const defaultImage = `lambci/lambda:build-${this.serverless.service.provider.runtime}`; + const architecture = + this.serverless.service.provider.architecture || 'x86_64'; + const defaultImage = `public.ecr.aws/sam/build-${this.serverless.service.provider.runtime}:latest-${architecture}`; options.dockerImage = options.dockerImage || defaultImage; } if (options.layer) { From 8f12c58d63c9e2572f48f1441bc8951863c8d8a6 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:55:30 +0200 Subject: [PATCH 57/90] chore: Bump dependencies --- package.json | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/package.json b/package.json index 7985cb60..ae0f87e6 100644 --- a/package.json +++ b/package.json @@ -52,10 +52,10 @@ "cross-spawn": "*", "eslint": "^7.32.0", "git-list-updated": "^1.2.1", - "github-release-from-cc-changelog": "^2.2.1", + "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", "prettier": "^2", - "standard-version": "^9.3.2", + "standard-version": "^9.5.0", "tape": "*", "tape-promise": "*" }, @@ -67,14 +67,14 @@ "fs-extra": "^9.1.0", "glob-all": "^3.3.0", "is-wsl": "^2.2.0", - "jszip": "^3.7.1", + "jszip": "^3.10.1", "lodash.get": "^4.4.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", "set-value": "^4.1.0", "sha256-file": "1.0.0", - "shell-quote": "^1.7.3" + "shell-quote": "^1.7.4" }, "peerDependencies": { "serverless": "^2.32 || 3" From a5c6a819884ce54bb587403d52458675e4bb25f5 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:56:52 +0200 Subject: [PATCH 58/90] chore: Bump `fs-extra` to v10 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index ae0f87e6..f3937b96 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "appdirectory": "^0.1.0", "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", - "fs-extra": "^9.1.0", + "fs-extra": "^10.1.0", "glob-all": "^3.3.0", "is-wsl": "^2.2.0", "jszip": "^3.10.1", From 2b98f89348ebb7d2759e04aa37535d119675e66d Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 22:57:36 +0200 Subject: [PATCH 59/90] chore: Bump 
`eslint` to v8 --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index f3937b96..4d9683bf 100644 --- a/package.json +++ b/package.json @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^7.32.0", + "eslint": "^8.26.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", From 8b8fe6668c092b4f5f7b48a476d84441a94ecf99 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 23 Oct 2022 23:03:19 +0200 Subject: [PATCH 60/90] chore: Release v6 --- CHANGELOG.md | 21 +++++++++++++++++++++ README.md | 2 ++ package.json | 2 +- 3 files changed, 24 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 41041fd3..fb11a43a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,27 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) + +### ⚠ BREAKING CHANGES + +- Changes default `dockerImage` used for building dependencies (now uses images from `public.ecr.aws/sam` repository) +- Requires `pipenv` in version `2022-04-08` or higher + +### Features + +- Introduce `requirePoetryLockFile` flag ([#728](https://github.com/serverless/serverless-python-requirements/pull/728)) ([e81d9e1](https://github.com/UnitedIncome/serverless-python-requirements/commit/e81d9e1824c135f110b4deccae2c26b0cbb26778)) ([François-Michel L'Heureux](https://github.com/FinchPowers)) +- Switch to official AWS docker images by default ([#724](https://github.com/UnitedIncome/serverless-python-requirements/issues/724)) ([4ba3bbe](https://github.com/UnitedIncome/serverless-python-requirements/commit/4ba3bbeb9296b4844feb476de695f33ee2a30056)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Bug Fixes + +- Adapt to support latest `pipenv` version ([#718](https://github.com/UnitedIncome/serverless-python-requirements/issues/718)) ([853da8d](https://github.com/UnitedIncome/serverless-python-requirements/commit/853da8d39921dc83a23d59fd825b2180814f87ff)) ([Anders Steiner](https://github.com/andidev) & [Randy Westergren](https://github.com/rwestergren) & [Piotr Grzesik](https://github.com/pgrzesik)) +- Properly recognize individual function ([#725](https://github.com/UnitedIncome/serverless-python-requirements/issues/725)) ([78795be](https://github.com/UnitedIncome/serverless-python-requirements/commit/78795be24eb08dc78acd7566778b3960c28b263c)) ([Piotr Grzesik](https://github.com/pgrzesik)) + +### Maintenance Improvements + +- Improve error message for docker failures ([#723](https://github.com/serverless/serverless-python-requirements/pull/723))([cc146d0](https://github.com/UnitedIncome/serverless-python-requirements/commit/cc146d088d362187641dd5ae3e9d0129a14c60e2)) ([Piotr Grzesik](https://github.com/pgrzesik)) + ## [5.4.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.3.1...v5.4.0) (2022-03-14) ### Features diff --git a/README.md b/README.md index 9563ff51..e0f27ac3 100644 --- a/README.md +++ b/README.md @@ -109,6 +109,8 @@ custom: ## :sparkles::cake::sparkles: Pipenv support +Requires `pipenv` in version `2022-04-08` or higher. + If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use `pipenv lock -r` to generate them. 
It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: diff --git a/package.json b/package.json index 4d9683bf..1aeeb4e5 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "5.4.0", + "version": "6.0.0", "engines": { "node": ">=12.0" }, From 762ca3e4fed9639cb035a4de5a199c29183c411e Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 31 Oct 2022 22:58:31 +0100 Subject: [PATCH 61/90] docs: Update references to default Docker images --- README.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index e0f27ac3..3c1f3339 100644 --- a/README.md +++ b/README.md @@ -33,8 +33,7 @@ If you're on a mac, check out [these notes](#applebeersnake-mac-brew-installed-p ## Cross compiling Compiling non-pure-Python modules or fetching their manylinux wheels is -supported on non-linux OSs via the use of Docker and the -[docker-lambda](https://github.com/lambci/docker-lambda) image. +supported on non-linux OSs via the use of Docker and [official AWS build](https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/serverless-image-repositories.html) images. To enable docker usage, add the following to your `serverless.yml`: ```yaml @@ -489,10 +488,10 @@ For usage of `dockerizePip` on Windows do Step 1 only if running serverless on w ## Native Code Dependencies During Build -Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image (`lambci/lambda:python3.6`) with a `Dockerfile` like: +Some Python packages require extra OS dependencies to build successfully. To deal with this, replace the default image with a `Dockerfile` like: ```dockerfile -FROM lambci/lambda:build-python3.6 +FROM public.ecr.aws/sam/build-python3.9 # Install your dependencies RUN yum -y install mysql-devel From 012b55f402c588381733cbe50d2e94acc55a0517 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Wed, 7 Dec 2022 18:00:30 +0100 Subject: [PATCH 62/90] docs: Add contributing and code of conduct --- CODE_OF_CONDUCT.md | 75 ++++++++++++++++++++++++++++++++++++ CONTRIBUTING.md | 95 ++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 170 insertions(+) create mode 100644 CODE_OF_CONDUCT.md create mode 100644 CONTRIBUTING.md diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md new file mode 100644 index 00000000..9d7afa9c --- /dev/null +++ b/CODE_OF_CONDUCT.md @@ -0,0 +1,75 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of experience, +nationality, personal appearance, race, religion, or sexual identity and +orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +- Using welcoming and inclusive language +- Being respectful of differing viewpoints and experiences +- Gracefully accepting constructive criticism +- Focusing on what is best for the community +- Showing empathy towards other community members + +Examples of unacceptable behavior by participants include: + +- The use of sexualized language or imagery and unwelcome sexual attention or + advances +- Trolling, insulting/derogatory comments, and personal or political attacks +- Public or private harassment +- Publishing others' private information, such as a physical or electronic + address, without explicit permission +- Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. + +Project maintainers have the right and responsibility to remove, edit, or +reject comments, commits, code, wiki edits, issues, and other contributions +that are not aligned to this Code of Conduct, or to ban temporarily or +permanently any contributor for other behaviors that they deem inappropriate, +threatening, offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported by contacting our team at **hello@serverless.com**. As an alternative +feel free to reach out to any of us personally. All +complaints will be reviewed and investigated and will result in a response that +is deemed necessary and appropriate to the circumstances. The project team is +obligated to maintain confidentiality with regard to the reporter of an incident. +Further details of specific enforcement policies may be posted separately. + +Project maintainers who do not follow or enforce the Code of Conduct in good +faith may face temporary or permanent repercussions as determined by other +members of the project's leadership. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, +available at [http://contributor-covenant.org/version/1/4][version] + +[homepage]: http://contributor-covenant.org +[version]: http://contributor-covenant.org/version/1/4/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..efcf6d1f --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,95 @@ +# Contributing Guidelines + +We are always looking to promote good contributors to be maintainers and provide them a front-row seat to serverless innovation. + +If you would like to be a maintainer for the [Serverless Framework](https://github.com/serverless/serverless) or any of our plugins, please get started with making code contributions and engaging with open issues/PRs. Also, please reach out to any of [Serverless organization](https://github.com/serverless) members to express your interest. 
+ +We'd love to collaborate closely with amazing developers as we drive the development of this open technology into the future. + +Welcome, and thanks in advance for your help! + +# How to contribute to `serverless-python-requirements` + +## Setup + +It is recommended to use Node v14 or v16 for development. + +Then, to begin development fork repository and run `npm install` in its root folder. + +## Getting started + +A good first step is to search for open [issues](https://github.com/serverless/serverless-python-requirements/issues). Issues are labeled, and some good issues to start with are labeled: [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) and [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted). + +## When you propose a new feature or bug fix + +Please make sure there is an open issue discussing your contribution before jumping into a Pull Request! +There are just a few situations (listed below) in which it is fine to submit PR without a corresponding issue: + +- Documentation update +- Obvious bug fix +- Maintenance improvement + +In all other cases please check if there's an open an issue discussing the given proposal, if there is not, create an issue respecting all its template remarks. + +In non-trivial cases please propose and let us review an implementation spec (in the corresponding issue) before jumping into implementation. + +Do not submit draft PRs. Submit only finalized work which is ready for merge. If you have any doubts related to implementation work please discuss in the corresponding issue. + +Once a PR has been reviewed and some changes are suggested, please ensure to **re-request review** after all new changes are pushed. It's the best and quietest way to inform maintainers that your work is ready to be checked again. + +## When you want to work on an existing issue + +**Note:** Please write a quick comment in the corresponding issue and ask if the feature is still relevant and that you want to jump into the implementation. + +Check out our [help wanted](https://github.com/serverless/serverless-python-requirements/labels/help%20wanted) or [good first issue](https://github.com/serverless/serverless-python-requirements/labels/good%20first%20issue) labels to find issues we want to move forward with your help. + +We will do our best to respond/review/merge your PR according to priority. We hope that you stay engaged with us during this period to ensure QA. Please note that the PR will be closed if there hasn't been any activity for a long time (~ 30 days) to keep us focused and keep the repo clean. + +## Reviewing Pull Requests + +Another really useful way to contribute is to review other people's Pull Requests. Having feedback from multiple people is helpful and reduces the overall time to make a final decision about the Pull Request. + +## Providing support + +The easiest thing you can do to help us move forward and make an impact on our progress is to simply provide support to other people having difficulties with their projects. + +You can do that by replying to [issues on GitHub](https://github.com/serverless/serverless-python-requirements/issues), chatting with other community members in [our Community Slack](https://www.serverless.com/slack), or [GitHub Discussions](https://github.com/serverless/serverless-python-requirements/discussions). + +--- + +# Code Style + +We aim for a clean, consistent code style. 
We're using [Prettier](https://prettier.io/) to confirm one code formatting style and [ESlint](https://eslint.org/) helps us to stay away from obvious issues that can be picked via static analysis. + +Ideally, you should have Prettier and ESlint integrated into your code editor, which will help you not think about specific rules and be sure you submit the code that follows guidelines. + +## Verifying prettier formatting + +``` +npm run prettier-check +``` + +## Verifying linting style + +``` +npm run lint +``` + +## Other guidelines + +- Minimize [lodash](https://lodash.com/) usage - resort to it, only if given part of logic cannot be expressed easily with native language constructs +- When writing asynchronous code, ensure to take advantage of [async functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/async_function) and native `Promise` API. Do not rely on [Bluebird](http://bluebirdjs.com) even though still large parts of old code rely on it. We're looking forward to drop this dependency in the near future. + +# Testing + +When proposing a few feature or fixing a bug, it is recommended to also provide sufficient test coverage. All tests live in `./test.js` module. + +# Our Code of Conduct + +Finally, to make sure you have a pleasant experience while being in our welcoming community, please read our [code of conduct](CODE_OF_CONDUCT.md). It outlines our core values and beliefs and will make working together a happier experience. + +Thanks again for being a contributor to the Serverless Community :tada:! + +Cheers, + +The :zap: [Serverless](http://www.serverless.com) Team From 1436c17829848430ebb0b317cc0f208ce0954b85 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maciej=20Wilczy=C5=84ski?= Date: Sun, 8 Jan 2023 21:15:11 +0100 Subject: [PATCH 63/90] Add support for specifying custom dependency groups in Poetry (#746) --- README.md | 10 +++++ index.js | 3 ++ lib/poetry.js | 9 +++++ test.js | 49 +++++++++++++++++++++++++ tests/poetry_packages/_poetryGroups.yml | 8 ++++ tests/poetry_packages/_slimPatterns.yml | 2 + tests/poetry_packages/handler.py | 5 +++ tests/poetry_packages/package.json | 14 +++++++ tests/poetry_packages/pyproject.toml | 19 ++++++++++ tests/poetry_packages/serverless.yml | 34 +++++++++++++++++ 10 files changed, 153 insertions(+) create mode 100644 tests/poetry_packages/_poetryGroups.yml create mode 100644 tests/poetry_packages/_slimPatterns.yml create mode 100644 tests/poetry_packages/handler.py create mode 100644 tests/poetry_packages/package.json create mode 100644 tests/poetry_packages/pyproject.toml create mode 100644 tests/poetry_packages/serverless.yml diff --git a/README.md b/README.md index 3c1f3339..cc93b310 100644 --- a/README.md +++ b/README.md @@ -141,6 +141,16 @@ custom: requirePoetryLockFile: false ``` +If your Poetry configuration includes custom dependency groups, they will not be installed automatically. To include them in the deployment package, use the `poetryWithGroups`, `poetryWithoutGroups` and `poetryOnlyGroups` options which wrap `poetry export`'s `--with`, `--without` and `--only` parameters. 
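The remaining two options follow the same shape; a rough sketch is shown below. The group names `dev_tools` and `runtime_only` are invented for the example and are not defined anywhere in this repository. As exercised by the tests in this change, `poetryWithoutGroups` drops the listed groups from the export, while `poetryOnlyGroups` exports only the listed groups and therefore skips the main dependency group as well.

```yaml
custom:
  pythonRequirements:
    # Exclude groups that are only needed locally
    # (forwarded as `poetry export --without=dev_tools`)
    poetryWithoutGroups:
      - dev_tools
    # Alternatively, export nothing but the listed groups
    # (forwarded as `poetry export --only=runtime_only`); the main dependency
    # group is then skipped too, so use this on its own rather than combined
    # with the options above.
    # poetryOnlyGroups:
    #   - runtime_only
```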
+ +```yaml +custom: + pythonRequirements: + poetryWithGroups: + - internal_dependencies + - lambda_dependencies +``` + ### Poetry with git dependencies Poetry by default generates the exported requirements.txt file with `-e` and that breaks pip with `-t` parameter diff --git a/index.js b/index.js index 5c568cc4..50a005e1 100644 --- a/index.js +++ b/index.js @@ -58,6 +58,9 @@ class ServerlessPythonRequirements { noDeploy: [], vendor: '', requirePoetryLockFile: false, + poetryWithGroups: [], + poetryWithoutGroups: [], + poetryOnlyGroups: [], }, (this.serverless.service.custom && this.serverless.service.custom.pythonRequirements) || diff --git a/lib/poetry.js b/lib/poetry.js index d324784b..17e3268f 100644 --- a/lib/poetry.js +++ b/lib/poetry.js @@ -57,6 +57,15 @@ async function pyprojectTomlToRequirements(modulePath, pluginInstance) { '-o', 'requirements.txt', '--with-credentials', + ...(options.poetryWithGroups.length + ? [`--with=${options.poetryWithGroups.join(',')}`] + : []), + ...(options.poetryWithoutGroups.length + ? [`--without=${options.poetryWithoutGroups.join(',')}`] + : []), + ...(options.poetryOnlyGroups.length + ? [`--only=${options.poetryOnlyGroups.join(',')}`] + : []), ], { cwd: moduleProjectPath, diff --git a/test.js b/test.js index 27db6884..c7232a6e 100644 --- a/test.js +++ b/test.js @@ -1654,3 +1654,52 @@ test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoe ); t.end(); }); + +test('poetry py3.7 packages additional optional packages', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.true(zipfiles.includes(`bottle.py`), 'bottle is packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 skips additional optional packages specified in withoutGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryWithGroups: 'poetryWithGroups', + poetryWithoutGroups: 'poetryWithoutGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true(zipfiles.includes(`flask${sep}__init__.py`), 'flask is packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); + +test('poetry py3.7 only installs optional packages specified in onlyGroups', async (t) => { + process.chdir('tests/poetry_packages'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { + env: { + poetryOnlyGroups: 'poetryOnlyGroups', + }, + }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.false(zipfiles.includes(`flask${sep}__init__.py`), 'flask is NOT packaged'); + t.false(zipfiles.includes(`bottle.py`), 'bottle is NOT packaged'); + t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); + t.end(); +}); diff --git a/tests/poetry_packages/_poetryGroups.yml b/tests/poetry_packages/_poetryGroups.yml new file mode 100644 index 00000000..25abd07a --- /dev/null +++ b/tests/poetry_packages/_poetryGroups.yml @@ -0,0 +1,8 @@ +empty: [] +poetryWithGroups: + - custom1 + - custom2 +poetryWithoutGroups: + - custom1 
+poetryOnlyGroups: + - custom2 diff --git a/tests/poetry_packages/_slimPatterns.yml b/tests/poetry_packages/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/poetry_packages/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/poetry_packages/handler.py b/tests/poetry_packages/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/poetry_packages/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json new file mode 100644 index 00000000..781a4259 --- /dev/null +++ b/tests/poetry_packages/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + } +} diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml new file mode 100644 index 00000000..7bbe30bf --- /dev/null +++ b/tests/poetry_packages/pyproject.toml @@ -0,0 +1,19 @@ +[tool.poetry] +name = "poetry" +version = "0.1.0" +description = "" +authors = ["Your Name "] + +[tool.poetry.dependencies] +python = "^3.6" +Flask = "^1.0" + +[tool.poetry.group.custom1.dependencies] +bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} + +[tool.poetry.group.custom2.dependencies] +boto3 = "^1.9" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml new file mode 100644 index 00000000..03652968 --- /dev/null +++ b/tests/poetry_packages/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +provider: + name: aws + runtime: python3.7 + +plugins: + - serverless-python-requirements +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + requirePoetryLockFile: ${env:requirePoetryLockFile, false} + poetryWithGroups: ${file(./_poetryGroups.yml):${env:poetryWithGroups, "empty"}} + poetryWithoutGroups: ${file(./_poetryGroups.yml):${env:poetryWithoutGroups, "empty"}} + poetryOnlyGroups: ${file(./_poetryGroups.yml):${env:poetryOnlyGroups, "empty"}} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello From 22a1f832ac8051f0963328743f9e768f8e66649e Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Sun, 8 Jan 2023 15:17:07 -0500 Subject: [PATCH 64/90] fix: Add legacy `pipenv` backward compatability (#742) --- .github/workflows/integrate.yml | 9 ++-- .github/workflows/validate.yml | 9 ++-- README.md | 3 +- lib/pipenv.js | 94 ++++++++++++++++++++++++++------- package.json | 1 + 5 files changed, 88 insertions(+), 28 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 1f979b0a..b0ca0207 100644 --- 
a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -16,6 +16,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -48,7 +49,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -67,6 +68,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -99,7 +101,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,7 +149,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -166,6 +168,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 31052279..e77f6cce 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,6 +16,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -61,7 +62,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -94,6 +95,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -128,7 +130,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -147,6 +149,7 @@ jobs: strategy: matrix: sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -181,7 +184,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/README.md b/README.md index cc93b310..6032725a 100644 --- a/README.md +++ b/README.md @@ -110,8 +110,7 @@ custom: Requires `pipenv` in version `2022-04-08` or higher. -If you include a `Pipfile` and have `pipenv` installed instead of a `requirements.txt` this will use -`pipenv lock -r` to generate them. 
It is fully compatible with all options such as `zip` and +If you include a `Pipfile` and have `pipenv` installed, this will use `pipenv` to generate requirements instead of a `requirements.txt`. It is fully compatible with all options such as `zip` and `dockerizePip`. If you don't want this plugin to generate it for you, set the following option: ```yaml diff --git a/lib/pipenv.js b/lib/pipenv.js index 11331ee3..c59fe26a 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -2,6 +2,43 @@ const fse = require('fs-extra'); const path = require('path'); const spawn = require('child-process-ext/spawn'); const { EOL } = require('os'); +const semver = require('semver'); + +const LEGACY_PIPENV_VERSION = '2022.8.5'; + +async function getPipenvVersion() { + try { + const res = await spawn('pipenv', ['--version'], { + cwd: this.servicePath, + }); + + const stdoutBuffer = + (res.stdoutBuffer && res.stdoutBuffer.toString().trim()) || ''; + + const version = stdoutBuffer.split(' ')[2]; + + if (semver.valid(version)) { + return version; + } else { + throw new this.serverless.classes.Error( + `Unable to parse pipenv version!`, + 'PYTHON_REQUIREMENTS_PIPENV_VERSION_ERROR' + ); + } + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + + if (stderrBufferContent.includes('command not found')) { + throw new this.serverless.classes.Error( + `pipenv not found! Install it according to the pipenv docs.`, + 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' + ); + } else { + throw e; + } + } +} /** * pipenv install @@ -28,31 +65,48 @@ async function pipfileToRequirements() { } try { - try { - await spawn('pipenv', ['lock', '--keep-outdated'], { - cwd: this.servicePath, - }); - } catch (e) { - const stderrBufferContent = - (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + // Get and validate pipenv version + if (this.log) { + this.log.info('Getting pipenv version'); + } else { + this.serverless.cli.log('Getting pipenv version'); + } + + const pipenvVersion = await getPipenvVersion(); + let res; - if (stderrBufferContent.includes('must exist to use')) { - // No previous Pipfile.lock, we will try to generate it here - await spawn('pipenv', ['lock'], { + if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { + // Using new pipenv syntax ( >= 2022.8.13) + try { + await spawn('pipenv', ['lock', '--keep-outdated'], { cwd: this.servicePath, }); - } else if (stderrBufferContent.includes('command not found')) { - throw new this.serverless.classes.Error( - `pipenv not found! 
Install it according to the poetry docs.`, - 'PYTHON_REQUIREMENTS_PIPENV_NOT_FOUND' - ); - } else { - throw e; + } catch (e) { + const stderrBufferContent = + (e.stderrBuffer && e.stderrBuffer.toString()) || ''; + if (stderrBufferContent.includes('must exist to use')) { + // No previous Pipfile.lock, we will try to generate it here + await spawn('pipenv', ['lock'], { + cwd: this.servicePath, + }); + } else { + throw e; + } } + + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); + } else { + // Falling back to legacy pipenv syntax + res = await spawn( + 'pipenv', + ['lock', '--requirements', '--keep-outdated'], + { + cwd: this.servicePath, + } + ); } - const res = await spawn('pipenv', ['requirements'], { - cwd: this.servicePath, - }); fse.ensureDirSync(path.join(this.servicePath, '.serverless')); fse.writeFileSync( diff --git a/package.json b/package.json index 1aeeb4e5..318eec59 100644 --- a/package.json +++ b/package.json @@ -72,6 +72,7 @@ "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", + "semver": "^7.3.8", "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.7.4" From e8b2e51c265792046bacc3946f22f7bd842c60e6 Mon Sep 17 00:00:00 2001 From: Randy Westergren Date: Wed, 11 Jan 2023 17:31:37 -0500 Subject: [PATCH 65/90] fix: Fix integration test matrix configuration (#755) Matrix values appear to mistakenly added to `tagIfNewVersion` instead of `linuxNode12` --- .github/workflows/integrate.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index b0ca0207..b2b6f77d 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -117,6 +117,10 @@ jobs: linuxNode12: name: '[Linux] Node.js v12: Unit tests' runs-on: ubuntu-latest + strategy: + matrix: + sls-version: [2, 3] + pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -165,10 +169,6 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] needs: [windowsNode14, linuxNode14, linuxNode12] steps: - name: Checkout repository From ad40278629c63f4d0971637214b4d9bc20dbd288 Mon Sep 17 00:00:00 2001 From: Jeff Gordon <55799997+jfgordon2@users.noreply.github.com> Date: Thu, 17 Aug 2023 16:31:11 -0500 Subject: [PATCH 66/90] fix: Remove outdated Pipenv requirements flag (#780) --- .github/workflows/validate.yml | 10 +++++++--- lib/pipenv.js | 22 ++++++++++++++++------ tests/base/package.json | 2 +- tests/individually/package.json | 2 +- tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/package.json | 2 +- 9 files changed, 30 insertions(+), 16 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index e77f6cce..227e6056 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,7 +16,11 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] + # pipenv 2202.8.13 marks deprecation of pipenv lock --requirements + # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#2022813-2022-08-13 + # pipenv 2023.7.9 marks deprecation of pipenv lock --keep-outdated + # 
https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#202379-2023-07-09 steps: - name: Checkout repository uses: actions/checkout@v2 @@ -95,7 +99,7 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -149,7 +153,7 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] + pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/lib/pipenv.js b/lib/pipenv.js index c59fe26a..1099b651 100644 --- a/lib/pipenv.js +++ b/lib/pipenv.js @@ -77,26 +77,36 @@ async function pipfileToRequirements() { if (semver.gt(pipenvVersion, LEGACY_PIPENV_VERSION)) { // Using new pipenv syntax ( >= 2022.8.13) + // Generate requirements from existing lock file. + // See: https://pipenv.pypa.io/en/latest/advanced/#generating-a-requirements-txt try { - await spawn('pipenv', ['lock', '--keep-outdated'], { + res = await spawn('pipenv', ['requirements'], { cwd: this.servicePath, }); } catch (e) { const stderrBufferContent = (e.stderrBuffer && e.stderrBuffer.toString()) || ''; - if (stderrBufferContent.includes('must exist to use')) { + if (stderrBufferContent.includes('FileNotFoundError')) { // No previous Pipfile.lock, we will try to generate it here + if (this.log) { + this.log.warning( + 'No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } else { + this.serverless.cli.log( + 'WARNING: No Pipfile.lock found! Review https://pipenv.pypa.io/en/latest/pipfile/ for recommendations.' + ); + } await spawn('pipenv', ['lock'], { cwd: this.servicePath, }); + res = await spawn('pipenv', ['requirements'], { + cwd: this.servicePath, + }); } else { throw e; } } - - res = await spawn('pipenv', ['requirements'], { - cwd: this.servicePath, - }); } else { // Falling back to legacy pipenv syntax res = await spawn( diff --git a/tests/base/package.json b/tests/base/package.json index 38630491..781a4259 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 43ce4eee..781a4259 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.1.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 38630491..781a4259 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 38630491..781a4259 100644 --- a/tests/non_poetry_pyproject/package.json +++ 
b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 38630491..781a4259 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 38630491..781a4259 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json index 38630491..781a4259 100644 --- a/tests/poetry_individually/package.json +++ b/tests/poetry_individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-5.3.1.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" } } From c1f5ca114de815ca19ad213a79e250b5b81f29b3 Mon Sep 17 00:00:00 2001 From: Jim Kirkbride Date: Thu, 17 Aug 2023 17:36:14 -0400 Subject: [PATCH 67/90] fix: Not crash when runtime is not `python` (#773) Co-authored-by: Marco Kleinlein --- .gitignore | 3 +++ .python-version | 1 + CONTRIBUTING.md | 14 +++++++++++--- index.js | 7 +++++++ test.js | 12 ++++++++++++ 5 files changed, 34 insertions(+), 3 deletions(-) create mode 100644 .python-version diff --git a/.gitignore b/.gitignore index 3707ff1e..64bdbd6a 100644 --- a/.gitignore +++ b/.gitignore @@ -76,3 +76,6 @@ unzip_requirements.py # Project ignores puck/ serverless.yml.bak + +# Generated packaging +*.tgz diff --git a/.python-version b/.python-version new file mode 100644 index 00000000..475ba515 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.7 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index efcf6d1f..4616858b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -12,9 +12,17 @@ Welcome, and thanks in advance for your help! ## Setup -It is recommended to use Node v14 or v16 for development. - -Then, to begin development fork repository and run `npm install` in its root folder. +Pre-Reqs: +* Python 3.7 +* [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.7) +* Perl (used in the tests) +* Node v14 or v16 + +Then, to begin development: +1. fork the repository +2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) +3. run `npm install` in its root folder +4. 
run the tests via `npm run test` ## Getting started diff --git a/index.js b/index.js index 50a005e1..246b121e 100644 --- a/index.js +++ b/index.js @@ -66,6 +66,13 @@ class ServerlessPythonRequirements { this.serverless.service.custom.pythonRequirements) || {} ); + if ( + options.pythonBin === this.serverless.service.provider.runtime && + !options.pythonBin.startsWith('python') + ) { + options.pythonBin = 'python'; + } + if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } diff --git a/test.js b/test.js index c7232a6e..673bf631 100644 --- a/test.js +++ b/test.js @@ -1655,6 +1655,18 @@ test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoe t.end(); }); +test('works with provider.runtime not being python', async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { runtime: 'nodejs12.x' } }); + t.true( + pathExistsSync('.serverless/sls-py-req-test.zip'), + 'sls-py-req-test is packaged' + ); + t.end(); +}); + test('poetry py3.7 packages additional optional packages', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); From c1992f2497b86ae12dbf4c9a8b582df4cf658d8a Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 29 Oct 2023 22:42:06 +0100 Subject: [PATCH 68/90] ci: Remove node12 from testing matrix (#795) --- .github/workflows/integrate.yml | 54 +-------------------------------- .github/workflows/validate.yml | 54 --------------------------------- 2 files changed, 1 insertion(+), 107 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index b2b6f77d..be4bd2c4 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -114,62 +114,10 @@ jobs: - name: Unit tests run: npm test - linuxNode12: - name: '[Linux] Node.js v12: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v12-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 12.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [windowsNode14, linuxNode14, linuxNode12] + needs: [windowsNode14, linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 227e6056..79548057 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -146,57 +146,3 @@ jobs: npm update --save-dev --no-save - name: Unit 
tests run: npm test - - linuxNode12: - name: '[Linux] Node.js v12: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v12-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v12-${{ runner.os }}-${{ github.ref }}- - npm-v12-${{ runner.os }}-refs/heads/master- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 12.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test From 4b93513778929ed0a56b20af8f4b58cbc818e4c0 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 07:55:57 +0100 Subject: [PATCH 69/90] ci: Temp skip of cache-related tests (#796) --- test.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test.js b/test.js index 673bf631..596edcce 100644 --- a/test.js +++ b/test.js @@ -1373,7 +1373,7 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test('py3.7 uses download cache by default option', async (t) => { +test.skip('py3.7 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1386,7 +1386,7 @@ test('py3.7 uses download cache by default option', async (t) => { t.end(); }); -test('py3.7 uses download cache by default', async (t) => { +test.skip('py3.7 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1415,7 +1415,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test( +test.skip( 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); @@ -1433,7 +1433,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 uses static and download cache', async (t) => { +test.skip('py3.7 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); From 2bd9263ffb22cdf1395619cf16cd836660c7afb6 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 08:07:50 +0100 Subject: [PATCH 70/90] ci: Fix test skips (#797) --- test.js | 103 +++++++++++++++++++++++++++++++------------------------- 1 file changed, 57 insertions(+), 46 deletions(-) diff --git a/test.js b/test.js index 596edcce..a4bb992b 100644 --- a/test.js +++ b/test.js @@ -1373,30 +1373,36 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test.skip('py3.7 uses download cache by default option', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - t.true( - 
pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); -}); +test('py3.7 uses download cache by default option', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); -test.skip('py3.7 uses download cache by default', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); - t.true( - pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), - 'cache directory exists' - ); - t.end(); -}); +test('py3.7 uses download cache by default', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); + t.true( + pathExistsSync(`.requirements-cache${sep}downloadCacheslspyc${sep}http`), + 'cache directory exists' + ); + t.end(); + }, + { skip: true } +); test( 'py3.7 uses download cache with dockerizePip option', @@ -1412,10 +1418,11 @@ test( ); t.end(); }, - { skip: !canUseDocker() || brokenOn('win32') } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); -test.skip( +test( 'py3.7 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); @@ -1430,29 +1437,33 @@ test.skip( ); t.end(); }, - { skip: !canUseDocker() || brokenOn('win32') } + // { skip: !canUseDocker() || brokenOn('win32') } + { skip: true } ); -test.skip('py3.7 uses static and download cache', async (t) => { - process.chdir('tests/base'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const cachepath = getUserCachePath(); - const cacheFolderHash = sha256Path('.serverless/requirements.txt'); - const arch = 'x86_64'; - t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), - 'http exists in download-cache' - ); - t.true( - pathExistsSync( - `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` - ), - 'flask exists in static-cache' - ); - t.end(); -}); +test('py3.7 uses static and download cache', + async (t) => { + process.chdir('tests/base'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const cachepath = getUserCachePath(); + const cacheFolderHash = sha256Path('.serverless/requirements.txt'); + const arch = 'x86_64'; + t.true( + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + 'http exists in download-cache' + ); + t.true( + pathExistsSync( + `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}flask` + ), + 'flask exists in static-cache' + ); + t.end(); + }, + { skip: true } +); test( 'py3.7 uses static and download cache with dockerizePip option', From c86176b42f11540d52cf0001e3667a8dc3b1f868 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 23:21:19 +0100 Subject: [PATCH 71/90] ci: Temporarily minimize testing matrix (#799) --- .github/workflows/integrate.yml | 57 ++---------------------------- .github/workflows/validate.yml | 61 +-------------------------------- 2 files changed, 3 insertions(+), 115 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index be4bd2c4..414f7d9b 100644 --- 
a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,65 +10,12 @@ env: FORCE_COLOR: 1 jobs: - windowsNode14: - name: '[Windows] Node.js v14: Unit tests' - runs-on: windows-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - linuxNode14: name: '[Linux] Node.js 14: Unit tests' runs-on: ubuntu-latest strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13'] steps: - name: Checkout repository uses: actions/checkout@v2 @@ -101,7 +48,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -117,7 +64,7 @@ jobs: tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [windowsNode14, linuxNode14] + needs: [linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 79548057..8957e7f7 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -16,11 +16,6 @@ jobs: strategy: matrix: sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - # pipenv 2202.8.13 marks deprecation of pipenv lock --requirements - # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#2022813-2022-08-13 - # pipenv 2023.7.9 marks deprecation of pipenv lock --keep-outdated - # https://github.com/pypa/pipenv/blob/30067b458bd7a429f242736b7fde40c9bd4d4f14/CHANGELOG.rst#202379-2023-07-09 steps: - name: Checkout repository uses: actions/checkout@v2 @@ -66,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry + run: python -m pip install pipenv poetry - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} @@ -92,57 +87,3 @@ jobs: fi - name: Unit tests run: npm test - - windowsNode14: - name: '[Windows] Node.js v14: Unit tests' - runs-on: windows-latest - strategy: - matrix: - sls-version: [2, 3] - pipenv-version: ['2022.8.5', '2022.8.13', '2023.7.4', '2023.7.9'] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - 
with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: | - npm-v14-${{ runner.os }}-${{ github.ref }}- - npm-v14-${{ runner.os }}-refs/heads/master- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv==${{ matrix.pipenv-version }} poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test From bf6a4c34f0c6488b56dbc10427ab98c9cbe208f9 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Mon, 30 Oct 2023 23:23:10 +0100 Subject: [PATCH 72/90] ci: Temporarily disable test run on integrate (#800) --- .github/workflows/integrate.yml | 52 --------------------------------- 1 file changed, 52 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index 414f7d9b..d2da520e 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -10,61 +10,9 @@ env: FORCE_COLOR: 1 jobs: - linuxNode14: - name: '[Linux] Node.js 14: Unit tests' - runs-on: ubuntu-latest - strategy: - matrix: - sls-version: [2, 3] - steps: - - name: Checkout repository - uses: actions/checkout@v2 - - - name: Retrieve dependencies from cache - id: cacheNpm - uses: actions/cache@v2 - with: - path: | - ~/.npm - node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} - restore-keys: npm-v14-${{ runner.os }}-${{ github.ref }}- - - - name: Set up Python 3.7 - uses: actions/setup-python@v2 - with: - python-version: 3.7 - - - name: Install Node.js and npm - uses: actions/setup-node@v1 - with: - node-version: 14.x - - - name: Check python version - run: | - python --version - - - name: Install setuptools - run: python -m pip install --force setuptools wheel - - - name: Install pipenv / poetry - run: python -m pip install pipenv poetry - - - name: Install serverless - run: npm install -g serverless@${{ matrix.sls-version }} - - - name: Install dependencies - if: steps.cacheNpm.outputs.cache-hit != 'true' - run: | - npm update --no-save - npm update --save-dev --no-save - - name: Unit tests - run: npm test - tagIfNewVersion: name: Tag if new version runs-on: ubuntu-latest - needs: [linuxNode14] steps: - name: Checkout repository uses: actions/checkout@v2 From 84ee8c5b1cba484f9c856541511d6bcfc5dba299 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 2 Nov 2023 09:49:49 +0100 Subject: [PATCH 73/90] Release v6.0.1 (#793) * chore: Bump dependencies * chore: Release v6.0.1 --- CHANGELOG.md | 12 ++++++++++++ package.json | 10 +++++----- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fb11a43a..f8667134 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) + +### Bug Fixes + +- Add legacy `pipenv` backward compatability ([#742](https://github.com/UnitedIncome/serverless-python-requirements/issues/742)) ([22a1f83](https://github.com/UnitedIncome/serverless-python-requirements/commit/22a1f832ac8051f0963328743f9e768f8e66649e)) ([Randy Westergren](https://github.com/rwestergren)) +- Not crash when runtime is not `python` ([#773](https://github.com/UnitedIncome/serverless-python-requirements/issues/773)) ([c1f5ca1](https://github.com/UnitedIncome/serverless-python-requirements/commit/c1f5ca114de815ca19ad213a79e250b5b81f29b3)) ([Jim Kirkbride](https://github.com/jameskbride)) +- Remove outdated Pipenv requirements flag ([#780](https://github.com/UnitedIncome/serverless-python-requirements/issues/780)) ([ad40278](https://github.com/UnitedIncome/serverless-python-requirements/commit/ad40278629c63f4d0971637214b4d9bc20dbd288)) ([Jeff Gordon](https://github.com/jfgordon2)) + +### Maintenance Improvements + +- Fix integration test matrix configuration ([#755](https://github.com/UnitedIncome/serverless-python-requirements/issues/755)) ([e8b2e51](https://github.com/UnitedIncome/serverless-python-requirements/commit/e8b2e51c265792046bacc3946f22f7bd842c60e6)) ([Randy Westergren](https://github.com/rwestergren)) + ## [6.0.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v5.4.0...v6.0.0) (2022-10-23) ### ⚠ BREAKING CHANGES diff --git a/package.json b/package.json index 318eec59..c420e4d6 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.0.0", + "version": "6.0.1", "engines": { "node": ">=12.0" }, @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^8.26.0", + "eslint": "^8.52.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", @@ -65,17 +65,17 @@ "bluebird": "^3.7.2", "child-process-ext": "^2.1.1", "fs-extra": "^10.1.0", - "glob-all": "^3.3.0", + "glob-all": "^3.3.1", "is-wsl": "^2.2.0", "jszip": "^3.10.1", "lodash.get": "^4.4.2", "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", - "semver": "^7.3.8", + "semver": "^7.5.4", "set-value": "^4.1.0", "sha256-file": "1.0.0", - "shell-quote": "^1.7.4" + "shell-quote": "^1.8.1" }, "peerDependencies": { "serverless": "^2.32 || 3" From ea2604ed29483ad04dbcf3a04d58961e88c6b3fb Mon Sep 17 00:00:00 2001 From: Daniel Schep Date: Fri, 17 Nov 2023 12:24:22 -0500 Subject: [PATCH 74/90] Update the description of myself (#802) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6032725a..37a9d1df 100644 --- a/README.md +++ b/README.md @@ -567,7 +567,7 @@ package: ## Contributors -- [@dschep](https://github.com/dschep) - Lead developer & original maintainer +- [@dschep](https://github.com/dschep) - Original developer - [@azurelogic](https://github.com/azurelogic) - logging & documentation fixes - [@abetomo](https://github.com/abetomo) - style & linting - [@angstwad](https://github.com/angstwad) - `deploy --function` support From 421e9a6e9a168b741dbd0ce9b6c1d39f7d8f55b8 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Fri, 24 Nov 2023 23:48:09 +0100 Subject: [PATCH 75/90] ci: Pin versions to speed up poetry deps resolution (#806) --- tests/poetry/pyproject.toml | 8 ++++---- tests/poetry_individually/module1/pyproject.toml | 8 ++++---- tests/poetry_packages/pyproject.toml 
| 6 +++--- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/tests/poetry/pyproject.toml b/tests/poetry/pyproject.toml index b813968a..896b48e7 100644 --- a/tests/poetry/pyproject.toml +++ b/tests/poetry/pyproject.toml @@ -5,13 +5,13 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} -boto3 = "^1.9" +boto3 = "1.29.6" [tool.poetry.dev-dependencies] [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry"] build-backend = "poetry.masonry.api" diff --git a/tests/poetry_individually/module1/pyproject.toml b/tests/poetry_individually/module1/pyproject.toml index b813968a..896b48e7 100644 --- a/tests/poetry_individually/module1/pyproject.toml +++ b/tests/poetry_individually/module1/pyproject.toml @@ -5,13 +5,13 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} -boto3 = "^1.9" +boto3 = "1.29.6" [tool.poetry.dev-dependencies] [build-system] -requires = ["poetry>=0.12"] +requires = ["poetry"] build-backend = "poetry.masonry.api" diff --git a/tests/poetry_packages/pyproject.toml b/tests/poetry_packages/pyproject.toml index 7bbe30bf..0f9fc705 100644 --- a/tests/poetry_packages/pyproject.toml +++ b/tests/poetry_packages/pyproject.toml @@ -5,14 +5,14 @@ description = "" authors = ["Your Name "] [tool.poetry.dependencies] -python = "^3.6" -Flask = "^1.0" +python = "^3.7" +Flask = "2.0" [tool.poetry.group.custom1.dependencies] bottle = {git = "https://git@github.com/bottlepy/bottle.git", tag = "0.12.16"} [tool.poetry.group.custom2.dependencies] -boto3 = "^1.9" +boto3 = "1.29.6" [build-system] requires = ["poetry-core"] From e33b02da750acfc40b3d341c35edc71a7bea08aa Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sat, 25 Nov 2023 00:11:54 +0100 Subject: [PATCH 76/90] [ci] Update to Node18 (#803) --- .github/workflows/publish.yml | 4 ++-- .github/workflows/validate.yml | 12 ++++++------ 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 6eee5b45..21d7cb71 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -26,12 +26,12 @@ jobs: path: | ~/.npm node_modules - key: npm-v14-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} + key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x registry-url: https://registry.npmjs.org - name: Publish new version diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 8957e7f7..03eea961 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -10,8 +10,8 @@ env: FORCE_COLOR: 1 jobs: - linuxNode14: - name: '[Linux] Node.js v14: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' + linuxNode18: + name: '[Linux] Node.js v18: Lint, Eventual Commitlint, Eventual Changelog, Formatting & Unit tests' runs-on: ubuntu-latest strategy: matrix: @@ -38,10 +38,10 @@ jobs: path: | ~/.npm node_modules - key: npm-v14-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} + key: npm-v18-${{ runner.os }}-${{ github.ref }}-${{ hashFiles('package.json') }} restore-keys: | - npm-v14-${{ runner.os }}-${{ 
github.ref }}- - npm-v14-${{ runner.os }}-refs/heads/master- + npm-v18-${{ runner.os }}-${{ github.ref }}- + npm-v18-${{ runner.os }}-refs/heads/master- - name: Set up Python 3.7 uses: actions/setup-python@v2 @@ -51,7 +51,7 @@ jobs: - name: Install Node.js and npm uses: actions/setup-node@v1 with: - node-version: 14.x + node-version: 18.x - name: Check python version run: | From 4bbb80ed0c0150e04696513f37537eb3ab0002a4 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sat, 25 Nov 2023 21:51:04 +0100 Subject: [PATCH 77/90] test: Update to py3.9 (#808) --- .github/workflows/validate.yml | 4 +- example/serverless.yml | 2 +- example_native_deps/serverless.yml | 2 +- test.js | 117 +++++++++++----------- tests/base/serverless.yml | 2 +- tests/individually/serverless.yml | 2 +- tests/non_build_pyproject/serverless.yml | 2 +- tests/non_poetry_pyproject/serverless.yml | 2 +- tests/pipenv/serverless.yml | 2 +- tests/poetry/serverless.yml | 2 +- tests/poetry_individually/serverless.yml | 2 +- tests/poetry_packages/serverless.yml | 2 +- 12 files changed, 72 insertions(+), 69 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 03eea961..61935c3e 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -43,10 +43,10 @@ jobs: npm-v18-${{ runner.os }}-${{ github.ref }}- npm-v18-${{ runner.os }}-refs/heads/master- - - name: Set up Python 3.7 + - name: Set up Python 3.9 uses: actions/setup-python@v2 with: - python-version: 3.7 + python-version: 3.9 - name: Install Node.js and npm uses: actions/setup-node@v1 diff --git a/example/serverless.yml b/example/serverless.yml index 349cdcb8..e5c4c924 100644 --- a/example/serverless.yml +++ b/example/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/example_native_deps/serverless.yml b/example_native_deps/serverless.yml index 4deed44a..cfbd4913 100644 --- a/example_native_deps/serverless.yml +++ b/example_native_deps/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/test.js b/test.js index a4bb992b..f4afca45 100644 --- a/test.js +++ b/test.js @@ -234,7 +234,7 @@ test('default pythonBin can package flask with default options', async (t) => { t.end(); }); -test('py3.7 packages have the same hash', async (t) => { +test('py3.9 packages have the same hash', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -249,7 +249,7 @@ test('py3.7 packages have the same hash', async (t) => { t.end(); }); -test('py3.7 can package flask with default options', async (t) => { +test('py3.9 can package flask with default options', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -261,7 +261,7 @@ test('py3.7 can package flask with default options', async (t) => { }); test( - 'py3.7 can package flask with hashes', + 'py3.9 can package flask with hashes', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -279,7 +279,7 @@ test( { skip: brokenOn('win32') } ); -test('py3.7 can package flask with nested', async (t) => { +test('py3.9 can package flask with nested', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -295,7 +295,7 @@ test('py3.7 can package flask with nested', async (t) => { t.end(); 
}); -test('py3.7 can package flask with zip option', async (t) => { +test('py3.9 can package flask with zip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -313,7 +313,7 @@ test('py3.7 can package flask with zip option', async (t) => { t.end(); }); -test('py3.7 can package flask with slim option', async (t) => { +test('py3.9 can package flask with slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -332,7 +332,7 @@ test('py3.7 can package flask with slim option', async (t) => { t.end(); }); -test('py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -353,7 +353,7 @@ test('py3.7 can package flask with slim & slimPatterns options', async (t) => { t.end(); }); -test("py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -371,7 +371,7 @@ test("py3.7 doesn't package bottle with noDeploy option", async (t) => { t.end(); }); -test('py3.7 can package boto3 with editable', async (t) => { +test('py3.9 can package boto3 with editable', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -391,7 +391,7 @@ test('py3.7 can package boto3 with editable', async (t) => { }); test( - 'py3.7 can package flask with dockerizePip option', + 'py3.9 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -406,7 +406,7 @@ test( ); test( - 'py3.7 can package flask with slim & dockerizePip option', + 'py3.9 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -430,7 +430,7 @@ test( ); test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns options', + 'py3.9 can package flask with slim & dockerizePip & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -455,7 +455,7 @@ test( ); test( - 'py3.7 can package flask with zip & dockerizePip option', + 'py3.9 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -487,7 +487,7 @@ test( ); test( - 'py3.7 can package flask with zip & slim & dockerizePip option', + 'py3.9 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -520,7 +520,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('pipenv py3.7 can package flask with default options', async (t) => { +test('pipenv py3.9 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -535,7 +535,7 @@ test('pipenv py3.7 can package flask with default options', async (t) => { t.end(); }); -test('pipenv py3.7 can package flask with slim option', async (t) => { +test('pipenv py3.9 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -554,7 +554,7 @@ test('pipenv py3.7 can package 
flask with slim option', async (t) => { t.end(); }); -test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -576,7 +576,7 @@ test('pipenv py3.7 can package flask with slim & slimPatterns options', async (t t.end(); }); -test('pipenv py3.7 can package flask with zip option', async (t) => { +test('pipenv py3.9 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -594,7 +594,7 @@ test('pipenv py3.7 can package flask with zip option', async (t) => { t.end(); }); -test("pipenv py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -633,7 +633,7 @@ test('non poetry pyproject.toml without requirements.txt packages handler only', t.end(); }); -test('poetry py3.7 can package flask with default options', async (t) => { +test('poetry py3.9 can package flask with default options', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -645,7 +645,7 @@ test('poetry py3.7 can package flask with default options', async (t) => { t.end(); }); -test('poetry py3.7 can package flask with slim option', async (t) => { +test('poetry py3.9 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -664,7 +664,7 @@ test('poetry py3.7 can package flask with slim option', async (t) => { t.end(); }); -test('poetry py3.7 can package flask with slim & slimPatterns options', async (t) => { +test('poetry py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -686,7 +686,7 @@ test('poetry py3.7 can package flask with slim & slimPatterns options', async (t t.end(); }); -test('poetry py3.7 can package flask with zip option', async (t) => { +test('poetry py3.9 can package flask with zip option', async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -704,7 +704,7 @@ test('poetry py3.7 can package flask with zip option', async (t) => { t.end(); }); -test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { +test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/poetry'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -722,7 +722,7 @@ test("poetry py3.7 doesn't package bottle with noDeploy option", async (t) => { t.end(); }); -test('py3.7 can package flask with zip option and no explicit include', async (t) => { +test('py3.9 can package flask with zip option and no explicit include', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -742,7 +742,7 @@ test('py3.7 can package flask with zip option and no explicit include', async (t t.end(); }); -test('py3.7 can package lambda-decorators using vendor option', async (t) => { +test('py3.9 can package lambda-decorators using vendor option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -805,7 +805,7 @@ test( { skip: process.platform === 'win32' } ); 
-test('py3.7 can package flask in a project with a space in it', async (t) => { +test('py3.9 can package flask in a project with a space in it', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); const path = npm(['pack', '../..']); @@ -818,7 +818,7 @@ test('py3.7 can package flask in a project with a space in it', async (t) => { }); test( - 'py3.7 can package flask in a project with a space in it with docker', + 'py3.9 can package flask in a project with a space in it with docker', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); @@ -833,7 +833,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 supports custom file name with fileName option', async (t) => { +test('py3.9 supports custom file name with fileName option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); writeFileSync('puck', 'requests'); @@ -849,7 +849,7 @@ test('py3.7 supports custom file name with fileName option', async (t) => { t.end(); }); -test("py3.7 doesn't package bottle with zip option", async (t) => { +test("py3.9 doesn't package bottle with zip option", async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -885,7 +885,7 @@ test("py3.7 doesn't package bottle with zip option", async (t) => { t.end(); }); -test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { +test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -908,7 +908,7 @@ test('py3.7 can package flask with slim, slimPatterns & slimPatternsAppendDefaul }); test( - 'py3.7 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', + 'py3.9 can package flask with slim & dockerizePip & slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); @@ -937,7 +937,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { +test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -960,7 +960,7 @@ test('pipenv py3.7 can package flask with slim & slimPatterns & slimPatternsAppe t.end(); }); -test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { +test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); const path = npm(['pack', '../..']); @@ -983,7 +983,7 @@ test('poetry py3.7 can package flask with slim & slimPatterns & slimPatternsAppe t.end(); }); -test('poetry py3.7 can package flask with package individually option', async (t) => { +test('poetry py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/poetry_individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -998,7 +998,7 @@ test('poetry py3.7 can package flask 
with package individually option', async (t t.end(); }); -test('py3.7 can package flask with package individually option', async (t) => { +test('py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1080,7 +1080,7 @@ test('py3.7 can package flask with package individually option', async (t) => { t.end(); }); -test('py3.7 can package flask with package individually & slim option', async (t) => { +test('py3.9 can package flask with package individually & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1162,7 +1162,7 @@ test('py3.7 can package flask with package individually & slim option', async (t t.end(); }); -test('py3.7 can package only requirements of module', async (t) => { +test('py3.9 can package only requirements of module', async (t) => { process.chdir('tests/individually'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1218,7 +1218,7 @@ test('py3.7 can package only requirements of module', async (t) => { t.end(); }); -test('py3.7 can package lambda-decorators using vendor and invidiually option', async (t) => { +test('py3.9 can package lambda-decorators using vendor and invidiually option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1373,7 +1373,8 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); -test('py3.7 uses download cache by default option', +test( + 'py3.9 uses download cache by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1381,7 +1382,7 @@ test('py3.7 uses download cache by default option', sls(['package'], { env: {} }); const cachepath = getUserCachePath(); t.true( - pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), + pathExistsSync(`${cachepath}${sep}downloadCacheslspyc${sep}http`), 'cache directory exists' ); t.end(); @@ -1389,7 +1390,8 @@ test('py3.7 uses download cache by default option', { skip: true } ); -test('py3.7 uses download cache by default', +test( + 'py3.9 uses download cache by default', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1405,7 +1407,7 @@ test('py3.7 uses download cache by default', ); test( - 'py3.7 uses download cache with dockerizePip option', + 'py3.9 uses download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1423,7 +1425,7 @@ test( ); test( - 'py3.7 uses download cache with dockerizePip by default option', + 'py3.9 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1441,7 +1443,8 @@ test( { skip: true } ); -test('py3.7 uses static and download cache', +test( + 'py3.9 uses static and download cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1466,7 +1469,7 @@ test('py3.7 uses static and download cache', ); test( - 'py3.7 uses static and download cache with dockerizePip option', + 'py3.9 uses static and download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1490,7 +1493,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 uses static cache', async (t) => { +test('py3.9 uses static cache', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); 
npm(['i', path]); @@ -1511,7 +1514,7 @@ test('py3.7 uses static cache', async (t) => { '.completed_requirements exists in static-cache' ); - // py3.7 checking that static cache actually pulls from cache (by poisoning it) + // py3.9 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -1526,7 +1529,7 @@ test('py3.7 uses static cache', async (t) => { t.end(); }); -test('py3.7 uses static cache with cacheLocation option', async (t) => { +test('py3.9 uses static cache with cacheLocation option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1550,7 +1553,7 @@ test('py3.7 uses static cache with cacheLocation option', async (t) => { }); test( - 'py3.7 uses static cache with dockerizePip & slim option', + 'py3.9 uses static cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1572,7 +1575,7 @@ test( '.completed_requirements exists in static-cache' ); - // py3.7 checking that static cache actually pulls from cache (by poisoning it) + // py3.9 checking that static cache actually pulls from cache (by poisoning it) writeFileSync( `${cachepath}${sep}${cacheFolderHash}_${arch}_slspyc${sep}injected_file_is_bad_form`, 'injected new file into static cache folder' @@ -1595,7 +1598,7 @@ test( ); test( - 'py3.7 uses download cache with dockerizePip & slim option', + 'py3.9 uses download cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); @@ -1620,7 +1623,7 @@ test( { skip: !canUseDocker() || brokenOn('win32') } ); -test('py3.7 can ignore functions defined with `image`', async (t) => { +test('py3.9 can ignore functions defined with `image`', async (t) => { process.chdir('tests/base'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1646,7 +1649,7 @@ test('py3.7 can ignore functions defined with `image`', async (t) => { t.end(); }); -test('poetry py3.7 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { +test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoetryLockFile is set to true', async (t) => { copySync('tests/poetry', 'tests/base with a space'); process.chdir('tests/base with a space'); removeSync('poetry.lock'); @@ -1678,7 +1681,7 @@ test('works with provider.runtime not being python', async (t) => { t.end(); }); -test('poetry py3.7 packages additional optional packages', async (t) => { +test('poetry py3.9 packages additional optional packages', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1694,7 +1697,7 @@ test('poetry py3.7 packages additional optional packages', async (t) => { t.end(); }); -test('poetry py3.7 skips additional optional packages specified in withoutGroups', async (t) => { +test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => { process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); @@ -1711,7 +1714,7 @@ test('poetry py3.7 skips additional optional packages specified in withoutGroups t.end(); }); -test('poetry py3.7 only installs optional packages specified in onlyGroups', async (t) => { +test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => { 
process.chdir('tests/poetry_packages'); const path = npm(['pack', '../..']); npm(['i', path]); diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index ef48e901..a82187ff 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: ${env:runtime, 'python3.7'} + runtime: ${env:runtime, 'python3.9'} plugins: - serverless-python-requirements diff --git a/tests/individually/serverless.yml b/tests/individually/serverless.yml index d73d613a..6409532b 100644 --- a/tests/individually/serverless.yml +++ b/tests/individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test-indiv provider: name: aws - runtime: python3.7 + runtime: python3.9 package: individually: true diff --git a/tests/non_build_pyproject/serverless.yml b/tests/non_build_pyproject/serverless.yml index b0436e61..d1bbaee6 100644 --- a/tests/non_build_pyproject/serverless.yml +++ b/tests/non_build_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/non_poetry_pyproject/serverless.yml b/tests/non_poetry_pyproject/serverless.yml index 2b16790c..7338b10b 100644 --- a/tests/non_poetry_pyproject/serverless.yml +++ b/tests/non_poetry_pyproject/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/pipenv/serverless.yml b/tests/pipenv/serverless.yml index 315f6741..2b471526 100644 --- a/tests/pipenv/serverless.yml +++ b/tests/pipenv/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry/serverless.yml b/tests/poetry/serverless.yml index 2d032acd..d10c4997 100644 --- a/tests/poetry/serverless.yml +++ b/tests/poetry/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry_individually/serverless.yml b/tests/poetry_individually/serverless.yml index 527a2846..86dbb547 100644 --- a/tests/poetry_individually/serverless.yml +++ b/tests/poetry_individually/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements diff --git a/tests/poetry_packages/serverless.yml b/tests/poetry_packages/serverless.yml index 03652968..c6972ede 100644 --- a/tests/poetry_packages/serverless.yml +++ b/tests/poetry_packages/serverless.yml @@ -2,7 +2,7 @@ service: sls-py-req-test provider: name: aws - runtime: python3.7 + runtime: python3.9 plugins: - serverless-python-requirements From 16c0e68b850d62eb1ce127b9c3886857ca955574 Mon Sep 17 00:00:00 2001 From: Carl Walsh Date: Mon, 1 Jan 2024 10:59:03 -0800 Subject: [PATCH 78/90] docs: Describe `cleanCache` in README (#794) --- README.md | 25 ++++++++++++++++++++----- 1 file changed, 20 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 37a9d1df..91172bf9 100644 --- a/README.md +++ b/README.md @@ -450,12 +450,27 @@ functions: vendor: ./hello-vendor # The option is also available at the function level ``` -## Manual invocations +## Manual invocation -The `.requirements` and `requirements.zip`(if using zip support) files are left -behind to speed things up on subsequent deploys. 
To clean them up, run -`sls requirements clean`. You can also create them (and `unzip_requirements` if -using zip support) manually with `sls requirements install`. +The `.requirements` and `requirements.zip` (if using zip support) files are left +behind to speed things up on subsequent deploys. To clean them up, run: + +```plaintext +sls requirements clean +``` + +You can also create them (and `unzip_requirements` if +using zip support) manually with: + +```plaintext +sls requirements install +``` + +The pip download/static cache is outside the serverless folder, and should be manually cleaned when i.e. changing python versions: + +```plaintext +sls requirements cleanCache +``` ## Invalidate requirements caches on package From 1b0faaeb6aadd2bc4b1b53526e35298a98d00aca Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 14 Jan 2024 22:46:13 +0100 Subject: [PATCH 79/90] feat: Support Scaleway provider (#812) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Andy Méry --- README.md | 25 +++++++++++++++++ index.js | 5 +++- lib/inject.js | 23 +++++++++++++-- test.js | 17 ++++++++++++ tests/scaleway_provider/_slimPatterns.yml | 2 ++ tests/scaleway_provider/handler.py | 5 ++++ tests/scaleway_provider/package.json | 15 ++++++++++ tests/scaleway_provider/requirements.txt | 3 ++ tests/scaleway_provider/serverless.yml | 34 +++++++++++++++++++++++ 9 files changed, 126 insertions(+), 3 deletions(-) create mode 100644 tests/scaleway_provider/_slimPatterns.yml create mode 100644 tests/scaleway_provider/handler.py create mode 100644 tests/scaleway_provider/package.json create mode 100644 tests/scaleway_provider/requirements.txt create mode 100644 tests/scaleway_provider/serverless.yml diff --git a/README.md b/README.md index 91172bf9..d9127adb 100644 --- a/README.md +++ b/README.md @@ -580,6 +580,31 @@ package: - '**' ``` +## Custom Provider Support + +### Scaleway + +This plugin is compatible with the [Scaleway Serverless Framework Plugin](https://github.com/scaleway/serverless-scaleway-functions) to package dependencies for Python functions deployed on [Scaleway](https://www.scaleway.com/en/serverless-functions/). To use it, add the following to your `serverless.yml`: + +```yaml +provider: + name: scaleway + runtime: python311 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions +``` + +To handle native dependencies, it's recommended to use the Docker builder with the image provided by Scaleway: + +```yaml +custom: + pythonRequirements: + # Can use any Python version supported by Scaleway + dockerImage: rg.fr-par.scw.cloud/scwfunctionsruntimes-public/python-dep:3.11 +``` + ## Contributors - [@dschep](https://github.com/dschep) - Original developer diff --git a/index.js b/index.js index 246b121e..25cc34cd 100644 --- a/index.js +++ b/index.js @@ -72,7 +72,10 @@ class ServerlessPythonRequirements { ) { options.pythonBin = 'python'; } - + if (/python3[0-9]+/.test(options.pythonBin)) { + // "google" and "scaleway" providers' runtimes uses python3XX + options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1'); + } if (options.dockerizePip === 'non-linux') { options.dockerizePip = process.platform !== 'linux'; } diff --git a/lib/inject.js b/lib/inject.js index ea20e58d..12267376 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -13,10 +13,16 @@ BbPromise.promisifyAll(fse); * Inject requirements into packaged application. 
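// A minimal, standalone sketch (helper name assumed) of the pythonBin
// normalization shown in index.js above: providers such as Google and Scaleway
// declare runtimes like "python311", while pip expects a dotted binary name
// such as "python3.11".
const normalizePythonBin = (pythonBin) =>
  /python3[0-9]+/.test(pythonBin)
    ? pythonBin.replace(/3([0-9]+)/, '3.$1')
    : pythonBin;

console.log(normalizePythonBin('python311')); // 'python3.11'
console.log(normalizePythonBin('python39')); // 'python3.9'
console.log(normalizePythonBin('python3.9')); // 'python3.9' (already dotted, unchanged)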
* @param {string} requirementsPath requirements folder path * @param {string} packagePath target package path + * @param {string} injectionRelativePath installation directory in target package * @param {Object} options our options object * @return {Promise} the JSZip object constructed. */ -function injectRequirements(requirementsPath, packagePath, options) { +function injectRequirements( + requirementsPath, + packagePath, + injectionRelativePath, + options +) { const noDeploy = new Set(options.noDeploy || []); return fse @@ -29,7 +35,13 @@ function injectRequirements(requirementsPath, packagePath, options) { dot: true, }) ) - .map((file) => [file, path.relative(requirementsPath, file)]) + .map((file) => [ + file, + path.join( + injectionRelativePath, + path.relative(requirementsPath, file) + ), + ]) .filter( ([file, relativeFile]) => !file.endsWith('/') && @@ -101,6 +113,11 @@ async function injectAllRequirements(funcArtifact) { this.serverless.cli.log('Injecting required Python packages to package...'); } + let injectionRelativePath = '.'; + if (this.serverless.service.provider.name == 'scaleway') { + injectionRelativePath = 'package'; + } + try { if (this.serverless.service.package.individually) { await BbPromise.resolve(this.targetFuncs) @@ -138,6 +155,7 @@ async function injectAllRequirements(funcArtifact) { : injectRequirements( path.join('.serverless', func.module, 'requirements'), func.package.artifact, + injectionRelativePath, this.options ); }); @@ -145,6 +163,7 @@ async function injectAllRequirements(funcArtifact) { await injectRequirements( path.join('.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, + injectionRelativePath, this.options ); } diff --git a/test.js b/test.js index f4afca45..fad21273 100644 --- a/test.js +++ b/test.js @@ -1729,3 +1729,20 @@ test('poetry py3.9 only installs optional packages specified in onlyGroups', asy t.true(zipfiles.includes(`boto3${sep}__init__.py`), 'boto3 is packaged'); t.end(); }); + +test('py3.7 injects dependencies into `package` folder when using scaleway provider', async (t) => { + process.chdir('tests/scaleway_provider'); + const path = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`package${sep}flask${sep}__init__.py`), + 'flask is packaged' + ); + t.true( + zipfiles.includes(`package${sep}boto3${sep}__init__.py`), + 'boto3 is packaged' + ); + t.end(); +}); diff --git a/tests/scaleway_provider/_slimPatterns.yml b/tests/scaleway_provider/_slimPatterns.yml new file mode 100644 index 00000000..443af9a0 --- /dev/null +++ b/tests/scaleway_provider/_slimPatterns.yml @@ -0,0 +1,2 @@ +slimPatterns: + - '**/__main__.py' diff --git a/tests/scaleway_provider/handler.py b/tests/scaleway_provider/handler.py new file mode 100644 index 00000000..5e2e67ff --- /dev/null +++ b/tests/scaleway_provider/handler.py @@ -0,0 +1,5 @@ +import requests + + +def hello(event, context): + return requests.get('https://httpbin.org/get').json() diff --git a/tests/scaleway_provider/package.json b/tests/scaleway_provider/package.json new file mode 100644 index 00000000..d54b88e0 --- /dev/null +++ b/tests/scaleway_provider/package.json @@ -0,0 +1,15 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + 
"serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz", + "serverless-scaleway-functions": "^0.4.8" + } +} diff --git a/tests/scaleway_provider/requirements.txt b/tests/scaleway_provider/requirements.txt new file mode 100644 index 00000000..23bfb7a6 --- /dev/null +++ b/tests/scaleway_provider/requirements.txt @@ -0,0 +1,3 @@ +flask==0.12.5 +bottle +boto3 diff --git a/tests/scaleway_provider/serverless.yml b/tests/scaleway_provider/serverless.yml new file mode 100644 index 00000000..5d827bdf --- /dev/null +++ b/tests/scaleway_provider/serverless.yml @@ -0,0 +1,34 @@ +service: sls-py-req-test + +configValidationMode: off + +provider: + name: scaleway + runtime: python39 + +plugins: + - serverless-python-requirements + - serverless-scaleway-functions + +custom: + pythonRequirements: + zip: ${env:zip, self:custom.defaults.zip} + slim: ${env:slim, self:custom.defaults.slim} + slimPatterns: ${file(./slimPatterns.yml):slimPatterns, self:custom.defaults.slimPatterns} + slimPatternsAppendDefaults: ${env:slimPatternsAppendDefaults, self:custom.defaults.slimPatternsAppendDefaults} + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + defaults: + zip: false + slimPatterns: false + slimPatternsAppendDefaults: true + slim: false + dockerizePip: false + +package: + patterns: + - '!**/*' + - 'handler.py' + +functions: + hello: + handler: handler.hello From 787b4791306e9a3ded5f0177c304cfbce081c119 Mon Sep 17 00:00:00 2001 From: Justin Lyons Date: Sat, 10 Feb 2024 04:26:35 -0500 Subject: [PATCH 80/90] feat: Improved pip failure logging (#813) Co-authored-by: Justin Lyons --- lib/pip.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/pip.js b/lib/pip.js index 149c0285..060ce829 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -423,8 +423,8 @@ async function installRequirements(targetFolder, pluginInstance) { } if (log) { - log.info(`Stdout: ${e.stdoutBuffer}`); - log.info(`Stderr: ${e.stderrBuffer}`); + log.error(`Stdout: ${e.stdoutBuffer}`); + log.error(`Stderr: ${e.stderrBuffer}`); } else { serverless.cli.log(`Stdout: ${e.stdoutBuffer}`); serverless.cli.log(`Stderr: ${e.stderrBuffer}`); From 27b70f4d6a7e43fd0e9711bbb56752fee2762901 Mon Sep 17 00:00:00 2001 From: Stijn IJzermans Date: Sat, 10 Feb 2024 10:27:37 +0100 Subject: [PATCH 81/90] fix: Ensure proper support for mixed runtimes and architectures (#815) * feat: Use function runtime & arch for docker * docs: Update readme for python3.9 * feat: Do not zip req for non-py functions * ci: Bump internal package version / python version * fix: Rename mixed test name to be more descriptive --------- Co-authored-by: Stijn IJzermans --- .python-version | 2 +- CONTRIBUTING.md | 10 ++- index.js | 19 +++-- lib/pip.js | 13 +-- lib/zip.js | 5 ++ test.js | 83 +++++++++++++++++++ tests/base/package.json | 2 +- tests/individually/package.json | 2 +- .../module1/handler1.ts | 3 + .../module2/handler2.py | 6 ++ .../module2/requirements.txt | 1 + tests/individually_mixed_runtime/package.json | 14 ++++ .../requirements-common.txt | 1 + .../individually_mixed_runtime/serverless.yml | 39 +++++++++ tests/non_build_pyproject/package.json | 2 +- tests/non_poetry_pyproject/package.json | 2 +- tests/pipenv/package.json | 2 +- tests/poetry/package.json | 2 +- tests/poetry_individually/package.json | 2 +- tests/poetry_packages/package.json | 2 +- 20 files changed, 187 insertions(+), 25 deletions(-) create mode 100644 tests/individually_mixed_runtime/module1/handler1.ts create mode 100644 
tests/individually_mixed_runtime/module2/handler2.py create mode 100644 tests/individually_mixed_runtime/module2/requirements.txt create mode 100644 tests/individually_mixed_runtime/package.json create mode 100644 tests/individually_mixed_runtime/requirements-common.txt create mode 100644 tests/individually_mixed_runtime/serverless.yml diff --git a/.python-version b/.python-version index 475ba515..bd28b9c5 100644 --- a/.python-version +++ b/.python-version @@ -1 +1 @@ -3.7 +3.9 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 4616858b..900a425b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,12 +13,14 @@ Welcome, and thanks in advance for your help! ## Setup Pre-Reqs: -* Python 3.7 -* [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.7) -* Perl (used in the tests) -* Node v14 or v16 + +- Python 3.9 +- [poetry](https://python-poetry.org/docs/) (if you use multiple versions of Python be sure to install it with python 3.9) +- Perl (used in the tests) +- Node v14 or v16 Then, to begin development: + 1. fork the repository 2. `npm install -g serverless@` (check the peer dependencies in the root `package.json` file for the version) 3. run `npm install` in its root folder diff --git a/index.js b/index.js index 25cc34cd..81e50981 100644 --- a/index.js +++ b/index.js @@ -106,13 +106,8 @@ class ServerlessPythonRequirements { throw new Error( 'Python Requirements: you can provide a dockerImage or a dockerFile option, not both.' ); - } else if (!options.dockerFile) { - // If no dockerFile is provided, use default image - const architecture = - this.serverless.service.provider.architecture || 'x86_64'; - const defaultImage = `public.ecr.aws/sam/build-${this.serverless.service.provider.runtime}:latest-${architecture}`; - options.dockerImage = options.dockerImage || defaultImage; } + if (options.layer) { // If layer was set as a boolean, set it to an empty object to use the layer defaults. if (options.layer === true) { @@ -188,6 +183,18 @@ class ServerlessPythonRequirements { this.commands.requirements.type = 'container'; } + this.dockerImageForFunction = (funcOptions) => { + const runtime = + funcOptions.runtime || this.serverless.service.provider.runtime; + + const architecture = + funcOptions.architecture || + this.serverless.service.provider.architecture || + 'x86_64'; + const defaultImage = `public.ecr.aws/sam/build-${runtime}:latest-${architecture}`; + return this.options.dockerImage || defaultImage; + }; + const isFunctionRuntimePython = (args) => { // If functionObj.runtime is undefined, python. 
if (!args[1].functionObj || !args[1].functionObj.runtime) { diff --git a/lib/pip.js b/lib/pip.js index 060ce829..16a802b0 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -125,12 +125,13 @@ async function pipAcceptsSystem(pythonBin, pluginInstance) { /** * Install requirements described from requirements in the targetFolder into that same targetFolder * @param {string} targetFolder - * @param {Object} serverless - * @param {Object} options + * @param {Object} pluginInstance + * @param {Object} funcOptions * @return {undefined} */ -async function installRequirements(targetFolder, pluginInstance) { - const { options, serverless, log, progress } = pluginInstance; +async function installRequirements(targetFolder, pluginInstance, funcOptions) { + const { options, serverless, log, progress, dockerImageForFunction } = + pluginInstance; const targetRequirementsTxt = path.join(targetFolder, 'requirements.txt'); let installProgress; @@ -253,7 +254,7 @@ async function installRequirements(targetFolder, pluginInstance) { buildDockerImageProgress && buildDockerImageProgress.remove(); } } else { - dockerImage = options.dockerImage; + dockerImage = dockerImageForFunction(funcOptions); } if (log) { log.info(`Docker Image: ${dockerImage}`); @@ -691,7 +692,7 @@ async function installRequirementsIfNeeded( fse.copySync(slsReqsTxt, path.join(workingReqsFolder, 'requirements.txt')); // Then install our requirements from this folder - await installRequirements(workingReqsFolder, pluginInstance); + await installRequirements(workingReqsFolder, pluginInstance, funcOptions); // Copy vendor libraries to requirements folder if (options.vendor) { diff --git a/lib/zip.js b/lib/zip.js index 4b652f98..3c21bbbf 100644 --- a/lib/zip.js +++ b/lib/zip.js @@ -114,6 +114,11 @@ function packRequirements() { if (this.options.zip) { if (this.serverless.service.package.individually) { return BbPromise.resolve(this.targetFuncs) + .filter((func) => { + return ( + func.runtime || this.serverless.service.provider.runtime + ).match(/^python.*/); + }) .map((f) => { if (!get(f, 'module')) { set(f, ['module'], '.'); diff --git a/test.js b/test.js index fad21273..b97f3fdc 100644 --- a/test.js +++ b/test.js @@ -1373,6 +1373,89 @@ test( { skip: !canUseDocker() || process.platform === 'win32' } ); +test( + 'py3.9 can package flask running in docker with module runtime & architecture of function', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const path = npm(['pack', '../..']); + npm(['i', path]); + + sls(['package'], { + env: { dockerizePip: 'true' }, + }); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.true( + zipfiles_hello2.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2' + ); + }, + { + skip: !canUseDocker() || process.platform === 'win32', + } +); + +test( + 'py3.9 can package flask succesfully when using mixed architecture, docker and zipping', + async (t) => { + process.chdir('tests/individually_mixed_runtime'); + const path = npm(['pack', '../..']); + + npm(['i', path]); + sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); + + const zipfiles_hello = await listZipFiles('.serverless/hello1.zip'); + t.true( + zipfiles_hello.includes(`module1${sep}handler1.ts`), + 'handler1.ts is packaged in module dir for hello1' + ); + t.false( + zipfiles_hello.includes('handler2.py'), + 
'handler2.py is NOT packaged at root level in function hello1' + ); + t.false( + zipfiles_hello.includes(`flask${sep}__init__.py`), + 'flask is NOT packaged in function hello1' + ); + + const zipfiles_hello2 = await listZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + const zippedReqs = await listRequirementsZipFiles( + '.serverless/module2-sls-py-req-test-indiv-mixed-runtime-dev-hello2.zip' + ); + t.true( + zipfiles_hello2.includes('handler2.py'), + 'handler2.py is packaged at root level in function hello2' + ); + t.false( + zipfiles_hello2.includes(`module1${sep}handler1.ts`), + 'handler1.ts is NOT included at module1 level in hello2' + ); + t.false( + zipfiles_hello2.includes(`pyaml${sep}__init__.py`), + 'pyaml is NOT packaged in function hello2' + ); + t.false( + zipfiles_hello2.includes(`boto3${sep}__init__.py`), + 'boto3 is NOT included in zipfile' + ); + t.true( + zippedReqs.includes(`flask${sep}__init__.py`), + 'flask is packaged in function hello2 in requirements.zip' + ); + + t.end(); + }, + { skip: !canUseDocker() || process.platform === 'win32' } +); + test( 'py3.9 uses download cache by default option', async (t) => { diff --git a/tests/base/package.json b/tests/base/package.json index 781a4259..b07744c9 100644 --- a/tests/base/package.json +++ b/tests/base/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually/package.json b/tests/individually/package.json index 781a4259..b07744c9 100644 --- a/tests/individually/package.json +++ b/tests/individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/individually_mixed_runtime/module1/handler1.ts b/tests/individually_mixed_runtime/module1/handler1.ts new file mode 100644 index 00000000..b8062f8b --- /dev/null +++ b/tests/individually_mixed_runtime/module1/handler1.ts @@ -0,0 +1,3 @@ +function hello() { + return "hello" +} diff --git a/tests/individually_mixed_runtime/module2/handler2.py b/tests/individually_mixed_runtime/module2/handler2.py new file mode 100644 index 00000000..d9f5c465 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/handler2.py @@ -0,0 +1,6 @@ +import flask + +def hello(event, context): + return { + 'status': 200, + } diff --git a/tests/individually_mixed_runtime/module2/requirements.txt b/tests/individually_mixed_runtime/module2/requirements.txt new file mode 100644 index 00000000..c09d0264 --- /dev/null +++ b/tests/individually_mixed_runtime/module2/requirements.txt @@ -0,0 +1 @@ +flask==2.0.3 diff --git a/tests/individually_mixed_runtime/package.json b/tests/individually_mixed_runtime/package.json new file mode 100644 index 00000000..b07744c9 --- /dev/null +++ b/tests/individually_mixed_runtime/package.json @@ -0,0 +1,14 @@ +{ + "name": "example", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "author": "", + "license": "ISC", + "dependencies": { + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" + } +} diff --git a/tests/individually_mixed_runtime/requirements-common.txt 
b/tests/individually_mixed_runtime/requirements-common.txt new file mode 100644 index 00000000..30ddf823 --- /dev/null +++ b/tests/individually_mixed_runtime/requirements-common.txt @@ -0,0 +1 @@ +boto3 diff --git a/tests/individually_mixed_runtime/serverless.yml b/tests/individually_mixed_runtime/serverless.yml new file mode 100644 index 00000000..7c602239 --- /dev/null +++ b/tests/individually_mixed_runtime/serverless.yml @@ -0,0 +1,39 @@ +service: sls-py-req-test-indiv-mixed-runtime + +provider: + name: aws + runtime: nodejs18.x + architecture: arm64 + +package: + individually: true + +custom: + pythonRequirements: + dockerizePip: ${env:dockerizePip, self:custom.defaults.dockerizePip} + zip: ${env:zip, self:custom.defaults.zip} + defaults: + dockerizePip: false + zip: false + +functions: + hello1: + handler: handler1.hello + architecture: x86_64 + package: + patterns: + - '!**' + - 'module1/**' + + hello2: + handler: handler2.hello + module: module2 + runtime: python3.9 + architecture: x86_64 + package: + patterns: + - '!**' + - 'module2/**' + +plugins: + - serverless-python-requirements diff --git a/tests/non_build_pyproject/package.json b/tests/non_build_pyproject/package.json index 781a4259..b07744c9 100644 --- a/tests/non_build_pyproject/package.json +++ b/tests/non_build_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/non_poetry_pyproject/package.json b/tests/non_poetry_pyproject/package.json index 781a4259..b07744c9 100644 --- a/tests/non_poetry_pyproject/package.json +++ b/tests/non_poetry_pyproject/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/pipenv/package.json b/tests/pipenv/package.json index 781a4259..b07744c9 100644 --- a/tests/pipenv/package.json +++ b/tests/pipenv/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry/package.json b/tests/poetry/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry/package.json +++ b/tests/poetry/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry_individually/package.json b/tests/poetry_individually/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry_individually/package.json +++ b/tests/poetry_individually/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - "serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } diff --git a/tests/poetry_packages/package.json b/tests/poetry_packages/package.json index 781a4259..b07744c9 100644 --- a/tests/poetry_packages/package.json +++ b/tests/poetry_packages/package.json @@ -9,6 +9,6 @@ "author": "", "license": "ISC", "dependencies": { - 
"serverless-python-requirements": "file:serverless-python-requirements-6.0.0.tgz" + "serverless-python-requirements": "file:serverless-python-requirements-6.0.1.tgz" } } From 549aba00ff320e6ddabb6f0be44999e7922fc616 Mon Sep 17 00:00:00 2001 From: Jackson Borneman <11304426+jax-b@users.noreply.github.com> Date: Sat, 24 Feb 2024 18:41:31 -0500 Subject: [PATCH 82/90] test: Bump node version in test configs --- tests/base/serverless.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/base/serverless.yml b/tests/base/serverless.yml index a82187ff..87423210 100644 --- a/tests/base/serverless.yml +++ b/tests/base/serverless.yml @@ -47,7 +47,7 @@ functions: handler: handler.hello hello3: handler: handler.hello - runtime: nodejs8.10 + runtime: nodejs14.x hello4: handler: fn2_handler.hello module: fn2 From fa9ac03ea7ffa3b583aaf69bd8e615ec112cabcc Mon Sep 17 00:00:00 2001 From: hayden Date: Mon, 26 Feb 2024 00:42:20 +0900 Subject: [PATCH 83/90] Add docker rootless feature flag and its implementation for supporting docke rootless environment (#818) --- index.js | 1 + lib/pip.js | 33 +++++++++++++++++++++------------ 2 files changed, 22 insertions(+), 12 deletions(-) diff --git a/index.js b/index.js index 81e50981..ca8b191f 100644 --- a/index.js +++ b/index.js @@ -50,6 +50,7 @@ class ServerlessPythonRequirements { dockerBuildCmdExtraArgs: [], dockerRunCmdExtraArgs: [], dockerExtraFiles: [], + dockerRootless: false, useStaticCache: true, useDownloadCache: true, cacheLocation: false, diff --git a/lib/pip.js b/lib/pip.js index 16a802b0..40140d36 100644 --- a/lib/pip.js +++ b/lib/pip.js @@ -328,12 +328,17 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) { } // Install requirements with pip // Set the ownership of the current folder to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - '/var/task', - ]); + // If you use docker-rootless, you don't need to set the ownership + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + '/var/task', + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', '/var/task']); + } } else { // Use same user so --cache-dir works dockerCmd.push('-u', await getDockerUid(bindPath, pluginInstance)); @@ -346,12 +351,16 @@ async function installRequirements(targetFolder, pluginInstance, funcOptions) { if (process.platform === 'linux') { if (options.useDownloadCache) { // Set the ownership of the download cache dir back to user - pipCmds.push([ - 'chown', - '-R', - `${process.getuid()}:${process.getgid()}`, - dockerDownloadCacheDir, - ]); + if (options.dockerRootless !== true) { + pipCmds.push([ + 'chown', + '-R', + `${process.getuid()}:${process.getgid()}`, + dockerDownloadCacheDir, + ]); + } else { + pipCmds.push(['chown', '-R', '0:0', dockerDownloadCacheDir]); + } } } From ca617bb3bc503db14f68d4e41cf532f798b40704 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 28 Mar 2024 00:05:29 +0100 Subject: [PATCH 84/90] chore: Bump dependencies --- package.json | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index c420e4d6..6ec63fa3 100644 --- a/package.json +++ b/package.json @@ -50,7 +50,7 @@ }, "devDependencies": { "cross-spawn": "*", - "eslint": "^8.52.0", + "eslint": "^8.57.0", "git-list-updated": "^1.2.1", "github-release-from-cc-changelog": "^2.3.0", "lodash": "^4.17.21", @@ -72,7 +72,7 @@ "lodash.uniqby": "^4.7.0", "lodash.values": "^4.3.0", "rimraf": "^3.0.2", - 
"semver": "^7.5.4", + "semver": "^7.6.0", "set-value": "^4.1.0", "sha256-file": "1.0.0", "shell-quote": "^1.8.1" From 840d28dd319340f61aa9627cb4ca68af643780eb Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Thu, 28 Mar 2024 00:09:48 +0100 Subject: [PATCH 85/90] chore: Release v6.1.0 --- CHANGELOG.md | 11 +++++++++++ package.json | 2 +- 2 files changed, 12 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f8667134..c63f9d06 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,17 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) + +### Features + +- Support Scaleway provider ([#812](https://github.com/UnitedIncome/serverless-python-requirements/issues/812)) ([1b0faae](https://github.com/UnitedIncome/serverless-python-requirements/commit/1b0faaeb6aadd2bc4b1b53526e35298a98d00aca)) ([Andy Méry](https://github.com/cyclimse)) +- Improved pip failure logging ([#813](https://github.com/UnitedIncome/serverless-python-requirements/issues/813)) ([787b479](https://github.com/UnitedIncome/serverless-python-requirements/commit/787b4791306e9a3ded5f0177c304cfbce081c119)) ([Justin Lyons](https://github.com/babyhuey)) + +### Bug Fixes + +- Ensure proper support for mixed runtimes and architectures ([#815](https://github.com/UnitedIncome/serverless-python-requirements/issues/815)) ([27b70f4](https://github.com/UnitedIncome/serverless-python-requirements/commit/27b70f4d6a7e43fd0e9711bbb56752fee2762901)) ([Stijn IJzermans](https://github.com/stijzermans)) + ### [6.0.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.0...v6.0.1) (2023-10-22) ### Bug Fixes diff --git a/package.json b/package.json index 6ec63fa3..3612c2cb 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.0.1", + "version": "6.1.0", "engines": { "node": ">=12.0" }, From 6e806c09686e57af93904af1d46b3b20aa62a202 Mon Sep 17 00:00:00 2001 From: Piotr Grzesik Date: Sun, 7 Apr 2024 15:28:52 +0200 Subject: [PATCH 86/90] ci: Update actions to latest versions --- .github/workflows/integrate.yml | 2 +- .github/workflows/publish.yml | 6 +++--- .github/workflows/validate.yml | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/.github/workflows/integrate.yml b/.github/workflows/integrate.yml index d2da520e..01fb27a3 100644 --- a/.github/workflows/integrate.yml +++ b/.github/workflows/integrate.yml @@ -15,7 +15,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # Ensure to have complete history of commits pushed with given push operation # It's loose and imperfect assumption that no more than 30 commits will be pushed at once diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 21d7cb71..0e3dc867 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -17,11 +17,11 @@ jobs: GITHUB_TOKEN: ${{ secrets.USER_GITHUB_TOKEN }} steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Retrieve node_modules from cache id: cacheNodeModules - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.npm @@ -29,7 +29,7 @@ jobs: key: npm-v18-${{ runner.os }}-refs/heads/master-${{ hashFiles('package.json') }} 
- name: Install Node.js and npm - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 18.x registry-url: https://registry.npmjs.org diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 61935c3e..8ab2b366 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -18,7 +18,7 @@ jobs: sls-version: [2, 3] steps: - name: Checkout repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: # For commitlint purpose ensure to have complete list of PR commits # It's loose and imperfect assumption that PR has no more than 30 commits @@ -33,7 +33,7 @@ jobs: - name: Retrieve dependencies from cache id: cacheNpm - uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ~/.npm @@ -44,12 +44,12 @@ jobs: npm-v18-${{ runner.os }}-refs/heads/master- - name: Set up Python 3.9 - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: 3.9 - name: Install Node.js and npm - uses: actions/setup-node@v1 + uses: actions/setup-node@v4 with: node-version: 18.x From d2e492f9a25b383f5acc6a926a45858c4f23ad5e Mon Sep 17 00:00:00 2001 From: Austen Date: Fri, 31 May 2024 12:00:39 -0700 Subject: [PATCH 87/90] chore: Update serverless dependency --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 3612c2cb..7a3b14a5 100644 --- a/package.json +++ b/package.json @@ -78,7 +78,7 @@ "shell-quote": "^1.8.1" }, "peerDependencies": { - "serverless": "^2.32 || 3" + "serverless": ">=2.32" }, "lint-staged": { "*.js": [ From 57f7c6b2189cb32272377117eb22ae6a4a4b4c7a Mon Sep 17 00:00:00 2001 From: Max Marze Date: Tue, 13 Aug 2024 13:28:33 -0400 Subject: [PATCH 88/90] chore: Release 6.1.1 (#840) * chore: Release 6.1.1 * fix: Add license key for tests * test: Update tests to support sls v4 --------- Co-authored-by: Tomasz Czubocha --- .github/workflows/validate.yml | 3 + CHANGELOG.md | 2 + package.json | 2 +- test.js | 179 +++++++++++++++++---------------- 4 files changed, 99 insertions(+), 87 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 8ab2b366..38f69696 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -87,3 +87,6 @@ jobs: fi - name: Unit tests run: npm test + env: + SERVERLESS_PLATFORM_STAGE: dev + SERVERLESS_LICENSE_KEY: ${{ secrets.SERVERLESS_LICENSE_KEY }} diff --git a/CHANGELOG.md b/CHANGELOG.md index c63f9d06..3771a1e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,8 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. 
+### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03) + ## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) ### Features diff --git a/package.json b/package.json index 7a3b14a5..07466e23 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.1.0", + "version": "6.1.1", "engines": { "node": ">=12.0" }, diff --git a/test.js b/test.js index b97f3fdc..488b500f 100644 --- a/test.js +++ b/test.js @@ -44,7 +44,10 @@ const mkCommand = `${quote([cmd, ...args])} failed with status code ${status}` ); } - return stdout && stdout.toString().trim(); + return { + stdout: stdout && stdout.toString().trim(), + stderr: stderr && stderr.toString().trim(), + }; }; const sls = mkCommand('sls'); @@ -201,9 +204,9 @@ test( 'dockerPrivateKey option correctly resolves docker command', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const stdout = sls(['package'], { + const { stderr } = sls(['package'], { noThrow: true, env: { dockerizePip: true, @@ -213,7 +216,7 @@ test( }, }); t.true( - stdout.includes( + stderr.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` ), 'docker command properly resolved' @@ -225,7 +228,7 @@ test( test('default pythonBin can package flask with default options', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -236,7 +239,7 @@ test('default pythonBin can package flask with default options', async (t) => { test('py3.9 packages have the same hash', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const fileHash = sha256File('.serverless/sls-py-req-test.zip'); @@ -251,7 +254,7 @@ test('py3.9 packages have the same hash', async (t) => { test('py3.9 can package flask with default options', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -264,7 +267,7 @@ test( 'py3.9 can package flask with hashes', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -281,7 +284,7 @@ test( test('py3.9 can package flask with nested', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -297,7 +300,7 @@ test('py3.9 can package flask with nested', async (t) => { test('py3.9 can package flask with zip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -315,7 +318,7 @@ test('py3.9 can package flask with zip option', 
async (t) => { test('py3.9 can package flask with slim option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -335,7 +338,7 @@ test('py3.9 can package flask with slim option', async (t) => { test('py3.9 can package flask with slim & slimPatterns options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -355,7 +358,7 @@ test('py3.9 can package flask with slim & slimPatterns options', async (t) => { test("py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -373,7 +376,7 @@ test("py3.9 doesn't package bottle with noDeploy option", async (t) => { test('py3.9 can package boto3 with editable', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -394,7 +397,7 @@ test( 'py3.9 can package flask with dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -409,7 +412,7 @@ test( 'py3.9 can package flask with slim & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -434,7 +437,7 @@ test( async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -458,7 +461,7 @@ test( 'py3.9 can package flask with zip & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -490,7 +493,7 @@ test( 'py3.9 can package flask with zip & slim & dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', zip: 'true', slim: 'true' }, @@ -522,7 +525,7 @@ test( test('pipenv py3.9 can package flask with default options', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = 
await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -537,7 +540,7 @@ test('pipenv py3.9 can package flask with default options', async (t) => { test('pipenv py3.9 can package flask with slim option', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -558,7 +561,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -578,7 +581,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns options', async (t test('pipenv py3.9 can package flask with zip option', async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -596,7 +599,7 @@ test('pipenv py3.9 can package flask with zip option', async (t) => { test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/pipenv'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -614,7 +617,7 @@ test("pipenv py3.9 doesn't package bottle with noDeploy option", async (t) => { test('non build pyproject.toml uses requirements.txt', async (t) => { process.chdir('tests/non_build_pyproject'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -625,7 +628,7 @@ test('non build pyproject.toml uses requirements.txt', async (t) => { test('non poetry pyproject.toml without requirements.txt packages handler only', async (t) => { process.chdir('tests/non_poetry_pyproject'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -635,7 +638,7 @@ test('non poetry pyproject.toml without requirements.txt packages handler only', test('poetry py3.9 can package flask with default options', async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -647,7 +650,7 @@ test('poetry py3.9 can package flask with default options', async (t) => { test('poetry py3.9 can package flask with slim option', async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -668,7 +671,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns options', async (t process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - 
const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -688,7 +691,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns options', async (t test('poetry py3.9 can package flask with zip option', async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { zip: 'true', pythonBin: getPythonBin(3) } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -706,7 +709,7 @@ test('poetry py3.9 can package flask with zip option', async (t) => { test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { process.chdir('tests/poetry'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -724,7 +727,7 @@ test("poetry py3.9 doesn't package bottle with noDeploy option", async (t) => { test('py3.9 can package flask with zip option and no explicit include', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl(['-p', '-i.bak', '-e', 's/include://', 'serverless.yml']); perl(['-p', '-i.bak', '-e', 's/^.*handler.py.*$//', 'serverless.yml']); @@ -744,7 +747,7 @@ test('py3.9 can package flask with zip option and no explicit include', async (t test('py3.9 can package lambda-decorators using vendor option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { vendor: './vendor' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -761,7 +764,7 @@ test( "Don't nuke execute perms", async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); const perm = '755'; npm(['i', path]); @@ -808,7 +811,7 @@ test( test('py3.9 can package flask in a project with a space in it', async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -822,7 +825,7 @@ test( async (t) => { copySync('tests/base', 'tests/base with a space'); process.chdir('tests/base with a space'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true' } }); const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); @@ -835,7 +838,7 @@ test( test('py3.9 supports custom file name with fileName option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); writeFileSync('puck', 'requests'); npm(['i', path]); sls(['package'], { env: { fileName: 'puck' } }); @@ -851,7 +854,7 @@ test('py3.9 supports custom file name with fileName option', async (t) => { test("py3.9 doesn't package bottle with zip option", async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); perl([ '-p', @@ -888,7 +891,7 
@@ test("py3.9 doesn't package bottle with zip option", async (t) => { test('py3.9 can package flask with slim, slimPatterns & slimPatternsAppendDefaults=false options', async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { slim: 'true', slimPatternsAppendDefaults: 'false' }, @@ -912,7 +915,7 @@ test( async (t) => { process.chdir('tests/base'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -940,7 +943,7 @@ test( test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/pipenv'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { @@ -963,7 +966,7 @@ test('pipenv py3.9 can package flask with slim & slimPatterns & slimPatternsAppe test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppendDefaults=false option', async (t) => { process.chdir('tests/poetry'); copySync('_slimPatterns.yml', 'slimPatterns.yml'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { @@ -985,7 +988,7 @@ test('poetry py3.9 can package flask with slim & slimPatterns & slimPatternsAppe test('poetry py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/poetry_individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); @@ -1000,7 +1003,7 @@ test('poetry py3.9 can package flask with package individually option', async (t test('py3.9 can package flask with package individually option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { individually: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); @@ -1082,7 +1085,7 @@ test('py3.9 can package flask with package individually option', async (t) => { test('py3.9 can package flask with package individually & slim option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { individually: 'true', slim: 'true' } }); const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); @@ -1164,7 +1167,7 @@ test('py3.9 can package flask with package individually & slim option', async (t test('py3.9 can package only requirements of module', async (t) => { process.chdir('tests/individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const zipfiles_hello = await listZipFiles( @@ -1220,7 +1223,7 @@ test('py3.9 can package only requirements of module', async (t) => { test('py3.9 can package lambda-decorators using vendor and invidiually option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { individually: 'true', vendor: './vendor' } }); 
const zipfiles_hello = await listZipFiles('.serverless/hello.zip'); @@ -1299,7 +1302,7 @@ test( "Don't nuke execute perms when using individually", async (t) => { process.chdir('tests/individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, ''); chmodSync(`module1${sep}foobar`, perm); @@ -1338,7 +1341,7 @@ test( "Don't nuke execute perms when using individually w/docker", async (t) => { process.chdir('tests/individually'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); const perm = '755'; writeFileSync(`module1${sep}foobar`, '', { mode: perm }); chmodSync(`module1${sep}foobar`, perm); @@ -1377,7 +1380,7 @@ test( 'py3.9 can package flask running in docker with module runtime & architecture of function', async (t) => { process.chdir('tests/individually_mixed_runtime'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { @@ -1405,7 +1408,7 @@ test( 'py3.9 can package flask succesfully when using mixed architecture, docker and zipping', async (t) => { process.chdir('tests/individually_mixed_runtime'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', zip: 'true' } }); @@ -1460,7 +1463,7 @@ test( 'py3.9 uses download cache by default option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const cachepath = getUserCachePath(); @@ -1477,7 +1480,7 @@ test( 'py3.9 uses download cache by default', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { cacheLocation: '.requirements-cache' } }); t.true( @@ -1493,7 +1496,7 @@ test( 'py3.9 uses download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); @@ -1511,7 +1514,7 @@ test( 'py3.9 uses download cache with dockerizePip by default option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', cacheLocation: '.requirements-cache' }, @@ -1530,7 +1533,7 @@ test( 'py3.9 uses static and download cache', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const cachepath = getUserCachePath(); @@ -1555,7 +1558,7 @@ test( 'py3.9 uses static and download cache with dockerizePip option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true' } }); const cachepath = getUserCachePath(); @@ -1578,7 +1581,7 @@ test( test('py3.9 uses static cache', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: {} }); const cachepath = 
getUserCachePath(); @@ -1614,7 +1617,7 @@ test('py3.9 uses static cache', async (t) => { test('py3.9 uses static cache with cacheLocation option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); const cachepath = '.requirements-cache'; sls(['package'], { env: { cacheLocation: cachepath } }); @@ -1639,7 +1642,7 @@ test( 'py3.9 uses static cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); @@ -1684,7 +1687,7 @@ test( 'py3.9 uses download cache with dockerizePip & slim option', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { dockerizePip: 'true', slim: 'true' } }); const cachepath = getUserCachePath(); @@ -1708,7 +1711,7 @@ test( test('py3.9 can ignore functions defined with `image`', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { individually: 'true' } }); t.true(pathExistsSync('.serverless/hello.zip'), 'function hello is packaged'); @@ -1737,14 +1740,14 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe process.chdir('tests/base with a space'); removeSync('poetry.lock'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const stdout = sls(['package'], { + const { stderr } = sls(['package'], { env: { requirePoetryLockFile: 'true', slim: 'true' }, noThrow: true, }); t.true( - stdout.includes( + stderr.includes( 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' ), 'flag works and error is properly reported' @@ -1754,9 +1757,9 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe test('works with provider.runtime not being python', async (t) => { process.chdir('tests/base'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - sls(['package'], { env: { runtime: 'nodejs12.x' } }); + sls(['package'], { env: { runtime: 'nodejs20.x' } }); t.true( pathExistsSync('.serverless/sls-py-req-test.zip'), 'sls-py-req-test is packaged' @@ -1766,7 +1769,7 @@ test('works with provider.runtime not being python', async (t) => { test('poetry py3.9 packages additional optional packages', async (t) => { process.chdir('tests/poetry_packages'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -1782,7 +1785,7 @@ test('poetry py3.9 packages additional optional packages', async (t) => { test('poetry py3.9 skips additional optional packages specified in withoutGroups', async (t) => { process.chdir('tests/poetry_packages'); - const path = npm(['pack', '../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -1799,7 +1802,7 @@ test('poetry py3.9 skips additional optional packages specified in withoutGroups test('poetry py3.9 only installs optional packages specified in onlyGroups', async (t) => { process.chdir('tests/poetry_packages'); - const path = npm(['pack', 
'../..']); + const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); sls(['package'], { env: { @@ -1813,19 +1816,23 @@ test('poetry py3.9 only installs optional packages specified in onlyGroups', asy t.end(); }); -test('py3.7 injects dependencies into `package` folder when using scaleway provider', async (t) => { - process.chdir('tests/scaleway_provider'); - const path = npm(['pack', '../..']); - npm(['i', path]); - sls(['package'], { env: {} }); - const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); - t.true( - zipfiles.includes(`package${sep}flask${sep}__init__.py`), - 'flask is packaged' - ); - t.true( - zipfiles.includes(`package${sep}boto3${sep}__init__.py`), - 'boto3 is packaged' - ); - t.end(); -}); +test( + 'py3.7 injects dependencies into `package` folder when using scaleway provider', + async (t) => { + process.chdir('tests/scaleway_provider'); + const { stdout: path } = npm(['pack', '../..']); + npm(['i', path]); + sls(['package'], { env: {} }); + const zipfiles = await listZipFiles('.serverless/sls-py-req-test.zip'); + t.true( + zipfiles.includes(`package${sep}flask${sep}__init__.py`), + 'flask is packaged' + ); + t.true( + zipfiles.includes(`package${sep}boto3${sep}__init__.py`), + 'boto3 is packaged' + ); + t.end(); + }, + { skip: true } // sls v4 supports aws provider only +); From bceb7371dd64d59829377fe6fd16e17f631d0251 Mon Sep 17 00:00:00 2001 From: Tomasz Czubocha Date: Tue, 11 Feb 2025 18:56:23 +0100 Subject: [PATCH 89/90] fix: Use absolute paths to ensure compatibility with v4 Compose (#854) --- .github/workflows/validate.yml | 2 +- index.js | 2 +- lib/inject.js | 9 +++++++-- package.json | 3 --- test.js | 8 ++++---- 5 files changed, 13 insertions(+), 11 deletions(-) diff --git a/.github/workflows/validate.yml b/.github/workflows/validate.yml index 38f69696..23e2d67f 100644 --- a/.github/workflows/validate.yml +++ b/.github/workflows/validate.yml @@ -61,7 +61,7 @@ jobs: run: python -m pip install --force setuptools wheel - name: Install pipenv / poetry - run: python -m pip install pipenv poetry + run: python -m pip install pipenv poetry && poetry self add poetry-plugin-export - name: Install serverless run: npm install -g serverless@${{ matrix.sls-version }} diff --git a/index.js b/index.js index ca8b191f..44906956 100644 --- a/index.js +++ b/index.js @@ -74,7 +74,7 @@ class ServerlessPythonRequirements { options.pythonBin = 'python'; } if (/python3[0-9]+/.test(options.pythonBin)) { - // "google" and "scaleway" providers' runtimes uses python3XX + // "google" and "scaleway" providers' runtimes use python3XX options.pythonBin = options.pythonBin.replace(/3([0-9]+)/, '3.$1'); } if (options.dockerizePip === 'non-linux') { diff --git a/lib/inject.js b/lib/inject.js index 12267376..f4acde9d 100644 --- a/lib/inject.js +++ b/lib/inject.js @@ -153,7 +153,12 @@ async function injectAllRequirements(funcArtifact) { return this.options.zip ? 
func : injectRequirements( - path.join('.serverless', func.module, 'requirements'), + path.join( + this.serverless.serviceDir, + '.serverless', + func.module, + 'requirements' + ), func.package.artifact, injectionRelativePath, this.options @@ -161,7 +166,7 @@ async function injectAllRequirements(funcArtifact) { }); } else if (!this.options.zip) { await injectRequirements( - path.join('.serverless', 'requirements'), + path.join(this.serverless.serviceDir, '.serverless', 'requirements'), this.serverless.service.package.artifact || funcArtifact, injectionRelativePath, this.options diff --git a/package.json b/package.json index 07466e23..f5754f24 100644 --- a/package.json +++ b/package.json @@ -77,9 +77,6 @@ "sha256-file": "1.0.0", "shell-quote": "^1.8.1" }, - "peerDependencies": { - "serverless": ">=2.32" - }, "lint-staged": { "*.js": [ "eslint" diff --git a/test.js b/test.js index 488b500f..1967330b 100644 --- a/test.js +++ b/test.js @@ -206,7 +206,7 @@ test( process.chdir('tests/base'); const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const { stderr } = sls(['package'], { + const { stdout } = sls(['package'], { noThrow: true, env: { dockerizePip: true, @@ -216,7 +216,7 @@ test( }, }); t.true( - stderr.includes( + stdout.includes( `-v ${__dirname}${sep}tests${sep}base${sep}custom_ssh:/root/.ssh/custom_ssh:z` ), 'docker command properly resolved' @@ -1742,12 +1742,12 @@ test('poetry py3.9 fails packaging if poetry.lock is missing and flag requirePoe const { stdout: path } = npm(['pack', '../..']); npm(['i', path]); - const { stderr } = sls(['package'], { + const { stdout } = sls(['package'], { env: { requirePoetryLockFile: 'true', slim: 'true' }, noThrow: true, }); t.true( - stderr.includes( + stdout.includes( 'poetry.lock file not found - set requirePoetryLockFile to false to disable this error' ), 'flag works and error is properly reported' From b58ce60097717c369eea9f1e807a1374c7e4937f Mon Sep 17 00:00:00 2001 From: Tomasz Czubocha Date: Tue, 11 Feb 2025 19:40:17 +0100 Subject: [PATCH 90/90] chore: Release v6.1.2 (#855) --- CHANGELOG.md | 6 ++++++ package.json | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3771a1e0..42026cdc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [6.1.2](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.1...v6.1.2) (2025-02-11) + +### Bug Fixes + +- Use absolute paths to ensure compatibility with v4 Compose ([#854](https://github.com/UnitedIncome/serverless-python-requirements/issues/854)) ([bceb737](https://github.com/UnitedIncome/serverless-python-requirements/commit/bceb7371dd64d59829377fe6fd16e17f631d0251)) + ### [6.1.1](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.1.0...v6.1.1) (2024-06-03) ## [6.1.0](https://github.com/UnitedIncome/serverless-python-requirements/compare/v6.0.1...v6.1.0) (2024-03-27) diff --git a/package.json b/package.json index f5754f24..55ab4989 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "serverless-python-requirements", - "version": "6.1.1", + "version": "6.1.2", "engines": { "node": ">=12.0" },
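
Taken together, the patches above introduce two options that benefit from a combined example: patch 81/90 derives the default Docker build image from each function's own `runtime` and `architecture` (falling back to the provider settings, then `x86_64`), and patch 83/90 adds a `dockerRootless` flag that skips the chown-to-host-UID steps, since rootless Docker already maps the container's root user to the invoking user. The sketch below is illustrative only: the service and function names are invented, while the option names, their placement under `custom.pythonRequirements`, and the defaults are taken from the diffs.

```yaml
service: example-py-reqs        # illustrative name, not from the patches

provider:
  name: aws
  runtime: python3.9            # provider-level default runtime
  architecture: arm64           # provider-level default architecture

custom:
  pythonRequirements:
    dockerizePip: true          # build inside public.ecr.aws/sam/build-<runtime>:latest-<architecture>
    dockerRootless: true        # rootless Docker: skip chown to host uid/gid (patch 83/90)

functions:
  hello:
    handler: handler.hello
    runtime: python3.9          # per-function runtime overrides the provider default (patch 81/90)
    architecture: x86_64        # per-function architecture selects the matching build image

plugins:
  - serverless-python-requirements
```

With this configuration, the `hello` function's requirements would be built in `public.ecr.aws/sam/build-python3.9:latest-x86_64` despite the provider-level `arm64` default, matching the `dockerImageForFunction` resolution added in patch 81/90.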