This rule is currently used to implement pip_import
,
@@ -60,20 +99,21 @@ See pip_import
for proper usage.
This rule imports a .whl
file as a py_library
:
whl_library(
name = "foo",
- whl = ":my-whl-file",
- requirements = "name of pip_import rule",
+ whls = [":my-whl-file", ...],
+ requirements = "",
)
-This rule defines a @foo//:pkg
py_library
target.
+This rule defines a @foo//:pkg
py_library
target and
+a @foo//:whl
filegroup
target.
Args:
- whl: The path to the .whl file (the name is expected to follow [this
+ whls: The paths to the .whl files (the names are expected to follow [this
convention](https://www.python.org/dev/peps/pep-0427/#file-name-convention))
requirements: The name of the pip_import repository rule from which to
- load this .whl's dependencies.
+ load each .whl
's dependencies.
- extras: A subset of the "extras" available from this .whl
for which
- requirements
has the dependencies.
+ extras: A subset of the "extras" available from these .whl
s for
+ which requirements
has the dependencies.
"""
diff --git a/rules_python/piptool.py b/rules_python/piptool.py
index f5d504aa87..1a048fea1d 100644
--- a/rules_python/piptool.py
+++ b/rules_python/piptool.py
@@ -15,28 +15,31 @@
import argparse
import atexit
-import json
import os
import pkgutil
-import pkg_resources
-import re
+# import pkg_resources
import shutil
import sys
import tempfile
-import zipfile
+import textwrap
# Note: We carefully import the following modules in a particular
# order, since these modules modify the import path and machinery.
import pkg_resources
+if sys.version_info < (3, 0):
+ _WHL_LIBRARY_RULE = 'whl_library'
+else:
+ _WHL_LIBRARY_RULE = 'whl3_library'
+
-def extract_packages(package_names):
+def _extract_packages(package_names):
"""Extract zipfile contents to disk and add to import path"""
# Set a safe extraction dir
extraction_tmpdir = tempfile.mkdtemp()
- atexit.register(lambda: shutil.rmtree(
- extraction_tmpdir, ignore_errors=True))
+ atexit.register(
+ lambda: shutil.rmtree(extraction_tmpdir, ignore_errors=True))
pkg_resources.set_extraction_path(extraction_tmpdir)
# Extract each package to disk
@@ -58,170 +61,288 @@ def extract_packages(package_names):
# Wheel, pip, and setuptools are much happier running from actual
# files on disk, rather than entries in a zipfile. Extract zipfile
# contents, add those contents to the path, then import them.
-extract_packages(['pip', 'setuptools', 'wheel'])
+_extract_packages(['pip', 'setuptools', 'wheel'])
# Defeat pip's attempt to mangle sys.path
-saved_sys_path = sys.path
+_SAVED_SYS_PATH = sys.path
sys.path = sys.path[:]
-import pip
-sys.path = saved_sys_path
+import pip # pylint: disable=C0413
+sys.path = _SAVED_SYS_PATH
-import setuptools
-import wheel
+# import setuptools
+# import wheel
-def pip_main(argv):
+def _pip_main(argv):
# Extract the certificates from the PAR following the example of get-pip.py
# https://github.com/pypa/get-pip/blob/430ba37776ae2ad89/template.py#L164-L168
cert_path = os.path.join(tempfile.mkdtemp(), "cacert.pem")
with open(cert_path, "wb") as cert:
- cert.write(pkgutil.get_data("pip._vendor.requests", "cacert.pem"))
+ cert.write(pkgutil.get_data("pip._vendor.requests", "cacert.pem"))
argv = ["--disable-pip-version-check", "--cert", cert_path] + argv
return pip.main(argv)
-from rules_python.whl import Wheel
-
-parser = argparse.ArgumentParser(
- description='Import Python dependencies into Bazel.')
-
-parser.add_argument('--name', action='store',
- help=('The namespace of the import.'))
-
-parser.add_argument('--input', action='store',
- help=('The requirements.txt file to import.'))
-
-parser.add_argument('--output', action='store',
- help=('The requirements.bzl file to export.'))
-
-parser.add_argument('--directory', action='store',
- help=('The directory into which to put .whl files.'))
-
-def determine_possible_extras(whls):
- """Determines the list of possible "extras" for each .whl
-
- The possibility of an extra is determined by looking at its
- additional requirements, and determinine whether they are
- satisfied by the complete list of available wheels.
-
- Args:
- whls: a list of Wheel objects
-
- Returns:
- a dict that is keyed by the Wheel objects in whls, and whose
- values are lists of possible extras.
- """
- whl_map = {
- whl.distribution(): whl
- for whl in whls
- }
-
- # TODO(mattmoor): Consider memoizing if this recursion ever becomes
- # expensive enough to warrant it.
- def is_possible(distro, extra):
- distro = distro.replace("-", "_")
- # If we don't have the .whl at all, then this isn't possible.
- if distro not in whl_map:
- return False
- whl = whl_map[distro]
- # If we have the .whl, and we don't need anything extra then
- # we can satisfy this dependency.
- if not extra:
- return True
- # If we do need something extra, then check the extra's
- # dependencies to make sure they are fully satisfied.
- for extra_dep in whl.dependencies(extra=extra):
- req = pkg_resources.Requirement.parse(extra_dep)
- # Check that the dep and any extras are all possible.
- if not is_possible(req.project_name, None):
- return False
- for e in req.extras:
- if not is_possible(req.project_name, e):
- return False
- # If all of the dependencies of the extra are satisfiable then
- # it is possible to construct this dependency.
- return True
-
- return {
- whl: [
- extra
- for extra in whl.extras()
- if is_possible(whl.distribution(), extra)
- ]
- for whl in whls
- }
+
+from rules_python.whl import Wheel # pylint: disable=C0413
+
def main():
- args = parser.parse_args()
+ args = _parse_args()
+
+ # https://github.com/pypa/pip/blob/9.0.1/pip/__init__.py#L209
+ if _pip_main(["wheel", "-w", args.directory, "-r", args.input]):
+ sys.exit(1)
+
+ # Enumerate the .whl files we downloaded.
+ def list_whl_files():
+ dir_ = args.directory + '/'
+ for root, unused_dirnames, filenames in os.walk(dir_):
+ for fname in filenames:
+ if fname.endswith('.whl'):
+ yield os.path.join(root, fname)
+
+ wheels = [Wheel(path) for path in list_whl_files()]
+
+ bzl_file_content = _make_bzl_file_content(
+ wheels=wheels,
+ reqs_repo_name=args.name,
+ input_requirements_file_path=args.input)
+ with open(args.output, 'w') as file_obj:
+ file_obj.write(bzl_file_content)
+
+
+def _parse_args():
+ parser = argparse.ArgumentParser(
+ description='Import Python dependencies into Bazel.')
+ parser.add_argument(
+ '--name', action='store', help='The namespace of the import.')
+ parser.add_argument(
+ '--input', action='store', help='The requirements.txt file to import.')
+ parser.add_argument(
+ '--output',
+ action='store',
+ help='The requirements.bzl file to export.')
+ parser.add_argument(
+ '--directory',
+ action='store',
+ help='The directory into which to put .whl files.')
+ return parser.parse_args()
+
+
+def _make_bzl_file_content(wheels, reqs_repo_name,
+ input_requirements_file_path):
+ wheel_to_extras = _make_wheel_to_extras(wheels)
+
+ join_str = ',\n '
+ pypi_name_to_py_library = join_str.join([
+ join_str.join([
+ '"{pypi_name}": "@{wheel_name}//:pkg"'.format(
+ pypi_name=wheel.distribution().lower(),
+ wheel_name=_make_wheel_name(reqs_repo_name, wheel))
+ ] + [
+ # For every extra that is possible from this requirements.txt
+ '"{pypi_name}[{extra}]": "@{wheel_name}//:{extra}"'.format(
+ pypi_name=wheel.distribution().lower(),
+ extra=extra.lower(),
+ wheel_name=_make_wheel_name(reqs_repo_name, wheel))
+ for extra in wheel_to_extras.get(wheel, [])
+ ]) for wheel in wheels
+ ])
- # https://github.com/pypa/pip/blob/9.0.1/pip/__init__.py#L209
- if pip_main(["wheel", "-w", args.directory, "-r", args.input]):
- sys.exit(1)
+ pypi_name_to_whl_filegroup = join_str.join([
+ join_str.join([
+ '"{pypi_name}": "@{wheel_name}//:whl"'.format(
+ pypi_name=wheel.distribution().lower(),
+ wheel_name=_make_wheel_name(reqs_repo_name, wheel))
+ ] + [
+ # For every extra that is possible from this requirements.txt
+ '"{pypi_name}[{extra}]": "@{wheel_name}//:{extra}_whl"'.format(
+ pypi_name=wheel.distribution().lower(),
+ extra=extra.lower(),
+ wheel_name=_make_wheel_name(reqs_repo_name, wheel))
+ for extra in wheel_to_extras.get(wheel, [])
+ ]) for wheel in wheels
+ ])
- # Enumerate the .whl files we downloaded.
- def list_whls():
- dir = args.directory + '/'
- for root, unused_dirnames, filenames in os.walk(dir):
- for fname in filenames:
- if fname.endswith('.whl'):
- yield os.path.join(root, fname)
+ merged_whl_repo_name = "{reqs_repo_name}_merged".format(
+ reqs_repo_name=reqs_repo_name)
+ merged_py_library = '"@{merged_whl_repo_name}//:pkg"'.format(
+ merged_whl_repo_name=merged_whl_repo_name)
+ merged_whl_filegroup = '"@{merged_whl_repo_name}//:whl"'.format(
+ merged_whl_repo_name=merged_whl_repo_name)
+
+ if wheels:
+ whl_library_rule_list = []
+ for wheel in wheels:
+ extras = ','.join(
+ ['"%s"' % extra for extra in wheel_to_extras.get(wheel, [])])
+ whl_library_rule = _make_whl_library_rule(
+ reqs_repo_name=reqs_repo_name,
+ whl_repo_name=_make_wheel_name(reqs_repo_name, wheel),
+ wheels=[wheel],
+ extras=extras)
+ whl_library_rule_list.append(whl_library_rule)
+ whl_library_rules = '\n'.join(whl_library_rule_list)
+
+ merged_whl_library_rule = _make_whl_library_rule(
+ reqs_repo_name=reqs_repo_name,
+ whl_repo_name=merged_whl_repo_name,
+ wheels=wheels,
+ extras='')
+ else:
+ whl_library_rules = 'pass'
+
+ return _populate_bzl_template(
+ input_requirements_file_path=input_requirements_file_path,
+ whl_library_rules=whl_library_rules,
+ pypi_name_to_py_library=pypi_name_to_py_library,
+ pypi_name_to_whl_filegroup=pypi_name_to_whl_filegroup,
+ merged_whl_library_rule=merged_whl_library_rule,
+ merged_py_library=merged_py_library,
+ merged_whl_filegroup=merged_whl_filegroup)
+
+
+def _make_wheel_to_extras(wheels):
+ """Determines the list of possible "extras" for each .whl file.
+
+ The possibility of an extra is determined by looking at its
+    additional requirements, and determining whether they are
+ satisfied by the complete list of available wheels.
+
+ Args:
+ wheels: a list of Wheel objects
+
+ Returns:
+ a dict that is keyed by the Wheel objects in wheels, and whose
+ values are lists of possible extras.
+ """
+ pypi_name_to_wheel = {wheel.distribution(): wheel for wheel in wheels}
+
+ # TODO(mattmoor): Consider memoizing if this recursion ever becomes
+ # expensive enough to warrant it.
+ def is_possible(pypi_name, extra):
+ pypi_name = pypi_name.replace("-", "_")
+ # If we don't have the .whl at all, then this isn't possible.
+ if pypi_name not in pypi_name_to_wheel:
+ return False
+ wheel = pypi_name_to_wheel[pypi_name]
+ # If we have the .whl, and we don't need anything extra then
+ # we can satisfy this dependency.
+ if not extra:
+ return True
+ # If we do need something extra, then check the extra's
+ # dependencies to make sure they are fully satisfied.
+ for extra_dep in wheel.dependencies(extra=extra):
+ req = pkg_resources.Requirement.parse(extra_dep)
+ # Check that the dep and any extras are all possible.
+ if not is_possible(req.project_name, None):
+ return False
+ for extra_ in req.extras:
+ if not is_possible(req.project_name, extra_):
+ return False
+ # If all of the dependencies of the extra are satisfiable then
+ # it is possible to construct this dependency.
+ return True
+
+ return {
+ wheel: [
+ extra for extra in wheel.extras()
+ if is_possible(wheel.distribution(), extra)
+ ]
+ for wheel in wheels
+ }
+
+
+_WHL_LIBRARY_RULE_TEMPLATE = """
+ if "{whl_repo_name}" not in native.existing_rules():
+ {whl_library}(
+ name = "{whl_repo_name}",
+ whls = [{whls}],
+ requirements = "@{reqs_repo_name}//:requirements.bzl",
+ extras = [{extras}]
+ )"""
- whls = [Wheel(path) for path in list_whls()]
- possible_extras = determine_possible_extras(whls)
- def whl_library(wheel):
+def _make_whl_library_rule(reqs_repo_name, whl_repo_name, wheels, extras):
+ whls = ', '.join([
+ '"@{name}//:{path}"'.format(
+ name=reqs_repo_name, path=wheel.basename()) for wheel in wheels
+ ])
# Indentation here matters. whl_library must be within the scope
# of the function below. We also avoid reimporting an existing WHL.
- return """
- if "{repo_name}" not in native.existing_rules():
- whl_library(
- name = "{repo_name}",
- whl = "@{name}//:{path}",
- requirements = "@{name}//:requirements.bzl",
- extras = [{extras}]
- )""".format(name=args.name, repo_name=wheel.repository_name(),
- path=wheel.basename(),
- extras=','.join([
- '"%s"' % extra
- for extra in possible_extras.get(wheel, [])
- ]))
-
- whl_targets = ','.join([
- ','.join([
- '"%s": "@%s//:pkg"' % (whl.distribution().lower(), whl.repository_name())
- ] + [
- # For every extra that is possible from this requirements.txt
- '"%s[%s]": "@%s//:%s"' % (whl.distribution().lower(), extra.lower(),
- whl.repository_name(), extra)
- for extra in possible_extras.get(whl, [])
- ])
- for whl in whls
- ])
+ return _WHL_LIBRARY_RULE_TEMPLATE.format(
+ whl_repo_name=whl_repo_name,
+ reqs_repo_name=reqs_repo_name,
+ extras=extras,
+ whl_library=_WHL_LIBRARY_RULE,
+ whls=whls)
- with open(args.output, 'w') as f:
- f.write("""\
-# Install pip requirements.
-#
-# Generated from {input}
-load("@io_bazel_rules_python//python:whl.bzl", "whl_library")
+_BZL_TEMPLATE = textwrap.dedent("""\
+ # Install pip requirements.
+ #
+ # Generated from {input}
+
+ load("@io_bazel_rules_python//python:whl.bzl", "{whl_library}")
+
+ def pip_install():
+ {whl_library_rules}
+ {merged_whl_library_rule}
+
+ _requirements = {{
+ {pypi_name_to_py_library}
+ }}
+
+ _whl_requirements = {{
+ {pypi_name_to_whl_filegroup}
+ }}
+
+ _merged_py_library = {merged_py_library}
+ _merged_whl_filegroup = {merged_whl_filegroup}
+
+ def pypi_requirements():
+ return _merged_py_library
+
+ def pypi_whl_requirements():
+ return _merged_whl_filegroup
+
+ def pypi_whl_requirement(name):
+ name_key = _make_name_key(name)
+ if name_key not in _whl_requirements:
+ fail("Could not find pip-provided whl dependency: '%s'; available: %s" % (name, sorted(_whl_requirements.keys())))
+ return _whl_requirements[name_key]
+
+ # Deprecated; don't use.
+ def requirement(name):
+ name_key = _make_name_key(name)
+ if name_key not in _requirements:
+ fail("Could not find pip-provided dependency: '%s'; available: %s" % (name, sorted(_requirements.keys())))
+ return _requirements[name_key]
+
+ def _make_name_key(name):
+ name_key = name.replace("-", "_").lower()
+ return name_key
+""")
+
-def pip_install():
- {whl_libraries}
+def _populate_bzl_template(input_requirements_file_path, whl_library_rules,
+ pypi_name_to_py_library, pypi_name_to_whl_filegroup,
+ merged_whl_library_rule, merged_py_library,
+ merged_whl_filegroup):
+ return _BZL_TEMPLATE.format(
+ input=input_requirements_file_path,
+ whl_library_rules=whl_library_rules,
+ pypi_name_to_py_library=pypi_name_to_py_library,
+ pypi_name_to_whl_filegroup=pypi_name_to_whl_filegroup,
+ whl_library=_WHL_LIBRARY_RULE,
+ merged_whl_library_rule=merged_whl_library_rule,
+ merged_py_library=merged_py_library,
+ merged_whl_filegroup=merged_whl_filegroup)
-_requirements = {{
- {mappings}
-}}
-all_requirements = _requirements.values()
+def _make_wheel_name(namespace, wheel):
+ return "{}_{}".format(namespace, wheel.repository_name())
-def requirement(name):
- name_key = name.replace("-", "_").lower()
- if name_key not in _requirements:
- fail("Could not find pip-provided dependency: '%s'" % name)
- return _requirements[name_key]
-""".format(input=args.input,
- whl_libraries='\n'.join(map(whl_library, whls)) if whls else "pass",
- mappings=whl_targets))
if __name__ == '__main__':
- main()
+ main()
diff --git a/rules_python/whl.py b/rules_python/whl.py
index e3544d80d0..f252506100 100644
--- a/rules_python/whl.py
+++ b/rules_python/whl.py
@@ -16,158 +16,272 @@
import argparse
import json
import os
-import pkg_resources
import re
+import shutil
+import textwrap
import zipfile
+import pkg_resources
+
+
+# pylint: disable=R0914
+def main():
+ args = _parse_args()
+
+ dependency_list = []
+ whl_dependency_list = []
+ extra_list = []
+ whl_extra_list = []
+
+ whl_paths = args.whl_paths
+ if args.whl is not None:
+ whl_paths = whl_paths + [args.whl]
+
+ # Extract the files into the current directory.
+    for wheel_path in whl_paths:
+ wheel = Wheel(wheel_path)
+ wheel.expand(args.directory)
+
+ copied_whl_path = os.path.join(args.directory,
+ os.path.basename(wheel_path))
+ shutil.copy(wheel_path, copied_whl_path)
+
+ if args.track_deps:
+ for dependency in wheel.dependencies():
+ dependency_list.append('requirement("{}")'.format(dependency))
+ whl_dependency_list.append(
+ 'pypi_whl_requirement("{}")'.format(dependency))
+ for extra in args.extras:
+ extra_list.append(_make_extra(extra, wheel))
+ whl_extra_list.append(_make_whl_extra(extra, wheel))
+
+ # Generate BUILD file.
+ dependency_join_str = ',\n '
+ extras_join_str = '\n\n'
+
+ dependencies = dependency_join_str.join(dependency_list)
+ whl_dependencies = dependency_join_str.join(whl_dependency_list)
+ extras = extras_join_str.join(extra_list)
+ whl_extras = extras_join_str.join(whl_extra_list)
+
+ build_file_content = _make_build_file_content(
+ requirements_bzl=args.requirements,
+ dependencies=dependencies,
+ whl_dependencies=whl_dependencies,
+ extras=extras,
+ whl_extras=whl_extras)
+
+ with open(os.path.join(args.directory, 'BUILD'), 'w') as file_obj:
+ file_obj.write(build_file_content)
+
class Wheel(object):
+ def __init__(self, path):
+ self._path = path
- def __init__(self, path):
- self._path = path
-
- def path(self):
- return self._path
-
- def basename(self):
- return os.path.basename(self.path())
-
- def distribution(self):
- # See https://www.python.org/dev/peps/pep-0427/#file-name-convention
- parts = self.basename().split('-')
- return parts[0]
-
- def version(self):
- # See https://www.python.org/dev/peps/pep-0427/#file-name-convention
- parts = self.basename().split('-')
- return parts[1]
-
- def repository_name(self):
- # Returns the canonical name of the Bazel repository for this package.
- canonical = 'pypi__{}_{}'.format(self.distribution(), self.version())
- # Escape any illegal characters with underscore.
- return re.sub('[-.]', '_', canonical)
-
- def _dist_info(self):
- # Return the name of the dist-info directory within the .whl file.
- # e.g. google_cloud-0.27.0-py2.py3-none-any.whl ->
- # google_cloud-0.27.0.dist-info
- return '{}-{}.dist-info'.format(self.distribution(), self.version())
-
- def metadata(self):
- # Extract the structured data from metadata.json in the WHL's dist-info
- # directory.
- with zipfile.ZipFile(self.path(), 'r') as whl:
- # first check for metadata.json
- try:
- with whl.open(self._dist_info() + '/metadata.json') as f:
- return json.loads(f.read().decode("utf-8"))
- except KeyError:
- pass
- # fall back to METADATA file (https://www.python.org/dev/peps/pep-0427/)
- with whl.open(self._dist_info() + '/METADATA') as f:
- return self._parse_metadata(f.read().decode("utf-8"))
-
- def name(self):
- return self.metadata().get('name')
-
- def dependencies(self, extra=None):
- """Access the dependencies of this Wheel.
-
- Args:
- extra: if specified, include the additional dependencies
- of the named "extra".
-
- Yields:
- the names of requirements from the metadata.json
- """
- # TODO(mattmoor): Is there a schema to follow for this?
- run_requires = self.metadata().get('run_requires', [])
- for requirement in run_requires:
- if requirement.get('extra') != extra:
- # Match the requirements for the extra we're looking for.
- continue
- marker = requirement.get('environment')
- if marker and not pkg_resources.evaluate_marker(marker):
- # The current environment does not match the provided PEP 508 marker,
- # so ignore this requirement.
- continue
- requires = requirement.get('requires', [])
- for entry in requires:
- # Strip off any trailing versioning data.
- parts = re.split('[ ><=()]', entry)
- yield parts[0]
-
- def extras(self):
- return self.metadata().get('extras', [])
-
- def expand(self, directory):
- with zipfile.ZipFile(self.path(), 'r') as whl:
- whl.extractall(directory)
-
- # _parse_metadata parses METADATA files according to https://www.python.org/dev/peps/pep-0314/
- def _parse_metadata(self, content):
- # TODO: handle fields other than just name
- name_pattern = re.compile('Name: (.*)')
- return { 'name': name_pattern.search(content).group(1) }
-
-
-parser = argparse.ArgumentParser(
- description='Unpack a WHL file as a py_library.')
-
-parser.add_argument('--whl', action='store',
- help=('The .whl file we are expanding.'))
-
-parser.add_argument('--requirements', action='store',
- help='The pip_import from which to draw dependencies.')
-
-parser.add_argument('--directory', action='store', default='.',
- help='The directory into which to expand things.')
-
-parser.add_argument('--extras', action='append',
- help='The set of extras for which to generate library targets.')
+ def path(self):
+ return self._path
+
+ def basename(self):
+ return os.path.basename(self.path())
+
+ def distribution(self):
+ # See https://www.python.org/dev/peps/pep-0427/#file-name-convention
+ parts = self.basename().split('-')
+ return parts[0]
+
+ def version(self):
+ # See https://www.python.org/dev/peps/pep-0427/#file-name-convention
+ parts = self.basename().split('-')
+ return parts[1]
+
+ def repository_name(self):
+ # Returns the canonical name of the Bazel repository for this package.
+ canonical = 'pypi__{}_{}'.format(self.distribution(), self.version())
+ # Escape any illegal characters with underscore.
+ return re.sub('[-.]', '_', canonical)
+
+ def _dist_info(self):
+ # Return the name of the dist-info directory within the .whl file.
+ # e.g. google_cloud-0.27.0-py2.py3-none-any.whl ->
+ # google_cloud-0.27.0.dist-info
+ return '{}-{}.dist-info'.format(self.distribution(), self.version())
+
+ def metadata(self):
+ # Extract the structured data from metadata.json in the WHL's dist-info
+ # directory.
+ with zipfile.ZipFile(self.path(), 'r') as whl:
+ # first check for metadata.json
+ try:
+ with whl.open(
+ self._dist_info() + '/metadata.json') as file_obj:
+ return json.loads(file_obj.read().decode("utf-8"))
+ except KeyError:
+ pass
+ # fall back to METADATA file (https://www.python.org/dev/peps/pep-0427/)
+ with whl.open(self._dist_info() + '/METADATA') as file_obj:
+ return self._parse_metadata(file_obj.read().decode("utf-8"))
+
+ def name(self):
+ return self.metadata().get('name')
+
+ def dependencies(self, extra=None):
+ """Access the dependencies of this Wheel.
+
+ Args:
+ extra: if specified, include the additional dependencies of the named
+ "extra".
+
+ Yields:
+ the names of requirements from the metadata.json
+ """
+ # TODO(mattmoor): Is there a schema to follow for this?
+ run_requires = self.metadata().get('run_requires', [])
+ for requirement in run_requires:
+ if requirement.get('extra') != extra:
+ # Match the requirements for the extra we're looking for.
+ continue
+ marker = requirement.get('environment')
+ if marker and not pkg_resources.evaluate_marker(marker):
+ # The current environment does not match the provided PEP 508 marker,
+ # so ignore this requirement.
+ continue
+ requires = requirement.get('requires', [])
+ for entry in requires:
+ # Strip off any trailing versioning data.
+ parts = re.split('[ ><=()]', entry)
+ yield parts[0]
+
+ def extras(self):
+ return self.metadata().get('extras', [])
+
+ def expand(self, directory):
+ with zipfile.ZipFile(self.path(), 'r') as whl:
+ whl.extractall(directory)
+
+ # _parse_metadata parses METADATA files according to https://www.python.org/dev/peps/pep-0314/
+ def _parse_metadata(self, content):
+ # TODO: handle fields other than just name
+ name_pattern = re.compile('Name: (.*)')
+ return {'name': name_pattern.search(content).group(1)}
+
+
+def _parse_args():
+ parser = argparse.ArgumentParser(
+ description='Unpack a .whl file as a py_library.')
+
+ parser.add_argument(
+ '--whl_paths',
+ action='append',
+ default=[],
+ help=('The .whl files we are expanding.'))
+
+ parser.add_argument(
+ '--whl',
+ action='store',
+ default=None,
+ help='Deprecated; use --whl_paths')
+
+ parser.add_argument('--track_deps', action='store', type=bool)
+
+ parser.add_argument(
+ '--requirements',
+ action='store',
+ default=None,
+ help='The pip_import from which to draw dependencies.')
+
+ parser.add_argument(
+ '--directory',
+ action='store',
+ default='.',
+ help='The directory into which to expand things.')
+
+ parser.add_argument(
+ '--extras',
+ action='append',
+ help='The set of extras for which to generate library targets.')
+
+ return parser.parse_args()
+
+
+_EXTRA_TEMPLATE = textwrap.dedent("""\
+ py_library(
+ name = "{extra}",
+ deps = [
+ ":pkg",{deps}
+ ],
+ )
+""")
+_WHL_EXTRA_TEMPLATE = textwrap.dedent("""\
+ filegroup(
+ name = "{extra}_whl",
+ srcs = [
+ ":whl",{deps}
+ ],
+ )
+""")
+
+
+def _make_extra(extra, wheel):
+ return _EXTRA_TEMPLATE.format(
+ extra=extra,
+ deps=','.join(
+ ['requirement("%s")' % dep for dep in wheel.dependencies(extra)]),
+ )
+
+
+def _make_whl_extra(extra, wheel):
+    return _WHL_EXTRA_TEMPLATE.format(
+ extra=extra,
+ deps=','.join([
+ 'pypi_whl_requirement("%s")' % dep
+ for dep in wheel.dependencies(extra)
+ ]),
+ )
+
+
+def _make_build_file_content(requirements_bzl, dependencies, whl_dependencies,
+ extras, whl_extras):
+ if requirements_bzl:
+ template = (
+ 'load("{requirements_bzl}", "requirement", "pypi_whl_requirement")'
+ )
+ load_requirements_statement = template.format(
+ requirements_bzl=requirements_bzl)
+ else:
+ load_requirements_statement = ''
+
+ return textwrap.dedent("""\
+ package(default_visibility = ["//visibility:public"])
+
+ {load_requirements_statement}
+
+ py_library(
+ name = "pkg",
+ srcs = glob(["**/*.py"]),
+ data = glob(["**/*"], exclude=["**/*.py", "**/* *", "BUILD", "WORKSPACE", "**/*.whl"]),
+ # This makes this directory a top-level in the python import
+ # search path for anything that depends on this.
+ imports = ["."],
+ deps = [{dependencies}],
+ )
+
+ filegroup(
+ name = "whl",
+ srcs = glob(["**/*.whl"]) + [{whl_dependencies}],
+ )
+
+ {extras}
+
+ {whl_extras}
+ """).format(
+ requirements_bzl=requirements_bzl,
+ dependencies=dependencies,
+ whl_dependencies=whl_dependencies,
+ extras=extras,
+ whl_extras=whl_extras,
+ load_requirements_statement=load_requirements_statement)
-def main():
- args = parser.parse_args()
- whl = Wheel(args.whl)
-
- # Extract the files into the current directory
- whl.expand(args.directory)
-
- with open(os.path.join(args.directory, 'BUILD'), 'w') as f:
- f.write("""
-package(default_visibility = ["//visibility:public"])
-
-load("{requirements}", "requirement")
-
-py_library(
- name = "pkg",
- srcs = glob(["**/*.py"]),
- data = glob(["**/*"], exclude=["**/*.py", "**/* *", "BUILD", "WORKSPACE"]),
- # This makes this directory a top-level in the python import
- # search path for anything that depends on this.
- imports = ["."],
- deps = [{dependencies}],
-)
-{extras}""".format(
- requirements=args.requirements,
- dependencies=','.join([
- 'requirement("%s")' % d
- for d in whl.dependencies()
- ]),
- extras='\n\n'.join([
- """py_library(
- name = "{extra}",
- deps = [
- ":pkg",{deps}
- ],
-)""".format(extra=extra,
- deps=','.join([
- 'requirement("%s")' % dep
- for dep in whl.dependencies(extra)
- ]))
- for extra in args.extras or []
- ])))
if __name__ == '__main__':
- main()
+ main()
diff --git a/rules_python/whl_test.py b/rules_python/whl_test.py
index a63d625088..ec3626005c 100644
--- a/rules_python/whl_test.py
+++ b/rules_python/whl_test.py
@@ -21,100 +21,108 @@
def TestData(name):
- return os.path.join(os.environ['TEST_SRCDIR'], name)
+ return os.path.join(os.environ['TEST_SRCDIR'], name)
class WheelTest(unittest.TestCase):
+ def test_grpc_whl(self):
+ td = TestData(
+ 'grpc_whl/file/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(wheel.name(), 'grpcio')
+ self.assertEqual(wheel.distribution(), 'grpcio')
+ self.assertEqual(wheel.version(), '1.6.0')
+ self.assertEqual(
+ set(wheel.dependencies()),
+ set(['enum34', 'futures', 'protobuf', 'six']))
+ self.assertEqual('pypi__grpcio_1_6_0', wheel.repository_name())
+ self.assertEqual([], wheel.extras())
+
+ def test_futures_whl(self):
+ td = TestData('futures_3_1_1_whl/file/futures-3.1.1-py2-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(wheel.name(), 'futures')
+ self.assertEqual(wheel.distribution(), 'futures')
+ self.assertEqual(wheel.version(), '3.1.1')
+ self.assertEqual(set(wheel.dependencies()), set())
+ self.assertEqual('pypi__futures_3_1_1', wheel.repository_name())
+ self.assertEqual([], wheel.extras())
+
+ def test_whl_with_METADATA_file(self):
+ td = TestData(
+ 'futures_2_2_0_whl/file/futures-2.2.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(wheel.name(), 'futures')
+ self.assertEqual(wheel.distribution(), 'futures')
+ self.assertEqual(wheel.version(), '2.2.0')
+ self.assertEqual(set(wheel.dependencies()), set())
+ self.assertEqual('pypi__futures_2_2_0', wheel.repository_name())
+
+ @patch('platform.python_version', return_value='2.7.13')
+ def test_mock_whl(self, *args):
+ td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(wheel.name(), 'mock')
+ self.assertEqual(wheel.distribution(), 'mock')
+ self.assertEqual(wheel.version(), '2.0.0')
+ self.assertEqual(
+ set(wheel.dependencies()), set(['funcsigs', 'pbr', 'six']))
+ self.assertEqual('pypi__mock_2_0_0', wheel.repository_name())
+
+ @patch('platform.python_version', return_value='3.3.0')
+ def test_mock_whl_3_3(self, *args):
+ td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(set(wheel.dependencies()), set(['pbr', 'six']))
+
+ @patch('platform.python_version', return_value='2.7.13')
+ def test_mock_whl_extras(self, *args):
+ td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(['docs', 'test'], wheel.extras())
+ self.assertEqual(
+ set(wheel.dependencies(extra='docs')), set(['sphinx']))
+ self.assertEqual(
+ set(wheel.dependencies(extra='test')), set(['unittest2']))
+
+ @patch('platform.python_version', return_value='3.0.0')
+ def test_mock_whl_extras_3_0(self, *args):
+ td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(['docs', 'test'], wheel.extras())
+ self.assertEqual(
+ set(wheel.dependencies(extra='docs')),
+ set(['sphinx', 'Pygments', 'jinja2']))
+ self.assertEqual(
+ set(wheel.dependencies(extra='test')), set(['unittest2']))
+
+ @patch('platform.python_version', return_value='2.7.13')
+ def test_google_cloud_language_whl(self, *args):
+ td = TestData('google_cloud_language_whl/file/' +
+ 'google_cloud_language-0.29.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ self.assertEqual(wheel.name(), 'google-cloud-language')
+ self.assertEqual(wheel.distribution(), 'google_cloud_language')
+ self.assertEqual(wheel.version(), '0.29.0')
+ expected_deps = [
+ 'google-gax', 'google-cloud-core',
+ 'googleapis-common-protos[grpc]', 'enum34'
+ ]
+ self.assertEqual(set(wheel.dependencies()), set(expected_deps))
+ self.assertEqual('pypi__google_cloud_language_0_29_0',
+ wheel.repository_name())
+ self.assertEqual([], wheel.extras())
+
+ @patch('platform.python_version', return_value='3.4.0')
+ def test_google_cloud_language_whl_3_4(self, *args):
+ td = TestData('google_cloud_language_whl/file/' +
+ 'google_cloud_language-0.29.0-py2.py3-none-any.whl')
+ wheel = whl.Wheel(td)
+ expected_deps = [
+ 'google-gax', 'google-cloud-core', 'googleapis-common-protos[grpc]'
+ ]
+ self.assertEqual(set(wheel.dependencies()), set(expected_deps))
- def test_grpc_whl(self):
- td = TestData('grpc_whl/file/grpcio-1.6.0-cp27-cp27m-manylinux1_i686.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(wheel.name(), 'grpcio')
- self.assertEqual(wheel.distribution(), 'grpcio')
- self.assertEqual(wheel.version(), '1.6.0')
- self.assertEqual(set(wheel.dependencies()),
- set(['enum34', 'futures', 'protobuf', 'six']))
- self.assertEqual('pypi__grpcio_1_6_0', wheel.repository_name())
- self.assertEqual([], wheel.extras())
-
- def test_futures_whl(self):
- td = TestData('futures_3_1_1_whl/file/futures-3.1.1-py2-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(wheel.name(), 'futures')
- self.assertEqual(wheel.distribution(), 'futures')
- self.assertEqual(wheel.version(), '3.1.1')
- self.assertEqual(set(wheel.dependencies()), set())
- self.assertEqual('pypi__futures_3_1_1', wheel.repository_name())
- self.assertEqual([], wheel.extras())
-
- def test_whl_with_METADATA_file(self):
- td = TestData('futures_2_2_0_whl/file/futures-2.2.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(wheel.name(), 'futures')
- self.assertEqual(wheel.distribution(), 'futures')
- self.assertEqual(wheel.version(), '2.2.0')
- self.assertEqual(set(wheel.dependencies()), set())
- self.assertEqual('pypi__futures_2_2_0', wheel.repository_name())
-
- @patch('platform.python_version', return_value='2.7.13')
- def test_mock_whl(self, *args):
- td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(wheel.name(), 'mock')
- self.assertEqual(wheel.distribution(), 'mock')
- self.assertEqual(wheel.version(), '2.0.0')
- self.assertEqual(set(wheel.dependencies()),
- set(['funcsigs', 'pbr', 'six']))
- self.assertEqual('pypi__mock_2_0_0', wheel.repository_name())
-
- @patch('platform.python_version', return_value='3.3.0')
- def test_mock_whl_3_3(self, *args):
- td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(set(wheel.dependencies()),
- set(['pbr', 'six']))
-
- @patch('platform.python_version', return_value='2.7.13')
- def test_mock_whl_extras(self, *args):
- td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(['docs', 'test'], wheel.extras())
- self.assertEqual(set(wheel.dependencies(extra='docs')), set(['sphinx']))
- self.assertEqual(set(wheel.dependencies(extra='test')), set(['unittest2']))
-
- @patch('platform.python_version', return_value='3.0.0')
- def test_mock_whl_extras_3_0(self, *args):
- td = TestData('mock_whl/file/mock-2.0.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(['docs', 'test'], wheel.extras())
- self.assertEqual(set(wheel.dependencies(extra='docs')), set(['sphinx', 'Pygments', 'jinja2']))
- self.assertEqual(set(wheel.dependencies(extra='test')), set(['unittest2']))
-
- @patch('platform.python_version', return_value='2.7.13')
- def test_google_cloud_language_whl(self, *args):
- td = TestData('google_cloud_language_whl/file/' +
- 'google_cloud_language-0.29.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- self.assertEqual(wheel.name(), 'google-cloud-language')
- self.assertEqual(wheel.distribution(), 'google_cloud_language')
- self.assertEqual(wheel.version(), '0.29.0')
- expected_deps = ['google-gax', 'google-cloud-core',
- 'googleapis-common-protos[grpc]', 'enum34']
- self.assertEqual(set(wheel.dependencies()),
- set(expected_deps))
- self.assertEqual('pypi__google_cloud_language_0_29_0',
- wheel.repository_name())
- self.assertEqual([], wheel.extras())
-
- @patch('platform.python_version', return_value='3.4.0')
- def test_google_cloud_language_whl_3_4(self, *args):
- td = TestData('google_cloud_language_whl/file/' +
- 'google_cloud_language-0.29.0-py2.py3-none-any.whl')
- wheel = whl.Wheel(td)
- expected_deps = ['google-gax', 'google-cloud-core',
- 'googleapis-common-protos[grpc]']
- self.assertEqual(set(wheel.dependencies()),
- set(expected_deps))
if __name__ == '__main__':
- unittest.main()
+ unittest.main()
diff --git a/tools/piptool.par b/tools/piptool.par
index 206a33d322..ae01863c94 100755
Binary files a/tools/piptool.par and b/tools/piptool.par differ
diff --git a/tools/whltool.par b/tools/whltool.par
index fece5b067e..447f2b9f08 100755
Binary files a/tools/whltool.par and b/tools/whltool.par differ