diff --git a/build_docs.py b/build_docs.py
index 3edaa90..e3b4184 100755
--- a/build_docs.py
+++ b/build_docs.py
@@ -150,34 +150,29 @@ def title(self):
     }
 
 
-def shell_out(cmd, shell=False):
-    cmdstring = cmd if shell else shlex.join(cmd)
-    logging.debug("Running command: %s", cmdstring)
-    try:
-        output = subprocess.check_output(
-            cmd,
-            shell=shell,
-            stdin=subprocess.PIPE,
-            stderr=subprocess.STDOUT,
-            encoding="utf-8",
-            errors="backslashreplace",
+def run(cmd) -> subprocess.CompletedProcess:
+    """Like subprocess.run, with logging before and after the command execution."""
+    cmdstring = shlex.join(cmd)
+    logging.debug("Run: %r", cmdstring)
+    result = subprocess.run(
+        cmd,
+        stdin=subprocess.PIPE,
+        stderr=subprocess.STDOUT,
+        stdout=subprocess.PIPE,
+        encoding="utf-8",
+        errors="backslashreplace",
+    )
+    if result.returncode:
+        # Log last 20 lines, those are likely the interesting ones.
+        logging.error(
+            "Run KO: %r:\n%s",
+            cmdstring,
+            indent("\n".join(result.stdout.split("\n")[-20:]), "    "),
         )
-        if output:
-            logging.debug(
-                "Command executed successfully: %s\n%s",
-                cmdstring,
-                indent(output, "    "),
-            )
-        return output
-    except subprocess.CalledProcessError as e:
-        if sentry_sdk:
-            with sentry_sdk.push_scope() as scope:
-                scope.fingerprint = ["{{ default }}", str(cmd)]
-                sentry_sdk.capture_exception(e)
-        if e.output:
-            logging.error("Command %s failed:\n%s", cmdstring, indent(e.output, "    "))
-        else:
-            logging.error("Command %s failed.", cmdstring)
+    else:
+        logging.debug("Run OK: %r", cmdstring)
+    result.check_returncode()
+    return result
 
 
 def changed_files(left, right):
@@ -207,20 +202,18 @@ def git_clone(repository, directory, branch=None):
     try:
         if not os.path.isdir(os.path.join(directory, ".git")):
             raise AssertionError("Not a git repository.")
-        shell_out(["git", "-C", directory, "fetch"])
+        run(["git", "-C", directory, "fetch"])
         if branch:
-            shell_out(["git", "-C", directory, "checkout", branch])
-            shell_out(["git", "-C", directory, "reset", "--hard", "origin/" + branch])
+            run(["git", "-C", directory, "checkout", branch])
+            run(["git", "-C", directory, "reset", "--hard", "origin/" + branch])
     except (subprocess.CalledProcessError, AssertionError):
         if os.path.exists(directory):
             shutil.rmtree(directory)
         logging.info("Cloning %s into %s", repository, directory)
         os.makedirs(directory, mode=0o775)
-        shell_out(
-            ["git", "clone", "--depth=1", "--no-single-branch", repository, directory]
-        )
+        run(["git", "clone", "--depth=1", "--no-single-branch", repository, directory])
         if branch:
-            shell_out(["git", "-C", directory, "checkout", branch])
+            run(["git", "-C", directory, "checkout", branch])
 
 
 def version_to_tuple(version):
@@ -271,7 +264,7 @@ def translation_branch(locale_repo, locale_clone_dir, needed_version):
     returns the name of the nearest existing branch.
     """
     git_clone(locale_repo, locale_clone_dir)
-    remote_branches = shell_out(["git", "-C", locale_clone_dir, "branch", "-r"])
+    remote_branches = run(["git", "-C", locale_clone_dir, "branch", "-r"]).stdout
     branches = []
     for branch in remote_branches.split("\n"):
         if re.match(r".*/[0-9]+\.[0-9]+$", branch):
@@ -423,7 +416,7 @@ def build_one(
     sphinxbuild = os.path.join(venv, "bin/sphinx-build")
     blurb = os.path.join(venv, "bin/blurb")
     # Disable cpython switchers, we handle them now:
-    shell_out(
+    run(
         [
             "sed",
             "-i",
@@ -434,7 +427,7 @@ def build_one(
     setup_indexsidebar(
         os.path.join(checkout, "Doc", "tools", "templates", "indexsidebar.html")
     )
-    shell_out(
+    run(
         [
             "make",
             "-C",
@@ -448,7 +441,7 @@ def build_one(
             maketarget,
         ]
     )
-    shell_out(["chgrp", "-R", group, log_directory])
+    run(["chgrp", "-R", group, log_directory])
     setup_switchers(os.path.join(checkout, "Doc", "build", "html"))
     logging.info("Build done for version: %s, language: %s", version.name, language.tag)
@@ -464,8 +457,8 @@ def build_venv(build_root, version, theme):
         "sphinx=={}".format(version.sphinx_version),
     ]
     venv_path = os.path.join(build_root, "venv-with-sphinx-" + version.sphinx_version)
-    shell_out(["python3", "-m", "venv", venv_path])
-    shell_out(
+    run(["python3", "-m", "venv", venv_path])
+    run(
         [os.path.join(venv_path, "bin", "python"), "-m", "pip", "install"] + requirements
     )
@@ -473,6 +466,9 @@ def build_robots_txt(www_root, group, skip_cache_invalidation):
+    if not Path(www_root).exists():
+        logging.info("Skipping robots.txt generation (www root does not even exists).")
+        return
     robots_file = os.path.join(www_root, "robots.txt")
     with open(HERE / "templates" / "robots.txt") as robots_txt_template_file:
         with open(robots_file, "w") as robots_txt_file:
             template = jinja2.Template(robots_txt_template_file.read())
             robots_txt_file.write(
@@ -481,12 +477,15 @@ def build_robots_txt(www_root, group, skip_cache_invalidation):
                 template.render(languages=LANGUAGES, versions=VERSIONS) + "\n"
             )
     os.chmod(robots_file, 0o775)
-    shell_out(["chgrp", group, robots_file])
+    run(["chgrp", group, robots_file])
     if not skip_cache_invalidation:
-        shell_out(["curl", "-XPURGE", "https://docs.python.org/robots.txt"])
+        run(["curl", "-XPURGE", "https://docs.python.org/robots.txt"])
 
 
 def build_sitemap(www_root):
+    if not Path(www_root).exists():
+        logging.info("Skipping sitemap generation (www root does not even exists).")
+        return
     with open(HERE / "templates" / "sitemap.xml") as sitemap_template_file:
         with open(os.path.join(www_root, "sitemap.xml"), "w") as sitemap_file:
             template = jinja2.Template(sitemap_template_file.read())
@@ -518,7 +517,7 @@ def copy_build_to_webroot(
     language_dir = os.path.join(www_root, language.tag)
     os.makedirs(language_dir, exist_ok=True)
     try:
-        shell_out(["chgrp", "-R", group, language_dir])
+        run(["chgrp", "-R", group, language_dir])
     except subprocess.CalledProcessError as err:
         logging.warning("Can't change group of %s: %s", language_dir, str(err))
     os.chmod(language_dir, 0o775)
@@ -530,15 +529,15 @@ def copy_build_to_webroot(
     except PermissionError as err:
         logging.warning("Can't change mod of %s: %s", target, str(err))
     try:
-        shell_out(["chgrp", "-R", group, target])
+        run(["chgrp", "-R", group, target])
     except subprocess.CalledProcessError as err:
         logging.warning("Can't change group of %s: %s", target, str(err))
     changed = changed_files(os.path.join(checkout, "Doc/build/html"), target)
     logging.info("Copying HTML files to %s", target)
-    shell_out(["chown", "-R", ":" + group, os.path.join(checkout, "Doc/build/html/")])
-    shell_out(["chmod", "-R", "o+r", os.path.join(checkout, "Doc/build/html/")])
-    shell_out(
+    run(["chown", "-R", ":" + group, os.path.join(checkout, "Doc/build/html/")])
+    run(["chmod", "-R", "o+r", os.path.join(checkout, "Doc/build/html/")])
+    run(
         [
             "find",
             os.path.join(checkout, "Doc/build/html/"),
@@ -552,9 +551,9 @@ def copy_build_to_webroot(
         ]
     )
     if quick:
-        shell_out(["rsync", "-a", os.path.join(checkout, "Doc/build/html/"), target])
+        run(["rsync", "-a", os.path.join(checkout, "Doc/build/html/"), target])
     else:
-        shell_out(
+        run(
             [
                 "rsync",
                 "-a",
@@ -567,18 +566,17 @@ def copy_build_to_webroot(
         )
     if not quick:
         logging.debug("Copying dist files")
-        shell_out(["chown", "-R", ":" + group, os.path.join(checkout, "Doc/dist/")])
-        shell_out(
-            ["chmod", "-R", "o+r", os.path.join(checkout, os.path.join("Doc/dist/"))]
-        )
-        shell_out(["mkdir", "-m", "o+rx", "-p", os.path.join(target, "archives")])
-        shell_out(["chown", ":" + group, os.path.join(target, "archives")])
-        shell_out(
-            "cp -a {src} {dst}".format(
-                src=os.path.join(checkout, "Doc/dist/*"),
-                dst=os.path.join(target, "archives"),
-            ),
-            shell=True,
+        run(["chown", "-R", ":" + group, os.path.join(checkout, "Doc/dist/")])
+        run(["chmod", "-R", "o+r", os.path.join(checkout, os.path.join("Doc/dist/"))])
+        run(["mkdir", "-m", "o+rx", "-p", os.path.join(target, "archives")])
+        run(["chown", ":" + group, os.path.join(target, "archives")])
+        run(
+            [
+                "cp",
+                "-a",
+                *[str(dist) for dist in (Path(checkout) / "Doc" / "dist").glob("*")],
+                os.path.join(target, "archives"),
+            ]
         )
         changed.append("archives/")
         for fn in os.listdir(os.path.join(target, "archives")):
@@ -587,16 +585,14 @@ def copy_build_to_webroot(
     logging.info("%s files changed", len(changed))
     if changed and not skip_cache_invalidation:
         targets_dir = www_root
-        prefixes = shell_out(["find", "-L", targets_dir, "-samefile", target])
+        prefixes = run(["find", "-L", targets_dir, "-samefile", target]).stdout
         prefixes = prefixes.replace(targets_dir + "/", "")
         prefixes = [prefix + "/" for prefix in prefixes.split("\n") if prefix]
         to_purge = prefixes[:]
         for prefix in prefixes:
             to_purge.extend(prefix + p for p in changed)
         logging.info("Running CDN purge")
-        shell_out(
-            ["curl", "-XPURGE", "https://docs.python.org/{%s}" % ",".join(to_purge)]
-        )
+        run(["curl", "-XPURGE", "https://docs.python.org/{%s}" % ",".join(to_purge)])
     logging.info(
         "Publishing done for version: %s, language: %s", version.name, language.tag
     )