diff --git a/.travis.yml b/.travis.yml
index 0592d2c2b3a..85b4c77921a 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -170,13 +170,8 @@ py36_osx_config: &py36_osx_config
     packages: &py36_osx_config_brew_packages
     - openssl
   env:
-    # Fix Python 3 issue linking to OpenSSL
     - &py36_osx_config_env >
-        PATH="/usr/local/opt/openssl/bin:$PATH"
-        LDFLAGS="-L/usr/local/opt/openssl/lib"
-        CPPFLAGS="-I/usr/local/opt/openssl/include"
-        PYENV_ROOT="${HOME}/.pyenv"
-        PATH="${PYENV_ROOT}/shims:${PATH}"
+        PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
   before_install:
     - curl -L https://github.com/stedolan/jq/releases/download/jq-1.5/jq-osx-amd64 -o /usr/local/bin/jq
    - chmod 755 /usr/local/bin/jq
@@ -208,11 +203,7 @@ py36_osx_test_config: &py36_osx_test_config
   env:
     # Must duplicate py36_osx_config's env because it cannot be merged into a new anchor
     - &py36_osx_test_config_env >
-        PATH="/usr/local/opt/openssl/bin:$PATH"
-        LDFLAGS="-L/usr/local/opt/openssl/lib"
-        CPPFLAGS="-I/usr/local/opt/openssl/include"
-        PYENV_ROOT="${HOME}/.pyenv"
-        PATH="${PYENV_ROOT}/shims:${PATH}"
+        PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}"
         BOOTSTRAPPED_PEX_KEY_SUFFIX=py36.osx

 linux_with_fuse: &linux_with_fuse
@@ -381,10 +372,20 @@ cargo_audit: &cargo_audit
 # Build wheels
 # -------------------------------------------------------------------------

+# N.B. With Python 2, we must build pantsbuild.pants with both UCS2 and UCS4 to provide full
+# compatibility for end users. This is because we constrain our ABI due to the native engine.
+# See https://www.python.org/dev/peps/pep-0513/#ucs-2-vs-ucs-4-builds. Note this distinction is
+# not necessary with Python 3.3+ due to flexible storage of Unicode strings
+# (https://www.python.org/dev/peps/pep-0393/).
+#
+# We treat both Linux UCS4 and OSX UCS2 normally, as these are the defaults for those environments.
+# The Linux UCS2 and OSX UCS4 shards, however, must rebuild Python with
+# `PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs{2,4}` set, along with bootstrapping Pants again rather
+# than pulling the PEX from AWS.
+
 base_build_wheels: &base_build_wheels
   stage: *test
   env:
-    - &base_build_wheels_env RUN_PANTS_FROM_PEX=1 PREPARE_DEPLOY=1
+    - &base_build_wheels_env PREPARE_DEPLOY=1

 base_linux_build_wheels: &base_linux_build_wheels
   # Similar to the bootstrap shard, we build Linux wheels in a docker image to maximize compatibility.
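# NOTE (editor's annotation, not part of the patch): whether a CPython 2 build is narrow
# (UCS2) or wide (UCS4) can be probed via sys.maxunicode, which is 0xFFFF on narrow builds:
#
#   python2 -c "import sys; print('ucs2' if sys.maxunicode == 0xFFFF else 'ucs4')"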
@@ -398,51 +399,102 @@ base_linux_build_wheels: &base_linux_build_wheels travis_ci:latest sh -c "RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n" -py27_linux_build_wheels: &py27_linux_build_wheels - <<: *py27_linux_test_config +py27_linux_build_wheels_ucs2: &py27_linux_build_wheels_ucs2 + <<: *py27_linux_config + <<: *base_linux_build_wheels + <<: *native_engine_cache_config + name: "Build wheels - Linux and cp27m (UCS2)" + env: + - *base_build_wheels_env + - CACHE_NAME=linuxwheelsbuild.ucs2 + script: + - docker build --rm -t travis_ci_py27_ucs2 + --build-arg "TRAVIS_USER=$(id -un)" + --build-arg "TRAVIS_UID=$(id -u)" + --build-arg "TRAVIS_GROUP=$(id -gn)" + --build-arg "TRAVIS_GID=$(id -g)" + build-support/docker/travis_ci_py27_ucs2/ + - &docker_dry_run_release docker run --rm -t + -v "${HOME}:/travis/home" + -v "${TRAVIS_BUILD_DIR}:/travis/workdir" + travis_ci_py27_ucs2:latest + sh -c "PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b && RUN_PANTS_FROM_PEX=1 PEX_VERBOSE=9 ./build-support/bin/release.sh -n" + +py27_linux_build_wheels_ucs4: &py27_linux_build_wheels_ucs4 <<: *base_linux_build_wheels - name: "Build Linux wheels (Py2.7 PEX)" + <<: *py27_linux_test_config + # `py27_linux_test_config` overrides the stage set by `base_build_wheels`, so we re-override it. + stage: *test + name: "Build wheels - Linux and cp27mu (UCS4)" env: - *py27_linux_test_config_env - *base_build_wheels_env - RELEASE_ARGS='' - - CACHE_NAME=linuxwheelsbuild.py27 + - CACHE_NAME=linuxwheelsbuild.ucs4 py36_linux_build_wheels: &py36_linux_build_wheels - <<: *py36_linux_test_config <<: *base_linux_build_wheels - name: "Build Linux wheels (Py3.6 PEX)" + <<: *py36_linux_test_config + name: "Build wheels - Linux and abi3 (Py3.6+)" env: - *py36_linux_test_config_env - *base_build_wheels_env - RELEASE_ARGS='-3' - - CACHE_NAME=linuxwheelsbuild.py36 + - CACHE_NAME=linuxwheelsbuild.abi3 base_osx_build_wheels: &base_osx_build_wheels <<: *base_build_wheels osx_image: xcode8 script: - - ./build-support/bin/release.sh ${RELEASE_ARGS} -n + - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n -py27_osx_build_wheels: &py27_osx_build_wheels +py27_osx_build_wheels_ucs2: &py27_osx_build_wheels_ucs2 <<: *py27_osx_test_config <<: *base_osx_build_wheels - name: "Build OSX wheels (Py2.7 PEX)" + name: "Build wheels - OSX and cp27m (UCS2)" env: - *py27_osx_test_config_env - *base_build_wheels_env - RELEASE_ARGS='' - - CACHE_NAME=osxwheelsbuild.py27 + - CACHE_NAME=osxwheelsbuild.ucs2 + +py27_osx_build_wheels_ucs4: &py27_osx_build_wheels_ucs4 + <<: *py27_osx_config + <<: *base_osx_build_wheels + <<: *native_engine_cache_config + name: "Build wheels - OSX and cp27mu (UCS4)" + addons: + brew: + packages: + - openssl + env: + - *base_build_wheels_env + - CACHE_NAME=osxwheelsbuild.ucs4 + - PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}" + + - PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs4 + # We set $PY to ensure the UCS4 interpreter is used when bootstrapping the PEX. 
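# NOTE (editor's annotation, not part of the patch): presetting $PY here works because
# ci.sh (below) now respects an inherited value -- its
#   export PY="${PY:-python${py_major_minor}}"
# only falls back to a bare interpreter name when $PY is unset.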
+      - PY=${PYENV_ROOT}/shims/python2.7
+  before_install:
+    - curl -L https://github.com/stedolan/jq/releases/download/jq-1.5/jq-osx-amd64 -o /usr/local/bin/jq
+    - chmod 755 /usr/local/bin/jq
+    - ./build-support/bin/install_aws_cli_for_ci.sh
+    - git clone https://github.com/pyenv/pyenv ${PYENV_ROOT}
+    - ${PYENV_ROOT}/bin/pyenv install 2.7.13
+    - ${PYENV_ROOT}/bin/pyenv global 2.7.13
+  script:
+    - PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b
+    - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh -n

 py36_osx_build_wheels: &py36_osx_build_wheels
   <<: *py36_osx_test_config
   <<: *base_osx_build_wheels
-  name: "Build OSX wheels (Py3.6 PEX)"
+  name: "Build wheels - OSX and abi3 (Py3.6+)"
   env:
     - *py36_osx_test_config_env
     - *base_build_wheels_env
     - RELEASE_ARGS='-3'
-    - CACHE_NAME=osxwheelsbuild.py36
+    - CACHE_NAME=osxwheelsbuild.abi3

 # -------------------------------------------------------------------------
 # Rust tests
@@ -644,11 +696,6 @@ matrix:
     - <<: *linux_rust_clippy
     - <<: *cargo_audit

-    - <<: *py27_linux_build_wheels
-    - <<: *py36_linux_build_wheels
-    - <<: *py27_osx_build_wheels
-    - <<: *py36_osx_build_wheels
-
     - <<: *py27_linux_test_config
       name: "Unit tests for pants and pants-plugins (Py2.7 PEX)"
      stage: *test
@@ -666,6 +713,14 @@ matrix:
       script:
         - ./build-support/bin/travis-ci.sh -lp

+    - <<: *py27_linux_build_wheels_ucs2
+    - <<: *py27_linux_build_wheels_ucs4
+    - <<: *py36_linux_build_wheels
+
+    - <<: *py27_osx_build_wheels_ucs2
+    - <<: *py27_osx_build_wheels_ucs4
+    - <<: *py36_osx_build_wheels
+
     - <<: *py36_linux_test_config
       name: "Integration tests for pants - shard 0 (Py3.6 PEX)"
       env:
diff --git a/build-support/bin/ci.sh b/build-support/bin/ci.sh
index 7f294494f65..46dfc332d37 100755
--- a/build-support/bin/ci.sh
+++ b/build-support/bin/ci.sh
@@ -104,26 +104,31 @@ esac
 # We're running against a Pants clone.
 export PANTS_DEV=1

-# Note that we set PY, and when running with Python 3, also set PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS.
-# This would usually not be necessary when developing locally, because the `./pants` and `./pants3`
-# scripts set these constraints for us already. However, we must set the values here because in non-bootstrap shards
-# we run CI using `./pants.pex` instead of the scripts `./pants` and `./pants3`, so those scripts cannot set
-# the relevant environment variables. Without setting these environment variables, the Python 3 shards will try to
-# execute subprocesses using Python 2, which results in the _Py_Dealloc error (#6985), and shards that do not
-# pull down `./pants.pex` but still use a virtualenv (such as Rust Tests) will fail to execute.
+# Determine the Python version to use for bootstrapping pants.pex. This would usually not be
+# necessary to set when developing locally, because the `./pants` and `./pants3` scripts set
+# these constraints for us already. However, we must set the values here because in
+# non-bootstrap shards we run CI using `./pants.pex` instead of the scripts `./pants`
+# and `./pants3`, so those scripts cannot set the relevant environment variables.
 if [[ "${python_two:-false}" == "false" ]]; then
-  py_version_number="3.6"
+  py_major_minor="3.6"
   bootstrap_pants_script="./pants3"
-  export PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS="['CPython==${py_version_number}.*']"
 else
-  py_version_number="2.7"
+  py_major_minor="2.7"
   bootstrap_pants_script="./pants"
 fi
-export PY="python${py_version_number}"
-banner "Using Python ${py_version_number} to execute spawned subprocesses (e.g. tests)"
+export PY="${PY:-python${py_major_minor}}"
+
+# Also set PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS. We set this to the exact Python version
+# to resolve any potential ambiguity when multiple Python interpreters are discoverable, such as
+# Python 2.7.10 vs. 2.7.13. When running with Python 3, we must also set this constraint to ensure
+# all spawned subprocesses use Python 3 rather than the default of Python 2. This is in part
+# necessary to avoid the _Py_Dealloc error (#6985).
+py_major_minor_patch=$(${PY} -c 'import sys; print(".".join(map(str, sys.version_info[0:3])))')
+export PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS="${PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS:-['CPython==${py_major_minor_patch}']}"
+banner "Setting interpreter constraints to ${PANTS_PYTHON_SETUP_INTERPRETER_CONSTRAINTS}"

 if [[ "${run_bootstrap:-false}" == "true" ]]; then
-  start_travis_section "Bootstrap" "Bootstrapping pants as a Python ${py_version_number} PEX"
+  start_travis_section "Bootstrap" "Bootstrapping pants as a Python ${py_major_minor_patch} PEX"
   (
     if [[ "${run_bootstrap_clean:-false}" == "true" ]]; then
       ./build-support/python/clean.sh || die "Failed to clean before bootstrapping pants."
diff --git a/build-support/bin/pre-commit.sh b/build-support/bin/pre-commit.sh
index f6e0d075345..3981f3bbdb6 100755
--- a/build-support/bin/pre-commit.sh
+++ b/build-support/bin/pre-commit.sh
@@ -51,36 +51,36 @@ printf "%s\n" "${ADDED_FILES[@]}" \
 echo "* Checking for banned imports"
 ./build-support/bin/check_banned_imports.sh

-if git diff master --name-only | grep '\.rs$' > /dev/null; then
-  echo "* Checking formatting of rust files" && ./build-support/bin/check_rust_formatting.sh || exit 1
-  # Clippy happens on a different shard because of separate caching concerns.
-  if [[ "${RUNNING_VIA_TRAVIS_CI_SCRIPT}" != "1" ]]; then
-    echo "* Running cargo clippy" && ./build-support/bin/check_clippy.sh || exit 1
-  fi
-  echo "* Checking rust target headers" && build-support/bin/check_rust_target_headers.sh || exit 1
-fi
-
 echo "* Checking for bad shell patterns" && ./build-support/bin/check_shell.sh || exit 1

-$(git rev-parse --verify master > /dev/null 2>&1)
-if [[ $? -eq 0 ]]; then
+# When travis builds a tag, it does so in a shallow clone without master fetched, which
+# fails in pants changed.
+if git rev-parse --verify "master" &>/dev/null; then
   echo "* Checking imports" && ./build-support/bin/isort.sh || \
     die "To fix import sort order, run \`\"$(pwd)/build-support/bin/isort.sh\" -f\`"
+
   # TODO(CMLivingston) Make lint use `-q` option again after addressing proper workunit labeling:
   # https://github.com/pantsbuild/pants/issues/6633
   # TODO: add a test case for this while including a pexrc file, as python checkstyle currently fails
   # quite often with a pexrc available.
   echo "* Checking lint" && ./pants --exclude-target-regexp='testprojects/.*' --changed-parent=master lint || exit 1
+
+  if git diff master --name-only | grep '\.rs$' > /dev/null; then
+    echo "* Checking formatting of rust files" && ./build-support/bin/check_rust_formatting.sh || exit 1
+    # Clippy happens on a different shard because of separate caching concerns.
+ if [[ "${RUNNING_VIA_TRAVIS_CI_SCRIPT}" != "1" ]]; then + echo "* Running cargo clippy" && ./build-support/bin/check_clippy.sh || exit 1 + fi + echo "* Checking rust target headers" && build-support/bin/check_rust_target_headers.sh || exit 1 + fi + + if git diff master --name-only | grep build-support/travis > /dev/null; then + echo "* Checking .travis.yml generation" && \ + actual_travis_yml=$(<.travis.yml) && \ + expected_travis_yml=$(./pants --quiet run build-support/travis:generate_travis_yml) && \ + [ "${expected_travis_yml}" == "${actual_travis_yml}" ] || \ + die "Travis config generator changed but .travis.yml file not regenerated. See top of that file for instructions." + fi else - # When travis builds a tag, it does so in a shallow clone without master fetched, which - # fails in pants changed. echo "* Skipping import/lint checks in partial working copy." fi - -if git diff master --name-only | grep build-support/travis > /dev/null; then - echo "* Checking .travis.yml generation" && \ - actual_travis_yml=$(<.travis.yml) && \ - expected_travis_yml=$(./pants --quiet run build-support/travis:generate_travis_yml) && \ - [ "${expected_travis_yml}" == "${actual_travis_yml}" ] || \ - die "Travis config generator changed but .travis.yml file not regenerated. See top of that file for instructions." -fi diff --git a/build-support/bin/release.sh b/build-support/bin/release.sh index ee8514ec292..cfbb9332c3e 100755 --- a/build-support/bin/release.sh +++ b/build-support/bin/release.sh @@ -114,7 +114,7 @@ function run_pex() { curl -sSL "${PEX_DOWNLOAD_PREFIX}/v${PEX_VERSION}/${PEX_PEX}" > "${pex}" chmod +x "${pex}" - "${pex}" "$@" + "${pex}" -vvvvvvvvv "$@" ) } diff --git a/build-support/docker/travis_ci_py27_ucs2/Dockerfile b/build-support/docker/travis_ci_py27_ucs2/Dockerfile new file mode 100644 index 00000000000..a3bd9df456b --- /dev/null +++ b/build-support/docker/travis_ci_py27_ucs2/Dockerfile @@ -0,0 +1,45 @@ +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +# This file duplicates travis_ci/Dockerfile, except it installs Python 2.7 instead of Python 3.6 +# and adds the env var PYTHON_CONFIGURE_OPTS to install it with UCS2. + +# Use our custom Centos6 image for binary compatibility with old linux distros. +FROM pantsbuild/centos6:latest + +# Note we use 2.7.15, rather than 2.7.13, as the centos6 image already comes with 2.7.13 +# installed, which uses UCS4 instead of UCS2. This allows us to disambiguate which Python 2 +# interpreter to use when `ci.sh` sets the interpreter constraints for Pants and PEX. We +# set $PY to the exact Python 2.7 version we want to ensure the PEX is bootstrapped +# with UCS 2. +ARG PYTHON_2_VERSION=2.7.15 +RUN yum install sqlite-devel -y +ENV PYENV_ROOT /pyenv-docker-build +RUN mkdir ${PYENV_ROOT} +RUN git clone https://github.com/pyenv/pyenv ${PYENV_ROOT} +ENV PYTHON_CONFIGURE_OPTS --enable-unicode=ucs2 +RUN /usr/bin/scl enable devtoolset-7 -- bash -c '\ + ${PYENV_ROOT}/bin/pyenv install ${PYTHON_2_VERSION} \ + && ${PYENV_ROOT}/bin/pyenv global ${PYTHON_2_VERSION}' +ENV PATH "${PYENV_ROOT}/shims:${PATH}" +ENV PY "${PYENV_ROOT}/shims/python2.7" +ENV PEX_PYTHON "${PYENV_ROOT}/shims/python2.7" + +# Setup mount points for the travis ci user & workdir. +VOLUME /travis/home +VOLUME /travis/workdir + +# Setup a non-root user to execute the build under (avoids problems with npm install). 
+ARG TRAVIS_USER=travis_ci +ARG TRAVIS_UID=1000 +ARG TRAVIS_GROUP=root +ARG TRAVIS_GID=0 + +RUN groupadd --gid ${TRAVIS_GID} ${TRAVIS_GROUP} || true +RUN useradd -d /travis/home -g ${TRAVIS_GROUP} --uid ${TRAVIS_UID} ${TRAVIS_USER} +USER ${TRAVIS_USER}:${TRAVIS_GROUP} + +# Our newly created user is unlikely to have a sane environment: set a locale at least. +ENV LC_ALL="en_US.UTF-8" + +WORKDIR /travis/workdir diff --git a/build-support/travis/env_osx_with_pyenv.mustache b/build-support/travis/env_osx_with_pyenv.mustache new file mode 100644 index 00000000000..39d3ef68fe9 --- /dev/null +++ b/build-support/travis/env_osx_with_pyenv.mustache @@ -0,0 +1 @@ +PATH="/usr/local/opt/openssl/bin:$PATH" LDFLAGS="-L/usr/local/opt/openssl/lib" CPPFLAGS="-I/usr/local/opt/openssl/include" PYENV_ROOT="${HOME}/.pyenv" PATH="${PYENV_ROOT}/shims:${PATH}" diff --git a/build-support/travis/generate_travis_yml.py b/build-support/travis/generate_travis_yml.py index 0977b2e60b7..64568642fe0 100644 --- a/build-support/travis/generate_travis_yml.py +++ b/build-support/travis/generate_travis_yml.py @@ -25,12 +25,14 @@ def generate_travis_yml(): """Generates content for a .travis.yml file from templates.""" - template = pkg_resources.resource_string( - __name__, 'travis.yml.mustache').decode('utf-8') - before_install_linux = pkg_resources.resource_string( - __name__, 'before_install_linux.mustache').decode('utf-8') - before_install_osx = pkg_resources.resource_string( - __name__, 'before_install_osx.mustache').decode('utf-8') + def get_mustache_file(file_name): + return pkg_resources.resource_string(__name__, file_name).decode('utf-8') + + template = get_mustache_file('travis.yml.mustache') + before_install_linux = get_mustache_file('before_install_linux.mustache') + before_install_osx = get_mustache_file('before_install_osx.mustache') + env_osx_with_pyenv = get_mustache_file('env_osx_with_pyenv.mustache') + context = { 'header': HEADER, 'py3_integration_shards': range(0, num_py3_integration_shards), @@ -42,6 +44,7 @@ def generate_travis_yml(): } renderer = pystache.Renderer(partials={ 'before_install_linux': before_install_linux, - 'before_install_osx': before_install_osx + 'before_install_osx': before_install_osx, + 'env_osx_with_pyenv': env_osx_with_pyenv }) print(renderer.render(template, context)) diff --git a/build-support/travis/travis.yml.mustache b/build-support/travis/travis.yml.mustache index 7034d8902bd..31fcdb138bd 100644 --- a/build-support/travis/travis.yml.mustache +++ b/build-support/travis/travis.yml.mustache @@ -156,13 +156,8 @@ py36_osx_config: &py36_osx_config packages: &py36_osx_config_brew_packages - openssl env: - # Fix Python 3 issue linking to OpenSSL - &py36_osx_config_env > - PATH="/usr/local/opt/openssl/bin:$PATH" - LDFLAGS="-L/usr/local/opt/openssl/lib" - CPPFLAGS="-I/usr/local/opt/openssl/include" - PYENV_ROOT="${HOME}/.pyenv" - PATH="${PYENV_ROOT}/shims:${PATH}" + {{>env_osx_with_pyenv}} before_install: {{>before_install_osx}} # Clone pyenv directly from GitHub. 
For multiple osx images, brew's version of pyenv is too old to get @@ -192,11 +187,7 @@ py36_osx_test_config: &py36_osx_test_config env: # Must duplicate py36_osx_config's env because it cannot be merged into a new anchor - &py36_osx_test_config_env > - PATH="/usr/local/opt/openssl/bin:$PATH" - LDFLAGS="-L/usr/local/opt/openssl/lib" - CPPFLAGS="-I/usr/local/opt/openssl/include" - PYENV_ROOT="${HOME}/.pyenv" - PATH="${PYENV_ROOT}/shims:${PATH}" + {{>env_osx_with_pyenv}} BOOTSTRAPPED_PEX_KEY_SUFFIX=py36.osx linux_with_fuse: &linux_with_fuse @@ -360,10 +351,20 @@ cargo_audit: &cargo_audit # Build wheels # ------------------------------------------------------------------------- +# N.B. With Python 2, we must build pantsbuild.pants with both UCS2 and UCS4 to provide full +# compatibility for end users. This is because we constrain our ABI due to the native engine. +# See https://www.python.org/dev/peps/pep-0513/#ucs-2-vs-ucs-4-builds. Note this distinction is +# not necessary with Python 3.3+ due to flexible storage of Unicode strings (https://www.python.org/dev/peps/pep-0393/). +# +# We treat both Linux UCS4 and OSX UCS2 normally, as these are the defaults for those environments. +# The Linux UCS2 and OSX UCS4 shards, however, must rebuild Python with +# `PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs{2,4}` set, along with bootstrapping Pants again rather +# than pulling the PEX from AWS. + base_build_wheels: &base_build_wheels stage: *test env: - - &base_build_wheels_env RUN_PANTS_FROM_PEX=1 PREPARE_DEPLOY=1 + - &base_build_wheels_env PREPARE_DEPLOY=1 base_linux_build_wheels: &base_linux_build_wheels # Similar to the bootstrap shard, we build Linux wheels in a docker image to maximize compatibility. @@ -377,51 +378,99 @@ base_linux_build_wheels: &base_linux_build_wheels travis_ci:latest sh -c "RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n" -py27_linux_build_wheels: &py27_linux_build_wheels - <<: *py27_linux_test_config +py27_linux_build_wheels_ucs2: &py27_linux_build_wheels_ucs2 + <<: *py27_linux_config + <<: *base_linux_build_wheels + <<: *native_engine_cache_config + name: "Build wheels - Linux and cp27m (UCS2)" + env: + - *base_build_wheels_env + - CACHE_NAME=linuxwheelsbuild.ucs2 + script: + - docker build --rm -t travis_ci_py27_ucs2 + --build-arg "TRAVIS_USER=$(id -un)" + --build-arg "TRAVIS_UID=$(id -u)" + --build-arg "TRAVIS_GROUP=$(id -gn)" + --build-arg "TRAVIS_GID=$(id -g)" + build-support/docker/travis_ci_py27_ucs2/ + - &docker_dry_run_release docker run --rm -t + -v "${HOME}:/travis/home" + -v "${TRAVIS_BUILD_DIR}:/travis/workdir" + travis_ci_py27_ucs2:latest + sh -c "PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b && RUN_PANTS_FROM_PEX=1 PEX_VERBOSE=9 ./build-support/bin/release.sh -n" + +py27_linux_build_wheels_ucs4: &py27_linux_build_wheels_ucs4 <<: *base_linux_build_wheels - name: "Build Linux wheels (Py2.7 PEX)" + <<: *py27_linux_test_config + # `py27_linux_test_config` overrides the stage set by `base_build_wheels`, so we re-override it. 
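# NOTE (editor's annotation, not part of the patch): an explicit key always wins over one
# pulled in via a `<<:` merge, which is why the `stage` re-override below takes effect:
#
#   base: &base { stage: test }
#   child: { <<: *base, stage: deploy }   # child.stage == 'deploy'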
+  stage: *test
+  name: "Build wheels - Linux and cp27mu (UCS4)"
   env:
     - *py27_linux_test_config_env
     - *base_build_wheels_env
     - RELEASE_ARGS=''
-    - CACHE_NAME=linuxwheelsbuild.py27
+    - CACHE_NAME=linuxwheelsbuild.ucs4

 py36_linux_build_wheels: &py36_linux_build_wheels
-  <<: *py36_linux_test_config
   <<: *base_linux_build_wheels
-  name: "Build Linux wheels (Py3.6 PEX)"
+  <<: *py36_linux_test_config
+  name: "Build wheels - Linux and abi3 (Py3.6+)"
   env:
     - *py36_linux_test_config_env
     - *base_build_wheels_env
     - RELEASE_ARGS='-3'
-    - CACHE_NAME=linuxwheelsbuild.py36
+    - CACHE_NAME=linuxwheelsbuild.abi3

 base_osx_build_wheels: &base_osx_build_wheels
   <<: *base_build_wheels
   osx_image: xcode8
   script:
-    - ./build-support/bin/release.sh ${RELEASE_ARGS} -n
+    - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh ${RELEASE_ARGS} -n

-py27_osx_build_wheels: &py27_osx_build_wheels
+py27_osx_build_wheels_ucs2: &py27_osx_build_wheels_ucs2
   <<: *py27_osx_test_config
   <<: *base_osx_build_wheels
-  name: "Build OSX wheels (Py2.7 PEX)"
+  name: "Build wheels - OSX and cp27m (UCS2)"
   env:
     - *py27_osx_test_config_env
     - *base_build_wheels_env
     - RELEASE_ARGS=''
-    - CACHE_NAME=osxwheelsbuild.py27
+    - CACHE_NAME=osxwheelsbuild.ucs2
+
+py27_osx_build_wheels_ucs4: &py27_osx_build_wheels_ucs4
+  <<: *py27_osx_config
+  <<: *base_osx_build_wheels
+  <<: *native_engine_cache_config
+  name: "Build wheels - OSX and cp27mu (UCS4)"
+  addons:
+    brew:
+      packages:
+      - openssl
+  env:
+    - *base_build_wheels_env
+    - CACHE_NAME=osxwheelsbuild.ucs4
+    - {{>env_osx_with_pyenv}}
+    - PYTHON_CONFIGURE_OPTS=--enable-unicode=ucs4
+    # We set $PY to ensure the UCS4 interpreter is used when bootstrapping the PEX.
+    - PY=${PYENV_ROOT}/shims/python2.7
+  before_install:
+    {{>before_install_osx}}
+    - git clone https://github.com/pyenv/pyenv ${PYENV_ROOT}
+    - ${PYENV_ROOT}/bin/pyenv install 2.7.13
+    - ${PYENV_ROOT}/bin/pyenv global 2.7.13
+  script:
+    - PEX_VERBOSE=9 ./build-support/bin/ci.sh -2b
+    - RUN_PANTS_FROM_PEX=1 ./build-support/bin/release.sh -n

 py36_osx_build_wheels: &py36_osx_build_wheels
   <<: *py36_osx_test_config
   <<: *base_osx_build_wheels
-  name: "Build OSX wheels (Py3.6 PEX)"
+  name: "Build wheels - OSX and abi3 (Py3.6+)"
   env:
     - *py36_osx_test_config_env
     - *base_build_wheels_env
     - RELEASE_ARGS='-3'
-    - CACHE_NAME=osxwheelsbuild.py36
+    - CACHE_NAME=osxwheelsbuild.abi3

 # -------------------------------------------------------------------------
 # Rust tests
@@ -623,11 +672,6 @@ matrix:
     - <<: *linux_rust_clippy
     - <<: *cargo_audit

-    - <<: *py27_linux_build_wheels
-    - <<: *py36_linux_build_wheels
-    - <<: *py27_osx_build_wheels
-    - <<: *py36_osx_build_wheels
-
     - <<: *py27_linux_test_config
       name: "Unit tests for pants and pants-plugins (Py2.7 PEX)"
       stage: *test
@@ -645,6 +689,14 @@ matrix:
       script:
         - ./build-support/bin/travis-ci.sh -lp

+    - <<: *py27_linux_build_wheels_ucs2
+    - <<: *py27_linux_build_wheels_ucs4
+    - <<: *py36_linux_build_wheels
+
+    - <<: *py27_osx_build_wheels_ucs2
+    - <<: *py27_osx_build_wheels_ucs4
+    - <<: *py36_osx_build_wheels
+
 {{#py3_integration_shards}}
     - <<: *py36_linux_test_config
       name: "Integration tests for pants - shard {{.}} (Py3.6 PEX)"
diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
index 7f7841c4455..d81b9512c7a 100644
--- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
+++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/scrooge_gen.py
@@ -24,7 +24,7 @@ from
pants.contrib.scrooge.tasks.java_thrift_library_fingerprint_strategy import \ JavaThriftLibraryFingerprintStrategy -from pants.contrib.scrooge.tasks.thrift_util import calculate_compile_sources +from pants.contrib.scrooge.tasks.thrift_util import calculate_include_paths class ScroogeGen(SimpleCodegenTask, NailgunTask): @@ -148,7 +148,7 @@ def execute_codegen(self, target, target_workdir): self.gen(partial_cmd, target, target_workdir) def gen(self, partial_cmd, target, target_workdir): - import_paths, _ = calculate_compile_sources([target], self.is_gentarget) + import_paths = calculate_include_paths([target], self.is_gentarget) args = list(partial_cmd.compiler_args) diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py index 33733b2f89a..ab32008fd11 100644 --- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py +++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_linter.py @@ -15,7 +15,7 @@ from pants.option.ranked_value import RankedValue from pants.task.lint_task_mixin import LintTaskMixin -from pants.contrib.scrooge.tasks.thrift_util import calculate_compile_sources +from pants.contrib.scrooge.tasks.thrift_util import calculate_include_paths class ThriftLintError(Exception): @@ -87,14 +87,14 @@ def _lint(self, target, classpath): if not self._is_strict(target): config_args.append('--ignore-errors') - include_paths , paths = calculate_compile_sources([target], self._is_thrift) + paths = list(target.sources_relative_to_buildroot()) + include_paths = calculate_include_paths([target], self._is_thrift) if target.include_paths: include_paths |= set(target.include_paths) for p in include_paths: config_args.extend(['--include-path', p]) - args = config_args + list(paths) - + args = config_args + paths # If runjava returns non-zero, this marks the workunit as a # FAILURE, and there is no way to wrap this here. diff --git a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py index 436513a07ec..6dc89cf4d4e 100644 --- a/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py +++ b/contrib/scrooge/src/python/pants/contrib/scrooge/tasks/thrift_util.py @@ -57,22 +57,19 @@ def find_root_thrifts(basedirs, sources, log=None): return root_sources -def calculate_compile_sources(targets, is_thrift_target): - """Calculates the set of thrift source files that need to be compiled. - It does not exclude sources that are included in other sources. - - A tuple of (include basedirs, thrift sources) is returned. +def calculate_include_paths(targets, is_thrift_target): + """Calculates the set of import paths for the given targets. :targets: The targets to examine. :is_thrift_target: A predicate to pick out thrift targets for consideration in the analysis. + + :returns: Include basedirs for the target. 
""" basedirs = set() - sources = set() - def collect_sources(target): + def collect_paths(target): basedirs.add(target.target_base) - sources.update(target.sources_relative_to_buildroot()) for target in targets: - target.walk(collect_sources, predicate=is_thrift_target) - return basedirs, sources + target.walk(collect_paths, predicate=is_thrift_target) + return basedirs diff --git a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py index c40c8bc0ebf..1bf4be95537 100644 --- a/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py +++ b/contrib/scrooge/tests/python/pants_test/contrib/scrooge/tasks/test_thrift_linter.py @@ -31,25 +31,47 @@ def alias_groups(cls): def task_type(cls): return ThriftLinter - @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_compile_sources') - def test_lint(self, mock_calculate_compile_sources): + @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_include_paths') + def test_lint(self, mock_calculate_include_paths): def get_default_jvm_options(): return self.task_type().get_jvm_options_default(self.context().options.for_global_scope()) - thrift_target = self.create_library('a', 'java_thrift_library', 'a', ['A.thrift']) + thrift_target = self.create_library('src/thrift/tweet', 'java_thrift_library', 'a', ['A.thrift']) task = self.create_task(self.context(target_roots=thrift_target)) self._prepare_mocks(task) expected_include_paths = ['src/thrift/users', 'src/thrift/tweet'] - expected_paths = ['src/thrift/tweet/a.thrift', 'src/thrift/tweet/b.thrift'] - mock_calculate_compile_sources.return_value = (expected_include_paths, expected_paths) + mock_calculate_include_paths.return_value = expected_include_paths task._lint(thrift_target, task.tool_classpath('scrooge-linter')) self._run_java_mock.assert_called_once_with( classpath='foo_classpath', main='com.twitter.scrooge.linter.Main', args=['--fatal-warnings', '--ignore-errors', '--include-path', 'src/thrift/users', - '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/a.thrift', - 'src/thrift/tweet/b.thrift'], + '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/A.thrift'], + jvm_options=get_default_jvm_options(), + workunit_labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.SUPPRESS_LABEL]) + + @patch('pants.contrib.scrooge.tasks.thrift_linter.calculate_include_paths') + def test_lint_direct_only(self, mock_calculate_include_paths): + # Validate that we do lint only the direct sources of a target, rather than including the + # sources of its transitive deps. + + def get_default_jvm_options(): + return self.task_type().get_jvm_options_default(self.context().options.for_global_scope()) + + self.create_library('src/thrift/tweet', 'java_thrift_library', 'a', ['A.thrift']) + target_b = self.create_library('src/thrift/tweet', 'java_thrift_library', 'b', ['B.thrift'], dependencies=[':a']) + task = self.create_task(self.context(target_roots=target_b)) + self._prepare_mocks(task) + mock_calculate_include_paths.return_value = ['src/thrift/tweet'] + task._lint(target_b, task.tool_classpath('scrooge-linter')) + + # Confirm that we did not include the sources of the dependency. 
+    self._run_java_mock.assert_called_once_with(
+      classpath='foo_classpath',
+      main='com.twitter.scrooge.linter.Main',
+      args=['--fatal-warnings', '--ignore-errors',
+            '--include-path', 'src/thrift/tweet', 'src/thrift/tweet/B.thrift'],
       jvm_options=get_default_jvm_options(),
       workunit_labels=[WorkUnitLabel.COMPILER, WorkUnitLabel.SUPPRESS_LABEL])
diff --git a/examples/src/python/example/3rdparty_py.md b/examples/src/python/example/3rdparty_py.md
index 64d8c328dfa..0b200bcc374 100644
--- a/examples/src/python/example/3rdparty_py.md
+++ b/examples/src/python/example/3rdparty_py.md
@@ -80,6 +80,9 @@ with which your binary is intended to be compatible in the `platforms` field of
 wheel files for each package and platform available at build time.

+If the `platforms` field of your `python_binary` target is explicitly set, Pants will use it both
+for the target itself and for its dependencies; otherwise it falls back to the `python-setup.platforms` option value.
+
 Pants will look for those files in the location specified in the
 [[`python-repos`|pants('src/docs:setup_repo')#redirecting-python-requirements-to-other-servers]]
 field in pants.ini. It can understand either a simple local directory of .whl files or a "find links"-friendly
diff --git a/examples/src/wire/org/pantsbuild/example/element/BUILD b/examples/src/wire/org/pantsbuild/example/element/BUILD
index 916a7f943a1..0afde70a649 100644
--- a/examples/src/wire/org/pantsbuild/example/element/BUILD
+++ b/examples/src/wire/org/pantsbuild/example/element/BUILD
@@ -3,10 +3,12 @@
 java_wire_library(
   sources=[
-    'elements.proto', # Order matters here.
+    # NB: Order matters for these two paths, so we set `ordered_sources=True` below.
+    'elements.proto',
     'compound.proto',
   ],
   dependencies=[
     'examples/src/wire/org/pantsbuild/example/temperature',
   ],
+  ordered_sources=True,
 )
diff --git a/src/docs/common_tasks/BUILD b/src/docs/common_tasks/BUILD
index d799aed4088..4c9bc4e558e 100644
--- a/src/docs/common_tasks/BUILD
+++ b/src/docs/common_tasks/BUILD
@@ -132,3 +132,9 @@ page(
   name='thrift_gen',
   source='thrift_gen.md',
 )
+
+
+page(
+  name='python_proto_gen',
+  source='python_proto_gen.md',
+)
diff --git a/src/docs/common_tasks/python_proto_gen.md b/src/docs/common_tasks/python_proto_gen.md
new file mode 100644
index 00000000000..124a94e6eea
--- /dev/null
+++ b/src/docs/common_tasks/python_proto_gen.md
@@ -0,0 +1,50 @@
+# Python gRPC + protobufs
+
+## Problem
+You have `.proto` files defining protobufs and gRPC services, and you want Pants to generate Python code from them that you can use from a Python application.
+
+## Solution
+Create `python_grpcio_library` targets and use the `gen` goal to generate code from the `.proto` files. The codegen task `grpcio-run` uses Python's grpcio library (https://grpc.io/) to generate Python code from `.proto` files.
+
+## Usage
+
+In a `BUILD` file near your proto files, create a `python_grpcio_library` target with your protos as its `sources`.
+
+```build
+python_grpcio_library(
+  sources=['service.proto'],
+  dependencies=[
+    '3rdparty/python:protobuf',
+  ]
+)
+```
+
+Then, you can add a dependency on this target in your Python binary's `BUILD` file `dependencies` section:
+
+```build
+python_binary(
+  source='server.py',
+  dependencies=[
+# [...]
+    'examples/src/protobuf/org/pantsbuild/example/grpcio/service'
+  ],
+)
+```
+
+## Example
+An example Python gRPC client/server can be found in [/examples/src/python/example/grpcio](https://github.com/pantsbuild/pants/tree/master/examples/src/python/example/grpcio).
+
+To create a gRPC server, execute:
+```bash
+./pants run examples/src/python/example/grpcio/server
+```
+
+and, when the server is running, run the client example:
+```bash
+./pants run examples/src/python/example/grpcio/client
+```
+
+The generated code can be found, as usual, in the pants output directory:
+```bash
+./pants.d/gen/grpcio-run/current/examples.src.protobuf.org.pantsbuild.example.service.service/current/org/pantsbuild/example/service
+```
diff --git a/src/docs/docsite.json b/src/docs/docsite.json
index d36c5bd33f3..bfbbc705ed8 100644
--- a/src/docs/docsite.json
+++ b/src/docs/docsite.json
@@ -96,6 +96,7 @@
     "test_suite": "dist/markdown/html/src/docs/common_tasks/test_suite.html",
     "thrift_deps": "dist/markdown/html/examples/src/thrift/org/pantsbuild/example/README.html",
     "thrift_gen": "dist/markdown/html/src/docs/common_tasks/thrift_gen.html",
+    "grpcio_gen": "dist/markdown/html/src/docs/common_tasks/python_proto_gen.html",
     "tshoot": "dist/markdown/html/src/docs/tshoot.html",
     "why_use_pants": "dist/markdown/html/src/docs/why_use_pants.html"
   },
@@ -173,6 +174,7 @@
     {"heading": "Code & Doc Generation"},
     {"pages" : [
       "thrift_deps",
+      "grpcio_gen",
       "page"
     ] },
diff --git a/src/python/pants/BUILD b/src/python/pants/BUILD
index f35f51e30b7..e79b46788d2 100644
--- a/src/python/pants/BUILD
+++ b/src/python/pants/BUILD
@@ -10,7 +10,7 @@ target(

 python_library(
   name='pants-packaged',
-  sources=[],
+  sources=['dummy.c'],
   dependencies=[
     ':version',
   ],
@@ -18,7 +18,7 @@ python_library(
   name='pantsbuild.pants',
   description='A scalable build tool for large, complex, heterogeneous repos.',
   namespace_packages=['pants', 'pants.backend'],
-  ext_modules=[('native_engine', {'sources': []})],
+  ext_modules=[('native_engine', {'sources': ['src/pants/dummy.c']})],
 ).with_binaries(
   pants='src/python/pants/bin:pants',
 )
diff --git a/src/python/pants/VERSION b/src/python/pants/VERSION
index 2df5c067b0c..96cdf36dad0 100644
--- a/src/python/pants/VERSION
+++ b/src/python/pants/VERSION
@@ -1 +1 @@
-1.14.0rc0
+1.15.0.dev1
diff --git a/src/python/pants/backend/codegen/wire/java/java_wire_library.py b/src/python/pants/backend/codegen/wire/java/java_wire_library.py
index d5cbd9b3fbe..93391af49c4 100644
--- a/src/python/pants/backend/codegen/wire/java/java_wire_library.py
+++ b/src/python/pants/backend/codegen/wire/java/java_wire_library.py
@@ -32,6 +32,7 @@ def __init__(self,
                registry_class=None,
                enum_options=None,
                no_options=None,
+               ordered_sources=None,
                **kwargs):
     """
     :param string service_writer: the name of the class to pass as the --service_writer option to
@@ -43,6 +44,9 @@ def __init__(self,
            doubt, specify com.squareup.wire.SimpleServiceWriter
     :param list enum_options: list of enums to pass to as the --enum-enum_options option, # optional
     :param boolean no_options: boolean that determines if --no_options flag is passed
+    :param boolean ordered_sources: boolean that declares whether the sources argument represents
+           literal ordered sources to be passed directly to the compiler. If false, no ordering is
+           guaranteed for the sources passed to an individual compiler invocation.
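# NOTE (editor's annotation, not part of the patch): for example,
#   java_wire_library(sources=['elements.proto', 'compound.proto'], ordered_sources=True)
# asks for elements.proto to reach the wire compiler strictly before compound.proto, as in
# the examples/src/wire/org/pantsbuild/example/element/BUILD hunk above.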
""" if not service_writer and service_writer_options: @@ -59,6 +63,7 @@ def __init__(self, 'registry_class': PrimitiveField(registry_class or None), 'enum_options': PrimitiveField(enum_options or []), 'no_options': PrimitiveField(no_options or False), + 'ordered_sources': PrimitiveField(ordered_sources or False), }) super(JavaWireLibrary, self).__init__(payload=payload, **kwargs) diff --git a/src/python/pants/backend/codegen/wire/java/wire_gen.py b/src/python/pants/backend/codegen/wire/java/wire_gen.py index 84b378d9136..acfb8d58f11 100644 --- a/src/python/pants/backend/codegen/wire/java/wire_gen.py +++ b/src/python/pants/backend/codegen/wire/java/wire_gen.py @@ -13,10 +13,12 @@ from pants.backend.jvm.targets.java_library import JavaLibrary from pants.backend.jvm.tasks.nailgun_task import NailgunTaskBase from pants.base.build_environment import get_buildroot -from pants.base.exceptions import TaskError +from pants.base.exceptions import TargetDefinitionException, TaskError from pants.base.workunit import WorkUnitLabel from pants.java.jar.jar_dependency import JarDependency +from pants.source.filespec import globs_matches from pants.task.simple_codegen_task import SimpleCodegenTask +from pants.util.dirutil import fast_relpath logger = logging.getLogger(__name__) @@ -61,24 +63,47 @@ def synthetic_target_extra_dependencies(self, target, target_workdir): wire_runtime_deps_spec = self.get_options().javadeps return self.resolve_deps([wire_runtime_deps_spec]) - def format_args_for_target(self, target, target_workdir): - """Calculate the arguments to pass to the command line for a single target.""" - sources = OrderedSet(target.sources_relative_to_buildroot()) - + def _compute_sources(self, target): relative_sources = OrderedSet() - source_roots = set() - for source in sources: + source_roots = OrderedSet() + + def capture_and_relativize_to_source_root(source): source_root = self.context.source_roots.find_by_path(source) if not source_root: source_root = self.context.source_roots.find(target) source_roots.add(source_root.path) - relative_source = os.path.relpath(source, source_root.path) - relative_sources.add(relative_source) + return fast_relpath(source, source_root.path) + + if target.payload.get_field_value('ordered_sources'): + # Re-match the filespecs against the sources in order to apply them in the literal order + # they were specified in. + filespec = target.globs_relative_to_buildroot() + excludes = filespec.get('excludes', []) + for filespec in filespec.get('globs', []): + sources = [s for s in target.sources_relative_to_buildroot() + if globs_matches([s], [filespec], excludes)] + if len(sources) != 1: + raise TargetDefinitionException( + target, + 'With `ordered_sources=True`, expected one match for each file literal, ' + 'but got: {} for literal `{}`.'.format(sources, filespec) + ) + relative_sources.add(capture_and_relativize_to_source_root(sources[0])) + else: + # Otherwise, use the default (unspecified) snapshot ordering. 
+ for source in target.sources_relative_to_buildroot(): + relative_sources.add(capture_and_relativize_to_source_root(source)) + return relative_sources, source_roots + + def format_args_for_target(self, target, target_workdir): + """Calculate the arguments to pass to the command line for a single target.""" args = ['--java_out={0}'.format(target_workdir)] # Add all params in payload to args + relative_sources, source_roots = self._compute_sources(target) + if target.payload.get_field_value('no_options'): args.append('--no_options') diff --git a/src/python/pants/backend/graph_info/tasks/cloc.py b/src/python/pants/backend/graph_info/tasks/cloc.py index 6019ef33792..e1903d9dfd5 100644 --- a/src/python/pants/backend/graph_info/tasks/cloc.py +++ b/src/python/pants/backend/graph_info/tasks/cloc.py @@ -40,7 +40,7 @@ def console_output(self, targets): input_snapshots = tuple( target.sources_snapshot(scheduler=self.context._scheduler) for target in targets ) - input_files = {f.path for snapshot in input_snapshots for f in snapshot.files} + input_files = {f for snapshot in input_snapshots for f in snapshot.files} # TODO: Work out a nice library-like utility for writing an argfile, as this will be common. with temporary_dir() as tmpdir: diff --git a/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py b/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py index fd6ffc2fb3d..a0f92ba78bb 100644 --- a/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py +++ b/src/python/pants/backend/jvm/tasks/coursier/coursier_subsystem.py @@ -77,9 +77,8 @@ def bootstrap_coursier(self, workunit_factory): bootstrap_jar_path = os.path.join(coursier_bootstrap_dir, 'coursier.jar') - with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit: - - if not os.path.exists(bootstrap_jar_path): + if not os.path.exists(bootstrap_jar_path): + with workunit_factory(name='bootstrap-coursier', labels=[WorkUnitLabel.TOOL]) as workunit: with safe_concurrent_creation(bootstrap_jar_path) as temp_path: fetcher = Fetcher(get_buildroot()) checksummer = fetcher.ChecksumListener(digest=hashlib.sha1()) @@ -98,4 +97,4 @@ def bootstrap_coursier(self, workunit_factory): else: workunit.set_outcome(WorkUnit.SUCCESS) - return bootstrap_jar_path + return bootstrap_jar_path diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py index f9f783947ab..dee55bc492f 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/javac/javac_compile.py @@ -209,8 +209,8 @@ def _execute_hermetic_compile(self, cmd, ctx): # Assume no extra .class files to grab. We'll fix up that case soon. # Drop the source_root from the file path. # Assumes `-d .` has been put in the command. 
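# NOTE (editor's annotation, not part of the patch): this hunk, like the cloc.py change
# above, tracks an engine API change in which Snapshot.files became plain relative-path
# strings, so `f.path.endswith('.java')` becomes `f.endswith('.java')`.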
- os.path.relpath(f.path.replace('.java', '.class'), ctx.target.target_base) - for f in input_snapshot.files if f.path.endswith('.java') + os.path.relpath(f.replace('.java', '.class'), ctx.target.target_base) + for f in input_snapshot.files if f.endswith('.java') ) exec_process_request = ExecuteProcessRequest( argv=tuple(cmd), diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py index 0d0732f59dc..6eada1e9327 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py @@ -401,7 +401,7 @@ def do_compile(self, invalidation_check, compile_contexts, classpath_product): valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid] if self.execution_strategy == self.HERMETIC: - self._set_direcotry_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts) + self._set_directory_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts) for valid_target in valid_targets: cc = self.select_runtime_context(compile_contexts[valid_target]) @@ -451,12 +451,12 @@ def _record_compile_classpath(self, classpath, target, outdir): with open(path, 'w') as f: f.write(text) - def _set_direcotry_digests_for_valid_target_classpath_directories(self, valid_targets, compile_contexts): + def _set_directory_digests_for_valid_target_classpath_directories(self, valid_targets, compile_contexts): snapshots = self.context._scheduler.capture_snapshots( tuple(PathGlobsAndRoot(PathGlobs( [self._get_relative_classes_dir_from_target(target, compile_contexts)] ), get_buildroot()) for target in valid_targets)) - [self._set_direcotry_digest_for_compile_context( + [self._set_directory_digest_for_compile_context( snapshot.directory_digest, target, compile_contexts) for target, snapshot in list(zip(valid_targets, snapshots))] @@ -464,7 +464,7 @@ def _get_relative_classes_dir_from_target(self, target, compile_contexts): cc = self.select_runtime_context(compile_contexts[target]) return fast_relpath(cc.classes_dir.path, get_buildroot()) + '/**' - def _set_direcotry_digest_for_compile_context(self, directory_digest, target, compile_contexts): + def _set_directory_digest_for_compile_context(self, directory_digest, target, compile_contexts): cc = self.select_runtime_context(compile_contexts[target]) new_classpath_entry = ClasspathEntry(cc.classes_dir.path, directory_digest) cc.classes_dir = new_classpath_entry diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py index eec90ae5ecb..4266f4f5e02 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/rsc/rsc_compile.py @@ -33,8 +33,7 @@ from pants.java.jar.jar_dependency import JarDependency from pants.reporting.reporting_utils import items_to_report_element from pants.util.contextutil import Timer -from pants.util.dirutil import (fast_relpath, fast_relpath_optional, maybe_read_file, - safe_file_dump, safe_mkdir) +from pants.util.dirutil import fast_relpath, fast_relpath_optional, safe_mkdir from pants.util.memo import memoized_property @@ -60,20 +59,6 @@ def stdout_contents(wu): return f.read().rstrip() -def dump_digest(output_dir, digest): - safe_file_dump('{}.digest'.format(output_dir), - '{}:{}'.format(digest.fingerprint, digest.serialized_bytes_length), mode='w') - - -def load_digest(output_dir): - 
read_file = maybe_read_file('{}.digest'.format(output_dir), binary_mode=False) - if read_file: - fingerprint, length = read_file.split(':') - return Digest(fingerprint, int(length)) - else: - return None - - def _create_desandboxify_fn(possible_path_patterns): # Takes a collection of possible canonical prefixes, and returns a function that # if it finds a matching prefix, strips the path prior to the prefix and returns it @@ -130,7 +115,7 @@ def __init__(self, *args, **kwargs): @classmethod def implementation_version(cls): - return super(RscCompile, cls).implementation_version() + [('RscCompile', 171)] + return super(RscCompile, cls).implementation_version() + [('RscCompile', 172)] @classmethod def register_options(cls, register): @@ -199,11 +184,11 @@ def _nailgunnable_combined_classpath(self): # Overrides the normal zinc compiler classpath, which only contains zinc. def get_zinc_compiler_classpath(self): - return self.do_for_execution_strategy_variant({ + return self.execution_strategy_enum.resolve_for_enum_variant({ self.HERMETIC: lambda: super(RscCompile, self).get_zinc_compiler_classpath(), self.SUBPROCESS: lambda: super(RscCompile, self).get_zinc_compiler_classpath(), self.NAILGUN: lambda: self._nailgunnable_combined_classpath, - }) + })() def register_extra_products_from_contexts(self, targets, compile_contexts): super(RscCompile, self).register_extra_products_from_contexts(targets, compile_contexts) @@ -216,7 +201,7 @@ def pathglob_for(filename): def to_classpath_entries(paths, scheduler): # list of path -> # list of (path, optional) -> - path_and_digests = [(p, load_digest(os.path.dirname(p))) for p in paths] + path_and_digests = [(p, Digest.load(os.path.dirname(p))) for p in paths] # partition: list of path, list of tuples paths_without_digests = [p for (p, d) in path_and_digests if not d] if paths_without_digests: @@ -823,7 +808,7 @@ def _runtool_hermetic(self, main, tool_name, args, distribution, tgt=None, input raise TaskError(res.stderr) if output_dir: - dump_digest(output_dir, res.output_directory_digest) + res.output_directory_digest.dump(output_dir) self.context._scheduler.materialize_directories(( DirectoryToMaterialize( # NB the first element here is the root to materialize into, not the dir to snapshot @@ -859,7 +844,7 @@ def _runtool_nonhermetic(self, parent_workunit, classpath, main, tool_name, args def _runtool(self, main, tool_name, args, distribution, tgt=None, input_files=tuple(), input_digest=None, output_dir=None): with self.context.new_workunit(tool_name) as wu: - return self.do_for_execution_strategy_variant({ + return self.execution_strategy_enum.resolve_for_enum_variant({ self.HERMETIC: lambda: self._runtool_hermetic( main, tool_name, args, distribution, tgt=tgt, input_files=input_files, input_digest=input_digest, output_dir=output_dir), @@ -867,7 +852,7 @@ def _runtool(self, main, tool_name, args, distribution, wu, self.tool_classpath(tool_name), main, tool_name, args, distribution), self.NAILGUN: lambda: self._runtool_nonhermetic( wu, self._nailgunnable_combined_classpath, main, tool_name, args, distribution), - }) + })() def _run_metai_tool(self, distribution, diff --git a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py index 4cdef91fb23..a447cdb0af1 100644 --- a/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py +++ b/src/python/pants/backend/jvm/tasks/jvm_compile/zinc/zinc_compile.py @@ -386,71 +386,90 @@ def relative_to_exec_root(path): with 
open(ctx.zinc_args_file, 'w') as fp: for arg in zinc_args: # NB: in Python 2, options are stored sometimes as bytes and sometimes as unicode in the OptionValueContainer. - # This is due to how Python 2 natively stores attributes as a map of `str` (aka `bytes`) to their value. So, + # This is due to how Python 2 natively stores attributes as a map of `str` (aka `bytes`) to their value. So, # the setattr() and getattr() functions sometimes use bytes. if PY2: arg = ensure_text(arg) fp.write(arg) fp.write('\n') - if self.execution_strategy == self.HERMETIC: - zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot()) - - snapshots = [ - self._zinc.snapshot(self.context._scheduler), - ctx.target.sources_snapshot(self.context._scheduler), - ] - - relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry] - directory_digests = tuple( - entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest - ) - if len(directory_digests) != len(relevant_classpath_entries): - for dep in relevant_classpath_entries: - if dep.directory_digest is None: - logger.warning( - "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic " - "execution".format(dep) - ) - - snapshots.extend( - classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries - ) - - merged_input_digest = self.context._scheduler.merge_directories( - tuple(s.directory_digest for s in (snapshots)) + directory_digests - ) - - # TODO: Extract something common from Executor._create_command to make the command line - # TODO: Lean on distribution for the bin/java appending here - argv = tuple(['.jdk/bin/java'] + jvm_options + ['-cp', zinc_relpath, Zinc.ZINC_COMPILE_MAIN] + zinc_args) - req = ExecuteProcessRequest( - argv=argv, - input_files=merged_input_digest, - output_directories=(classes_dir,), - description="zinc compile for {}".format(ctx.target.address.spec), - # TODO: These should always be unicodes - # Since this is always hermetic, we need to use `underlying_dist` - jdk_home=text_type(self._zinc.underlying_dist.home), - ) - res = self.context.execute_process_synchronously_or_raise(req, self.name(), [WorkUnitLabel.COMPILER]) - - # TODO: Materialize as a batch in do_compile or somewhere - self.context._scheduler.materialize_directories(( - DirectoryToMaterialize(get_buildroot(), res.output_directory_digest), - )) - - # TODO: This should probably return a ClasspathEntry rather than a Digest - return res.output_directory_digest - else: - if self.runjava(classpath=self.get_zinc_compiler_classpath(), - main=Zinc.ZINC_COMPILE_MAIN, - jvm_options=jvm_options, - args=zinc_args, - workunit_name=self.name(), - workunit_labels=[WorkUnitLabel.COMPILER], - dist=self._zinc.dist): - raise TaskError('Zinc compile failed.') + return self.execution_strategy_enum.resolve_for_enum_variant({ + self.HERMETIC: lambda: self._compile_hermetic( + jvm_options, ctx, classes_dir, zinc_args, compiler_bridge_classpath_entry, + dependency_classpath, scalac_classpath_entries), + self.SUBPROCESS: lambda: self._compile_nonhermetic(jvm_options, zinc_args), + self.NAILGUN: lambda: self._compile_nonhermetic(jvm_options, zinc_args), + })() + + class ZincCompileError(TaskError): + """An exception type specifically to signal a failed zinc execution.""" + + def _compile_nonhermetic(self, jvm_options, zinc_args): + exit_code = self.runjava(classpath=self.get_zinc_compiler_classpath(), + main=Zinc.ZINC_COMPILE_MAIN, + jvm_options=jvm_options, + args=zinc_args, + workunit_name=self.name(), + 
workunit_labels=[WorkUnitLabel.COMPILER], + dist=self._zinc.dist) + if exit_code != 0: + raise self.ZincCompileError('Zinc compile failed.', exit_code=exit_code) + + def _compile_hermetic(self, jvm_options, ctx, classes_dir, zinc_args, + compiler_bridge_classpath_entry, dependency_classpath, + scalac_classpath_entries): + zinc_relpath = fast_relpath(self._zinc.zinc, get_buildroot()) + + snapshots = [ + self._zinc.snapshot(self.context._scheduler), + ctx.target.sources_snapshot(self.context._scheduler), + ] + + relevant_classpath_entries = dependency_classpath + [compiler_bridge_classpath_entry] + directory_digests = tuple( + entry.directory_digest for entry in relevant_classpath_entries if entry.directory_digest + ) + if len(directory_digests) != len(relevant_classpath_entries): + for dep in relevant_classpath_entries: + if dep.directory_digest is None: + logger.warning( + "ClasspathEntry {} didn't have a Digest, so won't be present for hermetic " + "execution".format(dep) + ) + + snapshots.extend( + classpath_entry.directory_digest for classpath_entry in scalac_classpath_entries + ) + + # TODO: Extract something common from Executor._create_command to make the command line + # TODO: Lean on distribution for the bin/java appending here + merged_input_digest = self.context._scheduler.merge_directories( + tuple(s.directory_digest for s in snapshots) + directory_digests + ) + argv = ['.jdk/bin/java'] + jvm_options + [ + '-cp', zinc_relpath, + Zinc.ZINC_COMPILE_MAIN + ] + zinc_args + + req = ExecuteProcessRequest( + argv=tuple(argv), + input_files=merged_input_digest, + output_directories=(classes_dir,), + description="zinc compile for {}".format(ctx.target.address.spec), + # TODO: These should always be unicodes + # Since this is always hermetic, we need to use `underlying_dist` + jdk_home=text_type(self._zinc.underlying_dist.home), + ) + res = self.context.execute_process_synchronously_or_raise( + req, self.name(), [WorkUnitLabel.COMPILER]) + + # TODO: Materialize as a batch in do_compile or somewhere + self.context._scheduler.materialize_directories(( + DirectoryToMaterialize(get_buildroot(), res.output_directory_digest), + )) + + # TODO: This should probably return a ClasspathEntry rather than a Digest + return res.output_directory_digest def get_zinc_compiler_classpath(self): """Get the classpath for the zinc compiler JVM tool. diff --git a/src/python/pants/backend/jvm/tasks/nailgun_task.py b/src/python/pants/backend/jvm/tasks/nailgun_task.py index ef6cef73318..dbc3229a5e3 100644 --- a/src/python/pants/backend/jvm/tasks/nailgun_task.py +++ b/src/python/pants/backend/jvm/tasks/nailgun_task.py @@ -15,6 +15,7 @@ from pants.process.subprocess import Subprocess from pants.task.task import Task, TaskBase from pants.util.memo import memoized_property +from pants.util.objects import enum, register_enum_option class NailgunTaskBase(JvmToolTaskMixin, TaskBase): @@ -24,30 +25,16 @@ class NailgunTaskBase(JvmToolTaskMixin, TaskBase): SUBPROCESS = 'subprocess' HERMETIC = 'hermetic' - class InvalidExecutionStrategyMapping(Exception): pass - - _all_execution_strategies = frozenset([NAILGUN, SUBPROCESS, HERMETIC]) - - def do_for_execution_strategy_variant(self, mapping): - """Invoke the method in `mapping` with the key corresponding to the execution strategy. - - `mapping` is a dict mapping execution strategy -> zero-argument lambda. 
- """ - variants = frozenset(mapping.keys()) - if variants != self._all_execution_strategies: - raise self.InvalidExecutionStrategyMapping( - 'Must specify a mapping with exactly the keys {} (was: {})' - .format(self._all_execution_strategies, variants)) - method_for_variant = mapping[self.execution_strategy] - # The methods need not return a value, but we pass it along if they do. - return method_for_variant() + class ExecutionStrategy(enum([NAILGUN, SUBPROCESS, HERMETIC])): pass @classmethod def register_options(cls, register): super(NailgunTaskBase, cls).register_options(register) - register('--execution-strategy', choices=[cls.NAILGUN, cls.SUBPROCESS, cls.HERMETIC], default=cls.NAILGUN, - help='If set to nailgun, nailgun will be enabled and repeated invocations of this ' - 'task will be quicker. If set to subprocess, then the task will be run without nailgun.') + register_enum_option( + register, cls.ExecutionStrategy, '--execution-strategy', + help='If set to nailgun, nailgun will be enabled and repeated invocations of this ' + 'task will be quicker. If set to subprocess, then the task will be run without nailgun. ' + 'Hermetic execution is an experimental subprocess execution framework.') register('--nailgun-timeout-seconds', advanced=True, default=10, type=float, help='Timeout (secs) for nailgun startup.') register('--nailgun-connect-attempts', advanced=True, default=5, type=int, @@ -60,6 +47,13 @@ def register_options(cls, register): rev='0.9.1'), ]) + @memoized_property + def execution_strategy_enum(self): + # TODO: This .create() call can be removed when the enum interface is more stable as the option + # is converted into an instance of self.ExecutionStrategy via the `type` argument through + # register_enum_option(). + return self.ExecutionStrategy.create(self.get_options().execution_strategy) + @classmethod def subsystem_dependencies(cls): return super(NailgunTaskBase, cls).subsystem_dependencies() + (Subprocess.Factory,) @@ -76,9 +70,10 @@ def __init__(self, *args, **kwargs): self._executor_workdir = os.path.join(self.context.options.for_global_scope().pants_workdir, *id_tuple) - @memoized_property + # TODO: eventually deprecate this when we can move all subclasses to use the enum! + @property def execution_strategy(self): - return self.get_options().execution_strategy + return self.execution_strategy_enum.value def create_java_executor(self, dist=None): """Create java executor that uses this task's ng daemon, if allowed. 
diff --git a/src/python/pants/backend/native/config/environment.py b/src/python/pants/backend/native/config/environment.py index 6d8a55b30ea..b433535668f 100644 --- a/src/python/pants/backend/native/config/environment.py +++ b/src/python/pants/backend/native/config/environment.py @@ -5,46 +5,123 @@ from __future__ import absolute_import, division, print_function, unicode_literals import os -from abc import abstractproperty +from abc import abstractmethod, abstractproperty from pants.engine.rules import SingletonRule +from pants.util.memo import memoized_classproperty from pants.util.meta import AbstractClass -from pants.util.objects import datatype +from pants.util.objects import datatype, enum from pants.util.osutil import all_normalized_os_names, get_normalized_os_name from pants.util.strutil import create_path_env_var -class Platform(datatype(['normalized_os_name'])): +class Platform(enum('normalized_os_name', all_normalized_os_names())): - class UnsupportedPlatformError(Exception): - """Thrown if pants is running on an unrecognized platform.""" + default_value = get_normalized_os_name() - @classmethod - def create(cls): - return Platform(get_normalized_os_name()) - _NORMALIZED_OS_NAMES = frozenset(all_normalized_os_names()) +def _list_field(func): + """A decorator for methods corresponding to list-valued fields of an `ExtensibleAlgebraic`. - def resolve_platform_specific(self, platform_specific_funs): - arg_keys = frozenset(platform_specific_funs.keys()) - unknown_plats = self._NORMALIZED_OS_NAMES - arg_keys - if unknown_plats: - raise self.UnsupportedPlatformError( - "platform_specific_funs {} must support platforms {}" - .format(platform_specific_funs, list(unknown_plats))) - extra_plats = arg_keys - self._NORMALIZED_OS_NAMES - if extra_plats: - raise self.UnsupportedPlatformError( - "platform_specific_funs {} has unrecognized platforms {}" - .format(platform_specific_funs, list(extra_plats))) + The result is also wrapped in `abstractproperty`. + """ + wrapped = abstractproperty(func) + wrapped._field_type = 'list' + return wrapped - fun_for_platform = platform_specific_funs[self.normalized_os_name] - return fun_for_platform() +def _algebraic_data(metaclass): + """A class decorator to pull out `_list_fields` from a mixin class for use with a `datatype`.""" + def wrapper(cls): + cls.__bases__ += (metaclass,) + cls._list_fields = metaclass._list_fields + return cls + return wrapper -class Executable(AbstractClass): - @abstractproperty +# NB: prototypal inheritance seems *deeply* linked with the idea here! +# TODO: since we are calling these methods from other files, we should remove the leading underscore +# and add testing! +class _ExtensibleAlgebraic(AbstractClass): + """A mixin to make it more concise to coalesce datatypes with related collection fields.""" + + @memoized_classproperty + def _list_fields(cls): + all_list_fields = [] + for field_name in cls.__abstractmethods__: + f = getattr(cls, field_name) + if getattr(f, '_field_type', None) == 'list': + all_list_fields.append(field_name) + return frozenset(all_list_fields) + + @abstractmethod + def copy(self, **kwargs): + """Implementations should have the same behavior as a `datatype()`'s `copy()` method.""" + + class AlgebraicDataError(Exception): pass + + def _single_list_field_operation(self, field_name, list_value, prepend=True): + if field_name not in self._list_fields: + raise self.AlgebraicDataError( + "Field '{}' is not in this object's set of declared list fields: {} (this object is: {})."
+ .format(field_name, self._list_fields, self)) + cur_value = getattr(self, field_name) + + if prepend: + new_value = list_value + cur_value + else: + new_value = cur_value + list_value + + arg_dict = {field_name: new_value} + return self.copy(**arg_dict) + + def prepend_field(self, field_name, list_value): + """Return a copy of this object with `list_value` prepended to the field named `field_name`.""" + return self._single_list_field_operation(field_name, list_value, prepend=True) + + def append_field(self, field_name, list_value): + """Return a copy of this object with `list_value` appended to the field named `field_name`.""" + return self._single_list_field_operation(field_name, list_value, prepend=False) + + def sequence(self, other, exclude_list_fields=None): + """Return a copy of this object which combines all the fields common to both `self` and `other`. + + List fields will be concatenated. + + The return type of this method is the type of `self` (or whatever `.copy()` returns), but the + `other` argument can be any `_ExtensibleAlgebraic` instance. + """ + exclude_list_fields = frozenset(exclude_list_fields or []) + overwrite_kwargs = {} + + nonexistent_excluded_fields = exclude_list_fields - self._list_fields + if nonexistent_excluded_fields: + raise self.AlgebraicDataError( + "Fields {} to exclude from a sequence() were not found in this object's list fields: {}. " + "This object is {}, the other object is {}." + .format(nonexistent_excluded_fields, self._list_fields, self, other)) + + shared_list_fields = (self._list_fields + & other._list_fields + - exclude_list_fields) + if not shared_list_fields: + raise self.AlgebraicDataError( + "Objects to sequence have no shared fields after excluding {}. " + "This object is {}, with list fields: {}. " + "The other object is {}, with list fields: {}." + .format(exclude_list_fields, self, self._list_fields, other, other._list_fields)) + + for list_field_name in shared_list_fields: + lhs_value = getattr(self, list_field_name) + rhs_value = getattr(other, list_field_name) + overwrite_kwargs[list_field_name] = lhs_value + rhs_value + + return self.copy(**overwrite_kwargs) + + +class _Executable(_ExtensibleAlgebraic): + + @_list_field def path_entries(self): """A list of directory paths containing this executable, to be used in a subprocess's PATH. @@ -60,63 +137,65 @@ def exe_filename(self): :rtype: str """ - # TODO: rename this to 'runtime_library_dirs'! - @abstractproperty - def library_dirs(self): + @_list_field + def runtime_library_dirs(self): """Directories containing shared libraries that must be on the runtime library search path. - Note: this is for libraries needed for the current Executable to run -- see LinkerMixin below + Note: this is for libraries needed for the current _Executable to run -- see _LinkerMixin below for libraries that are needed at link time. - :rtype: list of str """ - @property + @_list_field def extra_args(self): - """Additional arguments used when invoking this Executable. + """Additional arguments used when invoking this _Executable. These are typically placed before the invocation-specific command line arguments. :rtype: list of str """ - return [] _platform = Platform.create() @property - def as_invocation_environment_dict(self): - """A dict to use as this Executable's execution environment. + def invocation_environment_dict(self): + """A dict to use as this _Executable's execution environment. 
+ + This isn't made into an "algebraic" field because its contents (the keys of the dict) are + generally known to the specific class which is overriding this property. Implementations of this + property can then make use of the data in the algebraic fields to populate this dict. :rtype: dict of string -> string """ - lib_env_var = self._platform.resolve_platform_specific({ - 'darwin': lambda: 'DYLD_LIBRARY_PATH', - 'linux': lambda: 'LD_LIBRARY_PATH', + lib_env_var = self._platform.resolve_for_enum_variant({ + 'darwin': 'DYLD_LIBRARY_PATH', + 'linux': 'LD_LIBRARY_PATH', }) return { 'PATH': create_path_env_var(self.path_entries), - lib_env_var: create_path_env_var(self.library_dirs), + lib_env_var: create_path_env_var(self.runtime_library_dirs), } +@_algebraic_data(_Executable) class Assembler(datatype([ 'path_entries', 'exe_filename', - 'library_dirs', -]), Executable): - pass + 'runtime_library_dirs', + 'extra_args', +])): pass -class LinkerMixin(Executable): +class _LinkerMixin(_Executable): - @abstractproperty + @_list_field def linking_library_dirs(self): """Directories to search for libraries needed at link time. :rtype: list of str """ - @abstractproperty + @_list_field def extra_object_files(self): """A list of object files required to perform a successful link. @@ -126,8 +205,8 @@ def extra_object_files(self): """ @property - def as_invocation_environment_dict(self): - ret = super(LinkerMixin, self).as_invocation_environment_dict.copy() + def invocation_environment_dict(self): + ret = super(_LinkerMixin, self).invocation_environment_dict.copy() full_library_path_dirs = self.linking_library_dirs + [ os.path.dirname(f) for f in self.extra_object_files @@ -141,19 +220,20 @@ def as_invocation_environment_dict(self): return ret +@_algebraic_data(_LinkerMixin) class Linker(datatype([ 'path_entries', 'exe_filename', - 'library_dirs', + 'runtime_library_dirs', 'linking_library_dirs', 'extra_args', 'extra_object_files', -]), LinkerMixin): pass +])): pass -class CompilerMixin(Executable): +class _CompilerMixin(_Executable): - @abstractproperty + @_list_field def include_dirs(self): """Directories to search for header files to #include during compilation. 
@@ -161,8 +241,8 @@ def include_dirs(self): """ @property - def as_invocation_environment_dict(self): - ret = super(CompilerMixin, self).as_invocation_environment_dict.copy() + def invocation_environment_dict(self): + ret = super(_CompilerMixin, self).invocation_environment_dict.copy() if self.include_dirs: ret['CPATH'] = create_path_env_var(self.include_dirs) @@ -170,34 +250,36 @@ def as_invocation_environment_dict(self): return ret +@_algebraic_data(_CompilerMixin) class CCompiler(datatype([ 'path_entries', 'exe_filename', - 'library_dirs', + 'runtime_library_dirs', 'include_dirs', 'extra_args', -]), CompilerMixin): +])): @property - def as_invocation_environment_dict(self): - ret = super(CCompiler, self).as_invocation_environment_dict.copy() + def invocation_environment_dict(self): + ret = super(CCompiler, self).invocation_environment_dict.copy() ret['CC'] = self.exe_filename return ret +@_algebraic_data(_CompilerMixin) class CppCompiler(datatype([ 'path_entries', 'exe_filename', - 'library_dirs', + 'runtime_library_dirs', 'include_dirs', 'extra_args', -]), CompilerMixin): +])): @property - def as_invocation_environment_dict(self): - ret = super(CppCompiler, self).as_invocation_environment_dict.copy() + def invocation_environment_dict(self): + ret = super(CppCompiler, self).invocation_environment_dict.copy() ret['CXX'] = self.exe_filename diff --git a/src/python/pants/backend/native/subsystems/binaries/binutils.py b/src/python/pants/backend/native/subsystems/binaries/binutils.py index 69c50463001..d8b3375b0c4 100644 --- a/src/python/pants/backend/native/subsystems/binaries/binutils.py +++ b/src/python/pants/backend/native/subsystems/binaries/binutils.py @@ -24,13 +24,14 @@ def assembler(self): return Assembler( path_entries=self.path_entries(), exe_filename='as', - library_dirs=[]) + runtime_library_dirs=[], + extra_args=[]) def linker(self): return Linker( path_entries=self.path_entries(), exe_filename='ld', - library_dirs=[], + runtime_library_dirs=[], linking_library_dirs=[], extra_args=[], extra_object_files=[], diff --git a/src/python/pants/backend/native/subsystems/binaries/gcc.py b/src/python/pants/backend/native/subsystems/binaries/gcc.py index 5f48e12fb85..b0696375d79 100644 --- a/src/python/pants/backend/native/subsystems/binaries/gcc.py +++ b/src/python/pants/backend/native/subsystems/binaries/gcc.py @@ -44,9 +44,9 @@ def path_entries(self): @memoized_method def _common_lib_dirs(self, platform): - lib64_tuples = platform.resolve_platform_specific({ - 'darwin': lambda: [], - 'linux': lambda: [('lib64',)], + lib64_tuples = platform.resolve_for_enum_variant({ + 'darwin': [], + 'linux': [('lib64',)], }) return self._filemap(lib64_tuples + [ ('lib',), @@ -65,7 +65,7 @@ def c_compiler(self, platform): return CCompiler( path_entries=self.path_entries, exe_filename='gcc', - library_dirs=self._common_lib_dirs(platform), + runtime_library_dirs=self._common_lib_dirs(platform), include_dirs=self._common_include_dirs, extra_args=[]) @@ -91,7 +91,7 @@ def cpp_compiler(self, platform): return CppCompiler( path_entries=self.path_entries, exe_filename='g++', - library_dirs=self._common_lib_dirs(platform), + runtime_library_dirs=self._common_lib_dirs(platform), include_dirs=(self._common_include_dirs + self._cpp_include_dirs), extra_args=[]) diff --git a/src/python/pants/backend/native/subsystems/binaries/llvm.py b/src/python/pants/backend/native/subsystems/binaries/llvm.py index a49146cbbd1..9786e5c3990 100644 --- a/src/python/pants/backend/native/subsystems/binaries/llvm.py +++ 
b/src/python/pants/backend/native/subsystems/binaries/llvm.py @@ -80,16 +80,13 @@ def _filemap(self, all_components_list): def path_entries(self): return self._filemap([('bin',)]) - _PLATFORM_SPECIFIC_LINKER_NAME = { - 'darwin': lambda: 'ld64.lld', - 'linux': lambda: 'lld', - } - def linker(self, platform): return Linker( path_entries=self.path_entries, - exe_filename=platform.resolve_platform_specific( - self._PLATFORM_SPECIFIC_LINKER_NAME), + exe_filename=platform.resolve_for_enum_variant({ + 'darwin': 'ld64.lld', + 'linux': 'lld', + }), - library_dirs=[], + runtime_library_dirs=[], linking_library_dirs=[], extra_args=[], @@ -108,7 +105,7 @@ def c_compiler(self): return CCompiler( path_entries=self.path_entries, exe_filename='clang', - library_dirs=self._common_lib_dirs, + runtime_library_dirs=self._common_lib_dirs, include_dirs=self._common_include_dirs, extra_args=[]) @@ -120,7 +117,7 @@ def cpp_compiler(self): return CppCompiler( path_entries=self.path_entries, exe_filename='clang++', - library_dirs=self._common_lib_dirs, + runtime_library_dirs=self._common_lib_dirs, include_dirs=(self._cpp_include_dirs + self._common_include_dirs), extra_args=[]) diff --git a/src/python/pants/backend/native/subsystems/conan.py b/src/python/pants/backend/native/subsystems/conan.py index fb8fef89172..ae7d53880bd 100644 --- a/src/python/pants/backend/native/subsystems/conan.py +++ b/src/python/pants/backend/native/subsystems/conan.py @@ -20,6 +20,7 @@ class Conan(PythonToolBase): 'pylint==1.9.3', ] default_entry_point = 'conans.conan' + default_interpreter_constraints = ['CPython>=2.7,<4'] @classmethod def register_options(cls, register): diff --git a/src/python/pants/backend/native/subsystems/native_build_step.py b/src/python/pants/backend/native/subsystems/native_build_step.py index b30f51c07e6..55ffea10dd4 100644 --- a/src/python/pants/backend/native/subsystems/native_build_step.py +++ b/src/python/pants/backend/native/subsystems/native_build_step.py @@ -10,14 +10,10 @@ from pants.subsystem.subsystem import Subsystem from pants.util.memo import memoized_property from pants.util.meta import classproperty -from pants.util.objects import enum +from pants.util.objects import enum, register_enum_option -class ToolchainVariant(enum('descriptor', ['gnu', 'llvm'])): - - @property - def is_gnu(self): - return self.descriptor == 'gnu' +class ToolchainVariant(enum(['gnu', 'llvm'])): pass class NativeBuildStep(CompilerOptionSetsMixin, MirroredTargetOptionMixin, Subsystem): @@ -39,11 +35,10 @@ def register_options(cls, register): help='The default for the "compiler_option_sets" argument ' 'for targets of this language.') - register('--toolchain-variant', type=str, fingerprint=True, advanced=True, - choices=ToolchainVariant.allowed_values, - default=ToolchainVariant.default_value, - help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. Currently all " - "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.") + register_enum_option( + register, ToolchainVariant, '--toolchain-variant', advanced=True, + help="Whether to use gcc (gnu) or clang (llvm) to compile C and C++. 
Currently all " + "linking is done with binutils ld on Linux, and the XCode CLI Tools on MacOS.") def get_compiler_option_sets_for_target(self, target): return self.get_target_mirrored_option('compiler_option_sets', target) diff --git a/src/python/pants/backend/native/subsystems/native_toolchain.py b/src/python/pants/backend/native/subsystems/native_toolchain.py index 7f4dec31efc..1885c68e0d9 100644 --- a/src/python/pants/backend/native/subsystems/native_toolchain.py +++ b/src/python/pants/backend/native/subsystems/native_toolchain.py @@ -4,6 +4,8 @@ from __future__ import absolute_import, division, print_function, unicode_literals +from builtins import object + from pants.backend.native.config.environment import (Assembler, CCompiler, CppCompiler, CppToolchain, CToolchain, Linker, Platform) from pants.backend.native.subsystems.binaries.binutils import Binutils @@ -67,10 +69,21 @@ def _libc_dev(self): class LibcObjects(datatype(['crti_object_paths'])): pass -class GCCLinker(datatype([('linker', Linker)])): pass +class LinkerWrapperMixin(object): + + def for_compiler(self, compiler, platform): + """Return a Linker object which is intended to be compatible with the given `compiler`.""" + return (self.linker + # TODO(#6143): describe why the compiler needs to be first on the PATH! + .sequence(compiler, exclude_list_fields=['extra_args', 'path_entries']) + .prepend_field('path_entries', compiler.path_entries) + .copy(exe_filename=compiler.exe_filename)) + + +class GCCLinker(datatype([('linker', Linker)]), LinkerWrapperMixin): pass -class LLVMLinker(datatype([('linker', Linker)])): pass +class LLVMLinker(datatype([('linker', Linker)]), LinkerWrapperMixin): pass class GCCCToolchain(datatype([('c_toolchain', CToolchain)])): pass @@ -87,10 +100,11 @@ class LLVMCppToolchain(datatype([('cpp_toolchain', CppToolchain)])): pass @rule(LibcObjects, [Select(Platform), Select(NativeToolchain)]) def select_libc_objects(platform, native_toolchain): - paths = platform.resolve_platform_specific({ + # We use lambdas here to avoid searching for libc on osx, where it will fail. + paths = platform.resolve_for_enum_variant({ 'darwin': lambda: [], 'linux': lambda: native_toolchain._libc_dev.get_libc_objects(), - }) + })() yield LibcObjects(paths) @@ -127,8 +141,7 @@ def select_gcc_linker(native_toolchain): base_linker = yield Get(BaseLinker, NativeToolchain, native_toolchain) linker = base_linker.linker libc_objects = yield Get(LibcObjects, NativeToolchain, native_toolchain) - linker_with_libc = linker.copy( - extra_object_files=(linker.extra_object_files + libc_objects.crti_object_paths)) + linker_with_libc = linker.append_field('extra_object_files', libc_objects.crti_object_paths) yield GCCLinker(linker_with_libc) @@ -159,36 +172,24 @@ def select_gcc_install_location(gcc): def select_llvm_c_toolchain(platform, native_toolchain): provided_clang = yield Get(CCompiler, LLVM, native_toolchain._llvm) - # These arguments are shared across platforms. 
- llvm_c_compiler_args = [ - '-x', 'c', '-std=c11', - ] - if platform.normalized_os_name == 'darwin': xcode_clang = yield Get(CCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools) - working_c_compiler = provided_clang.copy( - path_entries=(provided_clang.path_entries + xcode_clang.path_entries), - library_dirs=(provided_clang.library_dirs + xcode_clang.library_dirs), - include_dirs=(provided_clang.include_dirs + xcode_clang.include_dirs), - extra_args=(provided_clang.extra_args + llvm_c_compiler_args + xcode_clang.extra_args)) + joined_c_compiler = provided_clang.sequence(xcode_clang) else: gcc_install = yield Get(GCCInstallLocationForLLVM, GCC, native_toolchain._gcc) provided_gcc = yield Get(CCompiler, GCC, native_toolchain._gcc) - working_c_compiler = provided_clang.copy( - # We need g++'s version of the GLIBCXX library to be able to run, unfortunately. - library_dirs=(provided_gcc.library_dirs + provided_clang.library_dirs), - include_dirs=provided_gcc.include_dirs, - extra_args=(llvm_c_compiler_args + provided_clang.extra_args + gcc_install.as_clang_argv)) + joined_c_compiler = (provided_clang + .sequence(provided_gcc) + .append_field('extra_args', gcc_install.as_clang_argv) + # We need g++'s version of the GLIBCXX library to be able to run. + .prepend_field('runtime_library_dirs', provided_gcc.runtime_library_dirs)) - llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain) - llvm_linker = llvm_linker_wrapper.linker + working_c_compiler = joined_c_compiler.prepend_field('extra_args', [ + '-x', 'c', '-std=c11', + ]) - # TODO(#6855): introduce a more concise way to express these compositions of executables. - working_linker = llvm_linker.copy( - path_entries=(llvm_linker.path_entries + working_c_compiler.path_entries), - exe_filename=working_c_compiler.exe_filename, - library_dirs=(llvm_linker.library_dirs + working_c_compiler.library_dirs), - ) + llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain) + working_linker = llvm_linker_wrapper.for_compiler(working_c_compiler, platform) yield LLVMCToolchain(CToolchain(working_c_compiler, working_linker)) @@ -197,52 +198,42 @@ def select_llvm_c_toolchain(platform, native_toolchain): def select_llvm_cpp_toolchain(platform, native_toolchain): provided_clangpp = yield Get(CppCompiler, LLVM, native_toolchain._llvm) - # These arguments are shared across platforms. - llvm_cpp_compiler_args = [ - '-x', 'c++', '-std=c++11', - # This flag is intended to avoid using any of the headers from our LLVM distribution's C++ - # stdlib implementation, or any from the host system, and instead, use include dirs from the - # XCodeCLITools or GCC. - # TODO(#6143): Determine precisely what this flag does and why it's necessary. - '-nostdinc++', - ] - + # On OSX, we use the libc++ (LLVM) C++ standard library implementation. This is feature-complete + # for OSX, but not for Linux (see https://libcxx.llvm.org/ for more info). if platform.normalized_os_name == 'darwin': - xcode_clangpp = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools) - working_cpp_compiler = provided_clangpp.copy( - path_entries=(provided_clangpp.path_entries + xcode_clangpp.path_entries), - library_dirs=(provided_clangpp.library_dirs + xcode_clangpp.library_dirs), - include_dirs=(provided_clangpp.include_dirs + xcode_clangpp.include_dirs), - # On OSX, this uses the libc++ (LLVM) C++ standard library implementation. This is - # feature-complete for OSX, but not for Linux (see https://libcxx.llvm.org/ for more info). 
- extra_args=(llvm_cpp_compiler_args + provided_clangpp.extra_args + xcode_clangpp.extra_args)) - extra_linking_library_dirs = [] + xcode_clang = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools) + joined_cpp_compiler = provided_clangpp.sequence(xcode_clang) + extra_llvm_linking_library_dirs = [] linker_extra_args = [] else: gcc_install = yield Get(GCCInstallLocationForLLVM, GCC, native_toolchain._gcc) provided_gpp = yield Get(CppCompiler, GCC, native_toolchain._gcc) - working_cpp_compiler = provided_clangpp.copy( - # We need g++'s version of the GLIBCXX library to be able to run, unfortunately. - library_dirs=(provided_gpp.library_dirs + provided_clangpp.library_dirs), - # NB: we use g++'s headers on Linux, and therefore their C++ standard library. - include_dirs=provided_gpp.include_dirs, - extra_args=(llvm_cpp_compiler_args + provided_clangpp.extra_args + gcc_install.as_clang_argv)) - # TODO(#6855): why are these necessary? this is very mysterious. - extra_linking_library_dirs = provided_gpp.library_dirs + provided_clangpp.library_dirs + joined_cpp_compiler = (provided_clangpp + .sequence(provided_gpp) + # NB: we use g++'s headers on Linux, and therefore their C++ standard + # library. + .copy(include_dirs=provided_gpp.include_dirs) + .append_field('extra_args', gcc_install.as_clang_argv) + # We need g++'s version of the GLIBCXX library to be able to run. + .prepend_field('runtime_library_dirs', provided_gpp.runtime_library_dirs)) + extra_llvm_linking_library_dirs = provided_gpp.runtime_library_dirs + provided_clangpp.runtime_library_dirs # Ensure we use libstdc++, provided by g++, during the linking stage. linker_extra_args=['-stdlib=libstdc++'] - llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain) - llvm_linker = llvm_linker_wrapper.linker + working_cpp_compiler = joined_cpp_compiler.prepend_field('extra_args', [ + '-x', 'c++', '-std=c++11', + # This flag is intended to avoid using any of the headers from our LLVM distribution's C++ + # stdlib implementation, or any from the host system, and instead, use include dirs from the + # XCodeCLITools or GCC. + # TODO(#6143): Determine precisely what this flag does and why it's necessary. + '-nostdinc++', + ]) - working_linker = llvm_linker.copy( - path_entries=(llvm_linker.path_entries + working_cpp_compiler.path_entries), - exe_filename=working_cpp_compiler.exe_filename, - library_dirs=(llvm_linker.library_dirs + working_cpp_compiler.library_dirs), - linking_library_dirs=(llvm_linker.linking_library_dirs + - extra_linking_library_dirs), - extra_args=(llvm_linker.extra_args + linker_extra_args), - ) + llvm_linker_wrapper = yield Get(LLVMLinker, NativeToolchain, native_toolchain) + working_linker = (llvm_linker_wrapper + .for_compiler(working_cpp_compiler, platform) + .append_field('linking_library_dirs', extra_llvm_linking_library_dirs) + .prepend_field('extra_args', linker_extra_args)) yield LLVMCppToolchain(CppToolchain(working_cpp_compiler, working_linker)) @@ -251,35 +242,23 @@ def select_llvm_cpp_toolchain(platform, native_toolchain): def select_gcc_c_toolchain(platform, native_toolchain): provided_gcc = yield Get(CCompiler, GCC, native_toolchain._gcc) - # GCC needs an assembler, so we provide that (platform-specific) tool here. 
- assembler = yield Get(Assembler, NativeToolchain, native_toolchain) - - gcc_c_compiler_args = [ - '-x', 'c', '-std=c11', - ] - if platform.normalized_os_name == 'darwin': # GCC needs access to some headers that are only provided by the XCode toolchain # currently (e.g. "_stdio.h"). These headers are unlikely to change across versions, so this is # probably safe. xcode_clang = yield Get(CCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools) - new_include_dirs = provided_gcc.include_dirs + xcode_clang.include_dirs + joined_c_compiler = provided_gcc.sequence(xcode_clang) else: - new_include_dirs = provided_gcc.include_dirs + joined_c_compiler = provided_gcc - working_c_compiler = provided_gcc.copy( - path_entries=(provided_gcc.path_entries + assembler.path_entries), - include_dirs=new_include_dirs, - extra_args=gcc_c_compiler_args) + # GCC needs an assembler, so we provide that (platform-specific) tool here. + assembler = yield Get(Assembler, NativeToolchain, native_toolchain) + working_c_compiler = joined_c_compiler.sequence(assembler).prepend_field('extra_args', [ + '-x', 'c', '-std=c11', + ]) gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain) - gcc_linker = gcc_linker_wrapper.linker - - working_linker = gcc_linker.copy( - path_entries=(working_c_compiler.path_entries + gcc_linker.path_entries), - exe_filename=working_c_compiler.exe_filename, - library_dirs=(gcc_linker.library_dirs + working_c_compiler.library_dirs), - ) + working_linker = gcc_linker_wrapper.for_compiler(working_c_compiler, platform) yield GCCCToolchain(CToolchain(working_c_compiler, working_linker)) @@ -288,18 +267,6 @@ def select_gcc_c_toolchain(platform, native_toolchain): def select_gcc_cpp_toolchain(platform, native_toolchain): provided_gpp = yield Get(CppCompiler, GCC, native_toolchain._gcc) - # GCC needs an assembler, so we provide that (platform-specific) tool here. - assembler = yield Get(Assembler, NativeToolchain, native_toolchain) - - gcc_cpp_compiler_args = [ - '-x', 'c++', '-std=c++11', - # This flag is intended to avoid using any of the headers from our LLVM distribution's C++ - # stdlib implementation, or any from the host system, and instead, use include dirs from the - # XCodeCLITools or GCC. - # TODO(#6143): Determine precisely what this flag does and why it's necessary. - '-nostdinc++', - ] - if platform.normalized_os_name == 'darwin': # GCC needs access to some headers that are only provided by the XCode toolchain # currently (e.g. "_stdio.h"). These headers are unlikely to change across versions, so this is @@ -307,29 +274,23 @@ def select_gcc_cpp_toolchain(platform, native_toolchain): # TODO: we should be providing all of these (so we can eventually phase out XCodeCLITools # entirely). 
xcode_clangpp = yield Get(CppCompiler, XCodeCLITools, native_toolchain._xcode_cli_tools) - working_cpp_compiler = provided_gpp.copy( - path_entries=(provided_gpp.path_entries + assembler.path_entries), - include_dirs=(provided_gpp.include_dirs + xcode_clangpp.include_dirs), - extra_args=(gcc_cpp_compiler_args + provided_gpp.extra_args + xcode_clangpp.extra_args), - ) - extra_linking_library_dirs = [] + joined_cpp_compiler = provided_gpp.sequence(xcode_clangpp) else: - provided_clangpp = yield Get(CppCompiler, LLVM, native_toolchain._llvm) - working_cpp_compiler = provided_gpp.copy( - path_entries=(provided_gpp.path_entries + assembler.path_entries), - extra_args=(gcc_cpp_compiler_args + provided_gpp.extra_args), - ) - extra_linking_library_dirs = provided_gpp.library_dirs + provided_clangpp.library_dirs + joined_cpp_compiler = provided_gpp - gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain) - gcc_linker = gcc_linker_wrapper.linker + # GCC needs an assembler, so we provide that (platform-specific) tool here. + assembler = yield Get(Assembler, NativeToolchain, native_toolchain) + working_cpp_compiler = joined_cpp_compiler.sequence(assembler).prepend_field('extra_args', [ + '-x', 'c++', '-std=c++11', + # This flag is intended to avoid using any of the headers from our LLVM distribution's C++ + # stdlib implementation, or any from the host system, and instead, use include dirs from the + # XCodeCLITools or GCC. + # TODO(#6143): Determine precisely what this flag does and why it's necessary. + '-nostdinc++', + ]) - working_linker = gcc_linker.copy( - path_entries=(working_cpp_compiler.path_entries + gcc_linker.path_entries), - exe_filename=working_cpp_compiler.exe_filename, - library_dirs=(gcc_linker.library_dirs + working_cpp_compiler.library_dirs), - linking_library_dirs=(gcc_linker.linking_library_dirs + extra_linking_library_dirs), - ) + gcc_linker_wrapper = yield Get(GCCLinker, NativeToolchain, native_toolchain) + working_linker = gcc_linker_wrapper.for_compiler(working_cpp_compiler, platform) yield GCCCppToolchain(CppToolchain(working_cpp_compiler, working_linker)) @@ -343,8 +304,12 @@ class ToolchainVariantRequest(datatype([ @rule(CToolchain, [Select(ToolchainVariantRequest)]) def select_c_toolchain(toolchain_variant_request): native_toolchain = toolchain_variant_request.toolchain - # TODO: make an enum exhaustiveness checking method that works with `yield Get(...)` statements! - if toolchain_variant_request.variant.is_gnu: + # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`! + use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({ + 'gnu': True, + 'llvm': False, + }) + if use_gcc: toolchain_resolved = yield Get(GCCCToolchain, NativeToolchain, native_toolchain) else: toolchain_resolved = yield Get(LLVMCToolchain, NativeToolchain, native_toolchain) @@ -354,7 +319,12 @@ def select_c_toolchain(toolchain_variant_request): @rule(CppToolchain, [Select(ToolchainVariantRequest)]) def select_cpp_toolchain(toolchain_variant_request): native_toolchain = toolchain_variant_request.toolchain - if toolchain_variant_request.variant.is_gnu: + # TODO(#5933): make an enum exhaustiveness checking method that works with `yield Get(...)`! 
+ use_gcc = toolchain_variant_request.variant.resolve_for_enum_variant({ + 'gnu': True, + 'llvm': False, + }) + if use_gcc: toolchain_resolved = yield Get(GCCCppToolchain, NativeToolchain, native_toolchain) else: toolchain_resolved = yield Get(LLVMCppToolchain, NativeToolchain, native_toolchain) diff --git a/src/python/pants/backend/native/subsystems/xcode_cli_tools.py b/src/python/pants/backend/native/subsystems/xcode_cli_tools.py index 4ea8fceaf53..3c2e472785c 100644 --- a/src/python/pants/backend/native/subsystems/xcode_cli_tools.py +++ b/src/python/pants/backend/native/subsystems/xcode_cli_tools.py @@ -134,14 +134,15 @@ def assembler(self): return Assembler( path_entries=self.path_entries(), exe_filename='as', - library_dirs=[]) + runtime_library_dirs=[], + extra_args=[]) @memoized_method def linker(self): return Linker( path_entries=self.path_entries(), exe_filename='ld', - library_dirs=[], + runtime_library_dirs=[], linking_library_dirs=[], extra_args=[MIN_OSX_VERSION_ARG], extra_object_files=[], @@ -152,7 +153,7 @@ def c_compiler(self): return CCompiler( path_entries=self.path_entries(), exe_filename='clang', - library_dirs=self.lib_dirs(), + runtime_library_dirs=self.lib_dirs(), include_dirs=self.include_dirs(), extra_args=[MIN_OSX_VERSION_ARG]) @@ -161,7 +162,7 @@ def cpp_compiler(self): return CppCompiler( path_entries=self.path_entries(), exe_filename='clang++', - library_dirs=self.lib_dirs(), + runtime_library_dirs=self.lib_dirs(), include_dirs=self.include_dirs(include_cpp_inc=True), extra_args=[MIN_OSX_VERSION_ARG]) diff --git a/src/python/pants/backend/native/targets/native_artifact.py b/src/python/pants/backend/native/targets/native_artifact.py index dc8461d642c..b6ba3bb132b 100644 --- a/src/python/pants/backend/native/targets/native_artifact.py +++ b/src/python/pants/backend/native/targets/native_artifact.py @@ -22,9 +22,9 @@ def alias(cls): def as_shared_lib(self, platform): # TODO: check that the name conforms to some format in the constructor (e.g. no dots?). 
- return platform.resolve_platform_specific({ - 'darwin': lambda: 'lib{}.dylib'.format(self.lib_name), - 'linux': lambda: 'lib{}.so'.format(self.lib_name), + return platform.resolve_for_enum_variant({ + 'darwin': 'lib{}.dylib'.format(self.lib_name), + 'linux': 'lib{}.so'.format(self.lib_name), }) def _compute_fingerprint(self): diff --git a/src/python/pants/backend/native/tasks/conan_fetch.py b/src/python/pants/backend/native/tasks/conan_fetch.py index 6ffa7fe4416..5f9eb11a14f 100644 --- a/src/python/pants/backend/native/tasks/conan_fetch.py +++ b/src/python/pants/backend/native/tasks/conan_fetch.py @@ -124,9 +124,9 @@ def _conan_user_home(self, conan, in_workdir=False): @memoized_property def _conan_os_name(self): - return Platform.create().resolve_platform_specific({ - 'darwin': lambda: 'Macos', - 'linux': lambda: 'Linux', + return Platform.create().resolve_for_enum_variant({ + 'darwin': 'Macos', + 'linux': 'Linux', }) @property diff --git a/src/python/pants/backend/native/tasks/link_shared_libraries.py b/src/python/pants/backend/native/tasks/link_shared_libraries.py index 4f3efc0b69f..913fa9f334a 100644 --- a/src/python/pants/backend/native/tasks/link_shared_libraries.py +++ b/src/python/pants/backend/native/tasks/link_shared_libraries.py @@ -142,11 +142,6 @@ def _make_link_request(self, vt, compiled_objects_product): return link_request - _SHARED_CMDLINE_ARGS = { - 'darwin': lambda: ['-Wl,-dylib'], - 'linux': lambda: ['-shared'], - } - def _execute_link_request(self, link_request): object_files = link_request.object_files @@ -163,7 +158,10 @@ def _execute_link_request(self, link_request): self.context.log.debug("resulting_shared_lib_path: {}".format(resulting_shared_lib_path)) # We are executing in the results_dir, so get absolute paths for everything. 
cmd = ([linker.exe_filename] + - self.platform.resolve_platform_specific(self._SHARED_CMDLINE_ARGS) + + self.platform.resolve_for_enum_variant({ + 'darwin': ['-Wl,-dylib'], + 'linux': ['-shared'], + }) + linker.extra_args + ['-o', os.path.abspath(resulting_shared_lib_path)] + ['-L{}'.format(lib_dir) for lib_dir in link_request.external_lib_dirs] + @@ -173,7 +171,7 @@ def _execute_link_request(self, link_request): self.context.log.info("selected linker exe name: '{}'".format(linker.exe_filename)) self.context.log.debug("linker argv: {}".format(cmd)) - env = linker.as_invocation_environment_dict + env = linker.invocation_environment_dict self.context.log.debug("linker invocation environment: {}".format(env)) with self.context.new_workunit(name='link-shared-libraries', diff --git a/src/python/pants/backend/native/tasks/native_compile.py b/src/python/pants/backend/native/tasks/native_compile.py index 8533b9a9567..1c313e2e3ca 100644 --- a/src/python/pants/backend/native/tasks/native_compile.py +++ b/src/python/pants/backend/native/tasks/native_compile.py @@ -8,19 +8,18 @@ from abc import abstractmethod from collections import defaultdict -from pants.backend.native.config.environment import Executable from pants.backend.native.tasks.native_task import NativeTask from pants.base.build_environment import get_buildroot from pants.base.exceptions import TaskError from pants.base.workunit import WorkUnit, WorkUnitLabel from pants.util.memo import memoized_method, memoized_property from pants.util.meta import AbstractClass, classproperty -from pants.util.objects import SubclassesOf, datatype +from pants.util.objects import datatype from pants.util.process_handler import subprocess class NativeCompileRequest(datatype([ - ('compiler', SubclassesOf(Executable)), + 'compiler', # TODO: add type checking for Collection.of()! 'include_dirs', 'sources', @@ -134,11 +133,11 @@ def _compile_settings(self): @abstractmethod def get_compiler(self, native_library_target): - """An instance of `Executable` which can be invoked to compile files. + """An instance of `_CompilerMixin` which can be invoked to compile files. NB: Subclasses will be queried for the compiler instance once and the result cached. - :return: :class:`pants.backend.native.config.environment.Executable` + :return: :class:`pants.backend.native.config.environment._CompilerMixin` """ def _compiler(self, native_library_target): @@ -229,7 +228,7 @@ def _compile(self, compile_request): compiler = compile_request.compiler output_dir = compile_request.output_dir - env = compiler.as_invocation_environment_dict + env = compiler.invocation_environment_dict with self.context.new_workunit( name=self.workunit_label, labels=[WorkUnitLabel.COMPILER]) as workunit: diff --git a/src/python/pants/backend/python/interpreter_cache.py b/src/python/pants/backend/python/interpreter_cache.py index e0126f74e13..95908ddbd75 100644 --- a/src/python/pants/backend/python/interpreter_cache.py +++ b/src/python/pants/backend/python/interpreter_cache.py @@ -115,9 +115,13 @@ def select_interpreter_for_targets(self, targets): # Return the lowest compatible interpreter. 
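The `invocation_environment_dict` consumed in `_execute_link_request` above is assembled entirely from the algebraic list fields. A condensed sketch of the pattern defined in environment.py, with `os.pathsep.join` standing in for the real `create_path_env_var` helper and hypothetical toolchain paths:

    import os

    def invocation_environment_dict(platform, path_entries, runtime_library_dirs):
      # Pick the dynamic-loader search variable for the current OS, as the real
      # property does via resolve_for_enum_variant.
      lib_env_var = {
        'darwin': 'DYLD_LIBRARY_PATH',
        'linux': 'LD_LIBRARY_PATH',
      }[platform]
      return {
        'PATH': os.pathsep.join(path_entries),
        lib_env_var: os.pathsep.join(runtime_library_dirs),
      }

    env = invocation_environment_dict('linux', ['/opt/gcc/bin'], ['/opt/gcc/lib64'])
    assert env == {'PATH': '/opt/gcc/bin', 'LD_LIBRARY_PATH': '/opt/gcc/lib64'}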
return min(allowed_interpreters) - def _interpreter_from_path(self, path, filters=()): + def _interpreter_from_relpath(self, path, filters=()): + path = os.path.join(self._cache_dir, path) try: executable = os.readlink(os.path.join(path, 'python')) + if not os.path.exists(executable): + self._purge_interpreter(path) + return None except OSError: return None interpreter = PythonInterpreter.from_binary(executable, include_site_extras=False) @@ -125,7 +129,8 @@ def _interpreter_from_path(self, path, filters=()): return self._resolve(interpreter) return None - def _setup_interpreter(self, interpreter, cache_target_path): + def _setup_interpreter(self, interpreter, identity_str): + cache_target_path = os.path.join(self._cache_dir, identity_str) with safe_concurrent_creation(cache_target_path) as safe_path: os.mkdir(safe_path) # Parent will already have been created by safe_concurrent_creation. os.symlink(interpreter.binary, os.path.join(safe_path, 'python')) @@ -134,22 +139,19 @@ def _setup_interpreter(self, interpreter, cache_target_path): def _setup_cached(self, filters=()): """Find all currently-cached interpreters.""" for interpreter_dir in os.listdir(self._cache_dir): - path = os.path.join(self._cache_dir, interpreter_dir) - if os.path.isdir(path): - pi = self._interpreter_from_path(path, filters=filters) - if pi: - logger.debug('Detected interpreter {}: {}'.format(pi.binary, str(pi.identity))) - yield pi + pi = self._interpreter_from_relpath(interpreter_dir, filters=filters) + if pi: + logger.debug('Detected interpreter {}: {}'.format(pi.binary, str(pi.identity))) + yield pi def _setup_paths(self, paths, filters=()): """Find interpreters under paths, and cache them.""" for interpreter in self._matching(PythonInterpreter.all(paths), filters=filters): identity_str = str(interpreter.identity) - cache_path = os.path.join(self._cache_dir, identity_str) - pi = self._interpreter_from_path(cache_path, filters=filters) + pi = self._interpreter_from_relpath(identity_str, filters=filters) if pi is None: - self._setup_interpreter(interpreter, cache_path) - pi = self._interpreter_from_path(cache_path, filters=filters) + self._setup_interpreter(interpreter, identity_str) + pi = self._interpreter_from_relpath(identity_str, filters=filters) if pi: yield pi @@ -251,3 +253,14 @@ def _resolve_and_link(self, interpreter, requirement, target_link): _safe_link(target_location, target_link) logger.debug(' installed {}'.format(target_location)) return Package.from_href(target_location) + + def _purge_interpreter(self, interpreter_dir): + try: + logger.info('Detected stale interpreter `{}` in the interpreter cache, purging.' + .format(interpreter_dir)) + shutil.rmtree(interpreter_dir, ignore_errors=True) + except Exception as e: + logger.warn( + 'Caught exception {!r} during interpreter purge. Please run `./pants clean-all`!' + .format(e) + ) diff --git a/src/python/pants/backend/python/rules/python_test_runner.py b/src/python/pants/backend/python/rules/python_test_runner.py index 70c6457f77c..26ac23c39e5 100644 --- a/src/python/pants/backend/python/rules/python_test_runner.py +++ b/src/python/pants/backend/python/rules/python_test_runner.py @@ -64,7 +64,7 @@ def run_python_test(transitive_hydrated_target, pytest): # pex27, where it should be hermetically provided in some way. 
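The purge logic above guards against cached interpreter symlinks whose targets have disappeared, e.g. after the system Python is upgraded or removed. A condensed, standalone model of the check now split across `_interpreter_from_relpath` and `_purge_interpreter`, assuming the cache layout of one directory per interpreter containing a `python` symlink:

    import os
    import shutil

    def interpreter_binary_from_cache_dir(interpreter_dir):
      try:
        executable = os.readlink(os.path.join(interpreter_dir, 'python'))
      except OSError:
        # Not a valid cache entry at all.
        return None
      if not os.path.exists(executable):
        # The symlink's target vanished: purge the stale entry so it is
        # re-created on the next run instead of failing with a confusing error.
        shutil.rmtree(interpreter_dir, ignore_errors=True)
        return None
      return executable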
output_pytest_requirements_pex_filename = 'pytest-with-requirements.pex' requirements_pex_argv = [ - './{}'.format(pex_snapshot.files[0].path), + './{}'.format(pex_snapshot.files[0]), '--python', python_binary, '-e', 'pytest:main', '-o', output_pytest_requirements_pex_filename, diff --git a/src/python/pants/backend/python/subsystems/pex_build_util.py b/src/python/pants/backend/python/subsystems/pex_build_util.py index 1e2140f3794..417875eaeea 100644 --- a/src/python/pants/backend/python/subsystems/pex_build_util.py +++ b/src/python/pants/backend/python/subsystems/pex_build_util.py @@ -7,6 +7,7 @@ import logging import os from builtins import str +from collections import defaultdict from future.utils import PY2 from pex.fetcher import Fetcher @@ -50,6 +51,19 @@ def has_python_requirements(tgt): return isinstance(tgt, PythonRequirementLibrary) +def can_have_python_platform(tgt): + return isinstance(tgt, (PythonBinary, PythonDistribution)) + + +def targets_by_platform(targets, python_setup): + d = defaultdict(OrderedSet) + for target in targets: + if can_have_python_platform(target): + for platform in target.platforms if target.platforms else python_setup.platforms: + d[platform].add(target) + return d + + def _create_source_dumper(builder, tgt): if type(tgt) == Files: # Loose `Files` as opposed to `Resources` or `PythonTarget`s have no (implied) package structure @@ -241,8 +255,9 @@ def add_interpreter_constraints_from(self, constraint_tgts): # TODO this would be a great place to validate the constraints and present a good error message # if they are incompatible because all the sources of the constraints are available. # See: https://github.com/pantsbuild/pex/blob/584b6e367939d24bc28aa9fa36eb911c8297dac8/pex/interpreter_constraints.py - for tgt in constraint_tgts: - for constraint in tgt.compatibility: + constraint_tuples = {self._python_setup_subsystem.compatibility_or_constraints(tgt) for tgt in constraint_tgts} + for constraint_tuple in constraint_tuples: + for constraint in constraint_tuple: self.add_interpreter_constraint(constraint) def add_direct_requirements(self, reqs): diff --git a/src/python/pants/backend/python/subsystems/pytest.py b/src/python/pants/backend/python/subsystems/pytest.py index e3eb7f9e1b6..cbf53f953dd 100644 --- a/src/python/pants/backend/python/subsystems/pytest.py +++ b/src/python/pants/backend/python/subsystems/pytest.py @@ -14,7 +14,9 @@ class PyTest(Subsystem): def register_options(cls, register): super(PyTest, cls).register_options(register) # TODO: This is currently bounded below `3.7` due to #6282. - register('--requirements', advanced=True, default='pytest>=3.0.7,<3.7', + # TODO: Additionally, this is temporarily pinned to 3.0.7 due to more-itertools 6.0.0 dropping + # Python 2 support: https://github.com/pytest-dev/pytest/issues/4770. 
+ register('--requirements', advanced=True, default='pytest==3.0.7', help='Requirements string for the pytest library.') register('--timeout-requirements', advanced=True, default='pytest-timeout>=1.2,<1.3', help='Requirements string for the pytest-timeout library.') diff --git a/src/python/pants/backend/python/subsystems/python_native_code.py b/src/python/pants/backend/python/subsystems/python_native_code.py index 1b7dc702905..7c8bde33385 100644 --- a/src/python/pants/backend/python/subsystems/python_native_code.py +++ b/src/python/pants/backend/python/subsystems/python_native_code.py @@ -5,13 +5,12 @@ from __future__ import absolute_import, division, print_function, unicode_literals from builtins import str -from collections import defaultdict from pants.backend.native.subsystems.native_toolchain import NativeToolchain from pants.backend.native.targets.native_library import NativeLibrary from pants.backend.python.python_requirement import PythonRequirement +from pants.backend.python.subsystems import pex_build_util from pants.backend.python.subsystems.python_setup import PythonSetup -from pants.backend.python.targets.python_binary import PythonBinary from pants.backend.python.targets.python_distribution import PythonDistribution from pants.base.exceptions import IncompatiblePlatformsError from pants.binaries.executable_pex_tool import ExecutablePexTool @@ -75,7 +74,7 @@ def _any_targets_have_native_sources(self, targets): return True return False - def get_targets_by_declared_platform(self, targets): + def _get_targets_by_declared_platform_with_placeholders(self, targets_by_platform): """ Aggregates a dict that maps a platform string to a list of targets that specify the platform. If no targets have platforms arguments, return a dict containing platforms inherited from @@ -84,19 +83,12 @@ def get_targets_by_declared_platform(self, targets): :param tgts: a list of :class:`Target` objects. :returns: a dict mapping a platform string to a list of targets that specify the platform. 
""" - targets_by_platforms = defaultdict(list) - for tgt in targets: - for platform in tgt.platforms: - targets_by_platforms[platform].append(tgt) - - if not targets_by_platforms: + if not targets_by_platform: for platform in self._python_setup.platforms: - targets_by_platforms[platform] = ['(No target) Platform inherited from either the ' + targets_by_platform[platform] = ['(No target) Platform inherited from either the ' '--platforms option or a pants.ini file.'] - return targets_by_platforms - - _PYTHON_PLATFORM_TARGETS_CONSTRAINT = SubclassesOf(PythonBinary, PythonDistribution) + return targets_by_platform def check_build_for_current_platform_only(self, targets): """ @@ -110,9 +102,8 @@ def check_build_for_current_platform_only(self, targets): if not self._any_targets_have_native_sources(targets): return False - targets_with_platforms = [target for target in targets - if self._PYTHON_PLATFORM_TARGETS_CONSTRAINT.satisfied_by(target)] - platforms_with_sources = self.get_targets_by_declared_platform(targets_with_platforms) + targets_by_platform = pex_build_util.targets_by_platform(targets, self._python_setup) + platforms_with_sources = self._get_targets_by_declared_platform_with_placeholders(targets_by_platform) platform_names = list(platforms_with_sources.keys()) if len(platform_names) < 1: diff --git a/src/python/pants/backend/python/subsystems/python_tool_base.py b/src/python/pants/backend/python/subsystems/python_tool_base.py index 149f671a82c..54b6495e1b0 100644 --- a/src/python/pants/backend/python/subsystems/python_tool_base.py +++ b/src/python/pants/backend/python/subsystems/python_tool_base.py @@ -13,10 +13,16 @@ class PythonToolBase(Subsystem): # Subclasses must set. default_requirements = None default_entry_point = None + # Subclasses need not override. + default_interpreter_constraints = [] @classmethod def register_options(cls, register): super(PythonToolBase, cls).register_options(register) + register('--interpreter-constraints', type=list, advanced=True, fingerprint=True, + default=cls.default_interpreter_constraints, + help='Python interpreter constraints for this tool. An empty list uses the default ' + 'interpreter constraints for the repo.') register('--requirements', type=list, advanced=True, fingerprint=True, default=cls.default_requirements, help='Python requirement strings for the tool.') @@ -24,6 +30,9 @@ def register_options(cls, register): default=cls.default_entry_point, help='The main module for the tool.') + def get_interpreter_constraints(self): + return self.get_options().interpreter_constraints + def get_requirement_specs(self): return self.get_options().requirements diff --git a/src/python/pants/backend/python/tasks/python_binary_create.py b/src/python/pants/backend/python/tasks/python_binary_create.py index 84debe8333a..b4abb43d5e7 100644 --- a/src/python/pants/backend/python/tasks/python_binary_create.py +++ b/src/python/pants/backend/python/tasks/python_binary_create.py @@ -140,7 +140,7 @@ def _create_binary(self, binary_tgt, results_dir): if is_python_target(tgt): constraint_tgts.append(tgt) - # Add target's interpreter compatibility constraints to pex info. + # Add target-level and possibly global interpreter compatibility constraints to pex info. pex_builder.add_interpreter_constraints_from(constraint_tgts) # Dump everything into the builder's chroot. 
diff --git a/src/python/pants/backend/python/tasks/python_tool_prep_base.py b/src/python/pants/backend/python/tasks/python_tool_prep_base.py index 7f0de9db4d6..203da570afd 100644 --- a/src/python/pants/backend/python/tasks/python_tool_prep_base.py +++ b/src/python/pants/backend/python/tasks/python_tool_prep_base.py @@ -5,6 +5,7 @@ from __future__ import absolute_import, division, print_function, unicode_literals import os +from builtins import str from contextlib import contextmanager from pex.pex import PEX @@ -13,6 +14,8 @@ from pants.backend.python.interpreter_cache import PythonInterpreterCache from pants.backend.python.python_requirement import PythonRequirement from pants.backend.python.subsystems.pex_build_util import PexBuilderWrapper +from pants.base.build_environment import get_pants_cachedir +from pants.base.hash_utils import stable_json_sha1 from pants.base.workunit import WorkUnitLabel from pants.task.task import Task from pants.util.dirutil import safe_concurrent_creation @@ -23,11 +26,16 @@ class PythonToolInstance(object): def __init__(self, pex_path, interpreter): self._pex = PEX(pex_path, interpreter=interpreter) + self._interpreter = interpreter @property def pex(self): return self._pex + @property + def interpreter(self): + return self._interpreter + def _pretty_cmdline(self, args): return safe_shlex_join(self._pex.cmdline(args)) @@ -63,6 +71,12 @@ def run(self, *args, **kwargs): return cmdline, exit_code +# TODO: This python tool setup ends up eagerly generating each pex for each task in every goal which +# is transitively required by the command-line goals, even for tasks which no-op. This requires each +# pex for each relevant python tool to be buildable on the current host, even if it may never be +# intended to be invoked. Especially given the existing clear separation of concerns into +# PythonToolBase/PythonToolInstance/PythonToolPrepBase, this seems like an extremely ripe use case +# for some v2 rules for free caching and no-op when not required for the command-line goals. class PythonToolPrepBase(Task): """Base class for tasks that resolve a python tool to be invoked out-of-process.""" @@ -97,16 +111,30 @@ def _build_tool_pex(self, tool_subsystem, interpreter, pex_path): pex_builder.set_entry_point(tool_subsystem.get_entry_point()) pex_builder.freeze() + def _generate_fingerprinted_pex_path(self, tool_subsystem, interpreter): + # `tool_subsystem.get_requirement_specs()` is a list, but order shouldn't actually matter. This + # should probably be sorted, but it's possible a user could intentionally tweak order to work + # around a particular requirement resolution resolve-order issue. In practice the lists are + # expected to be mostly static, so we accept the risk of too-fine-grained caching creating lots + # of pexes in the cache dir. 
+ specs_fingerprint = stable_json_sha1(tool_subsystem.get_requirement_specs()) + return os.path.join( + get_pants_cachedir(), + 'python', + str(interpreter.identity), + self.fingerprint, + '{}-{}.pex'.format(tool_subsystem.options_scope, specs_fingerprint), + ) + def execute(self): tool_subsystem = self.tool_subsystem_cls.scoped_instance(self) - pex_name = tool_subsystem.options_scope - pex_path = os.path.join(self.workdir, self.fingerprint, '{}.pex'.format(pex_name)) interpreter_cache = PythonInterpreterCache.global_instance() - interpreter = interpreter_cache.select_interpreter_for_targets([]) + interpreter = min(interpreter_cache.setup(filters=tool_subsystem.get_interpreter_constraints())) + pex_path = self._generate_fingerprinted_pex_path(tool_subsystem, interpreter) if not os.path.exists(pex_path): - with self.context.new_workunit(name='create-{}-pex'.format(pex_name), + with self.context.new_workunit(name='create-{}-pex'.format(tool_subsystem.options_scope), labels=[WorkUnitLabel.PREP]): self._build_tool_pex(tool_subsystem=tool_subsystem, interpreter=interpreter, diff --git a/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py b/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py index 74598a77b34..29c665c3c9f 100644 --- a/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py +++ b/src/python/pants/backend/python/tasks/resolve_requirements_task_base.py @@ -13,8 +13,10 @@ from pex.pex_builder import PEXBuilder from pants.backend.python.python_requirement import PythonRequirement +from pants.backend.python.subsystems import pex_build_util from pants.backend.python.subsystems.pex_build_util import PexBuilderWrapper from pants.backend.python.subsystems.python_native_code import PythonNativeCode +from pants.backend.python.subsystems.python_setup import PythonSetup from pants.backend.python.targets.python_requirement_library import PythonRequirementLibrary from pants.base.hash_utils import hash_all from pants.invalidation.cache_manager import VersionedTargetSet @@ -36,12 +38,17 @@ def subsystem_dependencies(cls): return super(ResolveRequirementsTaskBase, cls).subsystem_dependencies() + ( PexBuilderWrapper.Factory, PythonNativeCode.scoped(cls), + PythonSetup.scoped(cls), ) @memoized_property def _python_native_code_settings(self): return PythonNativeCode.scoped_instance(self) + @memoized_property + def _python_setup(self): + return PythonSetup.global_instance() + @classmethod def prepare(cls, options, round_manager): super(ResolveRequirementsTaskBase, cls).prepare(options, round_manager) @@ -70,11 +77,11 @@ def resolve_requirements(self, interpreter, req_libs): # We need to ensure that we are resolving for only the current platform if we are # including local python dist targets that have native extensions. 
- tgts = self.context.targets() - if self._python_native_code_settings.check_build_for_current_platform_only(tgts): - maybe_platforms = ['current'] + targets_by_platform = pex_build_util.targets_by_platform(self.context.targets(), self._python_setup) + if self._python_native_code_settings.check_build_for_current_platform_only(targets_by_platform): + platforms = ['current'] else: - maybe_platforms = None + platforms = list(sorted(targets_by_platform.keys())) path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id)) # Note that we check for the existence of the directory, instead of for invalid_vts, @@ -84,7 +91,7 @@ def resolve_requirements(self, interpreter, req_libs): pex_builder = PexBuilderWrapper.Factory.create( builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True), log=self.context.log) - pex_builder.add_requirement_libs_from(req_libs, platforms=maybe_platforms) + pex_builder.add_requirement_libs_from(req_libs, platforms=platforms) pex_builder.freeze() return PEX(path, interpreter=interpreter) diff --git a/src/python/pants/backend/python/tasks/select_interpreter.py b/src/python/pants/backend/python/tasks/select_interpreter.py index 8c0905a90e2..24a06809f47 100644 --- a/src/python/pants/backend/python/tasks/select_interpreter.py +++ b/src/python/pants/backend/python/tasks/select_interpreter.py @@ -78,6 +78,9 @@ def execute(self): interpreter_path_file = self._interpreter_path_file(target_set_id) if not os.path.exists(interpreter_path_file): self._create_interpreter_path_file(interpreter_path_file, python_tgts) + else: + if self._detect_and_purge_invalid_interpreter(interpreter_path_file): + self._create_interpreter_path_file(interpreter_path_file, python_tgts) interpreter = self._get_interpreter(interpreter_path_file) self.context.products.register_data(PythonInterpreter, interpreter) @@ -95,6 +98,15 @@ def _create_interpreter_path_file(self, interpreter_path_file, targets): def _interpreter_path_file(self, target_set_id): return os.path.join(self.workdir, target_set_id, 'interpreter.info') + def _detect_and_purge_invalid_interpreter(self, interpreter_path_file): + interpreter = self._get_interpreter(interpreter_path_file) + if not os.path.exists(interpreter.binary): + self.context.log.info('Stale interpreter reference detected: {}, removing reference and ' + 'selecting a new interpreter.'.format(interpreter.binary)) + os.remove(interpreter_path_file) + return True + return False + @staticmethod def _get_interpreter(interpreter_path_file): with open(interpreter_path_file, 'r') as infile: diff --git a/src/python/pants/backend/python/tasks/unpack_wheels.py b/src/python/pants/backend/python/tasks/unpack_wheels.py index 632395e0cd3..e66483a584e 100644 --- a/src/python/pants/backend/python/tasks/unpack_wheels.py +++ b/src/python/pants/backend/python/tasks/unpack_wheels.py @@ -105,9 +105,9 @@ def _name_and_platform(whl): @memoized_classproperty def _current_platform_abbreviation(cls): - return NativeBackendPlatform.create().resolve_platform_specific({ - 'darwin': lambda: 'macosx', - 'linux': lambda: 'linux', + return NativeBackendPlatform.create().resolve_for_enum_variant({ + 'darwin': 'macosx', + 'linux': 'linux', }) @classmethod diff --git a/src/python/pants/bin/daemon_pants_runner.py b/src/python/pants/bin/daemon_pants_runner.py index 02055340ff7..ff0da1ed964 100644 --- a/src/python/pants/bin/daemon_pants_runner.py +++ b/src/python/pants/bin/daemon_pants_runner.py @@ -231,18 +231,16 @@ def nailgunned_stdio(cls, sock, env, 
handle_stdin=True): ) as finalizer: yield finalizer + # TODO: there's no testing for this method, and this caused a user-visible failure -- see #7008! def _raise_deferred_exc(self): """Raises deferred exceptions from the daemon's synchronous path in the post-fork client.""" if self._deferred_exception: - exc_type, exc_value, exc_traceback = self._deferred_exception - if exc_type == GracefulTerminationException: - self._exiter.exit(exc_value.exit_code) try: - # Expect `_deferred_exception` to be a 3-item tuple of the values returned by sys.exc_info(). - # This permits use the 3-arg form of the `raise` statement to preserve the original traceback. - raise_with_traceback(exc_type(exc_value), exc_traceback) - except ValueError: - # If `_deferred_exception` isn't a 3-item tuple, treat it like a bare exception. + exc_type, exc_value, exc_traceback = self._deferred_exception + raise_with_traceback(exc_value, exc_traceback) + except TypeError: + # If `_deferred_exception` isn't a 3-item tuple (raising a TypeError on the above + # destructuring), treat it like a bare exception. raise self._deferred_exception def _maybe_get_client_start_time_from_env(self, env): @@ -327,6 +325,10 @@ def post_fork_child(self): runner.run() except KeyboardInterrupt: self._exiter.exit_and_fail('Interrupted by user.\n') + except GracefulTerminationException as e: + ExceptionSink.log_exception( + 'Encountered graceful termination exception {}; exiting'.format(e)) + self._exiter.exit(e.exit_code) except Exception: ExceptionSink._log_unhandled_exception_and_exit() else: diff --git a/src/python/pants/build_graph/build_graph.py b/src/python/pants/build_graph/build_graph.py index cca13ecd14f..4a6af6e05d5 100644 --- a/src/python/pants/build_graph/build_graph.py +++ b/src/python/pants/build_graph/build_graph.py @@ -448,7 +448,7 @@ def _walk_rec(addr): _walk_rec(address) def transitive_dependees_of_addresses(self, addresses, predicate=None, postorder=False): - """Returns all transitive dependees of `address`. + """Returns all transitive dependees of `addresses`. Note that this uses `walk_transitive_dependee_graph` and the predicate is passed through, hence it trims graphs rather than just filtering out Targets that do not match the predicate. diff --git a/src/python/pants/build_graph/target_filter_subsystem.py b/src/python/pants/build_graph/target_filter_subsystem.py new file mode 100644 index 00000000000..ae53b7cd743 --- /dev/null +++ b/src/python/pants/build_graph/target_filter_subsystem.py @@ -0,0 +1,44 @@ +# coding=utf-8 +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import absolute_import, division, print_function, unicode_literals + +import logging +from builtins import object, set + +from pants.subsystem.subsystem import Subsystem + + +logger = logging.getLogger(__name__) + + +class TargetFilter(Subsystem): + """Filter targets matching configured criteria. 
+ + :API: public + """ + + options_scope = 'target-filter' + + @classmethod + def register_options(cls, register): + super(TargetFilter, cls).register_options(register) + + register('--exclude-tags', type=list, + default=[], fingerprint=True, + help='Skip targets with given tag(s).') + + def apply(self, targets): + exclude_tags = set(self.get_options().exclude_tags) + return TargetFiltering(exclude_tags).apply_tag_blacklist(targets) + + +class TargetFiltering(object): + """Apply filtering logic against targets.""" + + def __init__(self, exclude_tags): + self.exclude_tags = exclude_tags + + def apply_tag_blacklist(self, targets): + return [t for t in targets if not self.exclude_tags.intersection(t.tags)] diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/a b/src/python/pants/dummy.c similarity index 100% rename from testprojects/src/resources/org/pantsbuild/testproject/ordering/a rename to src/python/pants/dummy.c diff --git a/src/python/pants/engine/BUILD b/src/python/pants/engine/BUILD index 168201a5719..cab6d0cbcee 100644 --- a/src/python/pants/engine/BUILD +++ b/src/python/pants/engine/BUILD @@ -50,6 +50,7 @@ python_library( dependencies=[ '3rdparty/python/twitter/commons:twitter.common.collections', '3rdparty/python:future', + ':objects', ':rules', ':selectors', 'src/python/pants/base:project_tree', @@ -121,7 +122,10 @@ python_library( name='objects', sources=['objects.py'], dependencies=[ + '3rdparty/python:future', 'src/python/pants/util:meta', + 'src/python/pants/util:memo', + 'src/python/pants/util:objects', ] ) @@ -172,6 +176,7 @@ python_library( ':isolated_process', ':native', ':nodes', + ':objects', ':rules', 'src/python/pants/base:exceptions', 'src/python/pants/base:specs', diff --git a/src/python/pants/engine/addressable.py b/src/python/pants/engine/addressable.py index 508a9471c61..25f4233c701 100644 --- a/src/python/pants/engine/addressable.py +++ b/src/python/pants/engine/addressable.py @@ -11,9 +11,9 @@ from future.utils import string_types from pants.build_graph.address import Address, BuildFileAddress -from pants.engine.objects import Resolvable, Serializable +from pants.engine.objects import Collection, Resolvable, Serializable from pants.util.collections_abc_backport import MutableMapping, MutableSequence -from pants.util.objects import Collection, TypeConstraintError +from pants.util.objects import TypeConstraintError Addresses = Collection.of(Address) diff --git a/src/python/pants/engine/build_files.py b/src/python/pants/engine/build_files.py index ee65bd92119..84430073f35 100644 --- a/src/python/pants/engine/build_files.py +++ b/src/python/pants/engine/build_files.py @@ -219,7 +219,7 @@ def addresses_from_address_families(address_mapper, specs): """ # Capture a Snapshot covering all paths for these Specs, then group by directory. 
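As a usage sketch of the TargetFiltering logic added above (the target objects are hypothetical stand-ins; only the tag-intersection behavior comes from the new subsystem):

from pants.build_graph.target_filter_subsystem import TargetFiltering


class FakeTarget(object):
  """A stand-in exposing just the `tags` attribute that TargetFiltering reads."""
  def __init__(self, name, tags):
    self.name = name
    self.tags = frozenset(tags)


targets = [FakeTarget('lib', {'python'}), FakeTarget('it', {'integration'})]
kept = TargetFiltering({'integration'}).apply_tag_blacklist(targets)
assert [t.name for t in kept] == ['lib']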
snapshot = yield Get(Snapshot, PathGlobs, _spec_to_globs(address_mapper, specs)) - dirnames = {dirname(f.stat.path) for f in snapshot.files} + dirnames = {dirname(f) for f in snapshot.files} address_families = yield [Get(AddressFamily, Dir(d)) for d in dirnames] address_family_by_directory = {af.namespace: af for af in address_families} diff --git a/src/python/pants/engine/fs.py b/src/python/pants/engine/fs.py index c43b41cf858..8d009f01c06 100644 --- a/src/python/pants/engine/fs.py +++ b/src/python/pants/engine/fs.py @@ -4,13 +4,16 @@ from __future__ import absolute_import, division, print_function, unicode_literals +import os + from future.utils import binary_type, text_type -from pants.base.project_tree import Dir, File +from pants.engine.objects import Collection from pants.engine.rules import RootRule from pants.option.custom_types import GlobExpansionConjunction from pants.option.global_options import GlobMatchErrorBehavior -from pants.util.objects import Collection, datatype +from pants.util.dirutil import maybe_read_file, safe_delete, safe_file_dump +from pants.util.objects import Exactly, datatype class FileContent(datatype([('path', text_type), ('content', binary_type)])): @@ -56,12 +59,9 @@ def __new__(cls, include, exclude=(), glob_match_error_behavior=None, conjunctio cls, include=tuple(include), exclude=tuple(exclude), - glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior), - conjunction=GlobExpansionConjunction.create(conjunction)) - - -class PathGlobsAndRoot(datatype([('path_globs', PathGlobs), ('root', text_type)])): - pass + glob_match_error_behavior=GlobMatchErrorBehavior.create(glob_match_error_behavior, + none_is_default=True), + conjunction=GlobExpansionConjunction.create(conjunction, none_is_default=True)) class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', int)])): @@ -82,6 +82,33 @@ class Digest(datatype([('fingerprint', text_type), ('serialized_bytes_length', i https://github.com/pantsbuild/pants/issues/5802 """ + @classmethod + def _path(cls, directory): + return '{}.digest'.format(directory.rstrip(os.sep)) + + @classmethod + def clear(cls, directory): + """Clear any existing Digest file adjacent to the given directory.""" + safe_delete(cls._path(directory)) + + @classmethod + def load(cls, directory): + """Load a Digest from a `.digest` file adjacent to the given directory. + + :return: A Digest, or None if the Digest did not exist. + """ + read_file = maybe_read_file(cls._path(directory), binary_mode=False) + if read_file: + fingerprint, length = read_file.split(':') + return Digest(fingerprint, int(length)) + else: + return None + + def dump(self, directory): + """Dump this Digest object adjacent to the given directory.""" + payload = '{}:{}'.format(self.fingerprint, self.serialized_bytes_length) + safe_file_dump(self._path(directory), payload=payload, mode='w') + def __repr__(self): return '''Digest(fingerprint={}, serialized_bytes_length={})'''.format( self.fingerprint, @@ -92,8 +119,25 @@ def __str__(self): return repr(self) -class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])): - """A Snapshot is a collection of Files and Dirs fingerprinted by their names/content. +class PathGlobsAndRoot(datatype([ + ('path_globs', PathGlobs), + ('root', text_type), + ('digest_hint', Exactly(Digest, type(None))), +])): + """A set of PathGlobs to capture relative to some root (which may exist outside of the buildroot). 
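A round-trip sketch of the new `.digest` sidecar file defined above (the directory and digest values are made up; the `<fingerprint>:<length>` layout comes from the `dump()` and `load()` methods):

from pants.engine.fs import Digest

results_dir = '/tmp/results/abc123'
digest = Digest(u'0123456789abcdef' * 4, 172)

digest.dump(results_dir)              # writes /tmp/results/abc123.digest
assert Digest.load(results_dir) == digest
Digest.clear(results_dir)             # deletes the sidecar file
assert Digest.load(results_dir) is None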
+ + If the `digest_hint` is set, it must be the Digest that we would expect to get if we were to + expand and Digest the globs. The hint is an optimization that allows for bypassing filesystem + operations in cases where the expected Digest is known, and the content for the Digest is already + stored. + """ + + def __new__(cls, path_globs, root, digest_hint=None): + return super(PathGlobsAndRoot, cls).__new__(cls, path_globs, root, digest_hint) + + +class Snapshot(datatype([('directory_digest', Digest), ('files', tuple), ('dirs', tuple)])): + """A Snapshot is a collection of file paths and dir paths fingerprinted by their names/content. Snapshots are used to make it easier to isolate process execution by fixing the contents of the files being operated on and easing their movement to and from isolated execution @@ -104,22 +148,6 @@ class Snapshot(datatype([('directory_digest', Digest), ('path_stats', tuple)])): def is_empty(self): return self == EMPTY_SNAPSHOT - @property - def dirs(self): - return [p for p in self.path_stats if type(p.stat) == Dir] - - @property - def dir_stats(self): - return [p.stat for p in self.dirs] - - @property - def files(self): - return [p for p in self.path_stats if type(p.stat) == File] - - @property - def file_stats(self): - return [p.stat for p in self.files] - class MergedDirectories(datatype([('directories', tuple)])): pass @@ -148,7 +176,8 @@ class UrlToFetch(datatype([('url', text_type), ('digest', Digest)])): EMPTY_SNAPSHOT = Snapshot( directory_digest=EMPTY_DIRECTORY_DIGEST, - path_stats=(), + files=(), + dirs=() ) diff --git a/src/python/pants/engine/legacy/BUILD b/src/python/pants/engine/legacy/BUILD index cb8086944d8..8db00aa36d4 100644 --- a/src/python/pants/engine/legacy/BUILD +++ b/src/python/pants/engine/legacy/BUILD @@ -75,6 +75,7 @@ python_library( 'src/python/pants/build_graph', 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', + 'src/python/pants/engine:objects', 'src/python/pants/engine:parser', 'src/python/pants/engine:selectors', 'src/python/pants/option', diff --git a/src/python/pants/engine/legacy/graph.py b/src/python/pants/engine/legacy/graph.py index 72368f1a6db..7bd0efbeaf8 100644 --- a/src/python/pants/engine/legacy/graph.py +++ b/src/python/pants/engine/legacy/graph.py @@ -26,13 +26,14 @@ from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.engine.legacy.structs import BundleAdaptor, BundlesField, SourcesField, TargetAdaptor from pants.engine.mapper import AddressMapper +from pants.engine.objects import Collection from pants.engine.parser import SymbolTable, TargetAdaptorContainer from pants.engine.rules import RootRule, rule from pants.engine.selectors import Get, Select from pants.option.global_options import GlobMatchErrorBehavior from pants.source.filespec import any_matches_filespec from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper -from pants.util.objects import Collection, datatype +from pants.util.objects import datatype logger = logging.getLogger(__name__) diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py index 5026b055382..960e397621a 100644 --- a/src/python/pants/engine/native.py +++ b/src/python/pants/engine/native.py @@ -376,6 +376,12 @@ def extern_store_i64(self, context_handle, i64): c = self._ffi.from_handle(context_handle) return c.to_value(i64) + @_extern_decl('Handle', ['ExternContext*', 'double']) + def extern_store_f64(self, context_handle, f64): + """Given a context and double, return a new Handle 
to represent the double.""" + c = self._ffi.from_handle(context_handle) + return c.to_value(f64) + @_extern_decl('Handle', ['ExternContext*', '_Bool']) def extern_store_bool(self, context_handle, b): """Given a context and _Bool, return a new Handle to represent the _Bool.""" @@ -634,6 +640,7 @@ def init_externs(): self.ffi_lib.extern_store_bytes, self.ffi_lib.extern_store_utf8, self.ffi_lib.extern_store_i64, + self.ffi_lib.extern_store_f64, self.ffi_lib.extern_store_bool, self.ffi_lib.extern_project_ignoring_type, self.ffi_lib.extern_project_multi, @@ -690,10 +697,6 @@ def new_scheduler(self, construct_snapshot, construct_file_content, construct_files_content, - construct_path_stat, - construct_dir, - construct_file, - construct_link, construct_process_result, constraint_address, constraint_path_globs, @@ -722,10 +725,6 @@ def tc(constraint): func(construct_snapshot), func(construct_file_content), func(construct_files_content), - func(construct_path_stat), - func(construct_dir), - func(construct_file), - func(construct_link), func(construct_process_result), # TypeConstraints. tc(constraint_address), diff --git a/src/python/pants/engine/objects.py b/src/python/pants/engine/objects.py index a0b0e784a7c..48b017d688d 100644 --- a/src/python/pants/engine/objects.py +++ b/src/python/pants/engine/objects.py @@ -5,10 +5,16 @@ from __future__ import absolute_import, division, print_function, unicode_literals import inspect +import sys from abc import abstractmethod, abstractproperty +from builtins import object from collections import namedtuple +from future.utils import PY2 + +from pants.util.memo import memoized_classmethod from pants.util.meta import AbstractClass +from pants.util.objects import Exactly, TypedCollection, datatype class SerializationError(Exception): @@ -146,3 +152,38 @@ def validate(self): :raises: :class:`ValidationError` if this object is invalid. """ + + +class Collection(object): + """Constructs classes representing collections of objects of a particular type. + + The produced class will expose its values under a field named dependencies - this is a stable API + which may be consumed e.g. over FFI from the engine. + + Python consumers of a Collection should prefer to use its standard iteration API. + + Note that elements of a Collection are type-checked upon construction. + """ + + @memoized_classmethod + def of(cls, *element_types): + union = '|'.join(element_type.__name__ for element_type in element_types) + type_name = '{}.of({})'.format(cls.__name__, union) + if PY2: + type_name = type_name.encode('utf-8') + type_checked_collection_class = datatype([ + # Create a datatype with a single field 'dependencies' which is type-checked on construction + # to be a collection containing elements of only the exact `element_types` specified. + ('dependencies', TypedCollection(Exactly(*element_types))) + ], superclass_name=cls.__name__) + supertypes = (cls, type_checked_collection_class) + properties = {'element_types': element_types} + collection_of_type = type(type_name, supertypes, properties) + + # Expose the custom class type at the module level to be pickle compatible. 
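A short usage sketch of the relocated `Collection` factory (the element type and values are hypothetical):

from pants.engine.objects import Collection


class Artifact(object):
  def __init__(self, name):
    self.name = name


Artifacts = Collection.of(Artifact)   # generated type: 'Collection.of(Artifact)'

arts = Artifacts([Artifact('a'), Artifact('b')])
assert [a.name for a in arts] == ['a', 'b']   # iterates over `dependencies`
# Artifacts(['nope']) would raise: elements must be exactly of type Artifact.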
+ setattr(sys.modules[cls.__module__], type_name, collection_of_type) + + return collection_of_type + + def __iter__(self): + return iter(self.dependencies) diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 72d04e661f8..c4374c2f713 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -14,17 +14,17 @@ from pants.base.project_tree import Dir, File, Link from pants.build_graph.address import Address from pants.engine.fs import (Digest, DirectoryToMaterialize, FileContent, FilesContent, - MergedDirectories, Path, PathGlobs, PathGlobsAndRoot, Snapshot, - UrlToFetch) + MergedDirectories, PathGlobs, PathGlobsAndRoot, Snapshot, UrlToFetch) from pants.engine.isolated_process import ExecuteProcessRequest, FallibleExecuteProcessResult from pants.engine.native import Function, TypeConstraint, TypeId from pants.engine.nodes import Return, Throw +from pants.engine.objects import Collection from pants.engine.rules import RuleIndex, SingletonRule, TaskRule from pants.engine.selectors import Params, Select, constraint_for from pants.rules.core.exceptions import GracefulTerminationException from pants.util.contextutil import temporary_file_path from pants.util.dirutil import check_no_overlapping_paths -from pants.util.objects import Collection, datatype +from pants.util.objects import datatype from pants.util.strutil import pluralize @@ -100,10 +100,6 @@ def __init__( construct_snapshot=Snapshot, construct_file_content=FileContent, construct_files_content=FilesContent, - construct_path_stat=Path, - construct_dir=Dir, - construct_file=File, - construct_link=Link, construct_process_result=FallibleExecuteProcessResult, constraint_address=constraint_for(Address), constraint_path_globs=constraint_for(PathGlobs), @@ -282,8 +278,7 @@ def visualize_to_dir(self): return self._visualize_to_dir def _metrics(self, session): - metrics_val = self._native.lib.scheduler_metrics(self._scheduler, session) - return {k: v for k, v in self._from_value(metrics_val)} + return self._from_value(self._native.lib.scheduler_metrics(self._scheduler, session)) def with_fork_context(self, func): """See the rustdocs for `scheduler_fork_context` for more information.""" diff --git a/src/python/pants/goal/pantsd_stats.py b/src/python/pants/goal/pantsd_stats.py index 12fc801451d..5eefef92058 100644 --- a/src/python/pants/goal/pantsd_stats.py +++ b/src/python/pants/goal/pantsd_stats.py @@ -11,24 +11,18 @@ class PantsDaemonStats(object): """Tracks various stats about the daemon.""" def __init__(self): - self.target_root_size = 0 - self.affected_targets_size = 0 - self.affected_targets_file_count = 0 self.scheduler_metrics = {} def set_scheduler_metrics(self, scheduler_metrics): self.scheduler_metrics = scheduler_metrics def set_target_root_size(self, size): - self.target_root_size = size + self.scheduler_metrics['target_root_size'] = size def set_affected_targets_size(self, size): - self.affected_targets_size = size + self.scheduler_metrics['affected_targets_size'] = size def get_all(self): - res = dict(self.scheduler_metrics) - res.update({ - 'target_root_size': self.target_root_size, - 'affected_targets_size': self.affected_targets_size, - }) - return res + for key in ['target_root_size', 'affected_targets_size']: + self.scheduler_metrics.setdefault(key, 0) + return self.scheduler_metrics diff --git a/src/python/pants/init/engine_initializer.py b/src/python/pants/init/engine_initializer.py index 1b35f31692d..41f042c8844 100644 --- 
a/src/python/pants/init/engine_initializer.py +++ b/src/python/pants/init/engine_initializer.py @@ -346,7 +346,8 @@ def setup_legacy_graph_extended( rules = ( [ RootRule(Console), - SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior)), + SingletonRule.from_instance(GlobMatchErrorBehavior.create(glob_match_error_behavior, + none_is_default=True)), SingletonRule.from_instance(build_configuration), SingletonRule(SymbolTable, symbol_table), ] + diff --git a/src/python/pants/java/nailgun_executor.py b/src/python/pants/java/nailgun_executor.py index d4a16123dd0..5f118496531 100644 --- a/src/python/pants/java/nailgun_executor.py +++ b/src/python/pants/java/nailgun_executor.py @@ -228,8 +228,8 @@ def ensure_connectable(self, nailgun): def _spawn_nailgun_server(self, fingerprint, jvm_options, classpath, stdout, stderr, stdin): """Synchronously spawn a new nailgun server.""" # Truncate the nailguns stdout & stderr. - safe_file_dump(self._ng_stdout, b'') - safe_file_dump(self._ng_stderr, b'') + safe_file_dump(self._ng_stdout, b'', mode='wb') + safe_file_dump(self._ng_stderr, b'', mode='wb') jvm_options = jvm_options + [self._PANTS_NG_BUILDROOT_ARG, self._create_owner_arg(self._workdir), diff --git a/src/python/pants/notes/1.14.x.rst b/src/python/pants/notes/1.14.x.rst index 45e5658ae23..8b2d4e2ef9a 100644 --- a/src/python/pants/notes/1.14.x.rst +++ b/src/python/pants/notes/1.14.x.rst @@ -3,6 +3,67 @@ This document describes releases leading up to the ``1.14.x`` ``stable`` series. +1.14.0rc3 (2/21/2019) +--------------------- + +API Changes +~~~~~~~~~~~ + +* Add flags to process_executor that say where to materialize output and what output is (#7201) + `PR #7201 `_ + +* Resolve all platforms from all python targets (#7156) + `PR #7156 `_ + +* Remove deprecated test classes (#7243) + `PR #7243 `_ + +Bugfixes +~~~~~~~~ + +* Revert remote execution from tower to grpcio (#7256) + `PR #7256 `_ + +* Avoid capturing Snapshots for previously digested codegen outputs (#7241) + `PR #7241 `_ + +* Validate and maybe prune interpreter cache run over run (#7225) + `PR #7225 `_ + +1.14.0rc2 (2/15/2019) +--------------------- + +API Changes +~~~~~~~~~~~ + +* Pin pytest version to avoid induced breakage from more-itertools transitive dep (#7238) + `PR #7238 `_ + `PR #7240 `_ + +1.14.0rc1 (2/06/2019) +--------------------- + +Bugfixes +~~~~~~~~ + +* Only lint the direct sources of a linted target. (#7219) + `PR #7219 `_ + +* Do not render the coursier workunit unless it will run. (#7218) + `PR #7218 `_ + +* Revert "make GoTest subclass PartitionedTestRunnerTaskMixin to test transitively" (#7212) + `PR #7212 `_ + +* Only run master-dependent commithooks on master (#7214) + `PR #7214 `_ + +* Absolute-ify GIT_DIR (#7210) + `PR #7210 `_ + +* Fix release script ownership check command name. (#7204) + `PR #7204 `_ + 1.14.0rc0 (2/01/2019) --------------------- diff --git a/src/python/pants/notes/master.rst b/src/python/pants/notes/master.rst index 5661e9b3489..7c4448c07fb 100644 --- a/src/python/pants/notes/master.rst +++ b/src/python/pants/notes/master.rst @@ -4,6 +4,138 @@ Master Pre-Releases This document describes development releases which occur weekly from master, and which have not undergone the vetting associated with ``stable`` releases.
+1.15.0.dev1 (2/22/2019) +----------------------- + +New Features +~~~~~~~~~~~~ + +* Allow passing floating point numbers from rust to python (#7259) + `PR #7259 `_ + +Bugfixes +~~~~~~~~ + +* Fix nightly cron ctypes enum failure (#7249) + `PR #7249 `_ + +* Revert remote execution from tower to grpcio (#7256) + `PR #7256 `_ + +* Pin pytest version to avoid induced breakage from more-itertools transitive dep (#7238) + `Issue #4770#issuecomment-462869367, `_ + `PR #7238 `_ + +* Validate and maybe prune interpreter cache run over run (#7225) + `PR #7225 `_ + +Documentation +~~~~~~~~~~~~~ + +* [jvm-compile] fix typo: s/direcotry/directory/ (#7265) + `PR #7265 `_ + +Refactoring, Improvements, and Tooling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* cache python tools in ~/.cache/pants (#7236) + `PR #7236 `_ + +* Prepare 1.14.0rc3 (#7274) + `PR #7274 `_ + +* Node is Display (#7264) + `PR #7264 `_ + +* Scheduler returns metrics as a dictionary instead of a tuple of tuples (#7255) + `PR #7255 `_ + +* Prepare 1.14.0.rc2 instead. (#7251) + `PR #7251 `_ + +* Prepare 1.14.0 (#7246) + `PR #7246 `_ + +* Avoid capturing Snapshots for previously digested codegen outputs (#7241) + `PR #7241 `_ + +* Add checks if values of flags zipkin-trace-id and zipkin-parent-id are valid (#7242) + `PR #7242 `_ + +* Remove deprecated test classes (#7243) + `PR #7243 `_ + +* fix expected pytest output for pytest integration after pinning to 3.0.7 (#7240) + `PR #7240 `_ + +* Canonicalize enum pattern matching for execution strategy, platform, and elsewhere (#7226) + `PR #7226 `_ + +* add a TypedCollection type constraint to reduce boilerplate for datatype tuple fields (#7115) + `PR #7115 `_ + +1.15.0.dev0 (2/8/2019) +---------------------- + +API Changes +~~~~~~~~~~~ + +* deprecate implicit usage of binary_mode=True and mode='wb' in dirutil methods (#7120) + `PR #7120 `_ + +* Resolve all platforms from all python targets (#7156) + `PR #7156 `_ + +* Only lint the direct sources of a linted target. (#7219) + `PR #7219 `_ + +New Features +~~~~~~~~~~~~ + +* Add flag reporting-zipkin-sample-rate (#7211) + `PR #7211 `_ + +* Add flags to process_executor that say where to materialize output and what is output (#7201) + `PR #7201 `_ + +Bugfixes +~~~~~~~~ + +* Do not render the coursier workunit unless it will run. (#7218) + `PR #7218 `_ + +* Output non-empty digest for empty directory as output_directories when running ExecuteProcessRequest (#7208) + `PR #7208 `_ + +Documentation +~~~~~~~~~~~~~ + +* documentation for grpcio (#7155) + `PR #7155 `_ + +Refactoring, Improvements, and Tooling +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +* Make Resettable lazy again (#7222) + `PR #7222 `_ + +* fix _raise_deferred_exc() (#7008) + `PR #7008 `_ + +* Skip flaky test (#7209) + `PR #7209 `_ + +* Only run master-dependent commithooks on master (#7214) + `PR #7214 `_ + +* Revert "make GoTest subclass PartitionedTestRunnerTaskMixin to test transitively" (#7212) + `PR #7212 `_ + +* Absolute-ify GIT_DIR (#7210) + `PR #7210 `_ + +* Fix release script ownership check command name.
(#7204) + `PR #7204 `_ 1.14.0rc0 (2/01/2019) --------------------- diff --git a/src/python/pants/option/global_options.py b/src/python/pants/option/global_options.py index 31160095c0e..c346f85c4fd 100644 --- a/src/python/pants/option/global_options.py +++ b/src/python/pants/option/global_options.py @@ -16,7 +16,7 @@ from pants.option.optionable import Optionable from pants.option.scope import ScopeInfo from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin -from pants.util.objects import datatype, enum +from pants.util.objects import datatype, enum, register_enum_option class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error'])): @@ -26,8 +26,6 @@ class GlobMatchErrorBehavior(enum('failure_behavior', ['ignore', 'warn', 'error' be aware of any changes to this object's definition. """ - default_option_value = 'warn' - class ExecutionOptions(datatype([ 'remote_store_server', @@ -197,12 +195,12 @@ def register_bootstrap_options(cls, register): help='Paths to ignore for all filesystem operations performed by pants ' '(e.g. BUILD file scanning, glob matching, etc). ' 'Patterns use the gitignore syntax (https://git-scm.com/docs/gitignore).') - register('--glob-expansion-failure', type=str, - choices=GlobMatchErrorBehavior.allowed_values, - default=GlobMatchErrorBehavior.default_option_value, - advanced=True, - help="Raise an exception if any targets declaring source files " - "fail to match any glob provided in the 'sources' argument.") + register_enum_option( + # TODO: allow using the attribute `GlobMatchErrorBehavior.warn` for more safety! + register, GlobMatchErrorBehavior, '--glob-expansion-failure', default='warn', + advanced=True, + help="Raise an exception if any targets declaring source files " + "fail to match any glob provided in the 'sources' argument.") register('--exclude-target-regexp', advanced=True, type=list, default=[], daemon=False, metavar='', help='Exclude target roots that match these regexes.') diff --git a/src/python/pants/option/options_bootstrapper.py b/src/python/pants/option/options_bootstrapper.py index 0d727329532..09ff274e610 100644 --- a/src/python/pants/option/options_bootstrapper.py +++ b/src/python/pants/option/options_bootstrapper.py @@ -103,7 +103,7 @@ def create(cls, env=None, args=None): short_flags = set() def filecontent_for(path): - return FileContent(ensure_text(path), read_file(path)) + return FileContent(ensure_text(path), read_file(path, binary_mode=True)) def capture_the_flags(*args, **kwargs): for arg in args: diff --git a/src/python/pants/pantsd/process_manager.py b/src/python/pants/pantsd/process_manager.py index e42cbfa6ecc..4c14f418bae 100644 --- a/src/python/pants/pantsd/process_manager.py +++ b/src/python/pants/pantsd/process_manager.py @@ -191,7 +191,7 @@ def write_metadata_by_name(self, name, metadata_key, metadata_value): """ self._maybe_init_metadata_dir_by_name(name) file_path = self._metadata_file_path(name, metadata_key) - safe_file_dump(file_path, metadata_value, binary_mode=False) + safe_file_dump(file_path, metadata_value, mode='w') def await_metadata_by_name(self, name, metadata_key, timeout, caster=None): """Block up to a timeout for process metadata to arrive on disk. 
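The body of `register_enum_option` is not shown in this diff, so the following is only a guess at its contract, inferred from the call site above; `allowed_values` and `default_value` are assumed enum attributes, and the real helper in `pants.util.objects` may differ.

def register_enum_option(register, enum_cls, *args, **kwargs):
  # Hypothetical sketch: surface the enum's variants as string choices,
  # with a caller-provided (or enum-default) default value.
  kwargs.setdefault('default', enum_cls.default_value)
  register(*args, choices=list(enum_cls.allowed_values), **kwargs)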
diff --git a/src/python/pants/pantsd/watchman.py b/src/python/pants/pantsd/watchman.py index 09be21e948d..409c7f0f725 100644 --- a/src/python/pants/pantsd/watchman.py +++ b/src/python/pants/pantsd/watchman.py @@ -82,7 +82,7 @@ def _normalize_watchman_path(self, watchman_path): def _maybe_init_metadata(self): safe_mkdir(self._watchman_work_dir) # Initialize watchman with an empty, but valid statefile so it doesn't complain on startup. - safe_file_dump(self._state_file, b'{}') + safe_file_dump(self._state_file, b'{}', mode='wb') def _construct_cmd(self, cmd_parts, state_file, sock_file, pid_file, log_file, log_level): return [part for part in cmd_parts] + ['--no-save-state', diff --git a/src/python/pants/releases/reversion.py b/src/python/pants/releases/reversion.py index 541fcdb6524..8ed8a5638e2 100644 --- a/src/python/pants/releases/reversion.py +++ b/src/python/pants/releases/reversion.py @@ -30,7 +30,7 @@ def replace_in_file(workspace, src_file_path, from_str, to_str): return None dst_file_path = src_file_path.replace(from_str, to_str) - safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes)) + safe_file_dump(os.path.join(workspace, dst_file_path), data.replace(from_bytes, to_bytes), mode='wb') if src_file_path != dst_file_path: os.unlink(os.path.join(workspace, src_file_path)) return dst_file_path @@ -88,7 +88,7 @@ def rewrite_record_file(workspace, src_record_file, mutated_file_tuples): output_line = line output_records.append(output_line) - safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n', binary_mode=False) + safe_file_dump(file_name, '\r\n'.join(output_records) + '\r\n', mode='w') # The wheel METADATA file will contain a line like: `Version: 1.11.0.dev3+7951ec01`. diff --git a/src/python/pants/reporting/reporting.py b/src/python/pants/reporting/reporting.py index 25d7c82c16e..245f4eef4cc 100644 --- a/src/python/pants/reporting/reporting.py +++ b/src/python/pants/reporting/reporting.py @@ -51,15 +51,17 @@ def register_options(cls, register): help='The full HTTP URL of a zipkin server to which traces should be posted. ' 'No traces will be made if this is not set.') register('--zipkin-trace-id', advanced=True, default=None, - help='The overall 64 or 128-bit ID of the trace. ' - 'Set if Pants trace should be a part of larger trace ' - 'for systems that invoke Pants. If zipkin-trace-id ' - 'and zipkin-parent-id are not set, a trace_id value is randomly generated for a ' - 'Zipkin trace') + help='The overall 64 or 128-bit ID of the trace (the format is 16-character or ' + '32-character hex string). Set if the Pants trace should be a part of a larger ' + 'trace for systems that invoke Pants. If flags zipkin-trace-id and ' + 'zipkin-parent-id are not set, a trace_id value is randomly generated ' + 'for a Zipkin trace.') register('--zipkin-parent-id', advanced=True, default=None, - help='The 64-bit ID for a parent span that invokes Pants. ' - 'zipkin-trace-id and zipkin-parent-id must both either be set or not set ' - 'when run Pants command') + help='The 64-bit ID for a parent span that invokes Pants (the format is 16-character ' + 'hex string). Flags zipkin-trace-id and zipkin-parent-id must both either be set ' + 'or not set when running a Pants command.') + register('--zipkin-sample-rate', advanced=True, default=100.0, + help='Rate at which to sample Zipkin traces. Value 0.0 - 100.0.') def initialize(self, run_tracker, all_options, start_time=None): """Initialize with the given RunTracker. 
@@ -100,6 +102,7 @@ def initialize(self, run_tracker, all_options, start_time=None): zipkin_endpoint = self.get_options().zipkin_endpoint trace_id = self.get_options().zipkin_trace_id parent_id = self.get_options().zipkin_parent_id + sample_rate = self.get_options().zipkin_sample_rate if zipkin_endpoint is None and trace_id is not None and parent_id is not None: raise ValueError( @@ -109,11 +112,21 @@ def initialize(self, run_tracker, all_options, start_time=None): raise ValueError( "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." ) + if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or not is_hex_string(trace_id)): + raise ValueError( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + ) + if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)): + raise ValueError( + "Value of the flag zipkin-parent-id must be a 16-character hex string. " + + "Got {}.".format(parent_id) + ) if zipkin_endpoint is not None: zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO) zipkin_reporter = ZipkinReporter( - run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id + run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id, sample_rate ) report.add_reporter('zipkin', zipkin_reporter) @@ -192,3 +205,12 @@ def update_reporting(self, global_options, is_quiet, run_tracker): invalidation_report.set_filename(outfile) return invalidation_report + + +def is_hex_string(id_value): + return all(is_hex_ch(ch) for ch in id_value) + + +def is_hex_ch(ch): + num = ord(ch) + return ord('0') <= num <= ord('9') or ord('a') <= num <= ord('f') or ord('A') <= num <= ord('F') diff --git a/src/python/pants/reporting/zipkin_reporter.py b/src/python/pants/reporting/zipkin_reporter.py index 48deac3c892..68dd252df43 100644 --- a/src/python/pants/reporting/zipkin_reporter.py +++ b/src/python/pants/reporting/zipkin_reporter.py @@ -10,7 +10,7 @@ from py_zipkin import Encoding from py_zipkin.transport import BaseTransportHandler from py_zipkin.util import generate_random_64bit_string -from py_zipkin.zipkin import ZipkinAttrs, zipkin_span +from py_zipkin.zipkin import ZipkinAttrs, create_attrs_for_span, zipkin_span from pants.base.workunit import WorkUnitLabel from pants.reporting.reporter import Reporter @@ -42,7 +42,7 @@ class ZipkinReporter(Reporter): Reporter that implements Zipkin tracing. """ - def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id): + def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id, sample_rate): """ When trace_id and parent_id are set a Zipkin trace will be created with given trace_id and parent_id. If trace_id and parent_id are set to None, a trace_id will be randomly @@ -53,6 +53,7 @@ def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id): :param string endpoint: The full HTTP URL of a zipkin server to which traces should be posted. :param string trace_id: The overall 64 or 128-bit ID of the trace. May be None. :param string parent_id: The 64-bit ID for a parent span that invokes Pants. May be None. + :param float sample_rate: Rate at which to sample Zipkin traces. Value 0.0 - 100.0. 
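For reference, the character-range check added above is equivalent to a `string.hexdigits` membership test; a compact alternative sketch:

import string


def is_hex_string(id_value):
  # Same semantics as the ord()-range comparisons above.
  return all(ch in string.hexdigits for ch in id_value)


assert is_hex_string('deadbeefDEADBEEF')
assert not is_hex_string('xyz-123')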
""" super(ZipkinReporter, self).__init__(run_tracker, settings) # We keep track of connection between workunits and spans @@ -61,6 +62,7 @@ def __init__(self, run_tracker, settings, endpoint, trace_id, parent_id): self.handler = HTTPTransportHandler(endpoint) self.trace_id = trace_id self.parent_id = parent_id + self.sample_rate = float(sample_rate) def start_workunit(self, workunit): """Implementation of Reporter callback.""" @@ -84,13 +86,14 @@ def start_workunit(self, workunit): is_sampled=True, ) else: - zipkin_attrs = None + zipkin_attrs = create_attrs_for_span( + sample_rate=self.sample_rate, # Value between 0.0 and 100.0 + ) span = zipkin_span( service_name=service_name, span_name=workunit.name, transport_handler=self.handler, - sample_rate=100.0, # Value between 0.0 and 100.0 encoding=Encoding.V1_THRIFT, zipkin_attrs=zipkin_attrs ) @@ -104,7 +107,7 @@ def start_workunit(self, workunit): # Goals and tasks save their start time at the beginning of their run. # This start time is passed to workunit, because the workunit may be created much later. span.start_timestamp = workunit.start_time - if first_span: + if first_span and span.zipkin_attrs.is_sampled: span.logging_context.start_timestamp = workunit.start_time def end_workunit(self, workunit): diff --git a/src/python/pants/source/filespec.py b/src/python/pants/source/filespec.py index c36d37176a4..77e967043f5 100644 --- a/src/python/pants/source/filespec.py +++ b/src/python/pants/source/filespec.py @@ -9,6 +9,9 @@ def glob_to_regex(pattern): """Given a glob pattern, return an equivalent regex expression. + + TODO: Replace with implementation in `fs.rs`. See https://github.com/pantsbuild/pants/issues/6795. + :param string glob: The glob pattern. "**" matches 0 or more dirs recursively. "*" only matches patterns in a single dir. :returns: A regex string that matches same paths as the input glob does. 
diff --git a/src/python/pants/source/wrapped_globs.py b/src/python/pants/source/wrapped_globs.py index 2e68472e748..1deab4c9dda 100644 --- a/src/python/pants/source/wrapped_globs.py +++ b/src/python/pants/source/wrapped_globs.py @@ -99,8 +99,10 @@ def files(self): @memoized_property def files_relative_to_buildroot(self): - fds = self._snapshot.path_stats if self._include_dirs else self._snapshot.files - return tuple(fd.path for fd in fds) + res = self._snapshot.files + if self._include_dirs: + res += self._snapshot.dirs + return res @property def files_hash(self): diff --git a/src/python/pants/task/fmt_task_mixin.py b/src/python/pants/task/fmt_task_mixin.py index 25c1d7a6aa3..d4279d36089 100644 --- a/src/python/pants/task/fmt_task_mixin.py +++ b/src/python/pants/task/fmt_task_mixin.py @@ -11,3 +11,4 @@ class FmtTaskMixin(HasSkipAndTransitiveGoalOptionsMixin): """A mixin to combine with code formatting tasks.""" goal_options_registrar_cls = SkipAndTransitiveGoalOptionsRegistrar + target_filtering_enabled = True diff --git a/src/python/pants/task/lint_task_mixin.py b/src/python/pants/task/lint_task_mixin.py index 549a5978be0..d64100b0dda 100644 --- a/src/python/pants/task/lint_task_mixin.py +++ b/src/python/pants/task/lint_task_mixin.py @@ -11,3 +11,4 @@ class LintTaskMixin(HasSkipAndTransitiveGoalOptionsMixin): """A mixin to combine with lint tasks.""" goal_options_registrar_cls = SkipAndTransitiveGoalOptionsRegistrar + target_filtering_enabled = True diff --git a/src/python/pants/task/simple_codegen_task.py b/src/python/pants/task/simple_codegen_task.py index 276c50fe370..162b8dd4d2e 100644 --- a/src/python/pants/task/simple_codegen_task.py +++ b/src/python/pants/task/simple_codegen_task.py @@ -18,11 +18,11 @@ from pants.base.workunit import WorkUnitLabel from pants.build_graph.address import Address from pants.build_graph.address_lookup_error import AddressLookupError -from pants.engine.fs import PathGlobs, PathGlobsAndRoot +from pants.engine.fs import Digest, PathGlobs, PathGlobsAndRoot from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper from pants.task.task import Task from pants.util.collections_abc_backport import OrderedDict -from pants.util.dirutil import safe_delete +from pants.util.dirutil import fast_relpath, safe_delete logger = logging.getLogger(__name__) @@ -113,6 +113,10 @@ def synthetic_target_extra_dependencies(self, target, target_workdir): """ return [] + @classmethod + def implementation_version(cls): + return super(SimpleCodegenTask, cls).implementation_version() + [('SimpleCodegenTask', 2)] + def synthetic_target_extra_exports(self, target, target_workdir): """Gets any extra exports generated synthetic targets should have. @@ -206,7 +210,7 @@ def _do_validate_sources_present(self, target): def _get_synthetic_address(self, target, target_workdir): synthetic_name = target.id - sources_rel_path = os.path.relpath(target_workdir, get_buildroot()) + sources_rel_path = fast_relpath(target_workdir, get_buildroot()) synthetic_address = Address(sources_rel_path, synthetic_name) return synthetic_address @@ -230,32 +234,26 @@ def execute(self): with self.context.new_workunit(name='execute', labels=[WorkUnitLabel.MULTITOOL]): vts_to_sources = OrderedDict() for vt in invalidation_check.all_vts: - synthetic_target_dir = self.synthetic_target_dir(vt.target, vt.results_dir) - key = (vt, synthetic_target_dir) - vts_to_sources[key] = None + vts_to_sources[vt] = None # Build the target and handle duplicate sources. 
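The `files_relative_to_buildroot` rewrite above tracks the new Snapshot shape from `fs.py`: `files` and `dirs` are now tuples of relative path strings rather than `PathStat` objects, so callers concatenate them directly. A stand-in illustration (the namedtuple here only mimics the real Snapshot):

from collections import namedtuple

FakeSnapshot = namedtuple('FakeSnapshot', ['files', 'dirs'])
snapshot = FakeSnapshot(files=('src/a.py', 'src/b.py'), dirs=('src',))

include_dirs = True
paths = snapshot.files
if include_dirs:
  paths += snapshot.dirs
assert paths == ('src/a.py', 'src/b.py', 'src')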
if not vt.valid: if self._do_validate_sources_present(vt.target): - self.execute_codegen(vt.target, vt.results_dir) - sources = self._capture_sources((key,))[0] + self.execute_codegen(vt.target, vt.current_results_dir) + sources = self._capture_sources((vt,))[0] # _handle_duplicate_sources may delete files from the filesystem, so we need to # re-capture the sources. - if not self._handle_duplicate_sources(vt.target, vt.results_dir, sources): - vts_to_sources[key] = sources + if not self._handle_duplicate_sources(vt, sources): + vts_to_sources[vt] = sources vt.update() vts_to_capture = tuple(key for key, sources in vts_to_sources.items() if sources is None) filesets = self._capture_sources(vts_to_capture) for key, fileset in zip(vts_to_capture, filesets): vts_to_sources[key] = fileset - for (vt, synthetic_target_dir), fileset in vts_to_sources.items(): - self._inject_synthetic_target( - vt.target, - synthetic_target_dir, - fileset, - ) + for vt, fileset in vts_to_sources.items(): + self._inject_synthetic_target(vt, fileset) self._mark_transitive_invalidation_hashes_dirty( vt.target.address for vt in invalidation_check.all_vts ) @@ -280,17 +278,23 @@ def synthetic_target_dir(self, target, target_workdir): """ return target_workdir - # Accepts tuple of tuples of (target, synthetic_target_dir) + # Accepts tuple of VersionedTarget instances. # Returns tuple of EagerFilesetWithSpecs in matching order. - def _capture_sources(self, targets_and_dirs): + def _capture_sources(self, vts): to_capture = [] results_dirs = [] filespecs = [] - for target, synthetic_target_dir in targets_and_dirs: + for vt in vts: + target = vt.target + # Compute the (optional) subdirectory of the results_dir to generate code to. This + # path will end up in the generated FilesetWithSpec and target, and thus needs to be + # located below the stable/symlinked `vt.results_dir`. + synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir) + files = self.sources_globs - results_dir_relpath = os.path.relpath(synthetic_target_dir, get_buildroot()) + results_dir_relpath = fast_relpath(synthetic_target_dir, get_buildroot()) buildroot_relative_globs = tuple(os.path.join(results_dir_relpath, file) for file in files) buildroot_relative_excludes = tuple( os.path.join(results_dir_relpath, file) @@ -300,6 +304,8 @@ def _capture_sources(self, targets_and_dirs): PathGlobsAndRoot( PathGlobs(buildroot_relative_globs, buildroot_relative_excludes), text_type(get_buildroot()), + # The digest is stored adjacent to the hash-versioned `vt.current_results_dir`. + Digest.load(vt.current_results_dir), ) ) results_dirs.append(results_dir_relpath) @@ -307,33 +313,35 @@ def _capture_sources(self, targets_and_dirs): snapshots = self.context._scheduler.capture_snapshots(tuple(to_capture)) + for snapshot, vt in zip(snapshots, vts): + snapshot.directory_digest.dump(vt.current_results_dir) + return tuple(EagerFilesetWithSpec( results_dir_relpath, filespec, snapshot, ) for (results_dir_relpath, filespec, snapshot) in zip(results_dirs, filespecs, snapshots)) - def _inject_synthetic_target( - self, - target, - target_workdir, - sources, - ): + def _inject_synthetic_target(self, vt, sources): """Create, inject, and return a synthetic target for the given target and workdir. - :param target: The target to inject a synthetic target for. - :param target_workdir: The work directory containing the generated code for the target. + :param vt: A codegen input VersionedTarget to inject a synthetic target for. 
+ :param sources: A FilesetWithSpec to inject for the target. """ + target = vt.target + # NB: For stability, the injected target exposes the stable-symlinked `vt.results_dir`, + # rather than the hash-named `vt.current_results_dir`. + synthetic_target_dir = self.synthetic_target_dir(target, vt.results_dir) synthetic_target_type = self.synthetic_target_type(target) - synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, target_workdir) + synthetic_extra_dependencies = self.synthetic_target_extra_dependencies(target, synthetic_target_dir) copied_attributes = {} for attribute in self._copy_target_attributes: copied_attributes[attribute] = getattr(target, attribute) if self._supports_exports(synthetic_target_type): - extra_exports = self.synthetic_target_extra_exports(target, target_workdir) + extra_exports = self.synthetic_target_extra_exports(target, synthetic_target_dir) extra_exports_not_in_extra_dependencies = set(extra_exports).difference( set(synthetic_extra_dependencies)) @@ -349,7 +357,7 @@ def _inject_synthetic_target( copied_attributes['exports'] = sorted(union) synthetic_target = self.context.add_new_target( - address=self._get_synthetic_address(target, target_workdir), + address=self._get_synthetic_address(target, synthetic_target_dir), target_type=synthetic_target_type, dependencies=synthetic_extra_dependencies, sources=sources, @@ -405,7 +413,7 @@ def execute_codegen(self, target, target_workdir): :param target_workdir: A clean directory into which to generate code """ - def _handle_duplicate_sources(self, target, target_workdir, sources): + def _handle_duplicate_sources(self, vt, sources): """Handles duplicate sources generated by the given gen target by either failure or deletion. This method should be called after all dependencies have been injected into the graph, but @@ -420,6 +428,8 @@ def _handle_duplicate_sources(self, target, target_workdir, sources): default, this behavior is disabled, and duplication in generated sources will raise a TaskError. This is controlled by the --allow-dups flag. """ + target = vt.target + target_workdir = vt.results_dir # Walk dependency gentargets and record any sources owned by those targets that are also # owned by this target. 
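Putting the `_capture_sources` pieces together, the digest hint round-trips like this (a simplified sketch with the collaborators passed in as parameters; `scheduler` and `vt` stand in for the engine scheduler and a VersionedTarget):

from future.utils import text_type

from pants.base.build_environment import get_buildroot
from pants.engine.fs import Digest, PathGlobsAndRoot


def capture_with_digest_hint(scheduler, vt, path_globs):
  digest_hint = Digest.load(vt.current_results_dir)   # None on the first run
  globs = PathGlobsAndRoot(path_globs, text_type(get_buildroot()), digest_hint)
  snapshot, = scheduler.capture_snapshots((globs,))
  # Persist the digest so later runs can skip re-digesting unchanged outputs.
  snapshot.directory_digest.dump(vt.current_results_dir)
  return snapshot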
@@ -457,6 +467,8 @@ def record_duplicates(dep): for duped_source in duped_sources: safe_delete(os.path.join(target_workdir, duped_source)) did_modify = True + if did_modify: + Digest.clear(vt.current_results_dir) return did_modify class DuplicateSourceError(TaskError): diff --git a/src/python/pants/task/task.py b/src/python/pants/task/task.py index d7b5e28dc90..1c1ff115fa0 100644 --- a/src/python/pants/task/task.py +++ b/src/python/pants/task/task.py @@ -7,7 +7,7 @@ import os import sys from abc import abstractmethod -from builtins import filter, map, object, str, zip +from builtins import filter, map, object, set, str, zip from contextlib import contextmanager from hashlib import sha1 from itertools import repeat @@ -16,6 +16,7 @@ from pants.base.exceptions import TaskError from pants.base.worker_pool import Work +from pants.build_graph.target_filter_subsystem import TargetFilter from pants.cache.artifact_cache import UnreadableArtifact, call_insert, call_use_cached_files from pants.cache.cache_setup import CacheSetup from pants.invalidation.build_invalidator import (BuildInvalidator, CacheKeyGenerator, @@ -29,7 +30,7 @@ from pants.subsystem.subsystem_client_mixin import SubsystemClientMixin from pants.util.dirutil import safe_mkdir, safe_rm_oldest_items_in_dir from pants.util.memo import memoized_method, memoized_property -from pants.util.meta import AbstractClass +from pants.util.meta import AbstractClass, classproperty class TaskBase(SubsystemClientMixin, Optionable, AbstractClass): @@ -96,7 +97,8 @@ def _compute_stable_name(cls): @classmethod def subsystem_dependencies(cls): return (super(TaskBase, cls).subsystem_dependencies() + - (CacheSetup.scoped(cls), BuildInvalidator.Factory, SourceRootConfig)) + (CacheSetup.scoped(cls), BuildInvalidator.Factory, SourceRootConfig) + + ((TargetFilter.scoped(cls),) if cls.target_filtering_enabled else tuple())) @classmethod def product_types(cls): @@ -222,6 +224,17 @@ def act_transitively(self): """ return True + @classproperty + def target_filtering_enabled(cls): + """Whether this task should apply configured filters against targets. + + Tasks can override to enable target filtering (e.g. based on tags) and must + access targets via get_targets() + + :API: public + """ + return False + def get_targets(self, predicate=None): """Returns the candidate targets this task should act on. 
@@ -237,8 +250,24 @@ def get_targets(self, predicate=None): :API: public """ - return (self.context.targets(predicate) if self.act_transitively - else list(filter(predicate, self.context.target_roots))) + initial_targets = (self.context.targets(predicate) if self.act_transitively + else list(filter(predicate, self.context.target_roots))) + + if not self.target_filtering_enabled: + return initial_targets + else: + return self._filter_targets(initial_targets) + + def _filter_targets(self, targets): + included_targets = TargetFilter.scoped_instance(self).apply(targets) + excluded_targets = set(targets).difference(included_targets) + + if excluded_targets: + self.context.log.info("{} target(s) excluded".format(len(excluded_targets))) + for target in excluded_targets: + self.context.log.debug("{} excluded".format(target.address.spec)) + + return included_targets @memoized_property def workdir(self): diff --git a/src/python/pants/util/BUILD b/src/python/pants/util/BUILD index bf1b1f55b64..5e2bd8f4a00 100644 --- a/src/python/pants/util/BUILD +++ b/src/python/pants/util/BUILD @@ -56,6 +56,7 @@ python_library( dependencies = [ ':strutil', '3rdparty/python:future', + 'src/python/pants/base:deprecated', ], ) diff --git a/src/python/pants/util/dirutil.py b/src/python/pants/util/dirutil.py index 7dd542b9dcb..7391f072b15 100644 --- a/src/python/pants/util/dirutil.py +++ b/src/python/pants/util/dirutil.py @@ -16,6 +16,7 @@ from collections import defaultdict from contextlib import contextmanager +from pants.base.deprecated import deprecated_conditional from pants.util.strutil import ensure_text @@ -100,15 +101,17 @@ def safe_mkdir_for_all(paths): created_dirs.add(dir_to_make) -def safe_file_dump(filename, payload, binary_mode=None, mode=None): +# TODO(#6742): payload should be Union[str, bytes] in type hint syntax, but from +# https://pythonhosted.org/an_example_pypi_project/sphinx.html#full-code-example it doesn't appear +# that is possible to represent in docstring type syntax. +def safe_file_dump(filename, payload='', binary_mode=None, mode=None): """Write a string to a file. This method is "safe" to the extent that `safe_open` is "safe". See the explanation on the method doc there. - TODO: The `binary_mode` flag should be deprecated and removed from existing callsites. Once - `binary_mode` is removed, mode can directly default to `wb`. - see https://github.com/pantsbuild/pants/issues/6543 + When `payload` is an empty string (the default), this method can be used as a concise way to + create an empty file along with its containing directory (or truncate it if it already exists). :param string filename: The filename of the file to write to. :param string payload: The string to write to the file. @@ -116,9 +119,19 @@ def safe_file_dump(filename, payload, binary_mode=None, mode=None): :param string mode: A mode argument for the python `open` builtin. Mutually exclusive with binary_mode. 
""" + deprecated_conditional( + lambda: binary_mode is not None, + removal_version='1.16.0.dev2', + entity_description='The binary_mode argument in safe_file_dump()', + hint_message='Use the mode argument instead!') if binary_mode is not None and mode is not None: raise AssertionError('Only one of `binary_mode` and `mode` may be specified.') + deprecated_conditional( + lambda: mode is None, + removal_version='1.16.0.dev2', + entity_description='Not specifying mode explicitly in safe_file_dump()', + hint_message="Function will default to unicode ('w') when pants migrates to python 3!") if mode is None: if binary_mode is False: mode = 'w' @@ -129,7 +142,7 @@ def safe_file_dump(filename, payload, binary_mode=None, mode=None): f.write(payload) -def maybe_read_file(filename, binary_mode=True): +def maybe_read_file(filename, binary_mode=None): """Read and return the contents of a file in a single file.read(). :param string filename: The filename of the file to read. @@ -137,13 +150,22 @@ def maybe_read_file(filename, binary_mode=True): :returns: The contents of the file, or opening the file fails for any reason :rtype: string """ + # TODO(#7121): Default binary_mode=False after the python 3 switchover! + deprecated_conditional( + lambda: binary_mode is None, + removal_version='1.16.0.dev2', + entity_description='Not specifying binary_mode explicitly in maybe_read_file()', + hint_message='Function will default to unicode when pants migrates to python 3!') + if binary_mode is None: + binary_mode = True + try: return read_file(filename, binary_mode=binary_mode) except IOError: return None -def read_file(filename, binary_mode=True): +def read_file(filename, binary_mode=None): """Read and return the contents of a file in a single file.read(). :param string filename: The filename of the file to read. @@ -151,6 +173,15 @@ def read_file(filename, binary_mode=True): :returns: The contents of the file. :rtype: string """ + # TODO(#7121): Default binary_mode=False after the python 3 switchover! + deprecated_conditional( + lambda: binary_mode is None, + removal_version='1.16.0.dev2', + entity_description='Not specifying binary_mode explicitly in read_file()', + hint_message='Function will default to unicode when pants migrates to python 3!') + if binary_mode is None: + binary_mode = True + mode = 'rb' if binary_mode else 'r' with open(filename, mode) as f: return f.read() diff --git a/src/python/pants/util/objects.py b/src/python/pants/util/objects.py index ddc51f388fb..f66ebc5d717 100644 --- a/src/python/pants/util/objects.py +++ b/src/python/pants/util/objects.py @@ -4,19 +4,33 @@ from __future__ import absolute_import, division, print_function, unicode_literals -import sys from abc import abstractmethod -from builtins import object, zip +from builtins import zip from collections import namedtuple -from future.utils import PY2 from twitter.common.collections import OrderedSet -from pants.util.collections_abc_backport import OrderedDict -from pants.util.memo import memoized, memoized_classproperty -from pants.util.meta import AbstractClass +from pants.util.collections_abc_backport import Iterable, OrderedDict +from pants.util.memo import memoized_classproperty +from pants.util.meta import AbstractClass, classproperty +class TypeCheckError(TypeError): + + # TODO: make some wrapper exception class to make this kind of + # prefixing easy (maybe using a class field format string?). 
+ def __init__(self, type_name, msg, *args, **kwargs): + formatted_msg = "type check error in class {}: {}".format(type_name, msg) + super(TypeCheckError, self).__init__(formatted_msg, *args, **kwargs) + + +class TypedDatatypeInstanceConstructionError(TypeCheckError): + """Raised when a datatype()'s fields fail a type check upon construction.""" + + +# TODO: create a mixin which declares/implements the methods we define on the generated class in +# datatype() and enum() to decouple the class's logic from the way it's created. This may also make +# migration to python 3 dataclasses as per #7074 easier. def datatype(field_decls, superclass_name=None, **kwargs): """A wrapper for `namedtuple` that accounts for the type of the object in equality. @@ -58,9 +72,20 @@ def datatype(field_decls, superclass_name=None, **kwargs): namedtuple_cls = namedtuple(superclass_name, field_names, **kwargs) class DataType(namedtuple_cls): + @classproperty + def type_check_error_type(cls): + """The exception type to use in make_type_error().""" + return TypedDatatypeInstanceConstructionError + @classmethod def make_type_error(cls, msg, *args, **kwargs): - return TypeCheckError(cls.__name__, msg, *args, **kwargs) + """A helper method to generate an exception type for type checking errors. + + This method uses `cls.type_check_error_type` to ensure that type checking errors can be caught + with a reliable exception type. The type returned by `cls.type_check_error_type` should ensure + that the exception messages are prefixed with enough context to be useful and *not* confusing. + """ + return cls.type_check_error_type(cls.__name__, msg, *args, **kwargs) def __new__(cls, *args, **kwargs): # TODO: Ideally we could execute this exactly once per `cls` but it should be a @@ -71,7 +96,8 @@ def __new__(cls, *args, **kwargs): try: this_object = super(DataType, cls).__new__(cls, *args, **kwargs) except TypeError as e: - raise cls.make_type_error(e) + raise cls.make_type_error( + "error in namedtuple() base constructor: {}".format(e)) # TODO: Make this kind of exception pattern (filter for errors then display them all at once) # more ergonomic. @@ -84,7 +110,9 @@ def __new__(cls, *args, **kwargs): type_failure_msgs.append( "field '{}' was invalid: {}".format(field_name, e)) if type_failure_msgs: - raise cls.make_type_error('\n'.join(type_failure_msgs)) + raise cls.make_type_error( + 'errors type checking constructor arguments:\n{}' + .format('\n'.join(type_failure_msgs))) return this_object @@ -104,13 +132,16 @@ def __eq__(self, other): def __ne__(self, other): return not (self == other) + # NB: in Python 3, whenever __eq__ is overridden, __hash__() must also be + # explicitly implemented, otherwise Python will raise "unhashable type". See + # https://docs.python.org/3/reference/datamodel.html#object.__hash__. def __hash__(self): return super(DataType, self).__hash__() # NB: As datatype is not iterable, we need to override both __iter__ and all of the # namedtuple methods that expect self to be iterable. 
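+    # Illustrative only: iterating a datatype instance (e.g. list(my_datatype_instance)) now
+    # raises cls.type_check_error_type rather than silently yielding the fields.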
def __iter__(self):
-      raise TypeError("'{}' object is not iterable".format(type(self).__name__))
+      raise self.make_type_error("datatype object is not iterable")

     def _super_iter(self):
       return super(DataType, self).__iter__()
@@ -170,17 +201,37 @@ def __str__(self):
   return type(superclass_name.encode('utf-8'), (DataType,), {})


-def enum(field_name, all_values):
+class EnumVariantSelectionError(TypeCheckError):
+  """Raised when an invalid variant for an enum() is constructed or matched against."""
+
+
+def enum(*args):
   """A datatype which can take on a finite set of values. This method is experimental and unstable.

   Any enum subclass can be constructed with its create() classmethod. This method will use the first
-  element of `all_values` as the enum value if none is specified.
-
-  :param field_name: A string used as the field for the datatype. Note that enum does not yet
-                     support type checking as with datatype.
-  :param all_values: An iterable of objects representing all possible values for the enum.
-                     NB: `all_values` must be a finite, non-empty iterable with unique values!
+  element of `all_values` as the default value, but enum classes can override this behavior by
+  setting `default_value` in the class body.
+
+  NB: Relying on the `field_name` directly is discouraged in favor of using
+  resolve_for_enum_variant() in Python code. The `field_name` argument is exposed to make enum
+  instances more readable when printed, and to allow code in another language using an FFI to
+  reliably extract the value from an enum instance.
+
+  :param string field_name: A string used as the field for the datatype. This positional argument is
+                            optional, and defaults to 'value'. Note that `enum()` does not yet
+                            support type checking as with `datatype()`.
+  :param Iterable all_values: A nonempty iterable of objects representing all possible values for
+                              the enum. This argument must be a finite, non-empty iterable with
+                              unique values.
+  :raises: :class:`ValueError`
   """
+  if len(args) == 1:
+    field_name = 'value'
+    all_values, = args
+  elif len(args) == 2:
+    field_name, all_values = args
+  else:
+    raise ValueError("enum() accepts only 1 or 2 args! args = {!r}".format(args))

   # This call to list() will eagerly evaluate any `all_values` which would otherwise be lazy, such
   # as a generator.
@@ -188,84 +239,141 @@ def enum(field_name, all_values):
   # `OrderedSet` maintains the order of the input iterable, but is faster to check membership.
   allowed_values_set = OrderedSet(all_values_realized)

-  if len(allowed_values_set) < len(all_values_realized):
+  if len(allowed_values_set) == 0:
+    raise ValueError("all_values must be a non-empty iterable!")
+  elif len(allowed_values_set) < len(all_values_realized):
     raise ValueError("When converting all_values ({}) to a set, at least one duplicate "
                      "was detected. The unique elements of all_values were: {}."
-                     .format(all_values_realized, allowed_values_set))
+                     .format(all_values_realized, list(allowed_values_set)))

   class ChoiceDatatype(datatype([field_name])):
-    allowed_values = allowed_values_set
-    default_value = next(iter(allowed_values))
+    default_value = next(iter(allowed_values_set))
+
+    # Overridden from datatype() so providing an invalid variant is catchable as a TypeCheckError,
+    # but more specific.
+    type_check_error_type = EnumVariantSelectionError

     @memoized_classproperty
     def _singletons(cls):
-      """Generate memoized instances of this enum wrapping each of this enum's allowed values."""
-      return { value: cls(value) for value in cls.allowed_values }
+      """Generate memoized instances of this enum wrapping each of this enum's allowed values.
+
+      NB: The implementation of enum() should use this property as the source of truth for allowed
+      values and enum instances from those values.
+      """
+      return OrderedDict((value, cls._make_singleton(value)) for value in allowed_values_set)

     @classmethod
-    def _check_value(cls, value):
-      if value not in cls.allowed_values:
-        raise cls.make_type_error(
-          "Value {!r} for '{}' must be one of: {!r}."
-          .format(value, field_name, cls.allowed_values))
+    def _make_singleton(cls, value):
+      """
+      We convert uses of the constructor to call create(), so we then need to go around __new__ to
+      bootstrap singleton creation from datatype()'s __new__.
+      """
+      return super(ChoiceDatatype, cls).__new__(cls, value)
+
+    @classproperty
+    def _allowed_values(cls):
+      """The values provided to the enum() type constructor, for use in error messages."""
+      return list(cls._singletons.keys())
+
+    def __new__(cls, value):
+      """Forward `value` to the .create() factory method.
+
+      The .create() factory method is preferred, but forwarding the constructor like this allows us
+      to use the generated enum type both as a type to check against with isinstance() as well as a
+      function to create instances with. This makes it easy to use as a pants option type.
+      """
+      return cls.create(value)
+
+    # TODO: figure out if this will always trigger on primitives like strings, and what situations
+    # won't call this __eq__ (and therefore won't raise like we want).
+    def __eq__(self, other):
+      """Redefine equality to raise to nudge people to use static pattern matching."""
+      raise self.make_type_error(
+        "enum equality is defined to be an error -- use .resolve_for_enum_variant() instead!")
+    # Redefine the canary so datatype __new__ doesn't raise.
+    __eq__._eq_override_canary = None
+
+    # NB: as noted in datatype(), __hash__ must be explicitly implemented whenever __eq__ is
+    # overridden. See https://docs.python.org/3/reference/datamodel.html#object.__hash__.
+    def __hash__(self):
+      return super(ChoiceDatatype, self).__hash__()

     @classmethod
-    def create(cls, value=None):
+    def create(cls, *args, **kwargs):
+      """Create an instance of this enum, using the default value if none is specified.
+
+      :param value: Use this as the enum value. If `value` is an instance of this class, return it,
+                    otherwise it is checked against the enum's allowed values. This positional
+                    argument is optional, and if not specified, `cls.default_value` is used.
+      :param bool none_is_default: If this is True, a None `value` is converted into
+                                   `cls.default_value` before being checked against the enum's
+                                   allowed values.
+      """
+      none_is_default = kwargs.pop('none_is_default', False)
+      if kwargs:
+        raise ValueError('unrecognized keyword arguments for {}.create(): {!r}'
+                         .format(cls.__name__, kwargs))
+
+      if len(args) == 0:
+        value = cls.default_value
+      elif len(args) == 1:
+        value = args[0]
+        if none_is_default and value is None:
+          value = cls.default_value
+      else:
+        raise ValueError('{}.create() accepts 0 or 1 positional args! *args = {!r}'
+                         .format(cls.__name__, args))
+
       # If we get an instance of this enum class, just return it.
This means you can call .create() - # on None, an allowed value for the enum, or an existing instance of the enum. + # on an allowed value for the enum, or an existing instance of the enum. if isinstance(value, cls): return value - # Providing an explicit value that is not None will *not* use the default value! - if value is None: - value = cls.default_value - - # We actually circumvent the constructor in this method due to the cls._singletons - # memoized_classproperty, but we want to raise the same error, so we move checking into a - # common method. - cls._check_value(value) - + if value not in cls._singletons: + raise cls.make_type_error( + "Value {!r} for '{}' must be one of: {!r}." + .format(value, field_name, cls._allowed_values)) return cls._singletons[value] - def __new__(cls, *args, **kwargs): - this_object = super(ChoiceDatatype, cls).__new__(cls, *args, **kwargs) - - field_value = getattr(this_object, field_name) - - cls._check_value(field_value) + def resolve_for_enum_variant(self, mapping): + """Return the object in `mapping` with the key corresponding to the enum value. - return this_object - - return ChoiceDatatype - - -class TypedDatatypeClassConstructionError(Exception): - - # TODO: make some wrapper exception class to make this kind of - # prefixing easy (maybe using a class field format string?). - def __init__(self, type_name, msg, *args, **kwargs): - full_msg = "error: while trying to generate typed datatype {}: {}".format( - type_name, msg) - super(TypedDatatypeClassConstructionError, self).__init__( - full_msg, *args, **kwargs) + `mapping` is a dict mapping enum variant value -> arbitrary object. All variant values must be + provided. + NB: The objects in `mapping` should be made into lambdas if lazy execution is desired, as this + will "evaluate" all of the values in `mapping`. + """ + keys = frozenset(mapping.keys()) + if keys != frozenset(self._allowed_values): + raise self.make_type_error( + "pattern matching must have exactly the keys {} (was: {})" + .format(self._allowed_values, list(keys))) + match_for_variant = mapping[getattr(self, field_name)] + return match_for_variant -class TypedDatatypeInstanceConstructionError(TypeError): + @classmethod + def iterate_enum_variants(cls): + """Iterate over all instances of this enum, in the declared order. - def __init__(self, type_name, msg, *args, **kwargs): - full_msg = "error: in constructor of type {}: {}".format(type_name, msg) - super(TypedDatatypeInstanceConstructionError, self).__init__( - full_msg, *args, **kwargs) + NB: This method is exposed for testing enum variants easily. resolve_for_enum_variant() should + be used for performing conditional logic based on an enum instance's value. + """ + # TODO(#7232): use this method to register attributes on the generated type object for each of + # the singletons! + return cls._singletons.values() + return ChoiceDatatype -class TypeCheckError(TypedDatatypeInstanceConstructionError): - def __init__(self, type_name, msg, *args, **kwargs): - formatted_msg = "type check error:\n{}".format(msg) - super(TypeCheckError, self).__init__( - type_name, formatted_msg, *args, **kwargs) +# TODO(#7233): allow usage of the normal register() by using an enum class as the `type` argument! 
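+# Illustrative usage of the enum() API defined above (the class name here is hypothetical,
+# not part of this change):
+#
+#   class UnicodeWidth(enum(['ucs2', 'ucs4'])): pass
+#
+#   UnicodeWidth.create()                                # the 'ucs2' variant (declared default)
+#   UnicodeWidth('ucs4') is UnicodeWidth.create('ucs4')  # True: variants are memoized singletons
+#   UnicodeWidth.create('ucs3')                          # raises EnumVariantSelectionError
+#   UnicodeWidth('ucs2') == UnicodeWidth('ucs2')         # raises: __eq__ is defined to be an error
+#   UnicodeWidth('ucs4').resolve_for_enum_variant({'ucs2': 2, 'ucs4': 4})  # returns 4
+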
+def register_enum_option(register, enum_cls, *args, **kwargs):
+  """A helper method for declaring a pants option from an `enum()`."""
+  default_value = kwargs.pop('default', enum_cls.default_value)
+  register(*args, choices=enum_cls._allowed_values, default=default_value, **kwargs)
+
+
+# TODO: make these members of the `TypeConstraint` class!
 class TypeConstraintError(TypeError):
   """Indicates a :class:`TypeConstraint` violation."""

@@ -273,43 +381,99 @@ class TypeConstraintError(TypeError):
 class TypeConstraint(AbstractClass):
   """Represents a type constraint.

-  Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exact` or
+  Not intended for direct use; instead, use one of :class:`SuperclassesOf`, :class:`Exactly` or
   :class:`SubclassesOf`.
   """

-  def __init__(self, *types, **kwargs):
+  def __init__(self, description):
     """Creates a type constraint centered around the given types.

     The type constraint is satisfied as a whole if satisfied for at least one of the given types.

-    :param type *types: The focus of this type constraint.
-    :param str description: A description for this constraint if the list of types is too long.
+    :param str description: A concise, readable description of what the type constraint represents.
+                            Used directly as the __str__ implementation.
+    """
+    self._description = description
+
+  @abstractmethod
+  def satisfied_by(self, obj):
+    """Return `True` if the given object satisfies this type constraint.
+
+    :rtype: bool
+    """
+
+  def make_type_constraint_error(self, obj, constraint):
+    return TypeConstraintError(
+      "value {!r} (with type {!r}) must satisfy this type constraint: {}."
+      .format(obj, type(obj).__name__, constraint))
+
+  # TODO: disallow overriding this method with some form of mixin/decorator along with datatype
+  # __eq__!
+  def validate_satisfied_by(self, obj):
+    """Return `obj` if the object satisfies this type constraint, or raise.
+
+    :raises: `TypeConstraintError` if `obj` does not satisfy the constraint.
+    """
+
+    if self.satisfied_by(obj):
+      return obj
+
+    raise self.make_type_constraint_error(obj, self)
+
+  def __ne__(self, other):
+    return not (self == other)
+
+  def __str__(self):
+    return self._description
+
+
+class TypeOnlyConstraint(TypeConstraint):
+  """A `TypeConstraint` predicated only on the object's type.
+
+  `TypeConstraint` subclasses may override `.satisfied_by()` to perform arbitrary validation on the
+  object itself -- however, this class implements `.satisfied_by()` with a guarantee that it will
+  only act on the object's `type` via `.satisfied_by_type()`. This kind of type checking is faster
+  and easier to understand than the more complex validation allowed by `.satisfied_by()`.
+  """
+
+  # TODO: make an @abstract_classproperty decorator to do this boilerplate!
+  @classproperty
+  def _variance_symbol(cls):
+    """This is propagated to the `TypeConstraint` constructor."""
+    raise NotImplementedError('{} must implement the _variance_symbol classproperty!'
+                              .format(cls.__name__))
+
+  def __init__(self, *types):
+    """Creates a type constraint based on some logic to match the given types.
+
+    NB: A `TypeOnlyConstraint` implementation should ensure that the type constraint is satisfied as
+    a whole if satisfied for at least one of the given `types`.
+
+    :param type *types: The types this constraint will match in some way.
     """

     if not types:
       raise ValueError('Must supply at least one type')
     if any(not isinstance(t, type) for t in types):
       raise TypeError('Supplied types must be types.
{!r}'.format(types)) - # NB: `types` is converted to tuple here because self.types's docstring says - # it returns a tuple. Does it matter what type this field is? + if len(types) == 1: + type_list = types[0].__name__ + else: + type_list = ' or '.join(t.__name__ for t in types) + description = '{}({})'.format(type(self).__name__, type_list) + + super(TypeOnlyConstraint, self).__init__(description=description) + + # NB: This is made into a tuple so that we can use self._types in issubclass() and others! self._types = tuple(types) - self._desc = kwargs.get('description', None) + # TODO(#7114): remove this after the engine is converted to use `TypeId` instead of + # `TypeConstraint`! @property def types(self): - """Return the subject types of this type constraint. - - :type: tuple of type - """ return self._types - def satisfied_by(self, obj): - """Return `True` if the given object satisfies this type constraint. - - :rtype: bool - """ - return self.satisfied_by_type(type(obj)) - @abstractmethod def satisfied_by_type(self, obj_type): """Return `True` if the given object satisfies this type constraint. @@ -317,18 +481,8 @@ def satisfied_by_type(self, obj_type): :rtype: bool """ - def validate_satisfied_by(self, obj): - """Return `obj` if the object satisfies this type constraint, or raise. - - :raises: `TypeConstraintError` if `obj` does not satisfy the constraint. - """ - - if self.satisfied_by(obj): - return obj - - raise TypeConstraintError( - "value {!r} (with type {!r}) must satisfy this type constraint: {!r}." - .format(obj, type(obj).__name__, self)) + def satisfied_by(self, obj): + return self.satisfied_by_type(type(obj)) def __hash__(self): return hash((type(self), self._types)) @@ -336,44 +490,23 @@ def __hash__(self): def __eq__(self, other): return type(self) == type(other) and self._types == other._types - def __ne__(self, other): - return not (self == other) - - def __str__(self): - if self._desc: - constrained_type = '({})'.format(self._desc) - else: - if len(self._types) == 1: - constrained_type = self._types[0].__name__ - else: - constrained_type = '({})'.format(', '.join(t.__name__ for t in self._types)) - return '{variance_symbol}{constrained_type}'.format(variance_symbol=self._variance_symbol, - constrained_type=constrained_type) - def __repr__(self): - if self._desc: - constrained_type = self._desc - else: - constrained_type = ', '.join(t.__name__ for t in self._types) + constrained_type = ', '.join(t.__name__ for t in self._types) return ('{type_constraint_type}({constrained_type})' .format(type_constraint_type=type(self).__name__, - constrained_type=constrained_type)) + constrained_type=constrained_type)) -class SuperclassesOf(TypeConstraint): +class SuperclassesOf(TypeOnlyConstraint): """Objects of the exact type as well as any super-types are allowed.""" - _variance_symbol = '-' - def satisfied_by_type(self, obj_type): return any(issubclass(t, obj_type) for t in self._types) -class Exactly(TypeConstraint): +class Exactly(TypeOnlyConstraint): """Only objects of the exact type are allowed.""" - _variance_symbol = '=' - def satisfied_by_type(self, obj_type): return obj_type in self._types @@ -384,41 +517,66 @@ def graph_str(self): return repr(self) -class SubclassesOf(TypeConstraint): +class SubclassesOf(TypeOnlyConstraint): """Objects of the exact type as well as any sub-types are allowed.""" - _variance_symbol = '+' - def satisfied_by_type(self, obj_type): return issubclass(obj_type, self._types) -class Collection(object): - """Constructs classes representing 
collections of objects of a particular type. +class TypedCollection(TypeConstraint): + """A `TypeConstraint` which accepts a TypeOnlyConstraint and validates a collection.""" - The produced class will expose its values under a field named dependencies - this is a stable API - which may be consumed e.g. over FFI from the engine. + _iterable_constraint = SubclassesOf(Iterable) - Python consumers of a Collection should prefer to use its standard iteration API. - """ - # TODO: could we check that the input is iterable in the ctor? - - @classmethod - @memoized - def of(cls, *element_types): - union = '|'.join(element_type.__name__ for element_type in element_types) - type_name = '{}.of({})'.format(cls.__name__, union) - if PY2: - type_name = type_name.encode('utf-8') - # TODO: could we allow type checking in the datatype() invocation here? - supertypes = (cls, datatype(['dependencies'], superclass_name='Collection')) - properties = {'element_types': element_types} - collection_of_type = type(type_name, supertypes, properties) - - # Expose the custom class type at the module level to be pickle compatible. - setattr(sys.modules[cls.__module__], type_name, collection_of_type) - - return collection_of_type - - def __iter__(self): - return iter(self.dependencies) + def __init__(self, constraint): + """Create a `TypeConstraint` which validates each member of a collection with `constraint`. + + :param TypeOnlyConstraint constraint: the `TypeConstraint` to apply to each element. This is + currently required to be a `TypeOnlyConstraint` to avoid + complex prototypal type relationships. + """ + + if not isinstance(constraint, TypeOnlyConstraint): + raise TypeError("constraint for collection must be a {}! was: {}" + .format(TypeOnlyConstraint.__name__, constraint)) + + description = '{}({})'.format(type(self).__name__, constraint) + + self._constraint = constraint + + super(TypedCollection, self).__init__(description=description) + + # TODO: consider making this a private method of TypeConstraint, as it now duplicates the logic in + # self.validate_satisfied_by()! 
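+  # Illustrative only (not part of this change): TypedCollection(Exactly(Foo)) accepts any
+  # iterable whose elements are all exactly of type Foo. Note that a str is itself iterable,
+  # so TypedCollection(Exactly(str)) accepts 'abc' as well as ['a', 'b', 'c'].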
+ def satisfied_by(self, obj): + if self._iterable_constraint.satisfied_by(obj): + return all(self._constraint.satisfied_by(el) for el in obj) + return False + + def make_collection_type_constraint_error(self, base_obj, el): + base_error = self.make_type_constraint_error(el, self._constraint) + return TypeConstraintError("in wrapped constraint {} matching iterable object {}: {}" + .format(self, base_obj, base_error)) + + def validate_satisfied_by(self, obj): + if self._iterable_constraint.satisfied_by(obj): + for el in obj: + if not self._constraint.satisfied_by(el): + raise self.make_collection_type_constraint_error(obj, el) + return obj + + base_iterable_error = self.make_type_constraint_error(obj, self._iterable_constraint) + raise TypeConstraintError( + "in wrapped constraint {}: {}".format(self, base_iterable_error)) + + def __hash__(self): + return hash((type(self), self._constraint)) + + def __eq__(self, other): + return type(self) == type(other) and self._constraint == other._constraint + + def __repr__(self): + return ('{type_constraint_type}({constraint!r})' + .format(type_constraint_type=type(self).__name__, + constraint=self._constraint)) diff --git a/src/rust/engine/Cargo.lock b/src/rust/engine/Cargo.lock index 4c13841e6d2..2648f2b9e20 100644 --- a/src/rust/engine/Cargo.lock +++ b/src/rust/engine/Cargo.lock @@ -98,7 +98,6 @@ dependencies = [ "grpcio 0.3.0 (git+https://github.com/pantsbuild/grpc-rs.git?rev=4dfafe9355dc996d7d0702e7386a6fedcd9734c0)", "grpcio-compiler 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", "hashing 0.0.1", - "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "prost-derive 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", @@ -1280,28 +1279,18 @@ dependencies = [ "fs 0.0.1", "futures 0.1.25 (registry+https://github.com/rust-lang/crates.io-index)", "futures-timer 0.1.1 (git+https://github.com/pantsbuild/futures-timer?rev=0b747e565309a58537807ab43c674d8951f9e5a0)", - "h2 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", + "grpcio 0.3.0 (git+https://github.com/pantsbuild/grpc-rs.git?rev=4dfafe9355dc996d7d0702e7386a6fedcd9734c0)", "hashing 0.0.1", - "http 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)", "mock 0.0.1", - "parking_lot 0.6.4 (registry+https://github.com/rust-lang/crates.io-index)", - "prost 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", - "prost-types 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", "protobuf 2.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "resettable 0.0.1", "sha2 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "tempfile 3.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "testutil 0.0.1", "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)", "tokio-codec 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "tokio-connect 0.1.0 (git+https://github.com/pantsbuild/tokio-connect?rev=f7ad1ca437973d6e24037ac6f7d5ef1013833c0b)", "tokio-process 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "tower-grpc 0.1.0 (git+https://github.com/pantsbuild/tower-grpc.git?rev=ef19f2e1715f415ecb699e8f17f5845ad2b45daf)", 
- "tower-h2 0.1.0 (git+https://github.com/pantsbuild/tower-h2?rev=44b0efb4983b769283efd5b2a3bc3decbf7c33de)",
- "tower-http 0.1.0 (git+https://github.com/pantsbuild/tower-http?rev=56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00)",
- "tower-util 0.1.0 (git+https://github.com/pantsbuild/tower?rev=7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c)",
 ]

 [[package]]
@@ -1316,7 +1305,6 @@ dependencies = [
  "hashing 0.0.1",
  "process_execution 0.0.1",
  "resettable 0.0.1",
- "tokio 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
 ]

 [[package]]
diff --git a/src/rust/engine/fs/src/snapshot.rs b/src/rust/engine/fs/src/snapshot.rs
index 1badce3abc7..3701fd2278e 100644
--- a/src/rust/engine/fs/src/snapshot.rs
+++ b/src/rust/engine/fs/src/snapshot.rs
@@ -3,7 +3,7 @@

 use crate::glob_matching::GlobMatching;
 use crate::pool::ResettablePool;
-use crate::{File, PathGlobs, PathStat, PosixFS, Store};
+use crate::{Dir, File, PathGlobs, PathStat, PosixFS, Store};
 use bazel_protos;
 use boxfuture::{try_future, BoxFuture, Boxable};
 use futures::future::{self, join_all};
@@ -44,27 +44,57 @@ impl Snapshot {
   >(
     store: Store,
     file_digester: &S,
-    path_stats: Vec<PathStat>,
+    mut path_stats: Vec<PathStat>,
   ) -> BoxFuture<Snapshot, String> {
-    let mut sorted_path_stats = path_stats.clone();
-    sorted_path_stats.sort_by(|a, b| a.path().cmp(b.path()));
+    path_stats.sort_by(|a, b| a.path().cmp(b.path()));

     // The helper assumes that if a Path has multiple children, it must be a directory.
     // Proactively error if we run into identically named files, because otherwise we will treat
     // them like empty directories.
-    sorted_path_stats.dedup_by(|a, b| a.path() == b.path());
-    if sorted_path_stats.len() != path_stats.len() {
+    let pre_dedupe_len = path_stats.len();
+    path_stats.dedup_by(|a, b| a.path() == b.path());
+    if path_stats.len() != pre_dedupe_len {
       return future::err(format!(
         "Snapshots must be constructed from unique path stats; got duplicates in {:?}",
         path_stats
       ))
       .to_boxed();
     }
-    Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &sorted_path_stats)
+    Snapshot::ingest_directory_from_sorted_path_stats(store, file_digester, &path_stats)
       .map(|digest| Snapshot { digest, path_stats })
       .to_boxed()
   }

+  pub fn from_digest(store: Store, digest: Digest) -> BoxFuture<Snapshot, String> {
+    store
+      .walk(digest, |_, path_so_far, _, directory| {
+        let mut path_stats = Vec::new();
+        path_stats.extend(directory.get_directories().iter().map(move |dir_node| {
+          let path = path_so_far.join(dir_node.get_name());
+          PathStat::dir(path.clone(), Dir(path))
+        }));
+        path_stats.extend(directory.get_files().iter().map(move |file_node| {
+          let path = path_so_far.join(file_node.get_name());
+          PathStat::file(
+            path.clone(),
+            File {
+              path,
+              is_executable: file_node.is_executable,
+            },
+          )
+        }));
+        future::ok(path_stats).to_boxed()
+      })
+      .map(move |path_stats_per_directory| {
+        let mut path_stats =
+          Iterator::flatten(path_stats_per_directory.into_iter().map(|v| v.into_iter()))
+            .collect::<Vec<_>>();
+        path_stats.sort_by(|l, r| l.path().cmp(&r.path()));
+        Snapshot { digest, path_stats }
+      })
+      .to_boxed()
+  }
+
   pub fn digest_from_path_stats<
     S: StoreFileByDigest<Error> + Sized + Clone,
     Error: fmt::Debug + 'static + Send,
@@ -312,29 +342,44 @@ impl Snapshot {
     .to_boxed()
   }

-  pub fn capture_snapshot_from_arbitrary_root<P: AsRef<Path>>(
+  ///
+  /// Capture a Snapshot of a presumed-immutable piece of the filesystem.
+  ///
+  /// Note that we don't use a Graph here, and don't cache any intermediate steps, we just place
+  /// the resultant Snapshot into the store and return it.
This is important, because we're reading
+  /// things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch
+  /// them for changes.
+  ///
+  /// If the `digest_hint` is given, first attempt to load the Snapshot using that Digest, and only
+  /// fall back to actually walking the filesystem if we don't have it (either due to garbage
+  /// collection or Digest-oblivious legacy caching).
+  ///
+  pub fn capture_snapshot_from_arbitrary_root<P: AsRef<Path> + Send + 'static>(
     store: Store,
     fs_pool: Arc<ResettablePool>,
     root_path: P,
     path_globs: PathGlobs,
+    digest_hint: Option<Digest>,
   ) -> BoxFuture<Snapshot, String> {
-    // Note that we don't use a Graph here, and don't cache any intermediate steps, we just place
-    // the resultant Snapshot into the store and return it. This is important, because we're reading
-    // things from arbitrary filepaths which we don't want to cache in the graph, as we don't watch
-    // them for changes.
-    // We assume that this Snapshot is of an immutable piece of the filesystem.
-
-    let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[])));
-
-    posix_fs
-      .expand(path_globs)
-      .map_err(|err| format!("Error expanding globs: {:?}", err))
-      .and_then(|path_stats| {
-        Snapshot::from_path_stats(
-          store.clone(),
-          &OneOffStoreFileByDigest::new(store, posix_fs),
-          path_stats,
-        )
+    // Attempt to use the digest hint to load a Snapshot without expanding the globs; otherwise,
+    // expand the globs to capture a Snapshot.
+    let store2 = store.clone();
+    future::result(digest_hint.ok_or_else(|| "No digest hint provided.".to_string()))
+      .and_then(move |digest| Snapshot::from_digest(store, digest))
+      .or_else(|_| {
+        let posix_fs = Arc::new(try_future!(PosixFS::new(root_path, fs_pool, &[])));
+
+        posix_fs
+          .expand(path_globs)
+          .map_err(|err| format!("Error expanding globs: {:?}", err))
+          .and_then(|path_stats| {
+            Snapshot::from_path_stats(
+              store2.clone(),
+              &OneOffStoreFileByDigest::new(store2, posix_fs),
+              path_stats,
+            )
+          })
+          .to_boxed()
       })
       .to_boxed()
   }
@@ -507,6 +552,27 @@ mod tests {
     );
   }

+  #[test]
+  fn snapshot_from_digest() {
+    let (store, dir, posix_fs, digester) = setup();
+
+    let cats = PathBuf::from("cats");
+    let roland = cats.join("roland");
+    std::fs::create_dir_all(&dir.path().join(cats)).unwrap();
+    make_file(&dir.path().join(&roland), STR.as_bytes(), 0o600);
+
+    let path_stats = expand_all_sorted(posix_fs);
+    let expected_snapshot = Snapshot::from_path_stats(store.clone(), &digester, path_stats.clone())
+      .wait()
+      .unwrap();
+    assert_eq!(
+      expected_snapshot,
+      Snapshot::from_digest(store, expected_snapshot.digest)
+        .wait()
+        .unwrap(),
+    );
+  }
+
   #[test]
   fn snapshot_recursive_directories_including_empty() {
     let (store, dir, posix_fs, digester) = setup();
@@ -535,7 +601,7 @@ mod tests {
         .unwrap(),
         232,
       ),
-      path_stats: unsorted_path_stats,
+      path_stats: sorted_path_stats,
     }
   );
 }
diff --git a/src/rust/engine/fs/src/store.rs b/src/rust/engine/fs/src/store.rs
index 1e1f5a2fbcf..412be087044 100644
--- a/src/rust/engine/fs/src/store.rs
+++ b/src/rust/engine/fs/src/store.rs
@@ -461,51 +461,22 @@ impl Store {
   }

   pub fn expand_directory(&self, digest: Digest) -> BoxFuture<HashMap<Digest, EntryType>, String> {
-    let accumulator = Arc::new(Mutex::new(HashMap::new()));
-
-    self
-      .expand_directory_helper(digest, accumulator.clone())
-      .map(|()| {
-        Arc::try_unwrap(accumulator)
-          .expect("Arc should have been unwrappable")
-          .into_inner()
-      })
-      .to_boxed()
-  }
-
-  fn expand_directory_helper(
-    &self,
-    digest: Digest,
-    accumulator: Arc<Mutex<HashMap<Digest, EntryType>>>,
-  ) -> BoxFuture<(), String> {
-    let store = self.clone();
     self
-      .load_directory(digest)
-      .and_then(move |maybe_directory| match maybe_directory {
-        Some(directory) => {
-          {
-            let mut accumulator = accumulator.lock();
-            accumulator.insert(digest, EntryType::Directory);
-            for file in directory.get_files() {
-              accumulator.insert(try_future!(file.get_digest().into()), EntryType::File);
-            }
-          }
-          future::join_all(
-            directory
-              .get_directories()
-              .iter()
-              .map(move |subdir| {
-                store.clone().expand_directory_helper(
-                  try_future!(subdir.get_digest().into()),
-                  accumulator.clone(),
-                )
-              })
-              .collect::<Vec<_>>(),
-          )
-          .map(|_| ())
-          .to_boxed()
+      .walk(digest, |_, _, digest, directory| {
+        let mut digest_types = Vec::new();
+        digest_types.push((digest, EntryType::Directory));
+        for file in directory.get_files() {
+          digest_types.push((try_future!(file.get_digest().into()), EntryType::File));
         }
-        None => future::err(format!("Could not expand unknown directory: {:?}", digest)).to_boxed(),
+        future::ok(digest_types).to_boxed()
+      })
+      .map(|digest_pairs_per_directory| {
+        Iterator::flatten(
+          digest_pairs_per_directory
+            .into_iter()
+            .map(|v| v.into_iter()),
+        )
+        .collect()
       })
       .to_boxed()
   }
@@ -579,78 +550,124 @@ impl Store {
   }

   // Returns files sorted by their path.
-  pub fn contents_for_directory(
-    &self,
-    directory: &bazel_protos::remote_execution::Directory,
-  ) -> BoxFuture<Vec<FileContent>, String> {
-    let accumulator = Arc::new(Mutex::new(HashMap::new()));
+  pub fn contents_for_directory(&self, digest: Digest) -> BoxFuture<Vec<FileContent>, String> {
     self
-      .contents_for_directory_helper(directory, PathBuf::new(), accumulator.clone())
-      .map(|()| {
-        let map = Arc::try_unwrap(accumulator).unwrap().into_inner();
-        let mut vec: Vec<FileContent> = map
-          .into_iter()
-          .map(|(path, content)| FileContent { path, content })
-          .collect();
+      .walk(digest, |store, path_so_far, _, directory| {
+        future::join_all(
+          directory
+            .get_files()
+            .iter()
+            .map(move |file_node| {
+              let path = path_so_far.join(file_node.get_name());
+              store
+                .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b)
+                .and_then(move |maybe_bytes| {
+                  maybe_bytes
+                    .ok_or_else(|| format!("Couldn't find file contents for {:?}", path))
+                    .map(|content| FileContent { path, content })
+                })
+                .to_boxed()
+            })
+            .collect::<Vec<_>>(),
+        )
+        .to_boxed()
+      })
+      .map(|file_contents_per_directory| {
+        let mut vec = Iterator::flatten(
+          file_contents_per_directory
+            .into_iter()
+            .map(|v| v.into_iter()),
+        )
+        .collect::<Vec<_>>();
         vec.sort_by(|l, r| l.path.cmp(&r.path));
         vec
       })
      .to_boxed()
   }

-  // Assumes that all fingerprints it encounters are valid.
-  fn contents_for_directory_helper(
+  ///
+  /// Given the Digest for a Directory, recursively walk the Directory, calling the given function
+  /// with the path so far, and the new Directory.
+  ///
+  /// The recursive walk will proceed concurrently, so if order matters, a caller should sort the
+  /// output after the call.
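+  ///
+  /// For example (an illustrative sketch, not part of this change), collecting the Digest of
+  /// every Directory in a tree:
+  ///
+  ///   store.walk(digest, |_, _, digest, _| future::ok(digest).to_boxed())
+  ///
+  /// produces a BoxFuture<Vec<Digest>, String>, with one entry per Directory visited in
+  /// (unsorted) traversal order.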
+  ///
+  pub fn walk<
+    T: Send + 'static,
+    F: Fn(
+        &Store,
+        &PathBuf,
+        Digest,
+        &bazel_protos::remote_execution::Directory,
+      ) -> BoxFuture<T, String>
+      + Send
+      + Sync
+      + 'static,
+  >(
     &self,
-    directory: &bazel_protos::remote_execution::Directory,
+    digest: Digest,
+    f: F,
+  ) -> BoxFuture<Vec<T>, String> {
+    let f = Arc::new(f);
+    let accumulator = Arc::new(Mutex::new(Vec::new()));
+    self
+      .walk_helper(digest, PathBuf::new(), f, accumulator.clone())
+      .map(|()| {
+        Arc::try_unwrap(accumulator)
+          .unwrap_or_else(|_| panic!("walk_helper violated its contract."))
+          .into_inner()
+      })
+      .to_boxed()
+  }
+
+  fn walk_helper<
+    T: Send + 'static,
+    F: Fn(
+        &Store,
+        &PathBuf,
+        Digest,
+        &bazel_protos::remote_execution::Directory,
+      ) -> BoxFuture<T, String>
+      + Send
+      + Sync
+      + 'static,
+  >(
+    &self,
+    digest: Digest,
     path_so_far: PathBuf,
-    contents_wrapped: Arc<Mutex<HashMap<PathBuf, Bytes>>>,
+    f: Arc<F>,
+    accumulator: Arc<Mutex<Vec<T>>>,
   ) -> BoxFuture<(), String> {
-    let contents_wrapped_copy = contents_wrapped.clone();
-    let path_so_far_copy = path_so_far.clone();
-    let store_copy = self.clone();
-    let file_futures = future::join_all(
-      directory
-        .get_files()
-        .iter()
-        .map(move |file_node| {
-          let path = path_so_far_copy.join(file_node.get_name());
-          let contents_wrapped_copy = contents_wrapped_copy.clone();
-          store_copy
-            .load_file_bytes_with(try_future!(file_node.get_digest().into()), |b| b)
-            .and_then(move |maybe_bytes| {
-              maybe_bytes
-                .ok_or_else(|| format!("Couldn't find file contents for {:?}", path))
-                .map(move |bytes| {
-                  let mut contents = contents_wrapped_copy.lock();
-                  contents.insert(path, bytes);
-                })
-            })
-            .to_boxed()
-        })
-        .collect::<Vec<_>>(),
-    );
     let store = self.clone();
-    let dir_futures = future::join_all(
-      directory
-        .get_directories()
-        .iter()
-        .map(move |dir_node| {
-          let digest = try_future!(dir_node.get_digest().into());
-          let path = path_so_far.join(dir_node.get_name());
-          let store = store.clone();
-          let contents_wrapped = contents_wrapped.clone();
-          store
-            .load_directory(digest)
-            .and_then(move |maybe_dir| {
-              maybe_dir
-                .ok_or_else(|| format!("Could not find sub-directory with digest {:?}", digest))
+    self
+      .load_directory(digest)
+      .and_then(move |maybe_directory| match maybe_directory {
+        Some(directory) => {
+          let result_for_directory = f(&store, &path_so_far, digest, &directory);
+          result_for_directory
+            .and_then(move |r| {
+              {
+                let mut accumulator = accumulator.lock();
+                accumulator.push(r);
+              }
+              future::join_all(
+                directory
+                  .get_directories()
+                  .iter()
+                  .map(move |dir_node| {
+                    let subdir_digest = try_future!(dir_node.get_digest().into());
+                    let path = path_so_far.join(dir_node.get_name());
+                    store.walk_helper(subdir_digest, path, f.clone(), accumulator.clone())
+                  })
+                  .collect::<Vec<_>>(),
+              )
+              .map(|_| ())
             })
-            .and_then(move |dir| store.contents_for_directory_helper(&dir, path, contents_wrapped))
             .to_boxed()
-        })
-        .collect::<Vec<_>>(),
-    );
-    file_futures.join(dir_futures).map(|(_, _)| ()).to_boxed()
+        }
+        None => future::err(format!("Could not walk unknown directory: {:?}", digest)).to_boxed(),
+      })
+      .to_boxed()
   }
 }

@@ -3501,7 +3518,7 @@ mod tests {
     let store = new_local_store(store_dir.path());

     let file_contents = store
-      .contents_for_directory(&TestDirectory::empty().directory())
+      .contents_for_directory(TestDirectory::empty().digest())
      .wait()
      .expect("Getting FileContents");

@@ -3535,7 +3552,7 @@ mod tests {
      .expect("Error saving catnip file bytes");

     let file_contents = store
-      .contents_for_directory(&recursive_testdir.directory())
+      .contents_for_directory(recursive_testdir.digest())
      .wait()
.expect("Getting FileContents");
diff --git a/src/rust/engine/graph/src/entry.rs b/src/rust/engine/graph/src/entry.rs
index 09bf6ebc056..1abe49f3dfa 100644
--- a/src/rust/engine/graph/src/entry.rs
+++ b/src/rust/engine/graph/src/entry.rs
@@ -574,6 +574,6 @@ impl<N: Node> Entry<N> {
       Some(Err(ref x)) => format!("{:?}", x),
       None => "<None>".to_string(),
     };
-    format!("{} == {}", self.node.content().format(), state).replace("\"", "\\\"")
+    format!("{} == {}", self.node.content(), state).replace("\"", "\\\"")
   }
 }
diff --git a/src/rust/engine/graph/src/lib.rs b/src/rust/engine/graph/src/lib.rs
index f0c554ff2b1..497c1c13db8 100644
--- a/src/rust/engine/graph/src/lib.rs
+++ b/src/rust/engine/graph/src/lib.rs
@@ -370,7 +370,7 @@ impl<N: Node> InnerGraph<N> {
     let format = |eid: EntryId, depth: usize, is_last: bool| -> String {
       let entry = self.unsafe_entry_for_id(eid);
       let indent = "  ".repeat(depth);
-      let output = format!("{}Computing {}", indent, entry.node().format());
+      let output = format!("{}Computing {}", indent, entry.node());
       if is_last {
         format!(
           "{}\n{}  {}",
@@ -430,7 +430,7 @@ impl<N: Node> InnerGraph<N> {

       if deps.peek().is_none() {
         // If the entry has no running deps, it is a leaf. Emit it.
-        res.insert(self.unsafe_entry_for_id(id).node().format(), duration);
+        res.insert(format!("{}", self.unsafe_entry_for_id(id).node()), duration);
         if res.len() >= k {
           break;
         }
@@ -1055,10 +1055,6 @@ mod tests {
       }
     }

-    fn format(&self) -> String {
-      format!("{:?}", self)
-    }
-
     fn digest(_result: Self::Item) -> Option<hashing::Digest> {
       None
     }
@@ -1068,6 +1064,12 @@ mod tests {
     }
   }

+  impl std::fmt::Display for TNode {
+    fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+      write!(f, "{:?}", self)
+    }
+  }
+
   impl TNode {
     ///
     /// Validates the given TNode output. Both node ids and context ids should increase left to
diff --git a/src/rust/engine/graph/src/node.rs b/src/rust/engine/graph/src/node.rs
index b7a34812ecd..19f9f91124b 100644
--- a/src/rust/engine/graph/src/node.rs
+++ b/src/rust/engine/graph/src/node.rs
@@ -1,7 +1,7 @@
 // Copyright 2018 Pants project contributors (see CONTRIBUTORS.md).
 // Licensed under the Apache License, Version 2.0 (see LICENSE).

-use std::fmt::Debug;
+use std::fmt::{Debug, Display};
 use std::hash::Hash;

 use boxfuture::BoxFuture;
@@ -21,7 +21,7 @@ pub type EntryId = stable_graph::NodeIndex<u32>;
 ///
 /// Note that it is assumed that Nodes are very cheap to clone.
 ///
-pub trait Node: Clone + Debug + Eq + Hash + Send + 'static {
+pub trait Node: Clone + Debug + Display + Eq + Hash + Send + 'static {
   type Context: NodeContext<Node = Self>;

   type Item: Clone + Debug + Eq + Send + 'static;
@@ -29,9 +29,6 @@ pub trait Node: Clone + Debug + Eq + Hash + Send + 'static {

   fn run(self, context: Self::Context) -> BoxFuture<Self::Item, Self::Error>;

-  // TODO: Use a `Display` bound instead.
-  fn format(&self) -> String;
-
   ///
   /// If the given Node output represents an FS operation, returns its Digest.
/// diff --git a/src/rust/engine/process_execution/Cargo.toml b/src/rust/engine/process_execution/Cargo.toml index affa75eeae1..2bbef270ffb 100644 --- a/src/rust/engine/process_execution/Cargo.toml +++ b/src/rust/engine/process_execution/Cargo.toml @@ -13,28 +13,18 @@ bytes = "0.4.5" digest = "0.8" fs = { path = "../fs" } futures = "^0.1.16" -# TODO: Switch to a release once https://github.com/alexcrichton/futures-timer/pull/11 and https://github.com/alexcrichton/futures-timer/pull/12 merge -futures-timer = { git = "https://github.com/pantsbuild/futures-timer", rev = "0b747e565309a58537807ab43c674d8951f9e5a0" } -h2 = "0.1.13" +grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "4dfafe9355dc996d7d0702e7386a6fedcd9734c0", default_features = false, features = ["protobuf-codec"] } hashing = { path = "../hashing" } -http = "0.1" log = "0.4" -parking_lot = "0.6" -prost = "0.4" -prost-types = "0.4" protobuf = { version = "2.0.4", features = ["with-bytes"] } resettable = { path = "../resettable" } sha2 = "0.8" tempfile = "3" +# TODO: Switch to a release once https://github.com/alexcrichton/futures-timer/pull/11 and https://github.com/alexcrichton/futures-timer/pull/12 merge +futures-timer = { git = "https://github.com/pantsbuild/futures-timer", rev = "0b747e565309a58537807ab43c674d8951f9e5a0" } time = "0.1.40" -tokio = "0.1.14" tokio-codec = "0.1" -tokio-connect = { git = "https://github.com/pantsbuild/tokio-connect.git", rev = "f7ad1ca437973d6e24037ac6f7d5ef1013833c0b" } tokio-process = "0.2.1" -tower-grpc = { git = "https://github.com/pantsbuild/tower-grpc.git", rev = "ef19f2e1715f415ecb699e8f17f5845ad2b45daf" } -tower-h2 = { git = "https://github.com/pantsbuild/tower-h2.git", rev = "44b0efb4983b769283efd5b2a3bc3decbf7c33de" } -tower-http = { git = "https://github.com/pantsbuild/tower-http.git", rev = "56049ee7f31d4f6c549f5d1d5fbbfd7937df3d00" } -tower-util = { git = "https://github.com/pantsbuild/tower.git", rev = "7b61c1fc1992c1df684fd3f179644ef0ca9bfa4c" } [dev-dependencies] mock = { path = "../testutil/mock" } diff --git a/src/rust/engine/process_execution/bazel_protos/Cargo.toml b/src/rust/engine/process_execution/bazel_protos/Cargo.toml index 6890cef290f..7cd044c7471 100644 --- a/src/rust/engine/process_execution/bazel_protos/Cargo.toml +++ b/src/rust/engine/process_execution/bazel_protos/Cargo.toml @@ -10,7 +10,6 @@ bytes = "0.4.5" futures = "^0.1.16" grpcio = { git = "https://github.com/pantsbuild/grpc-rs.git", rev = "4dfafe9355dc996d7d0702e7386a6fedcd9734c0", default_features = false, features = ["protobuf-codec"] } hashing = { path = "../../hashing" } -log = "0.4" prost = "0.4" prost-derive = "0.4" prost-types = "0.4" diff --git a/src/rust/engine/process_execution/bazel_protos/build.rs b/src/rust/engine/process_execution/bazel_protos/build.rs index 23c441961f3..0c9e136ad9a 100644 --- a/src/rust/engine/process_execution/bazel_protos/build.rs +++ b/src/rust/engine/process_execution/bazel_protos/build.rs @@ -176,11 +176,9 @@ fn generate_for_tower(thirdpartyprotobuf: &Path, out_dir: PathBuf) { .enable_server(true) .enable_client(true) .build( - &[ - PathBuf::from("build/bazel/remote/execution/v2/remote_execution.proto"), - PathBuf::from("google/rpc/code.proto"), - PathBuf::from("google/rpc/error_details.proto"), - ], + &[PathBuf::from( + "build/bazel/remote/execution/v2/remote_execution.proto", + )], &std::fs::read_dir(&thirdpartyprotobuf) .unwrap() .into_iter() diff --git a/src/rust/engine/process_execution/bazel_protos/src/conversions.rs 
b/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
index e46767fb1e6..f017612f321 100644
--- a/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
+++ b/src/rust/engine/process_execution/bazel_protos/src/conversions.rs
@@ -1,7 +1,4 @@
-use bytes::BytesMut;
 use hashing;
-use log::error;
-use prost::Message;

 impl<'a> From<&'a hashing::Digest> for crate::remote_execution::Digest {
   fn from(d: &hashing::Digest) -> Self {
@@ -21,31 +18,19 @@ impl<'a> From<&'a hashing::Digest> for crate::build::bazel::remote::execution::v
   }
 }

-impl<'a> From<&'a crate::remote_execution::Digest> for Result<hashing::Digest, String> {
-  fn from(d: &crate::remote_execution::Digest) -> Self {
+impl<'a> From<&'a super::remote_execution::Digest> for Result<hashing::Digest, String> {
+  fn from(d: &super::remote_execution::Digest) -> Self {
     hashing::Fingerprint::from_hex_string(d.get_hash())
       .map_err(|err| format!("Bad fingerprint in Digest {:?}: {:?}", d.get_hash(), err))
       .map(|fingerprint| hashing::Digest(fingerprint, d.get_size_bytes() as usize))
   }
 }

-impl<'a> From<&'a crate::build::bazel::remote::execution::v2::Digest>
-  for Result<hashing::Digest, String>
-{
-  fn from(d: &crate::build::bazel::remote::execution::v2::Digest) -> Self {
-    hashing::Fingerprint::from_hex_string(&d.hash)
-      .map_err(|err| format!("Bad fingerprint in Digest {:?}: {:?}", d.hash, err))
-      .map(|fingerprint| hashing::Digest(fingerprint, d.size_bytes as usize))
-  }
-}
-
 impl From<crate::google::longrunning::Operation> for crate::operations::Operation {
   fn from(op: crate::google::longrunning::Operation) -> Self {
     let mut dst = Self::new();
     dst.set_name(op.name);
-    if let Some(metadata) = op.metadata {
-      dst.set_metadata(prost_any_to_gcprio_any(metadata));
-    }
+    dst.set_metadata(prost_any_to_gcprio_any(op.metadata.unwrap()));
     dst.set_done(op.done);
     match op.result {
       Some(crate::google::longrunning::operation::Result::Response(response)) => {
@@ -60,87 +45,6 @@ impl From<crate::google::longrunning::Operation> for crate::operations::Operation
   }
 }

-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl From<crate::remote_execution::ExecuteRequest>
-  for crate::build::bazel::remote::execution::v2::ExecuteRequest
-{
-  fn from(req: crate::remote_execution::ExecuteRequest) -> Self {
-    if req.has_execution_policy() || req.has_results_cache_policy() {
-      panic!("Can't convert ExecuteRequest protos with execution policy or results cache policy");
-    }
-    let digest: Result<hashing::Digest, String> = req.get_action_digest().into();
-    Self {
-      action_digest: Some((&digest.expect("Bad digest converting ExecuteRequest proto")).into()),
-      instance_name: req.instance_name,
-      execution_policy: None,
-      results_cache_policy: None,
-      skip_cache_lookup: req.skip_cache_lookup,
-    }
-  }
-}
-
-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl From<crate::build::bazel::remote::execution::v2::ExecuteRequest>
-  for crate::remote_execution::ExecuteRequest
-{
-  fn from(req: crate::build::bazel::remote::execution::v2::ExecuteRequest) -> Self {
-    if req.execution_policy.is_some() || req.results_cache_policy.is_some() {
-      panic!("Can't convert ExecuteRequest protos with execution policy or results cache policy");
-    }
-    let digest: Result<hashing::Digest, String> = (&req
-      .action_digest
-      .expect("Missing digest converting ExecuteRequest proto"))
-      .into();
-
-    let mut ret = Self::new();
-    ret.set_action_digest((&digest.expect("Bad digest converting ExecuteRequest proto")).into());
-    ret.set_instance_name(req.instance_name);
-    ret.set_skip_cache_lookup(req.skip_cache_lookup);
-    ret
-  }
-}
-
-// This should only be used in test contexts. It should be deleted when the mock systems use tower.
-impl Into<grpcio::RpcStatus> for crate::google::rpc::Status {
-  fn into(self) -> grpcio::RpcStatus {
-    let mut buf = BytesMut::with_capacity(self.encoded_len());
-    self.encode(&mut buf).unwrap();
-    grpcio::RpcStatus {
-      status: self.code.into(),
-      details: None,
-      status_proto_bytes: Some(buf.to_vec()),
-    }
-  }
-}
-
-// TODO: Use num_enum or similar here when TryInto is stable.
-pub fn code_from_i32(i: i32) -> crate::google::rpc::Code {
-  use crate::google::rpc::Code::*;
-  match i {
-    0 => Ok,
-    1 => Cancelled,
-    2 => Unknown,
-    3 => InvalidArgument,
-    4 => DeadlineExceeded,
-    5 => NotFound,
-    6 => AlreadyExists,
-    7 => PermissionDenied,
-    8 => ResourceExhausted,
-    9 => FailedPrecondition,
-    10 => Aborted,
-    11 => OutOfRange,
-    12 => Unimplemented,
-    13 => Internal,
-    14 => Unavailable,
-    15 => DataLoss,
-    16 => Unauthenticated,
-    _ => {
-      error!("Unknown grpc error code: {}, default to Unknown", i);
-      Unknown
-    }
-  }
-}
-
 pub fn prost_any_to_gcprio_any(any: prost_types::Any) -> protobuf::well_known_types::Any {
   let prost_types::Any { type_url, value } = any;
   let mut dst = protobuf::well_known_types::Any::new();
diff --git a/src/rust/engine/process_execution/bazel_protos/src/lib.rs b/src/rust/engine/process_execution/bazel_protos/src/lib.rs
index df86e9d656a..0bfd0d1bcae 100644
--- a/src/rust/engine/process_execution/bazel_protos/src/lib.rs
+++ b/src/rust/engine/process_execution/bazel_protos/src/lib.rs
@@ -13,6 +13,5 @@ mod gen_for_tower;
 pub use crate::gen_for_tower::*;

 mod conversions;
-pub use crate::conversions::code_from_i32;
 mod verification;
 pub use crate::verification::verify_directory_canonical;
diff --git a/src/rust/engine/process_execution/src/remote.rs b/src/rust/engine/process_execution/src/remote.rs
index b0272cc5828..bebf1ee61f0 100644
--- a/src/rust/engine/process_execution/src/remote.rs
+++ b/src/rust/engine/process_execution/src/remote.rs
@@ -1,5 +1,7 @@
 use std::collections::HashMap;
+use std::mem::drop;
 use std::path::PathBuf;
+use std::sync::Arc;
 use std::time::{Duration, Instant};

 use bazel_protos;
@@ -9,11 +11,10 @@
 use digest::{Digest as DigestTrait, FixedOutput};
 use fs::{self, File, PathStat, Store};
 use futures::{future, Future, Stream};
 use futures_timer::Delay;
+use grpcio;
 use hashing::{Digest, Fingerprint};
 use log::{debug, trace, warn};
-use parking_lot::Mutex;
-use prost::Message;
-use protobuf::{self, Message as GrpcioMessage, ProtobufEnum};
+use protobuf::{self, Message, ProtobufEnum};
 use sha2::Sha256;
 use time;

@@ -21,36 +22,26 @@ use super::{ExecuteProcessRequest, ExecutionStats, FallibleExecuteProcessResult}
 use std;
 use std::cmp::min;
-use std::net::SocketAddr;
-use std::net::ToSocketAddrs;
-use tokio::executor::DefaultExecutor;
-use tokio::net::tcp::{ConnectFuture, TcpStream};
-use tower_grpc::Request;
-use tower_h2::client;
-use tower_util::MakeService;
-
 // Environment variable which is exclusively used for cache key invalidation.
 // This may not be specified in an ExecuteProcessRequest, and may be populated only by the
 // CommandRunner.
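+// For illustration only (the version string is hypothetical): with cache_key_gen_version set to
+// Some("v2"), the CommandRunner injects PANTS_CACHE_KEY_GEN_VERSION=v2 into the remote request,
+// forcing a different cache key without otherwise changing the process's observable behavior.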
const CACHE_KEY_GEN_VERSION_ENV_VAR_NAME: &str = "PANTS_CACHE_KEY_GEN_VERSION"; -type Connection = tower_http::add_origin::AddOrigin< - tower_h2::client::Connection, ->; - -struct Clients { - execution_client: - Mutex>, - operations_client: Mutex>, +#[derive(Debug)] +enum OperationOrStatus { + Operation(bazel_protos::operations::Operation), + Status(bazel_protos::status::Status), } #[derive(Clone)] -#[allow(clippy::type_complexity)] pub struct CommandRunner { cache_key_gen_version: Option, instance_name: Option, authorization_header: Option, - clients: futures::future::Shared>, + channel: grpcio::Channel, + env: Arc, + execution_client: Arc, + operations_client: Arc, store: Store, futures_timer_thread: resettable::Resettable, } @@ -81,36 +72,35 @@ impl CommandRunner { // behavior. fn oneshot_execute( &self, - execute_request: bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest, - ) -> impl Future { - let command_runner = self.clone(); - self - .clients - .clone() - .map_err(|err| format!("Error getting execution_client: {}", err)) - .and_then(move |clients| { - clients - .execution_client - .lock() - .execute(command_runner.make_request(execute_request)) - .map_err(towergrpcerror_to_string) - .and_then(|response_stream| { - response_stream - .into_inner() - .take(1) - .into_future() - .map_err(|err| { - format!( - "Error getting response from remote process execution {:?}", - err - ) - }) - .and_then(|(resp, stream)| { - std::mem::drop(stream); - resp.ok_or_else(|| "Didn't get response from remote process execution".to_owned()) - }) - }) + execute_request: &Arc, + ) -> BoxFuture { + let stream = try_future!(self + .execution_client + .execute_opt(&execute_request, self.call_option()) + .map_err(rpcerror_to_string)); + stream + .take(1) + .into_future() + // If there was a response, drop the _stream to disconnect so that the server doesn't keep + // the connection alive and continue sending on it. + .map(|(maybe_operation, stream)| { + drop(stream); + maybe_operation + }) + // If there was an error, drop the _stream to disconnect so that the server doesn't keep the + // connection alive and continue sending on it. + .map_err(|(error, stream)| { + drop(stream); + error + }) + .then(|maybe_operation_result| match maybe_operation_result { + Ok(Some(operation)) => Ok(OperationOrStatus::Operation(operation)), + Ok(None) => { + Err("Didn't get proper stream response from server during remote execution".to_owned()) + } + Err(err) => rpcerror_to_status_or_string(err).map(OperationOrStatus::Status), }) + .to_boxed() } } @@ -135,7 +125,7 @@ impl super::CommandRunner for CommandRunner { /// TODO: Request jdk_home be created if set. 
/// fn run(&self, req: ExecuteProcessRequest) -> BoxFuture { - let clients = self.clients.clone(); + let operations_client = self.operations_client.clone(); let store = self.store.clone(); let execute_request_result = @@ -154,6 +144,8 @@ impl super::CommandRunner for CommandRunner { Ok((action, command, execute_request)) => { let command_runner = self.clone(); let command_runner2 = self.clone(); + let command_runner3 = self.clone(); + let execute_request = Arc::new(execute_request); let execute_request2 = execute_request.clone(); let futures_timer_thread = self.futures_timer_thread.clone(); @@ -174,7 +166,7 @@ impl super::CommandRunner for CommandRunner { command ); command_runner - .oneshot_execute(execute_request) + .oneshot_execute(&execute_request) .join(future::ok(history)) }) .and_then(move |(operation, history)| { @@ -187,9 +179,9 @@ impl super::CommandRunner for CommandRunner { let execute_request2 = execute_request2.clone(); let store = store.clone(); - let clients = clients.clone(); + let operations_client = operations_client.clone(); let command_runner2 = command_runner2.clone(); - let command_runner3 = command_runner2.clone(); + let command_runner3 = command_runner3.clone(); let futures_timer_thread = futures_timer_thread.clone(); let f = command_runner2.extract_execute_response(operation, &mut history); f.map(future::Loop::Break).or_else(move |value| { @@ -220,7 +212,7 @@ impl super::CommandRunner for CommandRunner { let mut history = history; history.current_attempt += summary; command_runner2 - .oneshot_execute(execute_request) + .oneshot_execute(&execute_request) .join(future::ok(history)) }) // Reset `iter_num` on `MissingDigests` @@ -228,11 +220,9 @@ impl super::CommandRunner for CommandRunner { .to_boxed() } ExecutionError::NotFinished(operation_name) => { - let operation_name2 = operation_name.clone(); - let operation_request = - bazel_protos::google::longrunning::GetOperationRequest { - name: operation_name.clone(), - }; + let mut operation_request = + bazel_protos::operations::GetOperationRequest::new(); + operation_request.set_name(operation_name.clone()); let backoff_period = min( CommandRunner::BACKOFF_MAX_WAIT_MILLIS, @@ -261,23 +251,19 @@ impl super::CommandRunner for CommandRunner { ) }) .and_then(move |_| { - clients - .map_err(|err| format!("{}", err)) - .and_then(move |clients| { - clients - .operations_client - .lock() - .get_operation(command_runner3.make_request(operation_request)) - .map(|r| r.into_inner()) - .or_else(move |err| { - rpcerror_recover_cancelled(operation_name2, err) - }) - .map_err(towergrpcerror_to_string) - }) - .map(move |operation| { - future::Loop::Continue((history, operation, iter_num + 1)) - }) - .to_boxed() + future::done( + operations_client + .get_operation_opt(&operation_request, command_runner3.call_option()) + .or_else(move |err| { + rpcerror_recover_cancelled(operation_request.take_name(), err) + }) + .map(OperationOrStatus::Operation) + .map_err(rpcerror_to_string), + ) + .map(move |operation| { + future::Loop::Continue((history, operation, iter_num + 1)) + }) + .to_boxed() }) .to_boxed() } @@ -315,84 +301,57 @@ impl CommandRunner { address: &str, cache_key_gen_version: Option, instance_name: Option, + root_ca_certs: Option>, oauth_bearer_token: Option, + thread_count: usize, store: Store, futures_timer_thread: resettable::Resettable, - ) -> Result { - struct Dst(SocketAddr); - - impl tokio_connect::Connect for Dst { - type Connected = TcpStream; - type Error = ::std::io::Error; - type Future = ConnectFuture; - - fn 
connect(&self) -> Self::Future { - TcpStream::connect(&self.0) + ) -> CommandRunner { + let env = Arc::new(grpcio::Environment::new(thread_count)); + let channel = { + let builder = grpcio::ChannelBuilder::new(env.clone()); + if let Some(_root_ca_certs) = root_ca_certs { + panic!("Sorry, we dropped secure grpc support until we can either make openssl link properly, or switch to tower"); + /* + let creds = grpcio::ChannelCredentialsBuilder::new() + .root_cert(root_ca_certs) + .build(); + builder.secure_connect(address, creds) + */ + } else { + builder.connect(address) } - } - - // TODO: Support https - let uri: http::Uri = format!("http://{}", address) - .parse() - .map_err(|err| format!("Failed to parse remote server address URL: {}", err))?; - let socket_addr = address - .to_socket_addrs() - .map_err(|err| format!("Failed to resolve remote socket address URL: {}", err))? - .next() - .ok_or_else(|| "Remote server address resolved to no addresses".to_owned())?; - let conn = client::Connect::new( - Dst(socket_addr), - h2::client::Builder::default(), - DefaultExecutor::current(), - ) - .make_service(()) - .map_err(|err| format!("Error connecting to remote execution server: {}", err)) - .and_then(move |conn| { - tower_http::add_origin::Builder::new() - .uri(uri) - .build(conn) - .map_err(|err| { - format!( - "Failed to add origin for remote execution server: {:?}", - err - ) - }) - .map(Mutex::new) - }); - let clients = conn - .map(|conn| { - let conn = conn.lock(); - let execution_client = Mutex::new( - bazel_protos::build::bazel::remote::execution::v2::client::Execution::new(conn.clone()), - ); - let operations_client = Mutex::new( - bazel_protos::google::longrunning::client::Operations::new(conn.clone()), - ); - Clients { - execution_client, - operations_client, - } - }) - .to_boxed() - .shared(); - Ok(CommandRunner { + }; + let execution_client = Arc::new(bazel_protos::remote_execution_grpc::ExecutionClient::new( + channel.clone(), + )); + let operations_client = Arc::new(bazel_protos::operations_grpc::OperationsClient::new( + channel.clone(), + )); + + CommandRunner { cache_key_gen_version, instance_name, authorization_header: oauth_bearer_token.map(|t| format!("Bearer {}", t)), - clients, + channel, + env, + execution_client, + operations_client, store, futures_timer_thread, - }) + } } - fn make_request(&self, message: T) -> Request { - let mut request = Request::new(message); + fn call_option(&self) -> grpcio::CallOption { + let mut call_option = grpcio::CallOption::default(); if let Some(ref authorization_header) = self.authorization_header { - request - .metadata_mut() - .insert("authorization", authorization_header.parse().unwrap()); + let mut builder = grpcio::MetadataBuilder::with_capacity(1); + builder + .add_str("authorization", &authorization_header) + .unwrap(); + call_option = call_option.headers(builder.build()); } - request + call_option } fn store_proto_locally( @@ -411,113 +370,102 @@ impl CommandRunner { fn extract_execute_response( &self, - operation: bazel_protos::google::longrunning::Operation, + operation_or_status: OperationOrStatus, attempts: &mut ExecutionHistory, ) -> BoxFuture { - trace!("Got operation response: {:?}", operation); - - if !operation.done { - return future::err(ExecutionError::NotFinished(operation.name)).to_boxed(); - } - let execute_response = if let Some(result) = operation.result { - match result { - bazel_protos::google::longrunning::operation::Result::Error(ref status) => { - return 
future::err(ExecutionError::Fatal(format_error(status))).to_boxed(); - } - bazel_protos::google::longrunning::operation::Result::Response(ref any) => try_future!( - bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse::decode(&any.value) - .map_err(|e| ExecutionError::Fatal(format!("Invalid ExecuteResponse: {:?}", e))) - ), - } - } else { - return future::err(ExecutionError::Fatal( - "Operation finished but no response supplied".to_string(), - )) - .to_boxed(); - }; + trace!("Got operation response: {:?}", operation_or_status); - trace!("Got (nested) execute response: {:?}", execute_response); - - if let Some(ref result) = execute_response.result { - if let Some(ref metadata) = result.execution_metadata { - let enqueued = timespec_from(&metadata.queued_timestamp); - let worker_start = timespec_from(&metadata.worker_start_timestamp); - let input_fetch_start = timespec_from(&metadata.input_fetch_start_timestamp); - let input_fetch_completed = timespec_from(&metadata.input_fetch_completed_timestamp); - let execution_start = timespec_from(&metadata.execution_start_timestamp); - let execution_completed = timespec_from(&metadata.execution_completed_timestamp); - let output_upload_start = timespec_from(&metadata.output_upload_start_timestamp); - let output_upload_completed = timespec_from(&metadata.output_upload_completed_timestamp); - - match (worker_start - enqueued).to_std() { - Ok(duration) => attempts.current_attempt.remote_queue = Some(duration), - Err(err) => warn!("Got negative remote queue time: {}", err), - } - match (input_fetch_completed - input_fetch_start).to_std() { - Ok(duration) => attempts.current_attempt.remote_input_fetch = Some(duration), - Err(err) => warn!("Got negative remote input fetch time: {}", err), + let status = match operation_or_status { + OperationOrStatus::Operation(mut operation) => { + if !operation.get_done() { + return future::err(ExecutionError::NotFinished(operation.take_name())).to_boxed(); } - match (execution_completed - execution_start).to_std() { - Ok(duration) => attempts.current_attempt.remote_execution = Some(duration), - Err(err) => warn!("Got negative remote execution time: {}", err), + if operation.has_error() { + return future::err(ExecutionError::Fatal(format_error(&operation.get_error()))) + .to_boxed(); } - match (output_upload_completed - output_upload_start).to_std() { - Ok(duration) => attempts.current_attempt.remote_output_store = Some(duration), - Err(err) => warn!("Got negative remote output store time: {}", err), + if !operation.has_response() { + return future::err(ExecutionError::Fatal( + "Operation finished but no response supplied".to_string(), + )) + .to_boxed(); } - attempts.current_attempt.was_cache_hit = execute_response.cached_result; - } - } - let mut execution_attempts = std::mem::replace(&mut attempts.attempts, vec![]); - execution_attempts.push(attempts.current_attempt); - - let maybe_result = execute_response.result; + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + try_future!(execute_response + .merge_from_bytes(operation.get_response().get_value()) + .map_err(|e| ExecutionError::Fatal(format!("Invalid ExecuteResponse: {:?}", e)))); + trace!("Got (nested) execute response: {:?}", execute_response); + + if execute_response.get_result().has_execution_metadata() { + let metadata = execute_response.get_result().get_execution_metadata(); + let enqueued = timespec_from(metadata.get_queued_timestamp()); + let worker_start = 
timespec_from(metadata.get_worker_start_timestamp()); + let input_fetch_start = timespec_from(metadata.get_input_fetch_start_timestamp()); + let input_fetch_completed = timespec_from(metadata.get_input_fetch_completed_timestamp()); + let execution_start = timespec_from(metadata.get_execution_start_timestamp()); + let execution_completed = timespec_from(metadata.get_execution_completed_timestamp()); + let output_upload_start = timespec_from(metadata.get_output_upload_start_timestamp()); + let output_upload_completed = + timespec_from(metadata.get_output_upload_completed_timestamp()); + + match (worker_start - enqueued).to_std() { + Ok(duration) => attempts.current_attempt.remote_queue = Some(duration), + Err(err) => warn!("Got negative remote queue time: {}", err), + } + match (input_fetch_completed - input_fetch_start).to_std() { + Ok(duration) => attempts.current_attempt.remote_input_fetch = Some(duration), + Err(err) => warn!("Got negative remote input fetch time: {}", err), + } + match (execution_completed - execution_start).to_std() { + Ok(duration) => attempts.current_attempt.remote_execution = Some(duration), + Err(err) => warn!("Got negative remote execution time: {}", err), + } + match (output_upload_completed - output_upload_start).to_std() { + Ok(duration) => attempts.current_attempt.remote_output_store = Some(duration), + Err(err) => warn!("Got negative remote output store time: {}", err), + } + attempts.current_attempt.was_cache_hit = execute_response.cached_result; + } - let status = execute_response - .status - .unwrap_or_else(|| bazel_protos::google::rpc::Status { - code: bazel_protos::google::rpc::Code::Ok.into(), - message: String::new(), - details: vec![], - }); - if status.code == bazel_protos::google::rpc::Code::Ok.into() { - if let Some(result) = maybe_result { - return self - .extract_stdout(&result) - .join(self.extract_stderr(&result)) - .join(self.extract_output_files(&result)) - .and_then(move |((stdout, stderr), output_directory)| { - Ok(FallibleExecuteProcessResult { - stdout: stdout, - stderr: stderr, - exit_code: result.exit_code, - output_directory: output_directory, - execution_attempts: execution_attempts, + let mut execution_attempts = std::mem::replace(&mut attempts.attempts, vec![]); + execution_attempts.push(attempts.current_attempt); + + let status = execute_response.take_status(); + if grpcio::RpcStatusCode::from(status.get_code()) == grpcio::RpcStatusCode::Ok { + return self + .extract_stdout(&execute_response) + .join(self.extract_stderr(&execute_response)) + .join(self.extract_output_files(&execute_response)) + .and_then(move |((stdout, stderr), output_directory)| { + Ok(FallibleExecuteProcessResult { + stdout: stdout, + stderr: stderr, + exit_code: execute_response.get_result().get_exit_code(), + output_directory: output_directory, + execution_attempts: execution_attempts, + }) }) - }) - .to_boxed(); - } else { - return futures::future::err(ExecutionError::Fatal( - "No result found on ExecuteResponse".to_owned(), - )) - .to_boxed(); + .to_boxed(); + } + status } - } + OperationOrStatus::Status(status) => status, + }; - match bazel_protos::code_from_i32(status.code) { - bazel_protos::google::rpc::Code::Ok => unreachable!(), - bazel_protos::google::rpc::Code::FailedPrecondition => { - if status.details.len() != 1 { + match grpcio::RpcStatusCode::from(status.get_code()) { + grpcio::RpcStatusCode::Ok => unreachable!(), + grpcio::RpcStatusCode::FailedPrecondition => { + if status.get_details().len() != 1 { return 
future::err(ExecutionError::Fatal(format!( "Received multiple details in FailedPrecondition ExecuteResponse's status field: {:?}", - status.details + status.get_details() ))) .to_boxed(); } - let details = &status.details[0]; + let details = status.get_details().get(0).unwrap(); let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new(); - if details.type_url + if details.get_type_url() != format!( "type.googleapis.com/{}", precondition_failure.descriptor().full_name() @@ -526,12 +474,13 @@ impl CommandRunner { return future::err(ExecutionError::Fatal(format!( "Received FailedPrecondition, but didn't know how to resolve it: {},\ protobuf type {}", - status.message, details.type_url + status.get_message(), + details.get_type_url() ))) .to_boxed(); } try_future!(precondition_failure - .merge_from_bytes(&details.value) + .merge_from_bytes(details.get_value()) .map_err(|e| ExecutionError::Fatal(format!( "Error deserializing FailedPrecondition proto: {:?}", e @@ -579,7 +528,8 @@ impl CommandRunner { } code => future::err(ExecutionError::Fatal(format!( "Error from remote execution: {:?}: {:?}", - code, status.message + code, + status.get_message() ))) .to_boxed(), } @@ -588,10 +538,11 @@ impl CommandRunner { fn extract_stdout( &self, - result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult, + execute_response: &bazel_protos::remote_execution::ExecuteResponse, ) -> BoxFuture { - if let Some(ref stdout_digest) = result.stdout_digest { - let stdout_digest_result: Result = stdout_digest.into(); + if execute_response.get_result().has_stdout_digest() { + let stdout_digest_result: Result = + execute_response.get_result().get_stdout_digest().into(); let stdout_digest = try_future!(stdout_digest_result .map_err(|err| ExecutionError::Fatal(format!("Error extracting stdout: {}", err)))); self @@ -613,7 +564,7 @@ impl CommandRunner { }) .to_boxed() } else { - let stdout_raw = Bytes::from(result.stdout_raw.as_slice()); + let stdout_raw = Bytes::from(execute_response.get_result().get_stdout_raw()); let stdout_copy = stdout_raw.clone(); self .store @@ -628,10 +579,11 @@ impl CommandRunner { fn extract_stderr( &self, - result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult, + execute_response: &bazel_protos::remote_execution::ExecuteResponse, ) -> BoxFuture { - if let Some(ref stderr_digest) = result.stderr_digest { - let stderr_digest_result: Result = stderr_digest.into(); + if execute_response.get_result().has_stderr_digest() { + let stderr_digest_result: Result = + execute_response.get_result().get_stderr_digest().into(); let stderr_digest = try_future!(stderr_digest_result .map_err(|err| ExecutionError::Fatal(format!("Error extracting stderr: {}", err)))); self @@ -653,7 +605,7 @@ impl CommandRunner { }) .to_boxed() } else { - let stderr_raw = Bytes::from(result.stderr_raw.as_slice()); + let stderr_raw = Bytes::from(execute_response.get_result().get_stderr_raw()); let stderr_copy = stderr_raw.clone(); self .store @@ -668,16 +620,21 @@ impl CommandRunner { fn extract_output_files( &self, - result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult, + execute_response: &bazel_protos::remote_execution::ExecuteResponse, ) -> BoxFuture { // Get Digests of output Directories. // Then we'll make a Directory for the output files, and merge them. 
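// Illustration only (not part of this patch): why the loop below walks
// `rsplit('/')` over each output directory's path. Components arrive
// innermost-first, so each step wraps the tree digest in one more parent
// directory; strings stand in for Directory protos and digests here.
#[cfg(test)]
fn nesting_sketch() {
  let mut wrapped = String::from("<tree-digest>");
  for component in "pets/cats".rsplit('/') {
    // First pass wraps with "cats", second with "pets".
    wrapped = format!("[{} -> {}]", component, wrapped);
  }
  assert_eq!(wrapped, "[pets -> [cats -> <tree-digest>]]");
}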
- let output_directories = result.output_directories.clone(); - let mut directory_digests = Vec::with_capacity(output_directories.len() + 1); + let mut directory_digests = + Vec::with_capacity(execute_response.get_result().get_output_directories().len() + 1); + // TODO: Maybe take rather than clone + let output_directories = execute_response + .get_result() + .get_output_directories() + .to_owned(); for dir in output_directories { - let digest_result: Result = (&dir.tree_digest.unwrap()).into(); + let digest_result: Result = dir.get_tree_digest().into(); let mut digest = future::done(digest_result).to_boxed(); - for component in dir.path.rsplit('/') { + for component in dir.get_path().rsplit('/') { let component = component.to_owned(); let store = self.store.clone(); digest = digest @@ -700,21 +657,19 @@ impl CommandRunner { // Make a directory for the files let mut path_map = HashMap::new(); - let output_files = result.output_files.clone(); - let path_stats_result: Result, String> = output_files - .into_iter() + let path_stats_result: Result, String> = execute_response + .get_result() + .get_output_files() + .iter() .map(|output_file| { - let output_file_path_buf = PathBuf::from(output_file.path); - let digest = output_file - .digest - .ok_or_else(|| "No digest on remote execution output file".to_string())?; - let digest: Result = (&digest).into(); + let output_file_path_buf = PathBuf::from(output_file.get_path()); + let digest: Result = output_file.get_digest().into(); path_map.insert(output_file_path_buf.clone(), digest?); Ok(PathStat::file( output_file_path_buf.clone(), File { path: output_file_path_buf, - is_executable: output_file.is_executable, + is_executable: output_file.get_is_executable(), }, )) }) @@ -782,7 +737,7 @@ fn make_execute_request( ( bazel_protos::remote_execution::Action, bazel_protos::remote_execution::Command, - bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest, + bazel_protos::remote_execution::ExecuteRequest, ), String, > { @@ -851,43 +806,38 @@ fn make_execute_request( action.set_command_digest((&digest(&command)?).into()); action.set_input_root_digest((&req.input_files).into()); - let execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest { - action_digest: Some((&digest(&action)?).into()), - skip_cache_lookup: false, - instance_name: instance_name.clone().unwrap_or_default(), - execution_policy: None, - results_cache_policy: None, - }; + let mut execute_request = bazel_protos::remote_execution::ExecuteRequest::new(); + if let Some(instance_name) = instance_name { + execute_request.set_instance_name(instance_name.clone()); + } + execute_request.set_action_digest((&digest(&action)?).into()); Ok((action, command, execute_request)) } -fn format_error(error: &bazel_protos::google::rpc::Status) -> String { - let error_code_enum = bazel_protos::code::Code::from_i32(error.code); +fn format_error(error: &bazel_protos::status::Status) -> String { + let error_code_enum = bazel_protos::code::Code::from_i32(error.get_code()); let error_code = match error_code_enum { Some(x) => format!("{:?}", x), - None => format!("{:?}", error.code), + None => format!("{:?}", error.get_code()), }; - format!("{}: {}", error_code, error.message) + format!("{}: {}", error_code, error.get_message()) } /// /// If the given operation represents a cancelled request, recover it into /// ExecutionError::NotFinished. 
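// Illustration only (not part of this patch): the recover-or-propagate shape that
// `rpcerror_recover_cancelled` implements below. `DemoError` is a hypothetical enum
// standing in for grpcio::Error so the sketch is self-contained.
#[derive(Debug, PartialEq)]
enum DemoError {
  Cancelled,
  Other(&'static str),
}

fn recover_cancelled_sketch(operation_name: String, err: DemoError) -> Result<String, DemoError> {
  if err == DemoError::Cancelled {
    // A cancelled poll just means "not finished yet": report success with the
    // operation name so the caller loops around and polls again.
    return Ok(operation_name);
  }
  Err(err)
}
// e.g. recover_cancelled_sketch("cat".to_owned(), DemoError::Cancelled) == Ok("cat".to_owned())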
/// -fn rpcerror_recover_cancelled( +fn rpcerror_recover_cancelled( operation_name: String, - err: tower_grpc::Error, -) -> Result> { + err: grpcio::Error, +) -> Result { // If the error represented cancellation, return an Operation for the given Operation name. match &err { - &tower_grpc::Error::Grpc(ref status) if status.code() == tower_grpc::Code::Cancelled => { - return Ok(bazel_protos::google::longrunning::Operation { - name: operation_name, - done: false, - metadata: None, - result: None, - }); + &grpcio::Error::RpcFailure(ref rs) if rs.status == grpcio::RpcStatusCode::Cancelled => { + let mut next_operation = bazel_protos::operations::Operation::new(); + next_operation.set_name(operation_name); + return Ok(next_operation); } _ => {} } @@ -895,21 +845,41 @@ fn rpcerror_recover_cancelled( Err(err) } -fn towergrpcerror_to_string(error: tower_grpc::Error) -> String { +fn rpcerror_to_status_or_string( + error: grpcio::Error, +) -> Result { match error { - tower_grpc::Error::Grpc(status) => { - let error_message = if status.error_message() == "" { - "[no message]" - } else { - &status.error_message() - }; - format!("{:?}: {}", status.code(), error_message) + grpcio::Error::RpcFailure(grpcio::RpcStatus { + status_proto_bytes: Some(status_proto_bytes), + .. + }) => { + let mut status_proto = bazel_protos::status::Status::new(); + status_proto.merge_from_bytes(&status_proto_bytes).unwrap(); + Ok(status_proto) } - tower_grpc::Error::Inner(v) => format!("{:?}", v), + grpcio::Error::RpcFailure(grpcio::RpcStatus { + status, details, .. + }) => Err(format!( + "{:?}: {:?}", + status, + details.unwrap_or_else(|| "[no message]".to_string()) + )), + err => Err(format!("{:?}", err)), + } +} + +fn rpcerror_to_string(error: grpcio::Error) -> String { + match error { + grpcio::Error::RpcFailure(status) => format!( + "{:?}: {:?}", + status.status, + status.details.unwrap_or_else(|| "[no message]".to_string()) + ), + err => format!("{:?}", err), } } -fn digest(message: &dyn GrpcioMessage) -> Result { +fn digest(message: &dyn Message) -> Result { let bytes = message.write_to_bytes().map_err(|e| format!("{:?}", e))?; let mut hasher = Sha256::default(); @@ -921,25 +891,20 @@ fn digest(message: &dyn GrpcioMessage) -> Result { )) } -fn timespec_from(timestamp: &Option) -> time::Timespec { - if let Some(timestamp) = timestamp { - time::Timespec::new(timestamp.seconds, timestamp.nanos) - } else { - time::Timespec::new(0, 0) - } +fn timespec_from(timestamp: &protobuf::well_known_types::Timestamp) -> time::Timespec { + time::Timespec::new(timestamp.seconds, timestamp.nanos) } #[cfg(test)] mod tests { use bazel_protos; - use bytes::{Bytes, BytesMut}; + use bytes::Bytes; use fs; use futures::Future; + use grpcio; use hashing::{Digest, Fingerprint}; use mock; - use prost::Message; - use prost_types; - use protobuf::{self, ProtobufEnum}; + use protobuf::{self, Message, ProtobufEnum}; use tempfile::TempDir; use testutil::data::{TestData, TestDirectory}; use testutil::{as_bytes, owned_string_vec}; @@ -1024,19 +989,17 @@ mod tests { ); want_action.set_input_root_digest((&input_directory.digest()).into()); - let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest { - action_digest: Some( - (&Digest( - Fingerprint::from_hex_string( - "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4", - ) - .unwrap(), - 140, - )) - .into(), - ), - ..Default::default() - }; + let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new(); + 
want_execute_request.set_action_digest( + (&Digest( + Fingerprint::from_hex_string( + "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4", + ) + .unwrap(), + 140, + )) + .into(), + ); assert_eq!( super::make_execute_request(&req, &None, &None), @@ -1112,21 +1075,6 @@ mod tests { .into(), ); - let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest { - action_digest: Some( - (&Digest( - Fingerprint::from_hex_string( - "844c929423444f3392e0dcc89ebf1febbfdf3a2e2fcab7567cc474705a5385e4", - ) - .unwrap(), - 140, - )) - .into(), - ), - instance_name: "dark-tower".to_owned(), - ..Default::default() - }; - assert_eq!( super::make_execute_request(&req, &Some("dark-tower".to_owned()), &None), Ok((want_action, want_command, want_execute_request)) @@ -1194,19 +1142,17 @@ mod tests { ); want_action.set_input_root_digest((&input_directory.digest()).into()); - let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest { - action_digest: Some( - (&Digest( - Fingerprint::from_hex_string( - "0ee5d4c8ac12513a87c8d949c6883ac533a264d30215126af71a9028c4ab6edf", - ) - .unwrap(), - 140, - )) - .into(), - ), - ..Default::default() - }; + let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new(); + want_execute_request.set_action_digest( + (&Digest( + Fingerprint::from_hex_string( + "0ee5d4c8ac12513a87c8d949c6883ac533a264d30215126af71a9028c4ab6edf", + ) + .unwrap(), + 140, + )) + .into(), + ); assert_eq!( super::make_execute_request(&req, &None, &Some("meep".to_owned())), @@ -1251,19 +1197,17 @@ mod tests { ); want_action.set_input_root_digest((&input_directory.digest()).into()); - let want_execute_request = bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest { - action_digest: Some( - (&Digest( - Fingerprint::from_hex_string( - "b1fb7179ce496995a4e3636544ec000dca1b951f1f6216493f6c7608dc4dd910", - ) - .unwrap(), - 140, - )) - .into(), - ), - ..Default::default() - }; + let mut want_execute_request = bazel_protos::remote_execution::ExecuteRequest::new(); + want_execute_request.set_action_digest( + (&Digest( + Fingerprint::from_hex_string( + "b1fb7179ce496995a4e3636544ec000dca1b951f1f6216493f6c7608dc4dd910", + ) + .unwrap(), + 140, + )) + .into(), + ); assert_eq!( super::make_execute_request(&req, &None, &None), @@ -1301,7 +1245,7 @@ mod tests { let error = run_command_remote(mock_server.address(), execute_request).expect_err("Want Err"); assert_eq!( error, - "InvalidArgument: Did not expect this request".to_string() + "InvalidArgument: \"Did not expect this request\"".to_string() ); } @@ -1444,19 +1388,17 @@ mod tests { ) .expect("Failed to make store"); - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let cmd_runner = CommandRunner::new( &mock_server.address(), None, None, None, + None, + 1, store, timer_thread, - ) - .unwrap(); - let result = rt.block_on(cmd_runner.run(echo_roland_request())).unwrap(); - rt.shutdown_now().wait().unwrap(); + ); + let result = cmd_runner.run(echo_roland_request()).wait().unwrap(); assert_eq!( result.without_execution_attempts(), FallibleExecuteProcessResult { @@ -1621,17 +1563,21 @@ mod tests { vec![ make_incomplete_operation(&op_name), MockOperation::new({ - bazel_protos::google::longrunning::Operation { - name: op_name.clone(), - done: true, - result: Some( - bazel_protos::google::longrunning::operation::Result::Response(prost_types::Any { - type_url: "build.bazel.remote.execution.v2.ExecuteResponse".to_string(), - value: vec![0x00, 0x00, 0x00], - 
}), - ), - ..Default::default() - } + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(op_name.clone()); + op.set_done(true); + op.set_response({ + let mut response_wrapper = protobuf::well_known_types::Any::new(); + response_wrapper.set_type_url(format!( + "type.googleapis.com/{}", + bazel_protos::remote_execution::ExecuteResponse::new() + .descriptor() + .full_name() + )); + response_wrapper.set_value(vec![0x00, 0x00, 0x00]); + response_wrapper + }); + op }), ], )) @@ -1652,20 +1598,18 @@ mod tests { super::make_execute_request(&execute_request, &None, &None) .unwrap() .2, - vec![MockOperation::new( - bazel_protos::google::longrunning::Operation { - name: op_name.clone(), - done: true, - result: Some(bazel_protos::google::longrunning::operation::Result::Error( - bazel_protos::google::rpc::Status { - code: bazel_protos::code::Code::INTERNAL.value(), - message: "Something went wrong".to_string(), - details: vec![], - }, - )), - ..Default::default() - }, - )], + vec![MockOperation::new({ + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(op_name.to_string()); + op.set_done(true); + op.set_error({ + let mut error = bazel_protos::status::Status::new(); + error.set_code(bazel_protos::code::Code::INTERNAL.value()); + error.set_message("Something went wrong".to_string()); + error + }); + op + })], )) }; @@ -1688,17 +1632,17 @@ mod tests { .2, vec![ make_incomplete_operation(&op_name), - MockOperation::new(bazel_protos::google::longrunning::Operation { - name: op_name.clone(), - done: true, - result: Some(bazel_protos::google::longrunning::operation::Result::Error( - bazel_protos::google::rpc::Status { - code: bazel_protos::code::Code::INTERNAL.value(), - message: "Something went wrong".to_string(), - details: vec![], - }, - )), - ..Default::default() + MockOperation::new({ + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(op_name.to_string()); + op.set_done(true); + op.set_error({ + let mut error = bazel_protos::status::Status::new(); + error.set_code(bazel_protos::code::Code::INTERNAL.value()); + error.set_message("Something went wrong".to_string()); + error + }); + op }), ], )) @@ -1721,14 +1665,12 @@ mod tests { super::make_execute_request(&execute_request, &None, &None) .unwrap() .2, - vec![MockOperation::new( - bazel_protos::google::longrunning::Operation { - name: op_name.clone(), - done: true, - result: None, - ..Default::default() - }, - )], + vec![MockOperation::new({ + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(op_name.to_string()); + op.set_done(true); + op + })], )) }; @@ -1751,11 +1693,11 @@ mod tests { .2, vec![ make_incomplete_operation(&op_name), - MockOperation::new(bazel_protos::google::longrunning::Operation { - name: op_name.clone(), - done: true, - result: None, - ..Default::default() + MockOperation::new({ + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(op_name.to_string()); + op.set_done(true); + op }), ], )) @@ -1822,23 +1764,21 @@ mod tests { .wait() .expect("Saving directory bytes to store"); - let mut rt = tokio::runtime::Runtime::new().unwrap(); - - let result = rt.block_on( - CommandRunner::new( - &mock_server.address(), - None, - None, - None, - store, - timer_thread, - ) - .unwrap() - .run(cat_roland_request()), - ); - rt.shutdown_now().wait().unwrap(); + let result = CommandRunner::new( + &mock_server.address(), + None, + None, + None, + None, + 1, + store, + timer_thread, + ) + .run(cat_roland_request()) + .wait() + .unwrap(); assert_eq!( - 
result.unwrap().without_execution_attempts(), + result.without_execution_attempts(), FallibleExecuteProcessResult { stdout: roland.bytes(), stderr: Bytes::from(""), @@ -1862,9 +1802,17 @@ mod tests { let mock_server = { let op_name = "cat".to_owned(); - let status = make_precondition_failure_status(vec![missing_preconditionfailure_violation( - &roland.digest(), - )]); + let status = grpcio::RpcStatus { + status: grpcio::RpcStatusCode::FailedPrecondition, + details: None, + status_proto_bytes: Some( + make_precondition_failure_status(vec![missing_preconditionfailure_violation( + &roland.digest(), + )]) + .write_to_bytes() + .unwrap(), + ), + }; mock::execution_server::TestServer::new(mock::execution_server::MockExecution::new( op_name.clone(), @@ -1912,19 +1860,18 @@ mod tests { .wait() .expect("Saving file bytes to store"); - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on( - CommandRunner::new( - &mock_server.address(), - None, - None, - None, - store, - timer_thread, - ) - .unwrap() - .run(cat_roland_request()), - ); + let result = CommandRunner::new( + &mock_server.address(), + None, + None, + None, + None, + 1, + store, + timer_thread, + ) + .run(cat_roland_request()) + .wait(); assert_eq!( result, Ok(FallibleExecuteProcessResult { @@ -1981,31 +1928,27 @@ mod tests { ) .expect("Failed to make store"); - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on( - CommandRunner::new( - &mock_server.address(), - None, - None, - None, - store, - timer_thread, - ) - .unwrap() - .run(cat_roland_request()), - ); - rt.shutdown_now().wait().unwrap(); - let error = result.expect_err("Want error"); + let error = CommandRunner::new( + &mock_server.address(), + None, + None, + None, + None, + 1, + store, + timer_thread, + ) + .run(cat_roland_request()) + .wait() + .expect_err("Want error"); assert_contains(&error, &format!("{}", missing_digest.0)); } #[test] fn format_error_complete() { - let error = bazel_protos::google::rpc::Status { - code: bazel_protos::code::Code::CANCELLED.value(), - message: "Oops, oh well!".to_string(), - details: vec![], - }; + let mut error = bazel_protos::status::Status::new(); + error.set_code(bazel_protos::code::Code::CANCELLED.value()); + error.set_message("Oops, oh well!".to_string()); assert_eq!( super::format_error(&error), "CANCELLED: Oops, oh well!".to_string() @@ -2014,11 +1957,9 @@ mod tests { #[test] fn extract_execute_response_unknown_code() { - let error = bazel_protos::google::rpc::Status { - code: 555, - message: "Oops, oh well!".to_string(), - details: vec![], - }; + let mut error = bazel_protos::status::Status::new(); + error.set_code(555); + error.set_message("Oops, oh well!".to_string()); assert_eq!( super::format_error(&error), "555: Oops, oh well!".to_string() @@ -2035,35 +1976,28 @@ mod tests { execution_attempts: vec![], }; - let response = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse { - result: Some( - bazel_protos::build::bazel::remote::execution::v2::ActionResult { - exit_code: want_result.exit_code, - stdout_raw: want_result.stdout.to_vec(), - stderr_raw: want_result.stderr.to_vec(), - output_files: vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "cats/roland".to_string(), - digest: Some((&TestData::roland().digest()).into()), - is_executable: false, - }, - ], - ..Default::default() - }, - ), - ..Default::default() - }; - - let operation = bazel_protos::google::longrunning::Operation { - name: "cat".to_owned(), - done: true, - result: 
Some( - bazel_protos::google::longrunning::operation::Result::Response( - make_any_prost_executeresponse(&response), - ), - ), - ..Default::default() - }; + let mut output_file = bazel_protos::remote_execution::OutputFile::new(); + output_file.set_path("cats/roland".into()); + output_file.set_digest((&TestData::roland().digest()).into()); + output_file.set_is_executable(false); + let mut output_files = protobuf::RepeatedField::new(); + output_files.push(output_file); + + let mut operation = bazel_protos::operations::Operation::new(); + operation.set_name("cat".to_owned()); + operation.set_done(true); + operation.set_response(make_any_proto(&{ + let mut response = bazel_protos::remote_execution::ExecuteResponse::new(); + response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(want_result.exit_code); + result.set_stdout_raw(Bytes::from(want_result.stdout.clone())); + result.set_stderr_raw(Bytes::from(want_result.stderr.clone())); + result.set_output_files(output_files); + result + }); + response + })); assert_eq!( extract_execute_response(operation) @@ -2076,11 +2010,9 @@ mod tests { #[test] fn extract_execute_response_pending() { let operation_name = "cat".to_owned(); - let operation = bazel_protos::google::longrunning::Operation { - name: operation_name.clone(), - done: false, - ..Default::default() - }; + let mut operation = bazel_protos::operations::Operation::new(); + operation.set_name(operation_name.clone()); + operation.set_done(false); assert_eq!( extract_execute_response(operation), @@ -2115,10 +2047,11 @@ mod tests { fn extract_execute_response_missing_other_things() { let missing = vec![ missing_preconditionfailure_violation(&TestData::roland().digest()), - bazel_protos::google::rpc::precondition_failure::Violation { - type_: "MISSING".to_string(), - subject: "monkeys".to_string(), - description: "".to_string(), + { + let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new(); + violation.set_field_type("MISSING".to_owned()); + violation.set_subject("monkeys".to_owned()); + violation }, ]; @@ -2135,9 +2068,10 @@ mod tests { #[test] fn extract_execute_response_other_failed_precondition() { - let missing = vec![bazel_protos::google::rpc::precondition_failure::Violation { - type_: "OUT_OF_CAPACITY".to_string(), - ..Default::default() + let missing = vec![{ + let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new(); + violation.set_field_type("OUT_OF_CAPACITY".to_owned()); + violation }]; let operation = make_precondition_failure_operation(missing) @@ -2168,24 +2102,18 @@ mod tests { #[test] fn extract_execute_response_other_status() { - let operation = bazel_protos::google::longrunning::Operation { - name: "cat".to_owned(), - done: true, - result: Some( - bazel_protos::google::longrunning::operation::Result::Response( - make_any_prost_executeresponse( - &bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse { - status: Some(bazel_protos::google::rpc::Status { - code: bazel_protos::google::rpc::Code::PermissionDenied.into(), - ..Default::default() - }), - ..Default::default() - }, - ), - ), - ), - ..Default::default() - }; + let mut operation = bazel_protos::operations::Operation::new(); + operation.set_name("cat".to_owned()); + operation.set_done(true); + operation.set_response(make_any_proto(&{ + let mut response = bazel_protos::remote_execution::ExecuteResponse::new(); + response.set_status({ + let mut status = bazel_protos::status::Status::new(); + 
status.set_code(grpcio::RpcStatusCode::PermissionDenied as i32); + status + }); + response + })); match extract_execute_response(operation) { Err(ExecutionError::Fatal(err)) => assert_contains(&err, "PermissionDenied"), @@ -2314,90 +2242,103 @@ mod tests { #[test] fn extract_output_files_from_response_one_file() { - let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult { - exit_code: 0, - output_files: vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "roland".to_string(), - digest: Some((&TestData::roland().digest()).into()), - is_executable: false, - }, - ], - ..Default::default() - }; + let mut output_file = bazel_protos::remote_execution::OutputFile::new(); + output_file.set_path("roland".into()); + output_file.set_digest((&TestData::roland().digest()).into()); + output_file.set_is_executable(false); + let mut output_files = protobuf::RepeatedField::new(); + output_files.push(output_file); + + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + execute_response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(0); + result.set_output_files(output_files); + result + }); + assert_eq!( - extract_output_files_from_response(&result), + extract_output_files_from_response(&execute_response), Ok(TestDirectory::containing_roland().digest()) ) } #[test] fn extract_output_files_from_response_two_files_not_nested() { - let output_files = vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "roland".to_string(), - digest: Some((&TestData::roland().digest()).into()), - is_executable: false, - }, - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "treats".to_string(), - digest: Some((&TestData::catnip().digest()).into()), - is_executable: false, - }, - ]; - - let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult { - output_files, - ..Default::default() - }; + let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new(); + output_file_1.set_path("roland".into()); + output_file_1.set_digest((&TestData::roland().digest()).into()); + output_file_1.set_is_executable(false); + + let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new(); + output_file_2.set_path("treats".into()); + output_file_2.set_digest((&TestData::catnip().digest()).into()); + output_file_2.set_is_executable(false); + let mut output_files = protobuf::RepeatedField::new(); + output_files.push(output_file_1); + output_files.push(output_file_2); + + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + execute_response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(0); + result.set_output_files(output_files); + result + }); assert_eq!( - extract_output_files_from_response(&result), + extract_output_files_from_response(&execute_response), Ok(TestDirectory::containing_roland_and_treats().digest()) ) } #[test] fn extract_output_files_from_response_two_files_nested() { - let output_files = vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "cats/roland".to_string(), - digest: Some((&TestData::roland().digest()).into()), - is_executable: false, - }, - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "treats".to_string(), - digest: Some((&TestData::catnip().digest()).into()), - is_executable: false, - }, - ]; - - let result = 
bazel_protos::build::bazel::remote::execution::v2::ActionResult { - output_files, - ..Default::default() - }; + let mut output_file_1 = bazel_protos::remote_execution::OutputFile::new(); + output_file_1.set_path("cats/roland".into()); + output_file_1.set_digest((&TestData::roland().digest()).into()); + output_file_1.set_is_executable(false); + + let mut output_file_2 = bazel_protos::remote_execution::OutputFile::new(); + output_file_2.set_path("treats".into()); + output_file_2.set_digest((&TestData::catnip().digest()).into()); + output_file_2.set_is_executable(false); + let mut output_files = protobuf::RepeatedField::new(); + output_files.push(output_file_1); + output_files.push(output_file_2); + + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + execute_response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(0); + result.set_output_files(output_files); + result + }); assert_eq!( - extract_output_files_from_response(&result), + extract_output_files_from_response(&execute_response), Ok(TestDirectory::recursive().digest()) ) } #[test] fn extract_output_files_from_response_just_directory() { - let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult { - exit_code: 0, - output_directories: vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputDirectory { - path: "cats".to_owned(), - tree_digest: Some((&TestDirectory::containing_roland().digest()).into()), - }, - ], - ..Default::default() - }; + let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new(); + output_directory.set_path("cats".into()); + output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into()); + let mut output_directories = protobuf::RepeatedField::new(); + output_directories.push(output_directory); + + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + execute_response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(0); + result.set_output_directories(output_directories); + result + }); assert_eq!( - extract_output_files_from_response(&result), + extract_output_files_from_response(&execute_response), Ok(TestDirectory::nested().digest()) ) } @@ -2408,29 +2349,40 @@ mod tests { // /pets/cats/roland // /pets/dogs/robin - let result = bazel_protos::build::bazel::remote::execution::v2::ActionResult { - output_files: vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputFile { - path: "treats".to_owned(), - digest: Some((&TestData::catnip().digest()).into()), - is_executable: false, - }, - ], - output_directories: vec![ - bazel_protos::build::bazel::remote::execution::v2::OutputDirectory { - path: "pets/cats".to_owned(), - tree_digest: Some((&TestDirectory::containing_roland().digest()).into()), - }, - bazel_protos::build::bazel::remote::execution::v2::OutputDirectory { - path: "pets/dogs".to_owned(), - tree_digest: Some((&TestDirectory::containing_robin().digest()).into()), - }, - ], - ..Default::default() - }; + let mut output_directories = protobuf::RepeatedField::new(); + output_directories.push({ + let mut output_directory = bazel_protos::remote_execution::OutputDirectory::new(); + output_directory.set_path("pets/cats".into()); + output_directory.set_tree_digest((&TestDirectory::containing_roland().digest()).into()); + output_directory + }); + output_directories.push({ + let mut output_directory = 
bazel_protos::remote_execution::OutputDirectory::new(); + output_directory.set_path("pets/dogs".into()); + output_directory.set_tree_digest((&TestDirectory::containing_robin().digest()).into()); + output_directory + }); + + let mut execute_response = bazel_protos::remote_execution::ExecuteResponse::new(); + execute_response.set_result({ + let mut result = bazel_protos::remote_execution::ActionResult::new(); + result.set_exit_code(0); + result.set_output_directories(output_directories); + result.set_output_files({ + let mut output_files = protobuf::RepeatedField::new(); + output_files.push({ + let mut output_file = bazel_protos::remote_execution::OutputFile::new(); + output_file.set_path("treats".into()); + output_file.set_digest((&TestData::catnip().digest()).into()); + output_file + }); + output_files + }); + result + }); assert_eq!( - extract_output_files_from_response(&result), + extract_output_files_from_response(&execute_response), Ok(Digest( Fingerprint::from_hex_string( "639b4b84bb58a9353d49df8122e7987baf038efe54ed035e67910846c865b1e2" @@ -2462,19 +2414,16 @@ mod tests { } fn make_incomplete_operation(operation_name: &str) -> MockOperation { - MockOperation::new(bazel_protos::google::longrunning::Operation { - name: operation_name.to_string(), - done: false, - ..Default::default() - }) + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(operation_name.to_string()); + op.set_done(false); + MockOperation::new(op) } fn make_delayed_incomplete_operation(operation_name: &str, delay: Duration) -> MockOperation { - let op = bazel_protos::google::longrunning::Operation { - name: operation_name.to_string(), - done: false, - ..Default::default() - }; + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(operation_name.to_string()); + op.set_done(false); MockOperation { op: Ok(Some(op)), duration: Some(delay), @@ -2487,74 +2436,72 @@ mod tests { stderr: StderrType, exit_code: i32, ) -> MockOperation { - let (stdout_raw, stdout_digest) = match stdout { - StdoutType::Raw(stdout_raw) => (stdout_raw.as_bytes().to_vec(), None), - StdoutType::Digest(stdout_digest) => (vec![], Some((&stdout_digest).into())), - }; - - let (stderr_raw, stderr_digest) = match stderr { - StderrType::Raw(stderr_raw) => (stderr_raw.as_bytes().to_vec(), None), - StderrType::Digest(stderr_digest) => (vec![], Some((&stderr_digest).into())), - }; - - let response_proto = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse { - result: Some( - bazel_protos::build::bazel::remote::execution::v2::ActionResult { - stdout_raw, - stdout_digest, - stderr_raw, - stderr_digest, - exit_code, - ..Default::default() - }, - ), - ..Default::default() - }; + let mut op = bazel_protos::operations::Operation::new(); + op.set_name(operation_name.to_string()); + op.set_done(true); + op.set_response({ + let mut response_proto = bazel_protos::remote_execution::ExecuteResponse::new(); + response_proto.set_result({ + let mut action_result = bazel_protos::remote_execution::ActionResult::new(); + match stdout { + StdoutType::Raw(stdout_raw) => { + action_result.set_stdout_raw(Bytes::from(stdout_raw)); + } + StdoutType::Digest(stdout_digest) => { + action_result.set_stdout_digest((&stdout_digest).into()); + } + } + match stderr { + StderrType::Raw(stderr_raw) => { + action_result.set_stderr_raw(Bytes::from(stderr_raw)); + } + StderrType::Digest(stderr_digest) => { + action_result.set_stderr_digest((&stderr_digest).into()); + } + } + action_result.set_exit_code(exit_code); + action_result + }); - let 
op = bazel_protos::google::longrunning::Operation { - name: operation_name.to_string(), - done: true, - result: Some( - bazel_protos::google::longrunning::operation::Result::Response( - make_any_prost_executeresponse(&response_proto), - ), - ), - ..Default::default() - }; + let mut response_wrapper = protobuf::well_known_types::Any::new(); + response_wrapper.set_type_url(format!( + "type.googleapis.com/{}", + response_proto.descriptor().full_name() + )); + let response_proto_bytes = response_proto.write_to_bytes().unwrap(); + response_wrapper.set_value(response_proto_bytes); + response_wrapper + }); MockOperation::new(op) } fn make_precondition_failure_operation( - violations: Vec, + violations: Vec, ) -> MockOperation { - let response = bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse { - status: Some(make_precondition_failure_status(violations)), - ..Default::default() - }; - let operation = bazel_protos::google::longrunning::Operation { - name: "cat".to_string(), - done: true, - result: Some( - bazel_protos::google::longrunning::operation::Result::Response( - make_any_prost_executeresponse(&response), - ), - ), - ..Default::default() - }; + let mut operation = bazel_protos::operations::Operation::new(); + operation.set_name("cat".to_owned()); + operation.set_done(true); + operation.set_response(make_any_proto(&{ + let mut response = bazel_protos::remote_execution::ExecuteResponse::new(); + response.set_status(make_precondition_failure_status(violations)); + response + })); MockOperation::new(operation) } fn make_precondition_failure_status( - violations: Vec, - ) -> bazel_protos::google::rpc::Status { - bazel_protos::google::rpc::Status { - code: bazel_protos::google::rpc::Code::FailedPrecondition.into(), - details: vec![make_any_prost_proto( - "google.rpc.PreconditionFailure", - &bazel_protos::google::rpc::PreconditionFailure { violations }, - )], - ..Default::default() - } + violations: Vec, + ) -> bazel_protos::status::Status { + let mut status = bazel_protos::status::Status::new(); + status.set_code(grpcio::RpcStatusCode::FailedPrecondition as i32); + status.mut_details().push(make_any_proto(&{ + let mut precondition_failure = bazel_protos::error_details::PreconditionFailure::new(); + for violation in violations.into_iter() { + precondition_failure.mut_violations().push(violation); + } + precondition_failure + })); + status } fn run_command_remote( @@ -2565,11 +2512,8 @@ mod tests { .file(&TestData::roland()) .directory(&TestDirectory::containing_roland()) .build(); - let mut runtime = tokio::runtime::Runtime::new().unwrap(); let command_runner = create_command_runner(address, &cas); - let result = runtime.block_on(command_runner.run(request)); - runtime.shutdown_now().wait().unwrap(); - result + command_runner.run(request).wait() } fn create_command_runner(address: String, cas: &mock::StubCAS) -> CommandRunner { @@ -2591,8 +2535,7 @@ mod tests { ) .expect("Failed to make store"); - CommandRunner::new(&address, None, None, None, store, timer_thread) - .expect("Failed to make command runner") + CommandRunner::new(&address, None, None, None, None, 1, store, timer_thread) } fn timer_thread() -> resettable::Resettable { @@ -2600,62 +2543,52 @@ mod tests { } fn extract_execute_response( - operation: bazel_protos::google::longrunning::Operation, + operation: bazel_protos::operations::Operation, ) -> Result { let cas = mock::StubCAS::builder() .file(&TestData::roland()) .directory(&TestDirectory::containing_roland()) .build(); - let mut runtime = 
tokio::runtime::Runtime::new().unwrap(); - let command_runner = create_command_runner("127.0.0.1:0".to_owned(), &cas); - let result = runtime.block_on( - command_runner.extract_execute_response(operation, &mut ExecutionHistory::default()), - ); - - runtime.shutdown_now().wait().unwrap(); - result + let command_runner = create_command_runner("".to_owned(), &cas); + command_runner + .extract_execute_response( + super::OperationOrStatus::Operation(operation), + &mut ExecutionHistory::default(), + ) + .wait() } fn extract_output_files_from_response( - result: &bazel_protos::build::bazel::remote::execution::v2::ActionResult, + execute_response: &bazel_protos::remote_execution::ExecuteResponse, ) -> Result { let cas = mock::StubCAS::builder() .file(&TestData::roland()) .directory(&TestDirectory::containing_roland()) .build(); - - let mut runtime = tokio::runtime::Runtime::new().unwrap(); - let command_runner = create_command_runner("127.0.0.1:0".to_owned(), &cas); - let result = runtime.block_on(command_runner.extract_output_files(result)); - runtime.shutdown_now().wait().unwrap(); - result - } - - fn make_any_prost_executeresponse( - message: &bazel_protos::build::bazel::remote::execution::v2::ExecuteResponse, - ) -> prost_types::Any { - make_any_prost_proto("build.bazel.remote.execution.v2.ExecuteResponse", message) + let command_runner = create_command_runner("".to_owned(), &cas); + command_runner + .extract_output_files(&execute_response) + .wait() } - fn make_any_prost_proto(message_name: &str, message: &M) -> prost_types::Any { - let size = message.encoded_len(); - let mut value = BytesMut::with_capacity(size); - message.encode(&mut value).expect("Error serializing proto"); - prost_types::Any { - type_url: format!("type.googleapis.com/{}", message_name), - value: value.to_vec(), - } + fn make_any_proto(message: &dyn Message) -> protobuf::well_known_types::Any { + let mut any = protobuf::well_known_types::Any::new(); + any.set_type_url(format!( + "type.googleapis.com/{}", + message.descriptor().full_name() + )); + any.set_value(message.write_to_bytes().expect("Error serializing proto")); + any } fn missing_preconditionfailure_violation( digest: &Digest, - ) -> bazel_protos::google::rpc::precondition_failure::Violation { + ) -> bazel_protos::error_details::PreconditionFailure_Violation { { - bazel_protos::google::rpc::precondition_failure::Violation { - type_: "MISSING".to_owned(), - subject: format!("blobs/{}/{}", digest.0, digest.1), - ..Default::default() - } + let mut violation = bazel_protos::error_details::PreconditionFailure_Violation::new(); + violation.set_field_type("MISSING".to_owned()); + violation.set_subject(format!("blobs/{}/{}", digest.0, digest.1)); + violation } } diff --git a/src/rust/engine/process_executor/Cargo.toml b/src/rust/engine/process_executor/Cargo.toml index 87453d8b797..d4c45ad05ec 100644 --- a/src/rust/engine/process_executor/Cargo.toml +++ b/src/rust/engine/process_executor/Cargo.toml @@ -15,4 +15,3 @@ hashing = { path = "../hashing" } futures = "^0.1.16" process_execution = { path = "../process_execution" } resettable = { path = "../resettable" } -tokio = "0.1.14" diff --git a/src/rust/engine/process_executor/src/main.rs b/src/rust/engine/process_executor/src/main.rs index 247b40872f1..4946381b05f 100644 --- a/src/rust/engine/process_executor/src/main.rs +++ b/src/rust/engine/process_executor/src/main.rs @@ -92,6 +92,13 @@ fn main() { If unspecified, local execution will be performed.", ), ) + .arg( + Arg::with_name("execution-root-ca-cert-file") + 
.help("Path to file containing root certificate authority certificates for the execution server. If not set, TLS will not be used when connecting to the execution server.") + .takes_value(true) + .long("execution-root-ca-cert-file") + .required(false) + ) .arg( Arg::with_name("execution-oauth-bearer-token-path") .help("Path to file containing oauth bearer token for communication with the execution server. If not set, no authorization will be provided to remote servers.") @@ -283,6 +290,12 @@ fn main() { let runner: Box = match server_arg { Some(address) => { + let root_ca_certs = if let Some(path) = args.value_of("execution-root-ca-cert-file") { + Some(std::fs::read(path).expect("Error reading root CA certs file")) + } else { + None + }; + let oauth_bearer_token = if let Some(path) = args.value_of("execution-oauth-bearer-token-path") { Some(std::fs::read_to_string(path).expect("Error reading oauth bearer token file")) @@ -290,17 +303,16 @@ fn main() { None }; - Box::new( - process_execution::remote::CommandRunner::new( - address, - args.value_of("cache-key-gen-version").map(str::to_owned), - remote_instance_arg, - oauth_bearer_token, - store.clone(), - timer_thread, - ) - .expect("Could not initialize remote execution client"), - ) as Box + Box::new(process_execution::remote::CommandRunner::new( + address, + args.value_of("cache-key-gen-version").map(str::to_owned), + remote_instance_arg, + root_ca_certs, + oauth_bearer_token, + 1, + store.clone(), + timer_thread, + )) as Box } None => Box::new(process_execution::local::CommandRunner::new( store.clone(), @@ -309,18 +321,17 @@ fn main() { true, )) as Box, }; - let mut rt = tokio::runtime::Runtime::new().unwrap(); - let result = rt.block_on(runner.run(request)).unwrap(); + + let result = runner.run(request).wait().expect("Error executing"); if let Some(output) = args.value_of("materialize-output-to").map(PathBuf::from) { - rt.block_on(store.materialize_directory(output, result.output_directory)) + store + .materialize_directory(output, result.output_directory) + .wait() .unwrap(); - }; + } print!("{}", String::from_utf8(result.stdout.to_vec()).unwrap()); eprint!("{}", String::from_utf8(result.stderr.to_vec()).unwrap()); - - rt.shutdown_now().wait().unwrap(); - exit(result.exit_code); } diff --git a/src/rust/engine/resettable/src/lib.rs b/src/rust/engine/resettable/src/lib.rs index 513c4b5be09..a05d990a477 100644 --- a/src/rust/engine/resettable/src/lib.rs +++ b/src/rust/engine/resettable/src/lib.rs @@ -65,19 +65,33 @@ where T: Send + Sync, { pub fn new T + 'static>(make: F) -> Resettable { - let val = (make)(); Resettable { - val: Arc::new(RwLock::new(Some(val))), + val: Arc::new(RwLock::new(None)), make: Arc::new(make), } } + /// + /// Execute f with the value in the Resettable. + /// May lazily initialize the value in the Resettable. + /// + /// TODO Explore the use of parking_lot::RWLock::upgradable_read + /// to avoid reacquiring the lock for initialization. + /// This can be used if we are sure that a deadlock won't happen + /// when two readers are trying to upgrade at the same time. 
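// Illustration only (not part of this patch): the read-then-write double-check that
// `with` performs below, shown with std::sync::RwLock instead of the parking_lot
// lock the engine uses.
#[cfg(test)]
fn lazy_with_sketch() {
  use std::sync::RwLock;

  fn lazy_get<T: Clone>(cell: &RwLock<Option<T>>, make: impl Fn() -> T) -> T {
    {
      let read = cell.read().unwrap();
      if let Some(val) = read.as_ref() {
        return val.clone();
      }
      // The read guard drops here; it must be released before taking the write lock.
    }
    let mut write = cell.write().unwrap();
    if write.is_none() {
      // Re-check under the write lock: a racing caller may have initialized the
      // value between our two lock acquisitions.
      *write = Some(make());
    }
    write.as_ref().unwrap().clone()
  }

  let cell = RwLock::new(None);
  assert_eq!(lazy_get(&cell, || 42), 42);
  // The second call hits the fast read path without re-running `make`.
  assert_eq!(lazy_get(&cell, || unreachable!()), 42);
}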
+ /// pub fn with O>(&self, f: F) -> O { - let val_opt = self.val.read(); - let val = val_opt - .as_ref() - .unwrap_or_else(|| panic!("A Resettable value cannot be used while it is shutdown.")); - f(val) + { + let val_opt = self.val.read(); + if let Some(val) = val_opt.as_ref() { + return f(val); + } + } + let mut val_write_opt = self.val.write(); + if val_write_opt.as_ref().is_none() { + *val_write_opt = Some((self.make)()) + } + f(val_write_opt.as_ref().unwrap()) } /// @@ -89,9 +103,7 @@ where { let mut val = self.val.write(); *val = None; - let t = f(); - *val = Some((self.make)()); - t + f() } } @@ -106,10 +118,6 @@ where /// be sure that dropping it will actually deallocate the resource. /// pub fn get(&self) -> T { - let val_opt = self.val.read(); - let val = val_opt - .as_ref() - .unwrap_or_else(|| panic!("A Resettable value cannot be used while it is shutdown.")); - val.clone() + self.with(T::clone) } } diff --git a/src/rust/engine/src/context.rs b/src/rust/engine/src/context.rs index 1fac707784c..024f9845c3e 100644 --- a/src/rust/engine/src/context.rs +++ b/src/rust/engine/src/context.rs @@ -130,23 +130,23 @@ impl Core { .unwrap_or_else(|e| panic!("Could not initialize Store: {:?}", e)); let underlying_command_runner: Box = match &remote_execution_server { - Some(ref address) => Box::new( - process_execution::remote::CommandRunner::new( - address, - remote_execution_process_cache_namespace.clone(), - remote_instance_name.clone(), - oauth_bearer_token.clone(), - store.clone(), - futures_timer_thread2.clone(), - ) - .expect("Could not initialize remote execution client"), - ) as Box, + Some(ref address) => Box::new(process_execution::remote::CommandRunner::new( + address, + remote_execution_process_cache_namespace.clone(), + remote_instance_name.clone(), + root_ca_certs.clone(), + oauth_bearer_token.clone(), + // Allow for some overhead for bookkeeping threads (if any). + process_execution_parallelism + 2, + store.clone(), + futures_timer_thread2.clone(), + )), None => Box::new(process_execution::local::CommandRunner::new( store.clone(), fs_pool2.clone(), work_dir.clone(), process_execution_cleanup_local_dirs, - )) as Box, + )), }; let command_runner = diff --git a/src/rust/engine/src/externs.rs b/src/rust/engine/src/externs.rs index 2dbaf487835..c81bee215e1 100644 --- a/src/rust/engine/src/externs.rs +++ b/src/rust/engine/src/externs.rs @@ -82,7 +82,6 @@ pub fn store_set>(values: I) -> Value { /// /// The underlying slice _must_ contain an even number of elements. 
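// Illustration only (not part of this patch): consuming the interleaved
// [k1, v1, k2, v2, ...] layout whose even-length contract is documented above.
// `chunks(2)` yields exact pairs once that contract holds.
#[cfg(test)]
fn pairs_sketch() {
  fn pairs<T>(interleaved: &[T]) -> Vec<(&T, &T)> {
    assert!(
      interleaved.len() % 2 == 0,
      "store_dict-style input requires an even number of elements"
    );
    interleaved.chunks(2).map(|kv| (&kv[0], &kv[1])).collect()
  }

  let interleaved = ["roland", "1", "robin", "2"];
  assert_eq!(
    pairs(&interleaved),
    vec![(&"roland", &"1"), (&"robin", &"2")]
  );
}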
 ///
-#[allow(dead_code)]
 pub fn store_dict(keys_and_values_interleaved: &[(Value)]) -> Value {
   if keys_and_values_interleaved.len() % 2 != 0 {
     panic!("store_dict requires an even number of elements");
   }
@@ -121,6 +120,11 @@ pub fn store_i64(val: i64) -> Value {
   with_externs(|e| (e.store_i64)(e.context, val).into())
 }
 
+#[allow(dead_code)]
+pub fn store_f64(val: f64) -> Value {
+  with_externs(|e| (e.store_f64)(e.context, val).into())
+}
+
 #[allow(dead_code)]
 pub fn store_bool(val: bool) -> Value {
   with_externs(|e| (e.store_bool)(e.context, val).into())
@@ -344,6 +348,7 @@ pub struct Externs {
   pub store_bytes: StoreBytesExtern,
   pub store_utf8: StoreUtf8Extern,
   pub store_i64: StoreI64Extern,
+  pub store_f64: StoreF64Extern,
   pub store_bool: StoreBoolExtern,
   pub project_ignoring_type: ProjectIgnoringTypeExtern,
   pub project_multi: ProjectMultiExtern,
@@ -383,6 +388,8 @@ pub type StoreUtf8Extern = extern "C" fn(*const ExternContext, *const u8, u64) -> Handle;
 
 pub type StoreI64Extern = extern "C" fn(*const ExternContext, i64) -> Handle;
 
+pub type StoreF64Extern = extern "C" fn(*const ExternContext, f64) -> Handle;
+
 pub type StoreBoolExtern = extern "C" fn(*const ExternContext, bool) -> Handle;
 
 ///
diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs
index 54a0ed4af0c..cc5d3d5ee98 100644
--- a/src/rust/engine/src/lib.rs
+++ b/src/rust/engine/src/lib.rs
@@ -62,8 +62,8 @@ use crate::externs::{
   Buffer, BufferBuffer, CallExtern, CloneValExtern, CreateExceptionExtern, DropHandlesExtern,
   EqualsExtern, EvalExtern, ExternContext, Externs, GeneratorSendExtern, HandleBuffer,
   IdentifyExtern, LogExtern, ProjectIgnoringTypeExtern, ProjectMultiExtern, PyResult,
-  SatisfiedByExtern, SatisfiedByTypeExtern, StoreBoolExtern, StoreBytesExtern, StoreI64Extern,
-  StoreTupleExtern, StoreUtf8Extern, TypeIdBuffer, TypeToStrExtern, ValToStrExtern,
+  SatisfiedByExtern, SatisfiedByTypeExtern, StoreBoolExtern, StoreBytesExtern, StoreF64Extern,
+  StoreI64Extern, StoreTupleExtern, StoreUtf8Extern, TypeIdBuffer, TypeToStrExtern, ValToStrExtern,
 };
 use crate::handles::Handle;
 use crate::rule_graph::{GraphMaker, RuleGraph};
@@ -119,6 +119,7 @@ pub extern "C" fn externs_set(
   store_bytes: StoreBytesExtern,
   store_utf8: StoreUtf8Extern,
   store_i64: StoreI64Extern,
+  store_f64: StoreF64Extern,
   store_bool: StoreBoolExtern,
   project_ignoring_type: ProjectIgnoringTypeExtern,
   project_multi: ProjectMultiExtern,
@@ -146,6 +147,7 @@ pub extern "C" fn externs_set(
     store_bytes,
     store_utf8,
     store_i64,
+    store_f64,
     store_bool,
     project_ignoring_type,
     project_multi,
@@ -177,10 +179,6 @@ pub extern "C" fn scheduler_create(
   construct_snapshot: Function,
   construct_file_content: Function,
   construct_files_content: Function,
-  construct_path_stat: Function,
-  construct_dir: Function,
-  construct_file: Function,
-  construct_link: Function,
   construct_process_result: Function,
   type_address: TypeConstraint,
   type_path_globs: TypeConstraint,
@@ -224,10 +222,6 @@ pub extern "C" fn scheduler_create(
     construct_snapshot: construct_snapshot,
     construct_file_content: construct_file_content,
     construct_files_content: construct_files_content,
-    construct_path_stat: construct_path_stat,
-    construct_dir: construct_dir,
-    construct_file: construct_file,
-    construct_link: construct_link,
     construct_process_result: construct_process_result,
     address: type_address,
     path_globs: type_path_globs,
@@ -315,7 +309,8 @@
 }
 
 ///
-/// Returns a Handle representing a tuple of tuples of metric name string and metric value int.
+/// Returns a Handle representing a dictionary whose keys are metric name strings and whose
+/// values are metric value ints.
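+/// (The dict is assembled by interleaving each metric name with its value and handing the
+/// flat slice to externs::store_dict.)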
 ///
 #[no_mangle]
 pub extern "C" fn scheduler_metrics(
@@ -327,11 +322,9 @@
     let values = scheduler
       .metrics(session)
       .into_iter()
-      .map(|(metric, value)| {
-        externs::store_tuple(&[externs::store_utf8(metric), externs::store_i64(value)])
-      })
+      .flat_map(|(metric, value)| vec![externs::store_utf8(metric), externs::store_i64(value)])
       .collect::<Vec<_>>();
-    externs::store_tuple(&values).into()
+    externs::store_dict(&values).into()
   })
 })
 }
@@ -666,15 +659,24 @@ pub extern "C" fn capture_snapshots(
   path_globs_and_root_tuple_wrapper: Handle,
 ) -> PyResult {
   let values = externs::project_multi(&path_globs_and_root_tuple_wrapper.into(), "dependencies");
-  let path_globs_and_roots_result: Result<Vec<_>, String> = values
+  let path_globs_and_roots_result = values
     .iter()
     .map(|value| {
       let root = PathBuf::from(externs::project_str(&value, "root"));
       let path_globs =
         nodes::Snapshot::lift_path_globs(&externs::project_ignoring_type(&value, "path_globs"));
-      path_globs.map(|path_globs| (path_globs, root))
+      let digest_hint = {
+        let maybe_digest = externs::project_ignoring_type(&value, "digest_hint");
+        // TODO: Extract a singleton Key for None.
+        if maybe_digest == externs::eval("None").unwrap() {
+          None
+        } else {
+          Some(nodes::lift_digest(&maybe_digest)?)
+        }
+      };
+      path_globs.map(|path_globs| (path_globs, root, digest_hint))
     })
-    .collect();
+    .collect::<Result<Vec<_>, _>>();
 
   let path_globs_and_roots = match path_globs_and_roots_result {
     Ok(v) => v,
@@ -689,13 +691,14 @@ pub extern "C" fn capture_snapshots(
   futures::future::join_all(
     path_globs_and_roots
       .into_iter()
-      .map(|(path_globs, root)| {
+      .map(|(path_globs, root, digest_hint)| {
         let core = core.clone();
         fs::Snapshot::capture_snapshot_from_arbitrary_root(
          core.store(),
          core.fs_pool.clone(),
          root,
          path_globs,
+          digest_hint,
         )
        .map(move |snapshot| nodes::Snapshot::store_snapshot(&core, &snapshot))
      })
diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs
index 3a9587e741e..e1d5259d63d 100644
--- a/src/rust/engine/src/nodes.rs
+++ b/src/rust/engine/src/nodes.rs
@@ -2,6 +2,7 @@
 // Licensed under the Apache License, Version 2.0 (see LICENSE).
 
 use std::collections::{BTreeMap, HashMap};
+use std::fmt::Display;
 use std::io::Write;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
@@ -216,22 +217,11 @@ impl WrappedNode for Select {
         lift_digest(&directory_digest_val).map_err(|str| throw(&str))
       })
       .and_then(move |digest| {
-        let store = context.core.store();
         context
           .core
           .store()
-          .load_directory(digest)
+          .contents_for_directory(digest)
           .map_err(|str| throw(&str))
-          .and_then(move |maybe_directory| {
-            maybe_directory
-              .ok_or_else(|| format!("Could not find directory with digest {:?}", digest))
-              .map_err(|str| throw(&str))
-          })
-          .and_then(move |directory| {
-            store
-              .contents_for_directory(&directory)
-              .map_err(|str| throw(&str))
-          })
           .map(move |files_content| Snapshot::store_files_content(&context, &files_content))
       })
       .to_boxed()
@@ -540,16 +530,24 @@ impl Snapshot {
   }
 
   pub fn store_snapshot(core: &Arc<Core>, item: &fs::Snapshot) -> Value {
-    let path_stats: Vec<_> = item
-      .path_stats
-      .iter()
-      .map(|ps| Self::store_path_stat(core, ps))
-      .collect();
+    let mut files = Vec::new();
+    let mut dirs = Vec::new();
+    for ps in &item.path_stats {
+      match ps {
+        &PathStat::File { ref path, .. } => {
+          files.push(Self::store_path(path));
+        }
+        &PathStat::Dir { ref path, .. } => {
+          dirs.push(Self::store_path(path));
+        }
+      }
+    }
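+    // NB: The Python-side Snapshot constructor now receives two flat tuples of file and
+    // directory paths, instead of a tuple of PathStat wrapper objects.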
     externs::unsafe_call(
       &core.types.construct_snapshot,
       &[
         Self::store_directory(core, &item.digest),
-        externs::store_tuple(&path_stats),
+        externs::store_tuple(&files),
+        externs::store_tuple(&dirs),
       ],
     )
   }
 
@@ -558,28 +556,6 @@ impl Snapshot {
     externs::store_utf8_osstr(item.as_os_str())
   }
 
-  fn store_dir(core: &Arc<Core>, item: &Dir) -> Value {
-    let args = [Self::store_path(item.0.as_path())];
-    externs::unsafe_call(&core.types.construct_dir, &args)
-  }
-
-  fn store_file(core: &Arc<Core>, item: &File) -> Value {
-    let args = [Self::store_path(item.path.as_path())];
-    externs::unsafe_call(&core.types.construct_file, &args)
-  }
-
-  fn store_path_stat(core: &Arc<Core>, item: &PathStat) -> Value {
-    let args = match item {
-      &PathStat::Dir { ref path, ref stat } => {
-        vec![Self::store_path(path), Self::store_dir(core, stat)]
-      }
-      &PathStat::File { ref path, ref stat } => {
-        vec![Self::store_path(path), Self::store_file(core, stat)]
-      }
-    };
-    externs::unsafe_call(&core.types.construct_path_stat, &args)
-  }
-
   fn store_file_content(context: &Context, item: &FileContent) -> Value {
     externs::unsafe_call(
       &context.core.types.construct_file_content,
@@ -1086,27 +1062,6 @@ impl Node for NodeKey {
     }
   }
 
-  fn format(&self) -> String {
-    fn keystr(key: &Key) -> String {
-      externs::key_to_str(&key)
-    }
-    fn typstr(tc: &TypeConstraint) -> String {
-      externs::key_to_str(&tc.0)
-    }
-    // TODO: these should all be converted to fmt::Debug implementations, and then this method can
-    // go away in favor of the auto-derived Debug for this type.
-    match self {
-      &NodeKey::DigestFile(ref s) => format!("DigestFile({:?})", s.0),
-      &NodeKey::DownloadedFile(ref s) => format!("DownloadedFile({:?})", s.0),
-      &NodeKey::ExecuteProcess(ref s) => format!("ExecuteProcess({:?}", s.0),
-      &NodeKey::ReadLink(ref s) => format!("ReadLink({:?})", s.0),
-      &NodeKey::Scandir(ref s) => format!("Scandir({:?})", s.0),
-      &NodeKey::Select(ref s) => format!("Select({}, {})", s.params, typstr(&s.product)),
-      &NodeKey::Task(ref s) => format!("{:?}", s),
-      &NodeKey::Snapshot(ref s) => format!("Snapshot({})", keystr(&s.0)),
-    }
-  }
-
   fn digest(res: NodeResult) -> Option<hashing::Digest> {
     match res {
       NodeResult::Digest(d) => Some(d),
@@ -1128,6 +1083,26 @@
   }
 }
 
+impl Display for NodeKey {
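+  // NB: Replaces the hand-rolled Node::format() method removed above; callers can now rely
+  // on the standard Display machinery (e.g. in debug! format strings).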
+  fn fmt(&self, f: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {
+    match self {
+      &NodeKey::DigestFile(ref s) => write!(f, "DigestFile({:?})", s.0),
+      &NodeKey::DownloadedFile(ref s) => write!(f, "DownloadedFile({:?})", s.0),
+      &NodeKey::ExecuteProcess(ref s) => write!(f, "ExecuteProcess({:?})", s.0),
+      &NodeKey::ReadLink(ref s) => write!(f, "ReadLink({:?})", s.0),
+      &NodeKey::Scandir(ref s) => write!(f, "Scandir({:?})", s.0),
+      &NodeKey::Select(ref s) => write!(
+        f,
+        "Select({}, {})",
+        s.params,
+        externs::key_to_str(&s.product.0)
+      ),
+      &NodeKey::Task(ref s) => write!(f, "{:?}", s),
+      &NodeKey::Snapshot(ref s) => write!(f, "Snapshot({})", externs::key_to_str(&s.0)),
+    }
+  }
+}
+
 impl NodeError for Failure {
   fn invalidated() -> Failure {
     Failure::Invalidated
diff --git a/src/rust/engine/src/scheduler.rs b/src/rust/engine/src/scheduler.rs
index 0ca660865bf..dff4f06bdb2 100644
--- a/src/rust/engine/src/scheduler.rs
+++ b/src/rust/engine/src/scheduler.rs
@@ -13,7 +13,7 @@ use crate::context::{Context, Core};
 use crate::core::{Failure, Params, TypeConstraint, Value};
 use crate::nodes::{NodeKey, Select, Tracer, TryInto, Visualizer};
 use crate::selectors;
-use graph::{EntryId, Graph, InvalidationResult, Node, NodeContext};
+use graph::{EntryId, Graph, InvalidationResult, NodeContext};
 use indexmap::IndexMap;
 use log::{debug, info, warn};
 use parking_lot::Mutex;
@@ -210,10 +207,7 @@ impl Scheduler {
           // Otherwise (if it is a success, some other type of Failure, or if we've run
           // out of retries) recover to complete the join, which will cause the results to
           // propagate to the user.
-          debug!(
-            "Root {} completed.",
-            NodeKey::Select(Box::new(root)).format()
-          );
+          debug!("Root {} completed.", NodeKey::Select(Box::new(root)));
           Ok(other.map(|res| {
             res
               .try_into()
diff --git a/src/rust/engine/src/types.rs b/src/rust/engine/src/types.rs
index c4333deb737..3b517e7919c 100644
--- a/src/rust/engine/src/types.rs
+++ b/src/rust/engine/src/types.rs
@@ -5,10 +5,6 @@ pub struct Types {
   pub construct_snapshot: Function,
   pub construct_file_content: Function,
   pub construct_files_content: Function,
-  pub construct_path_stat: Function,
-  pub construct_dir: Function,
-  pub construct_file: Function,
-  pub construct_link: Function,
   pub construct_process_result: Function,
   pub address: TypeConstraint,
   pub path_globs: TypeConstraint,
diff --git a/src/rust/engine/testutil/mock/src/execution_server.rs b/src/rust/engine/testutil/mock/src/execution_server.rs
index d61c5c89ce5..f4a7973aeb9 100644
--- a/src/rust/engine/testutil/mock/src/execution_server.rs
+++ b/src/rust/engine/testutil/mock/src/execution_server.rs
@@ -22,13 +22,12 @@ use protobuf;
 ///
 #[derive(Clone, Debug)]
 pub struct MockOperation {
-  pub op:
-    Result<Option<bazel_protos::google::longrunning::Operation>, bazel_protos::google::rpc::Status>,
+  pub op: Result<Option<bazel_protos::operations::Operation>, grpcio::RpcStatus>,
   pub duration: Option<Duration>,
 }
 
 impl MockOperation {
-  pub fn new(op: bazel_protos::google::longrunning::Operation) -> MockOperation {
+  pub fn new(op: bazel_protos::operations::Operation) -> MockOperation {
     MockOperation {
       op: Ok(Some(op)),
      duration: None,
@@ -54,12 +53,12 @@ impl MockExecution {
   ///
   pub fn new(
     name: String,
-    execute_request: bazel_protos::build::bazel::remote::execution::v2::ExecuteRequest,
+    execute_request: bazel_protos::remote_execution::ExecuteRequest,
     operation_responses: Vec<MockOperation>,
   ) -> MockExecution {
     MockExecution {
       name: name,
-      execute_request: execute_request.into(),
+      execute_request: execute_request,
       operation_responses: Arc::new(Mutex::new(VecDeque::from(operation_responses))),
     }
   }
@@ -199,9 +198,9 @@ impl MockResponder {
     }
     if let Ok(Some(op)) = op {
       // Complete the channel with the op.
-      sink.success(op.clone().into());
+      sink.success(op.clone());
     } else if let Err(status) = op {
-      sink.fail(status.into());
+      sink.fail(status);
     } else {
       // Cancel the request by dropping the sink.
       drop(sink);
@@ -227,13 +226,13 @@ impl MockResponder {
     if let Ok(Some(op)) = op {
       ctx.spawn(
         sink
-          .send((op.clone().into(), grpcio::WriteFlags::default()))
+          .send((op.clone(), grpcio::WriteFlags::default()))
          .map(|mut stream| stream.close())
          .map(|_| ())
          .map_err(|_| ()),
      )
     } else if let Err(status) = op {
-      sink.fail(status.into());
+      sink.fail(status);
     } else {
       // Cancel the request by dropping the sink.
drop(sink) diff --git a/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD b/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD index 861ff069618..7e66776349a 100644 --- a/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD +++ b/testprojects/src/resources/org/pantsbuild/testproject/buildfile_path/BUILD @@ -1,6 +1,3 @@ target( - dependencies=[ - 'testprojects/src/resources/org/pantsbuild/testproject/ordering:literal' - ], description='''This target exists at path {}.'''.format(buildfile_path()), ) diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD b/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD deleted file mode 100644 index af2b0e8d07f..00000000000 --- a/testprojects/src/resources/org/pantsbuild/testproject/ordering/BUILD +++ /dev/null @@ -1,11 +0,0 @@ -SOURCES=['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd', 'p', 'a', 'n', 't', 's'] - -resources( - name='literal', - sources=SOURCES, -) - -resources( - name='globs', - sources=globs(*SOURCES), -) diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/b b/testprojects/src/resources/org/pantsbuild/testproject/ordering/b deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/d b/testprojects/src/resources/org/pantsbuild/testproject/ordering/d deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/i b/testprojects/src/resources/org/pantsbuild/testproject/ordering/i deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/l b/testprojects/src/resources/org/pantsbuild/testproject/ordering/l deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/n b/testprojects/src/resources/org/pantsbuild/testproject/ordering/n deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/p b/testprojects/src/resources/org/pantsbuild/testproject/ordering/p deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/s b/testprojects/src/resources/org/pantsbuild/testproject/ordering/s deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/t b/testprojects/src/resources/org/pantsbuild/testproject/ordering/t deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/testprojects/src/resources/org/pantsbuild/testproject/ordering/u b/testprojects/src/resources/org/pantsbuild/testproject/ordering/u deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/tests/python/pants_test/BUILD b/tests/python/pants_test/BUILD index fb9a4961b25..af7bd0b8845 100644 --- a/tests/python/pants_test/BUILD +++ b/tests/python/pants_test/BUILD @@ -5,7 +5,6 @@ python_library( name='test_infra', dependencies=[ '3rdparty/python:future', - ':base_test', 'tests/python/pants_test:int-test-for-export', 'tests/python/pants_test:test_base', 'tests/python/pants_test/jvm:jar_task_test_base', @@ -24,28 +23,6 @@ python_library( ) ) -python_library( - name = 'base_test', - sources = ['base_test.py'], - dependencies = [ - '3rdparty/python:future', - 'src/python/pants/base:build_file', - 'src/python/pants/base:build_root', 
- 'src/python/pants/base:cmd_line_spec_parser', - 'src/python/pants/base:deprecated', - 'src/python/pants/base:exceptions', - 'src/python/pants/base:project_tree', - 'src/python/pants/build_graph', - 'src/python/pants/init', - 'src/python/pants/source', - 'src/python/pants/subsystem', - 'src/python/pants/task', - 'src/python/pants/util:dirutil', - 'tests/python/pants_test/base:context_utils', - 'tests/python/pants_test/option/util', - ] -) - python_library( name = 'int-test-for-export', sources = [ @@ -119,7 +96,7 @@ python_tests( name = 'test_maven_layout', sources = ['test_maven_layout.py'], dependencies = [ - ':base_test', + ':test_base', 'src/python/pants/backend/jvm/subsystems:junit', 'src/python/pants/build_graph', 'src/python/pants/source', diff --git a/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py b/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py index c775a60e2a7..d7406a6d71a 100644 --- a/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py +++ b/tests/python/pants_test/backend/codegen/antlr/java/test_antlr_java_gen.py @@ -17,9 +17,16 @@ from pants.base.exceptions import TaskError from pants.build_graph.build_file_aliases import BuildFileAliases from pants.util.dirutil import safe_mkdtemp +from pants.util.objects import datatype from pants_test.jvm.nailgun_task_test_base import NailgunTaskTestBase +class DummyVersionedTarget(datatype(['target', 'results_dir'])): + @property + def current_results_dir(self): + return self.results_dir + + class AntlrJavaGenTest(NailgunTaskTestBase): @classmethod def task_type(cls): @@ -74,11 +81,12 @@ def execute_antlr_test(self, expected_package, target_workdir_fun=None): # Do not use task.workdir here, because when we calculating hash for synthetic target # we need persistent source paths in terms of relative position to build root. target_workdir = target_workdir_fun(self.build_root) + vt = DummyVersionedTarget(target, target_workdir) # Generate code, then create a synthetic target. task.execute_codegen(target, target_workdir) - sources = task._capture_sources(((target, target_workdir),))[0] - syn_target = task._inject_synthetic_target(target, target_workdir, sources) + sources = task._capture_sources((vt,))[0] + syn_target = task._inject_synthetic_target(vt, sources) actual_sources = [s for s in Fileset.rglobs('*.java', root=target_workdir)] expected_sources = syn_target.sources_relative_to_source_root() diff --git a/tests/python/pants_test/backend/codegen/protobuf/java/BUILD b/tests/python/pants_test/backend/codegen/protobuf/java/BUILD index da9c887c158..90451d248da 100644 --- a/tests/python/pants_test/backend/codegen/protobuf/java/BUILD +++ b/tests/python/pants_test/backend/codegen/protobuf/java/BUILD @@ -26,4 +26,5 @@ python_tests( 'tests/python/pants_test:int-test', ], tags = {'integration'}, + timeout = 240, ) diff --git a/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py b/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py index 4c1c944cfa7..4075f0ea228 100644 --- a/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py +++ b/tests/python/pants_test/backend/codegen/protobuf/java/test_protobuf_integration.py @@ -112,9 +112,7 @@ def find_protoc_blocks(lines): out=pants_run.stderr_data, blocks=block_text)) - biggest_proto = -1 for block in all_blocks: - last_proto = -1 seen_extracted = False for line in block: # Make sure import bases appear after the bases for actual sources. 
@@ -124,14 +122,3 @@ def find_protoc_blocks(lines):
       else:
         self.assertFalse(seen_extracted,
           'Local protoc bases must be ordered before imported bases!')
-        continue
-      # Check to make sure, eg, testproto4.proto never precedes testproto2.proto.
-      match = re.search(r'(?P<sequence>\d+)\.proto[\\.]?$', line)
-      if match:
-        number = int(match.group('sequence'))
-        self.assertTrue(number > last_proto, '{proto} succeeded proto #{number}!\n{blocks}'
-          .format(proto=line, number=last_proto, blocks=block_text))
-        last_proto = number
-    if last_proto > biggest_proto:
-      biggest_proto = last_proto
-  self.assertEqual(biggest_proto, 6, 'Not all protos were seen!\n{}'.format(block_text))
diff --git a/tests/python/pants_test/backend/codegen/wire/java/BUILD b/tests/python/pants_test/backend/codegen/wire/java/BUILD
index 99e8abec771..03c82fe8d84 100644
--- a/tests/python/pants_test/backend/codegen/wire/java/BUILD
+++ b/tests/python/pants_test/backend/codegen/wire/java/BUILD
@@ -6,6 +6,7 @@ python_tests(
   sources = globs('*.py', exclude=[globs('*_integration.py')]),
   dependencies = [
     '3rdparty/python/twitter/commons:twitter.common.collections',
+    '3rdparty/python:parameterized',
     'src/python/pants/backend/codegen/wire/java',
     'src/python/pants/java/jar',
     'src/python/pants/backend/jvm/targets:jvm',
@@ -29,4 +30,5 @@ python_tests(
     'tests/python/pants_test:int-test',
   ],
   tags = {'integration'},
+  timeout = 300,
 )
diff --git a/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py b/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py
index e05d37b0f4e..b297af56110 100644
--- a/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py
+++ b/tests/python/pants_test/backend/codegen/wire/java/test_wire_gen.py
@@ -4,6 +4,8 @@
 
 from __future__ import absolute_import, division, print_function, unicode_literals
 
+from parameterized import parameterized
+
 from pants.backend.codegen.wire.java.java_wire_library import JavaWireLibrary
 from pants.backend.codegen.wire.java.register import build_file_aliases as register_codegen
 from pants.backend.codegen.wire.java.wire_gen import WireGen
@@ -59,28 +61,35 @@ def test_compiler_args_wirev1(self):
                       'bar.proto'],
                      task.format_args_for_target(wire_targetv1, self.TARGET_WORKDIR))
 
-  def test_compiler_args_all(self):
+  @parameterized.expand([(True,), (False,)])
+  def test_compiler_args_all(self, ordered_sources):
     self._create_fake_wire_tool(version='1.8.0')
     kitchen_sink = self.make_target('src/wire:kitchen-sink', JavaWireLibrary,
                                     sources=['foo.proto', 'bar.proto', 'baz.proto'],
                                     registry_class='org.pantsbuild.Registry',
                                     service_writer='org.pantsbuild.DummyServiceWriter',
                                     no_options=True,
+                                    ordered_sources=ordered_sources,
                                     roots=['root1', 'root2', 'root3'],
                                     enum_options=['enum1', 'enum2', 'enum3'],)
     task = self.create_task(self.context(target_roots=[kitchen_sink]))
-    self.assertEqual([
-      '--java_out={}'.format(self.TARGET_WORKDIR),
-      '--no_options',
-      '--service_writer=org.pantsbuild.DummyServiceWriter',
-      '--registry_class=org.pantsbuild.Registry',
-      '--roots=root1,root2,root3',
-      '--enum_options=enum1,enum2,enum3',
-      '--proto_path={}/src/wire'.format(self.build_root),
-      'foo.proto',
-      'bar.proto',
-      'baz.proto'],
-      task.format_args_for_target(kitchen_sink, self.TARGET_WORKDIR))
+    expected = [
+      '--java_out={}'.format(self.TARGET_WORKDIR),
+      '--no_options',
+      '--service_writer=org.pantsbuild.DummyServiceWriter',
+      '--registry_class=org.pantsbuild.Registry',
+      '--roots=root1,root2,root3',
+      '--enum_options=enum1,enum2,enum3',
+
'--proto_path={}/src/wire'.format(self.build_root), + 'foo.proto', + 'bar.proto', + 'baz.proto', + ] + actual = task.format_args_for_target(kitchen_sink, self.TARGET_WORKDIR) + if not ordered_sources: + expected = set(expected) + actual = set(actual) + self.assertEqual(expected, actual) def test_compiler_args_proto_paths(self): self._create_fake_wire_tool() diff --git a/tests/python/pants_test/backend/jvm/subsystems/test_incomplete_custom_scala.py b/tests/python/pants_test/backend/jvm/subsystems/test_incomplete_custom_scala.py index 5ed3a074b62..6b8243a9d1b 100644 --- a/tests/python/pants_test/backend/jvm/subsystems/test_incomplete_custom_scala.py +++ b/tests/python/pants_test/backend/jvm/subsystems/test_incomplete_custom_scala.py @@ -37,7 +37,7 @@ def tmp_custom_scala(self, path_suffix): def tmp_scalastyle_config(self): with temporary_dir(root_dir=get_buildroot()) as scalastyle_dir: path = os.path.join(scalastyle_dir, 'config.xml') - safe_file_dump(path, '''''', binary_mode=False) + safe_file_dump(path, '''''', mode='w') yield '--lint-scalastyle-config={}'.format(path) def pants_run(self, options=None): diff --git a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py index ce342b50d0a..8d10295183f 100644 --- a/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py +++ b/tests/python/pants_test/backend/jvm/tasks/jvm_compile/java/test_zinc_compile_integration.py @@ -235,6 +235,7 @@ def test_failed_hermetic_incremental_compile(self): config, ) self.assert_failure(pants_run) + self.assertIn('Please use --no-compile-zinc-incremental', pants_run.stdout_data) def test_failed_compile_with_hermetic(self): with temporary_dir() as cache_dir: @@ -258,6 +259,36 @@ def test_failed_compile_with_hermetic(self): config, ) self.assert_failure(pants_run) + self.assertIn('package System2 does not exist', pants_run.stderr_data) + self.assertIn( + 'Failed jobs: compile(testprojects/src/java/org/pantsbuild/testproject/dummies:' + 'compilation_failure_target)', + pants_run.stdout_data) + + def test_failed_compile_with_subprocess(self): + with temporary_dir() as cache_dir: + config = { + 'cache.compile.zinc': {'write_to': [cache_dir]}, + 'compile.zinc': { + 'execution_strategy': 'subprocess', + 'use_classpath_jars': False, + 'incremental': False, + } + } + + with self.temporary_workdir() as workdir: + pants_run = self.run_pants_with_workdir( + [ + # NB: We don't use -q here because subprocess squashes the error output + # See https://github.com/pantsbuild/pants/issues/5646 + 'compile', + 'testprojects/src/java/org/pantsbuild/testproject/dummies:compilation_failure_target', + ], + workdir, + config, + ) + self.assert_failure(pants_run) + self.assertIn('package System2 does not exist', pants_run.stdout_data) self.assertIn( 'Failed jobs: compile(testprojects/src/java/org/pantsbuild/testproject/dummies:' 'compilation_failure_target)', diff --git a/tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py b/tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py index 94085f0077e..a235052cd3c 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_bundle_create.py @@ -38,7 +38,7 @@ def add_consolidated_bundle(self, context, tgt, files_dict): entry_path = safe_mkdtemp(dir=target_dir) classpath_dir = safe_mkdtemp(dir=target_dir) for rel_path, content in 
files_dict.items(): - safe_file_dump(os.path.join(entry_path, rel_path), content, binary_mode=False) + safe_file_dump(os.path.join(entry_path, rel_path), content, mode='w') # Create Jar to mimic consolidate classpath behavior. jarpath = os.path.join(classpath_dir, 'output-0.jar') @@ -71,12 +71,12 @@ def setUp(self): JarDependency(org='org.gnu', name='gary', rev='4.0.0', ext='tar.gz')]) - safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content', binary_mode=False) + safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content', mode='w') self.resources_target = self.make_target('//resources:foo-resources', Resources, sources=['foo/file']) # This is so that payload fingerprint can be computed. - safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content', binary_mode=False) + safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content', mode='w') self.java_lib_target = self.make_target('//foo:foo-library', JavaLibrary, sources=['Foo.java']) self.binary_target = self.make_target(spec='//foo:foo-binary', diff --git a/tests/python/pants_test/backend/jvm/tasks/test_consolidate_classpath.py b/tests/python/pants_test/backend/jvm/tasks/test_consolidate_classpath.py index b30dbf7a17c..79e62c917dd 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_consolidate_classpath.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_consolidate_classpath.py @@ -48,12 +48,12 @@ def setUp(self): JarDependency(org='org.gnu', name='gary', rev='4.0.0', ext='tar.gz')]) - safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content', binary_mode=False) + safe_file_dump(os.path.join(self.build_root, 'resources/foo/file'), '// dummy content', mode='w') self.resources_target = self.make_target('//resources:foo-resources', Resources, sources=['foo/file']) # This is so that payload fingerprint can be computed. - safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content', binary_mode=False) + safe_file_dump(os.path.join(self.build_root, 'foo/Foo.java'), '// dummy content', mode='w') self.java_lib_target = self.make_target('//foo:foo-library', JavaLibrary, sources=['Foo.java']) self.binary_target = self.make_target(spec='//foo:foo-binary', diff --git a/tests/python/pants_test/backend/jvm/tasks/test_junit_run.py b/tests/python/pants_test/backend/jvm/tasks/test_junit_run.py index c25edde0b26..43e4ff619cc 100644 --- a/tests/python/pants_test/backend/jvm/tasks/test_junit_run.py +++ b/tests/python/pants_test/backend/jvm/tasks/test_junit_run.py @@ -218,7 +218,7 @@ def test_request_classes_by_source(self): # Existing files (with and without the method name) should trigger. 
srcfile = os.path.join(self.test_workdir, 'this.is.a.source.file.scala') - safe_file_dump(srcfile, 'content!', binary_mode=False) + safe_file_dump(srcfile, 'content!', mode='w') self.assertTrue(JUnitRun.request_classes_by_source([srcfile])) self.assertTrue(JUnitRun.request_classes_by_source(['{}#method'.format(srcfile)])) diff --git a/tests/python/pants_test/backend/native/subsystems/test_native_toolchain.py b/tests/python/pants_test/backend/native/subsystems/test_native_toolchain.py index 3dcda4e72cf..d3f822fb042 100644 --- a/tests/python/pants_test/backend/native/subsystems/test_native_toolchain.py +++ b/tests/python/pants_test/backend/native/subsystems/test_native_toolchain.py @@ -56,7 +56,7 @@ def test_gcc_version(self): gcc = gcc_c_toolchain.c_toolchain.c_compiler gcc_version_out = self._invoke_capturing_output( [gcc.exe_filename, '--version'], - env=gcc.as_invocation_environment_dict) + env=gcc.invocation_environment_dict) gcc_version_regex = re.compile('^gcc.*{}$'.format(re.escape(self.gcc_version)), flags=re.MULTILINE) @@ -70,7 +70,7 @@ def test_gpp_version(self): gpp = gcc_cpp_toolchain.cpp_toolchain.cpp_compiler gpp_version_out = self._invoke_capturing_output( [gpp.exe_filename, '--version'], - env=gpp.as_invocation_environment_dict) + env=gpp.invocation_environment_dict) gpp_version_regex = re.compile(r'^g\+\+.*{}$'.format(re.escape(self.gcc_version)), flags=re.MULTILINE) @@ -84,7 +84,7 @@ def test_clang_version(self): clang = llvm_c_toolchain.c_toolchain.c_compiler clang_version_out = self._invoke_capturing_output( [clang.exe_filename, '--version'], - env=clang.as_invocation_environment_dict) + env=clang.invocation_environment_dict) clang_version_regex = re.compile('^clang version {}'.format(re.escape(self.llvm_version)), flags=re.MULTILINE) @@ -100,7 +100,7 @@ def test_clangpp_version(self): clangpp = llvm_cpp_toolchain.cpp_toolchain.cpp_compiler clanggpp_version_out = self._invoke_capturing_output( [clangpp.exe_filename, '--version'], - env=clangpp.as_invocation_environment_dict) + env=clangpp.invocation_environment_dict) self.assertIsNotNone(clangpp_version_regex.search(clanggpp_version_out)) @@ -120,7 +120,7 @@ def _hello_world_source_environment(self, toolchain_type, file_name, contents): def _invoke_compiler(self, compiler, args): cmd = [compiler.exe_filename] + compiler.extra_args + args - env = compiler.as_invocation_environment_dict + env = compiler.invocation_environment_dict # TODO: add an `extra_args`-like field to `Executable`s which allows for overriding env vars # like this, but declaratively! env['LC_ALL'] = 'C' @@ -130,7 +130,7 @@ def _invoke_linker(self, linker, args): cmd = [linker.exe_filename] + linker.extra_args + args return self._invoke_capturing_output( cmd, - linker.as_invocation_environment_dict) + linker.invocation_environment_dict) def _invoke_capturing_output(self, cmd, env=None): env = env or {} diff --git a/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py b/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py index e9f831ff55a..4389f60b22f 100644 --- a/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py +++ b/tests/python/pants_test/backend/native/tasks/test_cpp_compile.py @@ -70,6 +70,7 @@ def test_target_level_toolchain_variant_llvm(self): task = self.create_task(self.context(target_roots=[cpp_lib_target])) compiler = task.get_compiler(cpp_lib_target) + # TODO(#6866): test specifically which compiler is selected, traversing the PATH if necessary. 
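+    # For now, we only sanity-check that the first PATH entry for the selected compiler
+    # points into an LLVM install directory.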
self.assertIn('llvm', compiler.path_entries[0]) def test_target_level_toolchain_variant_default_llvm(self): diff --git a/tests/python/pants_test/backend/python/tasks/BUILD b/tests/python/pants_test/backend/python/tasks/BUILD index 072b3380e38..cf8ef3dcd88 100644 --- a/tests/python/pants_test/backend/python/tasks/BUILD +++ b/tests/python/pants_test/backend/python/tasks/BUILD @@ -54,7 +54,6 @@ python_tests( 'tests/python/pants_test/backend/python/tasks/util', 'tests/python/pants_test/engine:scheduler_test_base', 'tests/python/pants_test/subsystem:subsystem_utils', - 'tests/python/pants_test/tasks:task_test_base', 'tests/python/pants_test:task_test_base', ], timeout=600 diff --git a/tests/python/pants_test/backend/python/tasks/native/BUILD b/tests/python/pants_test/backend/python/tasks/native/BUILD index 7bbccd25921..278a8f16ecd 100644 --- a/tests/python/pants_test/backend/python/tasks/native/BUILD +++ b/tests/python/pants_test/backend/python/tasks/native/BUILD @@ -33,7 +33,6 @@ python_tests( 'tests/python/pants_test/backend/python/tasks/util', 'tests/python/pants_test/engine:scheduler_test_base', 'tests/python/pants_test/subsystem:subsystem_utils', - 'tests/python/pants_test/tasks:task_test_base', 'tests/python/pants_test:task_test_base', ], tags={'platform_specific_behavior'}, diff --git a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py index 92922794a1e..4dd86c367e2 100644 --- a/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py +++ b/tests/python/pants_test/backend/python/tasks/native/test_ctypes_integration.py @@ -7,6 +7,7 @@ import glob import os import re +from functools import wraps from zipfile import ZipFile from pants.backend.native.config.environment import Platform @@ -24,6 +25,14 @@ def invoke_pex_for_output(pex_file_to_run): return subprocess.check_output([pex_file_to_run], stderr=subprocess.STDOUT) +def _toolchain_variants(func): + @wraps(func) + def wrapper(*args, **kwargs): + for variant in ToolchainVariant.iterate_enum_variants(): + func(*args, toolchain_variant=variant, **kwargs) + return wrapper + + class CTypesIntegrationTest(PantsRunIntegrationTest): _binary_target_dir = 'testprojects/src/python/python_distribution/ctypes' @@ -38,32 +47,16 @@ class CTypesIntegrationTest(PantsRunIntegrationTest): 'testprojects/src/python/python_distribution/ctypes_with_extra_compiler_flags:bin' ) - def test_ctypes_binary_creation(self): + @_toolchain_variants + def test_ctypes_binary_creation(self, toolchain_variant): """Create a python_binary() with all native toolchain variants, and test the result.""" - # TODO: this pattern could be made more ergonomic for `enum()`, along with exhaustiveness - # checking. - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_binary_creation(variant) - - _compiler_names_for_variant = { - 'gnu': ['gcc', 'g++'], - 'llvm': ['clang', 'clang++'], - } - - # All of our toolchains currently use the C++ compiler's filename as argv[0] for the linker. 
- _linker_names_for_variant = { - 'gnu': ['g++'], - 'llvm': ['clang++'], - } - - def _assert_ctypes_binary_creation(self, toolchain_variant): with temporary_dir() as tmp_dir: pants_run = self.run_pants(command=['binary', self._binary_target], config={ GLOBAL_SCOPE_CONFIG_SECTION: { 'pants_distdir': tmp_dir, }, 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) @@ -71,12 +64,23 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): # Check that we have selected the appropriate compilers for our selected toolchain variant, # for both C and C++ compilation. - # TODO(#6866): don't parse info logs for testing! - for compiler_name in self._compiler_names_for_variant[toolchain_variant]: + # TODO(#6866): don't parse info logs for testing! There is a TODO in test_cpp_compile.py + # in the native backend testing to traverse the PATH to find the selected compiler. + compiler_names_to_check = toolchain_variant.resolve_for_enum_variant({ + 'gnu': ['gcc', 'g++'], + 'llvm': ['clang', 'clang++'], + }) + for compiler_name in compiler_names_to_check: self.assertIn("selected compiler exe name: '{}'".format(compiler_name), pants_run.stdout_data) - for linker_name in self._linker_names_for_variant[toolchain_variant]: + # All of our toolchains currently use the C++ compiler's filename as argv[0] for the linker, + # so there is only one name to check. + linker_names_to_check = toolchain_variant.resolve_for_enum_variant({ + 'gnu': ['g++'], + 'llvm': ['clang++'], + }) + for linker_name in linker_names_to_check: self.assertIn("selected linker exe name: '{}'".format(linker_name), pants_run.stdout_data) @@ -92,9 +96,9 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): dist_name, dist_version, wheel_platform = name_and_platform(wheel_dist) self.assertEqual(dist_name, 'ctypes_test') - contains_current_platform = Platform.create().resolve_platform_specific({ - 'darwin': lambda: wheel_platform.startswith('macosx'), - 'linux': lambda: wheel_platform.startswith('linux'), + contains_current_platform = Platform.create().resolve_for_enum_variant({ + 'darwin': wheel_platform.startswith('macosx'), + 'linux': wheel_platform.startswith('linux'), }) self.assertTrue(contains_current_platform) @@ -110,16 +114,8 @@ def _assert_ctypes_binary_creation(self, toolchain_variant): binary_run_output = invoke_pex_for_output(pex) self.assertEqual(b'x=3, f(x)=17\n', binary_run_output) - def test_ctypes_native_language_interop(self): - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_interop_with_mock_buildroot(variant) - - _include_not_found_message_for_variant = { - 'gnu': "fatal error: some_math.h: No such file or directory", - 'llvm': "fatal error: 'some_math.h' file not found" - } - - def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): + @_toolchain_variants + def test_ctypes_native_language_interop(self, toolchain_variant): # TODO: consider making this mock_buildroot/run_pants_with_workdir into a # PantsRunIntegrationTest method! with self.mock_buildroot( @@ -138,7 +134,7 @@ def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): # Explicitly set to True (although this is the default). config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, # TODO(#6848): don't make it possible to forget to add the toolchain_variant option! 
'native-build-settings': { @@ -148,19 +144,25 @@ def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): workdir=os.path.join(buildroot.new_buildroot, '.pants.d'), build_root=buildroot.new_buildroot) self.assert_failure(pants_binary_strict_deps_failure) - self.assertIn(self._include_not_found_message_for_variant[toolchain_variant], + self.assertIn(toolchain_variant.resolve_for_enum_variant({ + 'gnu': "fatal error: some_math.h: No such file or directory", + 'llvm': "fatal error: 'some_math.h' file not found", + }), pants_binary_strict_deps_failure.stdout_data) # TODO(#6848): we need to provide the libstdc++.so.6.dylib which comes with gcc on osx in the # DYLD_LIBRARY_PATH during the 'run' goal somehow. - attempt_pants_run = Platform.create().resolve_platform_specific({ - 'darwin': lambda: toolchain_variant != 'gnu', - 'linux': lambda: True, + attempt_pants_run = Platform.create().resolve_for_enum_variant({ + 'darwin': toolchain_variant.resolve_for_enum_variant({ + 'gnu': False, + 'llvm': True, + }), + 'linux': True, }) if attempt_pants_run: pants_run_interop = self.run_pants(['-q', 'run', self._binary_target_with_interop], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, 'native-build-settings': { 'strict_deps': True, @@ -169,28 +171,28 @@ def _assert_ctypes_interop_with_mock_buildroot(self, toolchain_variant): self.assert_success(pants_run_interop) self.assertEqual('x=3, f(x)=299\n', pants_run_interop.stdout_data) - def test_ctypes_third_party_integration(self): - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_third_party_integration(variant) - - def _assert_ctypes_third_party_integration(self, toolchain_variant): + @_toolchain_variants + def test_ctypes_third_party_integration(self, toolchain_variant): pants_binary = self.run_pants(['binary', self._binary_target_with_third_party], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) self.assert_success(pants_binary) # TODO(#6848): this fails when run with gcc on osx as it requires gcc's libstdc++.so.6.dylib to # be available on the runtime library path. - attempt_pants_run = Platform.create().resolve_platform_specific({ - 'darwin': lambda: toolchain_variant != 'gnu', - 'linux': lambda: True, + attempt_pants_run = Platform.create().resolve_for_enum_variant({ + 'darwin': toolchain_variant.resolve_for_enum_variant({ + 'gnu': False, + 'llvm': True, + }), + 'linux': True, }) if attempt_pants_run: pants_run = self.run_pants(['-q', 'run', self._binary_target_with_third_party], config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, }) self.assert_success(pants_run) @@ -220,23 +222,32 @@ def test_pants_native_source_detection_for_local_ctypes_dists_for_current_platfo self.assert_success(pants_run) self.assertIn('x=3, f(x)=17', pants_run.stdout_data) - def test_native_compiler_option_sets_integration(self): + @_toolchain_variants + def test_native_compiler_option_sets_integration(self, toolchain_variant): """Test that native compilation includes extra compiler flags from target definitions. This target uses the ndebug and asdf option sets. If either of these are not present (disabled), this test will fail. 
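 
     Runs once per toolchain variant, via the @_toolchain_variants decorator above.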
""" - for variant in ToolchainVariant.allowed_values: - self._assert_ctypes_third_party_integration(variant) + # TODO(#6848): this fails when run with gcc on osx as it requires gcc's libstdc++.so.6.dylib to + # be available on the runtime library path. + attempt_pants_run = Platform.create().resolve_for_enum_variant({ + 'darwin': toolchain_variant.resolve_for_enum_variant({ + 'gnu': False, + 'llvm': True, + }), + 'linux': True, + }) + if not attempt_pants_run: + return - def _assert_native_compiler_option_sets_integration(self, toolchain_variant): command = [ 'run', self._binary_target_with_compiler_option_sets ] pants_run = self.run_pants(command=command, config={ 'native-build-step': { - 'toolchain_variant': toolchain_variant, + 'toolchain_variant': toolchain_variant.value, }, 'native-build-step.cpp-compile-settings': { 'compiler_option_sets_enabled_args': { diff --git a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py index 95cfa87023e..a6669c0a561 100644 --- a/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py +++ b/tests/python/pants_test/backend/python/tasks/test_python_binary_integration.py @@ -15,6 +15,12 @@ from pants_test.pants_run_integration_test import PantsRunIntegrationTest +_LINUX_PLATFORM = "linux-x86_64" +_LINUX_WHEEL_SUBSTRING = "manylinux" +_OSX_PLATFORM = "macosx-10.13-x86_64" +_OSX_WHEEL_SUBSTRING = "macosx" + + class PythonBinaryIntegrationTest(PantsRunIntegrationTest): @staticmethod @contextmanager @@ -68,9 +74,52 @@ def test_zipsafe_caching(self): self.assert_success(build()) self.assert_pex_attribute(test_pex, 'zip_safe', True) - def test_platforms(self): - """Ensure that changing platforms invalidates the generated pex binaries.""" - + def test_platform_defaults_to_config(self): + self.platforms_test_impl( + target_platforms=None, + config_platforms=[_OSX_PLATFORM], + want_present_platforms=[_OSX_WHEEL_SUBSTRING], + want_missing_platforms=[_LINUX_PLATFORM], + ) + + def test_target_platform_without_config(self): + self.platforms_test_impl( + target_platforms=[_LINUX_PLATFORM], + config_platforms=None, + want_present_platforms=[_LINUX_WHEEL_SUBSTRING], + want_missing_platforms=[_OSX_WHEEL_SUBSTRING], + ) + + def test_target_platform_overrides_config(self): + self.platforms_test_impl( + target_platforms=[_LINUX_PLATFORM], + config_platforms=[_OSX_WHEEL_SUBSTRING], + want_present_platforms=[_LINUX_WHEEL_SUBSTRING], + want_missing_platforms=[_OSX_WHEEL_SUBSTRING], + ) + + def test_target_platform_narrows_config(self): + self.platforms_test_impl( + target_platforms=[_LINUX_PLATFORM], + config_platforms=[_LINUX_WHEEL_SUBSTRING, _OSX_WHEEL_SUBSTRING], + want_present_platforms=[_LINUX_WHEEL_SUBSTRING], + want_missing_platforms=[_OSX_WHEEL_SUBSTRING], + ) + + def test_target_platform_expands_config(self): + self.platforms_test_impl( + target_platforms=[_LINUX_PLATFORM, _OSX_PLATFORM], + config_platforms=[_LINUX_WHEEL_SUBSTRING], + want_present_platforms=[_LINUX_WHEEL_SUBSTRING, _OSX_WHEEL_SUBSTRING], + ) + + def platforms_test_impl( + self, + target_platforms, + config_platforms, + want_present_platforms, + want_missing_platforms=(), + ): def numpy_deps(deps): return [d for d in deps if 'numpy' in d] def assertInAny(substring, collection): @@ -79,6 +128,7 @@ def assertInAny(substring, collection): def assertNotInAny(substring, collection): self.assertTrue(all(substring not in d for d in collection), 'Expected an entry matching "{}" in 
{}'.format(substring, collection)) + test_project = 'testprojects/src/python/cache_fields' test_build = os.path.join(test_project, 'BUILD') test_src = os.path.join(test_project, 'main.py') @@ -88,19 +138,16 @@ def assertNotInAny(substring, collection): config['python-setup'] = { 'platforms': None } - build = functools.partial( - self.run_pants_with_workdir, - command=['binary', test_project], - workdir=os.path.join(buildroot.new_buildroot, '.pants.d'), - config=config, - build_root=buildroot.new_buildroot - ) buildroot.write_file(test_src, '') buildroot.write_file(test_build, dedent(""" - python_binary(source='main.py', dependencies=[':numpy']) + python_binary( + source='main.py', + dependencies=[':numpy'], + {target_platforms} + ) python_requirement_library( name='numpy', requirements=[ @@ -108,26 +155,26 @@ def assertNotInAny(substring, collection): ] ) - """) + """.format( + target_platforms="platforms = [{}],".format(", ".join(["'{}'".format(p) for p in target_platforms])) if target_platforms is not None else "", + )) ) # When only the linux platform is requested, # only linux wheels should end up in the pex. - config['python-setup']['platforms'] = ['linux-x86_64'] - build() - - with open_zip(test_pex) as z: - deps = numpy_deps(z.namelist()) - assertInAny('manylinux', deps) - assertNotInAny('macosx', deps) - - # When both linux and macosx platforms are requested, - # wheels for both should end up in the pex. - config['python-setup']['platforms'] = [ - 'linux-x86_64', - 'macosx-10.13-x86_64'] - build() + if config_platforms is not None: + config['python-setup']['platforms'] = config_platforms + result = self.run_pants_with_workdir( + command=['binary', test_project], + workdir=os.path.join(buildroot.new_buildroot, '.pants.d'), + config=config, + build_root=buildroot.new_buildroot, + tee_output=True, + ) + self.assert_success(result) with open_zip(test_pex) as z: deps = numpy_deps(z.namelist()) - assertInAny('manylinux', deps) - assertInAny('macosx', deps) + for platform in want_present_platforms: + assertInAny(platform, deps) + for platform in want_missing_platforms: + assertNotInAny(platform, deps) diff --git a/tests/python/pants_test/backend/python/tasks/test_python_tool.py b/tests/python/pants_test/backend/python/tasks/test_python_tool.py new file mode 100644 index 00000000000..2c030473f10 --- /dev/null +++ b/tests/python/pants_test/backend/python/tasks/test_python_tool.py @@ -0,0 +1,87 @@ +# coding=utf-8 +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
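+
+# An end-to-end check of PythonToolPrepBase: a dummy tool subsystem pinned to pex==1.5.3 is
+# built into a tool PEX and then executed through a task, under both Python 2 and Python 3
+# interpreter constraints.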
+ +from __future__ import absolute_import, division, print_function, unicode_literals + +import os +import re + +from pants.backend.python.subsystems.python_tool_base import PythonToolBase +from pants.backend.python.tasks.python_tool_prep_base import PythonToolInstance, PythonToolPrepBase +from pants.task.task import Task +from pants.util.contextutil import temporary_dir +from pants_test.backend.python.tasks.python_task_test_base import PythonTaskTestBase + + +class Tool(PythonToolBase): + options_scope = 'test-tool' + default_requirements = [ + 'pex==1.5.3', + ] + default_entry_point = 'pex.bin.pex:main' + + +class ToolInstance(PythonToolInstance): + pass + + +class ToolPrep(PythonToolPrepBase): + options_scope = 'tool-prep-task' + tool_subsystem_cls = Tool + tool_instance_cls = ToolInstance + + +class ToolTask(Task): + options_scope = 'tool-task' + + @classmethod + def prepare(cls, options, round_manager): + super(ToolTask, cls).prepare(options, round_manager) + round_manager.require_data(ToolPrep.tool_instance_cls) + + def execute(self): + tool_for_pex = self.context.products.get_data(ToolPrep.tool_instance_cls) + stdout, _, exit_code, _ = tool_for_pex.output(['--version']) + assert re.match(r'.*\.pex 1.5.3', stdout) + assert 0 == exit_code + + +class PythonToolPrepTest(PythonTaskTestBase): + + @classmethod + def task_type(cls): + return ToolTask + + def _assert_tool_execution_for_python_version(self, use_py3=True): + scope_string = '3' if use_py3 else '2' + constraint_string = 'CPython>=3' if use_py3 else 'CPython<3' + tool_prep_type = self.synthesize_task_subtype(ToolPrep, 'tp_scope_py{}'.format(scope_string)) + with temporary_dir() as tmp_dir: + context = self.context(for_task_types=[tool_prep_type], for_subsystems=[Tool], options={ + '': { + 'pants_bootstrapdir': tmp_dir, + }, + 'test-tool': { + 'interpreter_constraints': [constraint_string], + }, + }) + tool_prep_task = tool_prep_type(context, os.path.join( + self.pants_workdir, 'tp_py{}'.format(scope_string))) + tool_prep_task.execute() + # Check that the tool can be created and executed successfully. + self.create_task(context).execute() + pex_tool = context.products.get_data(ToolPrep.tool_instance_cls) + # Check that our pex tool wrapper was constructed with the expected interpreter. 
+ self.assertTrue(pex_tool.interpreter.identity.matches(constraint_string)) + return pex_tool + + def test_tool_execution(self): + """Test that python tools are fingerprinted by python interpreter.""" + py3_pex_tool = self._assert_tool_execution_for_python_version(use_py3=True) + py3_pex_tool_path = py3_pex_tool.pex.path() + self.assertTrue(os.path.isdir(py3_pex_tool_path)) + py2_pex_tool = self._assert_tool_execution_for_python_version(use_py3=False) + py2_pex_tool_path = py2_pex_tool.pex.path() + self.assertTrue(os.path.isdir(py2_pex_tool_path)) + self.assertNotEqual(py3_pex_tool_path, py2_pex_tool_path) diff --git a/tests/python/pants_test/backend/python/test_interpreter_cache.py b/tests/python/pants_test/backend/python/test_interpreter_cache.py index 86196d710ed..84cf353cf2d 100644 --- a/tests/python/pants_test/backend/python/test_interpreter_cache.py +++ b/tests/python/pants_test/backend/python/test_interpreter_cache.py @@ -5,6 +5,8 @@ from __future__ import absolute_import, division, print_function, unicode_literals import os +import shutil +import sys from builtins import str from contextlib import contextmanager @@ -15,7 +17,8 @@ from pants.backend.python.interpreter_cache import PythonInterpreter, PythonInterpreterCache from pants.subsystem.subsystem import Subsystem -from pants.util.contextutil import temporary_dir +from pants.util.contextutil import environment_as, temporary_dir +from pants.util.dirutil import safe_mkdir from pants_test.backend.python.interpreter_selection_utils import (PY_27, PY_36, python_interpreter_path, skip_unless_python27_and_python36) @@ -171,3 +174,34 @@ def test_setup_cached_warm(self): def test_setup_cached_cold(self): with self._setup_cache() as (cache, _): self.assertEqual([], list(cache._setup_cached())) + + def test_interpreter_from_relpath_purges_stale_interpreter(self): + """ + Simulates a stale interpreter cache and tests that _interpreter_from_relpath + properly detects it and removes the stale dist directory. + + See https://github.com/pantsbuild/pants/issues/3416 for more info. + """ + with temporary_dir() as temp_dir: + # Setup a interpreter distribution that we can safely mutate. + test_interpreter_binary = os.path.join(temp_dir, 'python2.7') + src = os.path.realpath(sys.executable) + sys_exe_dist = os.path.dirname(os.path.dirname(src)) + shutil.copy2(src, test_interpreter_binary) + with environment_as( + PYTHONPATH='{}'.format(os.path.join(sys_exe_dist, 'lib/python2.7')) + ): + with self._setup_cache(constraints=[]) as (cache, path): + # Setup cache for test interpreter distribution. + identity_str = str(PythonInterpreter.from_binary(test_interpreter_binary).identity) + cached_interpreter_dir = os.path.join(cache._cache_dir, identity_str) + safe_mkdir(cached_interpreter_dir) + cached_symlink = os.path.join(cached_interpreter_dir, 'python') + os.symlink(test_interpreter_binary, cached_symlink) + + # Remove the test interpreter binary from filesystem and assert that the cache is purged. 
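+        # _interpreter_from_relpath() should observe that the cached symlink now dangles and
+        # purge the stale interpreter dist directory as a side effect.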
+ os.remove(test_interpreter_binary) + self.assertEqual(os.path.exists(test_interpreter_binary), False) + self.assertEqual(os.path.exists(cached_interpreter_dir), True) + cache._interpreter_from_relpath(identity_str) + self.assertEqual(os.path.exists(cached_interpreter_dir), False) diff --git a/tests/python/pants_test/base/test_exception_sink_integration.py b/tests/python/pants_test/base/test_exception_sink_integration.py index fa98cee6a7a..9ea4e38f673 100644 --- a/tests/python/pants_test/base/test_exception_sink_integration.py +++ b/tests/python/pants_test/base/test_exception_sink_integration.py @@ -123,6 +123,7 @@ def test_dumps_logs_on_terminate(self): self._assert_graceful_signal_log_matches( waiter_run.pid, signal.SIGTERM, read_file(shared_log_file, binary_mode=False)) + @unittest.skip('Hangs a lot: https://github.com/pantsbuild/pants/issues/7199') def test_dumps_traceback_on_sigabrt(self): # SIGABRT sends a traceback to the log file for the current process thanks to # faulthandler.enable(). @@ -197,7 +198,7 @@ def test_reset_interactive_output_stream(self): with temporary_dir() as tmpdir: some_file = os.path.join(tmpdir, 'some_file') - safe_file_dump(some_file, b'', binary_mode=True) + safe_file_dump(some_file, b'', mode='wb') redirected_pants_run = self.run_pants([ "--lifecycle-stubs-new-interactive-stream-output-file={}".format(some_file), ] + lifecycle_stub_cmdline) diff --git a/tests/python/pants_test/base_test.py b/tests/python/pants_test/base_test.py deleted file mode 100644 index 4fef0bc5cee..00000000000 --- a/tests/python/pants_test/base_test.py +++ /dev/null @@ -1,504 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import absolute_import, division, print_function, unicode_literals - -import itertools -import logging -import os -import unittest -from builtins import object, open -from collections import defaultdict -from contextlib import contextmanager -from tempfile import mkdtemp -from textwrap import dedent - -from future.utils import PY2 - -from pants.base.build_file import BuildFile -from pants.base.build_root import BuildRoot -from pants.base.cmd_line_spec_parser import CmdLineSpecParser -from pants.base.deprecated import deprecated_module -from pants.base.exceptions import TaskError -from pants.base.file_system_project_tree import FileSystemProjectTree -from pants.build_graph.address import Address -from pants.build_graph.build_configuration import BuildConfiguration -from pants.build_graph.build_file_address_mapper import BuildFileAddressMapper -from pants.build_graph.build_file_aliases import BuildFileAliases -from pants.build_graph.build_file_parser import BuildFileParser -from pants.build_graph.mutable_build_graph import MutableBuildGraph -from pants.build_graph.target import Target -from pants.init.util import clean_global_runtime_state -from pants.option.options_bootstrapper import OptionsBootstrapper -from pants.option.scope import GLOBAL_SCOPE -from pants.source.source_root import SourceRootConfig -from pants.subsystem.subsystem import Subsystem -from pants.task.goal_options_mixin import GoalOptionsMixin -from pants.util.dirutil import safe_mkdir, safe_open, safe_rmtree -from pants_test.base.context_utils import create_context_from_options -from pants_test.option.util.fakes import create_options_for_optionables - - -# Fix this during a dev release -deprecated_module('1.13.0.dev2', 'Use pants_test.test_base instead') - - -class 
TestGenerator(object): - """A mixin that facilitates test generation at runtime.""" - - @classmethod - def generate_tests(cls): - """Generate tests for a given class. - - This should be called against the composing class in it's defining module, e.g. - - class ThingTest(TestGenerator): - ... - - ThingTest.generate_tests() - - """ - raise NotImplementedError() - - @classmethod - def add_test(cls, method_name, method): - """A classmethod that adds dynamic test methods to a given class. - - :param string method_name: The name of the test method (e.g. `test_thing_x`). - :param callable method: A callable representing the method. This should take a 'self' argument - as its first parameter for instance method binding. - """ - assert not hasattr(cls, method_name), ( - 'a test with name `{}` already exists on `{}`!'.format(method_name, cls.__name__) - ) - assert method_name.startswith('test_'), '{} is not a valid test name!'.format(method_name) - setattr(cls, method_name, method) - - -class BaseTest(unittest.TestCase): - """A baseclass useful for tests requiring a temporary buildroot. - - :API: public - - """ - - def build_path(self, relpath): - """Returns the canonical BUILD file path for the given relative build path. - - :API: public - """ - if os.path.basename(relpath).startswith('BUILD'): - return relpath - else: - return os.path.join(relpath, 'BUILD') - - def create_dir(self, relpath): - """Creates a directory under the buildroot. - - :API: public - - relpath: The relative path to the directory from the build root. - """ - path = os.path.join(self.build_root, relpath) - safe_mkdir(path) - return path - - def create_workdir_dir(self, relpath): - """Creates a directory under the work directory. - - :API: public - - relpath: The relative path to the directory from the work directory. - """ - path = os.path.join(self.pants_workdir, relpath) - safe_mkdir(path) - return path - - def create_file(self, relpath, contents='', mode='w'): - """Writes to a file under the buildroot. - - :API: public - - relpath: The relative path to the file from the build root. - contents: A string containing the contents of the file - '' by default.. - mode: The mode to write to the file in - over-write by default. - """ - path = os.path.join(self.build_root, relpath) - with safe_open(path, mode=mode) as fp: - fp.write(contents) - return path - - def create_workdir_file(self, relpath, contents='', mode='w'): - """Writes to a file under the work directory. - - :API: public - - relpath: The relative path to the file from the work directory. - contents: A string containing the contents of the file - '' by default.. - mode: The mode to write to the file in - over-write by default. - """ - path = os.path.join(self.pants_workdir, relpath) - with safe_open(path, mode=mode) as fp: - fp.write(contents) - return path - - def add_to_build_file(self, relpath, target): - """Adds the given target specification to the BUILD file at relpath. - - :API: public - - relpath: The relative path to the BUILD file from the build root. - target: A string containing the target definition as it would appear in a BUILD file. - """ - self.create_file(self.build_path(relpath), target, mode='a') - return BuildFile(self.address_mapper._project_tree, relpath=self.build_path(relpath)) - - def make_target(self, - spec='', - target_type=Target, - dependencies=None, - derived_from=None, - synthetic=False, - **kwargs): - """Creates a target and injects it into the test's build graph. 
- - :API: public - - :param string spec: The target address spec that locates this target. - :param type target_type: The concrete target subclass to create this new target from. - :param list dependencies: A list of target instances this new target depends on. - :param derived_from: The target this new target was derived from. - :type derived_from: :class:`pants.build_graph.target.Target` - """ - address = Address.parse(spec) - target = target_type(name=address.target_name, - address=address, - build_graph=self.build_graph, - **kwargs) - dependencies = dependencies or [] - - self.build_graph.apply_injectables([target]) - self.build_graph.inject_target(target, - dependencies=[dep.address for dep in dependencies], - derived_from=derived_from, - synthetic=synthetic) - - # TODO(John Sirois): This re-creates a little bit too much work done by the BuildGraph. - # Fixup the BuildGraph to deal with non BuildFileAddresses better and just leverage it. - traversables = [target.compute_dependency_specs(payload=target.payload)] - - for dependency_spec in itertools.chain(*traversables): - dependency_address = Address.parse(dependency_spec, relative_to=address.spec_path) - dependency_target = self.build_graph.get_target(dependency_address) - if not dependency_target: - raise ValueError('Tests must make targets for dependency specs ahead of them ' - 'being traversed, {} tried to traverse {} which does not exist.' - .format(target, dependency_address)) - if dependency_target not in target.dependencies: - self.build_graph.inject_dependency(dependent=target.address, - dependency=dependency_address) - target.mark_transitive_invalidation_hash_dirty() - - return target - - @property - def alias_groups(self): - """ - :API: public - """ - return BuildFileAliases(targets={'target': Target}) - - @property - def build_ignore_patterns(self): - """ - :API: public - """ - return None - - def setUp(self): - """ - :API: public - """ - super(BaseTest, self).setUp() - # Avoid resetting the Runtracker here, as that is specific to fork'd process cleanup. - clean_global_runtime_state(reset_subsystem=True) - - self.real_build_root = BuildRoot().path - - self.build_root = os.path.realpath(mkdtemp(suffix='_BUILD_ROOT')) - self.subprocess_dir = os.path.join(self.build_root, '.pids') - self.addCleanup(safe_rmtree, self.build_root) - - self.pants_workdir = os.path.join(self.build_root, '.pants.d') - safe_mkdir(self.pants_workdir) - - self.options = defaultdict(dict) # scope -> key-value mapping. - self.options[GLOBAL_SCOPE] = { - 'pants_workdir': self.pants_workdir, - 'pants_supportdir': os.path.join(self.build_root, 'build-support'), - 'pants_distdir': os.path.join(self.build_root, 'dist'), - 'pants_configdir': os.path.join(self.build_root, 'config'), - 'pants_subprocessdir': self.subprocess_dir, - 'cache_key_gen_version': '0-test', - } - self.options['cache'] = { - 'read_from': [], - 'write_to': [], - } - - BuildRoot().path = self.build_root - self.addCleanup(BuildRoot().reset) - - self._build_configuration = BuildConfiguration() - self._build_configuration.register_aliases(self.alias_groups) - self.build_file_parser = BuildFileParser(self._build_configuration, self.build_root) - self.project_tree = FileSystemProjectTree(self.build_root) - self.reset_build_graph() - - def buildroot_files(self, relpath=None): - """Returns the set of all files under the test build root. - - :API: public - - :param string relpath: If supplied, only collect files from this subtree. - :returns: All file paths found. 
- :rtype: set - """ - def scan(): - for root, dirs, files in os.walk(os.path.join(self.build_root, relpath or '')): - for f in files: - yield os.path.relpath(os.path.join(root, f), self.build_root) - return set(scan()) - - def reset_build_graph(self): - """Start over with a fresh build graph with no targets in it.""" - self.address_mapper = BuildFileAddressMapper(self.build_file_parser, self.project_tree, - build_ignore_patterns=self.build_ignore_patterns) - self.build_graph = MutableBuildGraph(address_mapper=self.address_mapper) - - def set_options_for_scope(self, scope, **kwargs): - self.options[scope].update(kwargs) - - def context(self, for_task_types=None, for_subsystems=None, options=None, - target_roots=None, console_outstream=None, workspace=None, - scheduler=None, **kwargs): - """ - :API: public - - :param dict **kwargs: keyword arguments passed in to `create_options_for_optionables`. - """ - # Many tests use source root functionality via the SourceRootConfig.global_instance(). - # (typically accessed via Target.target_base), so we always set it up, for convenience. - for_subsystems = set(for_subsystems or ()) - for subsystem in for_subsystems: - if subsystem.options_scope is None: - raise TaskError('You must set a scope on your subsystem type before using it in tests.') - - optionables = {SourceRootConfig} | self._build_configuration.optionables() | for_subsystems - - for_task_types = for_task_types or () - for task_type in for_task_types: - scope = task_type.options_scope - if scope is None: - raise TaskError('You must set a scope on your task type before using it in tests.') - optionables.add(task_type) - # If task is expected to inherit goal-level options, register those directly on the task, - # by subclassing the goal options registrar and settings its scope to the task scope. - if issubclass(task_type, GoalOptionsMixin): - subclass_name = 'test_{}_{}_{}'.format( - task_type.__name__, task_type.goal_options_registrar_cls.options_scope, - task_type.options_scope) - if PY2: - subclass_name = subclass_name.encode('utf-8') - optionables.add(type(subclass_name, (task_type.goal_options_registrar_cls, ), - {'options_scope': task_type.options_scope})) - - # Now expand to all deps. - all_optionables = set() - for optionable in optionables: - all_optionables.update(si.optionable_cls for si in optionable.known_scope_infos()) - - # Now default the option values and override with any caller-specified values. - # TODO(benjy): Get rid of the options arg, and require tests to call set_options. - options = options.copy() if options else {} - for s, opts in self.options.items(): - scoped_opts = options.setdefault(s, {}) - scoped_opts.update(opts) - - fake_options = create_options_for_optionables( - all_optionables, options=options, **kwargs) - - Subsystem.reset(reset_options=True) - Subsystem.set_options(fake_options) - - context = create_context_from_options(fake_options, - target_roots=target_roots, - build_graph=self.build_graph, - build_file_parser=self.build_file_parser, - address_mapper=self.address_mapper, - console_outstream=console_outstream, - workspace=workspace, - scheduler=scheduler) - return context - - def tearDown(self): - """ - :API: public - """ - super(BaseTest, self).tearDown() - BuildFile.clear_cache() - Subsystem.reset() - - def target(self, spec): - """Resolves the given target address to a Target object. - - :API: public - - address: The BUILD target address to resolve. - - Returns the corresponding Target or else None if the address does not point to a defined Target. 
- """ - address = Address.parse(spec) - self.build_graph.inject_address_closure(address) - return self.build_graph.get_target(address) - - def targets(self, spec): - """Resolves a target spec to one or more Target objects. - - :API: public - - spec: Either BUILD target address or else a target glob using the siblings ':' or - descendants '::' suffixes. - - Returns the set of all Targets found. - """ - - spec = CmdLineSpecParser(self.build_root).parse_spec(spec) - addresses = list(self.address_mapper.scan_specs([spec])) - for address in addresses: - self.build_graph.inject_address_closure(address) - targets = [self.build_graph.get_target(address) for address in addresses] - return targets - - def create_files(self, path, files): - """Writes to a file under the buildroot with contents same as file name. - - :API: public - - path: The relative path to the file from the build root. - files: List of file names. - """ - for f in files: - self.create_file(os.path.join(path, f), contents=f) - - def create_library(self, path, target_type, name, sources=None, **kwargs): - """Creates a library target of given type at the BUILD file at path with sources - - :API: public - - path: The relative path to the BUILD file from the build root. - target_type: valid pants target type. - name: Name of the library target. - sources: List of source file at the path relative to path. - **kwargs: Optional attributes that can be set for any library target. - Currently it includes support for resources, java_sources, provides - and dependencies. - """ - if sources: - self.create_files(path, sources) - self.add_to_build_file(path, dedent(''' - %(target_type)s(name='%(name)s', - %(sources)s - %(java_sources)s - %(provides)s - %(dependencies)s - ) - ''' % dict(target_type=target_type, - name=name, - sources=('sources=%s,' % repr(sources) - if sources else ''), - java_sources=('java_sources=[%s],' - % ','.join('"%s"' % str_target for str_target in kwargs.get('java_sources')) - if 'java_sources' in kwargs else ''), - provides=('provides=%s,' % kwargs.get('provides') - if 'provides' in kwargs else ''), - dependencies=('dependencies=%s,' % kwargs.get('dependencies') - if 'dependencies' in kwargs else ''), - ))) - return self.target('%s:%s' % (path, name)) - - def create_resources(self, path, name, *sources): - """ - :API: public - """ - return self.create_library(path, 'resources', name, sources) - - def assertUnorderedPrefixEqual(self, expected, actual_iter): - """Consumes len(expected) items from the given iter, and asserts that they match, unordered. - - :API: public - """ - actual = list(itertools.islice(actual_iter, len(expected))) - self.assertEqual(sorted(expected), sorted(actual)) - - def assertPrefixEqual(self, expected, actual_iter): - """Consumes len(expected) items from the given iter, and asserts that they match, in order. - - :API: public - """ - self.assertEqual(expected, list(itertools.islice(actual_iter, len(expected)))) - - def assertInFile(self, string, file_path): - """Verifies that a string appears in a file - - :API: public - """ - - with open(file_path, 'r') as f: - content = f.read() - self.assertIn(string, content, '"{}" is not in the file {}:\n{}'.format(string, f.name, content)) - - def get_bootstrap_options(self, cli_options=()): - """Retrieves bootstrap options. - - :param cli_options: An iterable of CLI flags to pass as arguments to `OptionsBootstrapper`. - """ - # Can't parse any options without a pants.ini. 
- self.create_file('pants.ini') - return OptionsBootstrapper.create(args=cli_options).bootstrap_options.for_global_scope() - - class LoggingRecorder(object): - """Simple logging handler to record warnings.""" - - def __init__(self): - self._records = [] - self.level = logging.DEBUG - - def handle(self, record): - self._records.append(record) - - def _messages_for_level(self, levelname): - return ['{}: {}'.format(record.name, record.getMessage()) - for record in self._records if record.levelname == levelname] - - def infos(self): - return self._messages_for_level('INFO') - - def warnings(self): - return self._messages_for_level('WARNING') - - @contextmanager - def captured_logging(self, level=None): - root_logger = logging.getLogger() - - old_level = root_logger.level - root_logger.setLevel(level or logging.NOTSET) - - handler = self.LoggingRecorder() - root_logger.addHandler(handler) - try: - yield handler - finally: - root_logger.setLevel(old_level) - root_logger.removeHandler(handler) diff --git a/tests/python/pants_test/binaries/test_binary_util.py b/tests/python/pants_test/binaries/test_binary_util.py index 150d09907b5..a81224d27c9 100644 --- a/tests/python/pants_test/binaries/test_binary_util.py +++ b/tests/python/pants_test/binaries/test_binary_util.py @@ -346,11 +346,11 @@ def test_select_argv(self): """Test invoking binary_util.py as a standalone script.""" with temporary_dir() as tmp_dir: config_file_loc = os.path.join(tmp_dir, 'pants.ini') - safe_file_dump(config_file_loc, """\ + safe_file_dump(config_file_loc, mode='w', payload="""\ [GLOBAL] allow_external_binary_tool_downloads: True pants_bootstrapdir: {} -""".format(tmp_dir), binary_mode=False) +""".format(tmp_dir)) expected_output_glob = os.path.join( tmp_dir, 'bin', 'cmake', '*', '*', '3.9.5', 'cmake') with environment_as(PANTS_CONFIG_FILES='[{!r}]'.format(config_file_loc)): diff --git a/tests/python/pants_test/build_graph/BUILD b/tests/python/pants_test/build_graph/BUILD index 534c499179f..e8ab092819e 100644 --- a/tests/python/pants_test/build_graph/BUILD +++ b/tests/python/pants_test/build_graph/BUILD @@ -146,3 +146,14 @@ python_tests( 'tests/python/pants_test:test_base', ] ) + +python_tests( + name = 'target_filter_subsystem', + sources = ['test_target_filter_subsystem.py'], + dependencies = [ + '3rdparty/python:future', + 'src/python/pants/build_graph', + 'src/python/pants/task', + 'tests/python/pants_test:task_test_base', + ] +) diff --git a/tests/python/pants_test/build_graph/test_subproject_integration.py b/tests/python/pants_test/build_graph/test_subproject_integration.py index 208cce39fd5..8861003fce6 100644 --- a/tests/python/pants_test/build_graph/test_subproject_integration.py +++ b/tests/python/pants_test/build_graph/test_subproject_integration.py @@ -76,7 +76,7 @@ def harness(): try: for name, content in BUILD_FILES.items(): - safe_file_dump(name, dedent(content), binary_mode=False) + safe_file_dump(name, dedent(content), mode='w') yield finally: safe_rmtree(SUBPROJ_SPEC) @@ -102,7 +102,7 @@ def test_subproject_with_flag(self): """ with harness(): # Has dependencies below the subproject. 
- pants_args = ['--subproject-roots={}'.format(SUBPROJ_ROOT), + pants_args = ['--subproject-roots={}'.format(SUBPROJ_ROOT), 'dependencies', SUBPROJ_SPEC] self.assert_success(self.run_pants(pants_args)) diff --git a/tests/python/pants_test/build_graph/test_target_filter_subsystem.py b/tests/python/pants_test/build_graph/test_target_filter_subsystem.py new file mode 100644 index 00000000000..734ae7c89d4 --- /dev/null +++ b/tests/python/pants_test/build_graph/test_target_filter_subsystem.py @@ -0,0 +1,62 @@ +# coding=utf-8 +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). + +from __future__ import absolute_import, division, print_function, unicode_literals + +from builtins import set + +from pants.build_graph.target_filter_subsystem import TargetFilter, TargetFiltering +from pants.task.task import Task +from pants_test.task_test_base import TaskTestBase + + +class TestTargetFilter(TaskTestBase): + + class DummyTask(Task): + options_scope = 'dummy' + target_filtering_enabled = True + + def execute(self): + self.context.products.safe_create_data('task_targets', self.get_targets) + + @classmethod + def task_type(cls): + return cls.DummyTask + + def test_task_execution_with_filter(self): + a = self.make_target('a', tags=['skip-me']) + b = self.make_target('b', dependencies=[a], tags=[]) + + context = self.context(for_task_types=[self.DummyTask], for_subsystems=[TargetFilter], target_roots=[b], options={ + TargetFilter.options_scope: { + 'exclude_tags': ['skip-me'] + } + }) + + self.create_task(context).execute() + self.assertEqual([b], context.products.get_data('task_targets')) + + def test_filtering_single_tag(self): + a = self.make_target('a', tags=[]) + b = self.make_target('b', tags=['skip-me']) + c = self.make_target('c', tags=['tag1', 'skip-me']) + + filtered_targets = TargetFiltering({'skip-me'}).apply_tag_blacklist([a, b, c]) + self.assertEqual([a], filtered_targets) + + def test_filtering_multiple_tags(self): + a = self.make_target('a', tags=['tag1', 'skip-me']) + b = self.make_target('b', tags=['tag1', 'tag2', 'skip-me']) + c = self.make_target('c', tags=['tag2']) + + filtered_targets = TargetFiltering({'skip-me', 'tag2'}).apply_tag_blacklist([a, b, c]) + self.assertEqual([], filtered_targets) + + def test_filtering_no_tags(self): + a = self.make_target('a', tags=['tag1']) + b = self.make_target('b', tags=['tag1', 'tag2']) + c = self.make_target('c', tags=['tag2']) + + filtered_targets = TargetFiltering(set()).apply_tag_blacklist([a, b, c]) + self.assertEqual([a, b, c], filtered_targets) diff --git a/tests/python/pants_test/engine/BUILD b/tests/python/pants_test/engine/BUILD index c5849d62d7d..38b4d2cb4ad 100644 --- a/tests/python/pants_test/engine/BUILD +++ b/tests/python/pants_test/engine/BUILD @@ -156,6 +156,7 @@ python_tests( 'src/python/pants/build_graph', 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', + 'src/python/pants/engine:objects', 'src/python/pants/engine:struct', 'src/python/pants/util:dirutil', 'tests/python/pants_test/engine/examples:mapper_test', @@ -232,3 +233,12 @@ python_library( 'src/python/pants/util:dirutil', ] ) + +python_tests( + name='objects', + sources=['test_objects.py'], + dependencies=[ + 'src/python/pants/engine:objects', + 'tests/python/pants_test:test_base', + ], +) diff --git a/tests/python/pants_test/engine/legacy/BUILD b/tests/python/pants_test/engine/legacy/BUILD index 367f31219c2..308bd57441a 100644 --- 
a/tests/python/pants_test/engine/legacy/BUILD +++ b/tests/python/pants_test/engine/legacy/BUILD @@ -129,6 +129,7 @@ python_tests( 'src/python/pants/engine/legacy:graph', 'src/python/pants/init', 'tests/python/pants_test/engine:util', + 'tests/python/pants_test:test_base' ] ) diff --git a/tests/python/pants_test/engine/legacy/test_address_mapper.py b/tests/python/pants_test/engine/legacy/test_address_mapper.py index fef53234a73..0c2323d9ca5 100644 --- a/tests/python/pants_test/engine/legacy/test_address_mapper.py +++ b/tests/python/pants_test/engine/legacy/test_address_mapper.py @@ -40,15 +40,15 @@ def create_build_files(self): safe_mkdir(dir_b) safe_mkdir(dir_a_subdir) - safe_file_dump(os.path.join(self.build_root, 'BUILD'), 'target(name="a")\ntarget(name="b")', binary_mode=False) - safe_file_dump(os.path.join(self.build_root, 'BUILD.other'), 'target(name="c")', binary_mode=False) + safe_file_dump(os.path.join(self.build_root, 'BUILD'), 'target(name="a")\ntarget(name="b")', mode='w') + safe_file_dump(os.path.join(self.build_root, 'BUILD.other'), 'target(name="c")', mode='w') - safe_file_dump(os.path.join(dir_a, 'BUILD'), 'target(name="a")\ntarget(name="b")', binary_mode=False) - safe_file_dump(os.path.join(dir_a, 'BUILD.other'), 'target(name="c")', binary_mode=False) + safe_file_dump(os.path.join(dir_a, 'BUILD'), 'target(name="a")\ntarget(name="b")', mode='w') + safe_file_dump(os.path.join(dir_a, 'BUILD.other'), 'target(name="c")', mode='w') - safe_file_dump(os.path.join(dir_b, 'BUILD'), 'target(name="a")', binary_mode=False) + safe_file_dump(os.path.join(dir_b, 'BUILD'), 'target(name="a")', mode='w') - safe_file_dump(os.path.join(dir_a_subdir, 'BUILD'), 'target(name="a")', binary_mode=False) + safe_file_dump(os.path.join(dir_a_subdir, 'BUILD'), 'target(name="a")', mode='w') def test_is_valid_single_address(self): self.create_build_files() diff --git a/tests/python/pants_test/engine/legacy/test_console_rule_integration.py b/tests/python/pants_test/engine/legacy/test_console_rule_integration.py index c0e92db21b9..7c274b5fec8 100644 --- a/tests/python/pants_test/engine/legacy/test_console_rule_integration.py +++ b/tests/python/pants_test/engine/legacy/test_console_rule_integration.py @@ -78,7 +78,7 @@ def test_v2_list_loop(self): rel_tmpdir = fast_relpath(tmpdir, get_buildroot()) def dump(content): - safe_file_dump(os.path.join(tmpdir, 'BUILD'), content, mode="w") + safe_file_dump(os.path.join(tmpdir, 'BUILD'), content, mode='w') # Dump an initial target before starting the loop. 
dump('target(name="one")') diff --git a/tests/python/pants_test/engine/legacy/test_graph.py b/tests/python/pants_test/engine/legacy/test_graph.py index 18fb8e997e6..0ca73b013bc 100644 --- a/tests/python/pants_test/engine/legacy/test_graph.py +++ b/tests/python/pants_test/engine/legacy/test_graph.py @@ -6,23 +6,12 @@ import functools import os -import unittest from builtins import str -from contextlib import contextmanager -import mock - -from pants.build_graph.address import Address from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.build_file_aliases import BuildFileAliases, TargetMacro -from pants.build_graph.target import Target -from pants.init.engine_initializer import EngineInitializer -from pants.init.options_initializer import BuildConfigInitializer -from pants.init.target_roots_calculator import TargetRootsCalculator -from pants.option.options_bootstrapper import OptionsBootstrapper -from pants.subsystem.subsystem import Subsystem -from pants.util.contextutil import temporary_dir -from pants_test.engine.util import init_native +from pants.build_graph.files import Files +from pants_test.test_base import TestBase # Macro that adds the specified tag. @@ -32,164 +21,60 @@ def macro(target_cls, tag, parse_context, tags=None, **kwargs): parse_context.create_object(target_cls, tags=tags, **kwargs) -class GraphTestBase(unittest.TestCase): - - _native = init_native() - - def _make_setup_args(self, specs): - options = mock.Mock(target_specs=specs) - options.for_scope.return_value = mock.Mock(diffspec=None, changes_since=None) - options.for_global_scope.return_value = mock.Mock(owner_of=None) - return options - - def _default_build_config(self, options_bootstrapper, build_file_aliases=None): - # TODO: Get default BuildFileAliases by extending BaseTest post - # https://github.com/pantsbuild/pants/issues/4401 - build_config = BuildConfigInitializer.get(options_bootstrapper) - if build_file_aliases: - build_config.register_aliases(build_file_aliases) - return build_config - - @contextmanager - def graph_helper(self, - build_configuration=None, - build_file_imports_behavior='allow', - include_trace_on_error=True, - path_ignore_patterns=None): - - with temporary_dir() as work_dir: - with temporary_dir() as local_store_dir: - path_ignore_patterns = path_ignore_patterns or [] - options_bootstrapper = OptionsBootstrapper.create() - build_config = build_configuration or self._default_build_config(options_bootstrapper) - # TODO: This test should be swapped to using TestBase. 
- graph_helper = EngineInitializer.setup_legacy_graph_extended( - path_ignore_patterns, - work_dir, - local_store_dir, - build_file_imports_behavior, - options_bootstrapper=options_bootstrapper, - build_configuration=build_config, - native=self._native, - include_trace_on_error=include_trace_on_error - ) - yield graph_helper - - @contextmanager - def open_scheduler(self, specs, build_configuration=None): - with self.graph_helper(build_configuration=build_configuration) as graph_helper: - graph, target_roots = self.create_graph_from_specs(graph_helper, specs) - addresses = tuple(graph.inject_roots_closure(target_roots)) - yield graph, addresses, graph_helper.scheduler.new_session() - - def create_graph_from_specs(self, graph_helper, specs): - Subsystem.reset() - session = graph_helper.new_session() - target_roots = self.create_target_roots(specs, session, session.symbol_table) - graph = session.create_build_graph(target_roots)[0] - return graph, target_roots - - def create_target_roots(self, specs, session, symbol_table): - return TargetRootsCalculator.create(self._make_setup_args(specs), session, symbol_table) - - -class GraphTargetScanFailureTests(GraphTestBase): +class GraphTest(TestBase): + + _TAG = 'tag_added_by_macro' + + @classmethod + def alias_groups(cls): + return super(GraphTest, cls).alias_groups().merge( + BuildFileAliases(targets={ + 'files': Files, + 'tagged_files': TargetMacro.Factory.wrap(functools.partial(macro, Files, cls._TAG), Files), + })) def test_with_missing_target_in_existing_build_file(self): + self.create_library('3rdparty/python', 'target', 'Markdown') + self.create_library('3rdparty/python', 'target', 'Pygments') # When a target is missing, # the suggestions should be in order # and there should only be one copy of the error if tracing is off. - with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper(include_trace_on_error=False) as graph_helper: - self.create_graph_from_specs(graph_helper, ['3rdparty/python:rutabaga']) - self.fail('Expected an exception.') - - error_message = str(cm.exception) expected_message = '"rutabaga" was not found in namespace "3rdparty/python".' \ - ' Did you mean one of:\n' \ - ' :Markdown\n' \ - ' :Pygments\n' - self.assertIn(expected_message, error_message) - self.assertTrue(error_message.count(expected_message) == 1) + '.*Did you mean one of:\n' \ + '.*:Markdown\n' \ + '.*:Pygments\n' + with self.assertRaisesRegexp(AddressLookupError, expected_message): + self.targets('3rdparty/python:rutabaga') def test_with_missing_directory_fails(self): with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper() as graph_helper: - self.create_graph_from_specs(graph_helper, ['no-such-path:']) + self.targets('no-such-path:') self.assertIn('Path "no-such-path" does not contain any BUILD files', str(cm.exception)) - def test_with_existing_directory_with_no_build_files_fails(self): - with self.assertRaises(AddressLookupError) as cm: - path_ignore_patterns=[ - # This is a symlink that points out of the build root. 
- '/build-support/bin/native/src' - ] - with self.graph_helper(path_ignore_patterns=path_ignore_patterns) as graph_helper: - self.create_graph_from_specs(graph_helper, ['build-support/bin::']) - - self.assertIn('does not match any targets.', str(cm.exception)) - - def test_inject_bad_dir(self): - with self.assertRaises(AddressLookupError) as cm: - with self.graph_helper() as graph_helper: - graph, target_roots = self.create_graph_from_specs(graph_helper, ['3rdparty/python:']) - - graph.inject_address_closure(Address('build-support/bin', 'wat')) - - self.assertIn('Path "build-support/bin" does not contain any BUILD files', - str(cm.exception)) - - -class GraphInvalidationTest(GraphTestBase): - def test_invalidate_fsnode(self): # NB: Invalidation is now more directly tested in unit tests in the `graph` crate. - with self.open_scheduler(['3rdparty/python::']) as (_, _, scheduler): - invalidated_count = scheduler.invalidate_files(['3rdparty/python/BUILD']) - self.assertGreater(invalidated_count, 0) + self.create_library('src/example', 'target', 'things') + self.targets('src/example::') + invalidated_count = self.invalidate_for('src/example/BUILD') + self.assertGreater(invalidated_count, 0) - def test_invalidate_fsnode_incremental(self): - # NB: Invalidation is now more directly tested in unit tests in the `graph` crate. - with self.open_scheduler(['//:', '3rdparty/::']) as (_, _, scheduler): - # Invalidate the '3rdparty/python' DirectoryListing, the `3rdparty` DirectoryListing, - # and then the root DirectoryListing by "touching" files/dirs. - for filename in ('3rdparty/python/BUILD', '3rdparty/jvm', 'non_existing_file'): - invalidated_count = scheduler.invalidate_files([filename]) - self.assertGreater(invalidated_count, - 0, - 'File {} did not invalidate any Nodes.'.format(filename)) - - def _ordering_test(self, spec, expected_sources=None): - expected_sources = expected_sources or ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] - with self.open_scheduler([spec]) as (graph, _, _): - target = graph.get_target(Address.parse(spec)) - sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()] - self.assertEqual(expected_sources, sources) - - def test_sources_ordering_literal(self): - self._ordering_test('testprojects/src/resources/org/pantsbuild/testproject/ordering:literal') - - def test_sources_ordering_glob(self): - self._ordering_test('testprojects/src/resources/org/pantsbuild/testproject/ordering:globs') + def test_sources_ordering(self): + input_sources = ['p', 'a', 'n', 't', 's', 'b', 'u', 'i', 'l', 'd'] + expected_sources = sorted(input_sources) + self.create_library('src/example', 'files', 'things', sources=input_sources) + + target = self.target('src/example:things') + sources = [os.path.basename(s) for s in target.sources_relative_to_buildroot()] + self.assertEqual(expected_sources, sources) def test_target_macro_override(self): """Tests that we can "wrap" an existing target type with additional functionality. Installs an additional TargetMacro that wraps `target` aliases to add a tag to all definitions. """ - spec = 'testprojects/tests/python/pants/build_parsing:' - - tag = 'tag_added_by_macro' - target_cls = Target - tag_macro = functools.partial(macro, target_cls, tag) - target_symbols = {'target': TargetMacro.Factory.wrap(tag_macro, target_cls)} - - build_config = self._default_build_config(OptionsBootstrapper.create(), BuildFileAliases(targets=target_symbols)) - # Confirm that python_tests in a small directory are marked. 
- with self.open_scheduler([spec], build_configuration=build_config) as (graph, addresses, _): - self.assertTrue(len(addresses) > 0, 'No targets matched by {}'.format(addresses)) - for address in addresses: - self.assertIn(tag, graph.get_target(address).tags) + files = self.create_library('src/example', 'tagged_files', 'things') + self.assertIn(self._TAG, files.tags) + self.assertEqual(type(files), Files) diff --git a/tests/python/pants_test/engine/legacy/test_graph_integration.py b/tests/python/pants_test/engine/legacy/test_graph_integration.py index dc8751c068e..f517d3953c0 100644 --- a/tests/python/pants_test/engine/legacy/test_graph_integration.py +++ b/tests/python/pants_test/engine/legacy/test_graph_integration.py @@ -61,12 +61,12 @@ def _list_target_check_warnings_sources(self, target_name): _ERR_TARGETS = { 'testprojects/src/python/sources:some-missing-some-not': [ "globs('*.txt', '*.rs')", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.txt\', {unicode_literal}\'testprojects/src/python/sources/*.rs\'), exclude=(), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.txt\', {unicode_literal}\'testprojects/src/python/sources/*.rs\'), exclude=(), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: []. Unmatched globs were: [\"testprojects/src/python/sources/*.rs\"].", ], 'testprojects/src/python/sources:missing-sources': [ "*.scala", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.scala\',), exclude=({unicode_literal}\'testprojects/src/python/sources/*Test.scala\', {unicode_literal}\'testprojects/src/python/sources/*Spec.scala\'), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=any_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/python/sources/*.scala\',), exclude=({unicode_literal}\'testprojects/src/python/sources/*Test.scala\', {unicode_literal}\'testprojects/src/python/sources/*Spec.scala\'), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=any_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: [\"testprojects/src/python/sources/*Test.scala\", \"testprojects/src/python/sources/*Spec.scala\"]. 
Unmatched globs were: [\"testprojects/src/python/sources/*.scala\"].", ], 'testprojects/src/java/org/pantsbuild/testproject/bundle:missing-bundle-fileset': [ @@ -75,7 +75,7 @@ def _list_target_check_warnings_sources(self, target_name): "Globs('*.aaaa')", "ZGlobs('**/*.abab')", "['file1.aaaa', 'file2.aaaa']", - "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\',), exclude=(), glob_match_error_behavior<=GlobMatchErrorBehavior>=GlobMatchErrorBehavior(failure_behavior=error), conjunction<=GlobExpansionConjunction>=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), + "Snapshot(PathGlobs(include=({unicode_literal}\'testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\',), exclude=(), glob_match_error_behavior=GlobMatchErrorBehavior(failure_behavior=error), conjunction=GlobExpansionConjunction(conjunction=all_match)))".format(unicode_literal='u' if PY2 else ''), "Globs did not match. Excludes were: []. Unmatched globs were: [\"testprojects/src/java/org/pantsbuild/testproject/bundle/*.aaaa\"].", ] } @@ -138,6 +138,14 @@ def test_existing_bundles(self): self.assert_success(pants_run) self.assertNotIn("WARN]", pants_run.stderr_data) + def test_existing_directory_with_no_build_files_fails(self): + options = [ + '--pants-ignore=+["/build-support/bin/native/src"]', + ] + pants_run = self.run_pants(options + ['list', 'build-support/bin::']) + self.assert_failure(pants_run) + self.assertIn("does not match any targets.", pants_run.stderr_data) + def test_error_message(self): for k in self._ERR_TARGETS: self._list_target_check_error(k) diff --git a/tests/python/pants_test/engine/test_build_files.py b/tests/python/pants_test/engine/test_build_files.py index 1c5c94850f8..625d3c7ab57 100644 --- a/tests/python/pants_test/engine/test_build_files.py +++ b/tests/python/pants_test/engine/test_build_files.py @@ -8,14 +8,13 @@ import re import unittest -from pants.base.project_tree import Dir, File +from pants.base.project_tree import Dir from pants.base.specs import SiblingAddresses, SingleAddress, Specs from pants.build_graph.address import Address from pants.engine.addressable import addressable, addressable_dict from pants.engine.build_files import (ResolvedTypeMismatchError, addresses_from_address_families, create_graph_rules, parse_address_family) -from pants.engine.fs import (Digest, FileContent, FilesContent, Path, PathGlobs, Snapshot, - create_fs_rules) +from pants.engine.fs import Digest, FileContent, FilesContent, PathGlobs, Snapshot, create_fs_rules from pants.engine.legacy.structs import TargetAdaptor from pants.engine.mapper import AddressFamily, AddressMapper, ResolveError from pants.engine.nodes import Return, Throw @@ -34,7 +33,7 @@ def test_empty(self): """Test that parsing an empty BUILD file results in an empty AddressFamily.""" address_mapper = AddressMapper(JsonParser(TestTable())) af = run_rule(parse_address_family, address_mapper, Dir('/dev/null'), { - (Snapshot, PathGlobs): lambda _: Snapshot(Digest('abc', 10), (File('/dev/null/BUILD'),)), + (Snapshot, PathGlobs): lambda _: Snapshot(Digest('abc', 10), ('/dev/null/BUILD',), ()), (FilesContent, Digest): lambda _: FilesContent([FileContent('/dev/null/BUILD', b'')]), }) self.assertEqual(len(af.objects_by_name), 0) @@ -46,9 +45,7 @@ def _address_mapper(self): return AddressMapper(JsonParser(TestTable())) def _snapshot(self): - return Snapshot( - Digest('xx', 2), - (Path('root/BUILD', File('root/BUILD')),)) + return 
Snapshot(Digest('xx', 2), ('root/BUILD',), ()) def _resolve_build_file_addresses(self, specs, address_family, snapshot, address_mapper): return run_rule(addresses_from_address_families, address_mapper, specs, { @@ -59,8 +56,7 @@ def _resolve_build_file_addresses(self, specs, address_family, snapshot, address def test_duplicated(self): """Test that matching the same Spec twice succeeds.""" address = SingleAddress('a', 'a') - snapshot = Snapshot(Digest('xx', 2), - (Path('a/BUILD', File('a/BUILD')),)) + snapshot = Snapshot(Digest('xx', 2), ('a/BUILD',), ()) address_family = AddressFamily('a', {'a': ('a/BUILD', 'this is an object!')}) specs = Specs([address, address]) diff --git a/tests/python/pants_test/engine/test_engine.py b/tests/python/pants_test/engine/test_engine.py index 1944c2e3260..9ad331b0aa9 100644 --- a/tests/python/pants_test/engine/test_engine.py +++ b/tests/python/pants_test/engine/test_engine.py @@ -123,8 +123,8 @@ def test_include_trace_error_raises_error_with_trace(self): self.assert_equal_with_printing(dedent(''' 1 Exception encountered: - Computing Select(, =A) - Computing Task(nested_raise, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(nested_raise, , Exactly(A), true) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call @@ -175,8 +175,8 @@ def a_from_c_and_d(c, d): self.assert_equal_with_printing(dedent(''' 1 Exception encountered: - Computing Select(, =A) - Computing Task(a_from_c_and_d, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(a_from_c_and_d, , Exactly(A), true) Computing Task(d_from_b_nested_raise, , =D, true) Throw(An exception for B) Traceback (most recent call last): @@ -189,8 +189,8 @@ def a_from_c_and_d(c, d): Exception: An exception for B - Computing Select(, =A) - Computing Task(a_from_c_and_d, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(a_from_c_and_d, , Exactly(A), true) Computing Task(c_from_b_nested_raise, , =C, true) Throw(An exception for B) Traceback (most recent call last): diff --git a/tests/python/pants_test/engine/test_fs.py b/tests/python/pants_test/engine/test_fs.py index 4f6c8cfaa8d..ecfdf29afc6 100644 --- a/tests/python/pants_test/engine/test_fs.py +++ b/tests/python/pants_test/engine/test_fs.py @@ -61,7 +61,7 @@ def assert_walk_snapshot(self, field, filespecs_or_globs, paths, ignore_patterns if prepare: prepare(project_tree) result = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0] - self.assertEqual(sorted([p.path for p in getattr(result, field)]), sorted(paths)) + self.assertEqual(sorted(getattr(result, field)), sorted(paths)) def assert_content(self, filespecs_or_globs, expected_content): with self.mk_project_tree() as project_tree: @@ -76,7 +76,7 @@ def assert_digest(self, filespecs_or_globs, expected_files): scheduler = self.mk_scheduler(rules=create_fs_rules(), project_tree=project_tree) result = self.execute(scheduler, Snapshot, self.specs(filespecs_or_globs))[0] # Confirm all expected files were digested. 
- self.assertEqual(set(expected_files), {f.path for f in result.files}) + self.assertEqual(set(expected_files), set(result.files)) self.assertTrue(result.directory_digest.fingerprint is not None) def test_walk_literal(self): @@ -270,7 +270,7 @@ def test_snapshot_from_outside_buildroot_failure(self): self.assertIn("doesnotexist", str(cm.exception)) def assert_snapshot_equals(self, snapshot, files, directory_digest): - self.assertEqual([file.path for file in snapshot.files], files) + self.assertEqual(list(snapshot.files), files) self.assertEqual(snapshot.directory_digest, directory_digest) def test_merge_zero_directories(self): diff --git a/tests/python/pants_test/engine/test_isolated_process.py b/tests/python/pants_test/engine/test_isolated_process.py index 5a47b1412d0..3e62fda72f7 100644 --- a/tests/python/pants_test/engine/test_isolated_process.py +++ b/tests/python/pants_test/engine/test_isolated_process.py @@ -53,7 +53,7 @@ def bin_path(self): return self.binary_location.bin_path def argv_from_snapshot(self, snapshot): - cat_file_paths = [f.path for f in snapshot.files] + cat_file_paths = snapshot.files option_like_files = [p for p in cat_file_paths if p.startswith('-')] if option_like_files: @@ -138,9 +138,7 @@ def bin_path(self): return self.binary_location.bin_path def argv_from_source_snapshot(self, snapshot): - snapshot_file_paths = [f.path for f in snapshot.files] - - return (self.bin_path,) + tuple(snapshot_file_paths) + return (self.bin_path,) + snapshot.files class JavacCompileResult(datatype([ diff --git a/tests/python/pants_test/engine/test_mapper.py b/tests/python/pants_test/engine/test_mapper.py index 3c8ce754d36..716d0bcd5ec 100644 --- a/tests/python/pants_test/engine/test_mapper.py +++ b/tests/python/pants_test/engine/test_mapper.py @@ -17,12 +17,12 @@ from pants.engine.fs import create_fs_rules from pants.engine.mapper import (AddressFamily, AddressMap, AddressMapper, DifferingFamiliesError, DuplicateNameError, UnaddressableObjectError) +from pants.engine.objects import Collection from pants.engine.parser import SymbolTable from pants.engine.rules import rule from pants.engine.selectors import Get, Select from pants.engine.struct import Struct from pants.util.dirutil import safe_open -from pants.util.objects import Collection from pants_test.engine.examples.parsers import JsonParser from pants_test.engine.scheduler_test_base import SchedulerTestBase from pants_test.engine.util import Target, TargetTable diff --git a/tests/python/pants_test/engine/test_objects.py b/tests/python/pants_test/engine/test_objects.py new file mode 100644 index 00000000000..0a194edeab8 --- /dev/null +++ b/tests/python/pants_test/engine/test_objects.py @@ -0,0 +1,33 @@ +# coding=utf-8 +# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md). +# Licensed under the Apache License, Version 2.0 (see LICENSE). 
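+# Covers iteration over engine Collection instances and the element typechecking
+# performed by Collection.of(...).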
+ +from __future__ import absolute_import, division, print_function, unicode_literals + +import re + +from future.utils import PY3, text_type + +from pants.engine.objects import Collection +from pants.util.objects import TypeCheckError +from pants_test.test_base import TestBase + + +class CollectionTest(TestBase): + def test_collection_iteration(self): + self.assertEqual([1, 2], [x for x in Collection.of(int)([1, 2])]) + + def test_element_typechecking(self): + IntColl = Collection.of(int) + with self.assertRaisesRegexp(TypeCheckError, re.escape("""\ +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'hello']: value {u}'hello' (with type '{string_type}') must satisfy this type constraint: Exactly(int).""" + .format(u='' if PY3 else 'u', + string_type='str' if PY3 else 'unicode'))): + IntColl([3, "hello"]) + + IntOrStringColl = Collection.of(int, text_type) + self.assertEqual([3, "hello"], [x for x in IntOrStringColl([3, "hello"])]) + with self.assertRaisesRegexp(TypeCheckError, re.escape("""\ +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int or {string_type})) matching iterable object [()]: value () (with type 'tuple') must satisfy this type constraint: Exactly(int or {string_type}).""" + .format(string_type='str' if PY3 else 'unicode'))): + IntOrStringColl([()]) diff --git a/tests/python/pants_test/engine/test_scheduler.py b/tests/python/pants_test/engine/test_scheduler.py index 108d2bda5cc..586fbfb6e00 100644 --- a/tests/python/pants_test/engine/test_scheduler.py +++ b/tests/python/pants_test/engine/test_scheduler.py @@ -122,8 +122,8 @@ def test_trace_includes_rule_exception_traceback(self): trace = remove_locations_from_traceback(trace) assert_equal_with_printing(self, dedent(''' - Computing Select(, =A) - Computing Task(nested_raise, , =A, true) + Computing Select(, Exactly(A)) + Computing Task(nested_raise, , Exactly(A), true) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/jvm/jvm_task_test_base.py b/tests/python/pants_test/jvm/jvm_task_test_base.py index 5481ecc31ad..e48a0a2689a 100644 --- a/tests/python/pants_test/jvm/jvm_task_test_base.py +++ b/tests/python/pants_test/jvm/jvm_task_test_base.py @@ -53,7 +53,7 @@ def add_to_runtime_classpath(self, context, tgt, files_dict): safe_mkdir(target_dir) classpath_dir = safe_mkdtemp(dir=target_dir) for rel_path, content in files_dict.items(): - safe_file_dump(os.path.join(classpath_dir, rel_path), content, binary_mode=False) + safe_file_dump(os.path.join(classpath_dir, rel_path), content, mode='w') # Add to the classpath. 
runtime_classpath.add_for_target(tgt, [('default', classpath_dir)]) diff --git a/tests/python/pants_test/pantsd/test_pantsd_integration.py b/tests/python/pants_test/pantsd/test_pantsd_integration.py index bc1e0480c48..ffa1e3005b5 100644 --- a/tests/python/pants_test/pantsd/test_pantsd_integration.py +++ b/tests/python/pants_test/pantsd/test_pantsd_integration.py @@ -358,17 +358,17 @@ def test_pantsd_invalidation_stale_sources(self): pantsd_run(['help']) checker.assert_started() - safe_file_dump(test_build_file, "python_library(sources=globs('some_non_existent_file.py'))", binary_mode=False) + safe_file_dump(test_build_file, "python_library(sources=globs('some_non_existent_file.py'))", mode='w') result = pantsd_run(export_cmd) checker.assert_running() assertNotRegex(self, result.stdout_data, has_source_root_regex) - safe_file_dump(test_build_file, "python_library(sources=globs('*.py'))", binary_mode=False) + safe_file_dump(test_build_file, "python_library(sources=globs('*.py'))", mode='w') result = pantsd_run(export_cmd) checker.assert_running() assertNotRegex(self, result.stdout_data, has_source_root_regex) - safe_file_dump(test_src_file, 'import this\n', binary_mode=False) + safe_file_dump(test_src_file, 'import this\n', mode='w') result = pantsd_run(export_cmd) checker.assert_running() assertRegex(self, result.stdout_data, has_source_root_regex) @@ -385,7 +385,7 @@ def test_pantsd_parse_exception_success(self): try: safe_mkdir(test_path, clean=True) - safe_file_dump(test_build_file, "{}()".format(invalid_symbol), binary_mode=False) + safe_file_dump(test_build_file, "{}()".format(invalid_symbol), mode='w') for _ in range(3): with self.pantsd_run_context(success=False) as (pantsd_run, checker, _, _): result = pantsd_run(['list', 'testprojects::']) diff --git a/tests/python/pants_test/pantsd/test_process_manager.py b/tests/python/pants_test/pantsd/test_process_manager.py index 4886dc079ab..573546ec926 100644 --- a/tests/python/pants_test/pantsd/test_process_manager.py +++ b/tests/python/pants_test/pantsd/test_process_manager.py @@ -149,7 +149,7 @@ def test_deadline_until(self): def test_wait_for_file(self): with temporary_dir() as td: test_filename = os.path.join(td, 'test.out') - safe_file_dump(test_filename, 'test', binary_mode=False) + safe_file_dump(test_filename, 'test', mode='w') self.pmm._wait_for_file(test_filename, timeout=.1) def test_wait_for_file_timeout(self): diff --git a/tests/python/pants_test/pantsd/test_watchman.py b/tests/python/pants_test/pantsd/test_watchman.py index 0cab6a9653b..2a7a5294f44 100644 --- a/tests/python/pants_test/pantsd/test_watchman.py +++ b/tests/python/pants_test/pantsd/test_watchman.py @@ -57,12 +57,13 @@ def test_resolve_watchman_path_provided_exception(self): metadata_base_dir=self.subprocess_dir) def test_maybe_init_metadata(self): + # TODO(#7106): is this the right path to patch? 
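+    # NB: mock.patch replaces a name at the location where it is looked up, and watchman.py
+    # references safe_mkdir/safe_file_dump through its own module namespace, so patching
+    # 'pants.pantsd.watchman.*' appears to be the right target.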
with mock.patch('pants.pantsd.watchman.safe_mkdir', **self.PATCH_OPTS) as mock_mkdir, \ mock.patch('pants.pantsd.watchman.safe_file_dump', **self.PATCH_OPTS) as mock_file_dump: self.watchman._maybe_init_metadata() mock_mkdir.assert_called_once_with(self._watchman_dir) - mock_file_dump.assert_called_once_with(self._state_file, b'{}') + mock_file_dump.assert_called_once_with(self._state_file, b'{}', mode='wb') def test_construct_cmd(self): output = self.watchman._construct_cmd(['cmd', 'parts', 'etc'], diff --git a/tests/python/pants_test/reporting/BUILD b/tests/python/pants_test/reporting/BUILD index e5614eb33a5..eaaf806cc1b 100644 --- a/tests/python/pants_test/reporting/BUILD +++ b/tests/python/pants_test/reporting/BUILD @@ -22,7 +22,7 @@ python_tests( 'tests/python/pants_test:int-test', ], tags = {'integration'}, - timeout = 240, + timeout = 600, ) python_tests( diff --git a/tests/python/pants_test/reporting/test_reporting.py b/tests/python/pants_test/reporting/test_reporting.py index 3df1144481c..b221ed0a97a 100644 --- a/tests/python/pants_test/reporting/test_reporting.py +++ b/tests/python/pants_test/reporting/test_reporting.py @@ -94,3 +94,87 @@ def test_raise_if_no_parent_id_and_zipkin_endpoint_set(self): "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." in str(result.exception) ) + + def test_raise_if_parent_id_is_of_wrong_len_format(self): + parent_id = 'ff' + options = {'reporting': { + 'zipkin_trace_id': self.trace_id, + 'zipkin_parent_id': parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-parent-id must be a 16-character hex string. " + + "Got {}.".format(parent_id) + in str(result.exception) + ) + + def test_raise_if_trace_id_is_of_wrong_len_format(self): + trace_id = 'aa' + options = {'reporting': { + 'zipkin_trace_id': trace_id, + 'zipkin_parent_id': self.parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + in str(result.exception) + ) + + def test_raise_if_parent_id_is_of_wrong_ch_format(self): + parent_id = 'gggggggggggggggg' + options = {'reporting': { + 'zipkin_trace_id': self.trace_id, + 'zipkin_parent_id': parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-parent-id must be a 16-character hex string. 
" + + "Got {}.".format(parent_id) + in str(result.exception) + ) + + def test_raise_if_trace_id_is_of_wrong_ch_format(self): + trace_id = 'gggggggggggggggg' + options = {'reporting': { + 'zipkin_trace_id': trace_id, + 'zipkin_parent_id': self.parent_id, + 'zipkin_endpoint': self.zipkin_endpoint + }} + context = self.context(for_subsystems=[RunTracker, Reporting], options=options) + + run_tracker = RunTracker.global_instance() + reporting = Reporting.global_instance() + + with self.assertRaises(ValueError) as result: + reporting.initialize(run_tracker, context.options) + + self.assertTrue( + "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + + "Got {}.".format(trace_id) + in str(result.exception) + ) diff --git a/tests/python/pants_test/reporting/test_reporting_integration.py b/tests/python/pants_test/reporting/test_reporting_integration.py index 9f986f7924a..c09303aedf5 100644 --- a/tests/python/pants_test/reporting/test_reporting_integration.py +++ b/tests/python/pants_test/reporting/test_reporting_integration.py @@ -220,6 +220,23 @@ def test_zipkin_reporter_with_given_trace_id_parent_id(self): self.assertTrue(main_children) self.assertTrue(any(span['name'] == 'cloc' for span in main_children)) + def test_zipkin_reporter_with_zero_sample_rate(self): + ZipkinHandler = zipkin_handler() + with http_server(ZipkinHandler) as port: + endpoint = "http://localhost:{}".format(port) + command = [ + '--reporting-zipkin-endpoint={}'.format(endpoint), + '--reporting-zipkin-sample-rate=0.0', + 'cloc', + 'src/python/pants:version' + ] + + pants_run = self.run_pants(command) + self.assert_success(pants_run) + + num_of_traces = len(ZipkinHandler.traces) + self.assertEqual(num_of_traces, 0) + @staticmethod def find_spans_by_name(trace, name): return [span for span in trace if span['name'] == name] diff --git a/tests/python/pants_test/rules/test_test_integration.py b/tests/python/pants_test/rules/test_test_integration.py index 6f28e5bf31c..60127dadf22 100644 --- a/tests/python/pants_test/rules/test_test_integration.py +++ b/tests/python/pants_test/rules/test_test_integration.py @@ -72,9 +72,9 @@ def test_passing_python_test(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_pass.py . [100%] +testprojects/tests/python/pants/dummies/test_pass.py . =========================== 1 passed in SOME_TEXT =========================== @@ -92,9 +92,9 @@ def test_failing_python_test(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_fail.py F [100%] +testprojects/tests/python/pants/dummies/test_fail.py F =================================== FAILURES =================================== __________________________________ test_fail ___________________________________ @@ -120,9 +120,9 @@ def test_source_dep(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_with_source_dep.py . [100%] +testprojects/tests/python/pants/dummies/test_with_source_dep.py . =========================== 1 passed in SOME_TEXT =========================== @@ -139,9 +139,9 @@ def test_thirdparty_dep(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py . 
[100%] +testprojects/tests/python/pants/dummies/test_with_thirdparty_dep.py . =========================== 1 passed in SOME_TEXT =========================== @@ -160,9 +160,9 @@ def test_mixed_python_tests(self): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_fail.py F [100%] +testprojects/tests/python/pants/dummies/test_fail.py F =================================== FAILURES =================================== __________________________________ test_fail ___________________________________ @@ -177,9 +177,9 @@ def test_fail(): platform SOME_TEXT rootdir: SOME_TEXT plugins: SOME_TEXT -collected 1 item +collected 1 items -testprojects/tests/python/pants/dummies/test_pass.py . [100%] +testprojects/tests/python/pants/dummies/test_pass.py . =========================== 1 passed in SOME_TEXT =========================== diff --git a/tests/python/pants_test/source/test_payload_fields.py b/tests/python/pants_test/source/test_payload_fields.py index 7bb5ff42694..189bf1f9ff2 100644 --- a/tests/python/pants_test/source/test_payload_fields.py +++ b/tests/python/pants_test/source/test_payload_fields.py @@ -6,8 +6,7 @@ from future.utils import text_type -from pants.base.project_tree import File -from pants.engine.fs import Digest, Path, Snapshot +from pants.engine.fs import Digest, Snapshot from pants.source.payload_fields import SourcesField from pants.source.wrapped_globs import Globs, LazyFilesetWithSpec from pants_test.test_base import TestBase @@ -83,10 +82,7 @@ def test_passes_eager_fileset_with_spec_through(self): self.assertEqual(['foo/foo/a.txt'], list(sf.relative_to_buildroot())) digest = '56001a7e48555f156420099a99da60a7a83acc90853046709341bf9f00a6f944' - want_snapshot = Snapshot( - Digest(text_type(digest), 77), - (Path('foo/foo/a.txt', stat=File('foo/foo/a.txt')),) - ) + want_snapshot = Snapshot(Digest(text_type(digest), 77), ('foo/foo/a.txt',), ()) # We explicitly pass a None scheduler because we expect no scheduler lookups to be required # in order to get a Snapshot. 
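
# N.B.: the test_payload_fields.py hunk above tracks an engine API change: a Snapshot is now
# constructed from a Digest plus flat tuples of file and directory paths, rather than Path
# objects wrapping File stats. A minimal, self-contained sketch of the new shape, using
# hypothetical namedtuple stand-ins rather than the real pants.engine.fs types:
#
#   from collections import namedtuple
#
#   # Stand-ins for illustration only; the real Digest/Snapshot live in pants.engine.fs.
#   Digest = namedtuple('Digest', ['fingerprint', 'serialized_bytes_length'])
#   Snapshot = namedtuple('Snapshot', ['directory_digest', 'files', 'dirs'])
#
#   want_snapshot = Snapshot(
#     Digest('56001a7e48555f156420099a99da60a7a83acc90853046709341bf9f00a6f944', 77),
#     ('foo/foo/a.txt',),  # files: plain relative paths now
#     (),                  # dirs: likewise plain paths, no stat objects
#   )
#   assert want_snapshot.files == ('foo/foo/a.txt',)
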
diff --git a/tests/python/pants_test/source/test_wrapped_globs.py b/tests/python/pants_test/source/test_wrapped_globs.py index c4a0a01500e..331f72af843 100644 --- a/tests/python/pants_test/source/test_wrapped_globs.py +++ b/tests/python/pants_test/source/test_wrapped_globs.py @@ -266,10 +266,6 @@ def test_source_snapshot(self): self.add_to_build_file('package/dir', 'files(name = "target", sources = ["foo"])') target = self.target('package/dir:target') snapshot = target.sources_snapshot(scheduler=self.scheduler) - snapshot_paths = tuple(file.path for file in snapshot.path_stats) - self.assertEqual( - ('package/dir/foo',), - snapshot_paths - ) + self.assertEqual(('package/dir/foo',), snapshot.files) self.assertEqual(target.sources_relative_to_target_base().files, ('foo',)) self.assertEqual(target.sources_relative_to_buildroot(), ['package/dir/foo']) diff --git a/tests/python/pants_test/task/test_simple_codegen_task.py b/tests/python/pants_test/task/test_simple_codegen_task.py index 01d8143bd5a..3aecf4fd82f 100644 --- a/tests/python/pants_test/task/test_simple_codegen_task.py +++ b/tests/python/pants_test/task/test_simple_codegen_task.py @@ -14,6 +14,7 @@ from pants.build_graph.target import Target from pants.task.simple_codegen_task import SimpleCodegenTask from pants.util.dirutil import safe_mkdtemp +from pants.util.objects import datatype from pants_test.task_test_base import TaskTestBase, ensure_cached @@ -113,6 +114,12 @@ def _copy_target_attributes(self): return ['copied'] +class DummyVersionedTarget(datatype(['target', 'results_dir'])): + @property + def current_results_dir(self): + return self.results_dir + + class SimpleCodegenTaskTest(TaskTestBase): @classmethod @@ -212,12 +219,13 @@ def _do_test_duplication(self, targets, allow_dups, should_fail): def execute(): for target in targets: target_workdir = target_workdirs[target] + vt = DummyVersionedTarget(target, target_workdir) task.execute_codegen(target, target_workdir) - sources = task._capture_sources(((target, target_workdir),))[0] - task._handle_duplicate_sources(target, target_workdir, sources) + sources = task._capture_sources((vt,))[0] + task._handle_duplicate_sources(vt, sources) # _handle_duplicate_sources may delete files from the filesystem, so we need to re-capture. - sources = task._capture_sources(((target, target_workdir),))[0] - syn_targets.append(task._inject_synthetic_target(target, target_workdir, sources)) + sources = task._capture_sources((vt,))[0] + syn_targets.append(task._inject_synthetic_target(vt, sources)) if should_fail: # If we're expected to fail, validate the resulting message. 
diff --git a/tests/python/pants_test/task/test_task.py b/tests/python/pants_test/task/test_task.py index d86ed5dc155..d80bf34141f 100644 --- a/tests/python/pants_test/task/test_task.py +++ b/tests/python/pants_test/task/test_task.py @@ -13,6 +13,7 @@ from pants.base.exceptions import TaskError from pants.build_graph.build_file_aliases import BuildFileAliases from pants.build_graph.files import Files +from pants.build_graph.target_filter_subsystem import TargetFilter from pants.cache.cache_setup import CacheSetup from pants.option.arg_splitter import GLOBAL_SCOPE from pants.subsystem.subsystem import Subsystem @@ -149,6 +150,11 @@ def execute(self): pass +class TaskWithTargetFiltering(DummyTask): + options_scope = 'task-with-target-filtering' + target_filtering_enabled = True + + class TaskTest(TaskTestBase): _filename = 'f' @@ -650,3 +656,9 @@ def test_fingerprint_transitive(self): fp3 = self._synth_fp(cls=TaskWithTransitiveSubsystemDependencies, options_fingerprintable=option_spec) self.assertNotEqual(fp1, fp3) + + def test_target_filtering_enabled(self): + self.assertNotIn(TargetFilter.scoped(DummyTask), + DummyTask.subsystem_dependencies()) + self.assertIn(TargetFilter.scoped(TaskWithTargetFiltering), + TaskWithTargetFiltering.subsystem_dependencies()) diff --git a/tests/python/pants_test/tasks/BUILD b/tests/python/pants_test/tasks/BUILD index 44cf4c6b7e6..52705df8233 100644 --- a/tests/python/pants_test/tasks/BUILD +++ b/tests/python/pants_test/tasks/BUILD @@ -1,20 +1,6 @@ # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). -python_library( - name = 'task_test_base', - sources = ['task_test_base.py'], - dependencies = [ - 'src/python/pants/base:deprecated', - 'src/python/pants/goal:context', - 'src/python/pants/ivy', - 'src/python/pants/task', - 'src/python/pants/util:contextutil', - 'src/python/pants/util:process_handler', - 'tests/python/pants_test:base_test', - ] -) - python_tests( name = 'scalastyle_integration', sources = ['test_scalastyle_integration.py'], diff --git a/tests/python/pants_test/tasks/task_test_base.py b/tests/python/pants_test/tasks/task_test_base.py deleted file mode 100644 index 0a634187aa7..00000000000 --- a/tests/python/pants_test/tasks/task_test_base.py +++ /dev/null @@ -1,305 +0,0 @@ -# coding=utf-8 -# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md). -# Licensed under the Apache License, Version 2.0 (see LICENSE). - -from __future__ import absolute_import, division, print_function, unicode_literals - -import glob -import os -from contextlib import closing, contextmanager -from io import BytesIO - -from future.utils import PY2 - -from pants.base.deprecated import deprecated_module -from pants.goal.goal import Goal -from pants.ivy.bootstrapper import Bootstrapper -from pants.task.console_task import ConsoleTask -from pants.util.contextutil import temporary_dir -from pants.util.process_handler import subprocess -from pants_test.base_test import BaseTest - - -deprecated_module('1.12.0.dev2', 'Use pants_test.TaskTestBase instead') - - -# TODO: Find a better home for this? -def is_exe(name): - result = subprocess.call(['which', name], stdout=open(os.devnull, 'w'), stderr=subprocess.STDOUT) - return result == 0 - - -def ensure_cached(task_cls, expected_num_artifacts=None): - """Decorator for a task-executing unit test. Asserts that after running the - decorated test function, the cache for task_cls contains - expected_num_artifacts. 
- - Uses a new temp dir for the artifact cache, and uses a glob based on the - task's synthesized subtype to find the cache directories within the new temp - dir which were generated by the actions performed within the test method. - - :API: public - - :param task_cls: Class of the task to check the artifact cache - for (e.g. JarCreate). - :param expected_num_artifacts: Expected number of artifacts to be in the - task's cache after running the test. If - unspecified, will assert that the number of - artifacts in the cache is non-zero. - """ - def decorator(test_fn): - def wrapper(self, *args, **kwargs): - with self.cache_check(expected_num_artifacts=expected_num_artifacts): - test_fn(self, *args, **kwargs) - return wrapper - return decorator - - -class TaskTestBase(BaseTest): - """A baseclass useful for testing a single Task type. - - :API: public - """ - - options_scope = 'test_scope' - - @classmethod - def task_type(cls): - """Subclasses must return the type of the Task subclass under test. - - :API: public - """ - raise NotImplementedError() - - def setUp(self): - """ - :API: public - """ - super(TaskTestBase, self).setUp() - self._testing_task_type = self.synthesize_task_subtype(self.task_type(), self.options_scope) - # We locate the workdir below the pants_workdir, which BaseTest locates within the BuildRoot. - # BaseTest cleans this up, so we don't need to. We give it a stable name, so that we can - # use artifact caching to speed up tests. - self._test_workdir = os.path.join(self.pants_workdir, self.task_type().stable_name()) - os.mkdir(self._test_workdir) - # TODO: Push this down to JVM-related tests only? Seems wrong to have an ivy-specific - # action in this non-JVM-specific, high-level base class. - Bootstrapper.reset_instance() - - @property - def test_workdir(self): - """ - :API: public - """ - return self._test_workdir - - def synthesize_task_subtype(self, task_type, options_scope): - """Creates a synthetic subclass of the task type. - - Note that passing in a stable options scope will speed up some tests, as the scope may appear - in the paths of tools used by the task, and if these are stable, tests can get artifact - cache hits when bootstrapping these tools. This doesn't hurt test isolation, as we reset - class-level state between each test. - - # TODO: Use the task type directly once we re-do the Task lifecycle. - - :API: public - - :param task_type: The task type to subtype. - :param options_scope: The scope to give options on the generated task type. - :return: A pair (type, options_scope) - """ - subclass_name = 'test_{0}_{1}'.format(task_type.__name__, options_scope) - if PY2: - subclass_name = subclass_name.encode('utf-8') - return type(subclass_name, (task_type,), {'_stable_name': task_type._compute_stable_name(), - 'options_scope': options_scope}) - - def set_options(self, **kwargs): - """ - :API: public - """ - self.set_options_for_scope(self.options_scope, **kwargs) - - def context(self, for_task_types=None, **kwargs): - """ - :API: public - """ - # Add in our task type. - for_task_types = [self._testing_task_type] + (for_task_types or []) - return super(TaskTestBase, self).context(for_task_types=for_task_types, **kwargs) - - def create_task(self, context, workdir=None): - """ - :API: public - """ - if workdir is None: - workdir = self.test_workdir - return self._testing_task_type(context, workdir) - - @contextmanager - def cache_check(self, expected_num_artifacts=None): - """Sets up a temporary artifact cache and checks that the yielded-to code populates it. 
- - :param expected_num_artifacts: Expected number of artifacts to be in the cache after yielding. - If unspecified, will assert that the number of artifacts in the - cache is non-zero. - """ - with temporary_dir() as artifact_cache: - self.set_options_for_scope('cache.{}'.format(self.options_scope), - write_to=[artifact_cache]) - - yield - - cache_subdir_glob_str = os.path.join(artifact_cache, '*/') - cache_subdirs = glob.glob(cache_subdir_glob_str) - - if expected_num_artifacts == 0: - self.assertEqual(len(cache_subdirs), 0) - return - - self.assertEqual(len(cache_subdirs), 1) - task_cache = cache_subdirs[0] - - num_artifacts = 0 - for (_, _, files) in os.walk(task_cache): - num_artifacts += len(files) - - if expected_num_artifacts is None: - self.assertNotEqual(num_artifacts, 0) - else: - self.assertEqual(num_artifacts, expected_num_artifacts) - - -class ConsoleTaskTestBase(TaskTestBase): - """A base class useful for testing ConsoleTasks. - - :API: public - """ - - def setUp(self): - """ - :API: public - """ - Goal.clear() - super(ConsoleTaskTestBase, self).setUp() - - task_type = self.task_type() - assert issubclass(task_type, ConsoleTask), \ - 'task_type() must return a ConsoleTask subclass, got %s' % task_type - - def execute_task(self, targets=None, options=None): - """Creates a new task and executes it with the given config, command line args and targets. - - :API: public - - :param targets: Optional list of Target objects passed on the command line. - Returns the text output of the task. - """ - options = options or {} - with closing(BytesIO()) as output: - self.set_options(**options) - context = self.context(target_roots=targets, console_outstream=output) - task = self.create_task(context) - task.execute() - return output.getvalue() - - def execute_console_task(self, targets=None, extra_targets=None, options=None, - passthru_args=None, workspace=None, scheduler=None): - """Creates a new task and executes it with the given config, command line args and targets. - - :API: public - - :param options: option values. - :param targets: optional list of Target objects passed on the command line. - :param extra_targets: optional list of extra targets in the context in addition to those - passed on the command line. - :param passthru_args: optional list of passthru_args - :param workspace: optional Workspace to pass into the context. - - Returns the list of items returned from invoking the console task's console_output method. - """ - options = options or {} - self.set_options(**options) - context = self.context( - target_roots=targets, - passthru_args=passthru_args, - workspace=workspace, - scheduler=scheduler - ) - return self.execute_console_task_given_context(context, extra_targets=extra_targets) - - def execute_console_task_given_context(self, context, extra_targets=None): - """Creates a new task and executes it with the context and extra targets. - - :API: public - - :param context: The pants run context to use. - :param extra_targets: An optional list of extra targets in the context in addition to those - passed on the command line. - :returns: The list of items returned from invoking the console task's console_output method. - :rtype: list of strings - """ - task = self.create_task(context) - return list(task.console_output(list(task.context.targets()) + list(extra_targets or ()))) - - def assert_entries(self, sep, *output, **kwargs): - """Verifies the expected output text is flushed by the console task under test. - - NB: order of entries is not tested, just presence. 
- - :API: public - - sep: the expected output separator. - *output: the output entries expected between the separators - **options: additional options passed to execute_task. - """ - # We expect each output line to be suffixed with the separator, so for , and [1,2,3] we expect: - # '1,2,3,' - splitting this by the separator we should get ['1', '2', '3', ''] - always an extra - # empty string if the separator is properly always a suffix and not applied just between - # entries. - self.assertEqual(sorted(list(output) + ['']), sorted((self.execute_task(**kwargs)).split(sep))) - - def assert_console_output(self, *output, **kwargs): - """Verifies the expected output entries are emitted by the console task under test. - - NB: order of entries is not tested, just presence. - - :API: public - - *output: the expected output entries - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertEqual(sorted(output), sorted(self.execute_console_task(**kwargs))) - - def assert_console_output_contains(self, output, **kwargs): - """Verifies the expected output string is emitted by the console task under test. - - :API: public - - output: the expected output entry(ies) - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertIn(output, self.execute_console_task(**kwargs)) - - def assert_console_output_ordered(self, *output, **kwargs): - """Verifies the expected output entries are emitted by the console task under test. - - NB: order of entries is tested. - - :API: public - - *output: the expected output entries in expected order - **kwargs: additional kwargs passed to execute_console_task. - """ - self.assertEqual(list(output), self.execute_console_task(**kwargs)) - - def assert_console_raises(self, exception, **kwargs): - """Verifies the expected exception is raised by the console task under test. - - :API: public - - **kwargs: additional kwargs are passed to execute_console_task. - """ - with self.assertRaises(exception): - self.execute_console_task(**kwargs) diff --git a/tests/python/pants_test/test_base.py b/tests/python/pants_test/test_base.py index a3b3eb7c01c..abe240977a2 100644 --- a/tests/python/pants_test/test_base.py +++ b/tests/python/pants_test/test_base.py @@ -107,7 +107,7 @@ def create_dir(self, relpath): """ path = os.path.join(self.build_root, relpath) safe_mkdir(path) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path def create_workdir_dir(self, relpath): @@ -119,10 +119,10 @@ def create_workdir_dir(self, relpath): """ path = os.path.join(self.pants_workdir, relpath) safe_mkdir(path) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path - def _invalidate_for(self, *relpaths): + def invalidate_for(self, *relpaths): """Invalidates all files from the relpath, recursively up to the root. Many python operations implicitly create parent directories, so we assume that touching a @@ -131,7 +131,7 @@ def _invalidate_for(self, *relpaths): if self._scheduler is None: return files = {f for relpath in relpaths for f in recursive_dirname(relpath)} - self._scheduler.invalidate_files(files) + return self._scheduler.invalidate_files(files) def create_link(self, relsrc, reldst): """Creates a symlink within the buildroot. 
@@ -144,7 +144,7 @@ def create_link(self, relsrc, reldst): src = os.path.join(self.build_root, relsrc) dst = os.path.join(self.build_root, reldst) relative_symlink(src, dst) - self._invalidate_for(reldst) + self.invalidate_for(reldst) def create_file(self, relpath, contents='', mode='w'): """Writes to a file under the buildroot. @@ -158,7 +158,7 @@ def create_file(self, relpath, contents='', mode='w'): path = os.path.join(self.build_root, relpath) with safe_open(path, mode=mode) as fp: fp.write(contents) - self._invalidate_for(relpath) + self.invalidate_for(relpath) return path def create_files(self, path, files): @@ -432,7 +432,7 @@ def reset_build_graph(self, reset_build_files=False, delete_build_files=False): if delete_build_files: for f in files: os.remove(os.path.join(self.build_root, f)) - self._invalidate_for(*files) + self.invalidate_for(*files) if self._build_graph is not None: self._build_graph.reset() @@ -632,7 +632,7 @@ def make_snapshot(self, files): """ with temporary_dir() as temp_dir: for file_name, content in files.items(): - safe_file_dump(os.path.join(temp_dir, file_name), content) + safe_file_dump(os.path.join(temp_dir, file_name), content, mode='w') return self.scheduler.capture_snapshots(( PathGlobsAndRoot(PathGlobs(('**',)), text_type(temp_dir)), ))[0] diff --git a/tests/python/pants_test/util/test_dirutil.py b/tests/python/pants_test/util/test_dirutil.py index 1248f8ce985..3481492de73 100644 --- a/tests/python/pants_test/util/test_dirutil.py +++ b/tests/python/pants_test/util/test_dirutil.py @@ -396,6 +396,8 @@ def test_rm_rf_no_such_file_not_an_error(self, file_name='./vanishing_file'): def assert_dump_and_read(self, test_content, dump_kwargs, read_kwargs): with temporary_dir() as td: test_filename = os.path.join(td, 'test.out') + # TODO(#7121): remove all deprecated usages of `binary_mode` and `mode` arguments to + # safe_file_dump() in this file when the deprecation period is over! safe_file_dump(test_filename, test_content, **dump_kwargs) self.assertEqual(read_file(test_filename, **read_kwargs), test_content) @@ -407,7 +409,7 @@ def test_readwrite_file_binary(self): self.assert_dump_and_read(b'333', {'mode': 'w'}, {'binary_mode': True}) with self.assertRaises(AssertionError): # Both `binary_mode` and `mode` specified. 
- # TODO: Should be removed along with https://github.com/pantsbuild/pants/issues/6543 + # TODO(#6543): Should be removed along with https://github.com/pantsbuild/pants/issues/6543 self.assert_dump_and_read(b'333', {'binary_mode': True, 'mode': 'wb'}, {'binary_mode': True}) def test_readwrite_file_unicode(self): diff --git a/tests/python/pants_test/util/test_objects.py b/tests/python/pants_test/util/test_objects.py index 4fb24ebbf34..8cb1423c772 100644 --- a/tests/python/pants_test/util/test_objects.py +++ b/tests/python/pants_test/util/test_objects.py @@ -12,19 +12,24 @@ from future.utils import PY2, PY3, text_type -from pants.util.objects import (Collection, Exactly, SubclassesOf, SuperclassesOf, TypeCheckError, +from pants.util.collections_abc_backport import OrderedDict +from pants.util.objects import (EnumVariantSelectionError, Exactly, SubclassesOf, SuperclassesOf, + TypeCheckError, TypeConstraintError, TypedCollection, TypedDatatypeInstanceConstructionError, datatype, enum) from pants_test.test_base import TestBase -class CollectionTest(TestBase): - def test_collection_iteration(self): - self.assertEqual([1, 2], [x for x in Collection.of(int)([1, 2])]) - - class TypeConstraintTestBase(TestBase): class A(object): - pass + + def __repr__(self): + return '{}()'.format(type(self).__name__) + + def __str__(self): + return '(str form): {}'.format(repr(self)) + + def __eq__(self, other): + return type(self) == type(other) class B(A): pass @@ -41,9 +46,17 @@ def test_none(self): with self.assertRaises(ValueError): SubclassesOf() + def test_str_and_repr(self): + superclasses_of_b = SuperclassesOf(self.B) + self.assertEqual("SuperclassesOf(B)", str(superclasses_of_b)) + self.assertEqual("SuperclassesOf(B)", repr(superclasses_of_b)) + + superclasses_of_multiple = SuperclassesOf(self.A, self.B) + self.assertEqual("SuperclassesOf(A or B)", str(superclasses_of_multiple)) + self.assertEqual("SuperclassesOf(A, B)", repr(superclasses_of_multiple)) + def test_single(self): superclasses_of_b = SuperclassesOf(self.B) - self.assertEqual((self.B,), superclasses_of_b.types) self.assertTrue(superclasses_of_b.satisfied_by(self.A())) self.assertTrue(superclasses_of_b.satisfied_by(self.B())) self.assertFalse(superclasses_of_b.satisfied_by(self.BPrime())) @@ -51,12 +64,19 @@ def test_single(self): def test_multiple(self): superclasses_of_a_or_b = SuperclassesOf(self.A, self.B) - self.assertEqual((self.A, self.B), superclasses_of_a_or_b.types) self.assertTrue(superclasses_of_a_or_b.satisfied_by(self.A())) self.assertTrue(superclasses_of_a_or_b.satisfied_by(self.B())) self.assertFalse(superclasses_of_a_or_b.satisfied_by(self.BPrime())) self.assertFalse(superclasses_of_a_or_b.satisfied_by(self.C())) + def test_validate(self): + superclasses_of_a_or_b = SuperclassesOf(self.A, self.B) + self.assertEqual(self.A(), superclasses_of_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), superclasses_of_a_or_b.validate_satisfied_by(self.B())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value C() (with type 'C') must satisfy this type constraint: SuperclassesOf(A or B).")): + superclasses_of_a_or_b.validate_satisfied_by(self.C()) + class ExactlyTest(TypeConstraintTestBase): def test_none(self): @@ -65,7 +85,6 @@ def test_none(self): def test_single(self): exactly_b = Exactly(self.B) - self.assertEqual((self.B,), exactly_b.types) self.assertFalse(exactly_b.satisfied_by(self.A())) self.assertTrue(exactly_b.satisfied_by(self.B())) self.assertFalse(exactly_b.satisfied_by(self.BPrime())) 
@@ -73,7 +92,6 @@ def test_single(self): def test_multiple(self): exactly_a_or_b = Exactly(self.A, self.B) - self.assertEqual((self.A, self.B), exactly_a_or_b.types) self.assertTrue(exactly_a_or_b.satisfied_by(self.A())) self.assertTrue(exactly_a_or_b.satisfied_by(self.B())) self.assertFalse(exactly_a_or_b.satisfied_by(self.BPrime())) @@ -84,31 +102,43 @@ def test_disallows_unsplatted_lists(self): Exactly([1]) def test_str_and_repr(self): - exactly_b_types = Exactly(self.B, description='B types') - self.assertEqual("=(B types)", str(exactly_b_types)) - self.assertEqual("Exactly(B types)", repr(exactly_b_types)) - exactly_b = Exactly(self.B) - self.assertEqual("=B", str(exactly_b)) + self.assertEqual("Exactly(B)", str(exactly_b)) self.assertEqual("Exactly(B)", repr(exactly_b)) exactly_multiple = Exactly(self.A, self.B) - self.assertEqual("=(A, B)", str(exactly_multiple)) + self.assertEqual("Exactly(A or B)", str(exactly_multiple)) self.assertEqual("Exactly(A, B)", repr(exactly_multiple)) def test_checking_via_bare_type(self): self.assertTrue(Exactly(self.B).satisfied_by_type(self.B)) self.assertFalse(Exactly(self.B).satisfied_by_type(self.C)) + def test_validate(self): + exactly_a_or_b = Exactly(self.A, self.B) + self.assertEqual(self.A(), exactly_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), exactly_a_or_b.validate_satisfied_by(self.B())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value C() (with type 'C') must satisfy this type constraint: Exactly(A or B).")): + exactly_a_or_b.validate_satisfied_by(self.C()) + class SubclassesOfTest(TypeConstraintTestBase): def test_none(self): with self.assertRaises(ValueError): SubclassesOf() + def test_str_and_repr(self): + subclasses_of_b = SubclassesOf(self.B) + self.assertEqual("SubclassesOf(B)", str(subclasses_of_b)) + self.assertEqual("SubclassesOf(B)", repr(subclasses_of_b)) + + subclasses_of_multiple = SubclassesOf(self.A, self.B) + self.assertEqual("SubclassesOf(A or B)", str(subclasses_of_multiple)) + self.assertEqual("SubclassesOf(A, B)", repr(subclasses_of_multiple)) + def test_single(self): subclasses_of_b = SubclassesOf(self.B) - self.assertEqual((self.B,), subclasses_of_b.types) self.assertFalse(subclasses_of_b.satisfied_by(self.A())) self.assertTrue(subclasses_of_b.satisfied_by(self.B())) self.assertFalse(subclasses_of_b.satisfied_by(self.BPrime())) @@ -116,12 +146,62 @@ def test_single(self): def test_multiple(self): subclasses_of_b_or_c = SubclassesOf(self.B, self.C) - self.assertEqual((self.B, self.C), subclasses_of_b_or_c.types) self.assertTrue(subclasses_of_b_or_c.satisfied_by(self.B())) self.assertTrue(subclasses_of_b_or_c.satisfied_by(self.C())) self.assertFalse(subclasses_of_b_or_c.satisfied_by(self.BPrime())) self.assertFalse(subclasses_of_b_or_c.satisfied_by(self.A())) + def test_validate(self): + subclasses_of_a_or_b = SubclassesOf(self.A, self.B) + self.assertEqual(self.A(), subclasses_of_a_or_b.validate_satisfied_by(self.A())) + self.assertEqual(self.B(), subclasses_of_a_or_b.validate_satisfied_by(self.B())) + self.assertEqual(self.C(), subclasses_of_a_or_b.validate_satisfied_by(self.C())) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("value 1 (with type 'int') must satisfy this type constraint: SubclassesOf(A or B).")): + subclasses_of_a_or_b.validate_satisfied_by(1) + + +class TypedCollectionTest(TypeConstraintTestBase): + def test_str_and_repr(self): + collection_of_exactly_b = TypedCollection(Exactly(self.B)) + self.assertEqual("TypedCollection(Exactly(B))", 
str(collection_of_exactly_b)) + self.assertEqual("TypedCollection(Exactly(B))", repr(collection_of_exactly_b)) + + collection_of_multiple_subclasses = TypedCollection( + SubclassesOf(self.A, self.B)) + self.assertEqual("TypedCollection(SubclassesOf(A or B))", + str(collection_of_multiple_subclasses)) + self.assertEqual("TypedCollection(SubclassesOf(A, B))", + repr(collection_of_multiple_subclasses)) + + def test_collection_single(self): + collection_constraint = TypedCollection(Exactly(self.A)) + self.assertTrue(collection_constraint.satisfied_by([self.A()])) + self.assertFalse(collection_constraint.satisfied_by([self.A(), self.B()])) + self.assertTrue(collection_constraint.satisfied_by([self.A(), self.A()])) + + def test_collection_multiple(self): + collection_constraint = TypedCollection(SubclassesOf(self.B, self.BPrime)) + self.assertTrue(collection_constraint.satisfied_by([self.B(), self.C(), self.BPrime()])) + self.assertFalse(collection_constraint.satisfied_by([self.B(), self.A()])) + + def test_no_complex_sub_constraint(self): + sub_collection = TypedCollection(Exactly(self.A)) + with self.assertRaisesRegexp(TypeError, re.escape( + "constraint for collection must be a TypeOnlyConstraint! was: {}".format(sub_collection))): + TypedCollection(sub_collection) + + def test_validate(self): + collection_exactly_a_or_b = TypedCollection(Exactly(self.A, self.B)) + self.assertEqual([self.A()], collection_exactly_a_or_b.validate_satisfied_by([self.A()])) + self.assertEqual([self.B()], collection_exactly_a_or_b.validate_satisfied_by([self.B()])) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("in wrapped constraint TypedCollection(Exactly(A or B)): value A() (with type 'A') must satisfy this type constraint: SubclassesOf(Iterable).")): + collection_exactly_a_or_b.validate_satisfied_by(self.A()) + with self.assertRaisesRegexp(TypeConstraintError, + re.escape("in wrapped constraint TypedCollection(Exactly(A or B)) matching iterable object [C()]: value C() (with type 'C') must satisfy this type constraint: Exactly(A or B).")): + collection_exactly_a_or_b.validate_satisfied_by([self.C()]) + class ExportedDatatype(datatype(['val'])): pass @@ -175,6 +255,12 @@ def __repr__(self): class WithSubclassTypeConstraint(datatype([('some_value', SubclassesOf(SomeBaseClass))])): pass +class WithCollectionTypeConstraint(datatype([ + ('dependencies', TypedCollection(Exactly(int))), +])): + pass + + class NonNegativeInt(datatype([('an_int', int)])): """Example of overriding __new__() to perform deeper argument checking.""" @@ -392,7 +478,7 @@ def test_instance_construction_by_repr(self): some_val = SomeTypedDatatype(3) self.assertEqual(3, some_val.val) self.assertEqual(repr(some_val), "SomeTypedDatatype(val=3)") - self.assertEqual(str(some_val), "SomeTypedDatatype(val<=int>=3)") + self.assertEqual(str(some_val), "SomeTypedDatatype(val=3)") some_object = WithExplicitTypeConstraint(text_type('asdf'), 45) self.assertEqual(some_object.a_string, 'asdf') @@ -402,7 +488,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(some_object), expected_message) def compare_str(unicode_type_name): - expected_message = "WithExplicitTypeConstraint(a_string<={}>=asdf, an_int<=int>=45)".format(unicode_type_name) + expected_message = "WithExplicitTypeConstraint(a_string=asdf, an_int=45)".format(unicode_type_name) self.assertEqual(str(some_object), expected_message) if PY2: compare_str('unicode') @@ -414,7 +500,7 @@ def 
compare_str(unicode_type_name): some_nonneg_int = NonNegativeInt(an_int=3) self.assertEqual(3, some_nonneg_int.an_int) self.assertEqual(repr(some_nonneg_int), "NonNegativeInt(an_int=3)") - self.assertEqual(str(some_nonneg_int), "NonNegativeInt(an_int<=int>=3)") + self.assertEqual(str(some_nonneg_int), "NonNegativeInt(an_int=3)") wrapped_nonneg_int = CamelCaseWrapper(NonNegativeInt(45)) # test attribute naming for camel-cased types @@ -424,7 +510,7 @@ def compare_str(unicode_type_name): "CamelCaseWrapper(nonneg_int=NonNegativeInt(an_int=45))") self.assertEqual( str(wrapped_nonneg_int), - "CamelCaseWrapper(nonneg_int<=NonNegativeInt>=NonNegativeInt(an_int<=int>=45))") + "CamelCaseWrapper(nonneg_int=NonNegativeInt(an_int=45))") mixed_type_obj = MixedTyping(value=3, name=text_type('asdf')) self.assertEqual(3, mixed_type_obj.value) @@ -433,7 +519,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(mixed_type_obj), expected_message) def compare_str(unicode_type_name): - expected_message = "MixedTyping(value=3, name<={}>=asdf)".format(unicode_type_name) + expected_message = "MixedTyping(value=3, name=asdf)".format(unicode_type_name) self.assertEqual(str(mixed_type_obj), expected_message) if PY2: compare_str('unicode') @@ -448,7 +534,7 @@ def compare_str(unicode_type_name): "WithSubclassTypeConstraint(some_value=SomeDatatypeClass())") self.assertEqual( str(subclass_constraint_obj), - "WithSubclassTypeConstraint(some_value<+SomeBaseClass>=SomeDatatypeClass())") + "WithSubclassTypeConstraint(some_value=SomeDatatypeClass())") def test_mixin_type_construction(self): obj_with_mixin = TypedWithMixin(text_type(' asdf ')) @@ -457,7 +543,7 @@ def compare_repr(include_unicode = False): .format(unicode_literal='u' if include_unicode else '') self.assertEqual(repr(obj_with_mixin), expected_message) def compare_str(unicode_type_name): - expected_message = "TypedWithMixin(val<={}>= asdf )".format(unicode_type_name) + expected_message = "TypedWithMixin(val= asdf )".format(unicode_type_name) self.assertEqual(str(obj_with_mixin), expected_message) if PY2: compare_str('unicode') @@ -468,10 +554,18 @@ def compare_str(unicode_type_name): self.assertEqual(obj_with_mixin.as_str(), ' asdf ') self.assertEqual(obj_with_mixin.stripped(), 'asdf') + def test_instance_with_collection_construction_str_repr(self): + # TODO: convert the type of the input collection using a `wrapper_type` argument! 
+ obj_with_collection = WithCollectionTypeConstraint([3]) + self.assertEqual("WithCollectionTypeConstraint(dependencies=[3])", + str(obj_with_collection)) + self.assertEqual("WithCollectionTypeConstraint(dependencies=[3])", + repr(obj_with_collection)) + def test_instance_construction_errors(self): with self.assertRaises(TypeError) as cm: SomeTypedDatatype(something=3) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n__new__() got an unexpected keyword argument 'something'" + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'something'" self.assertEqual(str(cm.exception), expected_msg) # not providing all the fields @@ -482,7 +576,7 @@ def test_instance_construction_errors(self): if PY3 else "__new__() takes exactly 2 arguments (1 given)" ) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n" + expected_msg_ending + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: {}".format(expected_msg_ending) self.assertEqual(str(cm.exception), expected_msg) # unrecognized fields @@ -493,20 +587,20 @@ def test_instance_construction_errors(self): if PY3 else "__new__() takes exactly 2 arguments (3 given)" ) - expected_msg = "error: in constructor of type SomeTypedDatatype: type check error:\n" + expected_msg_ending + expected_msg = "type check error in class SomeTypedDatatype: error in namedtuple() base constructor: {}".format(expected_msg_ending) self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypedDatatypeInstanceConstructionError) as cm: CamelCaseWrapper(nonneg_int=3) expected_msg = ( - """error: in constructor of type CamelCaseWrapper: type check error: + """type check error in class CamelCaseWrapper: errors type checking constructor arguments: field 'nonneg_int' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly(NonNegativeInt).""") self.assertEqual(str(cm.exception), expected_msg) # test that kwargs with keywords that aren't field names fail the same way with self.assertRaises(TypeError) as cm: CamelCaseWrapper(4, a=3) - expected_msg = "error: in constructor of type CamelCaseWrapper: type check error:\n__new__() got an unexpected keyword argument 'a'" + expected_msg = "type check error in class CamelCaseWrapper: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'a'" self.assertEqual(str(cm.exception), expected_msg) def test_type_check_errors(self): @@ -514,7 +608,7 @@ def test_type_check_errors(self): with self.assertRaises(TypeCheckError) as cm: SomeTypedDatatype([]) expected_msg = ( - """error: in constructor of type SomeTypedDatatype: type check error: + """type check error in class SomeTypedDatatype: errors type checking constructor arguments: field 'val' was invalid: value [] (with type 'list') must satisfy this type constraint: Exactly(int).""") self.assertEqual(str(cm.exception), expected_msg) @@ -523,7 +617,7 @@ def test_type_check_errors(self): AnotherTypedDatatype(text_type('correct'), text_type('should be list')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'elements' was invalid: value {unicode_literal}'should be list' (with type '{type_name}') must satisfy this type 
constraint: Exactly(list).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) self.assertEqual(str(cm.exception), expected_message) @@ -537,7 +631,7 @@ def compare_str(unicode_type_name, include_unicode=False): AnotherTypedDatatype(3, text_type('should be list')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'string' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly({type_name}). field 'elements' was invalid: value {unicode_literal}'should be list' (with type '{type_name}') must satisfy this type constraint: Exactly(list).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) @@ -551,7 +645,7 @@ def compare_str(unicode_type_name, include_unicode=False): NonNegativeInt(text_type('asdf')) def compare_str(unicode_type_name, include_unicode=False): expected_message = ( - """error: in constructor of type NonNegativeInt: type check error: + """type check error in class NonNegativeInt: errors type checking constructor arguments: field 'an_int' was invalid: value {unicode_literal}'asdf' (with type '{type_name}') must satisfy this type constraint: Exactly(int).""" .format(type_name=unicode_type_name, unicode_literal='u' if include_unicode else '')) self.assertEqual(str(cm.exception), expected_message) @@ -562,18 +656,30 @@ def compare_str(unicode_type_name, include_unicode=False): with self.assertRaises(TypeCheckError) as cm: NonNegativeInt(-3) - expected_msg = ( - """error: in constructor of type NonNegativeInt: type check error: -value is negative: -3.""") + expected_msg = "type check error in class NonNegativeInt: value is negative: -3." 
self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: WithSubclassTypeConstraint(3) expected_msg = ( - """error: in constructor of type WithSubclassTypeConstraint: type check error: + """type check error in class WithSubclassTypeConstraint: errors type checking constructor arguments: field 'some_value' was invalid: value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(SomeBaseClass).""") self.assertEqual(str(cm.exception), expected_msg) + with self.assertRaises(TypeCheckError) as cm: + WithCollectionTypeConstraint(3) + expected_msg = """\ +type check error in class WithCollectionTypeConstraint: errors type checking constructor arguments: +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)): value 3 (with type 'int') must satisfy this type constraint: SubclassesOf(Iterable).""" + self.assertEqual(str(cm.exception), expected_msg) + + with self.assertRaises(TypeCheckError) as cm: + WithCollectionTypeConstraint([3, "asdf"]) + expected_msg = """\ +type check error in class WithCollectionTypeConstraint: errors type checking constructor arguments: +field 'dependencies' was invalid: in wrapped constraint TypedCollection(Exactly(int)) matching iterable object [3, {u}'asdf']: value {u}'asdf' (with type '{string_type}') must satisfy this type constraint: Exactly(int).""".format(u='u' if PY2 else '', string_type='unicode' if PY2 else 'str') + self.assertEqual(str(cm.exception), expected_msg) + def test_copy(self): obj = AnotherTypedDatatype(string='some_string', elements=[1, 2, 3]) new_obj = obj.copy(string='another_string') @@ -588,21 +694,20 @@ def test_copy_failure(self): with self.assertRaises(TypeCheckError) as cm: obj.copy(nonexistent_field=3) expected_msg = ( - """error: in constructor of type AnotherTypedDatatype: type check error: -__new__() got an unexpected keyword argument 'nonexistent_field'""") + """type check error in class AnotherTypedDatatype: error in namedtuple() base constructor: __new__() got an unexpected keyword argument 'nonexistent_field'""") self.assertEqual(str(cm.exception), expected_msg) with self.assertRaises(TypeCheckError) as cm: obj.copy(elements=3) expected_msg = ( - """error: in constructor of type AnotherTypedDatatype: type check error: + """type check error in class AnotherTypedDatatype: errors type checking constructor arguments: field 'elements' was invalid: value 3 (with type 'int') must satisfy this type constraint: Exactly(list).""") self.assertEqual(str(cm.exception), expected_msg) def test_enum_class_creation_errors(self): expected_rx = re.escape( "When converting all_values ([1, 2, 3, 1]) to a set, at least one duplicate " - "was detected. The unique elements of all_values were: OrderedSet([1, 2, 3]).") + "was detected. 
The unique elements of all_values were: [1, 2, 3].") with self.assertRaisesRegexp(ValueError, expected_rx): class DuplicateAllowedValues(enum('x', [1, 2, 3, 1])): pass @@ -610,20 +715,91 @@ def test_enum_instance_creation(self): self.assertEqual(1, SomeEnum.create().x) self.assertEqual(2, SomeEnum.create(2).x) self.assertEqual(1, SomeEnum(1).x) - self.assertEqual(2, SomeEnum(x=2).x) def test_enum_instance_creation_errors(self): expected_rx = re.escape( - "Value 3 for 'x' must be one of: OrderedSet([1, 2]).") - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + "Value 3 for 'x' must be one of: [1, 2].") + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx): SomeEnum.create(3) - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx): SomeEnum(3) - with self.assertRaisesRegexp(TypeCheckError, expected_rx): + + # Specifying the value by keyword argument is not allowed. + with self.assertRaisesRegexp(TypeError, re.escape("__new__() got an unexpected keyword argument 'x'")): SomeEnum(x=3) + # Test that None is not used as the default unless none_is_default=True. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape( + "Value None for 'x' must be one of: [1, 2]." + )): + SomeEnum.create(None) + self.assertEqual(1, SomeEnum.create(None, none_is_default=True).x) + expected_rx_falsy_value = re.escape( - "Value {}'' for 'x' must be one of: OrderedSet([1, 2])." + "Value {}'' for 'x' must be one of: [1, 2]." .format('u' if PY2 else '')) - with self.assertRaisesRegexp(TypeCheckError, expected_rx_falsy_value): - SomeEnum(x='') + with self.assertRaisesRegexp(EnumVariantSelectionError, expected_rx_falsy_value): + SomeEnum('') + + def test_enum_comparison_fails(self): + enum_instance = SomeEnum(1) + rx_str = re.escape("enum equality is defined to be an error") + with self.assertRaisesRegexp(TypeCheckError, rx_str): + enum_instance == enum_instance + with self.assertRaisesRegexp(TypeCheckError, rx_str): + enum_instance != enum_instance + # Test that comparison also fails against another type. + with self.assertRaisesRegexp(TypeCheckError, rx_str): + enum_instance == 1 + with self.assertRaisesRegexp(TypeCheckError, rx_str): + 1 == enum_instance + + class StrEnum(enum(['a'])): pass + enum_instance = StrEnum('a') + with self.assertRaisesRegexp(TypeCheckError, rx_str): + enum_instance == 'a' + with self.assertRaisesRegexp(TypeCheckError, rx_str): + 'a' == enum_instance + + def test_enum_resolve_variant(self): + one_enum_instance = SomeEnum(1) + two_enum_instance = SomeEnum(2) + self.assertEqual(3, one_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + })) + self.assertEqual(4, two_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + })) + + # Test that an unrecognized variant raises an error. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape("""\ +type check error in class SomeEnum: pattern matching must have exactly the keys [1, 2] (was: [1, 2, 3])""", + )): + one_enum_instance.resolve_for_enum_variant({ + 1: 3, + 2: 4, + 3: 5, + }) + + # Test that not providing all the variants raises an error. + with self.assertRaisesRegexp(EnumVariantSelectionError, re.escape("""\ +type check error in class SomeEnum: pattern matching must have exactly the keys [1, 2] (was: [1])""")): + one_enum_instance.resolve_for_enum_variant({ + 1: 3, + }) + + # Test that the ordering of the values in the enum constructor is not relevant for testing + # whether all variants are provided. 
+    class OutOfOrderEnum(enum([2, 1, 3])): pass
+    two_out_of_order_instance = OutOfOrderEnum(2)
+    # This OrderedDict mapping is in a different order than in the enum constructor. This test means
+    # we can rely on simply providing a literal dict to resolve_for_enum_variant() and not worry
+    # that the dict ordering will cause an error.
+    letter = two_out_of_order_instance.resolve_for_enum_variant(OrderedDict([
+      (1, 'b'),
+      (2, 'a'),
+      (3, 'c'),
+    ]))
+    self.assertEqual(letter, 'a')
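
# N.B.: resolve_for_enum_variant(), as exercised above, is exhaustive pattern matching over the
# enum's variants: the mapping must supply exactly one arm per declared value, and the mapping's
# ordering is irrelevant. A minimal sketch of that idiom without the pants enum helper (function
# and variable names are illustrative):
#
#   def resolve_for_variant(value, all_values, mapping):
#     # Require exactly the declared variants as keys, mirroring the error message
#     # the tests above assert on; dict ordering plays no role in the check.
#     if set(mapping) != set(all_values):
#       raise ValueError('pattern matching must have exactly the keys {!r} (was: {!r})'
#                        .format(sorted(all_values), sorted(mapping)))
#     return mapping[value]
#
#   assert resolve_for_variant(2, [2, 1, 3], {1: 'b', 2: 'a', 3: 'c'}) == 'a'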