diff --git a/src/python/pants/bin/engine_initializer.py b/src/python/pants/bin/engine_initializer.py index f304d3e3546..b205d4a65a3 100644 --- a/src/python/pants/bin/engine_initializer.py +++ b/src/python/pants/bin/engine_initializer.py @@ -11,7 +11,7 @@ from pants.base.build_environment import get_buildroot, get_scm from pants.base.file_system_project_tree import FileSystemProjectTree from pants.base.target_roots import ChangedTargetRoots, LiteralTargetRoots -from pants.engine.build_files import BuildFileAddresses, create_graph_rules +from pants.engine.build_files import BuildFileAddresses, Specs, create_graph_rules from pants.engine.fs import create_fs_rules from pants.engine.isolated_process import create_process_rules from pants.engine.legacy.address_mapper import LegacyAddressMapper @@ -84,7 +84,7 @@ def warm_product_graph(self, target_roots): if type(target_roots) is ChangedTargetRoots: subjects = [BuildFileAddresses(target_roots.addresses)] elif type(target_roots) is LiteralTargetRoots: - subjects = target_roots.specs + subjects = [Specs(tuple(target_roots.specs))] else: raise ValueError('Unexpected TargetRoots type: `{}`.'.format(target_roots)) request = self.scheduler.execution_request([TransitiveHydratedTargets], subjects) diff --git a/src/python/pants/engine/BUILD b/src/python/pants/engine/BUILD index 342e35ae025..3d2bae81869 100644 --- a/src/python/pants/engine/BUILD +++ b/src/python/pants/engine/BUILD @@ -70,6 +70,8 @@ python_library( ':struct', 'src/python/pants/base:project_tree', 'src/python/pants/build_graph', + 'src/python/pants/util:dirutil', + 'src/python/pants/util:objects', ] ) @@ -77,7 +79,6 @@ python_library( name='mapper', sources=['mapper.py'], dependencies=[ - '3rdparty/python:pathspec', ':objects', ':parser', 'src/python/pants/build_graph', diff --git a/src/python/pants/engine/build_files.py b/src/python/pants/engine/build_files.py index 268a3c42721..870757a005f 100644 --- a/src/python/pants/engine/build_files.py +++ 
b/src/python/pants/engine/build_files.py @@ -12,25 +12,21 @@ from pants.base.project_tree import Dir from pants.base.specs import (AscendantAddresses, DescendantAddresses, SiblingAddresses, - SingleAddress) + SingleAddress, Spec) from pants.build_graph.address import Address, BuildFileAddress +from pants.build_graph.address_lookup_error import AddressLookupError from pants.engine.addressable import (AddressableDescriptor, BuildFileAddresses, Collection, - Exactly, TypeConstraintError) + TypeConstraintError) from pants.engine.fs import FilesContent, PathGlobs, Snapshot from pants.engine.mapper import AddressFamily, AddressMap, AddressMapper, ResolveError from pants.engine.objects import Locatable, SerializableFactory, Validatable from pants.engine.rules import RootRule, SingletonRule, TaskRule, rule from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants.engine.struct import Struct +from pants.util.dirutil import fast_relpath_optional from pants.util.objects import datatype -_SPECS_CONSTRAINT = Exactly(SingleAddress, - SiblingAddresses, - DescendantAddresses, - AscendantAddresses) - - class ResolvedTypeMismatchError(ResolveError): """Indicates a resolved object was not of the expected type.""" @@ -52,6 +48,10 @@ class BuildFileGlobs(datatype('BuildFilesGlobs', ['path_globs'])): """A wrapper around PathGlobs that are known to match a build file pattern.""" +class Specs(Collection.of(Spec)): + """A collection of Spec subclasses.""" + + @rule(BuildFiles, [SelectProjection(FilesContent, PathGlobs, 'path_globs', BuildFileGlobs)]) def build_files(files_content): @@ -60,8 +60,10 @@ def build_files(files_content): @rule(BuildFileGlobs, [Select(AddressMapper), Select(Dir)]) def buildfile_path_globs_for_dir(address_mapper, directory): - patterns = address_mapper.build_patterns - return BuildFileGlobs(PathGlobs.create(directory.path, include=patterns, exclude=())) + patterns = tuple(join(directory.path, p) for p in 
address_mapper.build_patterns) + return BuildFileGlobs(PathGlobs.create('', + include=patterns, + exclude=address_mapper.build_ignore_patterns)) @rule(AddressFamily, [Select(AddressMapper), Select(Dir), Select(BuildFiles)]) @@ -74,11 +76,7 @@ def parse_address_family(address_mapper, path, build_files): if not files_content: raise ResolveError('Directory "{}" does not contain build files.'.format(path)) address_maps = [] - paths = (f.path for f in files_content) - ignored_paths = set(address_mapper.build_ignore_patterns.match_files(paths)) for filecontent_product in files_content: - if filecontent_product.path in ignored_paths: - continue address_maps.append(AddressMap.parse(filecontent_product.path, filecontent_product.content, address_mapper.parser)) @@ -232,18 +230,24 @@ def _hydrate(item_type, spec_path, **kwargs): @rule(BuildFileAddresses, [Select(AddressMapper), SelectDependencies(AddressFamily, BuildDirs, field_types=(Dir,)), - Select(_SPECS_CONSTRAINT)]) -def addresses_from_address_families(address_mapper, address_families, spec): - """Given a list of AddressFamilies and a Spec, return matching Addresses. + Select(Specs)]) +def addresses_from_address_families(address_mapper, address_families, specs): + """Given a list of AddressFamilies matching a list of Specs, return matching Addresses. - Raises a ResolveError if: + Raises a AddressLookupError if: - there were no matching AddressFamilies, or - the Spec matches no addresses for SingleAddresses. """ - def raise_if_empty_address_families(): - if not address_families: - raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory)) + # NB: `@memoized` does not work on local functions. 
+ def by_directory(): + if by_directory.cached is None: + by_directory.cached = {af.namespace: af for af in address_families} + return by_directory.cached + by_directory.cached = None + + def raise_empty_address_family(spec): + raise ResolveError('Path "{}" contains no BUILD files.'.format(spec.directory)) def exclude_address(address): if address_mapper.exclude_patterns: @@ -251,24 +255,49 @@ def exclude_address(address): return any(p.search(address_str) is not None for p in address_mapper.exclude_patterns) return False - def all_included_addresses(): - return (a - for af in address_families - for a in af.addressables.keys() - if not exclude_address(a)) - - if type(spec) in (DescendantAddresses, SiblingAddresses): - raise_if_empty_address_families() - addresses = tuple(all_included_addresses()) - elif type(spec) is SingleAddress: - raise_if_empty_address_families() - addresses = tuple(a for a in all_included_addresses() if a.target_name == spec.name) - if not addresses and len(address_families) == 1: - _raise_did_you_mean(address_families[0], spec.name) - elif type(spec) is AscendantAddresses: - addresses = tuple(all_included_addresses()) - else: - raise ValueError('Unrecognized Spec type: {}'.format(spec)) + addresses = [] + included = set() + def include(address_families, predicate=None): + matched = False + for af in address_families: + for a in af.addressables.keys(): + if a in included: + continue + if not exclude_address(a) and (predicate is None or predicate(a)): + matched = True + addresses.append(a) + included.add(a) + return matched + + for spec in specs.dependencies: + if type(spec) is DescendantAddresses: + matched = include( + af + for af in address_families + if fast_relpath_optional(af.namespace, spec.directory) is not None + ) + if not matched: + raise AddressLookupError( + 'Spec {} does not match any targets.'.format(spec)) + elif type(spec) is SiblingAddresses: + address_family = by_directory().get(spec.directory) + if not address_family: + 
raise_empty_address_family(spec) + include([address_family]) + elif type(spec) is SingleAddress: + address_family = by_directory().get(spec.directory) + if not address_family: + raise_empty_address_family(spec) + if not include([address_family], predicate=lambda a: a.target_name == spec.name): + _raise_did_you_mean(address_family, spec.name) + elif type(spec) is AscendantAddresses: + include( + af + for af in address_families + if fast_relpath_optional(spec.directory, af.namespace) is not None + ) + else: + raise ValueError('Unrecognized Spec type: {}'.format(spec)) return BuildFileAddresses(addresses) @@ -277,30 +306,28 @@ def all_included_addresses(): def filter_build_dirs(address_mapper, snapshot): """Given a Snapshot matching a build pattern, return parent directories as BuildDirs.""" dirnames = set(dirname(f.stat.path) for f in snapshot.files) - ignored_dirnames = address_mapper.build_ignore_patterns.match_files('{}/'.format(dirname) for dirname in dirnames) - ignored_dirnames = set(d.rstrip('/') for d in ignored_dirnames) - return BuildDirs(tuple(Dir(d) for d in dirnames if d not in ignored_dirnames)) - - -@rule(PathGlobs, [Select(AddressMapper), Select(_SPECS_CONSTRAINT)]) -def spec_to_globs(address_mapper, spec): - """Given a Spec object, return a PathGlobs object for the build files that it matches.""" - if type(spec) is DescendantAddresses: - directory = spec.directory - patterns = [join('**', pattern) for pattern in address_mapper.build_patterns] - elif type(spec) in (SiblingAddresses, SingleAddress): - directory = spec.directory - patterns = address_mapper.build_patterns - elif type(spec) is AscendantAddresses: - directory = '' - patterns = [ - join(f, pattern) - for pattern in address_mapper.build_patterns - for f in _recursive_dirname(spec.directory) - ] - else: - raise ValueError('Unrecognized Spec type: {}'.format(spec)) - return PathGlobs.create(directory, include=patterns, exclude=[]) + return BuildDirs(tuple(Dir(d) for d in dirnames)) + + 
+@rule(PathGlobs, [Select(AddressMapper), Select(Specs)]) +def spec_to_globs(address_mapper, specs): + """Given a Spec object, return a PathGlobs object for the build files that it matches. + """ + patterns = set() + for spec in specs.dependencies: + if type(spec) is DescendantAddresses: + patterns.update(join(spec.directory, '**', pattern) + for pattern in address_mapper.build_patterns) + elif type(spec) in (SiblingAddresses, SingleAddress): + patterns.update(join(spec.directory, pattern) + for pattern in address_mapper.build_patterns) + elif type(spec) is AscendantAddresses: + patterns.update(join(f, pattern) + for pattern in address_mapper.build_patterns + for f in _recursive_dirname(spec.directory)) + else: + raise ValueError('Unrecognized Spec type: {}'.format(spec)) + return PathGlobs.create('', include=patterns, exclude=address_mapper.build_ignore_patterns) def _recursive_dirname(f): @@ -370,8 +397,5 @@ def create_graph_rules(address_mapper, symbol_table): RootRule(Address), RootRule(BuildFileAddress), RootRule(BuildFileAddresses), - RootRule(AscendantAddresses), - RootRule(DescendantAddresses), - RootRule(SiblingAddresses), - RootRule(SingleAddress), + RootRule(Specs), ] diff --git a/src/python/pants/engine/legacy/address_mapper.py b/src/python/pants/engine/legacy/address_mapper.py index 06487ce9d58..c51da69929c 100644 --- a/src/python/pants/engine/legacy/address_mapper.py +++ b/src/python/pants/engine/legacy/address_mapper.py @@ -10,9 +10,10 @@ from pants.base.build_file import BuildFile from pants.base.specs import DescendantAddresses, SiblingAddresses +from pants.build_graph.address_lookup_error import AddressLookupError from pants.build_graph.address_mapper import AddressMapper from pants.engine.addressable import BuildFileAddresses -from pants.engine.build_files import BuildFilesCollection +from pants.engine.build_files import BuildFilesCollection, Specs from pants.engine.mapper import ResolveError from pants.engine.nodes import Throw from 
pants.util.dirutil import fast_relpath @@ -32,16 +33,12 @@ def __init__(self, scheduler, build_root): self._build_root = build_root def scan_build_files(self, base_path): - request = self._scheduler.execution_request([BuildFilesCollection], [(DescendantAddresses(base_path))]) - - result = self._scheduler.execute(request) - if result.error: - raise result.error + specs = (DescendantAddresses(base_path),) + build_files_collection, = self._scheduler.product_request(BuildFilesCollection, [Specs(specs)]) build_files_set = set() - for _, state in result.root_products: - for build_files in state.value.dependencies: - build_files_set.update(f.path for f in build_files.files_content.dependencies) + for build_files in build_files_collection.dependencies: + build_files_set.update(f.path for f in build_files.files_content.dependencies) return build_files_set @@ -68,30 +65,34 @@ def addresses_in_spec_path(self, spec_path): def scan_specs(self, specs, fail_fast=True): return self._internal_scan_specs(specs, fail_fast=fail_fast, missing_is_fatal=True) + def _specs_string(self, specs): + return ', '.join(s.to_spec_string() for s in specs) + def _internal_scan_specs(self, specs, fail_fast=True, missing_is_fatal=True): - request = self._scheduler.execution_request([BuildFileAddresses], specs) + # TODO: This should really use `product_request`, but on the other hand, we need to + # deprecate the entire `AddressMapper` interface anyway. See #4769. 
+ request = self._scheduler.execution_request([BuildFileAddresses], [Specs(tuple(specs))]) result = self._scheduler.execute(request) if result.error: raise self.BuildFileScanError(str(result.error)) - - addresses = set() - for (spec, _), state in result.root_products: - if isinstance(state, Throw): - if isinstance(state.exc, ResolveError): - if missing_is_fatal: - raise self.BuildFileScanError( - 'Spec `{}` does not match any targets.\n{}'.format(spec.to_spec_string(), str(state.exc))) - else: - # NB: ignore Throws containing ResolveErrors because they are due to missing targets / files - continue + (_, state), = result.root_products + + if isinstance(state, Throw): + if isinstance(state.exc, (AddressLookupError, ResolveError)): + if missing_is_fatal: + raise self.BuildFileScanError( + 'Spec `{}` does not match any targets.\n{}'.format( + self._specs_string(specs), str(state.exc))) else: - raise self.BuildFileScanError(str(state.exc)) - elif missing_is_fatal and not state.value.dependencies: - raise self.BuildFileScanError( - 'Spec `{}` does not match any targets.'.format(spec.to_spec_string())) - - addresses.update(state.value.dependencies) - return addresses + # NB: ignore Throws containing ResolveErrors because they are due to missing targets / files + return set() + else: + raise self.BuildFileScanError(str(state.exc)) + elif missing_is_fatal and not state.value.dependencies: + raise self.BuildFileScanError( + 'Spec `{}` does not match any targets.'.format(self._specs_string(specs))) + + return set(state.value.dependencies) def scan_addresses(self, root=None): if root: diff --git a/src/python/pants/engine/legacy/graph.py b/src/python/pants/engine/legacy/graph.py index 1491ce2bfc9..99afb4cbcb8 100644 --- a/src/python/pants/engine/legacy/graph.py +++ b/src/python/pants/engine/legacy/graph.py @@ -6,6 +6,7 @@ unicode_literals, with_statement) import logging +from collections import deque from contextlib import contextmanager from twitter.common.collections import 
OrderedSet @@ -20,11 +21,11 @@ from pants.build_graph.build_graph import BuildGraph from pants.build_graph.remote_sources import RemoteSources from pants.engine.addressable import BuildFileAddresses, Collection +from pants.engine.build_files import Specs from pants.engine.fs import PathGlobs, Snapshot from pants.engine.legacy.structs import BundleAdaptor, BundlesField, SourcesField, TargetAdaptor -from pants.engine.mapper import ResolveError from pants.engine.rules import TaskRule, rule -from pants.engine.selectors import Select, SelectDependencies, SelectProjection, SelectTransitive +from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants.source.wrapped_globs import EagerFilesetWithSpec, FilesetRelPathWrapper from pants.util.dirutil import fast_relpath from pants.util.objects import datatype @@ -56,9 +57,6 @@ class LegacyBuildGraph(BuildGraph): This implementation is backed by a Scheduler that is able to resolve TransitiveHydratedTargets. """ - class InvalidCommandLineSpecError(AddressLookupError): - """Raised when command line spec is not a valid directory""" - @classmethod def create(cls, scheduler, symbol_table): """Construct a graph given a Scheduler, Engine, and a SymbolTable class.""" @@ -79,7 +77,7 @@ def clone_new(self): """Returns a new BuildGraph instance of the same type and with the same __init__ params.""" return LegacyBuildGraph(self._scheduler, self._target_types) - def _index(self, roots): + def _index(self, hydrated_targets): """Index from the given roots into the storage provided by the base class. This is an additive operation: any existing connections involving these nodes are preserved. @@ -88,14 +86,12 @@ def _index(self, roots): new_targets = list() # Index the ProductGraph. - for product in roots: - # We have a successful TransitiveHydratedTargets value (for a particular input Spec). 
- for hydrated_target in product.dependencies: - target_adaptor = hydrated_target.adaptor - address = target_adaptor.address - all_addresses.add(address) - if address not in self._target_by_address: - new_targets.append(self._index_target(target_adaptor)) + for hydrated_target in hydrated_targets: + target_adaptor = hydrated_target.adaptor + address = target_adaptor.address + all_addresses.add(address) + if address not in self._target_by_address: + new_targets.append(self._index_target(target_adaptor)) # Once the declared dependencies of all targets are indexed, inject their # additional "traversable_(dependency_)?specs". @@ -208,12 +204,8 @@ def inject_addresses_closure(self, addresses): addresses = set(addresses) - set(self._target_by_address.keys()) if not addresses: return - matched = set(self._inject_specs([SingleAddress(a.spec_path, a.target_name) for a in addresses])) - missing = addresses - matched - if missing: - # TODO: When SingleAddress resolution converted from projection of a directory - # and name to a match for PathGlobs, we lost our useful AddressLookupError formatting. - raise AddressLookupError('Addresses were not matched: {}'.format(missing)) + for _ in self._inject_specs([SingleAddress(a.spec_path, a.target_name) for a in addresses]): + pass def inject_roots_closure(self, target_roots, fail_fast=None): if type(target_roots) is ChangedTargetRoots: @@ -239,9 +231,6 @@ def resolve_address(self, address): def _resolve_context(self): try: yield - except ResolveError as e: - # NB: ResolveError means that a target was not found, which is a common user facing error. - raise AddressLookupError(str(e)) except Exception as e: raise AddressLookupError( 'Build graph construction failed: {} {}'.format(type(e).__name__, str(e)) @@ -250,16 +239,15 @@ def _resolve_context(self): def _inject_addresses(self, subjects): """Injects targets into the graph for each of the given `Address` objects, and then yields them. 
- TODO: See #4533 about unifying "collection of literal Addresses" with the `Spec` types, which - would avoid the need for the independent `_inject_addresses` and `_inject_specs` codepaths. + TODO: See #5606 about undoing the split between `_inject_addresses` and `_inject_specs`. """ logger.debug('Injecting addresses to %s: %s', self, subjects) with self._resolve_context(): addresses = tuple(subjects) - hydrated_targets = self._scheduler.product_request(TransitiveHydratedTargets, - [BuildFileAddresses(addresses)]) + thts, = self._scheduler.product_request(TransitiveHydratedTargets, + [BuildFileAddresses(addresses)]) - self._index(hydrated_targets) + self._index(thts.closure) yielded_addresses = set() for address in subjects: @@ -274,20 +262,14 @@ def _inject_specs(self, subjects): """ logger.debug('Injecting specs to %s: %s', self, subjects) with self._resolve_context(): - product_results = self._scheduler.products_request([TransitiveHydratedTargets, BuildFileAddresses], - subjects) + specs = tuple(subjects) + thts, = self._scheduler.product_request(TransitiveHydratedTargets, + [Specs(specs)]) - self._index(product_results[TransitiveHydratedTargets]) + self._index(thts.closure) - yielded_addresses = set() - for subject, product in zip(subjects, product_results[BuildFileAddresses]): - if not product.dependencies: - raise self.InvalidCommandLineSpecError( - 'Spec {} does not match any targets.'.format(subject)) - for address in product.dependencies: - if address not in yielded_addresses: - yielded_addresses.add(address) - yield address + for hydrated_target in thts.roots: + yield hydrated_target.address class HydratedTarget(datatype('HydratedTarget', ['address', 'adaptor', 'dependencies'])): @@ -313,21 +295,50 @@ def __hash__(self): return hash(self.address) -class TransitiveHydratedTargets(Collection.of(HydratedTarget)): - """A transitive set of HydratedTarget objects.""" +class TransitiveHydratedTarget(datatype('TransitiveHydratedTarget', ['root', 
'dependencies'])): + """A recursive structure wrapping a HydratedTarget root and TransitiveHydratedTarget deps.""" + + +class TransitiveHydratedTargets(datatype('TransitiveHydratedTargets', ['roots', 'closure'])): + """A set of HydratedTarget roots, and their transitive, flattened, de-duped closure.""" class HydratedTargets(Collection.of(HydratedTarget)): """An intransitive set of HydratedTarget objects.""" -@rule(TransitiveHydratedTargets, [SelectTransitive(HydratedTarget, - BuildFileAddresses, - field_types=(Address,), - field='addresses')]) -def transitive_hydrated_targets(targets): - """Recursively requests HydratedTarget instances, which will result in an eager, transitive graph walk.""" - return TransitiveHydratedTargets(targets) +@rule(TransitiveHydratedTargets, [SelectDependencies(TransitiveHydratedTarget, + BuildFileAddresses, + field_types=(Address,), + field='addresses')]) +def transitive_hydrated_targets(transitive_hydrated_targets): + """Kicks off recursion on expansion of TransitiveHydratedTarget objects. + + The TransitiveHydratedTarget struct represents a structure-shared graph, which we walk + and flatten here. The engine memoizes the computation of TransitiveHydratedTarget, so + when multiple TransitiveHydratedTargets objects are being constructed for multiple + roots, their structure will be shared. 
+ """ + closure = set() + to_visit = deque(transitive_hydrated_targets) + + while to_visit: + tht = to_visit.popleft() + if tht.root in closure: + continue + closure.add(tht.root) + to_visit.extend(tht.dependencies) + + return TransitiveHydratedTargets(tuple(tht.root for tht in transitive_hydrated_targets), closure) + + +@rule(TransitiveHydratedTarget, [Select(HydratedTarget), + SelectDependencies(TransitiveHydratedTarget, + HydratedTarget, + field_types=(Address,), + field='addresses')]) +def transitive_hydrated_target(root, dependencies): + return TransitiveHydratedTarget(root, dependencies) @rule(HydratedTargets, [SelectDependencies(HydratedTarget, @@ -408,6 +419,7 @@ def create_legacy_graph_tasks(symbol_table): symbol_table_constraint = symbol_table.constraint() return [ transitive_hydrated_targets, + transitive_hydrated_target, hydrated_targets, TaskRule( HydratedTarget, diff --git a/src/python/pants/engine/legacy/source_mapper.py b/src/python/pants/engine/legacy/source_mapper.py index 38d75c80c85..6093e221a46 100644 --- a/src/python/pants/engine/legacy/source_mapper.py +++ b/src/python/pants/engine/legacy/source_mapper.py @@ -12,6 +12,7 @@ from pants.base.specs import AscendantAddresses, SingleAddress from pants.build_graph.address import parse_spec from pants.build_graph.source_mapper import SourceMapper +from pants.engine.build_files import Specs from pants.engine.legacy.address_mapper import LegacyAddressMapper from pants.engine.legacy.graph import HydratedTargets from pants.source.filespec import any_matches_filespec @@ -79,14 +80,14 @@ def iter_target_addresses_for_sources(self, sources): """Bulk, iterable form of `target_addresses_for_source`.""" # Walk up the buildroot looking for targets that would conceivably claim changed sources. 
sources_set = set(sources) - subjects = [AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)] + specs = tuple(AscendantAddresses(directory=d) for d in self._unique_dirs_for_sources(sources_set)) # Uniqify all transitive hydrated targets. hydrated_target_to_address = {} - for hydrated_targets in self._scheduler.product_request(HydratedTargets, subjects): - for hydrated_target in hydrated_targets.dependencies: - if hydrated_target not in hydrated_target_to_address: - hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address + hydrated_targets, = self._scheduler.product_request(HydratedTargets, [Specs(specs)]) + for hydrated_target in hydrated_targets.dependencies: + if hydrated_target not in hydrated_target_to_address: + hydrated_target_to_address[hydrated_target] = hydrated_target.adaptor.address for hydrated_target, legacy_address in six.iteritems(hydrated_target_to_address): # Handle BUILD files. diff --git a/src/python/pants/engine/mapper.py b/src/python/pants/engine/mapper.py index affaedfead4..f5c406a3fcc 100644 --- a/src/python/pants/engine/mapper.py +++ b/src/python/pants/engine/mapper.py @@ -8,9 +8,6 @@ import re from collections import OrderedDict -from pathspec import PathSpec -from pathspec.patterns.gitwildmatch import GitWildMatchPattern - from pants.build_graph.address import BuildFileAddress from pants.engine.objects import Serializable from pants.util.memo import memoized_property @@ -184,8 +181,8 @@ def __init__(self, :param list exclude_target_regexps: A list of regular expressions for excluding targets. 
""" self.parser = parser - self.build_patterns = build_patterns or (b'BUILD', b'BUILD.*') - self.build_ignore_patterns = PathSpec.from_lines(GitWildMatchPattern, build_ignore_patterns or []) + self.build_patterns = tuple(build_patterns or [b'BUILD', b'BUILD.*']) + self.build_ignore_patterns = tuple(build_ignore_patterns or []) self._exclude_target_regexps = exclude_target_regexps or [] self.exclude_patterns = [re.compile(pattern) for pattern in self._exclude_target_regexps] self.subproject_roots = subproject_roots or [] diff --git a/src/python/pants/engine/native.py b/src/python/pants/engine/native.py index 7fce59cea55..3756d80419a 100644 --- a/src/python/pants/engine/native.py +++ b/src/python/pants/engine/native.py @@ -160,7 +160,6 @@ void tasks_add_select(Tasks*, TypeConstraint); void tasks_add_select_variant(Tasks*, TypeConstraint, Buffer); void tasks_add_select_dependencies(Tasks*, TypeConstraint, TypeConstraint, Buffer, TypeIdBuffer); -void tasks_add_select_transitive(Tasks*, TypeConstraint, TypeConstraint, Buffer, TypeIdBuffer); void tasks_add_select_projection(Tasks*, TypeConstraint, TypeId, Buffer, TypeConstraint); void tasks_task_end(Tasks*); void tasks_singleton_add(Tasks*, Value, TypeConstraint); diff --git a/src/python/pants/engine/scheduler.py b/src/python/pants/engine/scheduler.py index 40fcea53421..3e98ea5fb1f 100644 --- a/src/python/pants/engine/scheduler.py +++ b/src/python/pants/engine/scheduler.py @@ -20,8 +20,8 @@ from pants.engine.native import Function, TypeConstraint, TypeId from pants.engine.nodes import Return, State, Throw from pants.engine.rules import RuleIndex, SingletonRule, TaskRule -from pants.engine.selectors import (Select, SelectDependencies, SelectProjection, SelectTransitive, - SelectVariant, constraint_for) +from pants.engine.selectors import (Select, SelectDependencies, SelectProjection, SelectVariant, + constraint_for) from pants.engine.struct import HasProducts, Variants from pants.util.contextutil import 
temporary_file_path from pants.util.objects import datatype @@ -208,12 +208,6 @@ def _register_task(self, output_constraint, rule): self._to_constraint(selector.dep_product), self._to_utf8_buf(selector.field), self._to_ids_buf(selector.field_types)) - elif selector_type is SelectTransitive: - self._native.lib.tasks_add_select_transitive(self._tasks, - product_constraint, - self._to_constraint(selector.dep_product), - self._to_utf8_buf(selector.field), - self._to_ids_buf(selector.field_types)) elif selector_type is SelectProjection: self._native.lib.tasks_add_select_projection(self._tasks, self._to_constraint(selector.product), diff --git a/src/python/pants/engine/selectors.py b/src/python/pants/engine/selectors.py index 02a55d306b9..6f6cda4203c 100644 --- a/src/python/pants/engine/selectors.py +++ b/src/python/pants/engine/selectors.py @@ -127,26 +127,6 @@ def __repr__(self): field_types_portion) -class SelectTransitive(datatype('Transitive', ['product', 'dep_product', 'field', 'field_types']), - Selector): - """A variation of `SelectDependencies` that is used to recursively request a product. - - One use case is to eagerly walk the entire graph. - - It first selects for dep_product then recursively requests products with the `product` type, expanding each by its - `field`. - - Requires that both the dep_product and product have the same field `field` that contains the same `field_types`. - """ - - DEFAULT_FIELD = 'dependencies' - - optional = False - - def __new__(cls, product, dep_product, field=DEFAULT_FIELD, field_types=tuple()): - return super(SelectTransitive, cls).__new__(cls, product, dep_product, field, field_types) - - class SelectProjection(datatype('Projection', ['product', 'projected_subject', 'field', 'input_product']), Selector): """Selects a field of the given Subject to produce a Subject, Product dependency from. 
diff --git a/src/python/pants/scm/change_calculator.py b/src/python/pants/scm/change_calculator.py index 6aa3c7bea79..fde3c0c6c79 100644 --- a/src/python/pants/scm/change_calculator.py +++ b/src/python/pants/scm/change_calculator.py @@ -13,7 +13,7 @@ from pants.base.build_environment import get_scm from pants.base.specs import DescendantAddresses from pants.build_graph.address import Address -from pants.engine.build_files import HydratedStructs +from pants.engine.build_files import HydratedStructs, Specs from pants.engine.legacy.graph import target_types_from_symbol_table from pants.engine.legacy.source_mapper import EngineSourceMapper from pants.goal.workspace import ScmWorkspace @@ -147,9 +147,10 @@ def iter_changed_target_addresses(self, changed_request): # For dependee finding, we need to parse all build files to collect all structs. But we # don't need to fully hydrate targets (ie, expand their source globs), and so we use # the `HydratedStructs` product. See #4535 for more info. + specs = (DescendantAddresses(''),) adaptor_iter = (t for targets in self._scheduler.product_request(HydratedStructs, - [DescendantAddresses('')]) + [Specs(specs)]) for t in targets.dependencies) graph = _DependentGraph.from_iterable(target_types_from_symbol_table(self._symbol_table), adaptor_iter) diff --git a/src/rust/engine/src/lib.rs b/src/rust/engine/src/lib.rs index d6df37f9b7c..a5b51d2345d 100644 --- a/src/rust/engine/src/lib.rs +++ b/src/rust/engine/src/lib.rs @@ -355,24 +355,6 @@ pub extern "C" fn tasks_add_select_dependencies( }) } -#[no_mangle] -pub extern "C" fn tasks_add_select_transitive( - tasks_ptr: *mut Tasks, - product: TypeConstraint, - dep_product: TypeConstraint, - field: Buffer, - field_types: TypeIdBuffer, -) { - with_tasks(tasks_ptr, |tasks| { - tasks.add_select_transitive( - product, - dep_product, - field.to_string().expect("field to be a string"), - field_types.to_vec(), - ); - }) -} - #[no_mangle] pub extern "C" fn tasks_add_select_projection( tasks_ptr: 
*mut Tasks, diff --git a/src/rust/engine/src/nodes.rs b/src/rust/engine/src/nodes.rs index 0c9ad31c4b5..4e135f77f62 100644 --- a/src/rust/engine/src/nodes.rs +++ b/src/rust/engine/src/nodes.rs @@ -2,13 +2,12 @@ // Licensed under the Apache License, Version 2.0 (see LICENSE). use std::error::Error; -use std::collections::{BTreeMap, HashSet}; +use std::collections::BTreeMap; use std::fmt; use std::os::unix::ffi::OsStrExt; use std::path::{Path, PathBuf}; use futures::future::{self, Future}; -use ordermap::OrderMap; use tempdir::TempDir; use boxfuture::{Boxable, BoxFuture}; @@ -511,166 +510,6 @@ impl SelectDependencies { } } -/// -/// A node that selects for the dep_product type, then recursively selects for the product type of -/// the result. Both the product and the dep_product must have the same "field" and the types of -/// products in that field must match the field type. -/// -/// A node that recursively selects the dependencies of requested type and merge them. -/// -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct SelectTransitive { - pub subject: Key, - pub variants: Variants, - pub selector: selectors::SelectTransitive, - dep_product_entries: rule_graph::Entries, - product_entries: rule_graph::Entries, -} - -impl SelectTransitive { - fn new( - selector: selectors::SelectTransitive, - subject: Key, - variants: Variants, - edges: &rule_graph::RuleEdges, - ) -> SelectTransitive { - let dep_p_entries = edges.entries_for(&rule_graph::SelectKey::NestedSelect( - Selector::SelectTransitive(selector.clone()), - selectors::Select::without_variant( - selector.clone().dep_product, - ), - )); - let p_entries = edges.entries_for(&rule_graph::SelectKey::ProjectedMultipleNestedSelect( - Selector::SelectTransitive(selector.clone()), - selector.field_types.clone(), - selectors::Select::without_variant( - selector.clone().product, - ), - )); - - SelectTransitive { - subject: subject, - variants: variants, - selector: selector.clone(), - dep_product_entries: 
dep_p_entries, - product_entries: p_entries, - } - } - - /// - /// Process single subject. - /// - /// Return tuple of: - /// (processed subject_key, product output, dependencies to be processed in future iterations). - /// - fn expand_transitive( - &self, - context: &Context, - subject_key: Key, - ) -> NodeFuture<(Key, Value, Vec)> { - let field_name = self.selector.field.to_owned(); - Select { - selector: selectors::Select::without_variant(self.selector.product), - subject: subject_key, - variants: self.variants.clone(), - // NB: We're filtering out all of the entries for field types other than - // subject_key's since none of them will match. - entries: self - .product_entries - .clone() - .into_iter() - .filter(|e| e.matches_subject_type(subject_key.type_id().clone())) - .collect(), - }.run(context.clone()) - .map(move |product| { - let deps = externs::project_multi(&product, &field_name); - (subject_key, product, deps) - }) - .to_boxed() - } -} - -/// -/// Track states when processing `SelectTransitive` iteratively. -/// -#[derive(Debug)] -struct TransitiveExpansion { - // Subjects to be processed. - todo: HashSet, - - // Mapping from processed subject `Key` to its product. - // Products will be collected at the end of iterations. - outputs: OrderMap, -} - -impl SelectTransitive { - fn run(self, context: Context) -> NodeFuture { - // Select the product holding the dependency list. 
- Select { - selector: selectors::Select::without_variant(self.selector.dep_product), - subject: self.subject.clone(), - variants: self.variants.clone(), - entries: self.dep_product_entries.clone(), - }.run(context.clone()) - .then(move |dep_product_res| { - match dep_product_res { - Ok(dep_product) => { - let subject_keys = externs::project_multi(&dep_product, &self.selector.field) - .into_iter() - .map(|subject| externs::key_for(subject)) - .collect(); - - let init = TransitiveExpansion { - todo: subject_keys, - outputs: OrderMap::default(), - }; - - future::loop_fn(init, move |mut expansion| { - let round = future::join_all({ - expansion - .todo - .drain() - .map(|subject_key| self.expand_transitive(&context, subject_key)) - .collect::>() - }); - - round.map(move |finished_items| { - let mut todo_candidates = Vec::new(); - for (subject_key, product, more_deps) in finished_items.into_iter() { - expansion.outputs.insert(subject_key, product); - todo_candidates.extend(more_deps); - } - - // NB enclose with {} to limit the borrowing scope. 
- { - let outputs = &expansion.outputs; - expansion.todo.extend( - todo_candidates - .into_iter() - .map(|dep| externs::key_for(dep)) - .filter(|dep_key| !outputs.contains_key(dep_key)) - .collect::>(), - ); - } - - if expansion.todo.is_empty() { - future::Loop::Break(expansion) - } else { - future::Loop::Continue(expansion) - } - }) - }).map(|expansion| { - externs::store_list(expansion.outputs.values().collect::>(), false) - }) - .to_boxed() - } - Err(failure) => err(failure), - } - }) - .to_boxed() - } -} - #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct SelectProjection { subject: Key, @@ -735,7 +574,7 @@ impl SelectProjection { selector: selectors::Select::without_variant(self.selector.product), subject: externs::key_for(projected_subject), variants: self.variants.clone(), - // NB: Unlike SelectDependencies and SelectTransitive, we don't need to filter by + // NB: Unlike SelectDependencies, we don't need to filter by // subject here, because there is only one projected type. 
entries: self.projected_entries.clone(), }.run(context.clone()) @@ -1100,10 +939,6 @@ impl Task { SelectDependencies::new(s, self.subject.clone(), self.variants.clone(), edges) .run(context.clone()) } - Selector::SelectTransitive(s) => { - SelectTransitive::new(s, self.subject.clone(), self.variants.clone(), edges) - .run(context.clone()) - } Selector::SelectProjection(s) => { SelectProjection::new(s, self.subject.clone(), self.variants.clone(), edges) .run(context.clone()) @@ -1177,7 +1012,7 @@ impl NodeKey { &NodeKey::Task(ref s) => { format!( "Task({}, {}, {})", - externs::key_to_str(&s.task.func.0), + externs::project_str(&externs::val_for(&s.task.func.0), "__name__"), keystr(&s.subject), typstr(&s.product) ) diff --git a/src/rust/engine/src/rule_graph.rs b/src/rust/engine/src/rule_graph.rs index d94092fb594..487748f5ab3 100644 --- a/src/rust/engine/src/rule_graph.rs +++ b/src/rust/engine/src/rule_graph.rs @@ -9,7 +9,7 @@ use std::io; use core::{ANY_TYPE, Function, Key, TypeConstraint, TypeId, Value}; use externs; -use selectors::{Select, SelectDependencies, SelectTransitive, Selector}; +use selectors::{Select, SelectDependencies, Selector}; use tasks::{Task, Tasks}; #[derive(Eq, Hash, PartialEq, Clone, Debug)] @@ -354,13 +354,7 @@ impl<'t> GraphMaker<'t> { ref dep_product, ref field_types, .. - }) | - &Selector::SelectTransitive(SelectTransitive { - ref product, - ref dep_product, - ref field_types, - .. 
- }) => { + }) => { let initial_selector = *dep_product; let initial_rules_or_literals = rhs_for_select( &self.tasks, @@ -702,24 +696,6 @@ pub fn selector_str(selector: &Selector) -> String { .join(", ") ) } - &Selector::SelectTransitive(ref s) => { - format!( - "{}({}, {}, {}field_types=({},))", - "SelectTransitive", - type_constraint_str(s.product), - type_constraint_str(s.dep_product), - if s.field == "dependencies" { - "".to_string() - } else { - format!("'{}', ", s.field) - }, - s.field_types - .iter() - .map(|&f| type_str(f)) - .collect::>() - .join(", ") - ) - } &Selector::SelectProjection(ref s) => { format!("SelectProjection({}, {}, '{}', {})", type_constraint_str(s.product), diff --git a/src/rust/engine/src/selectors.rs b/src/rust/engine/src/selectors.rs index f16a4a5c7c4..ff7159cadf2 100644 --- a/src/rust/engine/src/selectors.rs +++ b/src/rust/engine/src/selectors.rs @@ -26,14 +26,6 @@ pub struct SelectDependencies { pub field_types: Vec, } -#[derive(Clone, Debug, Eq, Hash, PartialEq)] -pub struct SelectTransitive { - pub product: TypeConstraint, - pub dep_product: TypeConstraint, - pub field: Field, - pub field_types: Vec, -} - #[derive(Clone, Debug, Eq, Hash, PartialEq)] pub struct SelectProjection { pub product: TypeConstraint, @@ -49,6 +41,5 @@ pub struct SelectProjection { pub enum Selector { Select(Select), SelectDependencies(SelectDependencies), - SelectTransitive(SelectTransitive), SelectProjection(SelectProjection), } diff --git a/src/rust/engine/src/tasks.rs b/src/rust/engine/src/tasks.rs index 69415a1a4bd..1366fa56577 100644 --- a/src/rust/engine/src/tasks.rs +++ b/src/rust/engine/src/tasks.rs @@ -5,7 +5,7 @@ use std::collections::{HashMap, HashSet}; use core::{Field, Function, FNV, Key, TypeConstraint, TypeId, Value}; use externs; -use selectors::{Selector, Select, SelectDependencies, SelectProjection, SelectTransitive}; +use selectors::{Selector, Select, SelectDependencies, SelectProjection}; #[derive(Clone, Debug, Eq, Hash, PartialEq)] @@ 
-135,21 +135,6 @@ impl Tasks { })); } - pub fn add_select_transitive( - &mut self, - product: TypeConstraint, - dep_product: TypeConstraint, - field: Field, - field_types: Vec, - ) { - self.clause(Selector::SelectTransitive(SelectTransitive { - product: product, - dep_product: dep_product, - field: field, - field_types: field_types, - })); - } - pub fn add_select_projection( &mut self, product: TypeConstraint, diff --git a/tests/python/pants_test/build_graph/test_build_graph_integration.py b/tests/python/pants_test/build_graph/test_build_graph_integration.py index 8e0b663043b..5df0064bdb9 100644 --- a/tests/python/pants_test/build_graph/test_build_graph_integration.py +++ b/tests/python/pants_test/build_graph/test_build_graph_integration.py @@ -18,7 +18,7 @@ def test_cycle(self): with self.file_renamed(os.path.join(prefix, 'cycle2'), 'TEST_BUILD', 'BUILD'): pants_run = self.run_pants(['compile', os.path.join(prefix, 'cycle1')]) self.assert_failure(pants_run) - self.assertIn('Cycle detected', pants_run.stderr_data) + self.assertIn('cycle', pants_run.stderr_data) def test_banned_module_import(self): self.banned_import('testprojects/src/python/build_file_imports_module') diff --git a/tests/python/pants_test/engine/BUILD b/tests/python/pants_test/engine/BUILD index d6c7cdcf500..67390a47da1 100644 --- a/tests/python/pants_test/engine/BUILD +++ b/tests/python/pants_test/engine/BUILD @@ -151,6 +151,7 @@ python_tests( ':scheduler_test_base', ':util', 'src/python/pants/build_graph', + 'src/python/pants/engine:build_files', 'src/python/pants/engine:mapper', 'src/python/pants/engine:struct', 'src/python/pants/util:dirutil', @@ -191,6 +192,7 @@ python_tests( coverage=['pants.engine.nodes', 'pants.engine.scheduler'], dependencies=[ ':util', + 'src/python/pants/base:cmd_line_spec_parser', 'src/python/pants/build_graph', 'src/python/pants/engine:scheduler', 'tests/python/pants_test/engine/examples:planners', diff --git a/tests/python/pants_test/engine/legacy/test_graph.py 
b/tests/python/pants_test/engine/legacy/test_graph.py index ee85ec89d60..4779ee2851a 100644 --- a/tests/python/pants_test/engine/legacy/test_graph.py +++ b/tests/python/pants_test/engine/legacy/test_graph.py @@ -102,8 +102,7 @@ def test_with_existing_directory_with_no_build_files_fails(self): with self.graph_helper() as graph_helper: self.create_graph_from_specs(graph_helper, ['build-support/bin::']) - self.assertIn('Path "build-support/bin" contains no BUILD files', - str(cm.exception)) + self.assertIn('does not match any targets.', str(cm.exception)) def test_inject_bad_dir(self): with self.assertRaises(AddressLookupError) as cm: diff --git a/tests/python/pants_test/engine/legacy/test_list_integration.py b/tests/python/pants_test/engine/legacy/test_list_integration.py index a130767cfd5..81e8b1e7e03 100644 --- a/tests/python/pants_test/engine/legacy/test_list_integration.py +++ b/tests/python/pants_test/engine/legacy/test_list_integration.py @@ -26,7 +26,7 @@ def test_list_none(self): def test_list_invalid_dir(self): pants_run = self.do_command('list', 'abcde::', success=False) - self.assertIn('ResolveError', pants_run.stderr_data) + self.assertIn('AddressLookupError', pants_run.stderr_data) def test_list_nested_function_scopes(self): pants_run = self.do_command('list', diff --git a/tests/python/pants_test/engine/test_engine.py b/tests/python/pants_test/engine/test_engine.py index d03ca891b29..0b757dc1ef0 100644 --- a/tests/python/pants_test/engine/test_engine.py +++ b/tests/python/pants_test/engine/test_engine.py @@ -122,7 +122,7 @@ def test_include_trace_error_raises_error_with_trace(self): self.assert_equal_with_printing(dedent(''' Received unexpected Throw state(s): Computing Select(, =A) - Computing Task(, , =A) + Computing Task(nested_raise, , =A) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call @@ -152,8 +152,8 @@ def test_trace_does_not_include_cancellations(self): self.assert_equal_with_printing(dedent(''' Received 
unexpected Throw state(s): Computing Select(, =A) - Computing Task(, , =A) - Computing Task(, , =C) + Computing Task(A, , =A) + Computing Task(nested_raise, , =C) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/engine/test_mapper.py b/tests/python/pants_test/engine/test_mapper.py index e11c019000c..3b8e12ffa67 100644 --- a/tests/python/pants_test/engine/test_mapper.py +++ b/tests/python/pants_test/engine/test_mapper.py @@ -13,11 +13,10 @@ from pants.base.specs import DescendantAddresses, SiblingAddresses, SingleAddress from pants.build_graph.address import Address from pants.engine.addressable import BuildFileAddresses, Collection -from pants.engine.build_files import UnhydratedStruct, create_graph_rules +from pants.engine.build_files import Specs, UnhydratedStruct, create_graph_rules from pants.engine.fs import create_fs_rules from pants.engine.mapper import (AddressFamily, AddressMap, AddressMapper, DifferingFamiliesError, DuplicateNameError, UnaddressableObjectError) -from pants.engine.nodes import Throw from pants.engine.parser import SymbolTable from pants.engine.rules import TaskRule from pants.engine.selectors import SelectDependencies @@ -167,18 +166,8 @@ def setUp(self): type_alias='target') def resolve(self, spec): - request = self.scheduler.execution_request([UnhydratedStructs], [spec]) - result = self.scheduler.execute(request) - if result.error: - raise result.error - - # Expect a single root. 
- if len(result.root_products) != 1: - raise Exception('Wrong number of result products: {}'.format(result.root_products)) - state = result.root_products[0][1] - if type(state) is Throw: - raise Exception(state.exc) - return state.value.dependencies + uhs, = self.scheduler.product_request(UnhydratedStructs, [Specs(tuple([spec]))]) + return uhs.dependencies def resolve_multi(self, spec): return {uhs.address: uhs.struct for uhs in self.resolve(spec)} diff --git a/tests/python/pants_test/engine/test_rules.py b/tests/python/pants_test/engine/test_rules.py index ff8faf4c545..5018f5e7670 100644 --- a/tests/python/pants_test/engine/test_rules.py +++ b/tests/python/pants_test/engine/test_rules.py @@ -14,7 +14,7 @@ from pants.engine.mapper import AddressMapper from pants.engine.rules import RootRule, RuleIndex, SingletonRule, TaskRule from pants.engine.scheduler import WrappedNativeScheduler -from pants.engine.selectors import Select, SelectDependencies, SelectProjection, SelectTransitive +from pants.engine.selectors import Select, SelectDependencies, SelectProjection from pants_test.engine.examples.parsers import JsonParser from pants_test.engine.examples.planners import Goal from pants_test.engine.util import (TargetTable, assert_equal_with_printing, @@ -303,8 +303,8 @@ def test_full_graph_for_planner_example(self): else: pass - self.assertEquals(41, len(all_rules)) - self.assertEquals(78, len(root_rule_lines)) # 2 lines per entry + self.assertEquals(20, len(all_rules)) + self.assertEquals(36, len(root_rule_lines)) # 2 lines per entry def test_smallest_full_test_multiple_root_subject_types(self): rules = [ @@ -473,28 +473,6 @@ def test_noop_removal_transitive(self): }""").strip(), subgraph) - def test_select_transitive_with_separate_types_for_subselectors(self): - rules = [ - TaskRule(Exactly(A), [SelectTransitive(B, C, field_types=(D,))], noop), - TaskRule(B, [Select(D)], noop), - TaskRule(C, [Select(SubA)], noop) - ] - - subgraph = self.create_subgraph(A, rules, SubA()) 
- - self.assert_equal_with_printing(dedent(""" - digraph { - // root subject types: SubA - // root entries - "Select(A) for SubA" [color=blue] - "Select(A) for SubA" -> {"(A, (SelectTransitive(B, C, field_types=(D,)),), noop) of SubA"} - // internal entries - "(A, (SelectTransitive(B, C, field_types=(D,)),), noop) of SubA" -> {"(C, (Select(SubA),), noop) of SubA" "(B, (Select(D),), noop) of D"} - "(B, (Select(D),), noop) of D" -> {"SubjectIsProduct(D)"} - "(C, (Select(SubA),), noop) of SubA" -> {"SubjectIsProduct(SubA)"} - }""").strip(), - subgraph) - def test_select_dependencies_with_separate_types_for_subselectors(self): rules = [ TaskRule(Exactly(A), [SelectDependencies(B, C, field_types=(D,))], noop), diff --git a/tests/python/pants_test/engine/test_scheduler.py b/tests/python/pants_test/engine/test_scheduler.py index 6e7334c4cef..2273bfcff12 100644 --- a/tests/python/pants_test/engine/test_scheduler.py +++ b/tests/python/pants_test/engine/test_scheduler.py @@ -12,6 +12,7 @@ from pants.base.cmd_line_spec_parser import CmdLineSpecParser from pants.build_graph.address import Address from pants.engine.addressable import BuildFileAddresses +from pants.engine.build_files import Specs from pants.engine.nodes import Return, Throw from pants.engine.rules import RootRule, TaskRule from pants.engine.selectors import Select, SelectVariant @@ -49,6 +50,9 @@ def setUp(self): self.managed_resolve_latest = Address.parse('3rdparty/jvm/managed:latest-hadoop') self.inferred_deps = Address.parse('src/scala/inferred_deps') + def parse_specs(self, *specs): + return Specs(tuple(self.spec_parser.parse_spec(spec) for spec in specs)) + def assert_select_for_subjects(self, walk, selector, subjects, variants=None): raise ValueError(walk) @@ -186,12 +190,12 @@ def test_multiple_classpath_entries(self): def test_descendant_specs(self): """Test that Addresses are produced via recursive globs of the 3rdparty/jvm directory.""" - spec = self.spec_parser.parse_spec('3rdparty/jvm::') - 
build_request = self.scheduler.execution_request([BuildFileAddresses], [spec]) + specs = self.parse_specs('3rdparty/jvm::') + build_request = self.scheduler.execution_request([BuildFileAddresses], [specs]) ((subject, _), root), = self.build(build_request) # Validate the root. - self.assertEqual(spec, subject) + self.assertEqual(specs, subject) self.assertEqual(BuildFileAddresses, type(root.value)) # Confirm that a few expected addresses are in the list. @@ -201,12 +205,12 @@ def test_descendant_specs(self): def test_sibling_specs(self): """Test that sibling Addresses are parsed in the 3rdparty/jvm directory.""" - spec = self.spec_parser.parse_spec('3rdparty/jvm:') - build_request = self.scheduler.execution_request([BuildFileAddresses], [spec]) + specs = self.parse_specs('3rdparty/jvm:') + build_request = self.scheduler.execution_request([BuildFileAddresses], [specs]) ((subject, _), root), = self.build(build_request) # Validate the root. - self.assertEqual(spec, subject) + self.assertEqual(specs, subject) self.assertEqual(BuildFileAddresses, type(root.value)) # Confirm that an expected address is in the list. 
@@ -215,8 +219,8 @@ def test_sibling_specs(self): self.assertNotIn(self.managed_guava, root.value.dependencies) def test_scheduler_visualize(self): - spec = self.spec_parser.parse_spec('3rdparty/jvm:') - build_request = self.request(['list'], spec) + specs = self.parse_specs('3rdparty/jvm::') + build_request = self.request(['list'], specs) self.build(build_request) with temporary_dir() as td: @@ -266,7 +270,7 @@ def test_trace_includes_rule_exception_traceback(self): assert_equal_with_printing(self, dedent(''' Computing Select(, =A) - Computing Task(, , =A) + Computing Task(nested_raise, , =A) Throw(An exception for B) Traceback (most recent call last): File LOCATION-INFO, in call diff --git a/tests/python/pants_test/option/util/fakes.py b/tests/python/pants_test/option/util/fakes.py index 9bc3d6bffaf..45fa0924b8f 100644 --- a/tests/python/pants_test/option/util/fakes.py +++ b/tests/python/pants_test/option/util/fakes.py @@ -93,7 +93,7 @@ class FakeOptions(object): def for_scope(self, scope): # TODO(John Sirois): Some users pass in A dict of scope -> _FakeOptionValues instead of a # dict of scope -> (dict of option name -> value). Clean up these usages and kill this - # accomodation. + # accommodation. options_for_this_scope = options.get(scope) or {} if isinstance(options_for_this_scope, _FakeOptionValues): options_for_this_scope = options_for_this_scope.option_values