id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
224,200
pantsbuild/pants
pants-plugins/src/python/internal_backend/utilities/register.py
PantsReleases.notes_for_version
def notes_for_version(self, version): """Given the parsed Version of pants, return its release notes. TODO: This method should parse out the specific version from the resulting file: see https://github.com/pantsbuild/pants/issues/1708 """ branch_name = self._branch_name(version) branch_notes_file = self._branch_notes.get(branch_name, None) if branch_notes_file is None: raise ValueError( 'Version {} lives in branch {}, which is not configured in {}.'.format( version, branch_name, self._branch_notes)) return _read_contents(branch_notes_file)
python
def notes_for_version(self, version): branch_name = self._branch_name(version) branch_notes_file = self._branch_notes.get(branch_name, None) if branch_notes_file is None: raise ValueError( 'Version {} lives in branch {}, which is not configured in {}.'.format( version, branch_name, self._branch_notes)) return _read_contents(branch_notes_file)
[ "def", "notes_for_version", "(", "self", ",", "version", ")", ":", "branch_name", "=", "self", ".", "_branch_name", "(", "version", ")", "branch_notes_file", "=", "self", ".", "_branch_notes", ".", "get", "(", "branch_name", ",", "None", ")", "if", "branch_n...
Given the parsed Version of pants, return its release notes. TODO: This method should parse out the specific version from the resulting file: see https://github.com/pantsbuild/pants/issues/1708
[ "Given", "the", "parsed", "Version", "of", "pants", "return", "its", "release", "notes", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/pants-plugins/src/python/internal_backend/utilities/register.py#L122-L134
224,201
pantsbuild/pants
src/python/pants/backend/jvm/targets/jar_library.py
JarLibrary.managed_dependencies
def managed_dependencies(self): """The managed_jar_dependencies target this jar_library specifies, or None. :API: public """ if self.payload.managed_dependencies: address = Address.parse(self.payload.managed_dependencies, relative_to=self.address.spec_path) self._build_graph.inject_address_closure(address) return self._build_graph.get_target(address) return None
python
def managed_dependencies(self): if self.payload.managed_dependencies: address = Address.parse(self.payload.managed_dependencies, relative_to=self.address.spec_path) self._build_graph.inject_address_closure(address) return self._build_graph.get_target(address) return None
[ "def", "managed_dependencies", "(", "self", ")", ":", "if", "self", ".", "payload", ".", "managed_dependencies", ":", "address", "=", "Address", ".", "parse", "(", "self", ".", "payload", ".", "managed_dependencies", ",", "relative_to", "=", "self", ".", "ad...
The managed_jar_dependencies target this jar_library specifies, or None. :API: public
[ "The", "managed_jar_dependencies", "target", "this", "jar_library", "specifies", "or", "None", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/targets/jar_library.py#L40-L50
224,202
pantsbuild/pants
src/python/pants/init/options_initializer.py
BuildConfigInitializer.setup
def setup(self): """Load backends and plugins. :returns: A `BuildConfiguration` object constructed during backend/plugin loading. """ return self._load_plugins( self._working_set, self._bootstrap_options.pythonpath, self._bootstrap_options.plugins, self._bootstrap_options.backend_packages )
python
def setup(self): return self._load_plugins( self._working_set, self._bootstrap_options.pythonpath, self._bootstrap_options.plugins, self._bootstrap_options.backend_packages )
[ "def", "setup", "(", "self", ")", ":", "return", "self", ".", "_load_plugins", "(", "self", ".", "_working_set", ",", "self", ".", "_bootstrap_options", ".", "pythonpath", ",", "self", ".", "_bootstrap_options", ".", "plugins", ",", "self", ".", "_bootstrap_...
Load backends and plugins. :returns: A `BuildConfiguration` object constructed during backend/plugin loading.
[ "Load", "backends", "and", "plugins", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/options_initializer.py#L61-L71
224,203
pantsbuild/pants
src/python/pants/init/options_initializer.py
OptionsInitializer._construct_options
def _construct_options(options_bootstrapper, build_configuration): """Parse and register options. :returns: An Options object representing the full set of runtime options. """ # Now that plugins and backends are loaded, we can gather the known scopes. # Gather the optionables that are not scoped to any other. All known scopes are reachable # via these optionables' known_scope_infos() methods. top_level_optionables = ( {GlobalOptionsRegistrar} | GlobalSubsystems.get() | build_configuration.optionables() | set(Goal.get_optionables()) ) # Now that we have the known scopes we can get the full options. `get_full_options` will # sort and de-duplicate these for us. known_scope_infos = [si for optionable in top_level_optionables for si in optionable.known_scope_infos()] return options_bootstrapper.get_full_options(known_scope_infos)
python
def _construct_options(options_bootstrapper, build_configuration): # Now that plugins and backends are loaded, we can gather the known scopes. # Gather the optionables that are not scoped to any other. All known scopes are reachable # via these optionables' known_scope_infos() methods. top_level_optionables = ( {GlobalOptionsRegistrar} | GlobalSubsystems.get() | build_configuration.optionables() | set(Goal.get_optionables()) ) # Now that we have the known scopes we can get the full options. `get_full_options` will # sort and de-duplicate these for us. known_scope_infos = [si for optionable in top_level_optionables for si in optionable.known_scope_infos()] return options_bootstrapper.get_full_options(known_scope_infos)
[ "def", "_construct_options", "(", "options_bootstrapper", ",", "build_configuration", ")", ":", "# Now that plugins and backends are loaded, we can gather the known scopes.", "# Gather the optionables that are not scoped to any other. All known scopes are reachable", "# via these optionables' k...
Parse and register options. :returns: An Options object representing the full set of runtime options.
[ "Parse", "and", "register", "options", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/options_initializer.py#L78-L99
224,204
pantsbuild/pants
src/python/pants/util/filtering.py
create_filters
def create_filters(predicate_params, predicate_factory): """Create filter functions from a list of string parameters. :param predicate_params: A list of predicate_param arguments as in `create_filter`. :param predicate_factory: As in `create_filter`. """ filters = [] for predicate_param in predicate_params: filters.append(create_filter(predicate_param, predicate_factory)) return filters
python
def create_filters(predicate_params, predicate_factory): filters = [] for predicate_param in predicate_params: filters.append(create_filter(predicate_param, predicate_factory)) return filters
[ "def", "create_filters", "(", "predicate_params", ",", "predicate_factory", ")", ":", "filters", "=", "[", "]", "for", "predicate_param", "in", "predicate_params", ":", "filters", ".", "append", "(", "create_filter", "(", "predicate_param", ",", "predicate_factory",...
Create filter functions from a list of string parameters. :param predicate_params: A list of predicate_param arguments as in `create_filter`. :param predicate_factory: As in `create_filter`.
[ "Create", "filter", "functions", "from", "a", "list", "of", "string", "parameters", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/util/filtering.py#L22-L31
224,205
pantsbuild/pants
src/python/pants/util/filtering.py
create_filter
def create_filter(predicate_param, predicate_factory): """Create a filter function from a string parameter. :param predicate_param: Create a filter for this param string. Each string is a comma-separated list of arguments to the predicate_factory. If the entire comma-separated list is prefixed by a '-' then the sense of the resulting filter is inverted. :param predicate_factory: A function that takes a parameter and returns a predicate, i.e., a function that takes a single parameter (of whatever type the filter operates on) and returns a boolean. :return: A filter function of one argument that is the logical OR of the predicates for each of the comma-separated arguments. If the comma-separated list was prefixed by a '-', the sense of the filter is inverted. """ # NOTE: Do not inline this into create_filters above. A separate function is necessary # in order to capture the different closure on each invocation. modifier, param = _extract_modifier(predicate_param) predicates = [predicate_factory(p) for p in param.split(',')] def filt(x): return modifier(any(pred(x) for pred in predicates)) return filt
python
def create_filter(predicate_param, predicate_factory): # NOTE: Do not inline this into create_filters above. A separate function is necessary # in order to capture the different closure on each invocation. modifier, param = _extract_modifier(predicate_param) predicates = [predicate_factory(p) for p in param.split(',')] def filt(x): return modifier(any(pred(x) for pred in predicates)) return filt
[ "def", "create_filter", "(", "predicate_param", ",", "predicate_factory", ")", ":", "# NOTE: Do not inline this into create_filters above. A separate function is necessary", "# in order to capture the different closure on each invocation.", "modifier", ",", "param", "=", "_extract_modifi...
Create a filter function from a string parameter. :param predicate_param: Create a filter for this param string. Each string is a comma-separated list of arguments to the predicate_factory. If the entire comma-separated list is prefixed by a '-' then the sense of the resulting filter is inverted. :param predicate_factory: A function that takes a parameter and returns a predicate, i.e., a function that takes a single parameter (of whatever type the filter operates on) and returns a boolean. :return: A filter function of one argument that is the logical OR of the predicates for each of the comma-separated arguments. If the comma-separated list was prefixed by a '-', the sense of the filter is inverted.
[ "Create", "a", "filter", "function", "from", "a", "string", "parameter", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/util/filtering.py#L34-L54
224,206
pantsbuild/pants
src/python/pants/util/filtering.py
wrap_filters
def wrap_filters(filters): """Returns a single filter that short-circuit ANDs the specified filters. :API: public """ def combined_filter(x): for filt in filters: if not filt(x): return False return True return combined_filter
python
def wrap_filters(filters): def combined_filter(x): for filt in filters: if not filt(x): return False return True return combined_filter
[ "def", "wrap_filters", "(", "filters", ")", ":", "def", "combined_filter", "(", "x", ")", ":", "for", "filt", "in", "filters", ":", "if", "not", "filt", "(", "x", ")", ":", "return", "False", "return", "True", "return", "combined_filter" ]
Returns a single filter that short-circuit ANDs the specified filters. :API: public
[ "Returns", "a", "single", "filter", "that", "short", "-", "circuit", "ANDs", "the", "specified", "filters", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/util/filtering.py#L57-L67
224,207
pantsbuild/pants
src/python/pants/build_graph/app_base.py
AppBase.binary
def binary(self): """Returns the binary this target references.""" dependencies = self.dependencies if len(dependencies) != 1: raise TargetDefinitionException(self, 'An app must define exactly one binary ' 'dependency, have: {}'.format(dependencies)) binary = dependencies[0] if not isinstance(binary, self.binary_target_type()): raise TargetDefinitionException(self, 'Expected binary dependency to be a {} ' 'target, found {}'.format(self.binary_target_type(), binary)) return binary
python
def binary(self): dependencies = self.dependencies if len(dependencies) != 1: raise TargetDefinitionException(self, 'An app must define exactly one binary ' 'dependency, have: {}'.format(dependencies)) binary = dependencies[0] if not isinstance(binary, self.binary_target_type()): raise TargetDefinitionException(self, 'Expected binary dependency to be a {} ' 'target, found {}'.format(self.binary_target_type(), binary)) return binary
[ "def", "binary", "(", "self", ")", ":", "dependencies", "=", "self", ".", "dependencies", "if", "len", "(", "dependencies", ")", "!=", "1", ":", "raise", "TargetDefinitionException", "(", "self", ",", "'An app must define exactly one binary '", "'dependency, have: {...
Returns the binary this target references.
[ "Returns", "the", "binary", "this", "target", "references", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/app_base.py#L291-L302
224,208
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.copy
def copy(self): """Returns a copy of this UnionProducts. Edits to the copy's mappings will not affect the product mappings in the original. The copy is shallow though, so edits to the copy's product values will mutate the original's product values. :API: public :rtype: :class:`UnionProducts` """ products_by_target = defaultdict(OrderedSet) for key, value in self._products_by_target.items(): products_by_target[key] = OrderedSet(value) return UnionProducts(products_by_target=products_by_target)
python
def copy(self): products_by_target = defaultdict(OrderedSet) for key, value in self._products_by_target.items(): products_by_target[key] = OrderedSet(value) return UnionProducts(products_by_target=products_by_target)
[ "def", "copy", "(", "self", ")", ":", "products_by_target", "=", "defaultdict", "(", "OrderedSet", ")", "for", "key", ",", "value", "in", "self", ".", "_products_by_target", ".", "items", "(", ")", ":", "products_by_target", "[", "key", "]", "=", "OrderedS...
Returns a copy of this UnionProducts. Edits to the copy's mappings will not affect the product mappings in the original. The copy is shallow though, so edits to the copy's product values will mutate the original's product values. :API: public :rtype: :class:`UnionProducts`
[ "Returns", "a", "copy", "of", "this", "UnionProducts", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L40-L54
224,209
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.add_for_targets
def add_for_targets(self, targets, products): """Updates the products for the given targets, adding to existing entries. :API: public """ # TODO: This is a temporary helper for use until the classpath has been split. for target in targets: self.add_for_target(target, products)
python
def add_for_targets(self, targets, products): # TODO: This is a temporary helper for use until the classpath has been split. for target in targets: self.add_for_target(target, products)
[ "def", "add_for_targets", "(", "self", ",", "targets", ",", "products", ")", ":", "# TODO: This is a temporary helper for use until the classpath has been split.", "for", "target", "in", "targets", ":", "self", ".", "add_for_target", "(", "target", ",", "products", ")" ...
Updates the products for the given targets, adding to existing entries. :API: public
[ "Updates", "the", "products", "for", "the", "given", "targets", "adding", "to", "existing", "entries", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L63-L70
224,210
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.remove_for_target
def remove_for_target(self, target, products): """Updates the products for a particular target, removing the given existing entries. :API: public :param target: The target to remove the products for. :param products: A list of tuples (conf, path) to remove. """ for product in products: self._products_by_target[target].discard(product)
python
def remove_for_target(self, target, products): for product in products: self._products_by_target[target].discard(product)
[ "def", "remove_for_target", "(", "self", ",", "target", ",", "products", ")", ":", "for", "product", "in", "products", ":", "self", ".", "_products_by_target", "[", "target", "]", ".", "discard", "(", "product", ")" ]
Updates the products for a particular target, removing the given existing entries. :API: public :param target: The target to remove the products for. :param products: A list of tuples (conf, path) to remove.
[ "Updates", "the", "products", "for", "a", "particular", "target", "removing", "the", "given", "existing", "entries", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L72-L81
224,211
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.get_for_targets
def get_for_targets(self, targets): """Gets the union of the products for the given targets, preserving the input order. :API: public """ products = OrderedSet() for target in targets: products.update(self._products_by_target[target]) return products
python
def get_for_targets(self, targets): products = OrderedSet() for target in targets: products.update(self._products_by_target[target]) return products
[ "def", "get_for_targets", "(", "self", ",", "targets", ")", ":", "products", "=", "OrderedSet", "(", ")", "for", "target", "in", "targets", ":", "products", ".", "update", "(", "self", ".", "_products_by_target", "[", "target", "]", ")", "return", "product...
Gets the union of the products for the given targets, preserving the input order. :API: public
[ "Gets", "the", "union", "of", "the", "products", "for", "the", "given", "targets", "preserving", "the", "input", "order", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L90-L98
224,212
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.get_product_target_mappings_for_targets
def get_product_target_mappings_for_targets(self, targets): """Gets the product-target associations for the given targets, preserving the input order. :API: public :param targets: The targets to lookup products for. :returns: The ordered (product, target) tuples. """ product_target_mappings = [] for target in targets: for product in self._products_by_target[target]: product_target_mappings.append((product, target)) return product_target_mappings
python
def get_product_target_mappings_for_targets(self, targets): product_target_mappings = [] for target in targets: for product in self._products_by_target[target]: product_target_mappings.append((product, target)) return product_target_mappings
[ "def", "get_product_target_mappings_for_targets", "(", "self", ",", "targets", ")", ":", "product_target_mappings", "=", "[", "]", "for", "target", "in", "targets", ":", "for", "product", "in", "self", ".", "_products_by_target", "[", "target", "]", ":", "produc...
Gets the product-target associations for the given targets, preserving the input order. :API: public :param targets: The targets to lookup products for. :returns: The ordered (product, target) tuples.
[ "Gets", "the", "product", "-", "target", "associations", "for", "the", "given", "targets", "preserving", "the", "input", "order", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L100-L113
224,213
pantsbuild/pants
src/python/pants/goal/products.py
UnionProducts.target_for_product
def target_for_product(self, product): """Looks up the target key for a product. :API: public :param product: The product to search for :return: None if there is no target for the product """ for target, products in self._products_by_target.items(): if product in products: return target return None
python
def target_for_product(self, product): for target, products in self._products_by_target.items(): if product in products: return target return None
[ "def", "target_for_product", "(", "self", ",", "product", ")", ":", "for", "target", ",", "products", "in", "self", ".", "_products_by_target", ".", "items", "(", ")", ":", "if", "product", "in", "products", ":", "return", "target", "return", "None" ]
Looks up the target key for a product. :API: public :param product: The product to search for :return: None if there is no target for the product
[ "Looks", "up", "the", "target", "key", "for", "a", "product", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L115-L126
224,214
pantsbuild/pants
src/python/pants/goal/products.py
Products.register_data
def register_data(self, typename, value): """Registers a data product, raising if a product was already registered. :API: public :param typename: The type of product to register a value for. :param value: The data product to register under `typename`. :returns: The registered `value`. :raises: :class:`ProductError` if a value for the given product `typename` is already registered. """ if typename in self.data_products: raise ProductError('Already have a product registered for {}, cannot over-write with {}' .format(typename, value)) return self.safe_create_data(typename, lambda: value)
python
def register_data(self, typename, value): if typename in self.data_products: raise ProductError('Already have a product registered for {}, cannot over-write with {}' .format(typename, value)) return self.safe_create_data(typename, lambda: value)
[ "def", "register_data", "(", "self", ",", "typename", ",", "value", ")", ":", "if", "typename", "in", "self", ".", "data_products", ":", "raise", "ProductError", "(", "'Already have a product registered for {}, cannot over-write with {}'", ".", "format", "(", "typenam...
Registers a data product, raising if a product was already registered. :API: public :param typename: The type of product to register a value for. :param value: The data product to register under `typename`. :returns: The registered `value`. :raises: :class:`ProductError` if a value for the given product `typename` is already registered.
[ "Registers", "a", "data", "product", "raising", "if", "a", "product", "was", "already", "registered", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L414-L428
224,215
pantsbuild/pants
src/python/pants/goal/products.py
Products.get_data
def get_data(self, typename, init_func=None): """Returns a data product. :API: public If the product isn't found, returns None, unless init_func is set, in which case the product's value is set to the return value of init_func(), and returned. """ if typename not in self.data_products: if not init_func: return None self.data_products[typename] = init_func() return self.data_products.get(typename)
python
def get_data(self, typename, init_func=None): if typename not in self.data_products: if not init_func: return None self.data_products[typename] = init_func() return self.data_products.get(typename)
[ "def", "get_data", "(", "self", ",", "typename", ",", "init_func", "=", "None", ")", ":", "if", "typename", "not", "in", "self", ".", "data_products", ":", "if", "not", "init_func", ":", "return", "None", "self", ".", "data_products", "[", "typename", "]...
Returns a data product. :API: public If the product isn't found, returns None, unless init_func is set, in which case the product's value is set to the return value of init_func(), and returned.
[ "Returns", "a", "data", "product", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L438-L450
224,216
pantsbuild/pants
src/python/pants/goal/products.py
Products.get_only
def get_only(self, product_type, target): """If there is exactly one product for the given product type and target, returns the full filepath of said product. Otherwise, raises a ProductError. Useful for retrieving the filepath for the executable of a binary target. :API: public """ product_mapping = self.get(product_type).get(target) if len(product_mapping) != 1: raise ProductError('{} directories in product mapping: requires exactly 1.' .format(len(product_mapping))) for _, files in product_mapping.items(): if len(files) != 1: raise ProductError('{} files in target directory: requires exactly 1.' .format(len(files))) return files[0]
python
def get_only(self, product_type, target): product_mapping = self.get(product_type).get(target) if len(product_mapping) != 1: raise ProductError('{} directories in product mapping: requires exactly 1.' .format(len(product_mapping))) for _, files in product_mapping.items(): if len(files) != 1: raise ProductError('{} files in target directory: requires exactly 1.' .format(len(files))) return files[0]
[ "def", "get_only", "(", "self", ",", "product_type", ",", "target", ")", ":", "product_mapping", "=", "self", ".", "get", "(", "product_type", ")", ".", "get", "(", "target", ")", "if", "len", "(", "product_mapping", ")", "!=", "1", ":", "raise", "Prod...
If there is exactly one product for the given product type and target, returns the full filepath of said product. Otherwise, raises a ProductError. Useful for retrieving the filepath for the executable of a binary target. :API: public
[ "If", "there", "is", "exactly", "one", "product", "for", "the", "given", "product", "type", "and", "target", "returns", "the", "full", "filepath", "of", "said", "product", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/goal/products.py#L452-L472
224,217
pantsbuild/pants
contrib/awslambda/python/src/python/pants/contrib/awslambda/python/targets/python_awslambda.py
PythonAWSLambda.binary
def binary(self): """Returns the binary that builds the pex for this lambda.""" dependencies = self.dependencies if len(dependencies) != 1: raise TargetDefinitionException(self, 'An app must define exactly one binary ' 'dependency, have: {}'.format(dependencies)) binary = dependencies[0] if not isinstance(binary, PythonBinary): raise TargetDefinitionException(self, 'Expected binary dependency to be a python_binary ' 'target, found {}'.format(binary)) return binary
python
def binary(self): dependencies = self.dependencies if len(dependencies) != 1: raise TargetDefinitionException(self, 'An app must define exactly one binary ' 'dependency, have: {}'.format(dependencies)) binary = dependencies[0] if not isinstance(binary, PythonBinary): raise TargetDefinitionException(self, 'Expected binary dependency to be a python_binary ' 'target, found {}'.format(binary)) return binary
[ "def", "binary", "(", "self", ")", ":", "dependencies", "=", "self", ".", "dependencies", "if", "len", "(", "dependencies", ")", "!=", "1", ":", "raise", "TargetDefinitionException", "(", "self", ",", "'An app must define exactly one binary '", "'dependency, have: {...
Returns the binary that builds the pex for this lambda.
[ "Returns", "the", "binary", "that", "builds", "the", "pex", "for", "this", "lambda", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/awslambda/python/src/python/pants/contrib/awslambda/python/targets/python_awslambda.py#L49-L59
224,218
pantsbuild/pants
src/python/pants/init/extension_loader.py
load_backends_and_plugins
def load_backends_and_plugins(plugins, working_set, backends, build_configuration=None): """Load named plugins and source backends :param list<str> plugins: Plugins to load (see `load_plugins`). Plugins are loaded after backends. :param WorkingSet working_set: A pkg_resources.WorkingSet to load plugins from. :param list<str> backends: Source backends to load (see `load_build_configuration_from_source`). """ build_configuration = build_configuration or BuildConfiguration() load_build_configuration_from_source(build_configuration, backends) load_plugins(build_configuration, plugins or [], working_set) return build_configuration
python
def load_backends_and_plugins(plugins, working_set, backends, build_configuration=None): build_configuration = build_configuration or BuildConfiguration() load_build_configuration_from_source(build_configuration, backends) load_plugins(build_configuration, plugins or [], working_set) return build_configuration
[ "def", "load_backends_and_plugins", "(", "plugins", ",", "working_set", ",", "backends", ",", "build_configuration", "=", "None", ")", ":", "build_configuration", "=", "build_configuration", "or", "BuildConfiguration", "(", ")", "load_build_configuration_from_source", "("...
Load named plugins and source backends :param list<str> plugins: Plugins to load (see `load_plugins`). Plugins are loaded after backends. :param WorkingSet working_set: A pkg_resources.WorkingSet to load plugins from. :param list<str> backends: Source backends to load (see `load_build_configuration_from_source`).
[ "Load", "named", "plugins", "and", "source", "backends" ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/extension_loader.py#L26-L37
224,219
pantsbuild/pants
src/python/pants/init/extension_loader.py
load_plugins
def load_plugins(build_configuration, plugins, working_set): """Load named plugins from the current working_set into the supplied build_configuration "Loading" a plugin here refers to calling registration methods -- it is assumed each plugin is already on the path and an error will be thrown if it is not. Plugins should define their entrypoints in the `pantsbuild.plugin` group when configuring their distribution. Like source backends, the `build_file_aliases`, `global_subsystems` and `register_goals` methods are called if those entry points are defined. * Plugins are loaded in the order they are provided. * This is important as loading can add, remove or replace existing tasks installed by other plugins. If a plugin needs to assert that another plugin is registered before it, it can define an entrypoint "load_after" which can return a list of plugins which must have been loaded before it can be loaded. This does not change the order or what plugins are loaded in any way -- it is purely an assertion to guard against misconfiguration. :param BuildConfiguration build_configuration: The BuildConfiguration (for adding aliases). :param list<str> plugins: A list of plugin names optionally with versions, in requirement format. eg ['widgetpublish', 'widgetgen==1.2']. :param WorkingSet working_set: A pkg_resources.WorkingSet to load plugins from. 
""" loaded = {} for plugin in plugins: req = Requirement.parse(plugin) dist = working_set.find(req) if not dist: raise PluginNotFound('Could not find plugin: {}'.format(req)) entries = dist.get_entry_map().get('pantsbuild.plugin', {}) if 'load_after' in entries: deps = entries['load_after'].load()() for dep_name in deps: dep = Requirement.parse(dep_name) if dep.key not in loaded: raise PluginLoadOrderError('Plugin {0} must be loaded after {1}'.format(plugin, dep)) if 'build_file_aliases' in entries: aliases = entries['build_file_aliases'].load()() build_configuration.register_aliases(aliases) if 'register_goals' in entries: entries['register_goals'].load()() if 'global_subsystems' in entries: subsystems = entries['global_subsystems'].load()() build_configuration.register_optionables(subsystems) if 'rules' in entries: rules = entries['rules'].load()() build_configuration.register_rules(rules) loaded[dist.as_requirement().key] = dist
python
def load_plugins(build_configuration, plugins, working_set): loaded = {} for plugin in plugins: req = Requirement.parse(plugin) dist = working_set.find(req) if not dist: raise PluginNotFound('Could not find plugin: {}'.format(req)) entries = dist.get_entry_map().get('pantsbuild.plugin', {}) if 'load_after' in entries: deps = entries['load_after'].load()() for dep_name in deps: dep = Requirement.parse(dep_name) if dep.key not in loaded: raise PluginLoadOrderError('Plugin {0} must be loaded after {1}'.format(plugin, dep)) if 'build_file_aliases' in entries: aliases = entries['build_file_aliases'].load()() build_configuration.register_aliases(aliases) if 'register_goals' in entries: entries['register_goals'].load()() if 'global_subsystems' in entries: subsystems = entries['global_subsystems'].load()() build_configuration.register_optionables(subsystems) if 'rules' in entries: rules = entries['rules'].load()() build_configuration.register_rules(rules) loaded[dist.as_requirement().key] = dist
[ "def", "load_plugins", "(", "build_configuration", ",", "plugins", ",", "working_set", ")", ":", "loaded", "=", "{", "}", "for", "plugin", "in", "plugins", ":", "req", "=", "Requirement", ".", "parse", "(", "plugin", ")", "dist", "=", "working_set", ".", ...
Load named plugins from the current working_set into the supplied build_configuration "Loading" a plugin here refers to calling registration methods -- it is assumed each plugin is already on the path and an error will be thrown if it is not. Plugins should define their entrypoints in the `pantsbuild.plugin` group when configuring their distribution. Like source backends, the `build_file_aliases`, `global_subsystems` and `register_goals` methods are called if those entry points are defined. * Plugins are loaded in the order they are provided. * This is important as loading can add, remove or replace existing tasks installed by other plugins. If a plugin needs to assert that another plugin is registered before it, it can define an entrypoint "load_after" which can return a list of plugins which must have been loaded before it can be loaded. This does not change the order or what plugins are loaded in any way -- it is purely an assertion to guard against misconfiguration. :param BuildConfiguration build_configuration: The BuildConfiguration (for adding aliases). :param list<str> plugins: A list of plugin names optionally with versions, in requirement format. eg ['widgetpublish', 'widgetgen==1.2']. :param WorkingSet working_set: A pkg_resources.WorkingSet to load plugins from.
[ "Load", "named", "plugins", "from", "the", "current", "working_set", "into", "the", "supplied", "build_configuration" ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/extension_loader.py#L40-L96
224,220
pantsbuild/pants
src/python/pants/init/extension_loader.py
load_build_configuration_from_source
def load_build_configuration_from_source(build_configuration, backends=None): """Installs pants backend packages to provide BUILD file symbols and cli goals. :param BuildConfiguration build_configuration: The BuildConfiguration (for adding aliases). :param backends: An optional list of additional packages to load backends from. :raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the build configuration. """ # pants.build_graph and pants.core_task must always be loaded, and before any other backends. # TODO: Consider replacing the "backend" nomenclature here. pants.build_graph and # pants.core_tasks aren't really backends. backend_packages = OrderedSet(['pants.build_graph', 'pants.core_tasks'] + (backends or [])) for backend_package in backend_packages: load_backend(build_configuration, backend_package)
python
def load_build_configuration_from_source(build_configuration, backends=None): # pants.build_graph and pants.core_task must always be loaded, and before any other backends. # TODO: Consider replacing the "backend" nomenclature here. pants.build_graph and # pants.core_tasks aren't really backends. backend_packages = OrderedSet(['pants.build_graph', 'pants.core_tasks'] + (backends or [])) for backend_package in backend_packages: load_backend(build_configuration, backend_package)
[ "def", "load_build_configuration_from_source", "(", "build_configuration", ",", "backends", "=", "None", ")", ":", "# pants.build_graph and pants.core_task must always be loaded, and before any other backends.", "# TODO: Consider replacing the \"backend\" nomenclature here. pants.build_graph a...
Installs pants backend packages to provide BUILD file symbols and cli goals. :param BuildConfiguration build_configuration: The BuildConfiguration (for adding aliases). :param backends: An optional list of additional packages to load backends from. :raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the build configuration.
[ "Installs", "pants", "backend", "packages", "to", "provide", "BUILD", "file", "symbols", "and", "cli", "goals", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/extension_loader.py#L99-L112
224,221
pantsbuild/pants
src/python/pants/init/extension_loader.py
load_backend
def load_backend(build_configuration, backend_package): """Installs the given backend package into the build configuration. :param build_configuration the :class:``pants.build_graph.build_configuration.BuildConfiguration`` to install the backend plugin into. :param string backend_package: the package name containing the backend plugin register module that provides the plugin entrypoints. :raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the build configuration.""" backend_module = backend_package + '.register' try: module = importlib.import_module(backend_module) except ImportError as e: traceback.print_exc() raise BackendConfigurationError('Failed to load the {backend} backend: {error}' .format(backend=backend_module, error=e)) def invoke_entrypoint(name): entrypoint = getattr(module, name, lambda: None) try: return entrypoint() except TypeError as e: traceback.print_exc() raise BackendConfigurationError( 'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}' .format(entrypoint=name, backend=backend_module, error=e)) build_file_aliases = invoke_entrypoint('build_file_aliases') if build_file_aliases: build_configuration.register_aliases(build_file_aliases) subsystems = invoke_entrypoint('global_subsystems') if subsystems: build_configuration.register_optionables(subsystems) rules = invoke_entrypoint('rules') if rules: build_configuration.register_rules(rules) invoke_entrypoint('register_goals')
python
def load_backend(build_configuration, backend_package): backend_module = backend_package + '.register' try: module = importlib.import_module(backend_module) except ImportError as e: traceback.print_exc() raise BackendConfigurationError('Failed to load the {backend} backend: {error}' .format(backend=backend_module, error=e)) def invoke_entrypoint(name): entrypoint = getattr(module, name, lambda: None) try: return entrypoint() except TypeError as e: traceback.print_exc() raise BackendConfigurationError( 'Entrypoint {entrypoint} in {backend} must be a zero-arg callable: {error}' .format(entrypoint=name, backend=backend_module, error=e)) build_file_aliases = invoke_entrypoint('build_file_aliases') if build_file_aliases: build_configuration.register_aliases(build_file_aliases) subsystems = invoke_entrypoint('global_subsystems') if subsystems: build_configuration.register_optionables(subsystems) rules = invoke_entrypoint('rules') if rules: build_configuration.register_rules(rules) invoke_entrypoint('register_goals')
[ "def", "load_backend", "(", "build_configuration", ",", "backend_package", ")", ":", "backend_module", "=", "backend_package", "+", "'.register'", "try", ":", "module", "=", "importlib", ".", "import_module", "(", "backend_module", ")", "except", "ImportError", "as"...
Installs the given backend package into the build configuration. :param build_configuration the :class:``pants.build_graph.build_configuration.BuildConfiguration`` to install the backend plugin into. :param string backend_package: the package name containing the backend plugin register module that provides the plugin entrypoints. :raises: :class:``pants.base.exceptions.BuildConfigurationError`` if there is a problem loading the build configuration.
[ "Installs", "the", "given", "backend", "package", "into", "the", "build", "configuration", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/init/extension_loader.py#L115-L154
224,222
pantsbuild/pants
src/python/pants/engine/legacy/structs.py
TargetAdaptor.get_sources
def get_sources(self): """Returns target's non-deferred sources if exists or the default sources if defined. :rtype: :class:`GlobsWithConjunction` NB: once ivy is implemented in the engine, we can fetch sources natively here, and/or refactor how deferred sources are implemented. see: https://github.com/pantsbuild/pants/issues/2997 """ source = getattr(self, 'source', None) sources = getattr(self, 'sources', None) if source is not None and sources is not None: raise Target.IllegalArgument( self.address.spec, 'Cannot specify both source and sources attribute.' ) if source is not None: if not isinstance(source, string_types): raise Target.IllegalArgument( self.address.spec, 'source must be a string containing a path relative to the target, but got {} of type {}' .format(source, type(source)) ) sources = [source] # N.B. Here we check specifically for `sources is None`, as it's possible for sources # to be e.g. an explicit empty list (sources=[]). if sources is None: if self.default_sources_globs is not None: globs = Globs(*self.default_sources_globs, spec_path=self.address.spec_path, exclude=self.default_sources_exclude_globs or []) conjunction_globs = GlobsWithConjunction(globs, GlobExpansionConjunction.any_match) else: globs = None conjunction_globs = None else: globs = BaseGlobs.from_sources_field(sources, self.address.spec_path) conjunction_globs = GlobsWithConjunction(globs, GlobExpansionConjunction.all_match) return conjunction_globs
python
def get_sources(self): source = getattr(self, 'source', None) sources = getattr(self, 'sources', None) if source is not None and sources is not None: raise Target.IllegalArgument( self.address.spec, 'Cannot specify both source and sources attribute.' ) if source is not None: if not isinstance(source, string_types): raise Target.IllegalArgument( self.address.spec, 'source must be a string containing a path relative to the target, but got {} of type {}' .format(source, type(source)) ) sources = [source] # N.B. Here we check specifically for `sources is None`, as it's possible for sources # to be e.g. an explicit empty list (sources=[]). if sources is None: if self.default_sources_globs is not None: globs = Globs(*self.default_sources_globs, spec_path=self.address.spec_path, exclude=self.default_sources_exclude_globs or []) conjunction_globs = GlobsWithConjunction(globs, GlobExpansionConjunction.any_match) else: globs = None conjunction_globs = None else: globs = BaseGlobs.from_sources_field(sources, self.address.spec_path) conjunction_globs = GlobsWithConjunction(globs, GlobExpansionConjunction.all_match) return conjunction_globs
[ "def", "get_sources", "(", "self", ")", ":", "source", "=", "getattr", "(", "self", ",", "'source'", ",", "None", ")", "sources", "=", "getattr", "(", "self", ",", "'sources'", ",", "None", ")", "if", "source", "is", "not", "None", "and", "sources", ...
Returns target's non-deferred sources if exists or the default sources if defined. :rtype: :class:`GlobsWithConjunction` NB: once ivy is implemented in the engine, we can fetch sources natively here, and/or refactor how deferred sources are implemented. see: https://github.com/pantsbuild/pants/issues/2997
[ "Returns", "target", "s", "non", "-", "deferred", "sources", "if", "exists", "or", "the", "default", "sources", "if", "defined", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/legacy/structs.py#L36-L78
224,223
pantsbuild/pants
src/python/pants/engine/legacy/structs.py
TargetAdaptor.field_adaptors
def field_adaptors(self): """Returns a tuple of Fields for captured fields which need additional treatment.""" with exception_logging(logger, 'Exception in `field_adaptors` property'): conjunction_globs = self.get_sources() if conjunction_globs is None: return tuple() sources = conjunction_globs.non_path_globs conjunction = conjunction_globs.conjunction if not sources: return tuple() base_globs = BaseGlobs.from_sources_field(sources, self.address.spec_path) path_globs = base_globs.to_path_globs(self.address.spec_path, conjunction) return (SourcesField( self.address, 'sources', base_globs.filespecs, base_globs, path_globs, self.validate_sources, ),)
python
def field_adaptors(self): with exception_logging(logger, 'Exception in `field_adaptors` property'): conjunction_globs = self.get_sources() if conjunction_globs is None: return tuple() sources = conjunction_globs.non_path_globs conjunction = conjunction_globs.conjunction if not sources: return tuple() base_globs = BaseGlobs.from_sources_field(sources, self.address.spec_path) path_globs = base_globs.to_path_globs(self.address.spec_path, conjunction) return (SourcesField( self.address, 'sources', base_globs.filespecs, base_globs, path_globs, self.validate_sources, ),)
[ "def", "field_adaptors", "(", "self", ")", ":", "with", "exception_logging", "(", "logger", ",", "'Exception in `field_adaptors` property'", ")", ":", "conjunction_globs", "=", "self", ".", "get_sources", "(", ")", "if", "conjunction_globs", "is", "None", ":", "re...
Returns a tuple of Fields for captured fields which need additional treatment.
[ "Returns", "a", "tuple", "of", "Fields", "for", "captured", "fields", "which", "need", "additional", "treatment", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/legacy/structs.py#L81-L104
224,224
pantsbuild/pants
src/python/pants/engine/legacy/structs.py
BaseGlobs.from_sources_field
def from_sources_field(sources, spec_path): """Return a BaseGlobs for the given sources field. `sources` may be None, a list/tuple/set, a string or a BaseGlobs instance. """ if sources is None: return Files(spec_path=spec_path) elif isinstance(sources, BaseGlobs): return sources elif isinstance(sources, string_types): return Files(sources, spec_path=spec_path) elif isinstance(sources, (MutableSet, MutableSequence, tuple)) and \ all(isinstance(s, string_types) for s in sources): return Files(*sources, spec_path=spec_path) else: raise ValueError('Expected either a glob or list of literal sources: got: {}'.format(sources))
python
def from_sources_field(sources, spec_path): if sources is None: return Files(spec_path=spec_path) elif isinstance(sources, BaseGlobs): return sources elif isinstance(sources, string_types): return Files(sources, spec_path=spec_path) elif isinstance(sources, (MutableSet, MutableSequence, tuple)) and \ all(isinstance(s, string_types) for s in sources): return Files(*sources, spec_path=spec_path) else: raise ValueError('Expected either a glob or list of literal sources: got: {}'.format(sources))
[ "def", "from_sources_field", "(", "sources", ",", "spec_path", ")", ":", "if", "sources", "is", "None", ":", "return", "Files", "(", "spec_path", "=", "spec_path", ")", "elif", "isinstance", "(", "sources", ",", "BaseGlobs", ")", ":", "return", "sources", ...
Return a BaseGlobs for the given sources field. `sources` may be None, a list/tuple/set, a string or a BaseGlobs instance.
[ "Return", "a", "BaseGlobs", "for", "the", "given", "sources", "field", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/legacy/structs.py#L309-L324
224,225
pantsbuild/pants
src/python/pants/engine/legacy/structs.py
BaseGlobs.filespecs
def filespecs(self): """Return a filespecs dict representing both globs and excludes.""" filespecs = {'globs': self._file_globs} exclude_filespecs = self._exclude_filespecs if exclude_filespecs: filespecs['exclude'] = exclude_filespecs return filespecs
python
def filespecs(self): filespecs = {'globs': self._file_globs} exclude_filespecs = self._exclude_filespecs if exclude_filespecs: filespecs['exclude'] = exclude_filespecs return filespecs
[ "def", "filespecs", "(", "self", ")", ":", "filespecs", "=", "{", "'globs'", ":", "self", ".", "_file_globs", "}", "exclude_filespecs", "=", "self", ".", "_exclude_filespecs", "if", "exclude_filespecs", ":", "filespecs", "[", "'exclude'", "]", "=", "exclude_fi...
Return a filespecs dict representing both globs and excludes.
[ "Return", "a", "filespecs", "dict", "representing", "both", "globs", "and", "excludes", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/legacy/structs.py#L361-L367
224,226
pantsbuild/pants
src/python/pants/engine/legacy/structs.py
BaseGlobs.to_path_globs
def to_path_globs(self, relpath, conjunction): """Return a PathGlobs representing the included and excluded Files for these patterns.""" return PathGlobs( include=tuple(os.path.join(relpath, glob) for glob in self._file_globs), exclude=tuple(os.path.join(relpath, exclude) for exclude in self._excluded_file_globs), conjunction=conjunction)
python
def to_path_globs(self, relpath, conjunction): return PathGlobs( include=tuple(os.path.join(relpath, glob) for glob in self._file_globs), exclude=tuple(os.path.join(relpath, exclude) for exclude in self._excluded_file_globs), conjunction=conjunction)
[ "def", "to_path_globs", "(", "self", ",", "relpath", ",", "conjunction", ")", ":", "return", "PathGlobs", "(", "include", "=", "tuple", "(", "os", ".", "path", ".", "join", "(", "relpath", ",", "glob", ")", "for", "glob", "in", "self", ".", "_file_glob...
Return a PathGlobs representing the included and excluded Files for these patterns.
[ "Return", "a", "PathGlobs", "representing", "the", "included", "and", "excluded", "Files", "for", "these", "patterns", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/engine/legacy/structs.py#L376-L381
224,227
pantsbuild/pants
src/python/pants/backend/jvm/tasks/consolidate_classpath.py
ConsolidateClasspath._consolidate_classpath
def _consolidate_classpath(self, targets, classpath_products): """Convert loose directories in classpath_products into jars. """ # TODO: find a way to not process classpath entries for valid VTs. # NB: It is very expensive to call to get entries for each target one at a time. # For performance reasons we look them all up at once. entries_map = defaultdict(list) for (cp, target) in classpath_products.get_product_target_mappings_for_targets(targets, True): entries_map[target].append(cp) with self.invalidated(targets=targets, invalidate_dependents=True) as invalidation: for vt in invalidation.all_vts: entries = entries_map.get(vt.target, []) for index, (conf, entry) in enumerate(entries): if ClasspathUtil.is_dir(entry.path): jarpath = os.path.join(vt.results_dir, 'output-{}.jar'.format(index)) # Regenerate artifact for invalid vts. if not vt.valid: with self.open_jar(jarpath, overwrite=True, compressed=False) as jar: jar.write(entry.path) # Replace directory classpath entry with its jarpath. classpath_products.remove_for_target(vt.target, [(conf, entry.path)]) classpath_products.add_for_target(vt.target, [(conf, jarpath)])
python
def _consolidate_classpath(self, targets, classpath_products): # TODO: find a way to not process classpath entries for valid VTs. # NB: It is very expensive to call to get entries for each target one at a time. # For performance reasons we look them all up at once. entries_map = defaultdict(list) for (cp, target) in classpath_products.get_product_target_mappings_for_targets(targets, True): entries_map[target].append(cp) with self.invalidated(targets=targets, invalidate_dependents=True) as invalidation: for vt in invalidation.all_vts: entries = entries_map.get(vt.target, []) for index, (conf, entry) in enumerate(entries): if ClasspathUtil.is_dir(entry.path): jarpath = os.path.join(vt.results_dir, 'output-{}.jar'.format(index)) # Regenerate artifact for invalid vts. if not vt.valid: with self.open_jar(jarpath, overwrite=True, compressed=False) as jar: jar.write(entry.path) # Replace directory classpath entry with its jarpath. classpath_products.remove_for_target(vt.target, [(conf, entry.path)]) classpath_products.add_for_target(vt.target, [(conf, jarpath)])
[ "def", "_consolidate_classpath", "(", "self", ",", "targets", ",", "classpath_products", ")", ":", "# TODO: find a way to not process classpath entries for valid VTs.", "# NB: It is very expensive to call to get entries for each target one at a time.", "# For performance reasons we look them...
Convert loose directories in classpath_products into jars.
[ "Convert", "loose", "directories", "in", "classpath_products", "into", "jars", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/consolidate_classpath.py#L49-L73
224,228
pantsbuild/pants
src/python/pants/option/custom_types.py
_convert
def _convert(val, acceptable_types): """Ensure that val is one of the acceptable types, converting it if needed. :param val: The value we're parsing (either a string or one of the acceptable types). :param acceptable_types: A tuple of expected types for val. :returns: The parsed value. :raises :class:`pants.options.errors.ParseError`: if there was a problem parsing the val as an acceptable type. """ if isinstance(val, acceptable_types): return val return parse_expression(val, acceptable_types, raise_type=ParseError)
python
def _convert(val, acceptable_types): if isinstance(val, acceptable_types): return val return parse_expression(val, acceptable_types, raise_type=ParseError)
[ "def", "_convert", "(", "val", ",", "acceptable_types", ")", ":", "if", "isinstance", "(", "val", ",", "acceptable_types", ")", ":", "return", "val", "return", "parse_expression", "(", "val", ",", "acceptable_types", ",", "raise_type", "=", "ParseError", ")" ]
Ensure that val is one of the acceptable types, converting it if needed. :param val: The value we're parsing (either a string or one of the acceptable types). :param acceptable_types: A tuple of expected types for val. :returns: The parsed value. :raises :class:`pants.options.errors.ParseError`: if there was a problem parsing the val as an acceptable type.
[ "Ensure", "that", "val", "is", "one", "of", "the", "acceptable", "types", "converting", "it", "if", "needed", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/option/custom_types.py#L119-L130
224,229
pantsbuild/pants
src/python/pants/option/custom_types.py
ListValueComponent.create
def create(cls, value): """Interpret value as either a list or something to extend another list with. Note that we accept tuple literals, but the internal value is always a list. :param value: The value to convert. Can be an instance of ListValueComponent, a list, a tuple, a string representation of a list or tuple (possibly prefixed by + or - indicating modification instead of replacement), or any allowed member_type. May also be a comma-separated sequence of modifications. :rtype: `ListValueComponent` """ if isinstance(value, bytes): value = value.decode('utf-8') if isinstance(value, str): comma_separated_exprs = cls._split_modifier_expr(value) if len(comma_separated_exprs) > 1: return cls.merge([cls.create(x) for x in comma_separated_exprs]) action = cls.MODIFY appends = [] filters = [] if isinstance(value, cls): # Ensure idempotency. action = value._action appends = value._appends filters = value._filters elif isinstance(value, (list, tuple)): # Ensure we can handle list-typed default values. action = cls.REPLACE appends = value elif value.startswith('[') or value.startswith('('): action = cls.REPLACE appends = _convert(value, (list, tuple)) elif value.startswith('+[') or value.startswith('+('): appends = _convert(value[1:], (list, tuple)) elif value.startswith('-[') or value.startswith('-('): filters = _convert(value[1:], (list, tuple)) elif isinstance(value, str): appends = [value] else: appends = _convert('[{}]'.format(value), list) return cls(action, list(appends), list(filters))
python
def create(cls, value): if isinstance(value, bytes): value = value.decode('utf-8') if isinstance(value, str): comma_separated_exprs = cls._split_modifier_expr(value) if len(comma_separated_exprs) > 1: return cls.merge([cls.create(x) for x in comma_separated_exprs]) action = cls.MODIFY appends = [] filters = [] if isinstance(value, cls): # Ensure idempotency. action = value._action appends = value._appends filters = value._filters elif isinstance(value, (list, tuple)): # Ensure we can handle list-typed default values. action = cls.REPLACE appends = value elif value.startswith('[') or value.startswith('('): action = cls.REPLACE appends = _convert(value, (list, tuple)) elif value.startswith('+[') or value.startswith('+('): appends = _convert(value[1:], (list, tuple)) elif value.startswith('-[') or value.startswith('-('): filters = _convert(value[1:], (list, tuple)) elif isinstance(value, str): appends = [value] else: appends = _convert('[{}]'.format(value), list) return cls(action, list(appends), list(filters))
[ "def", "create", "(", "cls", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "value", ",", "str", ")", ":", "comma_separated_exprs", ...
Interpret value as either a list or something to extend another list with. Note that we accept tuple literals, but the internal value is always a list. :param value: The value to convert. Can be an instance of ListValueComponent, a list, a tuple, a string representation of a list or tuple (possibly prefixed by + or - indicating modification instead of replacement), or any allowed member_type. May also be a comma-separated sequence of modifications. :rtype: `ListValueComponent`
[ "Interpret", "value", "as", "either", "a", "list", "or", "something", "to", "extend", "another", "list", "with", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/option/custom_types.py#L210-L250
224,230
pantsbuild/pants
src/python/pants/option/custom_types.py
DictValueComponent.create
def create(cls, value): """Interpret value as either a dict or something to extend another dict with. :param value: The value to convert. Can be an instance of DictValueComponent, a dict, or a string representation (possibly prefixed by +) of a dict. :rtype: `DictValueComponent` """ if isinstance(value, bytes): value = value.decode('utf-8') if isinstance(value, cls): # Ensure idempotency. action = value.action val = value.val elif isinstance(value, dict): # Ensure we can handle dict-typed default values. action = cls.REPLACE val = value elif value.startswith('{'): action = cls.REPLACE val = _convert(value, dict) elif value.startswith('+{'): action = cls.EXTEND val = _convert(value[1:], dict) else: raise ParseError('Invalid dict value: {}'.format(value)) return cls(action, dict(val))
python
def create(cls, value): if isinstance(value, bytes): value = value.decode('utf-8') if isinstance(value, cls): # Ensure idempotency. action = value.action val = value.val elif isinstance(value, dict): # Ensure we can handle dict-typed default values. action = cls.REPLACE val = value elif value.startswith('{'): action = cls.REPLACE val = _convert(value, dict) elif value.startswith('+{'): action = cls.EXTEND val = _convert(value[1:], dict) else: raise ParseError('Invalid dict value: {}'.format(value)) return cls(action, dict(val))
[ "def", "create", "(", "cls", ",", "value", ")", ":", "if", "isinstance", "(", "value", ",", "bytes", ")", ":", "value", "=", "value", ".", "decode", "(", "'utf-8'", ")", "if", "isinstance", "(", "value", ",", "cls", ")", ":", "# Ensure idempotency.", ...
Interpret value as either a dict or something to extend another dict with. :param value: The value to convert. Can be an instance of DictValueComponent, a dict, or a string representation (possibly prefixed by +) of a dict. :rtype: `DictValueComponent`
[ "Interpret", "value", "as", "either", "a", "dict", "or", "something", "to", "extend", "another", "dict", "with", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/option/custom_types.py#L296-L319
224,231
pantsbuild/pants
src/python/pants/base/generator.py
TemplateData.extend
def extend(self, **kwargs): """Returns a new instance with this instance's data overlayed by the key-value args.""" props = self.copy() props.update(kwargs) return TemplateData(**props)
python
def extend(self, **kwargs): props = self.copy() props.update(kwargs) return TemplateData(**props)
[ "def", "extend", "(", "self", ",", "*", "*", "kwargs", ")", ":", "props", "=", "self", ".", "copy", "(", ")", "props", ".", "update", "(", "kwargs", ")", "return", "TemplateData", "(", "*", "*", "props", ")" ]
Returns a new instance with this instance's data overlayed by the key-value args.
[ "Returns", "a", "new", "instance", "with", "this", "instance", "s", "data", "overlayed", "by", "the", "key", "-", "value", "args", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/generator.py#L23-L28
224,232
pantsbuild/pants
src/python/pants/backend/jvm/ossrh_publication_metadata.py
Scm.github
def github(cls, user, repo): """Creates an `Scm` for a github repo. :param string user: The github user or organization name the repo is hosted under. :param string repo: The repository name. :returns: An `Scm` representing the github repo. """ # For the url format, see: http://maven.apache.org/scm/git.html params = dict(user=user, repo=repo) connection = 'scm:git:git@github.com:{user}/{repo}.git'.format(**params) url = 'https://github.com/{user}/{repo}'.format(**params) return cls(connection=connection, developer_connection=connection, url=url)
python
def github(cls, user, repo): # For the url format, see: http://maven.apache.org/scm/git.html params = dict(user=user, repo=repo) connection = 'scm:git:git@github.com:{user}/{repo}.git'.format(**params) url = 'https://github.com/{user}/{repo}'.format(**params) return cls(connection=connection, developer_connection=connection, url=url)
[ "def", "github", "(", "cls", ",", "user", ",", "repo", ")", ":", "# For the url format, see: http://maven.apache.org/scm/git.html", "params", "=", "dict", "(", "user", "=", "user", ",", "repo", "=", "repo", ")", "connection", "=", "'scm:git:git@github.com:{user}/{re...
Creates an `Scm` for a github repo. :param string user: The github user or organization name the repo is hosted under. :param string repo: The repository name. :returns: An `Scm` representing the github repo.
[ "Creates", "an", "Scm", "for", "a", "github", "repo", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ossrh_publication_metadata.py#L34-L45
224,233
pantsbuild/pants
src/python/pants/backend/jvm/ossrh_publication_metadata.py
Scm.tagged
def tagged(self, tag): """Creates a new `Scm` identical to this `Scm` but with the given `tag`.""" return Scm(self.connection, self.developer_connection, self.url, tag=tag)
python
def tagged(self, tag): return Scm(self.connection, self.developer_connection, self.url, tag=tag)
[ "def", "tagged", "(", "self", ",", "tag", ")", ":", "return", "Scm", "(", "self", ".", "connection", ",", "self", ".", "developer_connection", ",", "self", ".", "url", ",", "tag", "=", "tag", ")" ]
Creates a new `Scm` identical to this `Scm` but with the given `tag`.
[ "Creates", "a", "new", "Scm", "identical", "to", "this", "Scm", "but", "with", "the", "given", "tag", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/ossrh_publication_metadata.py#L61-L63
224,234
pantsbuild/pants
src/python/pants/auth/cookies.py
Cookies.update
def update(self, cookies): """Add specified cookies to our cookie jar, and persists it. :param cookies: Any iterable that yields http.cookiejar.Cookie instances, such as a CookieJar. """ cookie_jar = self.get_cookie_jar() for cookie in cookies: cookie_jar.set_cookie(cookie) with self._lock: cookie_jar.save()
python
def update(self, cookies): cookie_jar = self.get_cookie_jar() for cookie in cookies: cookie_jar.set_cookie(cookie) with self._lock: cookie_jar.save()
[ "def", "update", "(", "self", ",", "cookies", ")", ":", "cookie_jar", "=", "self", ".", "get_cookie_jar", "(", ")", "for", "cookie", "in", "cookies", ":", "cookie_jar", ".", "set_cookie", "(", "cookie", ")", "with", "self", ".", "_lock", ":", "cookie_jar...
Add specified cookies to our cookie jar, and persists it. :param cookies: Any iterable that yields http.cookiejar.Cookie instances, such as a CookieJar.
[ "Add", "specified", "cookies", "to", "our", "cookie", "jar", "and", "persists", "it", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/auth/cookies.py#L28-L37
224,235
pantsbuild/pants
src/python/pants/auth/cookies.py
Cookies.get_cookie_jar
def get_cookie_jar(self): """Returns our cookie jar.""" cookie_file = self._get_cookie_file() cookie_jar = LWPCookieJar(cookie_file) if os.path.exists(cookie_file): cookie_jar.load() else: safe_mkdir_for(cookie_file) # Save an empty cookie jar so we can change the file perms on it before writing data to it. with self._lock: cookie_jar.save() os.chmod(cookie_file, 0o600) return cookie_jar
python
def get_cookie_jar(self): cookie_file = self._get_cookie_file() cookie_jar = LWPCookieJar(cookie_file) if os.path.exists(cookie_file): cookie_jar.load() else: safe_mkdir_for(cookie_file) # Save an empty cookie jar so we can change the file perms on it before writing data to it. with self._lock: cookie_jar.save() os.chmod(cookie_file, 0o600) return cookie_jar
[ "def", "get_cookie_jar", "(", "self", ")", ":", "cookie_file", "=", "self", ".", "_get_cookie_file", "(", ")", "cookie_jar", "=", "LWPCookieJar", "(", "cookie_file", ")", "if", "os", ".", "path", ".", "exists", "(", "cookie_file", ")", ":", "cookie_jar", "...
Returns our cookie jar.
[ "Returns", "our", "cookie", "jar", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/auth/cookies.py#L39-L51
224,236
pantsbuild/pants
src/python/pants/auth/cookies.py
Cookies._lock
def _lock(self): """An identity-keyed inter-process lock around the cookie file.""" lockfile = '{}.lock'.format(self._get_cookie_file()) safe_mkdir_for(lockfile) return OwnerPrintingInterProcessFileLock(lockfile)
python
def _lock(self): lockfile = '{}.lock'.format(self._get_cookie_file()) safe_mkdir_for(lockfile) return OwnerPrintingInterProcessFileLock(lockfile)
[ "def", "_lock", "(", "self", ")", ":", "lockfile", "=", "'{}.lock'", ".", "format", "(", "self", ".", "_get_cookie_file", "(", ")", ")", "safe_mkdir_for", "(", "lockfile", ")", "return", "OwnerPrintingInterProcessFileLock", "(", "lockfile", ")" ]
An identity-keyed inter-process lock around the cookie file.
[ "An", "identity", "-", "keyed", "inter", "-", "process", "lock", "around", "the", "cookie", "file", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/auth/cookies.py#L58-L62
224,237
pantsbuild/pants
contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py
GoTargetGenerator.generate
def generate(self, local_go_targets): """Automatically generates a Go target graph for the given local go targets. :param iter local_go_targets: The target roots to fill in a target graph for. :raises: :class:`GoTargetGenerator.GenerationError` if any missing targets cannot be generated. """ visited = {l.import_path: l.address for l in local_go_targets} with temporary_dir() as gopath: for local_go_target in local_go_targets: deps = self._list_deps(gopath, local_go_target.address) self._generate_missing(gopath, local_go_target.address, deps, visited) return list(visited.items())
python
def generate(self, local_go_targets): visited = {l.import_path: l.address for l in local_go_targets} with temporary_dir() as gopath: for local_go_target in local_go_targets: deps = self._list_deps(gopath, local_go_target.address) self._generate_missing(gopath, local_go_target.address, deps, visited) return list(visited.items())
[ "def", "generate", "(", "self", ",", "local_go_targets", ")", ":", "visited", "=", "{", "l", ".", "import_path", ":", "l", ".", "address", "for", "l", "in", "local_go_targets", "}", "with", "temporary_dir", "(", ")", "as", "gopath", ":", "for", "local_go...
Automatically generates a Go target graph for the given local go targets. :param iter local_go_targets: The target roots to fill in a target graph for. :raises: :class:`GoTargetGenerator.GenerationError` if any missing targets cannot be generated.
[ "Automatically", "generates", "a", "Go", "target", "graph", "for", "the", "given", "local", "go", "targets", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py#L54-L65
224,238
pantsbuild/pants
contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py
GoBuildgen.generate_targets
def generate_targets(self, local_go_targets=None): """Generate Go targets in memory to form a complete Go graph. :param local_go_targets: The local Go targets to fill in a complete target graph for. If `None`, then all local Go targets under the Go source root are used. :type local_go_targets: :class:`collections.Iterable` of :class:`pants.contrib.go.targets.go_local_source import GoLocalSource` :returns: A generation result if targets were generated, else `None`. :rtype: :class:`GoBuildgen.GenerationResult` """ # TODO(John Sirois): support multiple source roots like GOPATH does? # The GOPATH's 1st element is read-write, the rest are read-only; ie: their sources build to # the 1st element's pkg/ and bin/ dirs. go_roots_by_category = defaultdict(list) # TODO: Add "find source roots for lang" functionality to SourceRoots and use that instead. for sr in self.context.source_roots.all_roots(): if 'go' in sr.langs: go_roots_by_category[sr.category].append(sr.path) if go_roots_by_category[SourceRootCategories.TEST]: raise self.InvalidLocalRootsError('Go buildgen does not support test source roots.') if go_roots_by_category[SourceRootCategories.UNKNOWN]: raise self.InvalidLocalRootsError('Go buildgen does not support source roots of ' 'unknown category.') local_roots = go_roots_by_category[SourceRootCategories.SOURCE] if not local_roots: raise self.NoLocalRootsError('Can only BUILD gen if a Go local sources source root is ' 'defined.') if len(local_roots) > 1: raise self.InvalidLocalRootsError('Can only BUILD gen for a single Go local sources source ' 'root, found:\n\t{}' .format('\n\t'.join(sorted(local_roots)))) local_root = local_roots.pop() if local_go_targets: unrooted_locals = {t for t in local_go_targets if t.target_base != local_root} if unrooted_locals: raise self.UnrootedLocalSourceError('Cannot BUILD gen until the following targets are ' 'relocated to the source root at {}:\n\t{}' .format(local_root, '\n\t'.join(sorted(t.address.reference() for t in 
unrooted_locals)))) else: root = os.path.join(get_buildroot(), local_root) local_go_targets = self.context.scan(root=root).targets(self.is_local_src) if not local_go_targets: return None remote_roots = go_roots_by_category[SourceRootCategories.THIRDPARTY] if len(remote_roots) > 1: raise self.InvalidRemoteRootsError('Can only BUILD gen for a single Go remote library source ' 'root, found:\n\t{}' .format('\n\t'.join(sorted(remote_roots)))) remote_root = remote_roots.pop() if remote_roots else None generator = GoTargetGenerator(self.import_oracle, self.context.build_graph, local_root, self.get_fetcher_factory(), generate_remotes=self.get_options().remote, remote_root=remote_root) with self.context.new_workunit('go.buildgen', labels=[WorkUnitLabel.MULTITOOL]): try: generated = generator.generate(local_go_targets) return self.GenerationResult(generated=generated, local_root=local_root, remote_root=remote_root) except generator.GenerationError as e: raise self.GenerationError(e)
python
def generate_targets(self, local_go_targets=None): # TODO(John Sirois): support multiple source roots like GOPATH does? # The GOPATH's 1st element is read-write, the rest are read-only; ie: their sources build to # the 1st element's pkg/ and bin/ dirs. go_roots_by_category = defaultdict(list) # TODO: Add "find source roots for lang" functionality to SourceRoots and use that instead. for sr in self.context.source_roots.all_roots(): if 'go' in sr.langs: go_roots_by_category[sr.category].append(sr.path) if go_roots_by_category[SourceRootCategories.TEST]: raise self.InvalidLocalRootsError('Go buildgen does not support test source roots.') if go_roots_by_category[SourceRootCategories.UNKNOWN]: raise self.InvalidLocalRootsError('Go buildgen does not support source roots of ' 'unknown category.') local_roots = go_roots_by_category[SourceRootCategories.SOURCE] if not local_roots: raise self.NoLocalRootsError('Can only BUILD gen if a Go local sources source root is ' 'defined.') if len(local_roots) > 1: raise self.InvalidLocalRootsError('Can only BUILD gen for a single Go local sources source ' 'root, found:\n\t{}' .format('\n\t'.join(sorted(local_roots)))) local_root = local_roots.pop() if local_go_targets: unrooted_locals = {t for t in local_go_targets if t.target_base != local_root} if unrooted_locals: raise self.UnrootedLocalSourceError('Cannot BUILD gen until the following targets are ' 'relocated to the source root at {}:\n\t{}' .format(local_root, '\n\t'.join(sorted(t.address.reference() for t in unrooted_locals)))) else: root = os.path.join(get_buildroot(), local_root) local_go_targets = self.context.scan(root=root).targets(self.is_local_src) if not local_go_targets: return None remote_roots = go_roots_by_category[SourceRootCategories.THIRDPARTY] if len(remote_roots) > 1: raise self.InvalidRemoteRootsError('Can only BUILD gen for a single Go remote library source ' 'root, found:\n\t{}' .format('\n\t'.join(sorted(remote_roots)))) remote_root = remote_roots.pop() if 
remote_roots else None generator = GoTargetGenerator(self.import_oracle, self.context.build_graph, local_root, self.get_fetcher_factory(), generate_remotes=self.get_options().remote, remote_root=remote_root) with self.context.new_workunit('go.buildgen', labels=[WorkUnitLabel.MULTITOOL]): try: generated = generator.generate(local_go_targets) return self.GenerationResult(generated=generated, local_root=local_root, remote_root=remote_root) except generator.GenerationError as e: raise self.GenerationError(e)
[ "def", "generate_targets", "(", "self", ",", "local_go_targets", "=", "None", ")", ":", "# TODO(John Sirois): support multiple source roots like GOPATH does?", "# The GOPATH's 1st element is read-write, the rest are read-only; ie: their sources build to", "# the 1st element's pkg/ and bin/ d...
Generate Go targets in memory to form a complete Go graph. :param local_go_targets: The local Go targets to fill in a complete target graph for. If `None`, then all local Go targets under the Go source root are used. :type local_go_targets: :class:`collections.Iterable` of :class:`pants.contrib.go.targets.go_local_source import GoLocalSource` :returns: A generation result if targets were generated, else `None`. :rtype: :class:`GoBuildgen.GenerationResult`
[ "Generate", "Go", "targets", "in", "memory", "to", "form", "a", "complete", "Go", "graph", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/go/src/python/pants/contrib/go/tasks/go_buildgen.py#L321-L391
224,239
pantsbuild/pants
src/python/pants/base/specs.py
SingleAddress.address_target_pairs_from_address_families
def address_target_pairs_from_address_families(self, address_families): """Return the pair for the single target matching the single AddressFamily, or error. :raises: :class:`SingleAddress._SingleAddressResolutionError` if no targets could be found for a :class:`SingleAddress` instance. :return: list of (Address, Target) pairs with exactly one element. """ single_af = assert_single_element(address_families) addr_tgt_pairs = [ (addr, tgt) for addr, tgt in single_af.addressables.items() if addr.target_name == self.name ] if len(addr_tgt_pairs) == 0: raise self._SingleAddressResolutionError(single_af, self.name) # There will be at most one target with a given name in a single AddressFamily. assert(len(addr_tgt_pairs) == 1) return addr_tgt_pairs
python
def address_target_pairs_from_address_families(self, address_families): single_af = assert_single_element(address_families) addr_tgt_pairs = [ (addr, tgt) for addr, tgt in single_af.addressables.items() if addr.target_name == self.name ] if len(addr_tgt_pairs) == 0: raise self._SingleAddressResolutionError(single_af, self.name) # There will be at most one target with a given name in a single AddressFamily. assert(len(addr_tgt_pairs) == 1) return addr_tgt_pairs
[ "def", "address_target_pairs_from_address_families", "(", "self", ",", "address_families", ")", ":", "single_af", "=", "assert_single_element", "(", "address_families", ")", "addr_tgt_pairs", "=", "[", "(", "addr", ",", "tgt", ")", "for", "addr", ",", "tgt", "in",...
Return the pair for the single target matching the single AddressFamily, or error. :raises: :class:`SingleAddress._SingleAddressResolutionError` if no targets could be found for a :class:`SingleAddress` instance. :return: list of (Address, Target) pairs with exactly one element.
[ "Return", "the", "pair", "for", "the", "single", "target", "matching", "the", "single", "AddressFamily", "or", "error", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/specs.py#L106-L122
224,240
pantsbuild/pants
src/python/pants/ivy/bootstrapper.py
Bootstrapper.ivy
def ivy(self, bootstrap_workunit_factory=None): """Returns an ivy instance bootstrapped by this bootstrapper. :param bootstrap_workunit_factory: the optional workunit to bootstrap under. :raises: Bootstrapper.Error if ivy could not be bootstrapped """ return Ivy(self._get_classpath(bootstrap_workunit_factory), ivy_settings=self._ivy_subsystem.get_options().ivy_settings, ivy_resolution_cache_dir=self._ivy_subsystem.resolution_cache_dir(), extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
python
def ivy(self, bootstrap_workunit_factory=None): return Ivy(self._get_classpath(bootstrap_workunit_factory), ivy_settings=self._ivy_subsystem.get_options().ivy_settings, ivy_resolution_cache_dir=self._ivy_subsystem.resolution_cache_dir(), extra_jvm_options=self._ivy_subsystem.extra_jvm_options())
[ "def", "ivy", "(", "self", ",", "bootstrap_workunit_factory", "=", "None", ")", ":", "return", "Ivy", "(", "self", ".", "_get_classpath", "(", "bootstrap_workunit_factory", ")", ",", "ivy_settings", "=", "self", ".", "_ivy_subsystem", ".", "get_options", "(", ...
Returns an ivy instance bootstrapped by this bootstrapper. :param bootstrap_workunit_factory: the optional workunit to bootstrap under. :raises: Bootstrapper.Error if ivy could not be bootstrapped
[ "Returns", "an", "ivy", "instance", "bootstrapped", "by", "this", "bootstrapper", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/ivy/bootstrapper.py#L82-L91
224,241
pantsbuild/pants
src/python/pants/ivy/bootstrapper.py
Bootstrapper._get_classpath
def _get_classpath(self, workunit_factory): """Returns the bootstrapped ivy classpath as a list of jar paths. :raises: Bootstrapper.Error if the classpath could not be bootstrapped """ if not self._classpath: self._classpath = self._bootstrap_ivy_classpath(workunit_factory) return self._classpath
python
def _get_classpath(self, workunit_factory): if not self._classpath: self._classpath = self._bootstrap_ivy_classpath(workunit_factory) return self._classpath
[ "def", "_get_classpath", "(", "self", ",", "workunit_factory", ")", ":", "if", "not", "self", ".", "_classpath", ":", "self", ".", "_classpath", "=", "self", ".", "_bootstrap_ivy_classpath", "(", "workunit_factory", ")", "return", "self", ".", "_classpath" ]
Returns the bootstrapped ivy classpath as a list of jar paths. :raises: Bootstrapper.Error if the classpath could not be bootstrapped
[ "Returns", "the", "bootstrapped", "ivy", "classpath", "as", "a", "list", "of", "jar", "paths", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/ivy/bootstrapper.py#L93-L100
224,242
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.maybe_readable_combine_ids
def maybe_readable_combine_ids(cls, ids): """Generates combined id for a set of ids, but if the set is a single id, just use that. :API: public """ ids = list(ids) # We can't len a generator. return ids[0] if len(ids) == 1 else cls.combine_ids(ids)
python
def maybe_readable_combine_ids(cls, ids): ids = list(ids) # We can't len a generator. return ids[0] if len(ids) == 1 else cls.combine_ids(ids)
[ "def", "maybe_readable_combine_ids", "(", "cls", ",", "ids", ")", ":", "ids", "=", "list", "(", "ids", ")", "# We can't len a generator.", "return", "ids", "[", "0", "]", "if", "len", "(", "ids", ")", "==", "1", "else", "cls", ".", "combine_ids", "(", ...
Generates combined id for a set of ids, but if the set is a single id, just use that. :API: public
[ "Generates", "combined", "id", "for", "a", "set", "of", "ids", "but", "if", "the", "set", "is", "a", "single", "id", "just", "use", "that", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L217-L223
224,243
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.closure_for_targets
def closure_for_targets(cls, target_roots, exclude_scopes=None, include_scopes=None, bfs=None, postorder=None, respect_intransitive=False): """Computes the closure of the given targets respecting the given input scopes. :API: public :param list target_roots: The list of Targets to start from. These targets will always be included in the closure, regardless of scope settings. :param Scope exclude_scopes: If present and non-empty, only dependencies which have none of the scope names in this Scope will be traversed. :param Scope include_scopes: If present and non-empty, only dependencies which have at least one of the scope names in this Scope will be traversed. :param bool bfs: Whether to traverse in breadth-first or depth-first order. (Defaults to True). :param bool respect_intransitive: If True, any dependencies which have the 'intransitive' scope will not be included unless they are direct dependencies of one of the root targets. (Defaults to False). """ target_roots = list(target_roots) # Sometimes generators are passed into this function. if not target_roots: return OrderedSet() build_graph = target_roots[0]._build_graph addresses = [target.address for target in target_roots] dep_predicate = cls._closure_dep_predicate(target_roots, include_scopes=include_scopes, exclude_scopes=exclude_scopes, respect_intransitive=respect_intransitive) closure = OrderedSet() if not bfs: build_graph.walk_transitive_dependency_graph( addresses=addresses, work=closure.add, postorder=postorder, dep_predicate=dep_predicate, ) else: closure.update(build_graph.transitive_subgraph_of_addresses_bfs( addresses=addresses, dep_predicate=dep_predicate, )) # Make sure all the roots made it into the closure. closure.update(target_roots) return closure
python
def closure_for_targets(cls, target_roots, exclude_scopes=None, include_scopes=None, bfs=None, postorder=None, respect_intransitive=False): target_roots = list(target_roots) # Sometimes generators are passed into this function. if not target_roots: return OrderedSet() build_graph = target_roots[0]._build_graph addresses = [target.address for target in target_roots] dep_predicate = cls._closure_dep_predicate(target_roots, include_scopes=include_scopes, exclude_scopes=exclude_scopes, respect_intransitive=respect_intransitive) closure = OrderedSet() if not bfs: build_graph.walk_transitive_dependency_graph( addresses=addresses, work=closure.add, postorder=postorder, dep_predicate=dep_predicate, ) else: closure.update(build_graph.transitive_subgraph_of_addresses_bfs( addresses=addresses, dep_predicate=dep_predicate, )) # Make sure all the roots made it into the closure. closure.update(target_roots) return closure
[ "def", "closure_for_targets", "(", "cls", ",", "target_roots", ",", "exclude_scopes", "=", "None", ",", "include_scopes", "=", "None", ",", "bfs", "=", "None", ",", "postorder", "=", "None", ",", "respect_intransitive", "=", "False", ")", ":", "target_roots", ...
Computes the closure of the given targets respecting the given input scopes. :API: public :param list target_roots: The list of Targets to start from. These targets will always be included in the closure, regardless of scope settings. :param Scope exclude_scopes: If present and non-empty, only dependencies which have none of the scope names in this Scope will be traversed. :param Scope include_scopes: If present and non-empty, only dependencies which have at least one of the scope names in this Scope will be traversed. :param bool bfs: Whether to traverse in breadth-first or depth-first order. (Defaults to True). :param bool respect_intransitive: If True, any dependencies which have the 'intransitive' scope will not be included unless they are direct dependencies of one of the root targets. (Defaults to False).
[ "Computes", "the", "closure", "of", "the", "given", "targets", "respecting", "the", "given", "input", "scopes", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L241-L285
224,244
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.mark_invalidation_hash_dirty
def mark_invalidation_hash_dirty(self): """Invalidates memoized fingerprints for this target, including those in payloads. Exposed for testing. :API: public """ self._cached_fingerprint_map = {} self._cached_all_transitive_fingerprint_map = {} self._cached_direct_transitive_fingerprint_map = {} self._cached_strict_dependencies_map = {} self._cached_exports_addresses = None self.mark_extra_invalidation_hash_dirty() self.payload.mark_dirty()
python
def mark_invalidation_hash_dirty(self): self._cached_fingerprint_map = {} self._cached_all_transitive_fingerprint_map = {} self._cached_direct_transitive_fingerprint_map = {} self._cached_strict_dependencies_map = {} self._cached_exports_addresses = None self.mark_extra_invalidation_hash_dirty() self.payload.mark_dirty()
[ "def", "mark_invalidation_hash_dirty", "(", "self", ")", ":", "self", ".", "_cached_fingerprint_map", "=", "{", "}", "self", ".", "_cached_all_transitive_fingerprint_map", "=", "{", "}", "self", ".", "_cached_direct_transitive_fingerprint_map", "=", "{", "}", "self", ...
Invalidates memoized fingerprints for this target, including those in payloads. Exposed for testing. :API: public
[ "Invalidates", "memoized", "fingerprints", "for", "this", "target", "including", "those", "in", "payloads", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L410-L423
224,245
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.has_sources
def has_sources(self, extension=None): """Return `True` if this target owns sources; optionally of the given `extension`. :API: public :param string extension: Optional suffix of filenames to test for. :return: `True` if the target contains sources that match the optional extension suffix. :rtype: bool """ source_paths = self._sources_field.source_paths if not source_paths: return False if not extension: return True return any(source.endswith(extension) for source in source_paths)
python
def has_sources(self, extension=None): source_paths = self._sources_field.source_paths if not source_paths: return False if not extension: return True return any(source.endswith(extension) for source in source_paths)
[ "def", "has_sources", "(", "self", ",", "extension", "=", "None", ")", ":", "source_paths", "=", "self", ".", "_sources_field", ".", "source_paths", "if", "not", "source_paths", ":", "return", "False", "if", "not", "extension", ":", "return", "True", "return...
Return `True` if this target owns sources; optionally of the given `extension`. :API: public :param string extension: Optional suffix of filenames to test for. :return: `True` if the target contains sources that match the optional extension suffix. :rtype: bool
[ "Return", "True", "if", "this", "target", "owns", "sources", ";", "optionally", "of", "the", "given", "extension", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L508-L522
224,246
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.derived_from_chain
def derived_from_chain(self): """Returns all targets that this target was derived from. If this target was not derived from another, returns an empty sequence. :API: public """ cur = self while cur.derived_from is not cur: cur = cur.derived_from yield cur
python
def derived_from_chain(self): cur = self while cur.derived_from is not cur: cur = cur.derived_from yield cur
[ "def", "derived_from_chain", "(", "self", ")", ":", "cur", "=", "self", "while", "cur", ".", "derived_from", "is", "not", "cur", ":", "cur", "=", "cur", ".", "derived_from", "yield", "cur" ]
Returns all targets that this target was derived from. If this target was not derived from another, returns an empty sequence. :API: public
[ "Returns", "all", "targets", "that", "this", "target", "was", "derived", "from", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L578-L588
224,247
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.walk
def walk(self, work, predicate=None): """Walk of this target's dependency graph, DFS preorder traversal, visiting each node exactly once. If a predicate is supplied it will be used to test each target before handing the target to work and descending. Work can return targets in which case these will be added to the walk candidate set if not already walked. :API: public :param work: Callable that takes a :py:class:`pants.build_graph.target.Target` as its single argument. :param predicate: Callable that takes a :py:class:`pants.build_graph.target.Target` as its single argument and returns True if the target should passed to ``work``. """ if not callable(work): raise ValueError('work must be callable but was {}'.format(work)) if predicate and not callable(predicate): raise ValueError('predicate must be callable but was {}'.format(predicate)) self._build_graph.walk_transitive_dependency_graph([self.address], work, predicate)
python
def walk(self, work, predicate=None): if not callable(work): raise ValueError('work must be callable but was {}'.format(work)) if predicate and not callable(predicate): raise ValueError('predicate must be callable but was {}'.format(predicate)) self._build_graph.walk_transitive_dependency_graph([self.address], work, predicate)
[ "def", "walk", "(", "self", ",", "work", ",", "predicate", "=", "None", ")", ":", "if", "not", "callable", "(", "work", ")", ":", "raise", "ValueError", "(", "'work must be callable but was {}'", ".", "format", "(", "work", ")", ")", "if", "predicate", "...
Walk of this target's dependency graph, DFS preorder traversal, visiting each node exactly once. If a predicate is supplied it will be used to test each target before handing the target to work and descending. Work can return targets in which case these will be added to the walk candidate set if not already walked. :API: public :param work: Callable that takes a :py:class:`pants.build_graph.target.Target` as its single argument. :param predicate: Callable that takes a :py:class:`pants.build_graph.target.Target` as its single argument and returns True if the target should passed to ``work``.
[ "Walk", "of", "this", "target", "s", "dependency", "graph", "DFS", "preorder", "traversal", "visiting", "each", "node", "exactly", "once", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L788-L807
224,248
pantsbuild/pants
src/python/pants/build_graph/target.py
Target.create_sources_field
def create_sources_field(self, sources, sources_rel_path, key_arg=None): """Factory method to create a SourcesField appropriate for the type of the sources object. Note that this method is called before the call to Target.__init__ so don't expect fields to be populated! :API: public :return: a payload field object representing the sources parameter :rtype: SourcesField """ if not sources: sources = FilesetWithSpec.empty(sources_rel_path) elif not isinstance(sources, FilesetWithSpec): key_arg_section = "'{}' to be ".format(key_arg) if key_arg else "" raise TargetDefinitionException(self, "Expected {}a glob, an address or a list, but was {}" .format(key_arg_section, type(sources))) return SourcesField(sources=sources)
python
def create_sources_field(self, sources, sources_rel_path, key_arg=None): if not sources: sources = FilesetWithSpec.empty(sources_rel_path) elif not isinstance(sources, FilesetWithSpec): key_arg_section = "'{}' to be ".format(key_arg) if key_arg else "" raise TargetDefinitionException(self, "Expected {}a glob, an address or a list, but was {}" .format(key_arg_section, type(sources))) return SourcesField(sources=sources)
[ "def", "create_sources_field", "(", "self", ",", "sources", ",", "sources_rel_path", ",", "key_arg", "=", "None", ")", ":", "if", "not", "sources", ":", "sources", "=", "FilesetWithSpec", ".", "empty", "(", "sources_rel_path", ")", "elif", "not", "isinstance",...
Factory method to create a SourcesField appropriate for the type of the sources object. Note that this method is called before the call to Target.__init__ so don't expect fields to be populated! :API: public :return: a payload field object representing the sources parameter :rtype: SourcesField
[ "Factory", "method", "to", "create", "a", "SourcesField", "appropriate", "for", "the", "type", "of", "the", "sources", "object", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/build_graph/target.py#L852-L870
224,249
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootFactory.create
def create(self, relpath, langs, category): """Return a source root at the given `relpath` for the given `langs` and `category`. :returns: :class:`SourceRoot`. """ return SourceRoot(relpath, tuple(self._canonicalize_langs(langs)), category)
python
def create(self, relpath, langs, category): return SourceRoot(relpath, tuple(self._canonicalize_langs(langs)), category)
[ "def", "create", "(", "self", ",", "relpath", ",", "langs", ",", "category", ")", ":", "return", "SourceRoot", "(", "relpath", ",", "tuple", "(", "self", ".", "_canonicalize_langs", "(", "langs", ")", ")", ",", "category", ")" ]
Return a source root at the given `relpath` for the given `langs` and `category`. :returns: :class:`SourceRoot`.
[ "Return", "a", "source", "root", "at", "the", "given", "relpath", "for", "the", "given", "langs", "and", "category", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L44-L49
224,250
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRoots.add_source_root
def add_source_root(self, path, langs=tuple(), category=SourceRootCategories.UNKNOWN): """Add the specified fixed source root, which must be relative to the buildroot. Useful in a limited set of circumstances, e.g., when unpacking sources from a jar with unknown structure. Tests should prefer to use dirs that match our source root patterns instead of explicitly setting source roots here. """ self._trie.add_fixed(path, langs, category)
python
def add_source_root(self, path, langs=tuple(), category=SourceRootCategories.UNKNOWN): self._trie.add_fixed(path, langs, category)
[ "def", "add_source_root", "(", "self", ",", "path", ",", "langs", "=", "tuple", "(", ")", ",", "category", "=", "SourceRootCategories", ".", "UNKNOWN", ")", ":", "self", ".", "_trie", ".", "add_fixed", "(", "path", ",", "langs", ",", "category", ")" ]
Add the specified fixed source root, which must be relative to the buildroot. Useful in a limited set of circumstances, e.g., when unpacking sources from a jar with unknown structure. Tests should prefer to use dirs that match our source root patterns instead of explicitly setting source roots here.
[ "Add", "the", "specified", "fixed", "source", "root", "which", "must", "be", "relative", "to", "the", "buildroot", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L66-L73
224,251
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRoots.find_by_path
def find_by_path(self, path): """Find the source root for the given path, or None. :param path: Find the source root for this path, relative to the buildroot. :return: A SourceRoot instance, or None if the path is not located under a source root and `unmatched==fail`. """ matched = self._trie.find(path) if matched: return matched elif self._options.unmatched == 'fail': return None elif self._options.unmatched == 'create': # If no source root is found, use the path directly. # TODO: Remove this logic. It should be an error to have no matching source root. return SourceRoot(path, [], SourceRootCategories.UNKNOWN)
python
def find_by_path(self, path): matched = self._trie.find(path) if matched: return matched elif self._options.unmatched == 'fail': return None elif self._options.unmatched == 'create': # If no source root is found, use the path directly. # TODO: Remove this logic. It should be an error to have no matching source root. return SourceRoot(path, [], SourceRootCategories.UNKNOWN)
[ "def", "find_by_path", "(", "self", ",", "path", ")", ":", "matched", "=", "self", ".", "_trie", ".", "find", "(", "path", ")", "if", "matched", ":", "return", "matched", "elif", "self", ".", "_options", ".", "unmatched", "==", "'fail'", ":", "return",...
Find the source root for the given path, or None. :param path: Find the source root for this path, relative to the buildroot. :return: A SourceRoot instance, or None if the path is not located under a source root and `unmatched==fail`.
[ "Find", "the", "source", "root", "for", "the", "given", "path", "or", "None", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L83-L98
224,252
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRoots.all_roots
def all_roots(self): """Return all known source roots. Returns a generator over (source root, list of langs, category) triples. Note: Requires a directory walk to match actual directories against patterns. However we don't descend into source roots, once found, so this should be fast in practice. Note: Does not follow symlinks. """ project_tree = get_project_tree(self._options) fixed_roots = set() for root, langs, category in self._trie.fixed(): if project_tree.exists(root): yield self._source_root_factory.create(root, langs, category) fixed_roots.add(root) for relpath, dirnames, _ in project_tree.walk('', topdown=True): match = self._trie.find(relpath) if match: if not any(fixed_root.startswith(relpath) for fixed_root in fixed_roots): yield match # Found a source root not a prefix of any fixed roots. del dirnames[:]
python
def all_roots(self): project_tree = get_project_tree(self._options) fixed_roots = set() for root, langs, category in self._trie.fixed(): if project_tree.exists(root): yield self._source_root_factory.create(root, langs, category) fixed_roots.add(root) for relpath, dirnames, _ in project_tree.walk('', topdown=True): match = self._trie.find(relpath) if match: if not any(fixed_root.startswith(relpath) for fixed_root in fixed_roots): yield match # Found a source root not a prefix of any fixed roots. del dirnames[:]
[ "def", "all_roots", "(", "self", ")", ":", "project_tree", "=", "get_project_tree", "(", "self", ".", "_options", ")", "fixed_roots", "=", "set", "(", ")", "for", "root", ",", "langs", ",", "category", "in", "self", ".", "_trie", ".", "fixed", "(", ")"...
Return all known source roots. Returns a generator over (source root, list of langs, category) triples. Note: Requires a directory walk to match actual directories against patterns. However we don't descend into source roots, once found, so this should be fast in practice. Note: Does not follow symlinks.
[ "Return", "all", "known", "source", "roots", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L100-L122
224,253
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootConfig.create_trie
def create_trie(self): """Create a trie of source root patterns from options. :returns: :class:`SourceRootTrie` """ trie = SourceRootTrie(self.source_root_factory) options = self.get_options() for category in SourceRootCategories.ALL: # Add patterns. for pattern in options.get('{}_root_patterns'.format(category), []): trie.add_pattern(pattern, category) # Add fixed source roots. for path, langs in options.get('{}_roots'.format(category), {}).items(): trie.add_fixed(path, langs, category) return trie
python
def create_trie(self): trie = SourceRootTrie(self.source_root_factory) options = self.get_options() for category in SourceRootCategories.ALL: # Add patterns. for pattern in options.get('{}_root_patterns'.format(category), []): trie.add_pattern(pattern, category) # Add fixed source roots. for path, langs in options.get('{}_roots'.format(category), {}).items(): trie.add_fixed(path, langs, category) return trie
[ "def", "create_trie", "(", "self", ")", ":", "trie", "=", "SourceRootTrie", "(", "self", ".", "source_root_factory", ")", "options", "=", "self", ".", "get_options", "(", ")", "for", "category", "in", "SourceRootCategories", ".", "ALL", ":", "# Add patterns.",...
Create a trie of source root patterns from options. :returns: :class:`SourceRootTrie`
[ "Create", "a", "trie", "of", "source", "root", "patterns", "from", "options", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L246-L262
224,254
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootTrie.add_pattern
def add_pattern(self, pattern, category=SourceRootCategories.UNKNOWN): """Add a pattern to the trie.""" self._do_add_pattern(pattern, tuple(), category)
python
def add_pattern(self, pattern, category=SourceRootCategories.UNKNOWN): self._do_add_pattern(pattern, tuple(), category)
[ "def", "add_pattern", "(", "self", ",", "pattern", ",", "category", "=", "SourceRootCategories", ".", "UNKNOWN", ")", ":", "self", ".", "_do_add_pattern", "(", "pattern", ",", "tuple", "(", ")", ",", "category", ")" ]
Add a pattern to the trie.
[ "Add", "a", "pattern", "to", "the", "trie", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L335-L337
224,255
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootTrie.add_fixed
def add_fixed(self, path, langs, category=SourceRootCategories.UNKNOWN): """Add a fixed source root to the trie.""" if '*' in path: raise self.InvalidPath(path, 'fixed path cannot contain the * character') fixed_path = os.path.join('^', path) if path else '^' self._do_add_pattern(fixed_path, tuple(langs), category)
python
def add_fixed(self, path, langs, category=SourceRootCategories.UNKNOWN): if '*' in path: raise self.InvalidPath(path, 'fixed path cannot contain the * character') fixed_path = os.path.join('^', path) if path else '^' self._do_add_pattern(fixed_path, tuple(langs), category)
[ "def", "add_fixed", "(", "self", ",", "path", ",", "langs", ",", "category", "=", "SourceRootCategories", ".", "UNKNOWN", ")", ":", "if", "'*'", "in", "path", ":", "raise", "self", ".", "InvalidPath", "(", "path", ",", "'fixed path cannot contain the * charact...
Add a fixed source root to the trie.
[ "Add", "a", "fixed", "source", "root", "to", "the", "trie", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L339-L344
224,256
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootTrie.fixed
def fixed(self): """Returns a list of just the fixed source roots in the trie.""" for key, child in self._root.children.items(): if key == '^': return list(child.subpatterns()) return []
python
def fixed(self): for key, child in self._root.children.items(): if key == '^': return list(child.subpatterns()) return []
[ "def", "fixed", "(", "self", ")", ":", "for", "key", ",", "child", "in", "self", ".", "_root", ".", "children", ".", "items", "(", ")", ":", "if", "key", "==", "'^'", ":", "return", "list", "(", "child", ".", "subpatterns", "(", ")", ")", "return...
Returns a list of just the fixed source roots in the trie.
[ "Returns", "a", "list", "of", "just", "the", "fixed", "source", "roots", "in", "the", "trie", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L346-L351
224,257
pantsbuild/pants
src/python/pants/source/source_root.py
SourceRootTrie.find
def find(self, path): """Find the source root for the given path.""" keys = ['^'] + path.split(os.path.sep) for i in range(len(keys)): # See if we have a match at position i. We have such a match if following the path # segments into the trie, from the root, leads us to a terminal. node = self._root langs = set() j = i while j < len(keys): child = node.get_child(keys[j], langs) if child is None: break else: node = child j += 1 if node.is_terminal: if j == 1: # The match was on the root itself. path = '' else: path = os.path.join(*keys[1:j]) return self._source_root_factory.create(path, langs, node.category) # Otherwise, try the next value of i. return None
python
def find(self, path): keys = ['^'] + path.split(os.path.sep) for i in range(len(keys)): # See if we have a match at position i. We have such a match if following the path # segments into the trie, from the root, leads us to a terminal. node = self._root langs = set() j = i while j < len(keys): child = node.get_child(keys[j], langs) if child is None: break else: node = child j += 1 if node.is_terminal: if j == 1: # The match was on the root itself. path = '' else: path = os.path.join(*keys[1:j]) return self._source_root_factory.create(path, langs, node.category) # Otherwise, try the next value of i. return None
[ "def", "find", "(", "self", ",", "path", ")", ":", "keys", "=", "[", "'^'", "]", "+", "path", ".", "split", "(", "os", ".", "path", ".", "sep", ")", "for", "i", "in", "range", "(", "len", "(", "keys", ")", ")", ":", "# See if we have a match at p...
Find the source root for the given path.
[ "Find", "the", "source", "root", "for", "the", "given", "path", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/source/source_root.py#L368-L391
224,258
pantsbuild/pants
src/python/pants/binaries/binary_tool.py
BinaryToolBase.version
def version(self, context=None): """Returns the version of the specified binary tool. If replaces_scope and replaces_name are defined, then the caller must pass in a context, otherwise no context should be passed. # TODO: Once we're migrated, get rid of the context arg. :API: public """ if self.replaces_scope and self.replaces_name: if context: # If the old option is provided explicitly, let it take precedence. old_opts = context.options.for_scope(self.replaces_scope) if old_opts.get(self.replaces_name) and not old_opts.is_default(self.replaces_name): return old_opts.get(self.replaces_name) else: logger.warn('Cannot resolve version of {} from deprecated option {} in scope {} without a ' 'context!'.format(self._get_name(), self.replaces_name, self.replaces_scope)) return self.get_options().version
python
def version(self, context=None): if self.replaces_scope and self.replaces_name: if context: # If the old option is provided explicitly, let it take precedence. old_opts = context.options.for_scope(self.replaces_scope) if old_opts.get(self.replaces_name) and not old_opts.is_default(self.replaces_name): return old_opts.get(self.replaces_name) else: logger.warn('Cannot resolve version of {} from deprecated option {} in scope {} without a ' 'context!'.format(self._get_name(), self.replaces_name, self.replaces_scope)) return self.get_options().version
[ "def", "version", "(", "self", ",", "context", "=", "None", ")", ":", "if", "self", ".", "replaces_scope", "and", "self", ".", "replaces_name", ":", "if", "context", ":", "# If the old option is provided explicitly, let it take precedence.", "old_opts", "=", "contex...
Returns the version of the specified binary tool. If replaces_scope and replaces_name are defined, then the caller must pass in a context, otherwise no context should be passed. # TODO: Once we're migrated, get rid of the context arg. :API: public
[ "Returns", "the", "version", "of", "the", "specified", "binary", "tool", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/binaries/binary_tool.py#L132-L151
224,259
pantsbuild/pants
src/python/pants/reporting/reporting.py
Reporting.initialize
def initialize(self, run_tracker, all_options, start_time=None): """Initialize with the given RunTracker. TODO: See `RunTracker.start`. """ run_id, run_uuid = run_tracker.initialize(all_options) run_dir = os.path.join(self.get_options().reports_dir, run_id) html_dir = os.path.join(run_dir, 'html') safe_mkdir(html_dir) relative_symlink(run_dir, os.path.join(self.get_options().reports_dir, 'latest')) report = Report() # Capture initial console reporting into a buffer. We'll do something with it once # we know what the cmd-line flag settings are. outfile = BytesIO() errfile = BytesIO() capturing_reporter_settings = PlainTextReporter.Settings( outfile=outfile, errfile=errfile, log_level=Report.INFO, color=False, indent=True, timing=False, cache_stats=False, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) capturing_reporter = PlainTextReporter(run_tracker, capturing_reporter_settings) report.add_reporter('capturing', capturing_reporter) # Set up HTML reporting. We always want that. html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO, html_dir=html_dir, template_dir=self.get_options().template_dir) html_reporter = HtmlReporter(run_tracker, html_reporter_settings) report.add_reporter('html', html_reporter) # Set up Zipkin reporting. zipkin_endpoint = self.get_options().zipkin_endpoint trace_id = self.get_options().zipkin_trace_id parent_id = self.get_options().zipkin_parent_id sample_rate = self.get_options().zipkin_sample_rate if zipkin_endpoint is None and trace_id is not None and parent_id is not None: raise ValueError( "The zipkin-endpoint flag must be set if zipkin-trace-id and zipkin-parent-id flags are given." ) if (trace_id is None) != (parent_id is None): raise ValueError( "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." 
) # If trace_id isn't set by a flag, use UUID from run_id if trace_id is None: trace_id = run_uuid if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or not is_hex_string(trace_id)): raise ValueError( "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + "Got {}.".format(trace_id) ) if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)): raise ValueError( "Value of the flag zipkin-parent-id must be a 16-character hex string. " + "Got {}.".format(parent_id) ) if zipkin_endpoint is not None: zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO) zipkin_reporter = ZipkinReporter( run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id, sample_rate ) report.add_reporter('zipkin', zipkin_reporter) # Add some useful RunInfo. run_tracker.run_info.add_info('default_report', html_reporter.report_path()) port = ReportingServerManager().socket if port: run_tracker.run_info.add_info('report_url', 'http://localhost:{}/run/{}'.format(port, run_id)) # And start tracking the run. run_tracker.start(report, start_time)
python
def initialize(self, run_tracker, all_options, start_time=None): run_id, run_uuid = run_tracker.initialize(all_options) run_dir = os.path.join(self.get_options().reports_dir, run_id) html_dir = os.path.join(run_dir, 'html') safe_mkdir(html_dir) relative_symlink(run_dir, os.path.join(self.get_options().reports_dir, 'latest')) report = Report() # Capture initial console reporting into a buffer. We'll do something with it once # we know what the cmd-line flag settings are. outfile = BytesIO() errfile = BytesIO() capturing_reporter_settings = PlainTextReporter.Settings( outfile=outfile, errfile=errfile, log_level=Report.INFO, color=False, indent=True, timing=False, cache_stats=False, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) capturing_reporter = PlainTextReporter(run_tracker, capturing_reporter_settings) report.add_reporter('capturing', capturing_reporter) # Set up HTML reporting. We always want that. html_reporter_settings = HtmlReporter.Settings(log_level=Report.INFO, html_dir=html_dir, template_dir=self.get_options().template_dir) html_reporter = HtmlReporter(run_tracker, html_reporter_settings) report.add_reporter('html', html_reporter) # Set up Zipkin reporting. zipkin_endpoint = self.get_options().zipkin_endpoint trace_id = self.get_options().zipkin_trace_id parent_id = self.get_options().zipkin_parent_id sample_rate = self.get_options().zipkin_sample_rate if zipkin_endpoint is None and trace_id is not None and parent_id is not None: raise ValueError( "The zipkin-endpoint flag must be set if zipkin-trace-id and zipkin-parent-id flags are given." ) if (trace_id is None) != (parent_id is None): raise ValueError( "Flags zipkin-trace-id and zipkin-parent-id must both either be set or not set." 
) # If trace_id isn't set by a flag, use UUID from run_id if trace_id is None: trace_id = run_uuid if trace_id and (len(trace_id) != 16 and len(trace_id) != 32 or not is_hex_string(trace_id)): raise ValueError( "Value of the flag zipkin-trace-id must be a 16-character or 32-character hex string. " + "Got {}.".format(trace_id) ) if parent_id and (len(parent_id) != 16 or not is_hex_string(parent_id)): raise ValueError( "Value of the flag zipkin-parent-id must be a 16-character hex string. " + "Got {}.".format(parent_id) ) if zipkin_endpoint is not None: zipkin_reporter_settings = ZipkinReporter.Settings(log_level=Report.INFO) zipkin_reporter = ZipkinReporter( run_tracker, zipkin_reporter_settings, zipkin_endpoint, trace_id, parent_id, sample_rate ) report.add_reporter('zipkin', zipkin_reporter) # Add some useful RunInfo. run_tracker.run_info.add_info('default_report', html_reporter.report_path()) port = ReportingServerManager().socket if port: run_tracker.run_info.add_info('report_url', 'http://localhost:{}/run/{}'.format(port, run_id)) # And start tracking the run. run_tracker.start(report, start_time)
[ "def", "initialize", "(", "self", ",", "run_tracker", ",", "all_options", ",", "start_time", "=", "None", ")", ":", "run_id", ",", "run_uuid", "=", "run_tracker", ".", "initialize", "(", "all_options", ")", "run_dir", "=", "os", ".", "path", ".", "join", ...
Initialize with the given RunTracker. TODO: See `RunTracker.start`.
[ "Initialize", "with", "the", "given", "RunTracker", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting.py#L68-L147
224,260
pantsbuild/pants
src/python/pants/reporting/reporting.py
Reporting.update_reporting
def update_reporting(self, global_options, is_quiet, run_tracker): """Updates reporting config once we've parsed cmd-line flags.""" # Get any output silently buffered in the old console reporter, and remove it. removed_reporter = run_tracker.report.remove_reporter('capturing') buffered_out = self._consume_stringio(removed_reporter.settings.outfile) buffered_err = self._consume_stringio(removed_reporter.settings.errfile) log_level = Report.log_level_from_string(global_options.level or 'info') # Ideally, we'd use terminfo or somesuch to discover whether a # terminal truly supports color, but most that don't set TERM=dumb. color = global_options.colors and (os.getenv('TERM') != 'dumb') timing = global_options.time cache_stats = global_options.time # TODO: Separate flag for this? if is_quiet: console_reporter = QuietReporter(run_tracker, QuietReporter.Settings(log_level=log_level, color=color, timing=timing, cache_stats=cache_stats)) else: # Set up the new console reporter. stdout = sys.stdout.buffer if PY3 else sys.stdout stderr = sys.stderr.buffer if PY3 else sys.stderr settings = PlainTextReporter.Settings(log_level=log_level, outfile=stdout, errfile=stderr, color=color, indent=True, timing=timing, cache_stats=cache_stats, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) console_reporter = PlainTextReporter(run_tracker, settings) console_reporter.emit(buffered_out, dest=ReporterDestination.OUT) console_reporter.emit(buffered_err, dest=ReporterDestination.ERR) console_reporter.flush() run_tracker.report.add_reporter('console', console_reporter) if global_options.logdir: # Also write plaintext logs to a file. This is completely separate from the html reports. 
safe_mkdir(global_options.logdir) run_id = run_tracker.run_info.get_info('id') outfile = open(os.path.join(global_options.logdir, '{}.log'.format(run_id)), 'wb') errfile = open(os.path.join(global_options.logdir, '{}.err.log'.format(run_id)), 'wb') settings = PlainTextReporter.Settings(log_level=log_level, outfile=outfile, errfile=errfile, color=False, indent=True, timing=True, cache_stats=True, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) logfile_reporter = PlainTextReporter(run_tracker, settings) logfile_reporter.emit(buffered_out, dest=ReporterDestination.OUT) logfile_reporter.emit(buffered_err, dest=ReporterDestination.ERR) logfile_reporter.flush() run_tracker.report.add_reporter('logfile', logfile_reporter) invalidation_report = self._get_invalidation_report() if invalidation_report: run_id = run_tracker.run_info.get_info('id') outfile = os.path.join(self.get_options().reports_dir, run_id, 'invalidation-report.csv') invalidation_report.set_filename(outfile) return invalidation_report
python
def update_reporting(self, global_options, is_quiet, run_tracker): # Get any output silently buffered in the old console reporter, and remove it. removed_reporter = run_tracker.report.remove_reporter('capturing') buffered_out = self._consume_stringio(removed_reporter.settings.outfile) buffered_err = self._consume_stringio(removed_reporter.settings.errfile) log_level = Report.log_level_from_string(global_options.level or 'info') # Ideally, we'd use terminfo or somesuch to discover whether a # terminal truly supports color, but most that don't set TERM=dumb. color = global_options.colors and (os.getenv('TERM') != 'dumb') timing = global_options.time cache_stats = global_options.time # TODO: Separate flag for this? if is_quiet: console_reporter = QuietReporter(run_tracker, QuietReporter.Settings(log_level=log_level, color=color, timing=timing, cache_stats=cache_stats)) else: # Set up the new console reporter. stdout = sys.stdout.buffer if PY3 else sys.stdout stderr = sys.stderr.buffer if PY3 else sys.stderr settings = PlainTextReporter.Settings(log_level=log_level, outfile=stdout, errfile=stderr, color=color, indent=True, timing=timing, cache_stats=cache_stats, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) console_reporter = PlainTextReporter(run_tracker, settings) console_reporter.emit(buffered_out, dest=ReporterDestination.OUT) console_reporter.emit(buffered_err, dest=ReporterDestination.ERR) console_reporter.flush() run_tracker.report.add_reporter('console', console_reporter) if global_options.logdir: # Also write plaintext logs to a file. This is completely separate from the html reports. 
safe_mkdir(global_options.logdir) run_id = run_tracker.run_info.get_info('id') outfile = open(os.path.join(global_options.logdir, '{}.log'.format(run_id)), 'wb') errfile = open(os.path.join(global_options.logdir, '{}.err.log'.format(run_id)), 'wb') settings = PlainTextReporter.Settings(log_level=log_level, outfile=outfile, errfile=errfile, color=False, indent=True, timing=True, cache_stats=True, label_format=self.get_options().console_label_format, tool_output_format=self.get_options().console_tool_output_format) logfile_reporter = PlainTextReporter(run_tracker, settings) logfile_reporter.emit(buffered_out, dest=ReporterDestination.OUT) logfile_reporter.emit(buffered_err, dest=ReporterDestination.ERR) logfile_reporter.flush() run_tracker.report.add_reporter('logfile', logfile_reporter) invalidation_report = self._get_invalidation_report() if invalidation_report: run_id = run_tracker.run_info.get_info('id') outfile = os.path.join(self.get_options().reports_dir, run_id, 'invalidation-report.csv') invalidation_report.set_filename(outfile) return invalidation_report
[ "def", "update_reporting", "(", "self", ",", "global_options", ",", "is_quiet", ",", "run_tracker", ")", ":", "# Get any output silently buffered in the old console reporter, and remove it.", "removed_reporter", "=", "run_tracker", ".", "report", ".", "remove_reporter", "(", ...
Updates reporting config once we've parsed cmd-line flags.
[ "Updates", "reporting", "config", "once", "we", "ve", "parsed", "cmd", "-", "line", "flags", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting.py#L159-L214
224,261
pantsbuild/pants
src/python/pants/cache/pinger.py
Pinger.ping
def ping(self, url): """Time a single roundtrip to the url. :param url to ping. :returns: the fastest ping time for a given netloc and number of tries. or Pinger.UNREACHABLE if ping times out. :rtype: float Note that we don't use actual ICMP pings, because cmd-line ping is inflexible and platform-dependent, so shelling out to it is annoying, and the ICMP python lib can only be called by the superuser. """ return self._get_ping_time(url, self._timeout, self._tries)
python
def ping(self, url): return self._get_ping_time(url, self._timeout, self._tries)
[ "def", "ping", "(", "self", ",", "url", ")", ":", "return", "self", ".", "_get_ping_time", "(", "url", ",", "self", ".", "_timeout", ",", "self", ".", "_tries", ")" ]
Time a single roundtrip to the url. :param url to ping. :returns: the fastest ping time for a given netloc and number of tries. or Pinger.UNREACHABLE if ping times out. :rtype: float Note that we don't use actual ICMP pings, because cmd-line ping is inflexible and platform-dependent, so shelling out to it is annoying, and the ICMP python lib can only be called by the superuser.
[ "Time", "a", "single", "roundtrip", "to", "the", "url", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/cache/pinger.py#L53-L65
224,262
pantsbuild/pants
src/python/pants/cache/pinger.py
BestUrlSelector.select_best_url
def select_best_url(self): """Select `best` url. Since urls are pre-sorted w.r.t. their ping times, we simply return the first element from the list. And we always return the same url unless we observe greater than max allowed number of consecutive failures. In this case, we would return the next `best` url, and append the previous best one to the end of list (essentially rotate to the left by one element). """ best_url = self.parsed_urls[0] try: yield best_url except Exception: self.unsuccessful_calls[best_url] += 1 # Not thread-safe but pool used by cache is based on subprocesses, therefore no race. if self.unsuccessful_calls[best_url] > self.max_failures: self.parsed_urls.rotate(-1) self.unsuccessful_calls[best_url] = 0 raise else: self.unsuccessful_calls[best_url] = 0
python
def select_best_url(self): best_url = self.parsed_urls[0] try: yield best_url except Exception: self.unsuccessful_calls[best_url] += 1 # Not thread-safe but pool used by cache is based on subprocesses, therefore no race. if self.unsuccessful_calls[best_url] > self.max_failures: self.parsed_urls.rotate(-1) self.unsuccessful_calls[best_url] = 0 raise else: self.unsuccessful_calls[best_url] = 0
[ "def", "select_best_url", "(", "self", ")", ":", "best_url", "=", "self", ".", "parsed_urls", "[", "0", "]", "try", ":", "yield", "best_url", "except", "Exception", ":", "self", ".", "unsuccessful_calls", "[", "best_url", "]", "+=", "1", "# Not thread-safe b...
Select `best` url. Since urls are pre-sorted w.r.t. their ping times, we simply return the first element from the list. And we always return the same url unless we observe greater than max allowed number of consecutive failures. In this case, we would return the next `best` url, and append the previous best one to the end of list (essentially rotate to the left by one element).
[ "Select", "best", "url", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/cache/pinger.py#L101-L123
224,263
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink.reset_log_location
def reset_log_location(cls, new_log_location): """Re-acquire file handles to error logs based in the new location. Class state: - Overwrites `cls._log_dir`, `cls._pid_specific_error_fileobj`, and `cls._shared_error_fileobj`. OS state: - May create a new directory. - Overwrites signal handlers for many fatal and non-fatal signals (but not SIGUSR2). :raises: :class:`ExceptionSink.ExceptionSinkError` if the directory does not exist or is not writable. """ # We could no-op here if the log locations are the same, but there's no reason not to have the # additional safety of re-acquiring file descriptors each time (and erroring out early if the # location is no longer writable). # Create the directory if possible, or raise if not writable. cls._check_or_create_new_destination(new_log_location) pid_specific_error_stream, shared_error_stream = cls._recapture_fatal_error_log_streams( new_log_location) # NB: mutate process-global state! if faulthandler.is_enabled(): logger.debug('re-enabling faulthandler') # Call Py_CLEAR() on the previous error stream: # https://github.com/vstinner/faulthandler/blob/master/faulthandler.c faulthandler.disable() # Send a stacktrace to this file if interrupted by a fatal error. faulthandler.enable(file=pid_specific_error_stream, all_threads=True) # NB: mutate the class variables! cls._log_dir = new_log_location cls._pid_specific_error_fileobj = pid_specific_error_stream cls._shared_error_fileobj = shared_error_stream
python
def reset_log_location(cls, new_log_location): # We could no-op here if the log locations are the same, but there's no reason not to have the # additional safety of re-acquiring file descriptors each time (and erroring out early if the # location is no longer writable). # Create the directory if possible, or raise if not writable. cls._check_or_create_new_destination(new_log_location) pid_specific_error_stream, shared_error_stream = cls._recapture_fatal_error_log_streams( new_log_location) # NB: mutate process-global state! if faulthandler.is_enabled(): logger.debug('re-enabling faulthandler') # Call Py_CLEAR() on the previous error stream: # https://github.com/vstinner/faulthandler/blob/master/faulthandler.c faulthandler.disable() # Send a stacktrace to this file if interrupted by a fatal error. faulthandler.enable(file=pid_specific_error_stream, all_threads=True) # NB: mutate the class variables! cls._log_dir = new_log_location cls._pid_specific_error_fileobj = pid_specific_error_stream cls._shared_error_fileobj = shared_error_stream
[ "def", "reset_log_location", "(", "cls", ",", "new_log_location", ")", ":", "# We could no-op here if the log locations are the same, but there's no reason not to have the", "# additional safety of re-acquiring file descriptors each time (and erroring out early if the", "# location is no longer ...
Re-acquire file handles to error logs based in the new location. Class state: - Overwrites `cls._log_dir`, `cls._pid_specific_error_fileobj`, and `cls._shared_error_fileobj`. OS state: - May create a new directory. - Overwrites signal handlers for many fatal and non-fatal signals (but not SIGUSR2). :raises: :class:`ExceptionSink.ExceptionSinkError` if the directory does not exist or is not writable.
[ "Re", "-", "acquire", "file", "handles", "to", "error", "logs", "based", "in", "the", "new", "location", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L117-L152
224,264
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink.exceptions_log_path
def exceptions_log_path(cls, for_pid=None, in_dir=None): """Get the path to either the shared or pid-specific fatal errors log file.""" if for_pid is None: intermediate_filename_component = '' else: assert(isinstance(for_pid, IntegerForPid)) intermediate_filename_component = '.{}'.format(for_pid) in_dir = in_dir or cls._log_dir return os.path.join( in_dir, 'logs', 'exceptions{}.log'.format(intermediate_filename_component))
python
def exceptions_log_path(cls, for_pid=None, in_dir=None): if for_pid is None: intermediate_filename_component = '' else: assert(isinstance(for_pid, IntegerForPid)) intermediate_filename_component = '.{}'.format(for_pid) in_dir = in_dir or cls._log_dir return os.path.join( in_dir, 'logs', 'exceptions{}.log'.format(intermediate_filename_component))
[ "def", "exceptions_log_path", "(", "cls", ",", "for_pid", "=", "None", ",", "in_dir", "=", "None", ")", ":", "if", "for_pid", "is", "None", ":", "intermediate_filename_component", "=", "''", "else", ":", "assert", "(", "isinstance", "(", "for_pid", ",", "I...
Get the path to either the shared or pid-specific fatal errors log file.
[ "Get", "the", "path", "to", "either", "the", "shared", "or", "pid", "-", "specific", "fatal", "errors", "log", "file", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L198-L209
224,265
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink.log_exception
def log_exception(cls, msg): """Try to log an error message to this process's error log and the shared error log. NB: Doesn't raise (logs an error instead). """ pid = os.getpid() fatal_error_log_entry = cls._format_exception_message(msg, pid) # We care more about this log than the shared log, so write to it first. try: cls._try_write_with_flush(cls._pid_specific_error_fileobj, fatal_error_log_entry) except Exception as e: logger.error( "Error logging the message '{}' to the pid-specific file handle for {} at pid {}:\n{}" .format(msg, cls._log_dir, pid, e)) # Write to the shared log. try: # TODO: we should probably guard this against concurrent modification by other pants # subprocesses somehow. cls._try_write_with_flush(cls._shared_error_fileobj, fatal_error_log_entry) except Exception as e: logger.error( "Error logging the message '{}' to the shared file handle for {} at pid {}:\n{}" .format(msg, cls._log_dir, pid, e))
python
def log_exception(cls, msg): pid = os.getpid() fatal_error_log_entry = cls._format_exception_message(msg, pid) # We care more about this log than the shared log, so write to it first. try: cls._try_write_with_flush(cls._pid_specific_error_fileobj, fatal_error_log_entry) except Exception as e: logger.error( "Error logging the message '{}' to the pid-specific file handle for {} at pid {}:\n{}" .format(msg, cls._log_dir, pid, e)) # Write to the shared log. try: # TODO: we should probably guard this against concurrent modification by other pants # subprocesses somehow. cls._try_write_with_flush(cls._shared_error_fileobj, fatal_error_log_entry) except Exception as e: logger.error( "Error logging the message '{}' to the shared file handle for {} at pid {}:\n{}" .format(msg, cls._log_dir, pid, e))
[ "def", "log_exception", "(", "cls", ",", "msg", ")", ":", "pid", "=", "os", ".", "getpid", "(", ")", "fatal_error_log_entry", "=", "cls", ".", "_format_exception_message", "(", "msg", ",", "pid", ")", "# We care more about this log than the shared log, so write to i...
Try to log an error message to this process's error log and the shared error log. NB: Doesn't raise (logs an error instead).
[ "Try", "to", "log", "an", "error", "message", "to", "this", "process", "s", "error", "log", "and", "the", "shared", "error", "log", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L212-L236
224,266
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink.trapped_signals
def trapped_signals(cls, new_signal_handler): """A contextmanager which temporarily overrides signal handling.""" try: previous_signal_handler = cls.reset_signal_handler(new_signal_handler) yield finally: cls.reset_signal_handler(previous_signal_handler)
python
def trapped_signals(cls, new_signal_handler): try: previous_signal_handler = cls.reset_signal_handler(new_signal_handler) yield finally: cls.reset_signal_handler(previous_signal_handler)
[ "def", "trapped_signals", "(", "cls", ",", "new_signal_handler", ")", ":", "try", ":", "previous_signal_handler", "=", "cls", ".", "reset_signal_handler", "(", "new_signal_handler", ")", "yield", "finally", ":", "cls", ".", "reset_signal_handler", "(", "previous_sig...
A contextmanager which temporarily overrides signal handling.
[ "A", "contextmanager", "which", "temporarily", "overrides", "signal", "handling", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L305-L311
224,267
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink._log_unhandled_exception_and_exit
def _log_unhandled_exception_and_exit(cls, exc_class=None, exc=None, tb=None, add_newline=False): """A sys.excepthook implementation which logs the error and exits with failure.""" exc_class = exc_class or sys.exc_info()[0] exc = exc or sys.exc_info()[1] tb = tb or sys.exc_info()[2] # This exception was raised by a signal handler with the intent to exit the program. if exc_class == SignalHandler.SignalHandledNonLocalExit: return cls._handle_signal_gracefully(exc.signum, exc.signame, exc.traceback_lines) extra_err_msg = None try: # Always output the unhandled exception details into a log file, including the traceback. exception_log_entry = cls._format_unhandled_exception_log(exc, tb, add_newline, should_print_backtrace=True) cls.log_exception(exception_log_entry) except Exception as e: extra_err_msg = 'Additional error logging unhandled exception {}: {}'.format(exc, e) logger.error(extra_err_msg) # Generate an unhandled exception report fit to be printed to the terminal (respecting the # Exiter's should_print_backtrace field). stderr_printed_error = cls._format_unhandled_exception_log( exc, tb, add_newline, should_print_backtrace=cls._should_print_backtrace_to_terminal) if extra_err_msg: stderr_printed_error = '{}\n{}'.format(stderr_printed_error, extra_err_msg) cls._exit_with_failure(stderr_printed_error)
python
def _log_unhandled_exception_and_exit(cls, exc_class=None, exc=None, tb=None, add_newline=False): exc_class = exc_class or sys.exc_info()[0] exc = exc or sys.exc_info()[1] tb = tb or sys.exc_info()[2] # This exception was raised by a signal handler with the intent to exit the program. if exc_class == SignalHandler.SignalHandledNonLocalExit: return cls._handle_signal_gracefully(exc.signum, exc.signame, exc.traceback_lines) extra_err_msg = None try: # Always output the unhandled exception details into a log file, including the traceback. exception_log_entry = cls._format_unhandled_exception_log(exc, tb, add_newline, should_print_backtrace=True) cls.log_exception(exception_log_entry) except Exception as e: extra_err_msg = 'Additional error logging unhandled exception {}: {}'.format(exc, e) logger.error(extra_err_msg) # Generate an unhandled exception report fit to be printed to the terminal (respecting the # Exiter's should_print_backtrace field). stderr_printed_error = cls._format_unhandled_exception_log( exc, tb, add_newline, should_print_backtrace=cls._should_print_backtrace_to_terminal) if extra_err_msg: stderr_printed_error = '{}\n{}'.format(stderr_printed_error, extra_err_msg) cls._exit_with_failure(stderr_printed_error)
[ "def", "_log_unhandled_exception_and_exit", "(", "cls", ",", "exc_class", "=", "None", ",", "exc", "=", "None", ",", "tb", "=", "None", ",", "add_newline", "=", "False", ")", ":", "exc_class", "=", "exc_class", "or", "sys", ".", "exc_info", "(", ")", "["...
A sys.excepthook implementation which logs the error and exits with failure.
[ "A", "sys", ".", "excepthook", "implementation", "which", "logs", "the", "error", "and", "exits", "with", "failure", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L377-L404
224,268
pantsbuild/pants
src/python/pants/base/exception_sink.py
ExceptionSink._handle_signal_gracefully
def _handle_signal_gracefully(cls, signum, signame, traceback_lines): """Signal handler for non-fatal signals which raises or logs an error and exits with failure.""" # Extract the stack, and format an entry to be written to the exception log. formatted_traceback = cls._format_traceback(traceback_lines=traceback_lines, should_print_backtrace=True) signal_error_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format( signum=signum, signame=signame, formatted_traceback=formatted_traceback) # TODO: determine the appropriate signal-safe behavior here (to avoid writing to our file # descriptors re-entrantly, which raises an IOError). # This method catches any exceptions raised within it. cls.log_exception(signal_error_log_entry) # Create a potentially-abbreviated traceback for the terminal or other interactive stream. formatted_traceback_for_terminal = cls._format_traceback( traceback_lines=traceback_lines, should_print_backtrace=cls._should_print_backtrace_to_terminal) terminal_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format( signum=signum, signame=signame, formatted_traceback=formatted_traceback_for_terminal) # Exit, printing the output to the terminal. cls._exit_with_failure(terminal_log_entry)
python
def _handle_signal_gracefully(cls, signum, signame, traceback_lines): # Extract the stack, and format an entry to be written to the exception log. formatted_traceback = cls._format_traceback(traceback_lines=traceback_lines, should_print_backtrace=True) signal_error_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format( signum=signum, signame=signame, formatted_traceback=formatted_traceback) # TODO: determine the appropriate signal-safe behavior here (to avoid writing to our file # descriptors re-entrantly, which raises an IOError). # This method catches any exceptions raised within it. cls.log_exception(signal_error_log_entry) # Create a potentially-abbreviated traceback for the terminal or other interactive stream. formatted_traceback_for_terminal = cls._format_traceback( traceback_lines=traceback_lines, should_print_backtrace=cls._should_print_backtrace_to_terminal) terminal_log_entry = cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format( signum=signum, signame=signame, formatted_traceback=formatted_traceback_for_terminal) # Exit, printing the output to the terminal. cls._exit_with_failure(terminal_log_entry)
[ "def", "_handle_signal_gracefully", "(", "cls", ",", "signum", ",", "signame", ",", "traceback_lines", ")", ":", "# Extract the stack, and format an entry to be written to the exception log.", "formatted_traceback", "=", "cls", ".", "_format_traceback", "(", "traceback_lines", ...
Signal handler for non-fatal signals which raises or logs an error and exits with failure.
[ "Signal", "handler", "for", "non", "-", "fatal", "signals", "which", "raises", "or", "logs", "an", "error", "and", "exits", "with", "failure", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/base/exception_sink.py#L411-L434
224,269
pantsbuild/pants
src/python/pants/bin/daemon_pants_runner.py
DaemonExiter.exit
def exit(self, result=0, msg=None, *args, **kwargs): """Exit the runtime.""" if self._finalizer: try: self._finalizer() except Exception as e: try: NailgunProtocol.send_stderr( self._socket, '\nUnexpected exception in finalizer: {!r}\n'.format(e) ) except Exception: pass try: # Write a final message to stderr if present. if msg: NailgunProtocol.send_stderr(self._socket, msg) # Send an Exit chunk with the result. NailgunProtocol.send_exit_with_code(self._socket, result) # Shutdown the connected socket. teardown_socket(self._socket) finally: super(DaemonExiter, self).exit(result=result, *args, **kwargs)
python
def exit(self, result=0, msg=None, *args, **kwargs): if self._finalizer: try: self._finalizer() except Exception as e: try: NailgunProtocol.send_stderr( self._socket, '\nUnexpected exception in finalizer: {!r}\n'.format(e) ) except Exception: pass try: # Write a final message to stderr if present. if msg: NailgunProtocol.send_stderr(self._socket, msg) # Send an Exit chunk with the result. NailgunProtocol.send_exit_with_code(self._socket, result) # Shutdown the connected socket. teardown_socket(self._socket) finally: super(DaemonExiter, self).exit(result=result, *args, **kwargs)
[ "def", "exit", "(", "self", ",", "result", "=", "0", ",", "msg", "=", "None", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "if", "self", ".", "_finalizer", ":", "try", ":", "self", ".", "_finalizer", "(", ")", "except", "Exception", "as",...
Exit the runtime.
[ "Exit", "the", "runtime", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/bin/daemon_pants_runner.py#L51-L76
224,270
pantsbuild/pants
src/python/pants/bin/daemon_pants_runner.py
DaemonPantsRunner._tty_stdio
def _tty_stdio(cls, env): """Handles stdio redirection in the case of all stdio descriptors being the same tty.""" # If all stdio is a tty, there's only one logical I/O device (the tty device). This happens to # be addressable as a file in OSX and Linux, so we take advantage of that and directly open the # character device for output redirection - eliminating the need to directly marshall any # interactive stdio back/forth across the socket and permitting full, correct tty control with # no middle-man. stdin_ttyname, stdout_ttyname, stderr_ttyname = NailgunProtocol.ttynames_from_env(env) assert stdin_ttyname == stdout_ttyname == stderr_ttyname, ( 'expected all stdio ttys to be the same, but instead got: {}\n' 'please file a bug at http://github.com/pantsbuild/pants' .format([stdin_ttyname, stdout_ttyname, stderr_ttyname]) ) with open(stdin_ttyname, 'rb+', 0) as tty: tty_fileno = tty.fileno() with stdio_as(stdin_fd=tty_fileno, stdout_fd=tty_fileno, stderr_fd=tty_fileno): def finalizer(): termios.tcdrain(tty_fileno) yield finalizer
python
def _tty_stdio(cls, env): # If all stdio is a tty, there's only one logical I/O device (the tty device). This happens to # be addressable as a file in OSX and Linux, so we take advantage of that and directly open the # character device for output redirection - eliminating the need to directly marshall any # interactive stdio back/forth across the socket and permitting full, correct tty control with # no middle-man. stdin_ttyname, stdout_ttyname, stderr_ttyname = NailgunProtocol.ttynames_from_env(env) assert stdin_ttyname == stdout_ttyname == stderr_ttyname, ( 'expected all stdio ttys to be the same, but instead got: {}\n' 'please file a bug at http://github.com/pantsbuild/pants' .format([stdin_ttyname, stdout_ttyname, stderr_ttyname]) ) with open(stdin_ttyname, 'rb+', 0) as tty: tty_fileno = tty.fileno() with stdio_as(stdin_fd=tty_fileno, stdout_fd=tty_fileno, stderr_fd=tty_fileno): def finalizer(): termios.tcdrain(tty_fileno) yield finalizer
[ "def", "_tty_stdio", "(", "cls", ",", "env", ")", ":", "# If all stdio is a tty, there's only one logical I/O device (the tty device). This happens to", "# be addressable as a file in OSX and Linux, so we take advantage of that and directly open the", "# character device for output redirection -...
Handles stdio redirection in the case of all stdio descriptors being the same tty.
[ "Handles", "stdio", "redirection", "in", "the", "case", "of", "all", "stdio", "descriptors", "being", "the", "same", "tty", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/bin/daemon_pants_runner.py#L183-L201
224,271
pantsbuild/pants
src/python/pants/bin/daemon_pants_runner.py
DaemonPantsRunner.nailgunned_stdio
def nailgunned_stdio(cls, sock, env, handle_stdin=True): """Redirects stdio to the connected socket speaking the nailgun protocol.""" # Determine output tty capabilities from the environment. stdin_isatty, stdout_isatty, stderr_isatty = NailgunProtocol.isatty_from_env(env) is_tty_capable = all((stdin_isatty, stdout_isatty, stderr_isatty)) if is_tty_capable: with cls._tty_stdio(env) as finalizer: yield finalizer else: with cls._pipe_stdio( sock, stdin_isatty, stdout_isatty, stderr_isatty, handle_stdin ) as finalizer: yield finalizer
python
def nailgunned_stdio(cls, sock, env, handle_stdin=True): # Determine output tty capabilities from the environment. stdin_isatty, stdout_isatty, stderr_isatty = NailgunProtocol.isatty_from_env(env) is_tty_capable = all((stdin_isatty, stdout_isatty, stderr_isatty)) if is_tty_capable: with cls._tty_stdio(env) as finalizer: yield finalizer else: with cls._pipe_stdio( sock, stdin_isatty, stdout_isatty, stderr_isatty, handle_stdin ) as finalizer: yield finalizer
[ "def", "nailgunned_stdio", "(", "cls", ",", "sock", ",", "env", ",", "handle_stdin", "=", "True", ")", ":", "# Determine output tty capabilities from the environment.", "stdin_isatty", ",", "stdout_isatty", ",", "stderr_isatty", "=", "NailgunProtocol", ".", "isatty_from...
Redirects stdio to the connected socket speaking the nailgun protocol.
[ "Redirects", "stdio", "to", "the", "connected", "socket", "speaking", "the", "nailgun", "protocol", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/bin/daemon_pants_runner.py#L245-L262
224,272
pantsbuild/pants
src/python/pants/bin/daemon_pants_runner.py
DaemonPantsRunner._raise_deferred_exc
def _raise_deferred_exc(self): """Raises deferred exceptions from the daemon's synchronous path in the post-fork client.""" if self._deferred_exception: try: exc_type, exc_value, exc_traceback = self._deferred_exception raise_with_traceback(exc_value, exc_traceback) except TypeError: # If `_deferred_exception` isn't a 3-item tuple (raising a TypeError on the above # destructuring), treat it like a bare exception. raise self._deferred_exception
python
def _raise_deferred_exc(self): if self._deferred_exception: try: exc_type, exc_value, exc_traceback = self._deferred_exception raise_with_traceback(exc_value, exc_traceback) except TypeError: # If `_deferred_exception` isn't a 3-item tuple (raising a TypeError on the above # destructuring), treat it like a bare exception. raise self._deferred_exception
[ "def", "_raise_deferred_exc", "(", "self", ")", ":", "if", "self", ".", "_deferred_exception", ":", "try", ":", "exc_type", ",", "exc_value", ",", "exc_traceback", "=", "self", ".", "_deferred_exception", "raise_with_traceback", "(", "exc_value", ",", "exc_traceba...
Raises deferred exceptions from the daemon's synchronous path in the post-fork client.
[ "Raises", "deferred", "exceptions", "from", "the", "daemon", "s", "synchronous", "path", "in", "the", "post", "-", "fork", "client", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/bin/daemon_pants_runner.py#L265-L274
224,273
pantsbuild/pants
contrib/node/src/python/pants/contrib/node/subsystems/resolvers/npm_resolver.py
NpmResolver._scoped_package_name
def _scoped_package_name(node_task, package_name, node_scope): """Apply a node_scope to the package name. Overrides any existing package_name if already in a scope :return: A package_name with prepended with a node scope via '@' """ if not node_scope: return package_name scoped_package_name = package_name chunk = package_name.split('/', 1) if len(chunk) > 1 and chunk[0].startswith('@'): scoped_package_name = os.path.join('@{}'.format(node_scope), chunk[1:]) else: scoped_package_name = os.path.join('@{}'.format(node_scope), package_name) node_task.context.log.debug( 'Node package "{}" will be resolved with scope "{}".'.format(package_name, scoped_package_name)) return scoped_package_name
python
def _scoped_package_name(node_task, package_name, node_scope): if not node_scope: return package_name scoped_package_name = package_name chunk = package_name.split('/', 1) if len(chunk) > 1 and chunk[0].startswith('@'): scoped_package_name = os.path.join('@{}'.format(node_scope), chunk[1:]) else: scoped_package_name = os.path.join('@{}'.format(node_scope), package_name) node_task.context.log.debug( 'Node package "{}" will be resolved with scope "{}".'.format(package_name, scoped_package_name)) return scoped_package_name
[ "def", "_scoped_package_name", "(", "node_task", ",", "package_name", ",", "node_scope", ")", ":", "if", "not", "node_scope", ":", "return", "package_name", "scoped_package_name", "=", "package_name", "chunk", "=", "package_name", ".", "split", "(", "'/'", ",", ...
Apply a node_scope to the package name. Overrides any existing package_name if already in a scope :return: A package_name with prepended with a node scope via '@'
[ "Apply", "a", "node_scope", "to", "the", "package", "name", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/node/src/python/pants/contrib/node/subsystems/resolvers/npm_resolver.py#L158-L178
224,274
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile.compile
def compile(self, ctx, args, dependency_classpath, upstream_analysis, settings, compiler_option_sets, zinc_file_manager, javac_plugin_map, scalac_plugin_map): """Invoke the compiler. Subclasses must implement. Must raise TaskError on compile failure. :param CompileContext ctx: A CompileContext for the target to compile. :param list args: Arguments to the compiler (such as javac or zinc). :param list dependency_classpath: List of classpath entries of type ClasspathEntry for dependencies. :param upstream_analysis: A map from classpath entry to analysis file for dependencies. :param JvmPlatformSettings settings: platform settings determining the -source, -target, etc for javac to use. :param list compiler_option_sets: The compiler_option_sets flags for the target. :param zinc_file_manager: whether to use zinc provided file manager. :param javac_plugin_map: Map of names of javac plugins to use to their arguments. :param scalac_plugin_map: Map of names of scalac plugins to use to their arguments. """ raise NotImplementedError()
python
def compile(self, ctx, args, dependency_classpath, upstream_analysis, settings, compiler_option_sets, zinc_file_manager, javac_plugin_map, scalac_plugin_map): raise NotImplementedError()
[ "def", "compile", "(", "self", ",", "ctx", ",", "args", ",", "dependency_classpath", ",", "upstream_analysis", ",", "settings", ",", "compiler_option_sets", ",", "zinc_file_manager", ",", "javac_plugin_map", ",", "scalac_plugin_map", ")", ":", "raise", "NotImplement...
Invoke the compiler. Subclasses must implement. Must raise TaskError on compile failure. :param CompileContext ctx: A CompileContext for the target to compile. :param list args: Arguments to the compiler (such as javac or zinc). :param list dependency_classpath: List of classpath entries of type ClasspathEntry for dependencies. :param upstream_analysis: A map from classpath entry to analysis file for dependencies. :param JvmPlatformSettings settings: platform settings determining the -source, -target, etc for javac to use. :param list compiler_option_sets: The compiler_option_sets flags for the target. :param zinc_file_manager: whether to use zinc provided file manager. :param javac_plugin_map: Map of names of javac plugins to use to their arguments. :param scalac_plugin_map: Map of names of scalac plugins to use to their arguments.
[ "Invoke", "the", "compiler", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L218-L237
224,275
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile.do_compile
def do_compile(self, invalidation_check, compile_contexts, classpath_product): """Executes compilations for the invalid targets contained in a single chunk.""" invalid_targets = [vt.target for vt in invalidation_check.invalid_vts] valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid] if self.execution_strategy == self.HERMETIC: self._set_directory_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts) for valid_target in valid_targets: cc = self.select_runtime_context(compile_contexts[valid_target]) classpath_product.add_for_target( valid_target, [(conf, self._classpath_for_context(cc)) for conf in self._confs], ) self.register_extra_products_from_contexts(valid_targets, compile_contexts) if not invalid_targets: return # This ensures the workunit for the worker pool is set before attempting to compile. with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \ as workunit: # This uses workunit.parent as the WorkerPool's parent so that child workunits # of different pools will show up in order in the html output. This way the current running # workunit is on the bottom of the page rather than possibly in the middle. worker_pool = WorkerPool(workunit.parent, self.context.run_tracker, self._worker_count) # Prepare the output directory for each invalid target, and confirm that analysis is valid. for target in invalid_targets: cc = self.select_runtime_context(compile_contexts[target]) safe_mkdir(cc.classes_dir.path) # Now create compile jobs for each invalid target one by one, using the classpath # generated by upstream JVM tasks and our own prepare_compile(). jobs = self._create_compile_jobs(compile_contexts, invalid_targets, invalidation_check.invalid_vts, classpath_product) exec_graph = ExecutionGraph(jobs, self.get_options().print_exception_stacktrace) try: exec_graph.execute(worker_pool, self.context.log) except ExecutionFailure as e: raise TaskError("Compilation failure: {}".format(e))
python
def do_compile(self, invalidation_check, compile_contexts, classpath_product): invalid_targets = [vt.target for vt in invalidation_check.invalid_vts] valid_targets = [vt.target for vt in invalidation_check.all_vts if vt.valid] if self.execution_strategy == self.HERMETIC: self._set_directory_digests_for_valid_target_classpath_directories(valid_targets, compile_contexts) for valid_target in valid_targets: cc = self.select_runtime_context(compile_contexts[valid_target]) classpath_product.add_for_target( valid_target, [(conf, self._classpath_for_context(cc)) for conf in self._confs], ) self.register_extra_products_from_contexts(valid_targets, compile_contexts) if not invalid_targets: return # This ensures the workunit for the worker pool is set before attempting to compile. with self.context.new_workunit('isolation-{}-pool-bootstrap'.format(self.name())) \ as workunit: # This uses workunit.parent as the WorkerPool's parent so that child workunits # of different pools will show up in order in the html output. This way the current running # workunit is on the bottom of the page rather than possibly in the middle. worker_pool = WorkerPool(workunit.parent, self.context.run_tracker, self._worker_count) # Prepare the output directory for each invalid target, and confirm that analysis is valid. for target in invalid_targets: cc = self.select_runtime_context(compile_contexts[target]) safe_mkdir(cc.classes_dir.path) # Now create compile jobs for each invalid target one by one, using the classpath # generated by upstream JVM tasks and our own prepare_compile(). jobs = self._create_compile_jobs(compile_contexts, invalid_targets, invalidation_check.invalid_vts, classpath_product) exec_graph = ExecutionGraph(jobs, self.get_options().print_exception_stacktrace) try: exec_graph.execute(worker_pool, self.context.log) except ExecutionFailure as e: raise TaskError("Compilation failure: {}".format(e))
[ "def", "do_compile", "(", "self", ",", "invalidation_check", ",", "compile_contexts", ",", "classpath_product", ")", ":", "invalid_targets", "=", "[", "vt", ".", "target", "for", "vt", "in", "invalidation_check", ".", "invalid_vts", "]", "valid_targets", "=", "[...
Executes compilations for the invalid targets contained in a single chunk.
[ "Executes", "compilations", "for", "the", "invalid", "targets", "contained", "in", "a", "single", "chunk", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L397-L444
224,276
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._compile_vts
def _compile_vts(self, vts, ctx, upstream_analysis, dependency_classpath, progress_message, settings,
                 compiler_option_sets, zinc_file_manager, counter):
  """Compiles sources for the given vts into the given output dir.

  :param vts: VersionedTargetSet with one entry for the target.
  :param ctx: - A CompileContext instance for the target.
  :param dependency_classpath: A list of classpath entries of type ClasspathEntry for dependencies
  :param counter: Shared counter, used only to render "[i/n]" progress strings.

  May be invoked concurrently on independent target sets.

  Postcondition: The individual targets in vts are up-to-date, as if each were
                 compiled individually.
  """
  if not ctx.sources:
    self.context.log.warn('Skipping {} compile for targets with no sources:\n {}'
                          .format(self.name(), vts.targets))
  else:
    counter_val = str(counter()).rjust(counter.format_length(), ' ')
    counter_str = '[{}/{}] '.format(counter_val, counter.size)
    # Do some reporting.
    self.context.log.info(
      counter_str,
      'Compiling ',
      items_to_report_element(ctx.sources, '{} source'.format(self.name())),
      ' in ',
      items_to_report_element([t.address.reference() for t in vts.targets], 'target'),
      ' (',
      progress_message,
      ').')
    with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit:
      try:
        directory_digest = self.compile(
          ctx,
          self._args,
          dependency_classpath,
          upstream_analysis,
          settings,
          compiler_option_sets,
          zinc_file_manager,
          self._get_plugin_map('javac', Java.global_instance(), ctx.target),
          self._get_plugin_map('scalac', ScalaPlatform.global_instance(), ctx.target),
        )
        self._capture_logs(compile_workunit, ctx.log_dir)
        return directory_digest
      except TaskError:
        # On failure, optionally mine this task's compiler logs for missing-dependency
        # suggestions before re-raising the original error.
        if self.get_options().suggest_missing_deps:
          logs = [path
                  for _, name, _, path in self._find_logs(compile_workunit)
                  if name == self.name()]
          if logs:
            self._find_missing_deps(logs, ctx.target)
        raise
python
def _compile_vts(self, vts, ctx, upstream_analysis, dependency_classpath, progress_message, settings, compiler_option_sets, zinc_file_manager, counter): if not ctx.sources: self.context.log.warn('Skipping {} compile for targets with no sources:\n {}' .format(self.name(), vts.targets)) else: counter_val = str(counter()).rjust(counter.format_length(), ' ') counter_str = '[{}/{}] '.format(counter_val, counter.size) # Do some reporting. self.context.log.info( counter_str, 'Compiling ', items_to_report_element(ctx.sources, '{} source'.format(self.name())), ' in ', items_to_report_element([t.address.reference() for t in vts.targets], 'target'), ' (', progress_message, ').') with self.context.new_workunit('compile', labels=[WorkUnitLabel.COMPILER]) as compile_workunit: try: directory_digest = self.compile( ctx, self._args, dependency_classpath, upstream_analysis, settings, compiler_option_sets, zinc_file_manager, self._get_plugin_map('javac', Java.global_instance(), ctx.target), self._get_plugin_map('scalac', ScalaPlatform.global_instance(), ctx.target), ) self._capture_logs(compile_workunit, ctx.log_dir) return directory_digest except TaskError: if self.get_options().suggest_missing_deps: logs = [path for _, name, _, path in self._find_logs(compile_workunit) if name == self.name()] if logs: self._find_missing_deps(logs, ctx.target) raise
[ "def", "_compile_vts", "(", "self", ",", "vts", ",", "ctx", ",", "upstream_analysis", ",", "dependency_classpath", ",", "progress_message", ",", "settings", ",", "compiler_option_sets", ",", "zinc_file_manager", ",", "counter", ")", ":", "if", "not", "ctx", ".",...
Compiles sources for the given vts into the given output dir. :param vts: VersionedTargetSet with one entry for the target. :param ctx: - A CompileContext instance for the target. :param dependency_classpath: A list of classpath entries of type ClasspathEntry for dependencies May be invoked concurrently on independent target sets. Postcondition: The individual targets in vts are up-to-date, as if each were compiled individually.
[ "Compiles", "sources", "for", "the", "given", "vts", "into", "the", "given", "output", "dir", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L472-L523
224,277
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._get_plugin_map
def _get_plugin_map(self, compiler, options_src, target): """Returns a map of plugin to args, for the given compiler. Only plugins that must actually be activated will be present as keys in the map. Plugins with no arguments will have an empty list as a value. Active plugins and their args will be gathered from (in order of precedence): - The <compiler>_plugins and <compiler>_plugin_args fields of the target, if it has them. - The <compiler>_plugins and <compiler>_plugin_args options of this task, if it has them. - The <compiler>_plugins and <compiler>_plugin_args fields of this task, if it has them. Note that in-repo plugins will not be returned, even if requested, when building themselves. Use published versions of those plugins for that. See: - examples/src/java/org/pantsbuild/example/javac/plugin/README.md. - examples/src/scala/org/pantsbuild/example/scalac/plugin/README.md :param compiler: one of 'javac', 'scalac'. :param options_src: A JvmToolMixin instance providing plugin options. :param target: The target whose plugins we compute. """ # Note that we get() options and getattr() target fields and task methods, # so we're robust when those don't exist (or are None). plugins_key = '{}_plugins'.format(compiler) requested_plugins = ( tuple(getattr(self, plugins_key, []) or []) + tuple(options_src.get_options().get(plugins_key, []) or []) + tuple((getattr(target, plugins_key, []) or [])) ) # Allow multiple flags and also comma-separated values in a single flag. requested_plugins = {p for val in requested_plugins for p in val.split(',')} plugin_args_key = '{}_plugin_args'.format(compiler) available_plugin_args = {} available_plugin_args.update(getattr(self, plugin_args_key, {}) or {}) available_plugin_args.update(options_src.get_options().get(plugin_args_key, {}) or {}) available_plugin_args.update(getattr(target, plugin_args_key, {}) or {}) # From all available args, pluck just the ones for the selected plugins. 
plugin_map = {} for plugin in requested_plugins: # Don't attempt to use a plugin while building that plugin. # This avoids a bootstrapping problem. Note that you can still # use published plugins on themselves, just not in-repo plugins. if target not in self._plugin_targets(compiler).get(plugin, {}): plugin_map[plugin] = available_plugin_args.get(plugin, []) return plugin_map
python
def _get_plugin_map(self, compiler, options_src, target): # Note that we get() options and getattr() target fields and task methods, # so we're robust when those don't exist (or are None). plugins_key = '{}_plugins'.format(compiler) requested_plugins = ( tuple(getattr(self, plugins_key, []) or []) + tuple(options_src.get_options().get(plugins_key, []) or []) + tuple((getattr(target, plugins_key, []) or [])) ) # Allow multiple flags and also comma-separated values in a single flag. requested_plugins = {p for val in requested_plugins for p in val.split(',')} plugin_args_key = '{}_plugin_args'.format(compiler) available_plugin_args = {} available_plugin_args.update(getattr(self, plugin_args_key, {}) or {}) available_plugin_args.update(options_src.get_options().get(plugin_args_key, {}) or {}) available_plugin_args.update(getattr(target, plugin_args_key, {}) or {}) # From all available args, pluck just the ones for the selected plugins. plugin_map = {} for plugin in requested_plugins: # Don't attempt to use a plugin while building that plugin. # This avoids a bootstrapping problem. Note that you can still # use published plugins on themselves, just not in-repo plugins. if target not in self._plugin_targets(compiler).get(plugin, {}): plugin_map[plugin] = available_plugin_args.get(plugin, []) return plugin_map
[ "def", "_get_plugin_map", "(", "self", ",", "compiler", ",", "options_src", ",", "target", ")", ":", "# Note that we get() options and getattr() target fields and task methods,", "# so we're robust when those don't exist (or are None).", "plugins_key", "=", "'{}_plugins'", ".", "...
Returns a map of plugin to args, for the given compiler. Only plugins that must actually be activated will be present as keys in the map. Plugins with no arguments will have an empty list as a value. Active plugins and their args will be gathered from (in order of precedence): - The <compiler>_plugins and <compiler>_plugin_args fields of the target, if it has them. - The <compiler>_plugins and <compiler>_plugin_args options of this task, if it has them. - The <compiler>_plugins and <compiler>_plugin_args fields of this task, if it has them. Note that in-repo plugins will not be returned, even if requested, when building themselves. Use published versions of those plugins for that. See: - examples/src/java/org/pantsbuild/example/javac/plugin/README.md. - examples/src/scala/org/pantsbuild/example/scalac/plugin/README.md :param compiler: one of 'javac', 'scalac'. :param options_src: A JvmToolMixin instance providing plugin options. :param target: The target whose plugins we compute.
[ "Returns", "a", "map", "of", "plugin", "to", "args", "for", "the", "given", "compiler", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L530-L577
224,278
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._find_logs
def _find_logs(self, compile_workunit): """Finds all logs under the given workunit.""" for idx, workunit in enumerate(compile_workunit.children): for output_name, outpath in workunit.output_paths().items(): if output_name in ('stdout', 'stderr'): yield idx, workunit.name, output_name, outpath
python
def _find_logs(self, compile_workunit): for idx, workunit in enumerate(compile_workunit.children): for output_name, outpath in workunit.output_paths().items(): if output_name in ('stdout', 'stderr'): yield idx, workunit.name, output_name, outpath
[ "def", "_find_logs", "(", "self", ",", "compile_workunit", ")", ":", "for", "idx", ",", "workunit", "in", "enumerate", "(", "compile_workunit", ".", "children", ")", ":", "for", "output_name", ",", "outpath", "in", "workunit", ".", "output_paths", "(", ")", ...
Finds all logs under the given workunit.
[ "Finds", "all", "logs", "under", "the", "given", "workunit", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L579-L584
224,279
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._upstream_analysis
def _upstream_analysis(self, compile_contexts, classpath_entries): """Returns tuples of classes_dir->analysis_file for the closure of the target.""" # Reorganize the compile_contexts by class directory. compile_contexts_by_directory = {} for compile_context in compile_contexts.values(): compile_context = self.select_runtime_context(compile_context) compile_contexts_by_directory[compile_context.classes_dir.path] = compile_context # If we have a compile context for the target, include it. for entry in classpath_entries: path = entry.path if not path.endswith('.jar'): compile_context = compile_contexts_by_directory.get(path) if not compile_context: self.context.log.debug('Missing upstream analysis for {}'.format(path)) else: yield compile_context.classes_dir.path, compile_context.analysis_file
python
def _upstream_analysis(self, compile_contexts, classpath_entries): # Reorganize the compile_contexts by class directory. compile_contexts_by_directory = {} for compile_context in compile_contexts.values(): compile_context = self.select_runtime_context(compile_context) compile_contexts_by_directory[compile_context.classes_dir.path] = compile_context # If we have a compile context for the target, include it. for entry in classpath_entries: path = entry.path if not path.endswith('.jar'): compile_context = compile_contexts_by_directory.get(path) if not compile_context: self.context.log.debug('Missing upstream analysis for {}'.format(path)) else: yield compile_context.classes_dir.path, compile_context.analysis_file
[ "def", "_upstream_analysis", "(", "self", ",", "compile_contexts", ",", "classpath_entries", ")", ":", "# Reorganize the compile_contexts by class directory.", "compile_contexts_by_directory", "=", "{", "}", "for", "compile_context", "in", "compile_contexts", ".", "values", ...
Returns tuples of classes_dir->analysis_file for the closure of the target.
[ "Returns", "tuples", "of", "classes_dir", "-", ">", "analysis_file", "for", "the", "closure", "of", "the", "target", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L628-L643
224,280
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile.should_compile_incrementally
def should_compile_incrementally(self, vts, ctx):
  """Check to see if the compile should try to re-use the existing analysis.

  Returns true if we should try to compile the target incrementally.
  """
  if vts.is_incremental:
    # When invalid analysis must be discarded, incremental compiles are only possible
    # if a previous analysis file actually survives; otherwise always allow it.
    return os.path.exists(ctx.analysis_file) if self._clear_invalid_analysis else True
  return False
python
def should_compile_incrementally(self, vts, ctx): if not vts.is_incremental: return False if not self._clear_invalid_analysis: return True return os.path.exists(ctx.analysis_file)
[ "def", "should_compile_incrementally", "(", "self", ",", "vts", ",", "ctx", ")", ":", "if", "not", "vts", ".", "is_incremental", ":", "return", "False", "if", "not", "self", ".", "_clear_invalid_analysis", ":", "return", "True", "return", "os", ".", "path", ...
Check to see if the compile should try to re-use the existing analysis. Returns true if we should try to compile the target incrementally.
[ "Check", "to", "see", "if", "the", "compile", "should", "try", "to", "re", "-", "use", "the", "existing", "analysis", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L728-L737
224,281
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._create_context_jar
def _create_context_jar(self, compile_context):
  """Jar up the compile_context to its output jar location.

  TODO(stuhood): In the medium term, we hope to add compiler support for this step, which would
  allow the jars to be used as compile _inputs_ as well. Currently using jar'd compile outputs as
  compile inputs would make the compiler's analysis useless.
  see https://github.com/twitter-forks/sbt/tree/stuhood/output-jars
  """
  classes_root = compile_context.classes_dir.path
  with compile_context.open_jar(mode='w') as jar:
    # Walk the classes tree and mirror every directory and file into the jar,
    # keyed by its path relative to the classes root.
    for walk_dir, sub_dirs, file_names in safe_walk(classes_root):
      for entry_name in (sub_dirs + file_names):
        entry_abspath = os.path.join(walk_dir, entry_name)
        jar.write(entry_abspath, fast_relpath(entry_abspath, classes_root))
python
def _create_context_jar(self, compile_context): root = compile_context.classes_dir.path with compile_context.open_jar(mode='w') as jar: for abs_sub_dir, dirnames, filenames in safe_walk(root): for name in dirnames + filenames: abs_filename = os.path.join(abs_sub_dir, name) arcname = fast_relpath(abs_filename, root) jar.write(abs_filename, arcname)
[ "def", "_create_context_jar", "(", "self", ",", "compile_context", ")", ":", "root", "=", "compile_context", ".", "classes_dir", ".", "path", "with", "compile_context", ".", "open_jar", "(", "mode", "=", "'w'", ")", "as", "jar", ":", "for", "abs_sub_dir", ",...
Jar up the compile_context to its output jar location. TODO(stuhood): In the medium term, we hope to add compiler support for this step, which would allow the jars to be used as compile _inputs_ as well. Currently using jar'd compile outputs as compile inputs would make the compiler's analysis useless. see https://github.com/twitter-forks/sbt/tree/stuhood/output-jars
[ "Jar", "up", "the", "compile_context", "to", "its", "output", "jar", "location", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L777-L791
224,282
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._extra_compile_time_classpath
def _extra_compile_time_classpath(self): """Compute any extra compile-time-only classpath elements.""" def extra_compile_classpath_iter(): for conf in self._confs: for jar in self.extra_compile_time_classpath_elements(): yield (conf, jar) return list(extra_compile_classpath_iter())
python
def _extra_compile_time_classpath(self): def extra_compile_classpath_iter(): for conf in self._confs: for jar in self.extra_compile_time_classpath_elements(): yield (conf, jar) return list(extra_compile_classpath_iter())
[ "def", "_extra_compile_time_classpath", "(", "self", ")", ":", "def", "extra_compile_classpath_iter", "(", ")", ":", "for", "conf", "in", "self", ".", "_confs", ":", "for", "jar", "in", "self", ".", "extra_compile_time_classpath_elements", "(", ")", ":", "yield"...
Compute any extra compile-time-only classpath elements.
[ "Compute", "any", "extra", "compile", "-", "time", "-", "only", "classpath", "elements", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L809-L816
224,283
pantsbuild/pants
src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py
JvmCompile._plugin_targets
def _plugin_targets(self, compiler):
  """Returns a map from plugin name to the targets that build that plugin."""
  # Dispatch table instead of an if/elif chain; unknown compilers are an error.
  compiler_to_plugin_cls = {'javac': JavacPlugin, 'scalac': ScalacPlugin}
  plugin_cls = compiler_to_plugin_cls.get(compiler)
  if plugin_cls is None:
    raise TaskError('Unknown JVM compiler: {}'.format(compiler))
  plugin_tgts = self.context.targets(predicate=lambda t: isinstance(t, plugin_cls))
  return {t.plugin: t.closure() for t in plugin_tgts}
python
def _plugin_targets(self, compiler): if compiler == 'javac': plugin_cls = JavacPlugin elif compiler == 'scalac': plugin_cls = ScalacPlugin else: raise TaskError('Unknown JVM compiler: {}'.format(compiler)) plugin_tgts = self.context.targets(predicate=lambda t: isinstance(t, plugin_cls)) return {t.plugin: t.closure() for t in plugin_tgts}
[ "def", "_plugin_targets", "(", "self", ",", "compiler", ")", ":", "if", "compiler", "==", "'javac'", ":", "plugin_cls", "=", "JavacPlugin", "elif", "compiler", "==", "'scalac'", ":", "plugin_cls", "=", "ScalacPlugin", "else", ":", "raise", "TaskError", "(", ...
Returns a map from plugin name to the targets that build that plugin.
[ "Returns", "a", "map", "from", "plugin", "name", "to", "the", "targets", "that", "build", "that", "plugin", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/backend/jvm/tasks/jvm_compile/jvm_compile.py#L819-L828
224,284
pantsbuild/pants
contrib/cpp/src/python/pants/contrib/cpp/tasks/cpp_compile.py
CppCompile.execute
def execute(self):
  """Compile all sources in a given target to object files."""

  def is_cc(source):
    # A source participates in compilation iff its extension is one of the
    # configured C/C++ extensions.
    _, ext = os.path.splitext(source)
    return ext in self.get_options().cc_extensions

  targets = self.context.targets(self.is_cpp)

  # Compile source files to objects.
  with self.invalidated(targets, invalidate_dependents=True) as invalidation_check:
    obj_mapping = self.context.products.get('objs')
    for vt in invalidation_check.all_vts:
      for source in vt.target.sources_relative_to_buildroot():
        if is_cc(source):
          if not vt.valid:
            with self.context.new_workunit(name='cpp-compile', labels=[WorkUnitLabel.MULTITOOL]):
              # TODO: Parallelise the compilation.
              # TODO: Only recompile source files that have changed since the
              #       object file was last written. Also use the output from
              #       gcc -M to track dependencies on headers.
              self._compile(vt.target, vt.results_dir, source)
          # Register the object path even for valid (cached) targets so that
          # downstream link tasks can always find the outputs.
          objpath = self._objpath(vt.target, vt.results_dir, source)
          obj_mapping.add(vt.target, vt.results_dir).append(objpath)
python
def execute(self): def is_cc(source): _, ext = os.path.splitext(source) return ext in self.get_options().cc_extensions targets = self.context.targets(self.is_cpp) # Compile source files to objects. with self.invalidated(targets, invalidate_dependents=True) as invalidation_check: obj_mapping = self.context.products.get('objs') for vt in invalidation_check.all_vts: for source in vt.target.sources_relative_to_buildroot(): if is_cc(source): if not vt.valid: with self.context.new_workunit(name='cpp-compile', labels=[WorkUnitLabel.MULTITOOL]): # TODO: Parallelise the compilation. # TODO: Only recompile source files that have changed since the # object file was last written. Also use the output from # gcc -M to track dependencies on headers. self._compile(vt.target, vt.results_dir, source) objpath = self._objpath(vt.target, vt.results_dir, source) obj_mapping.add(vt.target, vt.results_dir).append(objpath)
[ "def", "execute", "(", "self", ")", ":", "def", "is_cc", "(", "source", ")", ":", "_", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "source", ")", "return", "ext", "in", "self", ".", "get_options", "(", ")", ".", "cc_extensions", "targ...
Compile all sources in a given target to object files.
[ "Compile", "all", "sources", "in", "a", "given", "target", "to", "object", "files", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/cpp/src/python/pants/contrib/cpp/tasks/cpp_compile.py#L37-L60
224,285
pantsbuild/pants
contrib/cpp/src/python/pants/contrib/cpp/tasks/cpp_compile.py
CppCompile._compile
def _compile(self, target, results_dir, source):
  """Compile given source to an object file.

  :param target: The target that owns `source`.
  :param results_dir: Directory in which to place the compiled object file.
  :param source: Buildroot-relative path of the source file to compile.
  """
  obj = self._objpath(target, results_dir, source)
  safe_mkdir_for(obj)

  abs_source = os.path.join(get_buildroot(), source)

  # Add an include dir for each library dependency's source root so its headers resolve.
  # TODO: include dir should include dependent work dir when headers are copied there.
  include_dirs = []
  for dep in target.dependencies:
    if self.is_library(dep):
      include_dirs.extend([os.path.join(get_buildroot(), dep.target_base)])

  # Assemble: compiler -c -I<dirs> -o<obj> <source> <extra cc options>.
  cmd = [self.cpp_toolchain.compiler]
  cmd.extend(['-c'])
  cmd.extend(('-I{0}'.format(i) for i in include_dirs))
  cmd.extend(['-o' + obj, abs_source])
  cmd.extend(self.get_options().cc_options)

  # TODO: submit_async_work with self.run_command, [(cmd)] as a Work object.
  with self.context.new_workunit(name='cpp-compile', labels=[WorkUnitLabel.COMPILER]) as workunit:
    self.run_command(cmd, workunit)

  self.context.log.info('Built c++ object: {0}'.format(obj))
python
def _compile(self, target, results_dir, source): obj = self._objpath(target, results_dir, source) safe_mkdir_for(obj) abs_source = os.path.join(get_buildroot(), source) # TODO: include dir should include dependent work dir when headers are copied there. include_dirs = [] for dep in target.dependencies: if self.is_library(dep): include_dirs.extend([os.path.join(get_buildroot(), dep.target_base)]) cmd = [self.cpp_toolchain.compiler] cmd.extend(['-c']) cmd.extend(('-I{0}'.format(i) for i in include_dirs)) cmd.extend(['-o' + obj, abs_source]) cmd.extend(self.get_options().cc_options) # TODO: submit_async_work with self.run_command, [(cmd)] as a Work object. with self.context.new_workunit(name='cpp-compile', labels=[WorkUnitLabel.COMPILER]) as workunit: self.run_command(cmd, workunit) self.context.log.info('Built c++ object: {0}'.format(obj))
[ "def", "_compile", "(", "self", ",", "target", ",", "results_dir", ",", "source", ")", ":", "obj", "=", "self", ".", "_objpath", "(", "target", ",", "results_dir", ",", "source", ")", "safe_mkdir_for", "(", "obj", ")", "abs_source", "=", "os", ".", "pa...
Compile given source to an object file.
[ "Compile", "given", "source", "to", "an", "object", "file", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/contrib/cpp/src/python/pants/contrib/cpp/tasks/cpp_compile.py#L71-L94
224,286
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler.do_GET
def do_GET(self):
  """GET method implementation for BaseHTTPRequestHandler.

  Dispatches on the URL path: each registered handler gets a chance to respond,
  '/' falls back to the run listing, and anything else gets a 400 response.
  """
  if not self._client_allowed():
    return

  try:
    (_, _, path, query, _) = urlsplit(self.path)
    params = parse_qs(query)
    # Give each handler a chance to respond.
    for prefix, handler in self._GET_handlers:
      if self._maybe_handle(prefix, handler, path, params):
        return
    # If no path specified, default to showing the list of all runs.
    if path == '/':
      self._handle_runs('', {})
      return

    # BUG FIX: a stray trailing comma previously made `content` a 1-tuple of bytes
    # instead of bytes, corrupting the body of the 400 response.
    content = 'Invalid GET request {}'.format(self.path).encode('utf-8')
    self._send_content(content, 'text/html', code=400)
  except (IOError, ValueError):
    pass
python
def do_GET(self): if not self._client_allowed(): return try: (_, _, path, query, _) = urlsplit(self.path) params = parse_qs(query) # Give each handler a chance to respond. for prefix, handler in self._GET_handlers: if self._maybe_handle(prefix, handler, path, params): return # If no path specified, default to showing the list of all runs. if path == '/': self._handle_runs('', {}) return content = 'Invalid GET request {}'.format(self.path).encode('utf-8'), self._send_content(content, 'text/html', code=400) except (IOError, ValueError): pass
[ "def", "do_GET", "(", "self", ")", ":", "if", "not", "self", ".", "_client_allowed", "(", ")", ":", "return", "try", ":", "(", "_", ",", "_", ",", "path", ",", "query", ",", "_", ")", "=", "urlsplit", "(", "self", ".", "path", ")", "params", "=...
GET method implementation for BaseHTTPRequestHandler.
[ "GET", "method", "implementation", "for", "BaseHTTPRequestHandler", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L59-L79
224,287
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._handle_runs
def _handle_runs(self, relpath, params): """Show a listing of all pants runs since the last clean-all.""" runs_by_day = self._partition_runs_by_day() args = self._default_template_args('run_list.html') args['runs_by_day'] = runs_by_day content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
python
def _handle_runs(self, relpath, params): runs_by_day = self._partition_runs_by_day() args = self._default_template_args('run_list.html') args['runs_by_day'] = runs_by_day content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
[ "def", "_handle_runs", "(", "self", ",", "relpath", ",", "params", ")", ":", "runs_by_day", "=", "self", ".", "_partition_runs_by_day", "(", ")", "args", "=", "self", ".", "_default_template_args", "(", "'run_list.html'", ")", "args", "[", "'runs_by_day'", "]"...
Show a listing of all pants runs since the last clean-all.
[ "Show", "a", "listing", "of", "all", "pants", "runs", "since", "the", "last", "clean", "-", "all", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L82-L88
224,288
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._handle_run
def _handle_run(self, relpath, params):
  """Show the report for a single pants run.

  :param relpath: The run id to display, or 'latest' for the most recent run.
  :param params: Query parameters (unused; kept for handler-signature uniformity).
  """
  args = self._default_template_args('run.html')
  run_id = relpath
  run_info = self._get_run_info_dict(run_id)
  if run_info is None:
    # Unknown run id: render the "no such run" page instead.
    args['no_such_run'] = relpath
    if run_id == 'latest':
      args['is_latest'] = 'none'
  else:
    # Compute browser-relative paths for the report and its timing/stats side panels.
    report_abspath = run_info['default_report']
    report_relpath = os.path.relpath(report_abspath, self._root)
    report_dir = os.path.dirname(report_relpath)
    self_timings_path = os.path.join(report_dir, 'self_timings')
    cumulative_timings_path = os.path.join(report_dir, 'cumulative_timings')
    artifact_cache_stats_path = os.path.join(report_dir, 'artifact_cache_stats')
    run_info['timestamp_text'] = \
      datetime.fromtimestamp(float(run_info['timestamp'])).strftime('%H:%M:%S on %A, %B %d %Y')

    # Collapsible sections for the timing/cache-stats tables shown under the report.
    timings_and_stats = '\n'.join([
      self._collapsible_fmt_string.format(id='cumulative-timings-collapsible',
                                          title='Cumulative timings',
                                          class_prefix='aggregated-timings'),
      self._collapsible_fmt_string.format(id='self-timings-collapsible',
                                          title='Self timings',
                                          class_prefix='aggregated-timings'),
      self._collapsible_fmt_string.format(id='artifact-cache-stats-collapsible',
                                          title='Artifact cache stats',
                                          class_prefix='artifact-cache-stats')
    ])

    args.update({'run_info': run_info,
                 'report_path': report_relpath,
                 'self_timings_path': self_timings_path,
                 'cumulative_timings_path': cumulative_timings_path,
                 'artifact_cache_stats_path': artifact_cache_stats_path,
                 'timings_and_stats': timings_and_stats})
    if run_id == 'latest':
      args['is_latest'] = run_info['id']
  content = self._renderer.render_name('base.html', args).encode("utf-8")
  self._send_content(content, 'text/html')
python
def _handle_run(self, relpath, params): args = self._default_template_args('run.html') run_id = relpath run_info = self._get_run_info_dict(run_id) if run_info is None: args['no_such_run'] = relpath if run_id == 'latest': args['is_latest'] = 'none' else: report_abspath = run_info['default_report'] report_relpath = os.path.relpath(report_abspath, self._root) report_dir = os.path.dirname(report_relpath) self_timings_path = os.path.join(report_dir, 'self_timings') cumulative_timings_path = os.path.join(report_dir, 'cumulative_timings') artifact_cache_stats_path = os.path.join(report_dir, 'artifact_cache_stats') run_info['timestamp_text'] = \ datetime.fromtimestamp(float(run_info['timestamp'])).strftime('%H:%M:%S on %A, %B %d %Y') timings_and_stats = '\n'.join([ self._collapsible_fmt_string.format(id='cumulative-timings-collapsible', title='Cumulative timings', class_prefix='aggregated-timings'), self._collapsible_fmt_string.format(id='self-timings-collapsible', title='Self timings', class_prefix='aggregated-timings'), self._collapsible_fmt_string.format(id='artifact-cache-stats-collapsible', title='Artifact cache stats', class_prefix='artifact-cache-stats') ]) args.update({'run_info': run_info, 'report_path': report_relpath, 'self_timings_path': self_timings_path, 'cumulative_timings_path': cumulative_timings_path, 'artifact_cache_stats_path': artifact_cache_stats_path, 'timings_and_stats': timings_and_stats}) if run_id == 'latest': args['is_latest'] = run_info['id'] content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
[ "def", "_handle_run", "(", "self", ",", "relpath", ",", "params", ")", ":", "args", "=", "self", ".", "_default_template_args", "(", "'run.html'", ")", "run_id", "=", "relpath", "run_info", "=", "self", ".", "_get_run_info_dict", "(", "run_id", ")", "if", ...
Show the report for a single pants run.
[ "Show", "the", "report", "for", "a", "single", "pants", "run", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L104-L142
224,289
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._handle_browse
def _handle_browse(self, relpath, params): """Handle requests to browse the filesystem under the build root.""" abspath = os.path.normpath(os.path.join(self._root, relpath)) if not abspath.startswith(self._root): raise ValueError # Prevent using .. to get files from anywhere other than root. if os.path.isdir(abspath): self._serve_dir(abspath, params) elif os.path.isfile(abspath): self._serve_file(abspath, params)
python
def _handle_browse(self, relpath, params): abspath = os.path.normpath(os.path.join(self._root, relpath)) if not abspath.startswith(self._root): raise ValueError # Prevent using .. to get files from anywhere other than root. if os.path.isdir(abspath): self._serve_dir(abspath, params) elif os.path.isfile(abspath): self._serve_file(abspath, params)
[ "def", "_handle_browse", "(", "self", ",", "relpath", ",", "params", ")", ":", "abspath", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_root", ",", "relpath", ")", ")", "if", "not", "abspath", "."...
Handle requests to browse the filesystem under the build root.
[ "Handle", "requests", "to", "browse", "the", "filesystem", "under", "the", "build", "root", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L144-L152
224,290
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._handle_content
def _handle_content(self, relpath, params): """Render file content for pretty display.""" abspath = os.path.normpath(os.path.join(self._root, relpath)) if os.path.isfile(abspath): with open(abspath, 'rb') as infile: content = infile.read() else: content = 'No file found at {}'.format(abspath).encode('utf-8') content_type = mimetypes.guess_type(abspath)[0] or 'text/plain' if not content_type.startswith('text/') and not content_type == 'application/xml': # Binary file. Display it as hex, split into lines. n = 120 # Display lines of this max size. content = repr(content)[1:-1] # Will escape non-printables etc, dropping surrounding quotes. content = '\n'.join([content[i:i + n] for i in range(0, len(content), n)]) prettify = False prettify_extra_langs = [] else: prettify = True if self._settings.assets_dir: prettify_extra_dir = os.path.join(self._settings.assets_dir, 'js', 'prettify_extra_langs') prettify_extra_langs = [{'name': x} for x in os.listdir(prettify_extra_dir)] else: # TODO: Find these from our package, somehow. prettify_extra_langs = [] linenums = True args = {'prettify_extra_langs': prettify_extra_langs, 'content': content, 'prettify': prettify, 'linenums': linenums} content = self._renderer.render_name('file_content.html', args).encode("utf-8") self._send_content(content, 'text/html')
python
def _handle_content(self, relpath, params): abspath = os.path.normpath(os.path.join(self._root, relpath)) if os.path.isfile(abspath): with open(abspath, 'rb') as infile: content = infile.read() else: content = 'No file found at {}'.format(abspath).encode('utf-8') content_type = mimetypes.guess_type(abspath)[0] or 'text/plain' if not content_type.startswith('text/') and not content_type == 'application/xml': # Binary file. Display it as hex, split into lines. n = 120 # Display lines of this max size. content = repr(content)[1:-1] # Will escape non-printables etc, dropping surrounding quotes. content = '\n'.join([content[i:i + n] for i in range(0, len(content), n)]) prettify = False prettify_extra_langs = [] else: prettify = True if self._settings.assets_dir: prettify_extra_dir = os.path.join(self._settings.assets_dir, 'js', 'prettify_extra_langs') prettify_extra_langs = [{'name': x} for x in os.listdir(prettify_extra_dir)] else: # TODO: Find these from our package, somehow. prettify_extra_langs = [] linenums = True args = {'prettify_extra_langs': prettify_extra_langs, 'content': content, 'prettify': prettify, 'linenums': linenums} content = self._renderer.render_name('file_content.html', args).encode("utf-8") self._send_content(content, 'text/html')
[ "def", "_handle_content", "(", "self", ",", "relpath", ",", "params", ")", ":", "abspath", "=", "os", ".", "path", ".", "normpath", "(", "os", ".", "path", ".", "join", "(", "self", ".", "_root", ",", "relpath", ")", ")", "if", "os", ".", "path", ...
Render file content for pretty display.
[ "Render", "file", "content", "for", "pretty", "display", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L154-L182
224,291
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._handle_poll
def _handle_poll(self, relpath, params): """Handle poll requests for raw file contents.""" request = json.loads(params.get('q')[0]) ret = {} # request is a polling request for multiple files. For each file: # - id is some identifier assigned by the client, used to differentiate the results. # - path is the file to poll. # - pos is the last byte position in that file seen by the client. for poll in request: _id = poll.get('id', None) path = poll.get('path', None) pos = poll.get('pos', 0) if path: abspath = os.path.normpath(os.path.join(self._root, path)) if os.path.isfile(abspath): with open(abspath, 'rb') as infile: if pos: infile.seek(pos) content = infile.read() ret[_id] = content.decode("utf-8") content = json.dumps(ret).encode("utf-8") self._send_content(content, 'application/json')
python
def _handle_poll(self, relpath, params): request = json.loads(params.get('q')[0]) ret = {} # request is a polling request for multiple files. For each file: # - id is some identifier assigned by the client, used to differentiate the results. # - path is the file to poll. # - pos is the last byte position in that file seen by the client. for poll in request: _id = poll.get('id', None) path = poll.get('path', None) pos = poll.get('pos', 0) if path: abspath = os.path.normpath(os.path.join(self._root, path)) if os.path.isfile(abspath): with open(abspath, 'rb') as infile: if pos: infile.seek(pos) content = infile.read() ret[_id] = content.decode("utf-8") content = json.dumps(ret).encode("utf-8") self._send_content(content, 'application/json')
[ "def", "_handle_poll", "(", "self", ",", "relpath", ",", "params", ")", ":", "request", "=", "json", ".", "loads", "(", "params", ".", "get", "(", "'q'", ")", "[", "0", "]", ")", "ret", "=", "{", "}", "# request is a polling request for multiple files. For...
Handle poll requests for raw file contents.
[ "Handle", "poll", "requests", "for", "raw", "file", "contents", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L195-L216
224,292
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._partition_runs_by_day
def _partition_runs_by_day(self): """Split the runs by day, so we can display them grouped that way.""" run_infos = self._get_all_run_infos() for x in run_infos: ts = float(x['timestamp']) x['time_of_day_text'] = datetime.fromtimestamp(ts).strftime('%H:%M:%S') def date_text(dt): delta_days = (date.today() - dt).days if delta_days == 0: return 'Today' elif delta_days == 1: return 'Yesterday' elif delta_days < 7: return dt.strftime('%A') # Weekday name. else: d = dt.day % 10 suffix = 'st' if d == 1 else 'nd' if d == 2 else 'rd' if d == 3 else 'th' return dt.strftime('%B %d') + suffix # E.g., October 30th. keyfunc = lambda x: datetime.fromtimestamp(float(x['timestamp'])) sorted_run_infos = sorted(run_infos, key=keyfunc, reverse=True) return [{'date_text': date_text(dt), 'run_infos': [x for x in infos]} for dt, infos in itertools.groupby(sorted_run_infos, lambda x: keyfunc(x).date())]
python
def _partition_runs_by_day(self): run_infos = self._get_all_run_infos() for x in run_infos: ts = float(x['timestamp']) x['time_of_day_text'] = datetime.fromtimestamp(ts).strftime('%H:%M:%S') def date_text(dt): delta_days = (date.today() - dt).days if delta_days == 0: return 'Today' elif delta_days == 1: return 'Yesterday' elif delta_days < 7: return dt.strftime('%A') # Weekday name. else: d = dt.day % 10 suffix = 'st' if d == 1 else 'nd' if d == 2 else 'rd' if d == 3 else 'th' return dt.strftime('%B %d') + suffix # E.g., October 30th. keyfunc = lambda x: datetime.fromtimestamp(float(x['timestamp'])) sorted_run_infos = sorted(run_infos, key=keyfunc, reverse=True) return [{'date_text': date_text(dt), 'run_infos': [x for x in infos]} for dt, infos in itertools.groupby(sorted_run_infos, lambda x: keyfunc(x).date())]
[ "def", "_partition_runs_by_day", "(", "self", ")", ":", "run_infos", "=", "self", ".", "_get_all_run_infos", "(", ")", "for", "x", "in", "run_infos", ":", "ts", "=", "float", "(", "x", "[", "'timestamp'", "]", ")", "x", "[", "'time_of_day_text'", "]", "=...
Split the runs by day, so we can display them grouped that way.
[ "Split", "the", "runs", "by", "day", "so", "we", "can", "display", "them", "grouped", "that", "way", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L233-L256
224,293
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._get_run_info_dict
def _get_run_info_dict(self, run_id): """Get the RunInfo for a run, as a dict.""" run_info_path = os.path.join(self._settings.info_dir, run_id, 'info') if os.path.exists(run_info_path): # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template. return RunInfo(run_info_path).get_as_dict() else: return None
python
def _get_run_info_dict(self, run_id): run_info_path = os.path.join(self._settings.info_dir, run_id, 'info') if os.path.exists(run_info_path): # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template. return RunInfo(run_info_path).get_as_dict() else: return None
[ "def", "_get_run_info_dict", "(", "self", ",", "run_id", ")", ":", "run_info_path", "=", "os", ".", "path", ".", "join", "(", "self", ".", "_settings", ".", "info_dir", ",", "run_id", ",", "'info'", ")", "if", "os", ".", "path", ".", "exists", "(", "...
Get the RunInfo for a run, as a dict.
[ "Get", "the", "RunInfo", "for", "a", "run", "as", "a", "dict", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L258-L265
224,294
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._get_all_run_infos
def _get_all_run_infos(self): """Find the RunInfos for all runs since the last clean-all.""" info_dir = self._settings.info_dir if not os.path.isdir(info_dir): return [] paths = [os.path.join(info_dir, x) for x in os.listdir(info_dir)] # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template. # We filter only those that have a timestamp, to avoid a race condition with writing # that field. return [d for d in [RunInfo(os.path.join(p, 'info')).get_as_dict() for p in paths if os.path.isdir(p) and not os.path.islink(p)] if 'timestamp' in d]
python
def _get_all_run_infos(self): info_dir = self._settings.info_dir if not os.path.isdir(info_dir): return [] paths = [os.path.join(info_dir, x) for x in os.listdir(info_dir)] # We copy the RunInfo as a dict, so we can add stuff to it to pass to the template. # We filter only those that have a timestamp, to avoid a race condition with writing # that field. return [d for d in [RunInfo(os.path.join(p, 'info')).get_as_dict() for p in paths if os.path.isdir(p) and not os.path.islink(p)] if 'timestamp' in d]
[ "def", "_get_all_run_infos", "(", "self", ")", ":", "info_dir", "=", "self", ".", "_settings", ".", "info_dir", "if", "not", "os", ".", "path", ".", "isdir", "(", "info_dir", ")", ":", "return", "[", "]", "paths", "=", "[", "os", ".", "path", ".", ...
Find the RunInfos for all runs since the last clean-all.
[ "Find", "the", "RunInfos", "for", "all", "runs", "since", "the", "last", "clean", "-", "all", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L267-L280
224,295
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._serve_dir
def _serve_dir(self, abspath, params): """Show a directory listing.""" relpath = os.path.relpath(abspath, self._root) breadcrumbs = self._create_breadcrumbs(relpath) entries = [{'link_path': os.path.join(relpath, e), 'name': e} for e in os.listdir(abspath)] args = self._default_template_args('dir.html') args.update({'root_parent': os.path.dirname(self._root), 'breadcrumbs': breadcrumbs, 'entries': entries, 'params': params}) content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
python
def _serve_dir(self, abspath, params): relpath = os.path.relpath(abspath, self._root) breadcrumbs = self._create_breadcrumbs(relpath) entries = [{'link_path': os.path.join(relpath, e), 'name': e} for e in os.listdir(abspath)] args = self._default_template_args('dir.html') args.update({'root_parent': os.path.dirname(self._root), 'breadcrumbs': breadcrumbs, 'entries': entries, 'params': params}) content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
[ "def", "_serve_dir", "(", "self", ",", "abspath", ",", "params", ")", ":", "relpath", "=", "os", ".", "path", ".", "relpath", "(", "abspath", ",", "self", ".", "_root", ")", "breadcrumbs", "=", "self", ".", "_create_breadcrumbs", "(", "relpath", ")", "...
Show a directory listing.
[ "Show", "a", "directory", "listing", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L282-L293
224,296
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._serve_file
def _serve_file(self, abspath, params): """Show a file. The actual content of the file is rendered by _handle_content. """ relpath = os.path.relpath(abspath, self._root) breadcrumbs = self._create_breadcrumbs(relpath) link_path = urlunparse(['', '', relpath, '', urlencode(params), '']) args = self._default_template_args('file.html') args.update({'root_parent': os.path.dirname(self._root), 'breadcrumbs': breadcrumbs, 'link_path': link_path}) content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
python
def _serve_file(self, abspath, params): relpath = os.path.relpath(abspath, self._root) breadcrumbs = self._create_breadcrumbs(relpath) link_path = urlunparse(['', '', relpath, '', urlencode(params), '']) args = self._default_template_args('file.html') args.update({'root_parent': os.path.dirname(self._root), 'breadcrumbs': breadcrumbs, 'link_path': link_path}) content = self._renderer.render_name('base.html', args).encode("utf-8") self._send_content(content, 'text/html')
[ "def", "_serve_file", "(", "self", ",", "abspath", ",", "params", ")", ":", "relpath", "=", "os", ".", "path", ".", "relpath", "(", "abspath", ",", "self", ".", "_root", ")", "breadcrumbs", "=", "self", ".", "_create_breadcrumbs", "(", "relpath", ")", ...
Show a file. The actual content of the file is rendered by _handle_content.
[ "Show", "a", "file", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L295-L308
224,297
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._send_content
def _send_content(self, content, content_type, code=200): """Send content to client.""" assert isinstance(content, bytes) self.send_response(code) self.send_header('Content-Type', content_type) self.send_header('Content-Length', str(len(content))) self.end_headers() self.wfile.write(content)
python
def _send_content(self, content, content_type, code=200): assert isinstance(content, bytes) self.send_response(code) self.send_header('Content-Type', content_type) self.send_header('Content-Length', str(len(content))) self.end_headers() self.wfile.write(content)
[ "def", "_send_content", "(", "self", ",", "content", ",", "content_type", ",", "code", "=", "200", ")", ":", "assert", "isinstance", "(", "content", ",", "bytes", ")", "self", ".", "send_response", "(", "code", ")", "self", ".", "send_header", "(", "'Con...
Send content to client.
[ "Send", "content", "to", "client", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L310-L317
224,298
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._client_allowed
def _client_allowed(self): """Check if client is allowed to connect to this server.""" client_ip = self._client_address[0] if not client_ip in self._settings.allowed_clients and \ not 'ALL' in self._settings.allowed_clients: content = 'Access from host {} forbidden.'.format(client_ip).encode('utf-8') self._send_content(content, 'text/html') return False return True
python
def _client_allowed(self): client_ip = self._client_address[0] if not client_ip in self._settings.allowed_clients and \ not 'ALL' in self._settings.allowed_clients: content = 'Access from host {} forbidden.'.format(client_ip).encode('utf-8') self._send_content(content, 'text/html') return False return True
[ "def", "_client_allowed", "(", "self", ")", ":", "client_ip", "=", "self", ".", "_client_address", "[", "0", "]", "if", "not", "client_ip", "in", "self", ".", "_settings", ".", "allowed_clients", "and", "not", "'ALL'", "in", "self", ".", "_settings", ".", ...
Check if client is allowed to connect to this server.
[ "Check", "if", "client", "is", "allowed", "to", "connect", "to", "this", "server", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L319-L327
224,299
pantsbuild/pants
src/python/pants/reporting/reporting_server.py
PantsHandler._maybe_handle
def _maybe_handle(self, prefix, handler, path, params, data=None): """Apply the handler if the prefix matches.""" if path.startswith(prefix): relpath = path[len(prefix):] if data: handler(relpath, params, data) else: handler(relpath, params) return True else: return False
python
def _maybe_handle(self, prefix, handler, path, params, data=None): if path.startswith(prefix): relpath = path[len(prefix):] if data: handler(relpath, params, data) else: handler(relpath, params) return True else: return False
[ "def", "_maybe_handle", "(", "self", ",", "prefix", ",", "handler", ",", "path", ",", "params", ",", "data", "=", "None", ")", ":", "if", "path", ".", "startswith", "(", "prefix", ")", ":", "relpath", "=", "path", "[", "len", "(", "prefix", ")", ":...
Apply the handler if the prefix matches.
[ "Apply", "the", "handler", "if", "the", "prefix", "matches", "." ]
b72e650da0df685824ffdcc71988b8c282d0962d
https://github.com/pantsbuild/pants/blob/b72e650da0df685824ffdcc71988b8c282d0962d/src/python/pants/reporting/reporting_server.py#L329-L339