INSTRUCTION
stringlengths
1
46.3k
RESPONSE
stringlengths
75
80.2k
Return the cut-nodes of the given hypergraph. @type hypergraph: hypergraph @param hypergraph: Hypergraph @rtype: list @return: List of cut-nodes.
def _cut_hypernodes(hypergraph):
    """
    Return the cut-nodes of the given hypergraph.

    @type  hypergraph: hypergraph
    @param hypergraph: Hypergraph
    @rtype:  list
    @return: List of cut-nodes.
    """
    # cut_nodes() works on the bipartite representation; keep only entries
    # tagged 'n' (nodes), discarding hyperedge vertices.
    return [identifier for identifier, tag in cut_nodes(hypergraph.graph)
            if tag == 'n']
Depth first search adapted for identification of cut-edges and cut-nodes. @type graph: graph, digraph @param graph: Graph @type spanning_tree: dictionary @param spanning_tree: Spanning tree being built for the graph by DFS. @type pre: dictionary @param pre: Graph's preordering. @type low: dictionary @param low: Associates to each node, the preordering index of the node of lowest preordering accessible from the given node. @type reply: list @param reply: List of cut-edges. @type node: node @param node: Node to be explored by DFS.
def _cut_dfs(graph, spanning_tree, pre, low, reply, node):
    """
    Depth first search adapted for identification of cut-edges and cut-nodes.

    @type  graph: graph, digraph
    @param graph: Graph
    @type  spanning_tree: dictionary
    @param spanning_tree: Spanning tree being built for the graph by DFS.
    @type  pre: dictionary
    @param pre: Graph's preordering.
    @type  low: dictionary
    @param low: Associates to each node, the preordering index of the node of
        lowest preordering accessible from the given node.
    @type  reply: list
    @param reply: List of cut-edges.
    @type  node: node
    @param node: Node to be explored by DFS.
    """
    # pre[None] doubles as the global preorder counter (caller seeds it).
    pre[node] = pre[None]
    low[node] = pre[None]
    pre[None] = pre[None] + 1
    for each in graph[node]:
        if (each not in pre):
            # Tree edge: recurse, then propagate the child's low-link upward.
            spanning_tree[each] = node
            _cut_dfs(graph, spanning_tree, pre, low, reply, each)
            if (low[node] > low[each]):
                low[node] = low[each]
            # No back edge from the child's subtree climbs above the child:
            # (node, each) is a cut-edge (bridge).
            if (low[each] == pre[each]):
                reply.append((node, each))
        elif (low[node] > pre[each] and spanning_tree[node] != each):
            # Back edge (not to the DFS parent) may lower node's low-link.
            low[node] = pre[each]
Remove a node from the graph. @type node: node @param node: Node identifier.
def del_node(self, node):
    """
    Remove a node from the graph.

    @type  node: node
    @param node: Node identifier.
    """
    # Copy the neighbor list: del_edge mutates it while we iterate.
    for neighbor in list(self.neighbors(node)):
        if neighbor != node:
            self.del_edge((neighbor, node))
    del self.node_neighbors[node]
    del self.node_attr[node]
Remove an edge from the graph. @type edge: tuple @param edge: Edge.
def del_edge(self, edge):
    """
    Remove an edge from the graph.

    @type  edge: tuple
    @param edge: Edge.
    """
    tail, head = edge
    self.node_neighbors[tail].remove(head)
    self.del_edge_labeling((tail, head))
    if tail != head:
        # Non-loop edges are stored in both directions; drop the mirror too.
        self.node_neighbors[head].remove(tail)
        self.del_edge_labeling((head, tail))
Get the weight of an edge. @type edge: edge @param edge: One edge. @rtype: number @return: Edge weight.
def edge_weight(self, edge):
    """
    Get the weight of an edge.

    @type  edge: edge
    @param edge: One edge.
    @rtype:  number
    @return: Edge weight.
    """
    # setdefault also records the default weight on the edge if unset.
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.WEIGHT_ATTRIBUTE_NAME,
                                 self.DEFAULT_WEIGHT)
Set the weight of an edge. @type edge: edge @param edge: One edge. @type wt: number @param wt: Edge weight.
def set_edge_weight(self, edge, wt):
    """
    Set the weight of an edge.

    @type  edge: edge
    @param edge: One edge.
    @type  wt: number
    @param wt: Edge weight.
    """
    self.set_edge_properties(edge, weight=wt)
    if not self.DIRECTED:
        # Undirected graphs keep properties on both edge orientations.
        reversed_edge = (edge[1], edge[0])
        self.set_edge_properties(reversed_edge, weight=wt)
Get the label of an edge. @type edge: edge @param edge: One edge. @rtype: string @return: Edge label
def edge_label(self, edge):
    """
    Get the label of an edge.

    @type  edge: edge
    @param edge: One edge.
    @rtype:  string
    @return: Edge label
    """
    # setdefault also records the default label on the edge if unset.
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.LABEL_ATTRIBUTE_NAME,
                                 self.DEFAULT_LABEL)
Set the label of an edge. @type edge: edge @param edge: One edge. @type label: string @param label: Edge label.
def set_edge_label(self, edge, label):
    """
    Set the label of an edge.

    @type  edge: edge
    @param edge: One edge.
    @type  label: string
    @param label: Edge label.
    """
    self.set_edge_properties(edge, label=label)
    if not self.DIRECTED:
        # Undirected graphs keep properties on both edge orientations.
        reversed_edge = (edge[1], edge[0])
        self.set_edge_properties(reversed_edge, label=label)
Add attribute to the given edge. @type edge: edge @param edge: One edge. @type attr: tuple @param attr: Node attribute specified as a tuple in the form (attribute, value).
def add_edge_attribute(self, edge, attr):
    """
    Add attribute to the given edge.

    @type  edge: edge
    @param edge: One edge.
    @type  attr: tuple
    @param attr: Node attribute specified as a tuple in the form
        (attribute, value).
    """
    self.edge_attr[edge] = self.edge_attributes(edge) + [attr]
    if not self.DIRECTED and edge[0] != edge[1]:
        # Mirror the attribute on the reverse orientation (skip self-loops,
        # which have only one stored orientation).
        mirror = (edge[1], edge[0])
        self.edge_attr[mirror] = self.edge_attributes(mirror) + [attr]
Append a sequence of attributes to the given edge @type edge: edge @param edge: One edge. @type attrs: tuple @param attrs: Node attributes specified as a sequence of tuples in the form (attribute, value).
def add_edge_attributes(self, edge, attrs):
    """
    Append a sequence of attributes to the given edge

    @type  edge: edge
    @param edge: One edge.
    @type  attrs: tuple
    @param attrs: Node attributes specified as a sequence of tuples in the
        form (attribute, value).
    """
    # Delegate each attribute so directed/undirected mirroring is handled
    # in one place (add_edge_attribute).
    for single_attr in attrs:
        self.add_edge_attribute(edge, single_attr)
Add attribute to the given node. @type node: node @param node: Node identifier @type attr: tuple @param attr: Node attribute specified as a tuple in the form (attribute, value).
def add_node_attribute(self, node, attr):
    """
    Add attribute to the given node.

    @type  node: node
    @param node: Node identifier
    @type  attr: tuple
    @param attr: Node attribute specified as a tuple in the form
        (attribute, value).
    """
    # Rebind to a fresh list (rather than mutating in place), matching the
    # original copy-on-append behavior.
    existing = self.node_attr[node]
    self.node_attr[node] = existing + [attr]
Get shell code that should be run to activate this suite.
def activation_shell_code(self, shell=None):
    """Get shell code that should be run to activate this suite."""
    from rez.shells import create_shell
    from rez.rex import RexExecutor

    interpreter = create_shell(shell)
    executor = RexExecutor(interpreter=interpreter,
                           parent_variables=["PATH"],
                           shebang=False)
    # Activation == putting the suite's tool wrappers on PATH.
    executor.env.PATH.append(self.tools_path)
    return executor.get_output().strip()
Get a context. Args: name (str): Name to store the context under. Returns: `ResolvedContext` object.
def context(self, name):
    """Get a context.

    Args:
        name (str): Name to store the context under.

    Returns:
        `ResolvedContext` object.
    """
    data = self._context(name)
    cached = data.get("context")
    if cached:
        return cached

    # Not cached yet: lazily load from the saved suite on disk.
    assert self.load_path
    context_path = os.path.join(self.load_path, "contexts", "%s.rxt" % name)
    loaded = ResolvedContext.load(context_path)
    data["context"] = loaded
    data["loaded"] = True
    return loaded
Add a context to the suite. Args: name (str): Name to store the context under. context (ResolvedContext): Context to add.
def add_context(self, name, context, prefix_char=None):
    """Add a context to the suite.

    Args:
        name (str): Name to store the context under.
        context (ResolvedContext): Context to add.
    """
    if name in self.contexts:
        raise SuiteError("Context already in suite: %r" % name)
    if not context.success:
        raise SuiteError("Context is not resolved: %r" % name)

    # Store a copy so later mutations of the caller's context don't leak in.
    entry = dict(name=name,
                 context=context.copy(),
                 tool_aliases={},
                 hidden_tools=set(),
                 priority=self._next_priority,
                 prefix_char=prefix_char)
    self.contexts[name] = entry
    self._flush_tools()
Find contexts in the suite based on search criteria. Args: in_request (str): Match contexts that contain the given package in their request. in_resolve (str or `Requirement`): Match contexts that contain the given package in their resolve. You can also supply a conflict requirement - '!foo' will match any contexts whose resolve does not contain any version of package 'foo'. Returns: List of context names that match the search criteria.
def find_contexts(self, in_request=None, in_resolve=None):
    """Find contexts in the suite based on search criteria.

    Args:
        in_request (str): Match contexts that contain the given package in
            their request.
        in_resolve (str or `Requirement`): Match contexts that contain the
            given package in their resolve. You can also supply a conflict
            requirement - '!foo' will match any contexts whose resolve does
            not contain any version of package 'foo'.

    Returns:
        List of context names that match the search criteria.
    """
    names = self.context_names
    if in_request:
        # Keep contexts whose (implicit-expanded) request names the package.
        def _in_request(name):
            context = self.context(name)
            packages = set(x.name for x in context.requested_packages(True))
            return (in_request in packages)

        names = [x for x in names if _in_request(x)]

    if in_resolve:
        # py2: basestring covers both str and unicode.
        if isinstance(in_resolve, basestring):
            in_resolve = PackageRequest(in_resolve)

        def _in_resolve(name):
            context = self.context(name)
            variant = context.get_resolved_package(in_resolve.name)
            if variant:
                overlap = (variant.version in in_resolve.range)
                # Conflict requests match when the version does NOT overlap;
                # normal requests match when it does.
                return ((in_resolve.conflict and not overlap)
                        or (overlap and not in_resolve.conflict))
            else:
                # Package absent: only a conflict request matches.
                return in_resolve.conflict

        names = [x for x in names if _in_resolve(x)]
    return names
Remove a context from the suite. Args: name (str): Name of the context to remove.
def remove_context(self, name):
    """Remove a context from the suite.

    Args:
        name (str): Name of the context to remove.
    """
    # _context raises if the named context is not in the suite.
    self._context(name)
    del self.contexts[name]
    self._flush_tools()
Set a context's prefix. This will be applied to all wrappers for the tools in this context. For example, a tool called 'foo' would appear as '<prefix>foo' in the suite's bin path. Args: name (str): Name of the context to prefix. prefix (str): Prefix to apply to tools.
def set_context_prefix(self, name, prefix):
    """Set a context's prefix.

    This will be applied to all wrappers for the tools in this context. For
    example, a tool called 'foo' would appear as '<prefix>foo' in the
    suite's bin path.

    Args:
        name (str): Name of the context to prefix.
        prefix (str): Prefix to apply to tools.
    """
    context_data = self._context(name)
    context_data["prefix"] = prefix
    self._flush_tools()
Set a context's suffix. This will be applied to all wrappers for the tools in this context. For example, a tool called 'foo' would appear as 'foo<suffix>' in the suite's bin path. Args: name (str): Name of the context to suffix. suffix (str): Suffix to apply to tools.
def set_context_suffix(self, name, suffix):
    """Set a context's suffix.

    This will be applied to all wrappers for the tools in this context. For
    example, a tool called 'foo' would appear as 'foo<suffix>' in the
    suite's bin path.

    Args:
        name (str): Name of the context to suffix.
        suffix (str): Suffix to apply to tools.
    """
    context_data = self._context(name)
    context_data["suffix"] = suffix
    self._flush_tools()
Causes the context's tools to take priority over all others.
def bump_context(self, name):
    """Causes the context's tools to take priority over all others."""
    # Highest priority wins on tool-name clashes; grab the next counter value.
    context_data = self._context(name)
    context_data["priority"] = self._next_priority
    self._flush_tools()
Hide a tool so that it is not exposed in the suite. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to hide.
def hide_tool(self, context_name, tool_name):
    """Hide a tool so that it is not exposed in the suite.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to hide.
    """
    context_data = self._context(context_name)
    hidden = context_data["hidden_tools"]
    if tool_name not in hidden:
        # Validate first, so hiding a nonexistent tool fails loudly.
        self._validate_tool(context_name, tool_name)
        hidden.add(tool_name)
        self._flush_tools()
Unhide a tool so that it may be exposed in a suite. Note that unhiding a tool doesn't guarantee it can be seen - a tool of the same name from a different context may be overriding it. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to unhide.
def unhide_tool(self, context_name, tool_name):
    """Unhide a tool so that it may be exposed in a suite.

    Note that unhiding a tool doesn't guarantee it can be seen - a tool of
    the same name from a different context may be overriding it.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to unhide.
    """
    context_data = self._context(context_name)
    hidden = context_data["hidden_tools"]
    if tool_name in hidden:
        hidden.remove(tool_name)
        self._flush_tools()
Register an alias for a specific tool. Note that a tool alias takes precedence over a context prefix/suffix. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to alias. tool_alias (str): Alias to give the tool.
def alias_tool(self, context_name, tool_name, tool_alias):
    """Register an alias for a specific tool.

    Note that a tool alias takes precedence over a context prefix/suffix.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to alias.
        tool_alias (str): Alias to give the tool.
    """
    context_data = self._context(context_name)
    aliases = context_data["tool_aliases"]
    if tool_name in aliases:
        raise SuiteError("Tool %r in context %r is already aliased to %r"
                         % (tool_name, context_name, aliases[tool_name]))

    self._validate_tool(context_name, tool_name)
    aliases[tool_name] = tool_alias
    self._flush_tools()
Deregister an alias for a specific tool. Args: context_name (str): Context containing the tool. tool_name (str): Name of tool to unalias.
def unalias_tool(self, context_name, tool_name):
    """Deregister an alias for a specific tool.

    Args:
        context_name (str): Context containing the tool.
        tool_name (str): Name of tool to unalias.
    """
    context_data = self._context(context_name)
    aliases = context_data["tool_aliases"]
    if tool_name in aliases:
        del aliases[tool_name]
        self._flush_tools()
Given a visible tool alias, return the full path to the executable. Args: tool_alias (str): Tool alias to search for. Returns: (str): Filepath of executable, or None if the tool is not in the suite. May also return None because this suite has not been saved to disk, so a filepath hasn't yet been established.
def get_tool_filepath(self, tool_alias):
    """Given a visible tool alias, return the full path to the executable.

    Args:
        tool_alias (str): Tool alias to search for.

    Returns:
        (str): Filepath of executable, or None if the tool is not in the
            suite. May also return None because this suite has not been saved
            to disk, so a filepath hasn't yet been established.
    """
    # Guard clauses: unknown alias, or suite not yet saved to disk.
    if tool_alias not in self.get_tools():
        return None
    if self.tools_path is None:
        return None
    return os.path.join(self.tools_path, tool_alias)
Given a visible tool alias, return the name of the context it belongs to. Args: tool_alias (str): Tool alias to search for. Returns: (str): Name of the context that exposes a visible instance of this tool alias, or None if the alias is not available.
def get_tool_context(self, tool_alias):
    """Given a visible tool alias, return the name of the context it
    belongs to.

    Args:
        tool_alias (str): Tool alias to search for.

    Returns:
        (str): Name of the context that exposes a visible instance of this
            tool alias, or None if the alias is not available.
    """
    entry = self.get_tools().get(tool_alias)
    return entry["context_name"] if entry else None
Validate the suite.
def validate(self):
    """Validate the suite."""
    # Surface the first broken context as a SuiteError naming the context.
    for name in self.context_names:
        ctx = self.context(name)
        try:
            ctx.validate()
        except ResolvedContextError as e:
            raise SuiteError("Error in context %r: %s" % (name, str(e)))
Save the suite to disk. Args: path (str): Path to save the suite to. If a suite is already saved at `path`, then it will be overwritten. Otherwise, if `path` exists, an error is raised.
def save(self, path, verbose=False):
    """Save the suite to disk.

    Args:
        path (str): Path to save the suite to. If a suite is already saved at
            `path`, then it will be overwritten. Otherwise, if `path` exists,
            an error is raised.
    """
    path = os.path.realpath(path)
    if os.path.exists(path):
        if self.load_path and self.load_path == path:
            # Re-saving over our own load path: force-load every context
            # into memory first, since the directory is about to be deleted.
            if verbose:
                print "saving over previous suite..."
            for context_name in self.context_names:
                self.context(context_name)  # load before dir deleted
            shutil.rmtree(path)
        else:
            raise SuiteError("Cannot save, path exists: %r" % path)

    contexts_path = os.path.join(path, "contexts")
    os.makedirs(contexts_path)

    # write suite data
    data = self.to_dict()
    filepath = os.path.join(path, "suite.yaml")
    with open(filepath, "w") as f:
        f.write(dump_yaml(data))

    # write contexts
    for context_name in self.context_names:
        context = self.context(context_name)
        context._set_parent_suite(path, context_name)
        filepath = self._context_path(context_name, path)
        if verbose:
            print "writing %r..." % filepath
        context.save(filepath)

    # create alias wrappers (one forwarding script per visible tool alias)
    tools_path = os.path.join(path, "bin")
    os.makedirs(tools_path)
    if verbose:
        print "creating alias wrappers in %r..." % tools_path

    tools = self.get_tools()
    for tool_alias, d in tools.iteritems():
        tool_name = d["tool_name"]
        context_name = d["context_name"]
        data = self._context(context_name)
        prefix_char = data.get("prefix_char")
        if verbose:
            print ("creating %r -> %r (%s context)..."
                   % (tool_alias, tool_name, context_name))
        filepath = os.path.join(tools_path, tool_alias)
        create_forwarding_script(filepath,
                                 module="suite",
                                 func_name="_FWD__invoke_suite_tool_alias",
                                 context_name=context_name,
                                 tool_name=tool_name,
                                 prefix_char=prefix_char)
Get a list of paths to suites that are visible on $PATH. Returns: List of str.
def visible_suite_paths(cls, paths=None):
    """Get a list of paths to suites that are visible on $PATH.

    Returns:
        List of str.
    """
    if paths is None:
        paths = os.getenv("PATH", "").split(os.pathsep)

    found = []
    for entry in paths:
        if not entry or not os.path.isdir(entry):
            continue
        # A suite's bin dir sits next to its suite.yaml, one level down.
        suite_root = os.path.dirname(entry)
        marker = os.path.join(suite_root, "suite.yaml")
        if os.path.isfile(marker):
            found.append(suite_root)
    return found
Get a list of suites whose bin paths are visible on $PATH. Returns: List of `Suite` objects.
def load_visible_suites(cls, paths=None):
    """Get a list of suites whose bin paths are visible on $PATH.

    Returns:
        List of `Suite` objects.
    """
    return [cls.load(suite_path)
            for suite_path in cls.visible_suite_paths(paths)]
Prints a message summarising the contents of the suite.
def print_info(self, buf=sys.stdout, verbose=False):
    """Prints a message summarising the contents of the suite."""
    _pr = Printer(buf)

    if not self.contexts:
        _pr("Suite is empty.")
        return

    context_names = sorted(self.contexts.iterkeys())
    _pr("Suite contains %d contexts:" % len(context_names))

    if not verbose:
        # Terse mode: just the context names on one line.
        _pr(' '.join(context_names))
        return

    # Verbose mode: tally visible tools and source packages per context.
    tools = self.get_tools().values()
    context_tools = defaultdict(set)
    context_variants = defaultdict(set)
    for entry in tools:
        context_name = entry["context_name"]
        context_tools[context_name].add(entry["tool_name"])
        context_variants[context_name].add(str(entry["variant"]))

    _pr()
    rows = [["NAME", "VISIBLE TOOLS", "PATH"],
            ["----", "-------------", "----"]]

    for context_name in context_names:
        context_path = self._context_path(context_name) or '-'
        ntools = len(context_tools.get(context_name, []))
        if ntools:
            nvariants = len(context_variants[context_name])
            short_desc = "%d tools from %d packages" % (ntools, nvariants)
        else:
            short_desc = "no tools"
        rows.append((context_name, short_desc, context_path))

    _pr("\n".join(columnise(rows)))
Print table of tools available in the suite. Args: context_name (str): If provided, only print the tools from this context.
def print_tools(self, buf=sys.stdout, verbose=False, context_name=None):
    """Print table of tools available in the suite.

    Args:
        context_name (str): If provided, only print the tools from this
            context.
    """
    def _get_row(entry):
        # Build one table row (and an optional color) for a tool entry.
        context_name_ = entry["context_name"]
        tool_alias = entry["tool_alias"]
        tool_name = entry["tool_name"]
        properties = []
        col = None

        variant = entry["variant"]
        if isinstance(variant, set):
            # A set of variants means multiple packages supply this tool.
            properties.append("(in conflict)")
            col = critical
            if verbose:
                package = ", ".join(x.qualified_package_name for x in variant)
            else:
                v = iter(variant).next()
                package = "%s (+%d more)" % (v.qualified_package_name,
                                             len(variant) - 1)
        else:
            package = variant.qualified_package_name

        if tool_name == tool_alias:
            tool_name = "-"
        else:
            properties.append("(aliased)")
            if col is None:
                col = alias_col

        msg = " ".join(properties)
        row = [tool_alias, tool_name, package, context_name_, msg]
        return row, col

    if context_name:
        self._context(context_name)  # check context exists
        context_names = [context_name]
    else:
        context_names = sorted(self.contexts.iterkeys())

    rows = [["TOOL", "ALIASING", "PACKAGE", "CONTEXT", ""],
            ["----", "--------", "-------", "-------", ""]]
    colors = [None, None]

    # Group tool entries by owning context.
    entries_dict = defaultdict(list)
    for d in self.get_tools().itervalues():
        entries_dict[d["context_name"]].append(d)

    if verbose:
        # add hidden entries
        for d in self.hidden_tools:
            d_ = d.copy()
            d_["hidden"] = True
            entries_dict[d["context_name"]].append(d_)

        # add conflicting tools
        for docs in self.tool_conflicts.itervalues():
            for d in docs:
                d_ = d.copy()
                d_["conflicting"] = True
                entries_dict[d["context_name"]].append(d_)

    for i, context_name in enumerate(context_names):
        entries = entries_dict.get(context_name, [])
        if entries:
            if i:
                # Blank separator row between contexts.
                rows.append(('', '', '', '', ''))
                colors.append(None)
            entries = sorted(entries, key=lambda x: x["tool_alias"].lower())
            for entry in entries:
                row, col = _get_row(entry)
                if "hidden" in entry:
                    row[-1] = "(hidden)"
                    rows.append(row)
                    colors.append(warning)
                elif "conflicting" in entry:
                    row[-1] = "(not visible)"
                    rows.append(row)
                    colors.append(warning)
                else:
                    rows.append(row)
                    colors.append(col)

    if rows:
        _pr = Printer(buf)
        for col, line in zip(colors, columnise(rows)):
            _pr(line, col)
    else:
        _pr("No tools available.")
Returns the build system classes that could build the source in given dir. Args: working_dir (str): Dir containing the package definition and potentially build files. package (`Package`): Package to be built. This may or may not be needed to determine the build system. For eg, cmake just has to look for a CMakeLists.txt file, whereas the 'build_command' package field must be present for the 'custom' build system type. Returns: List of class: Valid build system class types.
def get_valid_build_systems(working_dir, package=None):
    """Returns the build system classes that could build the source in given dir.

    Args:
        working_dir (str): Dir containing the package definition and
            potentially build files.
        package (`Package`): Package to be built. This may or may not be
            needed to determine the build system. For eg, cmake just has to
            look for a CMakeLists.txt file, whereas the 'build_command'
            package field must be present for the 'custom' build system type.

    Returns:
        List of class: Valid build system class types.
    """
    from rez.plugin_managers import plugin_manager
    from rez.exceptions import PackageMetadataError

    try:
        package = package or get_developer_package(working_dir)
    except PackageMetadataError:
        # no package, or bad package
        pass

    if package:
        # An explicit build_command forces the 'custom' build system.
        if getattr(package, "build_command", None) is not None:
            buildsys_name = "custom"
        else:
            buildsys_name = getattr(package, "build_system", None)

        # package explicitly specifies build system
        if buildsys_name:
            cls = plugin_manager.get_plugin_class('build_system', buildsys_name)
            return [cls]

    # detect valid build systems by probing each plugin against the dir
    clss = []
    for buildsys_name in get_buildsys_types():
        cls = plugin_manager.get_plugin_class('build_system', buildsys_name)
        if cls.is_valid_root(working_dir, package=package):
            clss.append(cls)

    # Sometimes files for multiple build systems can be present, because one
    # build system uses another (a 'child' build system) - eg, cmake uses
    # make. Detect this case and ignore files from the child build system.
    #
    child_clss = set(x.child_build_system() for x in clss)
    clss = list(set(clss) - child_clss)

    return clss
Return a new build system that can build the source in working_dir.
def create_build_system(working_dir, buildsys_type=None, package=None,
                        opts=None, write_build_scripts=False, verbose=False,
                        build_args=None, child_build_args=None):
    """Return a new build system that can build the source in working_dir.

    Args:
        working_dir (str): Dir containing the source to build.
        buildsys_type (str): Name of the build system plugin to use; if None,
            it is detected from the source in `working_dir`.
        package (`Package`): Package being built, if known.
        opts: Parsed command-line options object passed to the build system.
        write_build_scripts (bool): If True, the build system writes build
            scripts instead of performing the build.
        verbose (bool): Enable verbose output.
        build_args (list of str): Extra args passed to the build system.
        child_build_args (list of str): Extra args passed to the child build
            system (eg make, when cmake is the parent).

    Returns:
        `BuildSystem` instance.

    Raises:
        BuildSystemError: If no build system (or more than one) is associated
            with `working_dir` and none was specified explicitly.
    """
    from rez.plugin_managers import plugin_manager

    # Fix: mutable default arguments ([]) are shared across calls; use None
    # sentinels and create fresh lists per call (backward compatible).
    if build_args is None:
        build_args = []
    if child_build_args is None:
        child_build_args = []

    # detect build system if necessary
    if not buildsys_type:
        clss = get_valid_build_systems(working_dir, package=package)
        if not clss:
            raise BuildSystemError(
                "No build system is associated with the path %s" % working_dir)
        if len(clss) != 1:
            s = ', '.join(x.name() for x in clss)
            raise BuildSystemError(("Source could be built with one of: %s; "
                                    "Please specify a build system") % s)
        # next(iter(...)) works on both py2.6+ and py3 (iter().next() is
        # py2-only).
        buildsys_type = next(iter(clss)).name()

    # create instance of build system
    cls_ = plugin_manager.get_plugin_class('build_system', buildsys_type)
    return cls_(working_dir,
                opts=opts,
                package=package,
                write_build_scripts=write_build_scripts,
                verbose=verbose,
                build_args=build_args,
                child_build_args=child_build_args)
Implement this method to perform the actual build. Args: context: A ResolvedContext object that the build process must be executed within. variant (`Variant`): The variant being built. build_path: Where to write temporary build files. May be relative to working_dir. install_path (str): The package repository path to install the package to, if installing. If None, defaults to `config.local_packages_path`. install: If True, install the build. build_type: A BuildType (i.e local or central). Returns: A dict containing the following information: - success: Bool indicating if the build was successful. - extra_files: List of created files of interest, not including build targets. A good example is the interpreted context file, usually named 'build.rxt.sh' or similar. These files should be located under build_path. Rez may install them for debugging purposes. - build_env_script: If this instance was created with write_build_scripts as True, then the build should generate a script which, when run by the user, places them in the build environment.
def build(self, context, variant, build_path, install_path, install=False,
          build_type=BuildType.local):
    """Implement this method to perform the actual build.

    Args:
        context: A ResolvedContext object that the build process must be
            executed within.
        variant (`Variant`): The variant being built.
        build_path: Where to write temporary build files. May be relative
            to working_dir.
        install_path (str): The package repository path to install the
            package to, if installing. If None, defaults to
            `config.local_packages_path`.
        install: If True, install the build.
        build_type: A BuildType (i.e local or central).

    Returns:
        A dict containing the following information:
        - success: Bool indicating if the build was successful.
        - extra_files: List of created files of interest, not including
            build targets. A good example is the interpreted context file,
            usually named 'build.rxt.sh' or similar. These files should be
            located under build_path. Rez may install them for debugging
            purposes.
        - build_env_script: If this instance was created with
            write_build_scripts as True, then the build should generate a
            script which, when run by the user, places them in the build
            environment.
    """
    # Abstract: concrete build systems must override this.
    raise NotImplementedError
Returns a standard set of environment variables that can be set for the build system to use
def get_standard_vars(cls, context, variant, build_type, install, build_path,
                      install_path=None):
    """Returns a standard set of environment variables that can be set for the
    build system to use
    """
    from rez.config import config

    package = variant.parent
    variant_requires = map(str, variant.variant_requires)

    # Variants with no index (non-varianted packages) install at the root.
    if variant.index is None:
        variant_subpath = ''
    else:
        variant_subpath = variant._non_shortlinked_subpath

    vars_ = {
        'REZ_BUILD_ENV': 1,
        'REZ_BUILD_PATH': build_path,
        'REZ_BUILD_THREAD_COUNT': package.config.build_thread_count,
        'REZ_BUILD_VARIANT_INDEX': variant.index or 0,
        'REZ_BUILD_VARIANT_REQUIRES': ' '.join(variant_requires),
        'REZ_BUILD_VARIANT_SUBPATH': variant_subpath,
        'REZ_BUILD_PROJECT_VERSION': str(package.version),
        'REZ_BUILD_PROJECT_NAME': package.name,
        'REZ_BUILD_PROJECT_DESCRIPTION': (package.description or '').strip(),
        'REZ_BUILD_PROJECT_FILE': package.filepath,
        'REZ_BUILD_SOURCE_PATH': os.path.dirname(package.filepath),
        'REZ_BUILD_REQUIRES': ' '.join(
            str(x) for x in context.requested_packages(True)
        ),
        'REZ_BUILD_REQUIRES_UNVERSIONED': ' '.join(
            x.name for x in context.requested_packages(True)
        ),
        'REZ_BUILD_TYPE': build_type.name,
        'REZ_BUILD_INSTALL': 1 if install else 0,
    }

    if install_path:
        vars_['REZ_BUILD_INSTALL_PATH'] = install_path

    # Legacy rez-1 compatibility variable, only for central (release) builds.
    if config.rez_1_environment_variables and \
            not config.disable_rez_1_compatibility and \
            build_type == BuildType.central:
        vars_['REZ_IN_REZ_RELEASE'] = 1

    return vars_
Sets a standard set of environment variables for the build system to use
def set_standard_vars(cls, executor, context, variant, build_type, install,
                      build_path, install_path=None):
    """Sets a standard set of environment variables for the build system to use
    """
    standard_vars = cls.get_standard_vars(context=context,
                                          variant=variant,
                                          build_type=build_type,
                                          install=install,
                                          build_path=build_path,
                                          install_path=install_path)

    # Apply each var through the executor so the target shell sees them.
    for key, value in standard_vars.iteritems():
        executor.env[key] = value
Run a pip command. Args: command_args (list of str): Args to pip. Returns: `subprocess.Popen`: Pip process.
def run_pip_command(command_args, pip_version=None, python_version=None):
    """Run a pip command.

    Args:
        command_args (list of str): Args to pip.

    Returns:
        `subprocess.Popen`: Pip process.
    """
    pip_exe, context = find_pip(pip_version, python_version)
    full_command = [pip_exe] + list(command_args)

    if context is None:
        # System pip fallback: run directly.
        return popen(full_command)
    # Run inside the resolved rez context containing pip.
    return context.execute_shell(command=full_command, block=False)
Find a pip exe using the given python version. Returns: 2-tuple: str: pip executable; `ResolvedContext`: Context containing pip, or None if we fell back to system pip.
def find_pip(pip_version=None, python_version=None):
    """Find a pip exe using the given python version.

    Args:
        pip_version (str or `Version`): Version of pip to use, or latest
            if None.
        python_version (str or `Version`): Python version to use, or latest
            if None.

    Returns:
        2-tuple:
            str: pip executable;
            `ResolvedContext`: Context containing pip, or None if we fell
            back to system pip.

    Raises:
        BuildError: If no pip rez package and no system pip is available.
    """
    pip_exe = "pip"

    try:
        context = create_context(pip_version, python_version)
    except BuildError:
        # fall back on system pip. Not ideal but at least it's something
        from rez.backport.shutilwhich import which
        pip_exe = which("pip")

        if pip_exe:
            print_warning(
                "pip rez package could not be found; system 'pip' command (%s) "
                "will be used instead." % pip_exe)
            context = None
        else:
            # Fix: bare `raise` re-raises with the original traceback;
            # `raise e` (the previous form) resets it in python 2.
            raise

    return pip_exe, context
Create a context containing the specific pip and python. Args: pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: `ResolvedContext`: Context containing pip and python.
def create_context(pip_version=None, python_version=None): """Create a context containing the specific pip and python. Args: pip_version (str or `Version`): Version of pip to use, or latest if None. python_version (str or `Version`): Python version to use, or latest if None. Returns: `ResolvedContext`: Context containing pip and python. """ # determine pip pkg to use for install, and python variants to install on if pip_version: pip_req = "pip-%s" % str(pip_version) else: pip_req = "pip" if python_version: ver = Version(str(python_version)) major_minor_ver = ver.trim(2) py_req = "python-%s" % str(major_minor_ver) else: # use latest major.minor package = get_latest_package("python") if package: major_minor_ver = package.version.trim(2) else: # no python package. We're gonna fail, let's just choose current # python version (and fail at context creation time) major_minor_ver = '.'.join(map(str, sys.version_info[:2])) py_req = "python-%s" % str(major_minor_ver) # use pip + latest python to perform pip download operations request = [pip_req, py_req] with convert_errors(from_=(PackageFamilyNotFoundError, PackageNotFoundError), to=BuildError, msg="Cannot run - pip or python rez " "package is not present"): context = ResolvedContext(request) # print pip package used to perform the install pip_variant = context.get_resolved_package("pip") pip_package = pip_variant.parent print_info("Using %s (%s)" % (pip_package.qualified_name, pip_variant.uri)) return context
Install a pip-compatible python package as a rez package. Args: source_name (str): Name of package or archive/url containing the pip package source. This is the same as the arg you would pass to the 'pip install' command. pip_version (str or `Version`): Version of pip to use to perform the install, uses latest if None. python_version (str or `Version`): Python version to use to perform the install, and subsequently have the resulting rez package depend on. mode (`InstallMode`): Installation mode, determines how dependencies are managed. release (bool): If True, install as a released package; otherwise, it will be installed as a local package. Returns: 2-tuple: List of `Variant`: Installed variants; List of `Variant`: Skipped variants (already installed).
def pip_install_package(source_name, pip_version=None, python_version=None,
                        mode=InstallMode.min_deps, release=False):
    """Install a pip-compatible python package as a rez package.

    Args:
        source_name (str): Name of package or archive/url containing the pip
            package source. This is the same as the arg you would pass to
            the 'pip install' command.
        pip_version (str or `Version`): Version of pip to use to perform the
            install, uses latest if None.
        python_version (str or `Version`): Python version to use to perform the
            install, and subsequently have the resulting rez package depend on.
        mode (`InstallMode`): Installation mode, determines how dependencies are
            managed.
        release (bool): If True, install as a released package; otherwise, it
            will be installed as a local package.

    Returns:
        2-tuple:
            List of `Variant`: Installed variants;
            List of `Variant`: Skipped variants (already installed).
    """
    installed_variants = []
    skipped_variants = []

    # pip_exe is the pip executable to run; context is the rez env it runs
    # in, or None (the code below treats None as "system pip was used").
    pip_exe, context = find_pip(pip_version, python_version)

    # TODO: should check if packages_path is writable before continuing with pip
    #
    packages_path = (config.release_packages_path if release
                     else config.local_packages_path)

    # Staging area: pip installs into tmpdir/rez_staging/*, and files are
    # later copied from there into each rez variant payload.
    tmpdir = mkdtemp(suffix="-rez", prefix="pip-")
    stagingdir = os.path.join(tmpdir, "rez_staging")
    stagingsep = "".join([os.path.sep, "rez_staging", os.path.sep])

    destpath = os.path.join(stagingdir, "python")
    binpath = os.path.join(stagingdir, "bin")
    incpath = os.path.join(stagingdir, "include")
    datapath = stagingdir

    if context and config.debug("package_release"):
        buf = StringIO()
        print >> buf, "\n\npackage download environment:"
        context.print_info(buf)
        _log(buf.getvalue())

    # Build pip commandline
    cmd = [pip_exe, "install",
           "--install-option=--install-lib=%s" % destpath,
           "--install-option=--install-scripts=%s" % binpath,
           "--install-option=--install-headers=%s" % incpath,
           "--install-option=--install-data=%s" % datapath]

    if mode == InstallMode.no_deps:
        cmd.append("--no-deps")
    cmd.append(source_name)

    _cmd(context=context, command=cmd)
    _system = System()

    # Collect resulting python packages using distlib
    distribution_path = DistributionPath([destpath], include_egg=True)
    distributions = [d for d in distribution_path.get_distributions()]

    for distribution in distribution_path.get_distributions():
        requirements = []
        if distribution.metadata.run_requires:
            # Handle requirements. Currently handles conditional environment based
            # requirements and normal requirements
            # TODO: Handle optional requirements?
            for requirement in distribution.metadata.run_requires:
                if "environment" in requirement:
                    # conditional dep: keep it only if its environment
                    # marker evaluates true for the current environment
                    if interpret(requirement["environment"]):
                        requirements.extend(_get_dependencies(requirement, distributions))
                elif "extra" in requirement:
                    # Currently ignoring optional requirements
                    pass
                else:
                    requirements.extend(_get_dependencies(requirement, distributions))

        tools = []
        src_dst_lut = {}
        # Map each staged source file to its destination path (relative to
        # the variant root) plus an "is executable tool" flag.
        for installed_file in distribution.list_installed_files(allow_fail=True):
            source_file = os.path.normpath(os.path.join(destpath, installed_file[0]))

            if os.path.exists(source_file):
                destination_file = installed_file[0].split(stagingsep)[1]
                exe = False

                if is_exe(source_file) and \
                        destination_file.startswith("%s%s" % ("bin", os.path.sep)):
                    _, _file = os.path.split(destination_file)
                    tools.append(_file)
                    exe = True

                data = [destination_file, exe]
                src_dst_lut[source_file] = data
            else:
                _log("Source file does not exist: " + source_file + "!")

        def make_root(variant, path):
            """Using distlib to iterate over all installed files of the current
            distribution to copy files to the target directory of the rez package
            variant
            """
            for source_file, data in src_dst_lut.items():
                destination_file, exe = data
                destination_file = os.path.normpath(os.path.join(path, destination_file))

                if not os.path.exists(os.path.dirname(destination_file)):
                    os.makedirs(os.path.dirname(destination_file))

                shutil.copyfile(source_file, destination_file)
                if exe:
                    # carry over permission bits for executables
                    shutil.copystat(source_file, destination_file)

        # determine variant requirements
        # TODO detect if platform/arch/os necessary, no if pure python
        variant_reqs = []
        variant_reqs.append("platform-%s" % _system.platform)
        variant_reqs.append("arch-%s" % _system.arch)
        variant_reqs.append("os-%s" % _system.os)

        if context is None:
            # since we had to use system pip, we have to assume system python version
            py_ver = '.'.join(map(str, sys.version_info[:2]))
        else:
            python_variant = context.get_resolved_package("python")
            py_ver = python_variant.version.trim(2)
        variant_reqs.append("python-%s" % py_ver)

        # NOTE(review): truncates distribution.name to the parsed name's
        # length and maps '-' to '_' - presumably to produce a valid rez
        # package name; confirm against parse_name_and_version behaviour.
        name, _ = parse_name_and_version(distribution.name_and_version)
        name = distribution.name[0:len(name)].replace("-", "_")

        with make_package(name, packages_path, make_root=make_root) as pkg:
            pkg.version = distribution.version
            if distribution.metadata.summary:
                pkg.description = distribution.metadata.summary

            pkg.variants = [variant_reqs]
            if requirements:
                pkg.requires = requirements

            commands = []
            commands.append("env.PYTHONPATH.append('{root}/python')")

            if tools:
                pkg.tools = tools
                commands.append("env.PATH.append('{root}/bin')")

            pkg.commands = '\n'.join(commands)

        installed_variants.extend(pkg.installed_variants or [])
        skipped_variants.extend(pkg.skipped_variants or [])

    # cleanup
    shutil.rmtree(tmpdir)

    return installed_variants, skipped_variants
Convert a variant handle from serialize_version < 4.0.
def convert_old_variant_handle(handle_dict):
    """Convert a variant handle from serialize_version < 4.0.

    Args:
        handle_dict (dict): Old-style handle; must contain "path", and may
            contain a "variables" dict.

    Returns:
        dict: New-style handle with "key" and "variables" entries.
    """
    old_variables = handle_dict.get("variables", {})
    variables = dict(repository_type="filesystem")

    # Rename old variable keys to their new names.
    for old_key, key in variant_key_conversions.iteritems():
        value = old_variables.get(old_key)
        #if value is not None:
        # NOTE(review): the guard above is commented out, so keys absent from
        # the old handle are carried over with value None - confirm this is
        # intended by the new handle schema.
        variables[key] = value

    # The resource key depends on whether the path points at a
    # "package.*" definition file or not.
    path = handle_dict["path"]
    filename = os.path.basename(path)
    if os.path.splitext(filename)[0] == "package":
        key = "filesystem.variant"
    else:
        key = "filesystem.variant.combined"

    return dict(key=key, variables=variables)
Convert expansions from !OLD! style to {new}.
def convert_old_command_expansions(command):
    """Convert expansions from !OLD! style to {new}."""
    # Each old-style token and its rex-style replacement, applied in order.
    conversions = (
        ("!VERSION!", "{version}"),
        ("!MAJOR_VERSION!", "{version.major}"),
        ("!MINOR_VERSION!", "{version.minor}"),
        ("!BASE!", "{base}"),
        ("!ROOT!", "{root}"),
        ("!USER!", "{system.user}"),
    )
    for old_token, new_token in conversions:
        command = command.replace(old_token, new_token)
    return command
Converts old-style package commands into equivalent Rex code.
def convert_old_commands(commands, annotate=True):
    """Converts old-style package commands into equivalent Rex code.

    Args:
        commands (list of str): Old-style (bash-like) command lines.
        annotate (bool): If True, each converted command is preceded by a
            rex comment() echoing the original command.

    Returns:
        str: Rex code, one statement per line.
    """
    from rez.config import config
    from rez.utils.logging_ import print_debug

    def _repl(s):
        return s.replace('\\"', '"')

    def _encode(s):
        # this replaces all occurrances of '\"' with '"', *except* for those
        # occurrances of '\"' that are within double quotes, which themselves
        # are not escaped. In other words, the string:
        # 'hey "there \"you\" " \"dude\" '
        # ..will convert to:
        # 'hey "there \"you\" " "dude" '
        s_new = ''
        prev_i = 0

        for m in within_unescaped_quotes_regex.finditer(s):
            s_ = s[prev_i:m.start()]
            s_new += _repl(s_)

            s_new += s[m.start():m.end()]
            prev_i = m.end()

        s_ = s[prev_i:]
        s_new += _repl(s_)
        return repr(s_new)

    loc = []  # accumulated rex lines of code

    for cmd in commands:
        if annotate:
            txt = "OLD COMMAND: %s" % cmd
            line = "comment(%s)" % _encode(txt)
            loc.append(line)

        cmd = convert_old_command_expansions(cmd)
        toks = cmd.strip().split()

        try:
            if toks[0] == "export":
                # e.g. "export FOO=bar" -> var="FOO", value="bar"
                var, value = cmd.split(' ', 1)[1].split('=', 1)
                for bookend in ('"', "'"):
                    if value.startswith(bookend) and value.endswith(bookend):
                        value = value[1:-1]
                        break

                # As the only old-style commands were Linux/Bash based,
                # we assume using the default separator ":" is ok - we don't
                # need to use os.pathsep as we don't expected to see a
                # Windows path here.
                separator = config.env_var_separators.get(var, ":")

                # This is a special case.  We don't want to include "';'" in
                # our env var separators map as it's not really the correct
                # behaviour/something we want to promote.  It's included here for
                # backwards compatibility only, and to not propogate elsewhere.
                if var == "CMAKE_MODULE_PATH":
                    value = value.replace("'%s'" % separator, separator)
                    value = value.replace('"%s"' % separator, separator)
                    value = value.replace(":", separator)

                parts = value.split(separator)
                parts = [x for x in parts if x]
                if len(parts) > 1:
                    idx = None
                    var1 = "$%s" % var
                    var2 = "${%s}" % var
                    # A self-reference at either end means the command was an
                    # append (ref first) or prepend (ref last).
                    if var1 in parts:
                        idx = parts.index(var1)
                    elif var2 in parts:
                        idx = parts.index(var2)
                    if idx in (0, len(parts) - 1):
                        func = "appendenv" if idx == 0 else "prependenv"
                        parts = parts[1:] if idx == 0 else parts[:-1]
                        val = separator.join(parts)
                        loc.append("%s('%s', %s)" % (func, var, _encode(val)))
                        continue

                loc.append("setenv('%s', %s)" % (var, _encode(value)))
            elif toks[0].startswith('#'):
                loc.append("comment(%s)" % _encode(' '.join(toks[1:])))
            elif toks[0] == "alias":
                match = re.search("alias (?P<key>.*?)=(?P<value>.*)", cmd)
                key = match.groupdict()['key'].strip()
                value = match.groupdict()['value'].strip()
                if (value.startswith('"') and value.endswith('"')) or \
                        (value.startswith("'") and value.endswith("'")):
                    value = value[1:-1]
                loc.append("alias('%s', %s)" % (key, _encode(value)))
            else:
                # assume we can execute this as a straight command
                loc.append("command(%s)" % _encode(cmd))
        except:
            # if anything goes wrong, just fall back to bash command
            loc.append("command(%s)" % _encode(cmd))

    rex_code = '\n'.join(loc)
    if config.debug("old_commands"):
        br = '-' * 80
        msg = textwrap.dedent(
            """
            %s
            OLD COMMANDS:
            %s

            NEW COMMANDS:
            %s
            %s
            """) % (br, '\n'.join(commands), rex_code, br)
        print_debug(msg)
    return rex_code
Build a dictionary mapping each pair of nodes to a number (the distance between them). @type graph: graph @param graph: Graph.
def optimize(self, graph):
    """
    Build a dictionary mapping each pair of nodes to a number (the
    distance between them).

    @type  graph: graph
    @param graph: Graph.
    """
    # For every center, record its shortest-path weight to each
    # reachable node.
    for hub in self.centers:
        distances = shortest_path(graph, hub)[1]
        for target, weight in list(distances.items()):
            self.nodes.setdefault(target, []).append(weight)
Converts a pkg_resources.Requirement object into a list of Rez package request strings.
def convert_requirement(req):
    """
    Converts a pkg_resources.Requirement object into a list of Rez package
    request strings.
    """
    pkg_name = convert_name(req.project_name)
    if not req.specs:
        # unversioned dependency
        return [pkg_name]

    req_strs = []
    # Translate each (operator, version) spec into rez request syntax.
    for op, ver in req.specs:
        ver = convert_version(ver)
        if op == "<":
            req_strs.append("%s-0+<%s" % (pkg_name, ver))
        elif op == "<=":
            req_strs.append("%s-0+<%s|%s" % (pkg_name, ver, ver))
        elif op == "==":
            req_strs.append("%s-%s" % (pkg_name, ver))
        elif op == ">=":
            req_strs.append("%s-%s+" % (pkg_name, ver))
        elif op == ">":
            # at-or-above, minus the version itself
            req_strs.append("%s-%s+" % (pkg_name, ver))
            req_strs.append("!%s-%s" % (pkg_name, ver))
        elif op == "!=":
            req_strs.append("!%s-%s" % (pkg_name, ver))
        else:
            print >> sys.stderr, \
                "Warning: Can't understand op '%s', just depending on unversioned package..." % op
            req_strs.append(pkg_name)

    return req_strs
Get the dependencies of the given, already installed distribution. @param recurse If True, recursively find all dependencies. @returns A set of package names. @note The first entry in the list is always the top-level package itself.
def get_dist_dependencies(name, recurse=True):
    """
    Get the dependencies of the given, already installed distribution.
    @param recurse If True, recursively find all dependencies.
    @returns A set of package names.
    @note The first entry in the list is always the top-level package itself.
    """
    dist = pkg_resources.get_distribution(name)
    pkg_name = convert_name(dist.project_name)

    reqs = set()            # all package names found so far
    working = set([dist])   # frontier: seed Distribution, then name strings
    depth = 0

    # Breadth-first walk over the dependency graph.
    while working:
        deps = set()
        for distname in working:
            # get_distribution accepts either a Distribution or a name string
            dist = pkg_resources.get_distribution(distname)
            pkg_name = convert_name(dist.project_name)
            reqs.add(pkg_name)

            for req in dist.requires():
                reqs_ = convert_requirement(req)
                # strip rez version suffixes and conflict ('!') requests
                deps |= set(x.split('-', 1)[0] for x in reqs_
                            if not x.startswith('!'))

        working = deps - reqs
        depth += 1

        # Non-recursive mode stops after the package itself plus its
        # direct dependencies (two BFS levels).
        if (not recurse) and (depth >= 2):
            break

    return reqs
Convert an already installed python distribution into a rez package.

Args:
    dest_path (str): Where to put the rez package. The package will be
        created under dest_path/<NAME>/<VERSION>/.
    make_variant (bool): If True, makes a single variant in the rez package
        based on the MAJOR.MINOR version of python.
    ignore_dirs (list of str): List of directory names to not copy from the dist.
    python_requirement (str): How the package should depend on python.
        One of:
        - "major": depend on python-X
        - "major_minor": depend on python-X.X
        - any other value: this string is used as the literal version
          range string.

Returns:
    Install path of the new Rez package.
def convert_dist(name, dest_path, make_variant=True, ignore_dirs=None,
                 python_requirement="major_minor"):
    """Convert an already installed python distribution into a rez package.

    Args:
        dest_path (str): Where to put the rez package. The package will be
            created under dest_path/<NAME>/<VERSION>/.
        make_variant (bool): If True, makes a single variant in the rez package
            based on the MAJOR.MINOR version of python.
        ignore_dirs (list of str): List of directory names to not copy from
            the dist.
        python_requirement (str): How the package should depend on python.
            One of:
            - "major": depend on python-X
            - "major_minor": depend on python-X.X
            - any other value: this string is used as the literal version
              range string.

    Returns:
        Install path of the new Rez package.
    """
    dist = pkg_resources.get_distribution(name)
    pkg_name = convert_name(dist.project_name)
    pkg_version = convert_version(dist.version)

    # Work out the python dependency string.
    if python_requirement == "major":
        pyver = str(sys.version_info[0])
    elif python_requirement == "major_minor":
        pyver = '.'.join(str(x) for x in sys.version_info[:2])
    else:
        pyver = python_requirement
    pypkg = "python-%s" % pyver

    pkg_requires = []
    if not make_variant:
        # python becomes a plain requirement instead of a variant axis
        pkg_requires.append(pypkg)
    for req in dist.requires():
        pkg_requires += convert_requirement(req)

    pkg_path = _mkdirs(dest_path, pkg_name, pkg_version)
    pkg_file = os.path.join(pkg_path, "package.py")
    root_path = _mkdirs(pkg_path, pypkg) if make_variant else pkg_path

    basename = os.path.basename(dist.location)
    is_egg = (os.path.splitext(basename)[1] == ".egg")

    if os.path.isdir(dist.location):
        if is_egg:
            # this is an egg-dir
            for file in os.listdir(dist.location):
                fpath = os.path.join(dist.location, file)
                if os.path.isfile(fpath):
                    shutil.copy(fpath, root_path)
                else:
                    # NOTE(review): ignore_patterns takes *patterns varargs;
                    # passing the ignore_dirs list (or None) as one argument
                    # looks wrong - confirm and consider
                    # ignore_patterns(*(ignore_dirs or [])).
                    shutil.copytree(fpath, os.path.join(root_path, file),
                                    ignore=shutil.ignore_patterns(ignore_dirs))
        else:
            # this is a site dir
            egginfo_dir = "%s.egg-info" % dist.egg_name()
            eggpath = os.path.join(dist.location, egginfo_dir)
            file = os.path.join(eggpath, "installed-files.txt")
            if not os.path.isfile(file):
                raise RezSystemError(
                    "There is not enough information on disk to convert the "
                    "python distribution '%s' into a Rez package. The distribution "
                    "is installed to a common site, but the installed file "
                    "information is not present." % name)

            with open(file) as f:
                installed_files = f.read().strip().split()

            # Collect the real files/dirs belonging to this dist, skipping
            # anything outside dist.location or under an ignored dir.
            dirs = set()
            files = set()
            for file in installed_files:
                path = os.path.join(eggpath, file)
                path = os.path.realpath(path)
                if os.path.isfile(path) and path.startswith(dist.location + os.sep):
                    dir_ = os.path.dirname(path)
                    if ignore_dirs:
                        reldir = os.path.relpath(dir_, dist.location)
                        if set(reldir.split(os.sep)) & set(ignore_dirs):
                            continue
                    files.add(path)
                    dirs.add(dir_)

            def _dst(p):
                # map a source path to its location under root_path
                dst = os.path.relpath(p, dist.location)
                dst = os.path.join(root_path, dst)
                return os.path.realpath(dst)

            for dir_ in dirs:
                dst_dir = _dst(dir_)
                _mkdirs(dst_dir)

            for file in files:
                dst_file = _dst(file)
                shutil.copy(file, dst_file)
    else:
        # this is an egg-file
        import zipfile
        assert(is_egg and os.path.isfile(dist.location))
        assert(zipfile.is_zipfile(dist.location))
        z = zipfile.ZipFile(dist.location)
        z.extractall(root_path)

    # Write the package.py definition.
    variants_str = "[['%s']]" % pypkg if make_variant else ''
    content = textwrap.dedent(
        """
        config_version = 0
        name = '%(name)s'
        version = '%(version)s'
        %(variants)s
        requires = %(requires)s
        def commands():
            env.PYTHONPATH.append('{this.root}')
        """ % dict(
            name=pkg_name,
            version=pkg_version,
            variants=variants_str,
            requires=str(pkg_requires)))

    content = content.strip() + '\n'
    with open(pkg_file, 'w') as f:
        f.write(content)

    return pkg_path
Translate a shell-like wildcard pattern to a compiled regular expression. Return the compiled regex. If 'is_regex' true, then 'pattern' is directly compiled to a regex (if it's a string) or just returned as-is (assumes it's a regex object).
def _translate_pattern(self, pattern, anchor=True, prefix=None,
                       is_regex=False):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.

    Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    # Regex mode: compile strings, pass precompiled patterns through.
    if is_regex:
        return re.compile(pattern) if isinstance(pattern, str) else pattern

    regex_body = self._glob_to_re(pattern) if pattern else ''

    escaped_base = re.escape(os.path.join(self.base, ''))
    if prefix is not None:
        # ditch end of pattern character
        eop = self._glob_to_re('')
        prefix_body = self._glob_to_re(prefix)[:-len(eop)]
        sep = r'\\' if os.sep == '\\' else os.sep
        regex_body = '^' + escaped_base + sep.join((prefix_body,
                                                    '.*' + regex_body))
    elif anchor:
        # no prefix -- respect anchor flag
        regex_body = '^' + escaped_base + regex_body

    return re.compile(regex_body)
Add other graph to this graph. @attention: Attributes and labels are not preserved. @type other: graph @param other: Graph
def add_graph(self, other):
    """
    Add other graph to this graph.

    @attention: Attributes and labels are not preserved.

    @type  other: graph
    @param other: Graph
    """
    # Bring in any nodes we don't already have.
    self.add_nodes(node for node in other.nodes()
                   if node not in self.nodes())

    # Copy every edge that isn't already present.
    for node in other.nodes():
        for neighbor in other.neighbors(node):
            edge = (node, neighbor)
            if not self.has_edge(edge):
                self.add_edge(edge)
Add a spanning tree to the graph. @type st: dictionary @param st: Spanning tree.
def add_spanning_tree(self, st):
    """
    Add a spanning tree to the graph.

    @type  st: dictionary
    @param st: Spanning tree.
    """
    self.add_nodes(list(st.keys()))
    # Each entry maps a node to its parent; the root maps to None.
    for child in st:
        parent = st[child]
        if parent is not None:
            self.add_edge((parent, child))
Make the graph a complete graph. @attention: This will modify the current graph.
def complete(self):
    """
    Make the graph a complete graph.

    @attention: This will modify the current graph.
    """
    # Connect every distinct pair of nodes not already joined by an edge.
    for u in self.nodes():
        for v in self.nodes():
            if u == v:
                continue
            if not self.has_edge((u, v)):
                self.add_edge((u, v))
Return the inverse of the graph. @rtype: graph @return: Complement graph for the graph.
def inverse(self):
    """
    Return the inverse of the graph.

    @rtype:  graph
    @return: Complement graph for the graph.
    """
    # Start from the complete graph over the same nodes, then remove
    # every edge the original graph has.
    complement = self.__class__()
    complement.add_nodes(self.nodes())
    complement.complete()
    for edge in self.edges():
        if complement.has_edge(edge):
            complement.del_edge(edge)
    return complement
Generate the reverse of a directed graph, returns an identical graph if not directed. Attributes & weights are preserved. @rtype: digraph @return: The directed graph that should be reversed.
def reverse(self):
    """
    Generate the reverse of a directed graph, returns an identical graph
    if not directed.  Attributes & weights are preserved.

    @rtype: digraph
    @return: The directed graph that should be reversed.
    """
    assert self.DIRECTED, "Undirected graph types such as %s cannot be reversed" % self.__class__.__name__

    flipped = self.__class__()

    # Same node set, every edge turned around with its metadata intact.
    flipped.add_nodes(n for n in self.nodes())
    for (u, v) in self.edges():
        weight = self.edge_weight((u, v))
        label = self.edge_label((u, v))
        attrs = self.edge_attributes((u, v))
        flipped.add_edge((v, u), weight, label, attrs)

    return flipped
Given a package and patch lock, return the equivalent request. For example, for object 'foo-1.2.1' and lock type 'lock_3', the equivalent request is '~foo-1.2'. This restricts updates to foo to patch-or-lower version changes only. For objects not versioned down to a given lock level, the closest possible lock is applied. So 'lock_3' applied to 'foo-1' would give '~foo-1'. Args: name (str): Package name. version (Version): Package version. patch_lock (PatchLock): Lock type to apply. Returns: `PackageRequest` object, or None if there is no equivalent request.
def get_lock_request(name, version, patch_lock, weak=True):
    """Given a package and patch lock, return the equivalent request.

    For example, for object 'foo-1.2.1' and lock type 'lock_3', the equivalent
    request is '~foo-1.2'. This restricts updates to foo to patch-or-lower
    version changes only.

    For objects not versioned down to a given lock level, the closest possible
    lock is applied. So 'lock_3' applied to 'foo-1' would give '~foo-1'.

    Args:
        name (str): Package name.
        version (Version): Package version.
        patch_lock (PatchLock): Lock type to apply.

    Returns:
        `PackageRequest` object, or None if there is no equivalent request.
    """
    prefix = '~' if weak else ''

    # A full lock pins the exact version.
    if patch_lock == PatchLock.lock:
        return PackageRequest("%s%s==%s" % (prefix, name, str(version)))

    # No lock requested, or no version to lock against.
    if (patch_lock == PatchLock.no_lock) or (not version):
        return None

    # Trim the version to the lock's rank (or as deep as it goes).
    trimmed = version.trim(patch_lock.rank)
    return PackageRequest("%s%s-%s" % (prefix, name, str(trimmed)))
Get packages in the request. Args: include_implicit (bool): If True, implicit packages are appended to the result. Returns: List of `PackageRequest` objects.
def requested_packages(self, include_implicit=False):
    """Get packages in the request.

    Args:
        include_implicit (bool): If True, implicit packages are appended
            to the result.

    Returns:
        List of `PackageRequest` objects.
    """
    if not include_implicit:
        return self._package_requests
    return self._package_requests + self.implicit_packages
Returns a `Variant` object or None if the package is not in the resolve.
def get_resolved_package(self, name):
    """Returns a `Variant` object or None if the package is not in the
    resolve.
    """
    # Return the first resolved variant whose package name matches.
    for variant in self._resolved_packages:
        if variant.name == name:
            return variant
    return None
Get a 'patched' request. A patched request is a copy of this context's request, but with some changes applied. This can then be used to create a new, 'patched' context. New package requests override original requests based on the type - normal, conflict or weak. So 'foo-2' overrides 'foo-1', '!foo-2' overrides '!foo-1' and '~foo-2' overrides '~foo-1', but a request such as '!foo-2' would not replace 'foo-1' - it would be added instead. Note that requests in `package_requests` can have the form '^foo'. This is another way of supplying package subtractions. Any new requests that don't override original requests are appended, in the order that they appear in `package_requests`. Args: package_requests (list of str or list of `PackageRequest`): Overriding requests. package_subtractions (list of str): Any original request with a package name in this list is removed, before the new requests are added. strict (bool): If True, the current context's resolve is used as the original request list, rather than the request. rank (int): If > 1, package versions can only increase in this rank and further - for example, rank=3 means that only version patch numbers are allowed to increase, major and minor versions will not change. This is only applied to packages that have not been explicitly overridden in `package_requests`. If rank <= 1, or `strict` is True, rank is ignored. Returns: List of `PackageRequest` objects that can be used to construct a new `ResolvedContext` object.
def get_patched_request(self, package_requests=None, package_subtractions=None, strict=False, rank=0): """Get a 'patched' request. A patched request is a copy of this context's request, but with some changes applied. This can then be used to create a new, 'patched' context. New package requests override original requests based on the type - normal, conflict or weak. So 'foo-2' overrides 'foo-1', '!foo-2' overrides '!foo-1' and '~foo-2' overrides '~foo-1', but a request such as '!foo-2' would not replace 'foo-1' - it would be added instead. Note that requests in `package_requests` can have the form '^foo'. This is another way of supplying package subtractions. Any new requests that don't override original requests are appended, in the order that they appear in `package_requests`. Args: package_requests (list of str or list of `PackageRequest`): Overriding requests. package_subtractions (list of str): Any original request with a package name in this list is removed, before the new requests are added. strict (bool): If True, the current context's resolve is used as the original request list, rather than the request. rank (int): If > 1, package versions can only increase in this rank and further - for example, rank=3 means that only version patch numbers are allowed to increase, major and minor versions will not change. This is only applied to packages that have not been explicitly overridden in `package_requests`. If rank <= 1, or `strict` is True, rank is ignored. Returns: List of `PackageRequest` objects that can be used to construct a new `ResolvedContext` object. 
""" # assemble source request if strict: request = [] for variant in self.resolved_packages: req = PackageRequest(variant.qualified_package_name) request.append(req) else: request = self.requested_packages()[:] # convert '^foo'-style requests to subtractions if package_requests: package_subtractions = package_subtractions or [] indexes = [] for i, req in enumerate(package_requests): name = str(req) if name.startswith('^'): package_subtractions.append(name[1:]) indexes.append(i) for i in reversed(indexes): del package_requests[i] # apply subtractions if package_subtractions: request = [x for x in request if x.name not in package_subtractions] # apply overrides if package_requests: request_dict = dict((x.name, (i, x)) for i, x in enumerate(request)) request_ = [] for req in package_requests: if isinstance(req, basestring): req = PackageRequest(req) if req.name in request_dict: i, req_ = request_dict[req.name] if (req_ is not None) and (req_.conflict == req.conflict) \ and (req_.weak == req.weak): request[i] = req del request_dict[req.name] else: request_.append(req) else: request_.append(req) request += request_ # add rank limiters if not strict and rank > 1: overrides = set(x.name for x in package_requests if not x.conflict) rank_limiters = [] for variant in self.resolved_packages: if variant.name not in overrides: if len(variant.version) >= rank: version = variant.version.trim(rank - 1) version = version.next() req = "~%s<%s" % (variant.name, str(version)) rank_limiters.append(req) request += rank_limiters return request
Get the resolve graph. Args: as_dot: If True, get the graph as a dot-language string. Otherwise, a pygraph.digraph object is returned. Returns: A string or `pygraph.digraph` object, or None if there is no graph associated with the resolve.
def graph(self, as_dot=False):
    """Get the resolve graph.

    Args:
        as_dot: If True, get the graph as a dot-language string. Otherwise,
            a pygraph.digraph object is returned.

    Returns:
        A string or `pygraph.digraph` object, or None if there is no graph
        associated with the resolve.
    """
    if not self.has_graph:
        return None

    if not as_dot:
        # lazily parse and cache the graph object
        if self.graph_ is None:
            # reads either dot format or our compact format
            self.graph_ = read_graph_from_string(self.graph_string)
        return self.graph_

    # as_dot=True from here down.
    if self.graph_string:
        if self.graph_string.startswith('{'):  # compact format
            self.graph_ = read_graph_from_string(self.graph_string)
        else:
            # already in dot format. Note that this will only happen in
            # old rez contexts where the graph is not stored in the newer
            # compact format.
            return self.graph_string

    return write_dot(self.graph_)
Save the context to a buffer.
def write_to_buffer(self, buf):
    """Save the context to a buffer."""
    data = self.to_dict()

    # Serialize as yaml or json depending on configuration.
    if config.rxt_as_yaml:
        serialized = dump_yaml(data)
    else:
        serialized = json.dumps(data, indent=4,
                                separators=(",", ": "))

    buf.write(serialized)
Get the context for the current env, if there is one. Returns: `ResolvedContext`: Current context, or None if not in a resolved env.
def get_current(cls):
    """Get the context for the current env, if there is one.

    Returns:
        `ResolvedContext`: Current context, or None if not in a resolved
        env.
    """
    # A resolved env advertises its rxt file via this env-var.
    rxt_path = os.getenv("REZ_RXT_FILE")
    if rxt_path and os.path.exists(rxt_path):
        return cls.load(rxt_path)
    return None
Load a resolved context from file.
def load(cls, path):
    """Load a resolved context from file."""
    # Deserialize, then remember where the context came from.
    with open(path) as rxt_file:
        loaded = cls.read_from_buffer(rxt_file, path)
    loaded.set_load_path(path)
    return loaded
Load the context from a buffer.
def read_from_buffer(cls, buf, identifier_str=None):
    """Load the context from a buffer."""
    try:
        context = cls._read_from_buffer(buf, identifier_str)
    except Exception as exc:
        # Delegate failure reporting; falls through returning None.
        cls._load_error(exc, identifier_str)
    else:
        return context
Get the difference between the resolve in this context and another. The difference is described from the point of view of the current context - a newer package means that the package in `other` is newer than the package in `self`. Diffs can only be compared if their package search paths match, an error is raised otherwise. The diff is expressed in packages, not variants - the specific variant of a package is ignored. Returns: A dict containing: - 'newer_packages': A dict containing items: - package name (str); - List of `Package` objects. These are the packages up to and including the newer package in `self`, in ascending order. - 'older_packages': A dict containing: - package name (str); - List of `Package` objects. These are the packages down to and including the older package in `self`, in descending order. - 'added_packages': Set of `Package` objects present in `self` but not in `other`; - 'removed_packages': Set of `Package` objects present in `other`, but not in `self`. If any item ('added_packages' etc) is empty, it is not added to the resulting dict. Thus, an empty dict is returned if there is no difference between contexts.
def get_resolve_diff(self, other):
    """Get the difference between the resolve in this context and another.

    The difference is described from the point of view of the current context
    - a newer package means that the package in `other` is newer than the
    package in `self`.

    Diffs can only be compared if their package search paths match, an error
    is raised otherwise.

    The diff is expressed in packages, not variants - the specific variant
    of a package is ignored.

    Returns:
        A dict containing:
        - 'newer_packages': A dict containing items:
          - package name (str);
          - List of `Package` objects. These are the packages up to and
            including the newer package in `self`, in ascending order.
        - 'older_packages': A dict containing:
          - package name (str);
          - List of `Package` objects. These are the packages down to and
            including the older package in `self`, in descending order.
        - 'added_packages': Set of `Package` objects present in `self` but
           not in `other`;
        - 'removed_packages': Set of `Package` objects present in `other`,
           but not in `self`.

        If any item ('added_packages' etc) is empty, it is not added to the
        resulting dict. Thus, an empty dict is returned if there is no
        difference between contexts.
    """
    if self.package_paths != other.package_paths:
        from difflib import ndiff
        diff = ndiff(self.package_paths, other.package_paths)
        raise ResolvedContextError("Cannot diff resolves, package search "
                                   "paths differ:\n%s" % '\n'.join(diff))

    d = {}
    # Work on parent packages (variants of the same package compare equal).
    self_pkgs_ = set(x.parent for x in self._resolved_packages)
    other_pkgs_ = set(x.parent for x in other._resolved_packages)
    # Keep only packages that differ between the two resolves.
    self_pkgs = self_pkgs_ - other_pkgs_
    other_pkgs = other_pkgs_ - self_pkgs_

    if not (self_pkgs or other_pkgs):
        return d

    self_fams = dict((x.name, x) for x in self_pkgs)
    other_fams = dict((x.name, x) for x in other_pkgs)

    newer_packages = {}
    older_packages = {}
    added_packages = set()
    removed_packages = set()

    for pkg in self_pkgs:
        if pkg.name not in other_fams:
            # in self only
            removed_packages.add(pkg)
        else:
            other_pkg = other_fams[pkg.name]
            if other_pkg.version > pkg.version:
                # other is newer: list versions from ours up to theirs
                r = VersionRange.as_span(lower_version=pkg.version,
                                         upper_version=other_pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version)
                newer_packages[pkg.name] = pkgs
            elif other_pkg.version < pkg.version:
                # other is older: list versions from ours down to theirs
                r = VersionRange.as_span(lower_version=other_pkg.version,
                                         upper_version=pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version, reverse=True)
                older_packages[pkg.name] = pkgs

    for pkg in other_pkgs:
        if pkg.name not in self_fams:
            # in other only
            added_packages.add(pkg)

    # Only include non-empty entries.
    if newer_packages:
        d["newer_packages"] = newer_packages
    if older_packages:
        d["older_packages"] = older_packages
    if added_packages:
        d["added_packages"] = added_packages
    if removed_packages:
        d["removed_packages"] = removed_packages

    return d
Prints a message summarising the contents of the resolved context. Args: buf (file-like object): Where to print this info to. verbosity (bool): Verbose mode. source_order (bool): If True, print resolved packages in the order they are sourced, rather than alphabetical order. show_resolved_uris (bool): By default, resolved packages have their 'root' property listed, or their 'uri' if 'root' is None. Use this option to list 'uri' regardless.
def print_info(self, buf=sys.stdout, verbosity=0, source_order=False,
               show_resolved_uris=False):
    """Prints a message summarising the contents of the resolved context.

    Args:
        buf (file-like object): Where to print this info to.
        verbosity (int): Verbosity level; 0 is terse, 1 adds search-path
            and timing detail, 2+ also lists tools.
        source_order (bool): If True, print resolved packages in the order
            they are sourced, rather than alphabetical order.
        show_resolved_uris (bool): By default, resolved packages have their
            'root' property listed, or their 'uri' if 'root' is None. Use
            this option to list 'uri' regardless.
    """
    # printer that writes (optionally styled/colored) lines to `buf`
    _pr = Printer(buf)

    # format an epoch time; verbose mode also appends the raw epoch value
    def _rt(t):
        if verbosity:
            s = time.strftime("%a %b %d %H:%M:%S %Z %Y", time.localtime(t))
            return s + " (%d)" % int(t)
        else:
            return time.strftime("%a %b %d %H:%M:%S %Y", time.localtime(t))

    # a failed/aborted resolve has nothing more to report than its reason
    if self.status_ in (ResolverStatus.failed, ResolverStatus.aborted):
        _pr("The context failed to resolve:\n%s" % self.failure_description,
            critical)
        return

    t_str = _rt(self.created)
    _pr("resolved by %s@%s, on %s, using Rez v%s"
        % (self.user, self.host, t_str, self.rez_version))
    if self.requested_timestamp:
        t_str = _rt(self.requested_timestamp)
        _pr("packages released after %s were ignored" % t_str)
    _pr()

    if verbosity:
        _pr("search paths:", heading)
        rows = []
        colors = []
        for path in self.package_paths:
            # highlight paths that point at the local packages repository
            if package_repository_manager.are_same(path, config.local_packages_path):
                label = "(local)"
                col = local
            else:
                label = ""
                col = None
            rows.append((path, label))
            colors.append(col)

        for col, line in zip(colors, columnise(rows)):
            _pr(line, col)
        _pr()

        if self.package_filter:
            data = self.package_filter.to_pod()
            txt = dump_yaml(data)
            _pr("package filters:", heading)
            _pr(txt)
            _pr()

    _pr("requested packages:", heading)
    rows = []
    colors = []
    for request in self._package_requests:
        rows.append((str(request), ""))
        colors.append(None)

    # implicit packages are listed after the explicit request, marked as such
    for request in self.implicit_packages:
        rows.append((str(request), "(implicit)"))
        colors.append(implicit)

    for col, line in zip(colors, columnise(rows)):
        _pr(line, col)
    _pr()

    _pr("resolved packages:", heading)
    rows = []
    colors = []

    resolved_packages = self.resolved_packages or []
    if not source_order:
        resolved_packages = sorted(resolved_packages, key=lambda x: x.name)

    for pkg in resolved_packages:
        t = []
        col = None
        location = None

        # print root/uri
        if show_resolved_uris or not pkg.root:
            location = pkg.uri
        else:
            location = pkg.root
            # only a real filesystem root can be checked for existence
            if not os.path.exists(pkg.root):
                t.append('NOT FOUND')
                col = critical

        if pkg.is_local:
            t.append('local')
            col = local

        t = '(%s)' % ', '.join(t) if t else ''
        rows.append((pkg.qualified_package_name, location, t))
        colors.append(col)

    for col, line in zip(colors, columnise(rows)):
        _pr(line, col)

    if verbosity:
        _pr()
        # solve_time includes load_time, so subtract to get pure solve time
        actual_solve_time = self.solve_time - self.load_time
        _pr("resolve details:", heading)
        _pr("load time: %.02f secs" % self.load_time)
        _pr("solve time: %.02f secs" % actual_solve_time)
        _pr("packages queried: %d" % self.num_loaded_packages)
        _pr("from cache: %s" % self.from_cache)
        if self.load_path:
            _pr("rxt file: %s" % self.load_path)

    if verbosity >= 2:
        _pr()
        _pr("tools:", heading)
        self.print_tools(buf=buf)
Print the difference between the resolve of two contexts. Args: other (`ResolvedContext`): Context to compare to. heading: One of: - None: Do not display a heading; - True: Display the filename of each context as a heading, if both contexts have a filepath; - 2-tuple: Use the given two strings as headings - the first is the heading for `self`, the second for `other`.
def print_resolve_diff(self, other, heading=None):
    """Print the difference between the resolve of two contexts.

    Args:
        other (`ResolvedContext`): Context to compare to.
        heading: One of:
            - None: Do not display a heading;
            - True: Display the filename of each context as a heading, if
              both contexts have a filepath;
            - 2-tuple: Use the given two strings as headings - the first is
              the heading for `self`, the second for `other`.
    """
    d = self.get_resolve_diff(other)
    if not d:
        return

    rows = []
    if heading is True and self.load_path and other.load_path:
        a = os.path.basename(self.load_path)
        b = os.path.basename(other.load_path)
        heading = (a, b)
    if isinstance(heading, tuple):
        rows.append(list(heading) + [""])
        rows.append(('-' * len(heading[0]), '-' * len(heading[1]), ""))

    newer_packages = d.get("newer_packages", {})
    older_packages = d.get("older_packages", {})
    added_packages = d.get("added_packages", set())
    removed_packages = d.get("removed_packages", set())

    # each value is a version-sorted list spanning [our version .. theirs];
    # .values() (not py2-only .itervalues()) since the key was unused anyway
    if newer_packages:
        for pkgs in newer_packages.values():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(+%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name,
                         diff_str))

    if older_packages:
        for pkgs in older_packages.values():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(-%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name,
                         diff_str))

    if added_packages:
        for pkg in sorted(added_packages, key=lambda x: x.name):
            rows.append(("-", pkg.qualified_name, ""))

    if removed_packages:
        for pkg in sorted(removed_packages, key=lambda x: x.name):
            rows.append((pkg.qualified_name, "-", ""))

    # parenthesized form works identically as py2 print statement / py3 call
    print('\n'.join(columnise(rows)))
Generate the dependency graph. The dependency graph is a simpler subset of the resolve graph. It contains package name nodes connected directly to their dependencies. Weak references and conflict requests are not included in the graph. The dependency graph does not show conflicts. Returns: `pygraph.digraph` object.
def get_dependency_graph(self):
    """Generate the dependency graph.

    The dependency graph is a simpler subset of the resolve graph. It
    contains package name nodes connected directly to their dependencies.
    Weak references and conflict requests are not included in the graph.
    The dependency graph does not show conflicts.

    Returns:
        `pygraph.digraph` object.
    """
    from rez.vendor.pygraph.classes.digraph import digraph

    # package name -> qualified name (node labels), plus dependency edges
    labels = {}
    deps = set()

    for variant in self._resolved_packages:
        labels[variant.name] = variant.qualified_package_name
        deps.update((variant.name, req.name)
                    for req in variant.get_requires()
                    if not req.conflict)

    g = digraph()
    base_attrs = [("fontsize", 10),
                  ("fillcolor", "#AAFFAA"),
                  ("style", "filled")]

    for name, qname in labels.iteritems():
        g.add_node(name, attrs=base_attrs + [("label", qname)])
    for dep in deps:
        g.add_edge(dep)

    return g
Validate the context.
def validate(self):
    """Validate the context."""
    try:
        for variant in self.resolved_packages:
            variant.validate_data()
    except RezError as exc:
        # re-raise as a context error, preserving the original message
        raise ResolvedContextError("%s: %s" % (exc.__class__.__name__,
                                               str(exc)))
Get the environ dict resulting from interpreting this context. @param parent_environ Environment to interpret the context within, defaults to os.environ if None. @returns The environment dict generated by this context, when interpreted in a python rex interpreter.
def get_environ(self, parent_environ=None):
    """Get the environ dict resulting from interpreting this context.

    @param parent_environ Environment to interpret the context within,
        defaults to os.environ if None.
    @returns The environment dict generated by this context, when
        interpreted in a python rex interpreter.
    """
    py_interp = Python(target_environ={}, passive=True)
    ex = self._create_executor(py_interp, parent_environ)
    self._execute(ex)
    return ex.get_output()
Get a data key value for each resolved package. Args: key (str): String key of property, eg 'tools'. request_only (bool): If True, only return the key from resolved packages that were also present in the request. Returns: Dict of {pkg-name: (variant, value)}.
def get_key(self, key, request_only=False):
    """Get a data key value for each resolved package.

    Args:
        key (str): String key of property, eg 'tools'.
        request_only (bool): If True, only return the key from resolved
            packages that were also present in the request.

    Returns:
        Dict of {pkg-name: (variant, value)}.
    """
    values = {}
    requested = [req.name for req in self._package_requests
                 if not req.conflict]

    for variant in self.resolved_packages:
        # guard clause: skip non-requested packages in request_only mode
        if request_only and variant.name not in requested:
            continue
        value = getattr(variant, key)
        if value is not None:
            values[variant.name] = (variant, value)

    return values
Get the variant(s) that provide the named tool. If there are more than one variants, the tool is in conflict, and Rez does not know which variant's tool is actually exposed. Args: tool_name(str): Name of the tool to search for. Returns: Set of `Variant` objects. If no variant provides the tool, an empty set is returned.
def get_tool_variants(self, tool_name):
    """Get the variant(s) that provide the named tool.

    If there are more than one variants, the tool is in conflict, and Rez
    does not know which variant's tool is actually exposed.

    Args:
        tool_name(str): Name of the tool to search for.

    Returns:
        Set of `Variant` objects. If no variant provides the tool, an
        empty set is returned.
    """
    variants = set()
    tools_dict = self.get_tools(request_only=False)
    # .values() (not py2-only .itervalues()) - identical behavior, and
    # keeps this method working on Python 3 as well
    for variant, tools in tools_dict.values():
        if tool_name in tools:
            variants.add(variant)
    return variants
Returns tools of the same name provided by more than one package. Args: request_only: If True, only return the key from resolved packages that were also present in the request. Returns: Dict of {tool-name: set([Variant])}.
def get_conflicting_tools(self, request_only=False):
    """Returns tools of the same name provided by more than one package.

    Args:
        request_only: If True, only return the key from resolved packages
            that were also present in the request.

    Returns:
        Dict of {tool-name: set([Variant])}.
    """
    from collections import defaultdict

    # tool name -> set of variants exposing a tool of that name
    tool_sets = defaultdict(set)
    tools_dict = self.get_tools(request_only=request_only)
    # .values()/.items() (not py2-only .itervalues()/.iteritems()) -
    # identical behavior, and works on Python 3 as well
    for variant, tools in tools_dict.values():
        for tool in tools:
            tool_sets[tool].add(variant)

    # only tools provided by 2+ variants are conflicts
    conflicts = dict((k, v) for k, v in tool_sets.items() if len(v) > 1)
    return conflicts
Get the list of rex.Action objects resulting from interpreting this context. This is provided mainly for testing purposes. Args: parent_environ Environment to interpret the context within, defaults to os.environ if None. Returns: A list of rex.Action subclass instances.
def get_actions(self, parent_environ=None):
    """Get the list of rex.Action objects resulting from interpreting this
    context. This is provided mainly for testing purposes.

    Args:
        parent_environ: Environment to interpret the context within,
            defaults to os.environ if None.

    Returns:
        A list of rex.Action subclass instances.
    """
    py_interp = Python(target_environ={}, passive=True)
    ex = self._create_executor(py_interp, parent_environ)
    self._execute(ex)
    return ex.actions
Get the shell code resulting from intepreting this context. Args: shell (str): Shell type, for eg 'bash'. If None, the current shell type is used. parent_environ (dict): Environment to interpret the context within, defaults to os.environ if None. style (): Style to format shell code in.
def get_shell_code(self, shell=None, parent_environ=None,
                   style=OutputStyle.file):
    """Get the shell code resulting from intepreting this context.

    Args:
        shell (str): Shell type, for eg 'bash'. If None, the current shell
            type is used.
        parent_environ (dict): Environment to interpret the context within,
            defaults to os.environ if None.
        style (): Style to format shell code in.
    """
    ex = self._create_executor(interpreter=create_shell(shell),
                               parent_environ=parent_environ)

    # expose the source rxt file to the generated shell code, if we have one
    if self.load_path and os.path.isfile(self.load_path):
        ex.env.REZ_RXT_FILE = self.load_path

    self._execute(ex)
    return ex.get_output(style)
Apply the context to the current python session. Note that this updates os.environ and possibly sys.path, if `parent_environ` is not provided. Args: parent_environ: Environment to interpret the context within, defaults to os.environ if None.
def apply(self, parent_environ=None):
    """Apply the context to the current python session.

    Note that this updates os.environ and possibly sys.path, if
    `parent_environ` is not provided.

    Args:
        parent_environ: Environment to interpret the context within,
            defaults to os.environ if None.
    """
    py_interp = Python(target_environ=os.environ)
    ex = self._create_executor(py_interp, parent_environ)
    self._execute(ex)
    # push the interpreted environment back into this process
    py_interp.apply_environ()
Find a program in the resolved environment. Args: cmd: String name of the program to find. parent_environ: Environment to interpret the context within, defaults to os.environ if None. fallback: If True, and the program is not found in the context, the current environment will then be searched. Returns: Path to the program, or None if the program was not found.
def which(self, cmd, parent_environ=None, fallback=False):
    """Find a program in the resolved environment.

    Args:
        cmd: String name of the program to find.
        parent_environ: Environment to interpret the context within,
            defaults to os.environ if None.
        fallback: If True, and the program is not found in the context, the
            current environment will then be searched.

    Returns:
        Path to the program, or None if the program was not found.
    """
    env = self.get_environ(parent_environ=parent_environ)
    # search the resolved environment first (module-level `which` helper)
    found = which(cmd, env=env)
    if found is None and fallback:
        # fall back to the current process environment
        found = which(cmd)
    return found
Run a command within a resolved context. This applies the context to a python environ dict, then runs a subprocess in that namespace. This is not a fully configured subshell - shell-specific commands such as aliases will not be applied. To execute a command within a subshell instead, use execute_shell(). Warning: This runs a command in a configured environ dict only, not in a true shell. To do that, call `execute_shell` using the `command` keyword argument. Args: args: Command arguments, can be a string. parent_environ: Environment to interpret the context within, defaults to os.environ if None. subprocess_kwargs: Args to pass to subprocess.Popen. Returns: A subprocess.Popen object. Note: This does not alter the current python session.
def execute_command(self, args, parent_environ=None, **subprocess_kwargs):
    """Run a command within a resolved context.

    This applies the context to a python environ dict, then runs a
    subprocess in that namespace. This is not a fully configured subshell -
    shell-specific commands such as aliases will not be applied. To execute
    a command within a subshell instead, use execute_shell().

    Warning:
        This runs a command in a configured environ dict only, not in a
        true shell. To do that, call `execute_shell` using the `command`
        keyword argument.

    Args:
        args: Command arguments, can be a string.
        parent_environ: Environment to interpret the context within,
            defaults to os.environ if None.
        subprocess_kwargs: Args to pass to subprocess.Popen.

    Returns:
        A subprocess.Popen object.

    Note:
        This does not alter the current python session.
    """
    # never mutate os.environ itself; interpret into a scratch dict
    target_environ = ({} if parent_environ in (None, os.environ)
                      else parent_environ.copy())
    py_interp = Python(target_environ=target_environ)
    ex = self._create_executor(py_interp, parent_environ)
    self._execute(ex)
    return py_interp.subprocess(args, **subprocess_kwargs)
Run some rex code in the context. Note: This is just a convenience form of `execute_shell`. Args: code (str): Rex code to execute. filename (str): Filename to report if there are syntax errors. shell: Shell type, for eg 'bash'. If None, the current shell type is used. parent_environ: Environment to run the shell process in, if None then the current environment is used. Popen_args: args to pass to the shell process object constructor. Returns: `subprocess.Popen` object for the shell process.
def execute_rex_code(self, code, filename=None, shell=None,
                     parent_environ=None, **Popen_args):
    """Run some rex code in the context.

    Note:
        This is just a convenience form of `execute_shell`.

    Args:
        code (str): Rex code to execute.
        filename (str): Filename to report if there are syntax errors.
        shell: Shell type, for eg 'bash'. If None, the current shell type
            is used.
        parent_environ: Environment to run the shell process in, if None
            then the current environment is used.
        Popen_args: args to pass to the shell process object constructor.

    Returns:
        `subprocess.Popen` object for the shell process.
    """
    # inject the given rex code as an extra context action
    def _apply_code(executor):
        executor.execute_code(code, filename=filename)

    return self.execute_shell(shell=shell,
                              parent_environ=parent_environ,
                              command='',  # don't run any command
                              block=False,
                              actions_callback=_apply_code,
                              **Popen_args)
Spawn a possibly-interactive shell. Args: shell: Shell type, for eg 'bash'. If None, the current shell type is used. parent_environ: Environment to run the shell process in, if None then the current environment is used. rcfile: Specify a file to source instead of shell startup files. norc: If True, skip shell startup files, if possible. stdin: If True, read commands from stdin, in a non-interactive shell. command: If not None, execute this command in a non-interactive shell. If an empty string or list, don't run a command, but don't open an interactive shell either. Can be a list of args. quiet: If True, skip the welcome message in interactive shells. block: If True, block until the shell is terminated. If False, return immediately. If None, will default to blocking if the shell is interactive. actions_callback: Callback with signature (RexExecutor). This lets the user append custom actions to the context, such as setting extra environment variables. Callback is run prior to context Rex execution. post_actions_callback: Callback with signature (RexExecutor). This lets the user append custom actions to the context, such as setting extra environment variables. Callback is run after context Rex execution. context_filepath: If provided, the context file will be written here, rather than to the default location (which is in a tempdir). If you use this arg, you are responsible for cleaning up the file. start_new_session: If True, change the process group of the target process. Note that this may override the Popen_args keyword 'preexec_fn'. detached: If True, open a separate terminal. Note that this may override the `pre_command` argument. pre_command: Command to inject before the shell command itself. This is for internal use. Popen_args: args to pass to the shell process object constructor. Returns: If blocking: A 3-tuple of (returncode, stdout, stderr); If non-blocking - A subprocess.Popen object for the shell process.
def execute_shell(self, shell=None, parent_environ=None, rcfile=None,
                  norc=False, stdin=False, command=None, quiet=False,
                  block=None, actions_callback=None,
                  post_actions_callback=None, context_filepath=None,
                  start_new_session=False, detached=False, pre_command=None,
                  **Popen_args):
    """Spawn a possibly-interactive shell.

    Args:
        shell: Shell type, for eg 'bash'. If None, the current shell type
            is used.
        parent_environ: Environment to run the shell process in, if None
            then the current environment is used.
        rcfile: Specify a file to source instead of shell startup files.
        norc: If True, skip shell startup files, if possible.
        stdin: If True, read commands from stdin, in a non-interactive
            shell.
        command: If not None, execute this command in a non-interactive
            shell. If an empty string or list, don't run a command, but
            don't open an interactive shell either. Can be a list of args.
        quiet: If True, skip the welcome message in interactive shells.
        block: If True, block until the shell is terminated. If False,
            return immediately. If None, will default to blocking if the
            shell is interactive.
        actions_callback: Callback with signature (RexExecutor). This lets
            the user append custom actions to the context, such as setting
            extra environment variables. Callback is run prior to context
            Rex execution.
        post_actions_callback: Callback with signature (RexExecutor). This
            lets the user append custom actions to the context, such as
            setting extra environment variables. Callback is run after
            context Rex execution.
        context_filepath: If provided, the context file will be written
            here, rather than to the default location (which is in a
            tempdir). If you use this arg, you are responsible for cleaning
            up the file.
        start_new_session: If True, change the process group of the target
            process. Note that this may override the Popen_args keyword
            'preexec_fn'.
        detached: If True, open a separate terminal. Note that this may
            override the `pre_command` argument.
        pre_command: Command to inject before the shell command itself.
            This is for internal use.
        Popen_args: args to pass to the shell process object constructor.

    Returns:
        If blocking: A 3-tuple of (returncode, stdout, stderr);
        If non-blocking - A subprocess.Popen object for the shell process.
    """
    sh = create_shell(shell)

    # NOTE(review): on Python 2, str has no __iter__, so only real
    # sequences get joined here; on Python 3 a plain string would also
    # match - confirm before porting
    if hasattr(command, "__iter__"):
        command = sh.join(command)

    # start a new session if specified
    if start_new_session:
        Popen_args.update(config.new_session_popen_args)

    # open a separate terminal if specified
    if detached:
        term_cmd = config.terminal_emulator_command
        if term_cmd:
            pre_command = term_cmd.strip().split()

    # block if the shell is likely to be interactive
    if block is None:
        block = not (command or stdin)

    # context and rxt files. If running detached, don't cleanup files,
    # because rez-env returns too early and deletes the tmp files before
    # the detached process can use them
    tmpdir = self.tmpdir_manager.mkdtemp(cleanup=not detached)

    # reuse an on-disk rxt file when we have one, else serialize a fresh one
    if self.load_path and os.path.isfile(self.load_path):
        rxt_file = self.load_path
    else:
        rxt_file = os.path.join(tmpdir, "context.rxt")
        self.save(rxt_file)

    context_file = context_filepath or \
        os.path.join(tmpdir, "context.%s" % sh.file_extension())

    # interpret this context and write out the native context file
    executor = self._create_executor(sh, parent_environ)
    executor.env.REZ_RXT_FILE = rxt_file
    executor.env.REZ_CONTEXT_FILE = context_file
    if actions_callback:
        actions_callback(executor)

    self._execute(executor)

    if post_actions_callback:
        post_actions_callback(executor)

    context_code = executor.get_output()
    with open(context_file, 'w') as f:
        f.write(context_code)

    quiet = quiet or \
        (RezToolsVisibility[config.rez_tools_visibility] ==
         RezToolsVisibility.never)

    # spawn the shell subprocess
    p = sh.spawn_shell(context_file,
                       tmpdir,
                       rcfile=rcfile,
                       norc=norc,
                       stdin=stdin,
                       command=command,
                       env=parent_environ,
                       quiet=quiet,
                       pre_command=pre_command,
                       **Popen_args)
    if block:
        stdout, stderr = p.communicate()
        return p.returncode, stdout, stderr
    else:
        return p
Convert context to dict containing only builtin types. Args: fields (list of str): If present, only write these fields into the dict. This can be used to avoid constructing expensive fields (such as 'graph') for some cases. Returns: dict: Dictified context.
def to_dict(self, fields=None):
    """Convert context to dict containing only builtin types.

    Args:
        fields (list of str): If present, only write these fields into the
            dict. This can be used to avoid constructing expensive fields
            (such as 'graph') for some cases.

    Returns:
        dict: Dictified context.
    """
    data = {}

    def _add(field):
        # True if `field` should be computed and included in the output
        return (fields is None or field in fields)

    if _add("resolved_packages"):
        resolved_packages = []
        for pkg in (self._resolved_packages or []):
            resolved_packages.append(pkg.handle.to_dict())
        data["resolved_packages"] = resolved_packages

    if _add("serialize_version"):
        data["serialize_version"] = \
            '.'.join(map(str, ResolvedContext.serialize_version))

    if _add("patch_locks"):
        # BUGFIX: iterate (key, value) pairs; iterating the dict directly
        # yields keys only, so 'for k, v in self.patch_locks' cannot unpack
        data["patch_locks"] = dict((k, v.name)
                                   for k, v in self.patch_locks.items())

    if _add("package_orderers"):
        package_orderers = [package_order.to_pod(x)
                            for x in (self.package_orderers or [])]
        data["package_orderers"] = package_orderers or None

    if _add("package_filter"):
        data["package_filter"] = self.package_filter.to_pod()

    if _add("graph"):
        if self.graph_string and self.graph_string.startswith('{'):
            graph_str = self.graph_string  # already in compact format
        else:
            g = self.graph()
            graph_str = write_compacted(g)
        data["graph"] = graph_str

    data.update(dict(
        timestamp=self.timestamp,
        requested_timestamp=self.requested_timestamp,
        building=self.building,
        caching=self.caching,
        implicit_packages=map(str, self.implicit_packages),
        package_requests=map(str, self._package_requests),
        package_paths=self.package_paths,

        default_patch_lock=self.default_patch_lock.name,

        rez_version=self.rez_version,
        rez_path=self.rez_path,
        user=self.user,
        host=self.host,
        platform=self.platform,
        arch=self.arch,
        os=self.os,
        created=self.created,

        parent_suite_path=self.parent_suite_path,
        suite_context_name=self.suite_context_name,

        status=self.status_.name,
        failure_description=self.failure_description,

        from_cache=self.from_cache,
        solve_time=self.solve_time,
        load_time=self.load_time,
        num_loaded_packages=self.num_loaded_packages
    ))

    if fields:
        # drop everything the caller did not explicitly request
        # (.items() rather than py2-only .iteritems(); identical behavior)
        data = dict((k, v) for k, v in data.items() if k in fields)

    return data
Load a `ResolvedContext` from a dict. Args: d (dict): Dict containing context data. identifier_str (str): String identifying the context, this is only used to display in an error string if a serialization version mismatch is detected. Returns: `ResolvedContext` object.
def from_dict(cls, d, identifier_str=None):
    """Load a `ResolvedContext` from a dict.

    Args:
        d (dict): Dict containing context data.
        identifier_str (str): String identifying the context, this is only
            used to display in an error string if a serialization version
            mismatch is detected.

    Returns:
        `ResolvedContext` object.
    """
    # check serialization version
    def _print_version(value):
        return '.'.join(str(x) for x in value)

    toks = str(d["serialize_version"]).split('.')
    load_ver = tuple(int(x) for x in toks)
    curr_ver = ResolvedContext.serialize_version

    if load_ver[0] > curr_ver[0]:
        msg = ["The context"]
        if identifier_str:
            msg.append("in %s" % identifier_str)
        # BUGFIX: _print_version returns a string, so use %s (the original
        # %d raised TypeError); write to stderr directly so this also works
        # on Python 3 (equivalent to the py2 'print >> sys.stderr')
        msg.append("was written by a newer version of Rez. The load may "
                   "fail (serialize version %s > %s)"
                   % (_print_version(load_ver), _print_version(curr_ver)))
        sys.stderr.write(' '.join(msg) + '\n')

    # create and init the context; __new__ bypasses a full re-resolve
    r = ResolvedContext.__new__(ResolvedContext)
    r.load_path = None
    r.pre_resolve_bindings = None

    r.timestamp = d["timestamp"]
    r.building = d["building"]
    r.caching = d["caching"]
    r.implicit_packages = [PackageRequest(x) for x in d["implicit_packages"]]
    r._package_requests = [PackageRequest(x) for x in d["package_requests"]]
    r.package_paths = d["package_paths"]

    r.rez_version = d["rez_version"]
    r.rez_path = d["rez_path"]
    r.user = d["user"]
    r.host = d["host"]
    r.platform = d["platform"]
    r.arch = d["arch"]
    r.os = d["os"]
    r.created = d["created"]
    r.verbosity = d.get("verbosity", 0)

    r.status_ = ResolverStatus[d["status"]]
    r.failure_description = d["failure_description"]

    r.solve_time = d["solve_time"]
    r.load_time = d["load_time"]

    r.graph_string = d["graph"]
    r.graph_ = None

    r._resolved_packages = []
    for d_ in d["resolved_packages"]:
        variant_handle = d_
        if load_ver < (4, 0):
            # -- SINCE SERIALIZE VERSION 4.0
            from rez.utils.backcompat import convert_old_variant_handle
            variant_handle = convert_old_variant_handle(variant_handle)

        variant = get_variant(variant_handle)
        variant.set_context(r)
        r._resolved_packages.append(variant)

    # -- SINCE SERIALIZE VERSION 1

    r.requested_timestamp = d.get("requested_timestamp", 0)

    # -- SINCE SERIALIZE VERSION 2

    r.parent_suite_path = d.get("parent_suite_path")
    r.suite_context_name = d.get("suite_context_name")

    # -- SINCE SERIALIZE VERSION 3

    r.default_patch_lock = PatchLock[d.get("default_patch_lock", "no_lock")]
    patch_locks = d.get("patch_locks", {})
    # BUGFIX: iterate (key, value) pairs; iterating the dict directly
    # yields keys only, so 'for k, v in patch_locks' cannot unpack
    r.patch_locks = dict((k, PatchLock[v]) for k, v in patch_locks.items())

    # -- SINCE SERIALIZE VERSION 4.0

    r.from_cache = d.get("from_cache", False)

    # -- SINCE SERIALIZE VERSION 4.1

    data = d.get("package_filter", [])
    r.package_filter = PackageFilterList.from_pod(data)

    # -- SINCE SERIALIZE VERSION 4.2

    data = d.get("package_orderers")
    if data:
        r.package_orderers = [package_order.from_pod(x) for x in data]
    else:
        r.package_orderers = None

    # -- SINCE SERIALIZE VERSION 4.3

    r.num_loaded_packages = d.get("num_loaded_packages", -1)

    # track context usage
    if config.context_tracking_host:
        data = dict((k, v) for k, v in d.items()
                    if k in config.context_tracking_context_fields)
        r._track_context(data, action="sourced")

    return r
Build a dictionary mapping each pair of nodes to a number (the distance between them). @type graph: graph @param graph: Graph.
def optimize(self, graph):
    """
    Build a dictionary mapping each pair of nodes to a number (the squared
    euclidean distance between their 'position' attributes; note that no
    square root is taken).

    @type  graph: graph
    @param graph: Graph.
    """
    # Hoist the attribute scan out of the O(n^2) pair loop: the 'position'
    # lookup only depends on the node, not on the pair. This also avoids
    # the original's stale-variable carry-over when a node had no
    # 'position' attribute.
    positions = {}
    for node in graph.nodes():
        for attr in graph.node_attributes(node):
            if attr[0] == 'position':
                positions[node] = attr[1]
                break

    for start in graph.nodes():
        start_attr = positions[start]
        for end in graph.nodes():
            end_attr = positions[end]
            dist = 0
            for i in range(len(start_attr)):
                dist = dist + (float(start_attr[i]) - float(end_attr[i])) ** 2
            self.distances[(start, end)] = dist
Add to sys.path, and revert on scope exit.
def add_sys_paths(paths):
    """Add to sys.path, and revert on scope exit.
    """
    # snapshot, extend, then restore the snapshot no matter what happens
    saved_syspath = list(sys.path)
    sys.path.extend(paths)
    try:
        yield
    finally:
        sys.path = saved_syspath
Wrapper for `subprocess.Popen`. Avoids python bug described here: https://bugs.python.org/issue3905. This can arise when apps (maya) install a non-standard stdin handler. In newer version of maya and katana, the sys.stdin object can also become replaced by an object with no 'fileno' attribute, this is also taken into account.
def popen(args, **kwargs):
    """Wrapper for `subprocess.Popen`.

    Avoids python bug described here: https://bugs.python.org/issue3905.
    This can arise when apps (maya) install a non-standard stdin handler.

    In newer version of maya and katana, the sys.stdin object can also
    become replaced by an object with no 'fileno' attribute, this is also
    taken into account.
    """
    if "stdin" not in kwargs:
        # probe the current stdin; fall back to the real __stdin__ when a
        # host app has swapped in an object without fileno()
        try:
            stdin_fd = sys.stdin.fileno()
        except AttributeError:
            stdin_fd = sys.__stdin__.fileno()

        # a non-standard descriptor triggers the bug; hand the child a pipe
        if stdin_fd not in (0, 1, 2):
            kwargs["stdin"] = subprocess.PIPE

    return subprocess.Popen(args, **kwargs)
Yield .dist-info and/or .egg(-info) distributions.
def _yield_distributions(self): """ Yield .dist-info and/or .egg(-info) distributions. """ # We need to check if we've seen some resources already, because on # some Linux systems (e.g. some Debian/Ubuntu variants) there are # symlinks which alias other files in the environment. seen = set() for path in self.path: finder = resources.finder_for_path(path) if finder is None: continue r = finder.find('') if not r or not r.is_container: continue rset = sorted(r.resources) for entry in rset: r = finder.find(entry) if not r or r.path in seen: continue if self._include_dist and entry.endswith(DISTINFO_EXT): possible_filenames = [METADATA_FILENAME, WHEEL_METADATA_FILENAME] for metadata_filename in possible_filenames: metadata_path = posixpath.join(entry, metadata_filename) pydist = finder.find(metadata_path) if pydist: break else: continue with contextlib.closing(pydist.as_stream()) as stream: metadata = Metadata(fileobj=stream, scheme='legacy') logger.debug('Found %s', r.path) seen.add(r.path) yield new_dist_class(r.path, metadata=metadata, env=self) elif self._include_egg and entry.endswith(('.egg-info', '.egg')): logger.debug('Found %s', r.path) seen.add(r.path) yield old_dist_class(r.path, self)
Return the number of commits we are relative to the remote. Negative is behind, positive in front, zero means we are matched to remote.
def get_relative_to_remote(self):
    """Return the number of commits we are relative to the remote. Negative
    is behind, positive in front, zero means we are matched to remote.
    """
    status_line = self.git("status", "--short", "-b")[0]
    # the branch header carries '[ahead N]' / '[behind N]' when unsynced
    bracketed = re.compile("\[([^\]]+)\]").findall(status_line)
    if not bracketed:
        return 0

    try:
        direction, count = bracketed[-1].split()
        assert(direction in ("ahead", "behind"))
        count = int(count)
    except Exception as e:
        # NOTE(review): a diverged branch ('[ahead 1, behind 2]') also
        # lands here - confirm that raising is the intended handling
        raise ReleaseVCSError(
            ("Problem parsing first line of result of 'git status "
             "--short -b' (%s):\n%s") % (status_line, str(e)))

    return -count if direction == "behind" else count
Returns (remote, branch) tuple, or None,None if there is no remote.
def get_tracking_branch(self):
    """Returns (remote, branch) tuple, or (None, None) if there is no remote.
    """
    try:
        remote_uri = self.git("rev-parse", "--abbrev-ref",
                              "--symbolic-full-name", "@{u}")[0]
        # split() yields a list; return a tuple as documented, consistent
        # with the (None, None) tuple returned on the error path below.
        return tuple(remote_uri.split('/', 1))
    except Exception as e:
        # Capitalization of the message changed sometime between git 1.8.3
        # and 2.12 - used to be "No upstream", now "no upstream"; compare
        # case-insensitively.
        errmsg = str(e).lower()
        if ("no upstream branch" not in errmsg and
                "no upstream configured" not in errmsg):
            raise e
        return (None, None)
Return all nodes connected by the given hyperedge or all hyperedges connected to the given hypernode. @type obj: hyperedge @param obj: Object identifier. @rtype: list @return: List of node objects linked to the given hyperedge.
def links(self, obj):
    """
    Return all nodes connected by the given hyperedge or all hyperedges
    connected to the given hypernode.

    @type  obj: hyperedge
    @param obj: Object identifier.

    @rtype:  list
    @return: List of node objects linked to the given hyperedge.
    """
    # Hyperedges take priority; fall back to treating obj as a node.
    try:
        return self.edge_links[obj]
    except KeyError:
        return self.node_links[obj]
Return all neighbors adjacent to the given node. @type obj: node @param obj: Object identifier. @rtype: list @return: List of all node objects adjacent to the given node.
def neighbors(self, obj):
    """
    Return all neighbors adjacent to the given node.

    @type  obj: node
    @param obj: Object identifier.

    @rtype:  list
    @return: List of all node objects adjacent to the given node.
    """
    # Union of the member nodes of every hyperedge incident to obj.
    adjacent = set()
    for hyperedge in self.node_links[obj]:
        adjacent.update(self.edge_links[hyperedge])
    # A node is not its own neighbor.
    adjacent.discard(obj)
    return list(adjacent)
Add given node to the hypergraph. @attention: While nodes can be of any type, it's strongly recommended to use only numbers and single-line strings as node identifiers if you intend to use write(). @type node: node @param node: Node identifier.
def add_node(self, node):
    """
    Add given node to the hypergraph.

    @attention: While nodes can be of any type, it's strongly recommended
    to use only numbers and single-line strings as node identifiers if you
    intend to use write().

    @type  node: node
    @param node: Node identifier.
    """
    # Guard clause: duplicate nodes are an error.
    if node in self.node_links:
        raise AdditionError("Node %s already in graph" % node)
    self.node_links[node] = []
    self.node_attr[node] = []
    # Nodes are tagged 'n' in the underlying bipartite graph.
    self.graph.add_node((node, 'n'))
Delete a given node from the hypergraph. @type node: node @param node: Node identifier.
def del_node(self, node):
    """
    Delete a given node from the hypergraph.

    @type  node: node
    @param node: Node identifier.
    """
    # Unknown nodes are silently ignored.
    if not self.has_node(node):
        return
    # Detach the node from every hyperedge that references it.
    for hyperedge in self.node_links[node]:
        self.edge_links[hyperedge].remove(node)
    del self.node_links[node]
    self.graph.del_node((node, 'n'))
Add given hyperedge to the hypergraph. @attention: While hyperedge-nodes can be of any type, it's strongly recommended to use only numbers and single-line strings as node identifiers if you intend to use write(). @type hyperedge: hyperedge @param hyperedge: Hyperedge identifier.
def add_hyperedge(self, hyperedge):
    """
    Add given hyperedge to the hypergraph.

    @attention: While hyperedge-nodes can be of any type, it's strongly
    recommended to use only numbers and single-line strings as node
    identifiers if you intend to use write().

    @type  hyperedge: hyperedge
    @param hyperedge: Hyperedge identifier.
    """
    # Adding an existing hyperedge is a silent no-op.
    if hyperedge in self.edge_links:
        return
    self.edge_links[hyperedge] = []
    # Hyperedges are tagged 'h' in the underlying bipartite graph.
    self.graph.add_node((hyperedge, 'h'))
Delete the given hyperedge. @type hyperedge: hyperedge @param hyperedge: Hyperedge identifier.
def del_hyperedge(self, hyperedge):
    """
    Delete the given hyperedge.

    @type  hyperedge: hyperedge
    @param hyperedge: Hyperedge identifier.
    """
    # Unknown hyperedges are silently ignored.
    if hyperedge not in self.hyperedges():
        return
    # Detach every member node from this hyperedge first.
    for node in self.edge_links[hyperedge]:
        self.node_links[node].remove(hyperedge)
    del self.edge_links[hyperedge]
    self.del_edge_labeling(hyperedge)
    self.graph.del_node((hyperedge, 'h'))
Link given node and hyperedge. @type node: node @param node: Node. @type hyperedge: node @param hyperedge: Hyperedge.
def link(self, node, hyperedge):
    """
    Link given node and hyperedge.

    @type  node: node
    @param node: Node.

    @type  hyperedge: node
    @param hyperedge: Hyperedge.
    """
    # Guard clause: duplicate links are an error.
    if hyperedge in self.node_links[node]:
        raise AdditionError("Link (%s, %s) already in graph" % (node, hyperedge))
    self.edge_links[hyperedge].append(node)
    self.node_links[node].append(hyperedge)
    # Mirror the link in the underlying bipartite graph.
    self.graph.add_edge(((node, 'n'), (hyperedge, 'h')))
Unlink given node and hyperedge. @type node: node @param node: Node. @type hyperedge: hyperedge @param hyperedge: Hyperedge.
def unlink(self, node, hyperedge):
    """
    Unlink given node and hyperedge.

    @type  node: node
    @param node: Node.

    @type  hyperedge: hyperedge
    @param hyperedge: Hyperedge.
    """
    # Tagged keys used by the underlying bipartite graph.
    node_key = (node, 'n')
    edge_key = (hyperedge, 'h')
    # Remove the association on both sides, then in the graph mirror.
    self.node_links[node].remove(hyperedge)
    self.edge_links[hyperedge].remove(node)
    self.graph.del_edge((node_key, edge_key))
Return the rank of the given hypergraph. @rtype: int @return: Rank of graph.
def rank(self):
    """
    Return the rank of the given hypergraph.

    @rtype:  int
    @return: Rank of graph.
    """
    # Rank is the cardinality of the largest hyperedge (0 if none).
    sizes = [len(self.edge_links[e]) for e in self.hyperedges()]
    return max(sizes) if sizes else 0
Increment letter-based IDs. Generates IDs like ['a', 'b', ..., 'z', 'aa', 'ab', ..., 'az', 'ba', ...] Returns: str: Next base-26 ID.
def get_next_base26(prev=None):
    """Increment letter-based IDs.

    Generates IDs like ['a', 'b', ..., 'z', 'aa', 'ab', ..., 'az', 'ba', ...]

    Args:
        prev (str): Previous ID, or None/empty to start the sequence.

    Returns:
        str: Next base-26 ID.

    Raises:
        ValueError: If `prev` contains anything other than a-z.
    """
    if not prev:
        return 'a'

    # \Z rather than $: "$" also matches just before a trailing newline,
    # which would let e.g. "ab\n" slip past validation.
    if not re.match(r"^[a-z]*\Z", prev):
        raise ValueError("Invalid base26")

    if not prev.endswith('z'):
        # Simple case: bump the last letter.
        return prev[:-1] + chr(ord(prev[-1]) + 1)

    # Trailing 'z' rolls over to 'a' and the increment carries leftward.
    return get_next_base26(prev[:-1]) + 'a'
Create a base-26 symlink in `path` pointing to `source`. If such a symlink already exists, it is returned. Note that there is a small chance that this function may create a new symlink when there is already one pointed at `source`. Assumes `path` only contains base26 symlinks. Returns: str: Path to created symlink.
def create_unique_base26_symlink(path, source):
    """Create a base-26 symlink in `path` pointing to `source`.

    If such a symlink already exists, it is returned. Note that there is a
    small chance that this function may create a new symlink when there is
    already one pointed at `source`.

    Assumes `path` only contains base26 symlinks.

    Returns:
        str: Path to created symlink.
    """
    attempts = 0
    while True:
        # Reuse an existing symlink that already points at `source`.
        existing = find_matching_symlink(path, source)
        if existing:
            return os.path.join(path, existing)

        # The next ID is one past the highest symlink name in `path`.
        links = [x for x in os.listdir(path)
                 if os.path.islink(os.path.join(path, x))]
        prev = max(links) if links else None

        linkpath = os.path.join(path, get_next_base26(prev))

        try:
            os.symlink(source, linkpath)
            return linkpath
        except OSError as e:
            # Only EEXIST (lost a race to another process) is retryable.
            if e.errno != errno.EEXIST:
                raise

        # A same-named symlink was created in parallel somewhere; try
        # again up to N times before giving up.
        if attempts > 10:
            raise RuntimeError(
                "Variant shortlink not created - there was too much contention.")
        attempts += 1
Find wheels from which we can import PROJECTS. Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return a list of the first wheel found for each PROJECT
def find_wheels(projects, search_dirs):
    """Find wheels from which we can import PROJECTS.

    Scan through SEARCH_DIRS for a wheel for each PROJECT in turn. Return
    a list of the first wheel found for each PROJECT
    """
    wheels = []

    # Take the first suitable wheel found for each project. No version
    # checking here - this is simply to get something we can then use to
    # install the correct version.
    for project in projects:
        # This relies on only having "universal" wheels available. The
        # pattern could be tightened to require -py2.py3-none-any.whl.
        pattern = project + '-*.whl'
        matches = None
        for search_dir in search_dirs:
            found = glob.glob(os.path.join(search_dir, pattern))
            if found:
                matches = found
                break
        if matches:
            wheels.append(os.path.abspath(matches[0]))
        else:
            # We're out of luck, so quit with a suitable error
            logger.fatal('Cannot find a wheel for %s' % (project,))

    return wheels
Return the path locations for the environment (where libraries are, where scripts go, etc)
def path_locations(home_dir): """Return the path locations for the environment (where libraries are, where scripts go, etc)""" # XXX: We'd use distutils.sysconfig.get_python_inc/lib but its # prefix arg is broken: http://bugs.python.org/issue3386 if is_win: # Windows has lots of problems with executables with spaces in # the name; this function will remove them (using the ~1 # format): mkdir(home_dir) if ' ' in home_dir: import ctypes GetShortPathName = ctypes.windll.kernel32.GetShortPathNameW size = max(len(home_dir)+1, 256) buf = ctypes.create_unicode_buffer(size) try: u = unicode except NameError: u = str ret = GetShortPathName(u(home_dir), buf, size) if not ret: print('Error: the path "%s" has a space in it' % home_dir) print('We could not determine the short pathname for it.') print('Exiting.') sys.exit(3) home_dir = str(buf.value) lib_dir = join(home_dir, 'Lib') inc_dir = join(home_dir, 'Include') bin_dir = join(home_dir, 'Scripts') if is_jython: lib_dir = join(home_dir, 'Lib') inc_dir = join(home_dir, 'Include') bin_dir = join(home_dir, 'bin') elif is_pypy: lib_dir = home_dir inc_dir = join(home_dir, 'include') bin_dir = join(home_dir, 'bin') elif not is_win: lib_dir = join(home_dir, 'lib', py_version) multiarch_exec = '/usr/bin/multiarch-platform' if is_executable_file(multiarch_exec): # In Mageia (2) and Mandriva distros the include dir must be like: # virtualenv/include/multiarch-x86_64-linux/python2.7 # instead of being virtualenv/include/python2.7 p = subprocess.Popen(multiarch_exec, stdout=subprocess.PIPE, stderr=subprocess.PIPE) stdout, stderr = p.communicate() # stdout.strip is needed to remove newline character inc_dir = join(home_dir, 'include', stdout.strip(), py_version + abiflags) else: inc_dir = join(home_dir, 'include', py_version + abiflags) bin_dir = join(home_dir, 'bin') return home_dir, lib_dir, inc_dir, bin_dir
Platforms that use the "posix_local" install scheme (like Ubuntu with Python 2.7) need to be given an additional "local" location, sigh.
def fix_local_scheme(home_dir, symlink=True):
    """
    Platforms that use the "posix_local" install scheme (like Ubuntu with
    Python 2.7) need to be given an additional "local" location, sigh.

    :param home_dir: Root directory of the environment being created.
    :param symlink: Passed through to copyfile; when True, entries are
        mirrored via symlinks rather than copies.
    """
    try:
        import sysconfig
    except ImportError:
        pass
    else:
        # sysconfig._get_default_scheme() was made public as
        # get_default_scheme() in Python 3.10, and the private name was
        # removed - support both so this doesn't raise AttributeError on
        # newer interpreters.
        get_scheme = getattr(sysconfig, 'get_default_scheme', None) or \
            getattr(sysconfig, '_get_default_scheme', None)
        if get_scheme and get_scheme() == 'posix_local':
            local_path = os.path.join(home_dir, 'local')
            if not os.path.exists(local_path):
                os.mkdir(local_path)
            # Mirror every top-level entry of the env under local/.
            for subdir_name in os.listdir(home_dir):
                if subdir_name == 'local':
                    continue
                copyfile(os.path.abspath(os.path.join(home_dir, subdir_name)),
                         os.path.join(local_path, subdir_name), symlink)