idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
38,600
def set_context_prefix(self, name, prefix):
    """Set the prefix string applied to the named context's tools."""
    context_data = self._context(name)
    context_data["prefix"] = prefix
    # tool table is derived data; rebuild it
    self._flush_tools()
Set a context's prefix.
38,601
def set_context_suffix(self, name, suffix):
    """Set the suffix string applied to the named context's tools."""
    context_data = self._context(name)
    context_data["suffix"] = suffix
    # tool table is derived data; rebuild it
    self._flush_tools()
Set a context's suffix.
38,602
def bump_context(self, name):
    """Give the named context's tools priority over all other contexts."""
    context_data = self._context(name)
    context_data["priority"] = self._next_priority
    self._flush_tools()
Causes the context's tools to take priority over all others.
38,603
def hide_tool(self, context_name, tool_name):
    """Hide a tool so that it is not exposed in the suite."""
    context_data = self._context(context_name)
    hidden = context_data["hidden_tools"]
    if tool_name not in hidden:
        # only validate/flush when the hidden set actually changes
        self._validate_tool(context_name, tool_name)
        hidden.add(tool_name)
        self._flush_tools()
Hide a tool so that it is not exposed in the suite .
38,604
def unhide_tool(self, context_name, tool_name):
    """Unhide a previously hidden tool so it may be exposed again."""
    context_data = self._context(context_name)
    hidden = context_data["hidden_tools"]
    if tool_name in hidden:
        hidden.remove(tool_name)
        self._flush_tools()
Unhide a tool so that it may be exposed in a suite .
38,605
def alias_tool(self, context_name, tool_name, tool_alias):
    """Register an alias for a specific tool in the given context.

    Raises:
        SuiteError: If the tool already has an alias.
    """
    context_data = self._context(context_name)
    aliases = context_data["tool_aliases"]
    if tool_name in aliases:
        raise SuiteError("Tool %r in context %r is already aliased to %r"
                         % (tool_name, context_name, aliases[tool_name]))
    self._validate_tool(context_name, tool_name)
    aliases[tool_name] = tool_alias
    self._flush_tools()
Register an alias for a specific tool .
38,606
def unalias_tool(self, context_name, tool_name):
    """Deregister a tool's alias, if one exists."""
    context_data = self._context(context_name)
    aliases = context_data["tool_aliases"]
    if tool_name in aliases:
        aliases.pop(tool_name)
        self._flush_tools()
Deregister an alias for a specific tool .
38,607
def get_tool_filepath(self, tool_alias):
    """Given a visible tool alias, return the full path to its executable.

    Returns:
        Path string, or None if the alias is not visible or the suite
        has no tools path.
    """
    if tool_alias not in self.get_tools():
        return None
    if self.tools_path is None:
        return None
    return os.path.join(self.tools_path, tool_alias)
Given a visible tool alias return the full path to the executable .
38,608
def get_tool_context(self, tool_alias):
    """Return the name of the context providing the given visible tool
    alias, or None if the alias is not visible."""
    entry = self.get_tools().get(tool_alias)
    return entry["context_name"] if entry else None
Given a visible tool alias return the name of the context it belongs to .
38,609
def validate(self):
    """Validate every context in the suite.

    Raises:
        SuiteError: If any context fails validation.
    """
    for context_name in self.context_names:
        context = self.context(context_name)
        try:
            context.validate()
        except ResolvedContextError as exc:
            # re-raise as a suite-level error naming the bad context
            raise SuiteError("Error in context %r: %s"
                             % (context_name, str(exc)))
Validate the suite .
38,610
def save(self, path, verbose=False):
    """Save the suite to disk.

    Writes the suite manifest (suite.yaml), one rxt file per context,
    and a bin/ directory of forwarding wrapper scripts, one per visible
    tool alias.

    Args:
        path: Directory to save the suite into. Must either not exist,
            or be the path this suite was previously loaded from.
        verbose: If True, print progress messages.

    Raises:
        SuiteError: If `path` exists and is not this suite's load path.
    """
    path = os.path.realpath(path)
    if os.path.exists(path):
        if self.load_path and self.load_path == path:
            if verbose:
                print "saving over previous suite..."
            # force-load all contexts before removing the dir, since they
            # may be lazily loaded from the very path about to be wiped
            for context_name in self.context_names:
                self.context(context_name)
            shutil.rmtree(path)
        else:
            raise SuiteError("Cannot save, path exists: %r" % path)
    contexts_path = os.path.join(path, "contexts")
    os.makedirs(contexts_path)
    # write the suite manifest
    data = self.to_dict()
    filepath = os.path.join(path, "suite.yaml")
    with open(filepath, "w") as f:
        f.write(dump_yaml(data))
    # write each context as its own rxt file
    for context_name in self.context_names:
        context = self.context(context_name)
        context._set_parent_suite(path, context_name)
        filepath = self._context_path(context_name, path)
        if verbose:
            print "writing %r..." % filepath
        context.save(filepath)
    # create the tool wrappers, under bin/
    tools_path = os.path.join(path, "bin")
    os.makedirs(tools_path)
    if verbose:
        print "creating alias wrappers in %r..." % tools_path
    tools = self.get_tools()
    for tool_alias, d in tools.iteritems():
        tool_name = d["tool_name"]
        context_name = d["context_name"]
        data = self._context(context_name)
        prefix_char = data.get("prefix_char")
        if verbose:
            print("creating %r -> %r (%s context)..."
                  % (tool_alias, tool_name, context_name))
        filepath = os.path.join(tools_path, tool_alias)
        # each wrapper re-invokes the aliased tool within its context
        create_forwarding_script(filepath,
                                 module="suite",
                                 func_name="_FWD__invoke_suite_tool_alias",
                                 context_name=context_name,
                                 tool_name=tool_name,
                                 prefix_char=prefix_char)
Save the suite to disk .
38,611
def print_info(self, buf=sys.stdout, verbose=False):
    """Print a message summarising the contents of the suite.

    Args:
        buf: File-like object to print to.
        verbose: If True, print a per-context table of tool counts and
            paths; otherwise just list the context names.
    """
    _pr = Printer(buf)
    if not self.contexts:
        _pr("Suite is empty.")
        return
    context_names = sorted(self.contexts.iterkeys())
    _pr("Suite contains %d contexts:" % len(context_names))
    if not verbose:
        _pr(' '.join(context_names))
        return
    # gather visible tools and source variants, per context
    tools = self.get_tools().values()
    context_tools = defaultdict(set)
    context_variants = defaultdict(set)
    for entry in tools:
        context_name = entry["context_name"]
        context_tools[context_name].add(entry["tool_name"])
        context_variants[context_name].add(str(entry["variant"]))
    _pr()
    rows = [["NAME", "VISIBLE TOOLS", "PATH"],
            ["----", "-------------", "----"]]
    for context_name in context_names:
        context_path = self._context_path(context_name) or '-'
        ntools = len(context_tools.get(context_name, []))
        if ntools:
            nvariants = len(context_variants[context_name])
            short_desc = "%d tools from %d packages" % (ntools, nvariants)
        else:
            short_desc = "no tools"
        rows.append((context_name, short_desc, context_path))
    _pr("\n".join(columnise(rows)))
Prints a message summarising the contents of the suite .
38,612
def get_valid_build_systems(working_dir, package=None):
    """Returns the build system classes that could build the source in
    the given directory.

    Args:
        working_dir: Source directory to inspect.
        package: Optional developer package; loaded from `working_dir`
            if not given.

    Returns:
        List of build system plugin classes.
    """
    from rez.plugin_managers import plugin_manager
    from rez.exceptions import PackageMetadataError
    try:
        package = package or get_developer_package(working_dir)
    except PackageMetadataError:
        # no package, or a bad package - fall through to dir inspection
        pass
    if package:
        # a package defining build_command uses the 'custom' build system
        if getattr(package, "build_command", None) is not None:
            buildsys_name = "custom"
        else:
            buildsys_name = getattr(package, "build_system", None)
        if buildsys_name:
            cls = plugin_manager.get_plugin_class('build_system',
                                                  buildsys_name)
            return [cls]
    # otherwise, detect which build systems consider the dir valid
    clss = []
    for buildsys_name in get_buildsys_types():
        cls = plugin_manager.get_plugin_class('build_system', buildsys_name)
        if cls.is_valid_root(working_dir, package=package):
            clss.append(cls)
    # prune any build system that is a child of another in the list
    child_clss = set(x.child_build_system() for x in clss)
    clss = list(set(clss) - child_clss)
    return clss
Returns the build system classes that could build the source in given dir .
38,613
def create_build_system(working_dir, buildsys_type=None, package=None,
                        opts=None, write_build_scripts=False, verbose=False,
                        build_args=None, child_build_args=None):
    """Return a new build system that can build the source in working_dir.

    Args:
        working_dir: Source directory to build.
        buildsys_type: Name of the build system plugin to use; detected
            from the source if not given.
        package: Optional developer package.
        opts: Parsed build options.
        write_build_scripts: If True, write build scripts rather than build.
        verbose: Verbose mode.
        build_args: Extra arguments to pass to the build system.
        child_build_args: Extra arguments to pass to child build systems.

    Raises:
        BuildSystemError: If no build system matches, or more than one
            matches and none was explicitly specified.
    """
    from rez.plugin_managers import plugin_manager

    # avoid mutable default arguments; callers passing lists are unaffected
    build_args = [] if build_args is None else build_args
    child_build_args = [] if child_build_args is None else child_build_args

    if not buildsys_type:
        clss = get_valid_build_systems(working_dir, package=package)
        if not clss:
            raise BuildSystemError(
                "No build system is associated with the path %s" % working_dir)
        if len(clss) != 1:
            s = ', '.join(x.name() for x in clss)
            raise BuildSystemError(("Source could be built with one of: %s; "
                                    "Please specify a build system") % s)
        buildsys_type = iter(clss).next().name()
    cls_ = plugin_manager.get_plugin_class('build_system', buildsys_type)
    return cls_(working_dir,
                opts=opts,
                package=package,
                write_build_scripts=write_build_scripts,
                verbose=verbose,
                build_args=build_args,
                child_build_args=child_build_args)
Return a new build system that can build the source in working_dir .
38,614
def build(self, context, variant, build_path, install_path, install=False,
          build_type=BuildType.local):
    """Implement this method to perform the actual build.

    Args:
        context: The resolved build context.
        variant: The variant to build.
        build_path: Path of the build directory.
        install_path: Path to install into.
        install: Whether to install after building.
        build_type: A BuildType value.

    Raises:
        NotImplementedError: Always; subclasses must override.
    """
    raise NotImplementedError
Implement this method to perform the actual build .
38,615
def get_standard_vars(cls, context, variant, build_type, install,
                      build_path, install_path=None):
    """Returns a standard set of environment variables that can be set
    for the build system to use.

    Args:
        context: The resolved build context.
        variant: The variant being built.
        build_type: A BuildType value (local or central).
        install: Whether an install is being performed.
        build_path: Path of the build directory.
        install_path: Optional install directory path.

    Returns:
        Dict of REZ_BUILD_* environment variables.
    """
    from rez.config import config
    package = variant.parent
    variant_requires = map(str, variant.variant_requires)
    # a None index means the package has no variants
    if variant.index is None:
        variant_subpath = ''
    else:
        variant_subpath = variant._non_shortlinked_subpath
    vars_ = {
        'REZ_BUILD_ENV': 1,
        'REZ_BUILD_PATH': build_path,
        'REZ_BUILD_THREAD_COUNT': package.config.build_thread_count,
        'REZ_BUILD_VARIANT_INDEX': variant.index or 0,
        'REZ_BUILD_VARIANT_REQUIRES': ' '.join(variant_requires),
        'REZ_BUILD_VARIANT_SUBPATH': variant_subpath,
        'REZ_BUILD_PROJECT_VERSION': str(package.version),
        'REZ_BUILD_PROJECT_NAME': package.name,
        'REZ_BUILD_PROJECT_DESCRIPTION': (package.description or '').strip(),
        'REZ_BUILD_PROJECT_FILE': package.filepath,
        'REZ_BUILD_SOURCE_PATH': os.path.dirname(package.filepath),
        'REZ_BUILD_REQUIRES': ' '.join(
            str(x) for x in context.requested_packages(True)),
        'REZ_BUILD_REQUIRES_UNVERSIONED': ' '.join(
            x.name for x in context.requested_packages(True)),
        'REZ_BUILD_TYPE': build_type.name,
        'REZ_BUILD_INSTALL': 1 if install else 0,
    }
    if install_path:
        vars_['REZ_BUILD_INSTALL_PATH'] = install_path
    # rez-1 compatibility variable, set only for central (release) builds
    if config.rez_1_environment_variables and \
            not config.disable_rez_1_compatibility and \
            build_type == BuildType.central:
        vars_['REZ_IN_REZ_RELEASE'] = 1
    return vars_
Returns a standard set of environment variables that can be set for the build system to use
38,616
def set_standard_vars(cls, executor, context, variant, build_type, install,
                      build_path, install_path=None):
    """Apply the standard REZ_BUILD_* environment variables to the
    given executor."""
    std_vars = cls.get_standard_vars(context=context,
                                     variant=variant,
                                     build_type=build_type,
                                     install=install,
                                     build_path=build_path,
                                     install_path=install_path)
    for key in std_vars:
        executor.env[key] = std_vars[key]
Sets a standard set of environment variables for the build system to use
38,617
def run_pip_command(command_args, pip_version=None, python_version=None):
    """Run a pip command, either directly or within a resolved context.

    Returns:
        A subprocess.Popen object for the spawned pip process.
    """
    pip_exe, context = find_pip(pip_version, python_version)
    full_command = [pip_exe] + list(command_args)
    if context is None:
        # no rez pip package found; run the system pip directly
        return popen(full_command)
    return context.execute_shell(command=full_command, block=False)
Run a pip command .
38,618
def find_pip(pip_version=None, python_version=None):
    """Find a pip executable, preferring a rez pip package.

    Returns:
        2-tuple of (pip executable name/path, resolved context or None).

    Raises:
        BuildError: If no rez pip package and no system pip is found.
    """
    pip_exe = "pip"
    try:
        context = create_context(pip_version, python_version)
    except BuildError as e:
        # fall back to a system pip, if there is one
        from rez.backport.shutilwhich import which
        pip_exe = which("pip")
        if not pip_exe:
            raise e
        print_warning(
            "pip rez package could not be found; system 'pip' command (%s) "
            "will be used instead." % pip_exe)
        context = None
    return pip_exe, context
Find a pip exe using the given python version .
38,619
def create_context(pip_version=None, python_version=None):
    """Create a context containing the specific pip and python.

    Args:
        pip_version: Optional pip version (or range) to request.
        python_version: Optional python version; defaults to the latest
            python rez package, else the current interpreter's version.

    Returns:
        A ResolvedContext containing pip and python packages.

    Raises:
        BuildError: If the pip or python rez package is not present.
    """
    # determine the pip request
    if pip_version:
        pip_req = "pip-%s" % str(pip_version)
    else:
        pip_req = "pip"
    # determine the python request (major.minor only)
    if python_version:
        ver = Version(str(python_version))
        major_minor_ver = ver.trim(2)
        py_req = "python-%s" % str(major_minor_ver)
    else:
        package = get_latest_package("python")
        if package:
            major_minor_ver = package.version.trim(2)
        else:
            # no python package found; use this interpreter's version
            major_minor_ver = '.'.join(map(str, sys.version_info[:2]))
        py_req = "python-%s" % str(major_minor_ver)
    request = [pip_req, py_req]
    # translate package-not-found errors into a build error
    with convert_errors(from_=(PackageFamilyNotFoundError,
                               PackageNotFoundError),
                        to=BuildError,
                        msg="Cannot run - pip or python rez "
                            "package is not present"):
        context = ResolvedContext(request)
    pip_variant = context.get_resolved_package("pip")
    pip_package = pip_variant.parent
    print_info("Using %s (%s)" % (pip_package.qualified_name,
                                  pip_variant.uri))
    return context
Create a context containing the specific pip and python .
38,620
def convert_old_variant_handle(handle_dict):
    """Convert a variant handle from serialize_version < 4.0 into the
    current filesystem-repository form."""
    old_variables = handle_dict.get("variables", {})
    variables = dict(repository_type="filesystem")
    for old_key, new_key in variant_key_conversions.items():
        variables[new_key] = old_variables.get(old_key)
    path = handle_dict["path"]
    filename = os.path.basename(path)
    # a file named 'package.*' holds a single variant; anything else is
    # a combined-variants file
    if os.path.splitext(filename)[0] == "package":
        key = "filesystem.variant"
    else:
        key = "filesystem.variant.combined"
    return dict(key=key, variables=variables)
Convert a variant handle from serialize_version < 4 . 0 .
38,621
def convert_requirement(req):
    """Converts a pkg_resources.Requirement object into a list of rez
    package request strings.

    Each (op, version) spec maps to the equivalent rez request; unknown
    operators fall back to an unversioned dependency with a warning.
    """
    pkg_name = convert_name(req.project_name)
    if not req.specs:
        # no version constraints - plain unversioned request
        return [pkg_name]
    req_strs = []
    for spec in req.specs:
        op, ver = spec
        ver = convert_version(ver)
        if op == "<":
            r = "%s-0+<%s" % (pkg_name, ver)
            req_strs.append(r)
        elif op == "<=":
            # anything below ver, OR exactly ver
            r = "%s-0+<%s|%s" % (pkg_name, ver, ver)
            req_strs.append(r)
        elif op == "==":
            r = "%s-%s" % (pkg_name, ver)
            req_strs.append(r)
        elif op == ">=":
            r = "%s-%s+" % (pkg_name, ver)
            req_strs.append(r)
        elif op == ">":
            # at-or-above ver, but excluding ver itself
            r1 = "%s-%s+" % (pkg_name, ver)
            r2 = "!%s-%s" % (pkg_name, ver)
            req_strs.append(r1)
            req_strs.append(r2)
        elif op == "!=":
            r = "!%s-%s" % (pkg_name, ver)
            req_strs.append(r)
        else:
            print >> sys.stderr, "Warning: Can't understand op '%s', just depending on unversioned package..." % op
            req_strs.append(pkg_name)
    return req_strs
Converts a pkg_resources . Requirement object into a list of Rez package request strings .
38,622
def get_dist_dependencies(name, recurse=True):
    """Get the dependencies of the given already-installed distribution.

    Args:
        name: Name of the installed distribution.
        recurse: If True, follow dependencies transitively; otherwise
            stop after the direct dependencies.

    Returns:
        Set of unversioned rez package names, including `name` itself.
    """
    dist = pkg_resources.get_distribution(name)
    pkg_name = convert_name(dist.project_name)
    reqs = set()
    working = set([dist])
    depth = 0
    # breadth-first walk of the dependency graph
    while working:
        deps = set()
        for distname in working:
            dist = pkg_resources.get_distribution(distname)
            pkg_name = convert_name(dist.project_name)
            reqs.add(pkg_name)
            for req in dist.requires():
                reqs_ = convert_requirement(req)
                # strip versions; skip conflict ('!') requests
                deps |= set(x.split('-', 1)[0] for x in reqs_
                            if not x.startswith('!'))
        # only visit packages not already seen
        working = deps - reqs
        depth += 1
        if (not recurse) and (depth >= 2):
            break
    return reqs
Get the dependencies of the given already installed distribution .
38,623
def add_graph(self, other):
    """Merge another graph's nodes and edges into this graph."""
    missing_nodes = (n for n in other.nodes() if not n in self.nodes())
    self.add_nodes(missing_nodes)
    for node in other.nodes():
        for neighbor in other.neighbors(node):
            edge = (node, neighbor)
            if not self.has_edge(edge):
                self.add_edge(edge)
Add other graph to this graph .
38,624
def add_spanning_tree(self, st):
    """Add a spanning tree (child -> parent mapping) to the graph."""
    self.add_nodes(list(st.keys()))
    for child, parent in st.items():
        # roots map to None and contribute no edge
        if parent is not None:
            self.add_edge((parent, child))
Add a spanning tree to the graph .
38,625
def complete(self):
    """Add every missing edge so the graph becomes complete."""
    for u in self.nodes():
        for v in self.nodes():
            if u != v and not self.has_edge((u, v)):
                self.add_edge((u, v))
Make the graph a complete graph .
38,626
def inverse(self):
    """Return the complement graph: same nodes, exactly the edges this
    graph is missing."""
    complement = self.__class__()
    complement.add_nodes(self.nodes())
    # start complete, then remove every edge present in this graph
    complement.complete()
    for edge in self.edges():
        if complement.has_edge(edge):
            complement.del_edge(edge)
    return complement
Return the inverse of the graph .
38,627
def reverse(self):
    """Return a new graph with every edge direction flipped. Weights,
    labels and attributes are preserved.

    Raises:
        AssertionError: If the graph type is not directed.
    """
    assert self.DIRECTED, (
        "Undirected graph types such as %s cannot be reversed"
        % self.__class__.__name__)
    flipped = self.__class__()
    flipped.add_nodes(n for n in self.nodes())
    for (u, v) in self.edges():
        weight = self.edge_weight((u, v))
        label = self.edge_label((u, v))
        attributes = self.edge_attributes((u, v))
        flipped.add_edge((v, u), weight, label, attributes)
    return flipped
Generate the reverse of a directed graph returns an identical graph if not directed . Attributes & weights are preserved .
38,628
def get_lock_request(name, version, patch_lock, weak=True):
    """Given a package and patch lock, return the equivalent request.

    Returns:
        PackageRequest, or None for no_lock or when there is no version
        to lock against.
    """
    prefix = '~' if weak else ''
    if patch_lock == PatchLock.lock:
        # exact lock to the current version
        return PackageRequest("%s%s==%s" % (prefix, name, str(version)))
    if (patch_lock == PatchLock.no_lock) or (not version):
        return None
    # partial lock: trim the version to the lock's rank
    trimmed = version.trim(patch_lock.rank)
    return PackageRequest("%s%s-%s" % (prefix, name, str(trimmed)))
Given a package and patch lock return the equivalent request .
38,629
def requested_packages(self, include_implicit=False):
    """Return the package requests in this context's request, optionally
    including the implicit packages."""
    if include_implicit:
        return self._package_requests + self.implicit_packages
    return self._package_requests
Get packages in the request .
38,630
def get_resolved_package(self, name):
    """Return the first resolved variant with the given package name,
    or None if the package is not in the resolve."""
    for variant in self._resolved_packages:
        if variant.name == name:
            return variant
    return None
Returns a Variant object or None if the package is not in the resolve .
38,631
def get_patched_request(self, package_requests=None,
                        package_subtractions=None, strict=False, rank=0):
    """Get a patched request.

    Args:
        package_requests: Overriding requests; a request here replaces a
            same-named request already present. A request of the form
            '^name' removes that package instead.
        package_subtractions: Package names to remove from the request.
        strict: If True, start from the resolved packages rather than
            from the original request.
        rank: If > 1, add weak requests limiting unoverridden resolved
            packages to versions matching their current version trimmed
            to rank-1 tokens.

    Returns:
        List of PackageRequest objects.
    """
    # start from either the exact resolve, or the original request
    if strict:
        request = []
        for variant in self.resolved_packages:
            req = PackageRequest(variant.qualified_package_name)
            request.append(req)
    else:
        request = self.requested_packages()[:]
    # extract '^name' entries into subtractions
    if package_requests:
        package_subtractions = package_subtractions or []
        indexes = []
        for i, req in enumerate(package_requests):
            name = str(req)
            if name.startswith('^'):
                package_subtractions.append(name[1:])
                indexes.append(i)
        # delete in reverse so earlier indexes stay valid
        for i in reversed(indexes):
            del package_requests[i]
    if package_subtractions:
        request = [x for x in request if x.name not in package_subtractions]
    # apply overriding requests; a same-named request of the same mode
    # (conflict/weak) replaces in place, otherwise it is appended
    if package_requests:
        request_dict = dict((x.name, (i, x)) for i, x in enumerate(request))
        request_ = []
        for req in package_requests:
            if isinstance(req, basestring):
                req = PackageRequest(req)
            if req.name in request_dict:
                i, req_ = request_dict[req.name]
                if (req_ is not None) and (req_.conflict == req.conflict) \
                        and (req_.weak == req.weak):
                    request[i] = req
                    del request_dict[req.name]
                else:
                    request_.append(req)
            else:
                request_.append(req)
        request += request_
    # add weak version-limiting requests for unoverridden packages
    if not strict and rank > 1:
        overrides = set(x.name for x in package_requests if not x.conflict)
        rank_limiters = []
        for variant in self.resolved_packages:
            if variant.name not in overrides:
                if len(variant.version) >= rank:
                    version = variant.version.trim(rank - 1)
                    version = version.next()
                    req = "~%s<%s" % (variant.name, str(version))
                    rank_limiters.append(req)
        request += rank_limiters
    return request
Get a patched request .
38,632
def write_to_buffer(self, buf):
    """Serialize the context to a file-like buffer, as YAML or JSON
    depending on config.rxt_as_yaml."""
    doc = self.to_dict()
    if config.rxt_as_yaml:
        serialized = dump_yaml(doc)
    else:
        serialized = json.dumps(doc, indent=4, separators=(",", ": "))
    buf.write(serialized)
Save the context to a buffer .
38,633
def get_current(cls):
    """Return the context the current environment was created from, or
    None if there isn't one (or its rxt file no longer exists)."""
    filepath = os.getenv("REZ_RXT_FILE")
    if filepath and os.path.exists(filepath):
        return cls.load(filepath)
    return None
Get the context for the current env if there is one .
38,634
def load(cls, path):
    """Load a resolved context from the given rxt file."""
    with open(path) as f:
        context = cls.read_from_buffer(f, path)
    # remember where the context came from
    context.set_load_path(path)
    return context
Load a resolved context from file .
38,635
def read_from_buffer(cls, buf, identifier_str=None):
    """Load a context from a file-like buffer, routing any failure
    through cls._load_error."""
    try:
        return cls._read_from_buffer(buf, identifier_str)
    except Exception as exc:
        cls._load_error(exc, identifier_str)
Load the context from a buffer .
38,636
def get_resolve_diff(self, other):
    """Get the difference between the resolve in this context and another.

    Args:
        other: Another ResolvedContext, resolved against the same
            package search paths.

    Returns:
        Dict with any of the keys 'newer_packages', 'older_packages',
        'added_packages', 'removed_packages'; empty if the resolves
        are identical.

    Raises:
        ResolvedContextError: If the package search paths differ.
    """
    if self.package_paths != other.package_paths:
        from difflib import ndiff
        diff = ndiff(self.package_paths, other.package_paths)
        raise ResolvedContextError("Cannot diff resolves, package search "
                                   "paths differ:\n%s" % '\n'.join(diff))
    d = {}
    # compare at the package (not variant) level
    self_pkgs_ = set(x.parent for x in self._resolved_packages)
    other_pkgs_ = set(x.parent for x in other._resolved_packages)
    self_pkgs = self_pkgs_ - other_pkgs_
    other_pkgs = other_pkgs_ - self_pkgs_
    if not (self_pkgs or other_pkgs):
        return d
    self_fams = dict((x.name, x) for x in self_pkgs)
    other_fams = dict((x.name, x) for x in other_pkgs)
    newer_packages = {}
    older_packages = {}
    added_packages = set()
    removed_packages = set()
    for pkg in self_pkgs:
        if pkg.name not in other_fams:
            removed_packages.add(pkg)
        else:
            other_pkg = other_fams[pkg.name]
            if other_pkg.version > pkg.version:
                # other resolve is newer; collect the in-between versions
                r = VersionRange.as_span(lower_version=pkg.version,
                                         upper_version=other_pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version)
                newer_packages[pkg.name] = pkgs
            elif other_pkg.version < pkg.version:
                # other resolve is older; collect the in-between versions
                r = VersionRange.as_span(lower_version=other_pkg.version,
                                         upper_version=pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version, reverse=True)
                older_packages[pkg.name] = pkgs
    for pkg in other_pkgs:
        if pkg.name not in self_fams:
            added_packages.add(pkg)
    if newer_packages:
        d["newer_packages"] = newer_packages
    if older_packages:
        d["older_packages"] = older_packages
    if added_packages:
        d["added_packages"] = added_packages
    if removed_packages:
        d["removed_packages"] = removed_packages
    return d
Get the difference between the resolve in this context and another .
38,637
def print_resolve_diff(self, other, heading=None):
    """Print the difference between the resolves of two contexts.

    Args:
        other: Another ResolvedContext to diff against.
        heading: One of: None (no heading); True (use the contexts'
            loaded file names); or a 2-tuple of heading strings.
    """
    d = self.get_resolve_diff(other)
    if not d:
        return
    rows = []
    if heading is True and self.load_path and other.load_path:
        a = os.path.basename(self.load_path)
        b = os.path.basename(other.load_path)
        heading = (a, b)
    if isinstance(heading, tuple):
        rows.append(list(heading) + [""])
        rows.append(('-' * len(heading[0]), '-' * len(heading[1]), ""))
    newer_packages = d.get("newer_packages", {})
    older_packages = d.get("older_packages", {})
    added_packages = d.get("added_packages", set())
    removed_packages = d.get("removed_packages", set())
    # pkgs lists span this resolve's version to the other's, inclusive
    if newer_packages:
        for name, pkgs in newer_packages.iteritems():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(+%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name,
                         diff_str))
    if older_packages:
        for name, pkgs in older_packages.iteritems():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(-%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name,
                         diff_str))
    if added_packages:
        for pkg in sorted(added_packages, key=lambda x: x.name):
            rows.append(("-", pkg.qualified_name, ""))
    if removed_packages:
        for pkg in sorted(removed_packages, key=lambda x: x.name):
            rows.append((pkg.qualified_name, "-", ""))
    print '\n'.join(columnise(rows))
Print the difference between the resolve of two contexts .
38,638
def get_dependency_graph(self):
    """Generate a digraph of dependencies between resolved packages.

    Each node is a package name labelled with the qualified package
    name; each edge points from a package to one of its non-conflict
    requirements.
    """
    from rez.vendor.pygraph.classes.digraph import digraph
    labels = {}
    edge_set = set()
    for variant in self._resolved_packages:
        labels[variant.name] = variant.qualified_package_name
        for request in variant.get_requires():
            if not request.conflict:
                edge_set.add((variant.name, request.name))
    g = digraph()
    base_attrs = [("fontsize", 10),
                  ("fillcolor", "#AAFFAA"),
                  ("style", "filled")]
    for name, qualified_name in labels.items():
        g.add_node(name, attrs=base_attrs + [("label", qualified_name)])
    for edge in edge_set:
        g.add_edge(edge)
    return g
Generate the dependency graph .
38,639
def validate(self):
    """Validate the data of every resolved package.

    Raises:
        ResolvedContextError: If any package fails validation.
    """
    try:
        for pkg in self.resolved_packages:
            pkg.validate_data()
    except RezError as exc:
        raise ResolvedContextError("%s: %s"
                                   % (exc.__class__.__name__, str(exc)))
Validate the context .
38,640
def get_environ(self, parent_environ=None):
    """Return the environ dict resulting from interpreting this context
    with a passive Python interpreter."""
    interpreter = Python(target_environ={}, passive=True)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    return executor.get_output()
Get the environ dict resulting from interpreting this context .
38,641
def get_key(self, key, request_only=False):
    """Return {package name: (package, value)} for each resolved package
    whose attribute `key` is not None.

    Args:
        key: Attribute name to read from each package.
        request_only: If True, only include explicitly requested
            (non-conflict) packages.
    """
    requested = set(x.name for x in self._package_requests
                    if not x.conflict)
    values = {}
    for pkg in self.resolved_packages:
        if request_only and pkg.name not in requested:
            continue
        value = getattr(pkg, key)
        if value is not None:
            values[pkg.name] = (pkg, value)
    return values
Get a data key value for each resolved package .
38,642
def get_conflicting_tools(self, request_only=False):
    """Return {tool name: set of variants} for tools provided by more
    than one package."""
    from collections import defaultdict
    providers = defaultdict(set)
    tools_dict = self.get_tools(request_only=request_only)
    for variant, tools in tools_dict.values():
        for tool in tools:
            providers[tool].add(variant)
    return dict((k, v) for k, v in providers.items() if len(v) > 1)
Returns tools of the same name provided by more than one package .
38,643
def get_shell_code(self, shell=None, parent_environ=None,
                   style=OutputStyle.file):
    """Return the shell code resulting from interpreting this context."""
    executor = self._create_executor(interpreter=create_shell(shell),
                                     parent_environ=parent_environ)
    if self.load_path and os.path.isfile(self.load_path):
        # let spawned processes know which rxt file this context is from
        executor.env.REZ_RXT_FILE = self.load_path
    self._execute(executor)
    return executor.get_output(style)
Get the shell code resulting from intepreting this context .
38,644
def get_actions(self, parent_environ=None):
    """Return the list of rex.Action objects resulting from interpreting
    this context. Provided mainly for testing purposes."""
    interpreter = Python(target_environ={}, passive=True)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    return executor.actions
Get the list of rex . Action objects resulting from interpreting this context . This is provided mainly for testing purposes .
38,645
def apply(self, parent_environ=None):
    """Apply the context to the current python session, mutating
    os.environ in place."""
    interpreter = Python(target_environ=os.environ)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    interpreter.apply_environ()
Apply the context to the current python session .
38,646
def which(self, cmd, parent_environ=None, fallback=False):
    """Find a program in the resolved environment.

    Args:
        cmd: Program name to search for.
        parent_environ: Environment to interpret the context within.
        fallback: If True and the program is not found in the resolved
            environment, also search the current environment.
    """
    env = self.get_environ(parent_environ=parent_environ)
    path = which(cmd, env=env)
    if path is None and fallback:
        path = which(cmd)
    return path
Find a program in the resolved environment .
38,647
def execute_command(self, args, parent_environ=None, **subprocess_kwargs):
    """Run a command within the resolved context's environment.

    Returns:
        A subprocess.Popen object.
    """
    # don't mutate the caller's environ; os.environ/None mean "fresh"
    if parent_environ in (None, os.environ):
        target_environ = {}
    else:
        target_environ = parent_environ.copy()
    interpreter = Python(target_environ=target_environ)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    return interpreter.subprocess(args, **subprocess_kwargs)
Run a command within a resolved context .
38,648
def execute_rex_code(self, code, filename=None, shell=None,
                     parent_environ=None, **Popen_args):
    """Run some rex code within the context's shell, non-blocking."""
    def _callback(executor):
        executor.execute_code(code, filename=filename)
    return self.execute_shell(shell=shell,
                              parent_environ=parent_environ,
                              command='',
                              block=False,
                              actions_callback=_callback,
                              **Popen_args)
Run some rex code in the context .
38,649
def execute_shell(self, shell=None, parent_environ=None, rcfile=None,
                  norc=False, stdin=False, command=None, quiet=False,
                  block=None, actions_callback=None,
                  post_actions_callback=None, context_filepath=None,
                  start_new_session=False, detached=False, pre_command=None,
                  **Popen_args):
    """Spawn a possibly-interactive shell within this context.

    Args:
        shell: Shell type; the current shell if None.
        parent_environ: Environment to run the shell within.
        rcfile: Custom startup script.
        norc: If True, skip the startup script.
        stdin: If True, read commands from stdin.
        command: Command to run in the shell (string, or argv list).
        quiet: Suppress rez messaging.
        block: Block until the shell exits; defaults to True only for
            an interactive shell (no command and no stdin).
        actions_callback: Called with the executor before interpreting.
        post_actions_callback: Called with the executor afterwards.
        context_filepath: Where to write the interpreted context script.
        start_new_session: Run the shell in a new process session.
        detached: Open the shell in a separate terminal.
        pre_command: Command prefix (e.g. terminal emulator argv).
        Popen_args: Extra args passed through to subprocess.Popen.

    Returns:
        (returncode, stdout, stderr) if blocking, otherwise the Popen
        object of the spawned shell.
    """
    sh = create_shell(shell)
    # a command given as an argv list is joined into a single string
    if hasattr(command, "__iter__"):
        command = sh.join(command)
    if start_new_session:
        Popen_args.update(config.new_session_popen_args)
    if detached:
        term_cmd = config.terminal_emulator_command
        if term_cmd:
            pre_command = term_cmd.strip().split()
    # block by default only for an interactive session
    if block is None:
        block = not (command or stdin)
    # a detached terminal may outlive us, so skip tmpdir cleanup then
    tmpdir = self.tmpdir_manager.mkdtemp(cleanup=not detached)
    if self.load_path and os.path.isfile(self.load_path):
        rxt_file = self.load_path
    else:
        rxt_file = os.path.join(tmpdir, "context.rxt")
        self.save(rxt_file)
    context_file = context_filepath or \
        os.path.join(tmpdir, "context.%s" % sh.file_extension())
    # interpret the context into shell code
    executor = self._create_executor(sh, parent_environ)
    executor.env.REZ_RXT_FILE = rxt_file
    executor.env.REZ_CONTEXT_FILE = context_file
    if actions_callback:
        actions_callback(executor)
    self._execute(executor)
    if post_actions_callback:
        post_actions_callback(executor)
    context_code = executor.get_output()
    with open(context_file, 'w') as f:
        f.write(context_code)
    quiet = quiet or \
        (RezToolsVisibility[config.rez_tools_visibility]
         == RezToolsVisibility.never)
    # spawn the shell itself
    p = sh.spawn_shell(context_file,
                       tmpdir,
                       rcfile=rcfile,
                       norc=norc,
                       stdin=stdin,
                       command=command,
                       env=parent_environ,
                       quiet=quiet,
                       pre_command=pre_command,
                       **Popen_args)
    if block:
        stdout, stderr = p.communicate()
        return p.returncode, stdout, stderr
    else:
        return p
Spawn a possibly - interactive shell .
38,650
def to_dict(self, fields=None):
    """Convert the context to a dict containing only builtin types.

    Args:
        fields: Optional collection of field names; if given, only these
            fields are included in the result.

    Returns:
        Dict representation of the context.
    """
    data = {}
    def _add(field):
        # whether this field should be serialized
        return (fields is None or field in fields)
    if _add("resolved_packages"):
        resolved_packages = []
        for pkg in (self._resolved_packages or []):
            resolved_packages.append(pkg.handle.to_dict())
        data["resolved_packages"] = resolved_packages
    if _add("serialize_version"):
        data["serialize_version"] = \
            '.'.join(map(str, ResolvedContext.serialize_version))
    if _add("patch_locks"):
        data["patch_locks"] = dict((k, v.name) for k, v in self.patch_locks)
    if _add("package_orderers"):
        package_orderers = [package_order.to_pod(x)
                            for x in (self.package_orderers or [])]
        data["package_orderers"] = package_orderers or None
    if _add("package_filter"):
        data["package_filter"] = self.package_filter.to_pod()
    if _add("graph"):
        # keep an already-compacted graph string as-is
        if self.graph_string and self.graph_string.startswith('{'):
            graph_str = self.graph_string
        else:
            g = self.graph()
            graph_str = write_compacted(g)
        data["graph"] = graph_str
    # remaining simple fields are always gathered, then filtered below
    data.update(dict(
        timestamp=self.timestamp,
        requested_timestamp=self.requested_timestamp,
        building=self.building,
        caching=self.caching,
        implicit_packages=map(str, self.implicit_packages),
        package_requests=map(str, self._package_requests),
        package_paths=self.package_paths,
        default_patch_lock=self.default_patch_lock.name,
        rez_version=self.rez_version,
        rez_path=self.rez_path,
        user=self.user,
        host=self.host,
        platform=self.platform,
        arch=self.arch,
        os=self.os,
        created=self.created,
        parent_suite_path=self.parent_suite_path,
        suite_context_name=self.suite_context_name,
        status=self.status_.name,
        failure_description=self.failure_description,
        from_cache=self.from_cache,
        solve_time=self.solve_time,
        load_time=self.load_time,
        num_loaded_packages=self.num_loaded_packages))
    if fields:
        data = dict((k, v) for k, v in data.iteritems() if k in fields)
    return data
Convert context to dict containing only builtin types .
38,651
def add_sys_paths(paths):
    """Context-manager body: extend sys.path with `paths`, restoring
    the original sys.path on scope exit."""
    saved_path = sys.path[:]
    sys.path.extend(paths)
    try:
        yield
    finally:
        sys.path = saved_path
Add to sys . path and revert on scope exit .
38,652
def popen ( args , ** kwargs ) : if "stdin" not in kwargs : try : file_no = sys . stdin . fileno ( ) except AttributeError : file_no = sys . __stdin__ . fileno ( ) if file_no not in ( 0 , 1 , 2 ) : kwargs [ "stdin" ] = subprocess . PIPE return subprocess . Popen ( args , ** kwargs )
Wrapper for subprocess . Popen .
38,653
def get_relative_to_remote(self):
    """Return the number of commits we are relative to the remote.

    Negative is behind, positive in front, zero means we are matched
    to remote.
    """
    line = self.git("status", "--short", "-b")[0]
    bracketed = re.findall("\[([^\]]+)\]", line)
    if not bracketed:
        return 0
    try:
        # Last bracketed token looks like "ahead 2" / "behind 3".
        direction, count = bracketed[-1].split()
        assert (direction in ("ahead", "behind"))
        count = int(count)
        return -count if direction == "behind" else count
    except Exception as e:
        raise ReleaseVCSError(
            ("Problem parsing first line of result of 'git status "
             "--short -b' (%s):\n%s") % (line, str(e)))
Return the number of commits we are relative to the remote . Negative is behind positive in front zero means we are matched to remote .
38,654
def links(self, obj):
    """Return all nodes connected by the given hyperedge, or all
    hyperedges connected to the given hypernode."""
    # Hyperedges take precedence; otherwise treat obj as a node.
    try:
        return self.edge_links[obj]
    except KeyError:
        return self.node_links[obj]
Return all nodes connected by the given hyperedge or all hyperedges connected to the given hypernode .
38,655
def neighbors(self, obj):
    """Return all neighbors adjacent to the given node (the node itself
    is excluded)."""
    adjacent = set()
    for hyperedge in self.node_links[obj]:
        adjacent.update(self.edge_links[hyperedge])
    adjacent.discard(obj)
    return list(adjacent)
Return all neighbors adjacent to the given node .
38,656
def del_node(self, node):
    """Delete the given node from the hypergraph (no-op if absent)."""
    if not self.has_node(node):
        return
    # Remove the node from every hyperedge it participates in.
    for hyperedge in self.node_links[node]:
        self.edge_links[hyperedge].remove(node)
    del self.node_links[node]
    # Mirror the removal in the underlying 2-graph representation.
    self.graph.del_node((node, 'n'))
Delete a given node from the hypergraph .
38,657
def add_hyperedge(self, hyperedge):
    """Add the given hyperedge to the hypergraph (no-op if present)."""
    if hyperedge in self.edge_links:
        return
    self.edge_links[hyperedge] = []
    # Hyperedges are tracked as ('h'-tagged) nodes of the inner 2-graph.
    self.graph.add_node((hyperedge, 'h'))
Add given hyperedge to the hypergraph .
38,658
def del_hyperedge(self, hyperedge):
    """Delete the given hyperedge (no-op if absent)."""
    if hyperedge not in self.hyperedges():
        return
    # Detach every member node, then drop the edge itself.
    for node in self.edge_links[hyperedge]:
        self.node_links[node].remove(hyperedge)
    self.edge_links.pop(hyperedge)
    self.del_edge_labeling(hyperedge)
    self.graph.del_node((hyperedge, 'h'))
Delete the given hyperedge .
38,659
def link(self, node, hyperedge):
    """Link the given node and hyperedge.

    Raises:
        AdditionError: If the link already exists.
    """
    if hyperedge in self.node_links[node]:
        raise AdditionError("Link (%s, %s) already in graph" % (node, hyperedge))
    # Record the link on both sides, then mirror it in the 2-graph.
    self.edge_links[hyperedge].append(node)
    self.node_links[node].append(hyperedge)
    self.graph.add_edge(((node, 'n'), (hyperedge, 'h')))
Link given node and hyperedge .
38,660
def unlink(self, node, hyperedge):
    """Remove the link between the given node and hyperedge."""
    # Drop each side's reference to the other, then remove the
    # corresponding edge from the underlying 2-graph representation.
    self.edge_links[hyperedge].remove(node)
    self.node_links[node].remove(hyperedge)
    self.graph.del_edge(((node, 'n'), (hyperedge, 'h')))
Unlink given node and hyperedge .
38,661
def rank(self):
    """Return the rank of the hypergraph: the size of its largest
    hyperedge (0 when there are no hyperedges)."""
    sizes = [len(self.edge_links[e]) for e in self.hyperedges()]
    return max(sizes) if sizes else 0
Return the rank of the given hypergraph .
38,662
def get_next_base26(prev=None):
    """Increment letter-based (base-26) IDs.

    None/'' -> 'a', 'a' -> 'b', 'z' -> 'aa', 'az' -> 'ba', 'zz' -> 'aaa'.

    Raises:
        ValueError: If `prev` contains characters other than a-z.
    """
    if not prev:
        return 'a'
    if not re.match("^[a-z]*$", prev):
        raise ValueError("Invalid base26")
    # Ripple-carry from the right: 'z' wraps to 'a' and carries left.
    chars = list(prev)
    i = len(chars) - 1
    while i >= 0 and chars[i] == 'z':
        chars[i] = 'a'
        i -= 1
    if i < 0:
        # Carried off the left end: grow by one digit.
        return 'a' + ''.join(chars)
    chars[i] = chr(ord(chars[i]) + 1)
    return ''.join(chars)
Increment letter - based IDs .
38,663
def create_unique_base26_symlink(path, source):
    """Create a base-26 named symlink in `path` pointing at `source`.

    If a matching symlink already exists it is returned; otherwise the
    next base-26 name after the highest existing symlink name is used.
    Creation races with other processes are retried a bounded number of
    times.

    Returns:
        str: Full path of the (existing or created) symlink.

    Raises:
        RuntimeError: If contention persists after the retry budget.
    """
    attempts = 0
    while True:
        existing = find_matching_symlink(path, source)
        if existing:
            return os.path.join(path, existing)
        link_names = [n for n in os.listdir(path)
                      if os.path.islink(os.path.join(path, n))]
        last = max(link_names) if link_names else None
        candidate = os.path.join(path, get_next_base26(last))
        try:
            os.symlink(source, candidate)
            return candidate
        except OSError as e:
            if e.errno != errno.EEXIST:
                raise
            # Another process grabbed this name; retry with a fresh scan.
            if attempts > 10:
                raise RuntimeError(
                    "Variant shortlink not created - there was too much contention.")
            attempts += 1
Create a base - 26 symlink in path pointing to source .
38,664
def find_wheels(projects, search_dirs):
    """Find wheels from which we can import PROJECTS.

    For each project the first matching wheel across `search_dirs` is
    used; a project with no wheel is logged as fatal (and omitted).

    Returns:
        list of str: Absolute wheel paths found.
    """
    found = []
    for project in projects:
        match = None
        for dirname in search_dirs:
            hits = glob.glob(os.path.join(dirname, project + '-*.whl'))
            if hits:
                match = hits[0]
                break
        if match is None:
            logger.fatal('Cannot find a wheel for %s' % (project,))
        else:
            found.append(os.path.abspath(match))
    return found
Find wheels from which we can import PROJECTS .
38,665
def relative_script(lines):
    """Return a script that'll work in a relocatable environment."""
    activate = "import os; activate_this=os.path.join(os.path.dirname(os.path.realpath(__file__)), 'activate_this.py'); exec(compile(open(activate_this).read(), activate_this, 'exec'), dict(__file__=activate_this)); del os, activate_this"
    # Insert after the last `from __future__ import ...` line if any,
    # otherwise right after the (shebang) first line.
    insert_at = 1
    for idx in range(len(lines) - 1, -1, -1):
        if lines[idx].split()[:3] == ['from', '__future__', 'import']:
            insert_at = idx + 1
            break
    return lines[:insert_at] + ['', activate, ''] + lines[insert_at:]
Return a script that ll work in a relocatable environment .
38,666
def fixup_pth_and_egg_link(home_dir, sys_path=None):
    """Makes .pth and .egg-link files use relative paths.

    Only directories inside `home_dir` are rewritten; unwritable files
    are skipped with a warning.
    """
    home_dir = os.path.normcase(os.path.abspath(home_dir))
    if sys_path is None:
        sys_path = sys.path
    for entry in sys_path:
        if not entry:
            entry = '.'
        if not os.path.isdir(entry):
            continue
        entry = os.path.normcase(os.path.abspath(entry))
        if not entry.startswith(home_dir):
            logger.debug('Skipping system (non-environment) directory %s' % entry)
            continue
        for name in os.listdir(entry):
            full = os.path.join(entry, name)
            if full.endswith('.pth'):
                if os.access(full, os.W_OK):
                    fixup_pth_file(full)
                else:
                    logger.warn('Cannot write .pth file %s, skipping' % full)
            elif full.endswith('.egg-link'):
                if os.access(full, os.W_OK):
                    fixup_egg_link(full)
                else:
                    logger.warn('Cannot write .egg-link file %s, skipping' % full)
Makes . pth and . egg - link files use relative paths
38,667
def make_relative_path(source, dest, dest_is_directory=True):
    """Make a filename relative: the path to `dest` as referred to from
    the file `source`.

    If dest_is_directory is False, the last component of `dest` is
    treated as a filename and re-appended after relativization.
    """
    source_dir = os.path.dirname(source)
    dest_name = None
    if not dest_is_directory:
        dest_name = os.path.basename(dest)
        dest = os.path.dirname(dest)
    dest = os.path.normpath(os.path.abspath(dest))
    source_dir = os.path.normpath(os.path.abspath(source_dir))
    dest_parts = dest.strip(os.path.sep).split(os.path.sep)
    source_parts = source_dir.strip(os.path.sep).split(os.path.sep)
    # Drop the common path prefix.
    while dest_parts and source_parts and dest_parts[0] == source_parts[0]:
        dest_parts.pop(0)
        source_parts.pop(0)
    # Climb out of the remaining source dirs, then descend into dest.
    rel_parts = ['..'] * len(source_parts) + dest_parts
    if dest_name is not None:
        rel_parts.append(dest_name)
    if not rel_parts:
        return './'
    return os.path.sep.join(rel_parts)
Make a filename relative where the filename is dest and it is being referred to from the filename source .
38,668
def create_bootstrap_script(extra_text, python_version=''):
    """Creates a bootstrap script, which is like this script but with
    extend_parser, adjust_options, and after_install hooks.

    Returns this module's own source prefixed with a shebang for the
    requested python version, with `extra_text` substituted for the
    extension marker.
    """
    filename = __file__
    if filename.endswith('.pyc'):
        filename = filename[:-1]
    with codecs.open(filename, 'r', encoding='utf-8') as f:
        content = f.read()
    py_exe = 'python%s' % python_version
    header = ('#!/usr/bin/env %s\n' % py_exe) + '## WARNING: This file is generated\n'
    # Marker written as two adjacent literals so this function does not
    # substitute the marker occurring in its own source.
    return (header + content).replace('##EXT' 'END##', extra_text)
Creates a bootstrap script which is like this script but with extend_parser adjust_options and after_install hooks .
38,669
def read_data(file, endian, num=1):
    """Read `num` 32-bit unsigned integers from `file` with the given
    endianness ('<' little / '>' big).

    Returns:
        A single int when num == 1, otherwise a tuple of ints.
    """
    values = struct.unpack(endian + 'L' * num, file.read(num * 4))
    return values[0] if len(values) == 1 else values
Read a given number of 32 - bits unsigned integers from the given file with the given endianness .
38,670
def _stdout_level ( self ) : for level , consumer in self . consumers : if consumer is sys . stdout : return level return self . FATAL
Returns the level that stdout runs at
38,671
def get_config_section(self, name):
    """Get a section of the configuration as (key, value) pairs; an
    empty list when the section does not exist."""
    if not self.config.has_section(name):
        return []
    return self.config.items(name)
Get a section of a configuration
38,672
def get_environ_vars(self, prefix='VIRTUALENV_'):
    """Yield (name, value) pairs for every environment variable starting
    with `prefix`, with the prefix stripped and the name lowercased."""
    for name, value in os.environ.items():
        if name.startswith(prefix):
            yield name.replace(prefix, '').lower(), value
Returns a generator with all environmental vars with prefix VIRTUALENV
38,673
def copy(self, overrides=None, locked=False):
    """Create a separate (shallow) copy of this config.

    Args:
        overrides (dict): If given, replaces the copy's overrides.
        locked (bool): Locked state for the copy.
    """
    clone = copy.copy(self)
    if overrides is not None:
        clone.overrides = overrides
    clone.locked = locked
    # Copied caches would be stale; clear them.
    clone._uncache()
    return clone
Create a separate copy of this config .
38,674
def override(self, key, value):
    """Set a setting to the given value.

    Dotted keys must be rooted at "plugins" and are delegated to the
    plugins config; a plain key is stored as a local override. The
    cache for the key is invalidated either way.

    Raises:
        AttributeError: If a dotted key is not under "plugins".
    """
    parts = key.split('.')
    if len(parts) == 1:
        self.overrides[key] = value
    elif parts[0] == "plugins":
        self.plugins.override(parts[1:], value)
    else:
        raise AttributeError("no such setting: %r" % key)
    self._uncache(key)
Set a setting to the given value .
38,675
def remove_override(self, key):
    """Remove a setting override, if one exists.

    Raises:
        NotImplementedError: For dotted (plugin) keys.
    """
    if '.' in key:
        raise NotImplementedError
    if key in self.overrides:
        self.overrides.pop(key)
        self._uncache(key)
Remove a setting override if one exists .
38,676
def warn(self, key):
    """Returns True(-ish) if the named warning setting is enabled and
    warnings are not globally silenced (quiet / warn_none)."""
    if self.quiet or self.warn_none:
        return False
    # Return the raw value (not coerced to bool) as the original did.
    return self.warn_all or getattr(self, "warn_%s" % key)
Returns True if the warning setting is enabled .
38,677
def debug(self, key):
    """Returns True(-ish) if the named debug setting is enabled and
    debugging is not globally silenced (quiet / debug_none)."""
    if self.quiet or self.debug_none:
        return False
    # Return the raw value (not coerced to bool) as the original did.
    return self.debug_all or getattr(self, "debug_%s" % key)
Returns True if the debug setting is enabled .
38,678
def data(self):
    """Returns the entire configuration as a dict.

    The "plugins" key is expanded via the plugins config; keys whose
    attribute lookup fails are silently skipped.
    """
    result = {}
    for key in self._data:
        if key == "plugins":
            result[key] = self.plugins.data()
            continue
        try:
            result[key] = getattr(self, key)
        except AttributeError:
            # Invalid/uncomputable setting: omit rather than fail.
            pass
    return result
Returns the entire configuration as a dict .
38,679
def nonlocal_packages_path(self):
    """Returns the package search paths with the local packages path
    removed (first occurrence only, matching list.remove semantics)."""
    result = list(self.packages_path)
    try:
        result.remove(self.local_packages_path)
    except ValueError:
        pass
    return result
Returns package search paths with local path removed .
38,680
def _swap ( self , other ) : self . __dict__ , other . __dict__ = other . __dict__ , self . __dict__
Swap this config with another .
38,681
def _create_main_config(cls, overrides=None):
    """See comment block at top of 'rezconfig' describing how the main
    config is assembled.

    Config files, in order: the module root config, each path in
    $REZ_CONFIG_FILE (os.pathsep-separated), then ~/.rezconfig.
    """
    filepaths = [get_module_root_config()]
    env_filepath = os.getenv("REZ_CONFIG_FILE")
    if env_filepath:
        filepaths.extend(env_filepath.split(os.pathsep))
    filepaths.append(os.path.expanduser("~/.rezconfig"))
    return Config(filepaths, overrides)
See comment block at top of rezconfig describing how the main config is assembled .
38,682
def platform_mapped(func):
    """Decorates functions for lookups within a config.platform_map dictionary.

    config.platform_map is keyed by the decorated function's __name__; the
    entry found there maps regex pattern -> replacement, applied to the
    function's result. The first pattern that matches wins.

    Fixes over the previous version:
    - functools.wraps preserves the wrapped function's metadata.
    - dict.items() instead of py2-only dict.iteritems() (works on 2 and 3).
    """
    import functools

    @functools.wraps(func)
    def inner(*args, **kwargs):
        # Deferred import: config may not be importable at decoration time.
        from rez.config import config

        result = func(*args, **kwargs)
        entry = config.platform_map.get(func.__name__)
        if entry:
            for pattern, replacement in entry.items():
                result, changes = re.subn(pattern, replacement, result)
                if changes > 0:
                    # First matching rule wins.
                    break
        return result

    return inner
Decorates functions for lookups within a config . platform_map dictionary .
38,683
def pagerank(graph, damping_factor=0.85, max_iterations=100, min_delta=0.00001):
    """Compute and return the PageRank of each node in a directed graph.

    Runs the power method in place (updated ranks are visible within the
    same sweep) until the total per-sweep change drops below `min_delta`
    or `max_iterations` sweeps have run.

    Returns:
        dict: node -> rank; empty dict for an empty graph.
    """
    nodes = graph.nodes()
    size = len(nodes)
    if size == 0:
        return {}
    # Rank floor contributed by the random-jump term.
    base = (1.0 - damping_factor) / size
    ranks = dict.fromkeys(nodes, 1.0 / size)
    for _ in range(max_iterations):
        delta = 0
        for node in nodes:
            new_rank = base
            for incoming in graph.incidents(node):
                new_rank += damping_factor * ranks[incoming] / len(graph.neighbors(incoming))
            delta += abs(ranks[node] - new_rank)
            ranks[node] = new_rank
        if delta < min_delta:
            break
    return ranks
Compute and return the PageRank in an directed graph .
38,684
def exec_command(attr, cmd):
    """Runs a subprocess to calculate a package attribute.

    Returns:
        (stdout, stderr) tuple, each stripped.

    Raises:
        InvalidPackageError: If the command exits nonzero.
    """
    import subprocess

    proc = popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode:
        from rez.exceptions import InvalidPackageError
        raise InvalidPackageError(
            "Error determining package attribute '%s':\n%s" % (attr, err))
    return out.strip(), err.strip()
Runs a subproc to calculate a package attribute .
38,685
def exec_python(attr, src, executable="python"):
    """Runs a python subprocess to calculate a package attribute.

    Args:
        attr (str): Attribute name (used in error reporting only).
        src: A statement, or list of statements (joined with '; ').
        executable (str): Python interpreter to run.

    Returns:
        Stripped stdout of the subprocess.

    Raises:
        InvalidPackageError: If the interpreter exits nonzero.
    """
    import subprocess

    if isinstance(src, basestring):  # py2 'basestring'
        src = [src]
    proc = popen([executable, "-c", "; ".join(src)],
                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode:
        from rez.exceptions import InvalidPackageError
        raise InvalidPackageError(
            "Error determining package attribute '%s':\n%s" % (attr, err))
    return out.strip()
Runs a python subproc to calculate a package attribute .
38,686
def find_site_python(module_name, paths=None):
    """Find the rez-native python package that contains the given module.

    Probes a `python` subprocess for the module's __path__, then searches
    rez 'python' packages whose recorded _site_paths contain every one of
    those paths.

    Raises:
        InvalidPackageError: If the module or a containing package cannot
            be found.
    """
    from rez.packages_ import iter_packages
    import subprocess
    import ast
    import os

    # NOTE: py2-style print statement in the probe source.
    py_cmd = 'import {x}; print {x}.__path__'.format(x=module_name)

    proc = popen(["python", "-c", py_cmd],
                 stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if proc.returncode:
        raise InvalidPackageError(
            "Failed to find installed python module '%s':\n%s"
            % (module_name, err))

    module_paths = ast.literal_eval(out.strip())

    def is_within(path, parent):
        return path.startswith(parent + os.sep)

    for package in iter_packages("python", paths=paths):
        if not hasattr(package, "_site_paths"):
            continue
        if all(any(is_within(p, site) for site in package._site_paths)
               for p in module_paths):
            return package

    raise InvalidPackageError(
        "Failed to find python installation containing the module '%s'. Has "
        "python been installed as a rez package?" % module_name)
Find the rez native python package that contains the given module .
38,687
def _intersection ( A , B ) : intersection = [ ] for i in A : if i in B : intersection . append ( i ) return intersection
A simple function to find an intersection between two arrays .
38,688
def critical_path(graph):
    """Compute and return the critical path in an acyclic directed
    weighted graph.

    Returns an empty list if the graph contains a cycle (critical path
    is only defined for DAGs).
    """
    if not len(find_cycle(graph)) == 0:
        return []
    # node -> (best predecessor, longest distance from a source).
    node_tuples = {}
    topological_nodes = topological_sorting(graph)
    for node in topological_nodes:
        node_tuples.update({node: (None, 0)})
    # Dynamic programming in topological order: each node's longest
    # distance is the best incoming edge weight + predecessor distance.
    for node in topological_nodes:
        predecessors = []
        for pre in graph.incidents(node):
            max_pre = node_tuples[pre][1]
            predecessors.append((pre, graph.edge_weight((pre, node)) + max_pre))
        # NOTE(review): 'max' shadows the builtin within this function.
        max = 0
        max_tuple = (None, 0)
        for i in predecessors:
            if i[1] >= max:
                max = i[1]
                max_tuple = i
        node_tuples[node] = max_tuple
    # The path endpoint is the node with the overall longest distance.
    max = 0
    critical_node = None
    for k, v in list(node_tuples.items()):
        if v[1] >= max:
            max = v[1]
            critical_node = k
    path = []

    # Walk back through best predecessors to reconstruct the path.
    def mid_critical_path(end):
        if node_tuples[end][0] != None:
            path.append(end)
            mid_critical_path(node_tuples[end][0])
        else:
            path.append(end)

    mid_critical_path(critical_node)
    path.reverse()
    return path
Compute and return the critical path in an acyclic directed weighted graph .
38,689
def build(self, context, variant, build_path, install_path, install=False,
          build_type=BuildType.local):
    """Perform the build.

    Args:
        context: Resolved context to execute the build command within.
        variant: Package variant being built.
        build_path (str): Directory to build in.
        install_path (str): Directory to install to.
        install (bool): Whether this is an install run ('{install}' in the
            build command expands to "install" when True, '' otherwise).
        build_type: Build type (e.g. BuildType.local).

    Returns:
        dict: Contains "success" (bool); also "build_env_script" when
        scripts are written instead of running a build.
    """
    ret = {}

    if self.write_build_scripts:
        # Write a script that spawns an interactive build shell rather
        # than running the build command now.
        build_env_script = os.path.join(build_path, "build-env")
        create_forwarding_script(build_env_script,
                                 module=("build_system", "custom"),
                                 func_name="_FWD__spawn_build_shell",
                                 working_dir=self.working_dir,
                                 build_path=build_path,
                                 variant_index=variant.index,
                                 install=install,
                                 install_path=install_path)
        ret["success"] = True
        ret["build_env_script"] = build_env_script
        return ret

    command = self.package.build_command

    # A build_command of False means there is nothing to build.
    if command is False:
        ret["success"] = True
        return ret

    def expand(txt):
        # Expand {root} and {install} placeholders in the build command.
        root = self.package.root
        install_ = "install" if install else ''
        return txt.format(root=root, install=install_).strip()

    if isinstance(command, basestring):  # py2 'basestring'
        # String command: append quoted build args, then expand.
        if self.build_args:
            command = command + ' ' + ' '.join(map(quote, self.build_args))
        command = expand(command)
        cmd_str = command
    else:
        # List command: append args, expand each element.
        command = command + self.build_args
        command = map(expand, command)
        cmd_str = ' '.join(map(quote, command))

    if self.verbose:
        pr = Printer(sys.stdout)
        pr("Running build command: %s" % cmd_str, heading)

    def _callback(executor):
        # Export build-related env vars into the build shell.
        self._add_build_actions(executor,
                                context=context,
                                package=self.package,
                                variant=variant,
                                build_type=build_type,
                                install=install,
                                build_path=build_path,
                                install_path=install_path)
        if self.opts:
            # Write extra parsed cli args out as __PARSE_ARG_* env vars
            # so the build command can pick them up (py2: iteritems).
            extra_args = getattr(self.opts.parser, "_rezbuild_extra_args", [])
            for key, value in vars(self.opts).iteritems():
                if key in extra_args:
                    varname = "__PARSE_ARG_%s" % key.upper()
                    # Convert bools to 0/1 and sequences to quoted strings.
                    if isinstance(value, bool):
                        value = 1 if value else 0
                    elif isinstance(value, (list, tuple)):
                        value = map(str, value)
                        value = map(quote, value)
                        value = ' '.join(value)
                    executor.env[varname] = value

    retcode, _, _ = context.execute_shell(command=command,
                                          block=True,
                                          cwd=build_path,
                                          actions_callback=_callback)
    ret["success"] = (not retcode)
    return ret
Perform the build .
38,690
def _create_tag_highlevel ( self , tag_name , message = None ) : results = [ ] if self . patch_path : tagged = self . _create_tag_lowlevel ( tag_name , message = message , patch = True ) if tagged : results . append ( { 'type' : 'tag' , 'patch' : True } ) self . hg ( 'bookmark' , '-f' , tag_name ) results . append ( { 'type' : 'bookmark' , 'patch' : False } ) else : tagged = self . _create_tag_lowlevel ( tag_name , message = message , patch = False ) if tagged : results . append ( { 'type' : 'tag' , 'patch' : False } ) return results
Create a tag on the toplevel repo if there is no patch repo or a tag on the patch repo and bookmark on the top repo if there is a patch repo
38,691
def _create_tag_lowlevel ( self , tag_name , message = None , force = True , patch = False ) : tags = self . get_tags ( patch = patch ) old_commit = tags . get ( tag_name ) if old_commit is not None : if not force : return False old_rev = old_commit [ 'rev' ] if self . is_ancestor ( old_rev , '.' , patch = patch ) : altered = self . hg ( 'status' , '--rev' , old_rev , '--rev' , '.' , '--no-status' ) if not altered or altered == [ '.hgtags' ] : force = False if not force : return False tag_args = [ 'tag' , tag_name ] if message : tag_args += [ '--message' , message ] if force : tag_args += [ '--force' ] self . hg ( patch = patch , * tag_args ) return True
Create a tag on the toplevel or patch repo
38,692
def is_ancestor(self, commit1, commit2, patch=False):
    """Returns True if commit1 is a direct ancestor of commit2 (the hg
    revset 'first(c1::c2)' is non-empty), False otherwise."""
    revset = "first(%s::%s)" % (commit1, commit2)
    output = self.hg("log", "-r", revset, "--template", "exists", patch=patch)
    return "exists" in output
Returns True if commit1 is a direct ancestor of commit2 or False otherwise .
38,693
def get_patched_request(requires, patchlist):
    """Apply patch args to a request.

    Patch prefix semantics (as encoded by `rules` and the replace loop):
        ''  : replace any matching request; append if nothing matched;
        '!' : never replace; always appended;
        '~' : replace only a matching weak-conflict request; else append;
        '^' : remove every matching request (nothing is appended).

    Args:
        requires: List of Requirement (strings are coerced).
        patchlist: List of patch strings.

    Returns:
        list of Requirement: The patched request.
    """
    # rules: prefix -> (replace non-conflict, replace strong-conflict,
    # replace weak-conflict).
    rules = {
        '': (True, True, True),
        '!': (False, False, False),
        '~': (False, False, True),
        '^': (True, True, True)}
    requires = [Requirement(x) if not isinstance(x, Requirement) else x
                for x in requires]
    appended = []

    for patch in patchlist:
        if patch and patch[0] in ('!', '~', '^'):
            ch = patch[0]
            name = Requirement(patch[1:]).name
        else:
            ch = ''
            name = Requirement(patch).name

        rule = rules[ch]
        # '^' starts "already replaced", so every match is nulled out and
        # the patch itself is never appended.
        replaced = (ch == '^')

        for i, req in enumerate(requires):
            if req is None or req.name != name:
                continue
            # Pick the rule column matching the request's kind.
            if not req.conflict:
                replace = rule[0]
            elif not req.weak:
                replace = rule[1]
            else:
                replace = rule[2]
            if replace:
                if replaced:
                    # A replacement already happened; drop duplicates.
                    requires[i] = None
                else:
                    requires[i] = Requirement(patch)
                    replaced = True

        if not replaced:
            appended.append(Requirement(patch))

    # Drop nulled-out entries and append the unconsumed patches.
    result = [x for x in requires if x is not None] + appended
    return result
Apply patch args to a request .
38,694
def execute(self, app_path, app_args, version, **kwargs):
    """Hook entry point: launch the given application, optionally inside
    a Rez-resolved environment.

    Args:
        app_path (str): Path of the executable to launch.
        app_args (str): Extra command-line arguments for the app.
        version (str): Application version (unused in this hook body).

    Returns:
        dict: {"command": <shell command used>, "return_code": <exit code>}.
    """
    multi_launchapp = self.parent
    extra = multi_launchapp.get_setting("extra")

    use_rez = False
    if self.check_rez():
        # Rez available: resolve the configured packages into a context.
        from rez.resolved_context import ResolvedContext
        from rez.config import config

        # Let these variables flow through from the parent environment.
        config.parent_variables = ["PYTHONPATH", "HOUDINI_PATH", "NUKE_PATH",
                                   "HIERO_PLUGIN_PATH", "KATANA_RESOURCES"]
        rez_packages = extra["rez_packages"]
        context = ResolvedContext(rez_packages)
        use_rez = True

    # Build a platform-appropriate launch command line.
    system = sys.platform
    shell_type = 'bash'
    if system == "linux2":
        cmd = "%s %s &" % (app_path, app_args)
    elif self.parent.get_setting("engine") in ["tk-flame", "tk-flare"]:
        cmd = "%s %s &" % (app_path, app_args)
    elif system == "darwin":
        cmd = "open -n \"%s\"" % (app_path)
        if app_args:
            cmd += " --args \"%s\"" % app_args.replace("\"", "\\\"")
    elif system == "win32":
        cmd = "start /B \"App\" \"%s\" %s" % (app_path, app_args)
        shell_type = 'cmd'

    if use_rez:
        # Run the command inside the rez context's shell.
        n_env = os.environ.copy()
        proc = context.execute_shell(command=cmd,
                                     parent_environ=n_env,
                                     shell=shell_type,
                                     stdin=False,
                                     block=False)
        exit_code = proc.wait()
        context.print_info(verbosity=True)
    else:
        exit_code = os.system(cmd)

    return {
        "command": cmd,
        "return_code": exit_code
    }
The execute function of the hook; it is called to start the required application.
38,695
def check_rez(self, strict=True):
    """Checks to see if a Rez package is available in the current
    environment. If it is, add it to the system path, exposing the Rez
    Python API.

    Args:
        strict (bool): If True, raise when Rez cannot be found; otherwise
            warn on stderr and continue with an empty path.

    Returns:
        str: Path to Rez, or '' when not found in non-strict mode.

    Note: python 2 only (print statement syntax).
    """
    system = sys.platform

    # Bootstrap: ask a rez-configured subshell where rez itself lives.
    if system == "win32":
        rez_cmd = 'rez-env rez -- echo %REZ_REZ_ROOT%'
    else:
        rez_cmd = 'rez-env rez -- printenv REZ_REZ_ROOT'

    process = subprocess.Popen(rez_cmd, stdout=subprocess.PIPE, shell=True)
    rez_path, err = process.communicate()

    if err or not rez_path:
        if strict:
            raise ImportError("Failed to find Rez as a package in the current "
                              "environment! Try 'rez-bind rez'!")
        else:
            print >> sys.stderr, ("WARNING: Failed to find a Rez package in the current "
                                  "environment. Unable to request Rez packages.")
            rez_path = ""
    else:
        rez_path = rez_path.strip()

    print "Found Rez:", rez_path
    print "Adding Rez to system path..."
    sys.path.append(rez_path)
    return rez_path
Checks to see if a Rez package is available in the current environment . If it is available add it to the system path exposing the Rez Python API
38,696
def get_last_changed_revision(client, url):
    """Util func: get the last-changed revision of `url`.

    Args:
        client: pysvn.Client instance.
        url (str): Repository URL to query (HEAD, non-recursive).

    Raises:
        ReleaseVCSError: If svn.info2 errors or returns no entries.

    Note: python 2 only ('except X, e' syntax).
    """
    try:
        svn_entries = client.info2(url,
                                   pysvn.Revision(pysvn.opt_revision_kind.head),
                                   recurse=False)
        if not svn_entries:
            raise ReleaseVCSError("svn.info2() returned no results on url %s" % url)
        return svn_entries[0][1].last_changed_rev
    except pysvn.ClientError, ce:
        raise ReleaseVCSError("svn.info2() raised ClientError: %s" % ce)
Utility function: get the last-changed revision of the given URL.
38,697
# pysvn login callback: provide svn with credentials for `username`.
# NOTE(review): this line is garbled by extraction — the getpass prompt
# string literal is unterminated ('getpass ( " )...'), so it cannot be
# reformatted with confidence. Apparent intent: prompt until a non-empty
# password is entered, then return (True, username, password, False) as
# pysvn's get_login callback expects — TODO: recover the original prompt
# string from upstream history. Python 2 only ('print' statement).
def get_svn_login ( realm , username , may_save ) : import getpass print "svn requires a password for the user %s:" % username pwd = '' while not pwd . strip ( ) : pwd = getpass . getpass ( " ) return True , username , pwd , False
Provide svn with login credentials for the given user.
38,698
def read(string):
    """Read a graph from an XML document and return it.

    Nodes and edges specified in the input will be added to the current
    graph. The root element selects the returned type: <graph>,
    <digraph>, or <hypergraph> (delegated to read_hypergraph).

    Raises:
        InvalidGraphType: If no known root element is present.
    """
    dom = parseString(string)
    if dom.getElementsByTagName("graph"):
        G = graph()
    elif dom.getElementsByTagName("digraph"):
        G = digraph()
    elif dom.getElementsByTagName("hypergraph"):
        return read_hypergraph(string)
    else:
        raise InvalidGraphType
    # Nodes and their attributes.
    for each_node in dom.getElementsByTagName("node"):
        G.add_node(each_node.getAttribute('id'))
        for each_attr in each_node.getElementsByTagName("attribute"):
            G.add_node_attribute(each_node.getAttribute('id'),
                                 (each_attr.getAttribute('attr'),
                                  each_attr.getAttribute('value')))
    # Edges (skipping duplicates) and their attributes.
    for each_edge in dom.getElementsByTagName("edge"):
        if (not G.has_edge((each_edge.getAttribute('from'),
                            each_edge.getAttribute('to')))):
            G.add_edge((each_edge.getAttribute('from'),
                        each_edge.getAttribute('to')),
                       wt=float(each_edge.getAttribute('wt')),
                       label=each_edge.getAttribute('label'))
        # NOTE(review): source formatting was collapsed; this attribute
        # loop is assumed to run for every edge element (the duplicate
        # check below suggests it also applies to pre-existing edges) —
        # confirm nesting against upstream python-graph.
        for each_attr in each_edge.getElementsByTagName("attribute"):
            attr_tuple = (each_attr.getAttribute('attr'),
                          each_attr.getAttribute('value'))
            if (attr_tuple not in G.edge_attributes(
                    (each_edge.getAttribute('from'),
                     each_edge.getAttribute('to')))):
                G.add_edge_attribute((each_edge.getAttribute('from'),
                                      each_edge.getAttribute('to')),
                                     attr_tuple)
    return G
Read a graph from a XML document and return it . Nodes and edges specified in the input will be added to the current graph .
38,699
def read_hypergraph(string):
    """Read a hypergraph from an XML document.

    Nodes and hyperedges specified in the input will be added to the
    returned hypergraph.
    """
    hgr = hypergraph()
    dom = parseString(string)
    # First pass: register every node and hyperedge.
    for node_elem in dom.getElementsByTagName("node"):
        hgr.add_node(node_elem.getAttribute('id'))
    for edge_elem in dom.getElementsByTagName("hyperedge"):
        hgr.add_hyperedge(edge_elem.getAttribute('id'))
    # Second pass: wire up node -> hyperedge links.
    dom = parseString(string)
    for node_elem in dom.getElementsByTagName("node"):
        node_id = str(node_elem.getAttribute('id'))
        for link_elem in node_elem.getElementsByTagName("link"):
            hgr.link(node_id, str(link_elem.getAttribute('to')))
    return hgr
Read a graph from a XML document . Nodes and hyperedges specified in the input will be added to the current graph .