idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
226,500
def convert_dicts(d, to_class=AttrDictWrapper, from_class=dict):
    """Recursively convert a mapping (and all nested mappings) to `to_class`.

    Args:
        d: Mapping to convert.
        to_class: Target mapping type, instantiated with no arguments.
        from_class: Type (or tuple of types) whose instances are recursed into.

    Returns:
        A new `to_class` instance mirroring `d`.
    """
    d_ = to_class()
    # NOTE: iteritems() - this code targets Python 2
    for key, value in d.iteritems():
        if isinstance(value, from_class):
            # nested mapping - convert recursively
            d_[key] = convert_dicts(value, to_class=to_class,
                                    from_class=from_class)
        else:
            d_[key] = value
    return d_
Recursively convert dict and UserDict types .
98
11
226,501
def package_loading(self):
    """Generator-based timer: wrap package-loading code with this so the
    elapsed time is accrued onto self.package_load_time."""
    started = time.time()
    yield None
    elapsed = time.time() - started
    self.package_load_time += elapsed
Use this around code in your package repository that is loading a package for example from file or cache .
39
20
226,502
def is_empty(self):
    """Return True if the repository contains no packages at all."""
    return not any(
        True
        for family in self.iter_package_families()
        for _pkg in self.iter_packages(family)
    )
Determine if the repository contains any packages .
39
10
226,503
def make_resource_handle(self, resource_key, **variables):
    """Create a ResourceHandle for `resource_key`.

    `variables` may optionally carry "repository_type" and "location"; if
    present they must match this repository, otherwise ResourceError is
    raised. Both are then forced to this repository's own values before the
    handle is built.
    """
    if variables.get("repository_type", self.name()) != self.name():
        raise ResourceError("repository_type mismatch - requested %r, "
                            "repository_type is %r"
                            % (variables["repository_type"], self.name()))
    variables["repository_type"] = self.name()
    if variables.get("location", self.location) != self.location:
        raise ResourceError("location mismatch - requested %r, repository "
                            "location is %r" % (variables["location"],
                                                self.location))
    variables["location"] = self.location
    resource_cls = self.pool.get_resource_class(resource_key)
    # give the resource class a chance to canonicalise the variables
    variables = resource_cls.normalize_variables(variables)
    return ResourceHandle(resource_key, variables)
Create a ResourceHandle
213
4
226,504
def get_repository(self, path):
    """Return the package repository identified by `path`.

    `path` has the form "<type>@<location>"; a bare path implies the
    "filesystem" type. Filesystem locations are normalised with abspath
    so equivalent specs map to the same repository.
    """
    parts = path.split('@', 1)
    if len(parts) == 1:
        # no explicit type - default to a filesystem repository
        parts = ("filesystem", parts[0])
    repo_type, location = parts
    if repo_type == "filesystem":
        # choice of abspath here vs realpath is deliberate. Realpath gives
        # canonical path, which can be a problem if two studios are sharing
        # packages, and have mirrored package paths, but some are actually
        # different paths, symlinked to look the same. It happened!
        location = os.path.abspath(location)
    return self._get_repository("%s@%s" % (repo_type, location))
Get a package repository .
168
5
226,505
def are_same(self, path_1, path_2):
    """Return True if `path_1` and `path_2` refer to the same repository."""
    if path_1 == path_2:
        return True
    uid_1 = self.get_repository(path_1).uid
    uid_2 = self.get_repository(path_2).uid
    return uid_1 == uid_2
Test that path_1 and path_2 refer to the same repository .
75
15
226,506
def create_transport(host, connect_timeout, ssl=False):
    """Select and build the transport subclass for a connection.

    An SSLTransport is returned when `ssl` is truthy (the value itself is
    forwarded, since it may carry SSL options); otherwise a TCPTransport.
    """
    if not ssl:
        return TCPTransport(host, connect_timeout)
    return SSLTransport(host, connect_timeout, ssl)
Given a few parameters from the Connection constructor select and create a subclass of _AbstractTransport .
49
19
226,507
def get_plugin_class(self, plugin_name):
    """Return the class registered under `plugin_name`.

    Raises:
        RezPluginError: if no such plugin is registered.
    """
    if plugin_name in self.plugin_classes:
        return self.plugin_classes[plugin_name]
    raise RezPluginError("Unrecognised %s plugin: '%s'"
                         % (self.pretty_type_name, plugin_name))
Returns the class registered under the given plugin name .
63
10
226,508
def get_plugin_module(self, plugin_name):
    """Return the module that defines the plugin `plugin_name`.

    Raises:
        RezPluginError: if no such plugin is registered.
    """
    if plugin_name not in self.plugin_modules:
        raise RezPluginError("Unrecognised %s plugin: '%s'"
                             % (self.pretty_type_name, plugin_name))
    return self.plugin_modules[plugin_name]
Returns the module containing the plugin of the given name .
63
11
226,509
def config_schema(self):
    """Return the merged configuration data schema for this plugin type.

    Starts from the base schema dict registered for this plugin type and
    deep-merges in each plugin class's optional `schema_dict`.
    """
    from rez.config import _plugin_config_dict
    d = _plugin_config_dict.get(self.type_name, {})
    # NOTE: iteritems() - Python 2
    for name, plugin_class in self.plugin_classes.iteritems():
        if hasattr(plugin_class, "schema_dict") and plugin_class.schema_dict:
            d_ = {name: plugin_class.schema_dict}
            deep_update(d, d_)
    return dict_to_schema(d, required=True, modifier=expand_system_vars)
Returns the merged configuration data schema for this plugin type .
129
11
226,510
def get_plugin_class(self, plugin_type, plugin_name):
    """Return the class registered under `plugin_name` for `plugin_type`."""
    return self._get_plugin_type(plugin_type).get_plugin_class(plugin_name)
Return the class registered under the given plugin name .
46
10
226,511
def get_plugin_module(self, plugin_type, plugin_name):
    """Return the module defining the class registered under `plugin_name`."""
    return self._get_plugin_type(plugin_type).get_plugin_module(plugin_name)
Return the module defining the class registered under the given plugin name .
46
13
226,512
def create_instance(self, plugin_type, plugin_name, **instance_kwargs):
    """Create and return an instance of the named plugin."""
    ptype = self._get_plugin_type(plugin_type)
    return ptype.create_instance(plugin_name, **instance_kwargs)
Create and return an instance of the given plugin .
60
10
226,513
def get_summary_string(self):
    """Return a columnised table summarising loaded and failed plugins."""
    rows = [["PLUGIN TYPE", "NAME", "DESCRIPTION", "STATUS"],
            ["-----------", "----", "-----------", "------"]]
    for plugin_type in sorted(self.get_plugin_types()):
        type_name = plugin_type.replace('_', ' ')
        for name in sorted(self.get_plugins(plugin_type)):
            module = self.get_plugin_module(plugin_type, name)
            # use the plugin module's docstring as its description
            desc = (getattr(module, "__doc__", None) or '').strip()
            rows.append((type_name, name, desc, "loaded"))
        for (name, reason) in sorted(self.get_failed_plugins(plugin_type)):
            msg = "FAILED: %s" % reason
            rows.append((type_name, name, '', msg))
    return '\n'.join(columnise(rows))
Get a formatted string summarising the plugins that were loaded .
228
12
226,514
def get_fragment(self, offset):
    """Return a short repr of the source text at `offset`, for error messages."""
    fragment_len = 10
    snippet = '%r' % (self.source[offset:offset + fragment_len])
    # mark truncation when more source follows the fragment
    if len(self.source) > offset + fragment_len:
        snippet += '...'
    return snippet
Get the part of the source which is causing a problem .
59
12
226,515
def evaluate(self, node, filename=None):
    """Evaluate a source string or AST node.

    Strings are parsed first (in 'eval' mode); `filename` is used only to
    improve parse error reporting. Dispatches to a handler named after the
    lowercased node type; raises SyntaxError when parsing fails or when no
    handler exists for the node type.
    """
    if isinstance(node, string_types):
        # keep the raw source so error fragments can be extracted later
        self.source = node
        kwargs = {'mode': 'eval'}
        if filename:
            kwargs['filename'] = filename
        try:
            node = ast.parse(node, **kwargs)
        except SyntaxError as e:
            s = self.get_fragment(e.offset)
            raise SyntaxError('syntax error %s' % s)
    node_type = node.__class__.__name__.lower()
    handler = self.get_handler(node_type)
    if handler is None:
        if self.source is None:
            # node was passed in directly - no source text to quote
            s = '(source not available)'
        else:
            s = self.get_fragment(node.col_offset)
        raise SyntaxError("don't know how to evaluate %r %s"
                          % (node_type, s))
    return handler(node)
Evaluate a source string or node using filename when displaying errors .
203
14
226,516
def recursive_repr(func):
    """Decorator that stops a repr implementation recursing into itself."""
    repr_running = set()

    @wraps(func)
    def wrapper(self):
        "Return ellipsis on recursive re-entry to function."
        token = (id(self), get_ident())
        if token in repr_running:
            return '...'
        repr_running.add(token)
        try:
            result = func(self)
        finally:
            repr_running.discard(token)
        return result

    return wrapper
Decorator to prevent infinite repr recursion .
90
10
226,517
def _reset ( self , load ) : values = reduce ( iadd , self . _lists , [ ] ) self . _clear ( ) self . _load = load self . _half = load >> 1 self . _dual = load << 1 self . _update ( values )
Reset sorted list load .
60
6
226,518
def _build_index(self):
    """Build the positional index: an implicit binary tree of sublist sizes.

    Leaves are the sublist lengths and each parent is the sum of its two
    children. The flattened tree is stored root-first in self._index, with
    self._offset giving the position of the first leaf.
    """
    row0 = list(map(len, self._lists))
    if len(row0) == 1:
        # single sublist: the index is just its length, no tree needed
        self._index[:] = row0
        self._offset = 0
        return
    # pair up adjacent lengths (the iter/iter trick yields (0,1), (2,3), ...)
    head = iter(row0)
    tail = iter(head)
    row1 = list(starmap(add, zip(head, tail)))
    if len(row0) & 1:
        # odd leaf count: the last leaf has no sibling - carry it up as-is
        row1.append(row0[-1])
    if len(row1) == 1:
        # two-level tree: root followed by the leaves
        self._index[:] = row1 + row0
        self._offset = 1
        return
    # pad row1 with zeros up to the next power of two so the tree is complete
    size = 2 ** (int(log_e(len(row1) - 1, 2)) + 1)
    row1.extend(repeat(0, size - len(row1)))
    tree = [row0, row1]
    # keep summing pairs until a single root remains
    while len(tree[-1]) > 1:
        head = iter(tree[-1])
        tail = iter(head)
        row = list(starmap(add, zip(head, tail)))
        tree.append(row)
    # flatten root-first into _index; _offset is the index of the first leaf
    reduce(iadd, reversed(tree), self._index)
    self._offset = size * 2 - 1
Build an index for indexing the sorted list .
261
10
226,519
def irange_key(self, min_key=None, max_key=None, inclusive=(True, True),
               reverse=False):
    """Create an iterator of values whose keys lie between min_key and max_key.

    `inclusive` is a pair of booleans controlling whether each bound is
    inclusive; `reverse` yields values in reverse order. Returns an empty
    iterator when the list is empty or min_key lies past the end.
    """
    _maxes = self._maxes
    if not _maxes:
        return iter(())
    _keys = self._keys
    # Calculate the minimum (pos, idx) pair. By default this location
    # will be inclusive in our calculation.
    if min_key is None:
        min_pos = 0
        min_idx = 0
    else:
        if inclusive[0]:
            min_pos = bisect_left(_maxes, min_key)
            if min_pos == len(_maxes):
                # min_key is greater than every stored key
                return iter(())
            min_idx = bisect_left(_keys[min_pos], min_key)
        else:
            min_pos = bisect_right(_maxes, min_key)
            if min_pos == len(_maxes):
                return iter(())
            min_idx = bisect_right(_keys[min_pos], min_key)
    # Calculate the maximum (pos, idx) pair. By default this location
    # will be exclusive in our calculation.
    if max_key is None:
        max_pos = len(_maxes) - 1
        max_idx = len(_keys[max_pos])
    else:
        if inclusive[1]:
            max_pos = bisect_right(_maxes, max_key)
            if max_pos == len(_maxes):
                # max_key exceeds every stored key - clamp to the last slot
                max_pos -= 1
                max_idx = len(_keys[max_pos])
            else:
                max_idx = bisect_right(_keys[max_pos], max_key)
        else:
            max_pos = bisect_left(_maxes, max_key)
            if max_pos == len(_maxes):
                max_pos -= 1
                max_idx = len(_keys[max_pos])
            else:
                max_idx = bisect_left(_keys[max_pos], max_key)
    return self._islice(min_pos, min_idx, max_pos, max_idx, reverse)
Create an iterator of values between min_key and max_key .
470
14
226,520
def view_graph(graph_str, parent=None, prune_to=None):
    """Render a dot graph to an image (cached in a tempfile) and display it."""
    from rezgui.dialogs.ImageViewerDialog import ImageViewerDialog
    from rez.config import config
    # check for already written tempfile
    h = hash((graph_str, prune_to))
    filepath = graph_file_lookup.get(h)
    if filepath and not os.path.exists(filepath):
        # cached file was deleted out from under us - regenerate it
        filepath = None
    # write graph to tempfile
    if filepath is None:
        suffix = ".%s" % config.dot_image_format
        fd, filepath = tempfile.mkstemp(suffix=suffix, prefix="rez-graph-")
        os.close(fd)
        dlg = WriteGraphDialog(graph_str, filepath, parent, prune_to=prune_to)
        if not dlg.write_graph():
            # user cancelled or write failed - nothing to show
            return
    # display graph
    graph_file_lookup[h] = filepath
    dlg = ImageViewerDialog(filepath, parent)
    dlg.exec_()
View a graph .
235
4
226,521
def select_version(self, version_range):
    """Select the row of the latest package whose version is in range.

    Returns the selected version, or None (with the selection cleared)
    when no package falls within `version_range`.
    """
    row = -1
    version = None
    # NOTE: iteritems() - Python 2
    for i, package in self.packages.iteritems():
        # keep the highest in-range version seen so far
        if package.version in version_range and \
                (version is None or version < package.version):
            version = package.version
            row = i
    self.clearSelection()
    if row != -1:
        self.selectRow(row)
    return version
Select the latest versioned package in the given range .
84
11
226,522
def _fromset ( cls , values , key = None ) : sorted_set = object . __new__ ( cls ) sorted_set . _set = values # pylint: disable=protected-access sorted_set . __init__ ( key = key ) return sorted_set
Initialize sorted set from existing set .
62
8
226,523
def setup_parser_common(parser):
    """Parser setup shared by the rez-build and rez-release commands."""
    from rez.build_process_ import get_build_process_types
    from rez.build_system import get_valid_build_systems
    process_types = get_build_process_types()
    parser.add_argument(
        "--process", type=str, choices=process_types, default="local",
        help="the build process to use (default: %(default)s).")
    # add build system choices valid for this package
    package = get_current_developer_package()
    clss = get_valid_build_systems(os.getcwd(), package=package)
    if clss:
        if len(clss) == 1:
            # only one valid build system - expose its own CLI options too
            cls_ = clss[0]
            title = "%s build system arguments" % cls_.name()
            group = parser.add_argument_group(title)
            cls_.bind_cli(parser, group)
        types = [x.name() for x in clss]
    else:
        types = None
    parser.add_argument(
        "-b", "--build-system", dest="buildsys", choices=types,
        help="the build system to use. If not specified, it is detected. Set "
        "'build_system' or 'build_command' to specify the build system in the "
        "package itself.")
    parser.add_argument(
        "--variants", nargs='+', type=int, metavar="INDEX",
        help="select variants to build (zero-indexed).")
    parser.add_argument(
        "--ba", "--build-args", dest="build_args", metavar="ARGS",
        help="arguments to pass to the build system. Alternatively, list these "
        "after a '--'.")
    parser.add_argument(
        "--cba", "--child-build-args", dest="child_build_args", metavar="ARGS",
        help="arguments to pass to the child build system, if any. "
        "Alternatively, list these after a second '--'.")
Parser setup common to both rez-build and rez-release.
471
15
226,524
def scoped_format(txt, **objects):
    """Format a string with respect to a set of objects' attributes.

    The `pretty` and `expand` keyword arguments are popped off and
    forwarded to the formatter rather than treated as format objects.

    Fix: the original also built an unused RecursiveAttribute from
    `objects` and immediately discarded it; that dead construction is
    removed here.
    """
    pretty = objects.pop("pretty", RecursiveAttribute.format_pretty)
    expand = objects.pop("expand", RecursiveAttribute.format_expand)
    formatter = scoped_formatter(**objects)
    return formatter.format(txt, pretty=pretty, expand=expand)
Format a string with respect to a set of objects attributes .
97
12
226,525
def to_dict(self):
    """Return a plain (possibly nested) dict equivalent of this attribute."""
    d = {}
    # NOTE: iteritems() - Python 2
    for k, v in self.__dict__["data"].iteritems():
        if isinstance(v, RecursiveAttribute):
            # nested attribute objects convert recursively
            d[k] = v.to_dict()
        else:
            d[k] = v
    return d
Get an equivalent dict representation .
66
6
226,526
def value(self, key, type_=None):
    """Return the value of setting `key`, coerced to a type.

    With no `type_`, the stored default's type drives the coercion; with
    an explicit `type_`, a missing value yields None instead of the
    default.
    """
    if type_ is None:
        default = self._default_value(key)
        val = self._value(key, default)
        # exact type match (not isinstance) - subtype values still get
        # run through _convert_value
        if type(val) == type(default):
            return val
        else:
            return self._convert_value(val, type(default))
    else:
        val = self._value(key, None)
        if val is None:
            return None
        return self._convert_value(val, type_)
Get the value of a setting .
106
7
226,527
def get_string_list(self, key):
    """Read a QSettings-style array stored under `key` as a list of strings."""
    size = self.beginReadArray(key)
    strings = []
    for index in range(size):
        self.setArrayIndex(index)
        strings.append(str(self._value("entry")))
    self.endArray()
    return strings
Get a list of strings .
70
6
226,528
def prepend_string_list(self, key, value, max_length_key):
    """Insert `value` at the front of the stored string list under `key`.

    Existing duplicates of `value` are dropped and the result is capped
    at the length given by the setting `max_length_key`.
    """
    max_len = self.get(max_length_key)
    existing = [x for x in self.get_string_list(key) if x != value]
    strings = ([value] + existing)[:max_len]
    self.beginWriteArray(key)
    for index, entry in enumerate(strings):
        self.setArrayIndex(index)
        self.setValue("entry", entry)
    self.endArray()
Prepend a fixed - length string list with a new string .
120
13
226,529
def insert(self, item, priority):
    """Push `item` onto the queue, ordered by `priority`."""
    entry = HeapItem(item, priority)
    heappush(self.heap, entry)
Insert item into the queue with the given priority .
27
10
226,530
def connected_components(graph):
    """Map each node of `graph` to a connected-component number (1-based).

    The recursion limit is raised for the duration since _dfs recurses
    once per reachable node, then restored afterwards.
    """
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))
    visited = {}
    count = 1
    # For 'each' node not found to belong to a connected component, find its
    # connected component.
    for each in graph:
        if (each not in visited):
            _dfs(graph, visited, count, each)
            count = count + 1
    setrecursionlimit(recursionlimit)
    return visited
Connected components .
111
4
226,531
def cut_edges(graph):
    """Return the cut-edges (bridges) of the given graph.

    A cut edge, or bridge, is an edge whose removal increases the number
    of connected components in the graph.

    Fixes: the hypergraph dispatch is now checked BEFORE raising the
    recursion limit (the original early return left the limit permanently
    raised), and the limit is restored in a finally block so an exception
    in _cut_dfs cannot leak it either.
    """
    # Dispatch if we have a hypergraph
    if 'hypergraph' == graph.__class__.__name__:
        return _cut_hyperedges(graph)
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))
    try:
        pre = {}    # Pre-ordering
        low = {}    # Lowest pre[] reachable going down the spanning tree + one backedge
        spanning_tree = {}
        reply = []
        pre[None] = 0
        for each in graph:
            if each not in pre:
                spanning_tree[each] = None
                _cut_dfs(graph, spanning_tree, pre, low, reply, each)
    finally:
        # always restore the caller's recursion limit
        setrecursionlimit(recursionlimit)
    return reply
Return the cut - edges of the given graph . A cut edge or bridge is an edge of a graph whose removal increases the number of connected components in the graph .
176
33
226,532
def _cut_hyperedges(hypergraph):
    """Return the cut-hyperedges of the given hypergraph.

    Entries of the underlying bipartite graph tagged 'h' represent
    hyperedges; cut nodes with that tag are the cut-hyperedges.
    """
    return [each[0] for each in cut_nodes(hypergraph.graph)
            if each[1] == 'h']
Return the cut - hyperedges of the given hypergraph .
59
13
226,533
def cut_nodes(graph):
    """Return the cut-nodes (articulation points) of the given graph.

    A cut node is one whose removal increases the number of connected
    components in the graph.

    Fixes: the hypergraph dispatch is now checked BEFORE raising the
    recursion limit (the original early return left the limit permanently
    raised), and the limit is restored in a finally block so an exception
    in _cut_dfs cannot leak it either.
    """
    # Dispatch if we have a hypergraph
    if 'hypergraph' == graph.__class__.__name__:
        return _cut_hypernodes(graph)
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))
    try:
        pre = {}    # Pre-ordering
        low = {}    # Lowest pre[] reachable going down the spanning tree + one backedge
        reply = {}
        spanning_tree = {}
        pre[None] = 0
        # Create spanning trees, calculate pre[], low[]
        for each in graph:
            if each not in pre:
                spanning_tree[each] = None
                _cut_dfs(graph, spanning_tree, pre, low, [], each)
        # Find cuts
        for each in graph:
            # If node is not a root
            if spanning_tree[each] is not None:
                for other in graph[each]:
                    # No back-edge from a descendant to an ancestor of 'each'
                    if low[other] >= pre[each] and spanning_tree[other] == each:
                        reply[each] = 1
            # If node is a root
            else:
                children = 0
                for other in graph:
                    if spanning_tree[other] == each:
                        children = children + 1
                # root is cut-vertex iff it has two or more children
                if children >= 2:
                    reply[each] = 1
    finally:
        # always restore the caller's recursion limit
        setrecursionlimit(recursionlimit)
    return list(reply.keys())
Return the cut - nodes of the given graph . A cut node or articulation point is a node of a graph whose removal increases the number of connected components in the graph .
335
35
226,534
def _cut_hypernodes(hypergraph):
    """Return the cut-nodes of the given hypergraph.

    Entries of the underlying bipartite graph tagged 'n' represent
    ordinary nodes; cut nodes with that tag are the hypergraph cut-nodes.
    """
    return [each[0] for each in cut_nodes(hypergraph.graph)
            if each[1] == 'n']
Return the cut - nodes of the given hypergraph .
58
11
226,535
def del_edge(self, edge):
    """Remove `edge` (and, except for self-loops, its mirror) from the graph."""
    src, dst = edge
    self.node_neighbors[src].remove(dst)
    self.del_edge_labeling((src, dst))
    if src != dst:
        # also drop the reverse direction, unless the edge is a self-loop
        self.node_neighbors[dst].remove(src)
        self.del_edge_labeling((dst, src))
Remove an edge from the graph .
82
7
226,536
def edge_weight(self, edge):
    """Return the weight of `edge`, storing the default weight if unset."""
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.WEIGHT_ATTRIBUTE_NAME,
                                 self.DEFAULT_WEIGHT)
Get the weight of an edge .
45
7
226,537
def set_edge_weight(self, edge, wt):
    """Set the weight of `edge` (and of its mirror when undirected)."""
    self.set_edge_properties(edge, weight=wt)
    if self.DIRECTED:
        return
    self.set_edge_properties((edge[1], edge[0]), weight=wt)
Set the weight of an edge .
62
7
226,538
def edge_label(self, edge):
    """Return the label of `edge`, storing the default label if unset."""
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.LABEL_ATTRIBUTE_NAME,
                                 self.DEFAULT_LABEL)
Get the label of an edge .
46
7
226,539
def set_edge_label(self, edge, label):
    """Set the label of `edge` (and of its mirror when undirected)."""
    self.set_edge_properties(edge, label=label)
    if self.DIRECTED:
        return
    self.set_edge_properties((edge[1], edge[0]), label=label)
Set the label of an edge .
59
7
226,540
def add_edge_attribute(self, edge, attr):
    """Append `attr` to the edge's attribute list (mirror too, if undirected
    and not a self-loop)."""
    self.edge_attr[edge] = self.edge_attributes(edge) + [attr]
    if self.DIRECTED or edge[0] == edge[1]:
        return
    mirror = (edge[1], edge[0])
    self.edge_attr[mirror] = self.edge_attributes(mirror) + [attr]
Add attribute to the given edge .
100
7
226,541
def add_node_attribute(self, node, attr):
    """Append `attr` to the attribute list of `node`."""
    existing = self.node_attr[node]
    self.node_attr[node] = existing + [attr]
Add attribute to the given node .
37
7
226,542
def activation_shell_code(self, shell=None):
    """Return shell code that activates this suite.

    The code appends the suite's tools path to PATH, generated for the
    given shell (or the default shell when None).
    """
    from rez.shells import create_shell
    from rez.rex import RexExecutor
    executor = RexExecutor(interpreter=create_shell(shell),
                           parent_variables=["PATH"],
                           shebang=False)
    executor.env.PATH.append(self.tools_path)
    return executor.get_output().strip()
Get shell code that should be run to activate this suite .
93
12
226,543
def context(self, name):
    """Return the named context, loading it from disk on first access."""
    data = self._context(name)
    context = data.get("context")
    if context:
        return context
    # not yet loaded - the suite must itself have been loaded from disk
    assert self.load_path
    context_path = os.path.join(self.load_path, "contexts", "%s.rxt" % name)
    context = ResolvedContext.load(context_path)
    # cache so subsequent lookups skip the disk load
    data["context"] = context
    data["loaded"] = True
    return context
Get a context .
98
4
226,544
def add_context(self, name, context, prefix_char=None):
    """Add a resolved context to the suite under `name`.

    Raises:
        SuiteError: if the name is already taken or the context failed
            to resolve.
    """
    if name in self.contexts:
        raise SuiteError("Context already in suite: %r" % name)
    if not context.success:
        raise SuiteError("Context is not resolved: %r" % name)
    # a copy is stored so later mutation of the caller's context is isolated
    self.contexts[name] = dict(name=name,
                               context=context.copy(),
                               tool_aliases={},
                               hidden_tools=set(),
                               priority=self._next_priority,
                               prefix_char=prefix_char)
    self._flush_tools()
Add a context to the suite .
125
7
226,545
def find_contexts(self, in_request=None, in_resolve=None):
    """Return names of contexts in the suite matching the search criteria.

    `in_request` keeps contexts whose request contains that package name;
    `in_resolve` (a PackageRequest or request string) keeps contexts whose
    resolve satisfies it - or, for a conflict request, avoids it.
    """
    names = self.context_names
    if in_request:
        def _in_request(name):
            context = self.context(name)
            packages = set(x.name for x in
                           context.requested_packages(True))
            return (in_request in packages)
        names = [x for x in names if _in_request(x)]
    if in_resolve:
        # NOTE: basestring - Python 2
        if isinstance(in_resolve, basestring):
            in_resolve = PackageRequest(in_resolve)
        def _in_resolve(name):
            context = self.context(name)
            variant = context.get_resolved_package(in_resolve.name)
            if variant:
                overlap = (variant.version in in_resolve.range)
                # match when the request is satisfied, or when a conflict
                # request ("!pkg...") is avoided
                return ((in_resolve.conflict and not overlap)
                        or (overlap and not in_resolve.conflict))
            else:
                # package not in the resolve: only a conflict request matches
                return in_resolve.conflict
        names = [x for x in names if _in_resolve(x)]
    return names
Find contexts in the suite based on search criteria .
236
10
226,546
def remove_context(self, name):
    """Remove the named context from the suite and refresh tool state."""
    self._context(name)  # raises if the context does not exist
    del self.contexts[name]
    self._flush_tools()
Remove a context from the suite .
32
7
226,547
def set_context_prefix(self, name, prefix):
    """Set the tool-alias prefix for the named context."""
    self._context(name)["prefix"] = prefix
    self._flush_tools()
Set a context's prefix.
39
6
226,548
def set_context_suffix(self, name, suffix):
    """Set the tool-alias suffix for the named context."""
    self._context(name)["suffix"] = suffix
    self._flush_tools()
Set a context's suffix.
41
6
226,549
def bump_context(self, name):
    """Give the named context's tools priority over all other contexts."""
    self._context(name)["priority"] = self._next_priority
    self._flush_tools()
Causes the context's tools to take priority over all others.
40
13
226,550
def hide_tool(self, context_name, tool_name):
    """Hide a tool so it is not exposed by the suite."""
    data = self._context(context_name)
    hidden = data["hidden_tools"]
    if tool_name in hidden:
        return  # already hidden - nothing to do
    self._validate_tool(context_name, tool_name)
    hidden.add(tool_name)
    self._flush_tools()
Hide a tool so that it is not exposed in the suite .
83
13
226,551
def unhide_tool(self, context_name, tool_name):
    """Unhide a tool so it may be exposed by the suite again."""
    hidden = self._context(context_name)["hidden_tools"]
    if tool_name in hidden:
        hidden.discard(tool_name)  # same as remove() since we know it's there
        self._flush_tools()
Unhide a tool so that it may be exposed in a suite .
67
14
226,552
def alias_tool(self, context_name, tool_name, tool_alias):
    """Register `tool_alias` as the exposed name for a context's tool.

    Raises:
        SuiteError: if the tool is already aliased in this context.
    """
    data = self._context(context_name)
    aliases = data["tool_aliases"]
    if tool_name in aliases:
        raise SuiteError("Tool %r in context %r is already aliased to %r"
                         % (tool_name, context_name, aliases[tool_name]))
    self._validate_tool(context_name, tool_name)
    aliases[tool_name] = tool_alias
    self._flush_tools()
Register an alias for a specific tool .
121
8
226,553
def unalias_tool(self, context_name, tool_name):
    """Remove any alias registered for the given context tool."""
    aliases = self._context(context_name)["tool_aliases"]
    if tool_name in aliases:
        aliases.pop(tool_name)
        self._flush_tools()
Deregister an alias for a specific tool .
61
11
226,554
def get_tool_filepath(self, tool_alias):
    """Return the full path of a visible tool's wrapper script, or None.

    None is returned when the alias is not visible, or when the suite
    has no tools path.
    """
    if tool_alias not in self.get_tools():
        return None
    if self.tools_path is None:
        return None
    return os.path.join(self.tools_path, tool_alias)
Given a visible tool alias return the full path to the executable .
69
13
226,555
def get_tool_context(self, tool_alias):
    """Return the name of the context providing `tool_alias`, or None."""
    entry = self.get_tools().get(tool_alias)
    return entry["context_name"] if entry else None
Given a visible tool alias return the name of the context it belongs to .
51
15
226,556
def validate(self):
    """Validate every context in the suite.

    Raises:
        SuiteError: wrapping the first ResolvedContextError encountered.
    """
    for context_name in self.context_names:
        context = self.context(context_name)
        try:
            context.validate()
        except ResolvedContextError as e:
            raise SuiteError("Error in context %r: %s"
                             % (context_name, str(e)))
Validate the suite .
68
5
226,557
def save(self, path, verbose=False):
    """Write the suite (suite.yaml, contexts, tool wrappers) to `path`.

    Saving over the suite's own load path is allowed (contexts are pulled
    into memory before the directory is deleted); any other existing path
    raises SuiteError. NOTE: Python 2 print statements below.
    """
    path = os.path.realpath(path)
    if os.path.exists(path):
        if self.load_path and self.load_path == path:
            if verbose:
                print "saving over previous suite..."
            for context_name in self.context_names:
                self.context(context_name)  # load before dir deleted
            shutil.rmtree(path)
        else:
            raise SuiteError("Cannot save, path exists: %r" % path)
    contexts_path = os.path.join(path, "contexts")
    os.makedirs(contexts_path)
    # write suite data
    data = self.to_dict()
    filepath = os.path.join(path, "suite.yaml")
    with open(filepath, "w") as f:
        f.write(dump_yaml(data))
    # write contexts
    for context_name in self.context_names:
        context = self.context(context_name)
        context._set_parent_suite(path, context_name)
        filepath = self._context_path(context_name, path)
        if verbose:
            print "writing %r..." % filepath
        context.save(filepath)
    # create alias wrappers
    tools_path = os.path.join(path, "bin")
    os.makedirs(tools_path)
    if verbose:
        print "creating alias wrappers in %r..." % tools_path
    tools = self.get_tools()
    # NOTE: iteritems() - Python 2
    for tool_alias, d in tools.iteritems():
        tool_name = d["tool_name"]
        context_name = d["context_name"]
        data = self._context(context_name)
        prefix_char = data.get("prefix_char")
        if verbose:
            print ("creating %r -> %r (%s context)..."
                   % (tool_alias, tool_name, context_name))
        filepath = os.path.join(tools_path, tool_alias)
        create_forwarding_script(filepath,
                                 module="suite",
                                 func_name="_FWD__invoke_suite_tool_alias",
                                 context_name=context_name,
                                 tool_name=tool_name,
                                 prefix_char=prefix_char)
Save the suite to disk .
505
6
226,558
def print_info(self, buf=sys.stdout, verbose=False):
    """Print a summary of the suite's contexts (and, verbosely, tools)."""
    _pr = Printer(buf)
    if not self.contexts:
        _pr("Suite is empty.")
        return
    # NOTE: iterkeys() - Python 2
    context_names = sorted(self.contexts.iterkeys())
    _pr("Suite contains %d contexts:" % len(context_names))
    if not verbose:
        _pr(' '.join(context_names))
        return
    # gather per-context tool and package (variant) counts
    tools = self.get_tools().values()
    context_tools = defaultdict(set)
    context_variants = defaultdict(set)
    for entry in tools:
        context_name = entry["context_name"]
        context_tools[context_name].add(entry["tool_name"])
        context_variants[context_name].add(str(entry["variant"]))
    _pr()
    rows = [["NAME", "VISIBLE TOOLS", "PATH"],
            ["----", "-------------", "----"]]
    for context_name in context_names:
        context_path = self._context_path(context_name) or '-'
        ntools = len(context_tools.get(context_name, []))
        if ntools:
            nvariants = len(context_variants[context_name])
            short_desc = "%d tools from %d packages" % (ntools, nvariants)
        else:
            short_desc = "no tools"
        rows.append((context_name, short_desc, context_path))
    _pr("\n".join(columnise(rows)))
Prints a message summarising the contents of the suite .
360
12
226,559
def get_valid_build_systems(working_dir, package=None):
    """Return build system classes that could build the source in `working_dir`.

    When the developer package names a build system explicitly, only that
    class is returned; otherwise all systems whose files are detected in
    the directory are returned, minus any 'child' systems used by another
    detected system.
    """
    from rez.plugin_managers import plugin_manager
    from rez.exceptions import PackageMetadataError
    try:
        package = package or get_developer_package(working_dir)
    except PackageMetadataError:
        # no package, or bad package
        pass
    if package:
        if getattr(package, "build_command", None) is not None:
            # explicit build_command implies the 'custom' build system
            buildsys_name = "custom"
        else:
            buildsys_name = getattr(package, "build_system", None)
        # package explicitly specifies build system
        if buildsys_name:
            cls = plugin_manager.get_plugin_class('build_system',
                                                  buildsys_name)
            return [cls]
    # detect valid build systems
    clss = []
    for buildsys_name in get_buildsys_types():
        cls = plugin_manager.get_plugin_class('build_system', buildsys_name)
        if cls.is_valid_root(working_dir, package=package):
            clss.append(cls)
    # Sometimes files for multiple build systems can be present, because one
    # build system uses another (a 'child' build system) - eg, cmake uses
    # make. Detect this case and ignore files from the child build system.
    #
    child_clss = set(x.child_build_system() for x in clss)
    clss = list(set(clss) - child_clss)
    return clss
Returns the build system classes that could build the source in given dir .
327
14
226,560
def create_build_system ( working_dir , buildsys_type = None , package = None , opts = None , write_build_scripts = False , verbose = False , build_args = [ ] , child_build_args = [ ] ) : from rez . plugin_managers import plugin_manager # detect build system if necessary if not buildsys_type : clss = get_valid_build_systems ( working_dir , package = package ) if not clss : raise BuildSystemError ( "No build system is associated with the path %s" % working_dir ) if len ( clss ) != 1 : s = ', ' . join ( x . name ( ) for x in clss ) raise BuildSystemError ( ( "Source could be built with one of: %s; " "Please specify a build system" ) % s ) buildsys_type = iter ( clss ) . next ( ) . name ( ) # create instance of build system cls_ = plugin_manager . get_plugin_class ( 'build_system' , buildsys_type ) return cls_ ( working_dir , opts = opts , package = package , write_build_scripts = write_build_scripts , verbose = verbose , build_args = build_args , child_build_args = child_build_args )
Return a new build system that can build the source in working_dir .
292
15
226,561
def build(self, context, variant, build_path, install_path, install=False,
          build_type=BuildType.local):
    """Implement this method to perform the actual build.

    Abstract in this base class: subclasses must override.

    Raises:
        NotImplementedError: always, here.
    """
    raise NotImplementedError
Implement this method to perform the actual build .
37
10
226,562
def get_standard_vars(cls, context, variant, build_type, install,
                      build_path, install_path=None):
    """Return the standard REZ_BUILD_* environment variables for a build."""
    from rez.config import config
    package = variant.parent
    # NOTE: map() result used as a sequence - Python 2
    variant_requires = map(str, variant.variant_requires)
    if variant.index is None:
        variant_subpath = ''
    else:
        variant_subpath = variant._non_shortlinked_subpath
    vars_ = {
        'REZ_BUILD_ENV': 1,
        'REZ_BUILD_PATH': build_path,
        'REZ_BUILD_THREAD_COUNT': package.config.build_thread_count,
        'REZ_BUILD_VARIANT_INDEX': variant.index or 0,
        'REZ_BUILD_VARIANT_REQUIRES': ' '.join(variant_requires),
        'REZ_BUILD_VARIANT_SUBPATH': variant_subpath,
        'REZ_BUILD_PROJECT_VERSION': str(package.version),
        'REZ_BUILD_PROJECT_NAME': package.name,
        'REZ_BUILD_PROJECT_DESCRIPTION': (package.description or '').strip(),
        'REZ_BUILD_PROJECT_FILE': package.filepath,
        'REZ_BUILD_SOURCE_PATH': os.path.dirname(package.filepath),
        'REZ_BUILD_REQUIRES': ' '.join(
            str(x) for x in context.requested_packages(True)),
        'REZ_BUILD_REQUIRES_UNVERSIONED': ' '.join(
            x.name for x in context.requested_packages(True)),
        'REZ_BUILD_TYPE': build_type.name,
        'REZ_BUILD_INSTALL': 1 if install else 0,
    }
    if install_path:
        vars_['REZ_BUILD_INSTALL_PATH'] = install_path
    # rez-1 compatibility variable, only for central (release) builds
    if config.rez_1_environment_variables and \
            not config.disable_rez_1_compatibility and \
            build_type == BuildType.central:
        vars_['REZ_IN_REZ_RELEASE'] = 1
    return vars_
Returns a standard set of environment variables that can be set for the build system to use
509
17
226,563
def set_standard_vars(cls, executor, context, variant, build_type,
                      install, build_path, install_path=None):
    """Apply the standard REZ_BUILD_* environment variables to `executor`."""
    vars = cls.get_standard_vars(context=context,
                                 variant=variant,
                                 build_type=build_type,
                                 install=install,
                                 build_path=build_path,
                                 install_path=install_path)
    # NOTE: iteritems() - Python 2
    for var, value in vars.iteritems():
        executor.env[var] = value
Sets a standard set of environment variables for the build system to use
106
14
226,564
def run_pip_command(command_args, pip_version=None, python_version=None):
    """Run a pip command, inside a rez context when one can be created.

    Returns the process object of the spawned command.
    """
    pip_exe, context = find_pip(pip_version, python_version)
    full_command = [pip_exe] + list(command_args)

    if context is None:
        # no usable rez pip package - fall through to the system pip
        return popen(full_command)

    return context.execute_shell(command=full_command, block=False)
Run a pip command .
86
5
226,565
def find_pip(pip_version=None, python_version=None):
    """Locate a pip executable, preferring one from a rez pip package.

    Returns:
        (pip_exe, context) tuple; context is None when falling back on
        the system pip.
    """
    try:
        return "pip", create_context(pip_version, python_version)
    except BuildError as e:
        # fall back on system pip. Not ideal but at least it's something
        from rez.backport.shutilwhich import which
        system_pip = which("pip")
        if not system_pip:
            raise e
        print_warning(
            "pip rez package could not be found; system 'pip' command (%s) "
            "will be used instead." % system_pip)
        return system_pip, None
Find a pip exe using the given python version .
141
11
226,566
def create_context(pip_version=None, python_version=None):
    """Create a resolved context containing the requested pip and python.

    Raises:
        BuildError: When the pip or python rez packages cannot be resolved.
    """
    # determine pip pkg to use for install, and python variants to install on
    pip_req = "pip-%s" % str(pip_version) if pip_version else "pip"

    if python_version:
        major_minor_ver = Version(str(python_version)).trim(2)
    else:
        # use latest major.minor
        package = get_latest_package("python")
        if package:
            major_minor_ver = package.version.trim(2)
        else:
            # no python package. We're gonna fail, let's just choose current
            # python version (and fail at context creation time)
            major_minor_ver = '.'.join(map(str, sys.version_info[:2]))

    py_req = "python-%s" % str(major_minor_ver)

    # use pip + latest python to perform pip download operations
    request = [pip_req, py_req]

    with convert_errors(from_=(PackageFamilyNotFoundError, PackageNotFoundError),
                        to=BuildError,
                        msg="Cannot run - pip or python rez "
                            "package is not present"):
        context = ResolvedContext(request)

    # print pip package used to perform the install
    pip_variant = context.get_resolved_package("pip")
    pip_package = pip_variant.parent
    print_info("Using %s (%s)" % (pip_package.qualified_name, pip_variant.uri))

    return context
Create a context containing the specific pip and python .
377
10
226,567
def convert_old_variant_handle(handle_dict):
    """Convert a variant handle from a pre-4.0 serialize_version into the
    current filesystem-repository form."""
    old_variables = handle_dict.get("variables", {})

    variables = dict(repository_type="filesystem")
    for old_key, new_key in variant_key_conversions.items():
        # note: missing keys are deliberately stored as None
        variables[new_key] = old_variables.get(old_key)

    basename = os.path.basename(handle_dict["path"])
    if os.path.splitext(basename)[0] == "package":
        key = "filesystem.variant"
    else:
        key = "filesystem.variant.combined"

    return dict(key=key, variables=variables)
Convert a variant handle from serialize_version < 4.0.
166
15
226,568
def convert_requirement(req):
    """Convert a pkg_resources.Requirement into a list of Rez package
    request strings.

    Fix: the unknown-operator warning used the Python-2-only
    ``print >> sys.stderr`` statement; it now writes to sys.stderr
    directly, which behaves the same and also parses on Python 3.
    """
    pkg_name = convert_name(req.project_name)
    if not req.specs:
        # unversioned requirement
        return [pkg_name]

    req_strs = []
    for op, ver in req.specs:
        ver = convert_version(ver)
        if op == "<":
            req_strs.append("%s-0+<%s" % (pkg_name, ver))
        elif op == "<=":
            req_strs.append("%s-0+<%s|%s" % (pkg_name, ver, ver))
        elif op == "==":
            req_strs.append("%s-%s" % (pkg_name, ver))
        elif op == ">=":
            req_strs.append("%s-%s+" % (pkg_name, ver))
        elif op == ">":
            # strictly greater: at-least `ver`, excluding `ver` itself
            req_strs.append("%s-%s+" % (pkg_name, ver))
            req_strs.append("!%s-%s" % (pkg_name, ver))
        elif op == "!=":
            req_strs.append("!%s-%s" % (pkg_name, ver))
        else:
            sys.stderr.write(
                "Warning: Can't understand op '%s', just depending on "
                "unversioned package...\n" % op)
            req_strs.append(pkg_name)
    return req_strs
Converts a pkg_resources.Requirement object into a list of Rez package request strings.
359
20
226,569
def get_dist_dependencies(name, recurse=True):
    """Get the rez package names of the dependencies of an already
    installed distribution.

    Walks the pkg_resources requirement graph breadth-first; with
    recurse=False only the distribution itself plus its direct
    dependencies (two levels) are collected.
    """
    dist = pkg_resources.get_distribution(name)
    pkg_name = convert_name(dist.project_name)
    reqs = set()
    # NOTE(review): `working` initially holds a Distribution object, later
    # plain name strings; get_distribution appears to accept both - confirm
    working = set([dist])
    depth = 0

    while working:
        deps = set()
        for distname in working:
            dist = pkg_resources.get_distribution(distname)
            pkg_name = convert_name(dist.project_name)
            reqs.add(pkg_name)

            for req in dist.requires():
                reqs_ = convert_requirement(req)
                # keep only the unversioned package name; drop '!' conflicts
                deps |= set(x.split('-', 1)[0] for x in reqs_
                            if not x.startswith('!'))

        # next frontier: names not yet visited
        working = deps - reqs
        depth += 1

        if (not recurse) and (depth >= 2):
            break

    return reqs
Get the dependencies of the given already installed distribution .
200
10
226,570
def add_graph(self, other):
    """Merge every node and edge of `other` into this graph."""
    self.add_nodes(node for node in other.nodes()
                   if node not in self.nodes())

    for node in other.nodes():
        for neighbor in other.neighbors(node):
            edge = (node, neighbor)
            if not self.has_edge(edge):
                self.add_edge(edge)
Add other graph to this graph .
98
7
226,571
def add_spanning_tree(self, st):
    """Add a spanning tree `st` (a child -> parent mapping) to the graph,
    creating a parent->child edge per entry."""
    self.add_nodes(list(st.keys()))
    for child, parent in st.items():
        # the tree root maps to None and gets no incoming edge
        if parent is not None:
            self.add_edge((parent, child))
Add a spanning tree to the graph .
60
8
226,572
def complete(self):
    """Make the graph complete by adding every missing edge."""
    for source in self.nodes():
        for target in self.nodes():
            # skip self-loops and edges that already exist
            if source != target and not self.has_edge((source, target)):
                self.add_edge((source, target))
Make the graph a complete graph .
57
7
226,573
def inverse(self):
    """Return a new graph containing exactly the edges this graph lacks."""
    inverted = self.__class__()
    inverted.add_nodes(self.nodes())
    inverted.complete()

    # strip out every edge the original graph has
    for edge in self.edges():
        if inverted.has_edge(edge):
            inverted.del_edge(edge)

    return inverted
Return the inverse of the graph .
64
7
226,574
def reverse(self):
    """Generate the reverse of a directed graph.

    Edge weights, labels and attributes are preserved; only edge
    direction flips.
    """
    assert self.DIRECTED, \
        "Undirected graph types such as %s cannot be reversed" % self.__class__.__name__

    reversed_graph = self.__class__()
    reversed_graph.add_nodes(n for n in self.nodes())

    for (u, v) in self.edges():
        reversed_graph.add_edge((v, u),
                                self.edge_weight((u, v)),
                                self.edge_label((u, v)),
                                self.edge_attributes((u, v)))
    return reversed_graph
Generate the reverse of a directed graph returns an identical graph if not directed . Attributes & weights are preserved .
151
22
226,575
def get_lock_request(name, version, patch_lock, weak=True):
    """Given a package name/version and a patch lock, return the
    equivalent PackageRequest, or None when no lock applies."""
    prefix = '~' if weak else ''

    if patch_lock == PatchLock.lock:
        return PackageRequest("%s%s==%s" % (prefix, name, str(version)))

    if (patch_lock == PatchLock.no_lock) or (not version):
        return None

    # lock down to the rank implied by the patch lock
    trimmed = version.trim(patch_lock.rank)
    return PackageRequest("%s%s-%s" % (prefix, name, str(trimmed)))
Given a package and patch lock return the equivalent request .
129
11
226,576
def requested_packages(self, include_implicit=False):
    """Return the list of requested packages, optionally with the
    implicit packages appended."""
    if not include_implicit:
        return self._package_requests
    return self._package_requests + self.implicit_packages
Get packages in the request .
45
6
226,577
def get_resolved_package(self, name):
    """Return the first resolved Variant with the given package name, or
    None when the package is not in the resolve."""
    for pkg in self._resolved_packages:
        if pkg.name == name:
            return pkg
    return None
Returns a Variant object or None if the package is not in the resolve .
49
15
226,578
def get_patched_request(self, package_requests=None,
                        package_subtractions=None, strict=False, rank=0):
    """Get a patched request: the current request (or, in strict mode,
    the exact resolved packages) with overrides applied, subtractions
    removed, and optional rank-limiting conflicts appended.

    Note: `package_requests` entries prefixed '^' are treated as
    subtractions and removed from the caller's list in place.
    """
    # assemble source request
    if strict:
        request = []
        for variant in self.resolved_packages:
            req = PackageRequest(variant.qualified_package_name)
            request.append(req)
    else:
        request = self.requested_packages()[:]

    # convert '^foo'-style requests to subtractions
    if package_requests:
        package_subtractions = package_subtractions or []
        indexes = []
        for i, req in enumerate(package_requests):
            name = str(req)
            if name.startswith('^'):
                package_subtractions.append(name[1:])
                indexes.append(i)
        # delete from the end so earlier indexes stay valid
        for i in reversed(indexes):
            del package_requests[i]

    # apply subtractions
    if package_subtractions:
        request = [x for x in request if x.name not in package_subtractions]

    # apply overrides
    if package_requests:
        request_dict = dict((x.name, (i, x)) for i, x in enumerate(request))
        request_ = []
        for req in package_requests:
            if isinstance(req, basestring):
                req = PackageRequest(req)
            if req.name in request_dict:
                i, req_ = request_dict[req.name]
                # replace in place only when conflict/weak flags match;
                # otherwise append as an additional request
                if (req_ is not None) and (req_.conflict == req.conflict) and (req_.weak == req.weak):
                    request[i] = req
                    del request_dict[req.name]
                else:
                    request_.append(req)
            else:
                request_.append(req)
        request += request_

    # add rank limiters
    # NOTE(review): this branch iterates `package_requests` assuming
    # PackageRequest-like entries and a non-None list - confirm callers
    # always pass one when rank > 1
    if not strict and rank > 1:
        overrides = set(x.name for x in package_requests if not x.conflict)
        rank_limiters = []
        for variant in self.resolved_packages:
            if variant.name not in overrides:
                if len(variant.version) >= rank:
                    version = variant.version.trim(rank - 1)
                    version = version.next()
                    req = "~%s<%s" % (variant.name, str(version))
                    rank_limiters.append(req)
        request += rank_limiters

    return request
Get a patched request .
506
5
226,579
def write_to_buffer(self, buf):
    """Serialize this context into `buf`, as YAML or JSON depending on
    the `rxt_as_yaml` config setting."""
    doc = self.to_dict()

    if config.rxt_as_yaml:
        serialized = dump_yaml(doc)
    else:
        serialized = json.dumps(doc, indent=4, separators=(",", ": "))

    buf.write(serialized)
Save the context to a buffer .
73
7
226,580
def get_current(cls):
    """Return the context for the current environment (pointed at by
    $REZ_RXT_FILE), or None when there is none."""
    filepath = os.getenv("REZ_RXT_FILE")
    if filepath and os.path.exists(filepath):
        return cls.load(filepath)
    return None
Get the context for the current env if there is one .
54
12
226,581
def load(cls, path):
    """Load a resolved context from the file at `path`, recording the
    load path on the returned context."""
    with open(path) as f:
        context = cls.read_from_buffer(f, path)
    context.set_load_path(path)
    return context
Load a resolved context from file .
44
7
226,582
def read_from_buffer(cls, buf, identifier_str=None):
    """Load the context from a buffer, reporting any failure through the
    class's _load_error handler."""
    try:
        context = cls._read_from_buffer(buf, identifier_str)
    except Exception as exc:
        cls._load_error(exc, identifier_str)
    else:
        return context
Load the context from a buffer .
57
7
226,583
def get_resolve_diff(self, other):
    """Get the difference between the resolve in this context and
    another, as a dict with any of the keys 'newer_packages',
    'older_packages', 'added_packages', 'removed_packages'.

    Raises:
        ResolvedContextError: If the two contexts have different
            package search paths (a diff would be meaningless).
    """
    if self.package_paths != other.package_paths:
        from difflib import ndiff
        diff = ndiff(self.package_paths, other.package_paths)
        raise ResolvedContextError("Cannot diff resolves, package search "
                                   "paths differ:\n%s" % '\n'.join(diff))

    d = {}
    # compare parent packages, keeping only those unique to each side
    self_pkgs_ = set(x.parent for x in self._resolved_packages)
    other_pkgs_ = set(x.parent for x in other._resolved_packages)
    self_pkgs = self_pkgs_ - other_pkgs_
    other_pkgs = other_pkgs_ - self_pkgs_

    if not (self_pkgs or other_pkgs):
        return d

    self_fams = dict((x.name, x) for x in self_pkgs)
    other_fams = dict((x.name, x) for x in other_pkgs)

    newer_packages = {}
    older_packages = {}
    added_packages = set()
    removed_packages = set()

    for pkg in self_pkgs:
        if pkg.name not in other_fams:
            removed_packages.add(pkg)
        else:
            other_pkg = other_fams[pkg.name]
            if other_pkg.version > pkg.version:
                # other is newer: list the intervening versions ascending
                r = VersionRange.as_span(lower_version=pkg.version,
                                         upper_version=other_pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version)
                newer_packages[pkg.name] = pkgs
            elif other_pkg.version < pkg.version:
                # other is older: list the intervening versions descending
                r = VersionRange.as_span(lower_version=other_pkg.version,
                                         upper_version=pkg.version)
                it = iter_packages(pkg.name, range_=r)
                pkgs = sorted(it, key=lambda x: x.version, reverse=True)
                older_packages[pkg.name] = pkgs

    for pkg in other_pkgs:
        if pkg.name not in self_fams:
            added_packages.add(pkg)

    # only include non-empty categories in the result
    if newer_packages:
        d["newer_packages"] = newer_packages
    if older_packages:
        d["older_packages"] = older_packages
    if added_packages:
        d["added_packages"] = added_packages
    if removed_packages:
        d["removed_packages"] = removed_packages

    return d
Get the difference between the resolve in this context and another .
579
12
226,584
def print_resolve_diff(self, other, heading=None):
    """Print a columnised diff between this resolve and another.

    `heading` may be True (derive a two-column heading from the two
    contexts' load paths) or an explicit (left, right) tuple.
    Note: uses Python-2-only dict.iteritems() and the print statement.
    """
    d = self.get_resolve_diff(other)
    if not d:
        return

    rows = []
    if heading is True and self.load_path and other.load_path:
        a = os.path.basename(self.load_path)
        b = os.path.basename(other.load_path)
        heading = (a, b)
    if isinstance(heading, tuple):
        rows.append(list(heading) + [""])
        # underline row matching the heading widths
        rows.append(('-' * len(heading[0]), '-' * len(heading[1]), ""))

    newer_packages = d.get("newer_packages", {})
    older_packages = d.get("older_packages", {})
    added_packages = d.get("added_packages", set())
    removed_packages = d.get("removed_packages", set())

    if newer_packages:
        for name, pkgs in newer_packages.iteritems():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(+%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name, diff_str))

    if older_packages:
        for name, pkgs in older_packages.iteritems():
            this_pkg = pkgs[0]
            other_pkg = pkgs[-1]
            diff_str = "(-%d versions)" % (len(pkgs) - 1)
            rows.append((this_pkg.qualified_name,
                         other_pkg.qualified_name, diff_str))

    if added_packages:
        for pkg in sorted(added_packages, key=lambda x: x.name):
            rows.append(("-", pkg.qualified_name, ""))

    if removed_packages:
        for pkg in sorted(removed_packages, key=lambda x: x.name):
            rows.append((pkg.qualified_name, "-", ""))

    print '\n'.join(columnise(rows))
Print the difference between the resolve of two contexts .
495
10
226,585
def get_dependency_graph(self):
    """Generate the package dependency graph as a pygraph digraph, one
    node per resolved package, one edge per non-conflict requirement."""
    from rez.vendor.pygraph.classes.digraph import digraph

    # collect nodes (name -> qualified name) and dependency edges
    nodes = {}
    edges = set()
    for variant in self._resolved_packages:
        nodes[variant.name] = variant.qualified_package_name
        for request in variant.get_requires():
            if not request.conflict:
                edges.add((variant.name, request.name))

    g = digraph()
    base_attrs = [("fontsize", 10),
                  ("fillcolor", "#AAFFAA"),
                  ("style", "filled")]
    for name, qname in nodes.items():
        g.add_node(name, attrs=base_attrs + [("label", qname)])
    for edge in edges:
        g.add_edge(edge)

    return g
Generate the dependency graph .
203
6
226,586
def validate(self):
    """Validate the data of every resolved package, re-raising any rez
    error as a ResolvedContextError."""
    try:
        for pkg in self.resolved_packages:
            pkg.validate_data()
    except RezError as err:
        msg = "%s: %s" % (err.__class__.__name__, str(err))
        raise ResolvedContextError(msg)
Validate the context .
62
5
226,587
def get_environ(self, parent_environ=None):
    """Return the environ dict resulting from interpreting this context
    with a passive Python interpreter."""
    interpreter = Python(target_environ={}, passive=True)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    return executor.get_output()
Get the environ dict resulting from interpreting this context .
69
11
226,588
def get_key(self, key, request_only=False):
    """Return {pkg_name: (variant, value)} for attribute `key` of each
    resolved package whose value is not None; with request_only, limit
    to directly-requested (non-conflict) packages."""
    requested = set(x.name for x in self._package_requests if not x.conflict)

    values = {}
    for pkg in self.resolved_packages:
        if request_only and pkg.name not in requested:
            continue
        value = getattr(pkg, key)
        if value is not None:
            values[pkg.name] = (pkg, value)
    return values
Get a data key value for each resolved package .
105
10
226,589
def get_conflicting_tools(self, request_only=False):
    """Return tools of the same name provided by more than one package.

    Fix: replaced the Python-2-only itervalues()/iteritems() dict calls
    with values()/items(), which behave identically and also work on
    Python 3.

    Returns:
        dict of {tool_name: set(variants)} for each conflicting tool.
    """
    from collections import defaultdict

    tool_sets = defaultdict(set)
    tools_dict = self.get_tools(request_only=request_only)
    for variant, tools in tools_dict.values():
        for tool in tools:
            tool_sets[tool].add(variant)

    # keep only tools provided by more than one variant
    return dict((k, v) for k, v in tool_sets.items() if len(v) > 1)
Returns tools of the same name provided by more than one package .
114
13
226,590
def get_shell_code(self, shell=None, parent_environ=None,
                   style=OutputStyle.file):
    """Return the shell code resulting from interpreting this context."""
    executor = self._create_executor(interpreter=create_shell(shell),
                                    parent_environ=parent_environ)

    # let spawned tools locate the rxt file this context came from
    if self.load_path and os.path.isfile(self.load_path):
        executor.env.REZ_RXT_FILE = self.load_path

    self._execute(executor)
    return executor.get_output(style)
Get the shell code resulting from interpreting this context.
115
13
226,591
def get_actions(self, parent_environ=None):
    """Return the list of rex.Action objects resulting from interpreting
    this context. Provided mainly for testing purposes."""
    passive_interp = Python(target_environ={}, passive=True)
    executor = self._create_executor(passive_interp, parent_environ)
    self._execute(executor)
    return executor.actions
Get the list of rex . Action objects resulting from interpreting this context . This is provided mainly for testing purposes .
64
23
226,592
def apply(self, parent_environ=None):
    """Apply the context to the current python session, mutating
    os.environ in place."""
    interp = Python(target_environ=os.environ)
    executor = self._create_executor(interp, parent_environ)
    self._execute(executor)
    interp.apply_environ()
Apply the context to the current python session .
61
9
226,593
def which(self, cmd, parent_environ=None, fallback=False):
    """Find program `cmd` in the resolved environment; with fallback,
    also try the current environment."""
    resolved_env = self.get_environ(parent_environ=parent_environ)
    # note: calls the module-level which() helper, not this method
    found = which(cmd, env=resolved_env)
    if found is None and fallback:
        found = which(cmd)
    return found
Find a program in the resolved environment .
65
8
226,594
def execute_command(self, args, parent_environ=None, **subprocess_kwargs):
    """Run a command within the resolved context, returning whatever the
    interpreter's subprocess call returns."""
    if parent_environ in (None, os.environ):
        target_environ = {}
    else:
        # work on a copy so the caller's mapping is not mutated
        target_environ = parent_environ.copy()

    interpreter = Python(target_environ=target_environ)
    executor = self._create_executor(interpreter, parent_environ)
    self._execute(executor)
    return interpreter.subprocess(args, **subprocess_kwargs)
Run a command within a resolved context .
118
8
226,595
def execute_rex_code(self, code, filename=None, shell=None,
                     parent_environ=None, **Popen_args):
    """Run some rex code in the context via a non-blocking shell."""
    def run_code(executor):
        executor.execute_code(code, filename=filename)

    return self.execute_shell(shell=shell,
                              parent_environ=parent_environ,
                              command='',  # don't run any command
                              block=False,
                              actions_callback=run_code,
                              **Popen_args)
Run some rex code in the context .
108
9
226,596
def execute_shell(self, shell=None, parent_environ=None, rcfile=None,
                  norc=False, stdin=False, command=None, quiet=False,
                  block=None, actions_callback=None,
                  post_actions_callback=None, context_filepath=None,
                  start_new_session=False, detached=False, pre_command=None,
                  **Popen_args):
    """Spawn a possibly-interactive shell with this context applied.

    Returns (returncode, stdout, stderr) when blocking, otherwise the
    spawned subprocess object.
    """
    sh = create_shell(shell)

    # a list/tuple command is joined into a single shell command string
    if hasattr(command, "__iter__"):
        command = sh.join(command)

    # start a new session if specified
    if start_new_session:
        Popen_args.update(config.new_session_popen_args)

    # open a separate terminal if specified
    if detached:
        term_cmd = config.terminal_emulator_command
        if term_cmd:
            pre_command = term_cmd.strip().split()

    # block if the shell is likely to be interactive
    if block is None:
        block = not (command or stdin)

    # context and rxt files. If running detached, don't cleanup files, because
    # rez-env returns too early and deletes the tmp files before the detached
    # process can use them
    tmpdir = self.tmpdir_manager.mkdtemp(cleanup=not detached)

    # reuse an existing rxt file when this context was loaded from one
    if self.load_path and os.path.isfile(self.load_path):
        rxt_file = self.load_path
    else:
        rxt_file = os.path.join(tmpdir, "context.rxt")
        self.save(rxt_file)

    context_file = context_filepath or \
        os.path.join(tmpdir, "context.%s" % sh.file_extension())

    # interpret this context and write out the native context file
    executor = self._create_executor(sh, parent_environ)
    executor.env.REZ_RXT_FILE = rxt_file
    executor.env.REZ_CONTEXT_FILE = context_file

    if actions_callback:
        actions_callback(executor)

    self._execute(executor)

    if post_actions_callback:
        post_actions_callback(executor)

    context_code = executor.get_output()
    with open(context_file, 'w') as f:
        f.write(context_code)

    # config may force quiet mode regardless of the caller's choice
    quiet = quiet or \
        (RezToolsVisibility[config.rez_tools_visibility] == RezToolsVisibility.never)

    # spawn the shell subprocess
    p = sh.spawn_shell(context_file,
                       tmpdir,
                       rcfile=rcfile,
                       norc=norc,
                       stdin=stdin,
                       command=command,
                       env=parent_environ,
                       quiet=quiet,
                       pre_command=pre_command,
                       **Popen_args)
    if block:
        stdout, stderr = p.communicate()
        return p.returncode, stdout, stderr
    else:
        return p
Spawn a possibly-interactive shell.
634
7
226,597
def to_dict(self, fields=None):
    """Convert the context to a dict containing only builtin types,
    optionally restricted to the given field names.

    Note: uses Python-2-only map()-as-list and dict.iteritems().
    """
    data = {}

    # a field is included when no filter is given or it is in the filter
    def _add(field):
        return (fields is None or field in fields)

    if _add("resolved_packages"):
        resolved_packages = []
        for pkg in (self._resolved_packages or []):
            resolved_packages.append(pkg.handle.to_dict())
        data["resolved_packages"] = resolved_packages

    if _add("serialize_version"):
        data["serialize_version"] = \
            '.'.join(map(str, ResolvedContext.serialize_version))

    if _add("patch_locks"):
        data["patch_locks"] = dict((k, v.name) for k, v in self.patch_locks)

    if _add("package_orderers"):
        package_orderers = [package_order.to_pod(x)
                            for x in (self.package_orderers or [])]
        data["package_orderers"] = package_orderers or None

    if _add("package_filter"):
        data["package_filter"] = self.package_filter.to_pod()

    if _add("graph"):
        if self.graph_string and self.graph_string.startswith('{'):
            graph_str = self.graph_string  # already in compact format
        else:
            g = self.graph()
            graph_str = write_compacted(g)
        data["graph"] = graph_str

    data.update(dict(
        timestamp=self.timestamp,
        requested_timestamp=self.requested_timestamp,
        building=self.building,
        caching=self.caching,
        implicit_packages=map(str, self.implicit_packages),
        package_requests=map(str, self._package_requests),
        package_paths=self.package_paths,
        default_patch_lock=self.default_patch_lock.name,
        rez_version=self.rez_version,
        rez_path=self.rez_path,
        user=self.user,
        host=self.host,
        platform=self.platform,
        arch=self.arch,
        os=self.os,
        created=self.created,
        parent_suite_path=self.parent_suite_path,
        suite_context_name=self.suite_context_name,
        status=self.status_.name,
        failure_description=self.failure_description,
        from_cache=self.from_cache,
        solve_time=self.solve_time,
        load_time=self.load_time,
        num_loaded_packages=self.num_loaded_packages))

    # apply the field filter to the flat entries added above as well
    if fields:
        data = dict((k, v) for k, v in data.iteritems() if k in fields)

    return data
Convert context to dict containing only builtin types .
625
11
226,598
def add_sys_paths(paths):
    """Context-manager generator: extend sys.path with `paths`, then
    restore the original value on scope exit."""
    saved = list(sys.path)
    sys.path.extend(paths)
    try:
        yield
    finally:
        sys.path = saved
Add to sys . path and revert on scope exit .
43
11
226,599
def popen(args, **kwargs):
    """Wrapper for subprocess.Popen.

    When the caller gives no stdin and the current stdin descriptor is
    non-standard (presumably stdin has been replaced, e.g. under a GUI),
    the child is given a pipe instead.
    """
    if "stdin" not in kwargs:
        try:
            fd = sys.stdin.fileno()
        except AttributeError:
            fd = sys.__stdin__.fileno()
        if fd not in (0, 1, 2):
            kwargs["stdin"] = subprocess.PIPE

    return subprocess.Popen(args, **kwargs)
Wrapper for subprocess . Popen .
104
9