idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
38,500
def _close(self, args):
    """Handle an incoming Channel.Close from the server.

    Replies with Channel.CloseOk, revives the channel, then raises the
    exception matching the server-supplied reply code.
    """
    reply_code = args.read_short()
    reply_text = args.read_shortstr()
    class_id = args.read_short()
    method_id = args.read_short()

    # Acknowledge the close (Channel.CloseOk) before reviving.
    self._send_method((20, 41))
    self._do_revive()

    raise error_for_code(
        reply_code, reply_text, (class_id, method_id), ChannelError,
    )
Request a channel close
38,501
def _x_flow_ok(self, active):
    """Confirm a flow method by sending Channel.FlowOk with `active`."""
    writer = AMQPWriter()
    writer.write_bit(active)
    self._send_method((20, 21), writer)
Confirm a flow method
38,502
def _x_open(self):
    """Open this channel for use; a no-op if it is already open."""
    if self.is_open:
        return

    writer = AMQPWriter()
    writer.write_shortstr('')  # out-of-band argument, deprecated in 0-9-1
    self._send_method((20, 10), writer)
    return self.wait(allowed_methods=[(20, 11)])
Open a channel for use
38,503
def exchange_declare(self, exchange, type, passive=False, durable=False,
                     auto_delete=True, nowait=False, arguments=None):
    """Declare an exchange, creating it if needed.

    Returns the server's Exchange.DeclareOk reply unless `nowait` is set.
    """
    if arguments is None:
        arguments = {}

    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(exchange)
    writer.write_shortstr(type)
    writer.write_bit(passive)
    writer.write_bit(durable)
    writer.write_bit(auto_delete)
    writer.write_bit(False)  # "internal" flag, never set here
    writer.write_bit(nowait)
    writer.write_table(arguments)
    self._send_method((40, 10), writer)

    if auto_delete:
        warn(VDeprecationWarning(EXCHANGE_AUTODELETE_DEPRECATED))

    if not nowait:
        return self.wait(allowed_methods=[(40, 11)])
Declare exchange create if needed
38,504
def exchange_bind(self, destination, source='', routing_key='',
                  nowait=False, arguments=None):
    """Bind an exchange to an exchange.

    Returns the server's Exchange.BindOk reply unless `nowait` is set.
    """
    if arguments is None:
        arguments = {}

    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(destination)
    writer.write_shortstr(source)
    writer.write_shortstr(routing_key)
    writer.write_bit(nowait)
    writer.write_table(arguments)
    self._send_method((40, 30), writer)

    if not nowait:
        return self.wait(allowed_methods=[(40, 31)])
This method binds an exchange to an exchange .
38,505
def queue_declare(self, queue='', passive=False, durable=False,
                  exclusive=False, auto_delete=True, nowait=False,
                  arguments=None):
    """Declare a queue, creating it if needed.

    Returns the server's Queue.DeclareOk reply unless `nowait` is set.
    """
    if arguments is None:
        arguments = {}

    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(queue)
    writer.write_bit(passive)
    writer.write_bit(durable)
    writer.write_bit(exclusive)
    writer.write_bit(auto_delete)
    writer.write_bit(nowait)
    writer.write_table(arguments)
    self._send_method((50, 10), writer)

    if not nowait:
        return self.wait(allowed_methods=[(50, 11)])
Declare queue create if needed
38,506
def _queue_declare_ok(self, args):
    """Parse a Queue.DeclareOk frame into a queue_declare_ok_t tuple
    of (queue name, message count, consumer count)."""
    name = args.read_shortstr()
    message_count = args.read_long()
    consumer_count = args.read_long()
    return queue_declare_ok_t(name, message_count, consumer_count)
Confirms a queue definition
38,507
def queue_delete(self, queue='', if_unused=False, if_empty=False,
                 nowait=False):
    """Delete a queue.

    Returns the server's Queue.DeleteOk reply unless `nowait` is set.
    """
    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(queue)
    writer.write_bit(if_unused)
    writer.write_bit(if_empty)
    writer.write_bit(nowait)
    self._send_method((50, 40), writer)

    if not nowait:
        return self.wait(allowed_methods=[(50, 41)])
Delete a queue
38,508
def queue_purge(self, queue='', nowait=False):
    """Purge all messages from a queue.

    Returns the server's Queue.PurgeOk reply unless `nowait` is set.
    """
    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(queue)
    writer.write_bit(nowait)
    self._send_method((50, 30), writer)

    if not nowait:
        return self.wait(allowed_methods=[(50, 31)])
Purge a queue
38,509
def basic_ack(self, delivery_tag, multiple=False):
    """Acknowledge one (or, with `multiple`, all up to `delivery_tag`)
    delivered messages."""
    writer = AMQPWriter()
    writer.write_longlong(delivery_tag)
    writer.write_bit(multiple)
    self._send_method((60, 80), writer)
Acknowledge one or more messages
38,510
def basic_cancel(self, consumer_tag, nowait=False):
    """End a queue consumer identified by `consumer_tag`.

    NOTE(review): this waits for Basic.CancelOk even when `nowait` is
    set — confirm that is intentional before relying on it.
    """
    if self.connection is not None:
        self.no_ack_consumers.discard(consumer_tag)

    writer = AMQPWriter()
    writer.write_shortstr(consumer_tag)
    writer.write_bit(nowait)
    self._send_method((60, 30), writer)
    return self.wait(allowed_methods=[(60, 31)])
End a queue consumer
38,511
def _basic_cancel_notify(self, args):
    """Handle a server-initiated consumer cancellation.

    Invokes the registered on-cancel callback if one exists, otherwise
    raises ConsumerCancelled.
    """
    consumer_tag = args.read_shortstr()
    handler = self._on_cancel(consumer_tag)
    if handler:
        handler(consumer_tag)
    else:
        raise ConsumerCancelled(consumer_tag, (60, 30))
Consumer cancelled by server .
38,512
def basic_consume(self, queue='', consumer_tag='', no_local=False,
                  no_ack=False, exclusive=False, nowait=False,
                  callback=None, arguments=None, on_cancel=None):
    """Start a queue consumer and register its callbacks.

    Returns the consumer tag (server-assigned unless `nowait` is set).
    """
    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(queue)
    writer.write_shortstr(consumer_tag)
    writer.write_bit(no_local)
    writer.write_bit(no_ack)
    writer.write_bit(exclusive)
    writer.write_bit(nowait)
    writer.write_table(arguments or {})
    self._send_method((60, 20), writer)

    if not nowait:
        # The server assigns the definitive tag in Basic.ConsumeOk.
        consumer_tag = self.wait(allowed_methods=[(60, 21)])

    self.callbacks[consumer_tag] = callback
    if on_cancel:
        self.cancel_callbacks[consumer_tag] = on_cancel
    if no_ack:
        self.no_ack_consumers.add(consumer_tag)
    return consumer_tag
Start a queue consumer
38,513
def _basic_deliver(self, args, msg):
    """Dispatch a delivered message to its consumer callback.

    Fills in msg.channel and msg.delivery_info; silently drops the
    message if no callback is registered for the consumer tag.
    """
    consumer_tag = args.read_shortstr()
    delivery_tag = args.read_longlong()
    redelivered = args.read_bit()
    exchange = args.read_shortstr()
    routing_key = args.read_shortstr()

    msg.channel = self
    msg.delivery_info = {
        'consumer_tag': consumer_tag,
        'delivery_tag': delivery_tag,
        'redelivered': redelivered,
        'exchange': exchange,
        'routing_key': routing_key,
    }

    try:
        handler = self.callbacks[consumer_tag]
    except KeyError:
        pass  # no registered consumer for this tag: drop silently
    else:
        handler(msg)
Notify the client of a consumer message
38,514
def _basic_get_ok(self, args, msg):
    """Attach delivery info from a Basic.GetOk frame to `msg` and return it."""
    delivery_tag = args.read_longlong()
    redelivered = args.read_bit()
    exchange = args.read_shortstr()
    routing_key = args.read_shortstr()
    message_count = args.read_long()

    msg.channel = self
    msg.delivery_info = {
        'delivery_tag': delivery_tag,
        'redelivered': redelivered,
        'exchange': exchange,
        'routing_key': routing_key,
        'message_count': message_count
    }
    return msg
Provide client with a message
38,515
def _basic_publish(self, msg, exchange='', routing_key='',
                   mandatory=False, immediate=False):
    """Publish `msg` to `exchange` with the given routing key."""
    writer = AMQPWriter()
    writer.write_short(0)  # reserved ("ticket") field
    writer.write_shortstr(exchange)
    writer.write_shortstr(routing_key)
    writer.write_bit(mandatory)
    writer.write_bit(immediate)
    self._send_method((60, 40), writer, msg)
Publish a message
38,516
def basic_qos(self, prefetch_size, prefetch_count, a_global):
    """Specify quality of service and wait for Basic.QosOk."""
    writer = AMQPWriter()
    writer.write_long(prefetch_size)
    writer.write_short(prefetch_count)
    writer.write_bit(a_global)
    self._send_method((60, 10), writer)
    return self.wait(allowed_methods=[(60, 11)])
Specify quality of service
38,517
def basic_recover(self, requeue=False):
    """Ask the server to redeliver all unacknowledged messages."""
    writer = AMQPWriter()
    writer.write_bit(requeue)
    self._send_method((60, 110), writer)
Redeliver unacknowledged messages
38,518
def basic_reject(self, delivery_tag, requeue):
    """Reject an incoming message, optionally requeueing it."""
    writer = AMQPWriter()
    writer.write_longlong(delivery_tag)
    writer.write_bit(requeue)
    self._send_method((60, 90), writer)
Reject an incoming message
38,519
def _basic_return(self, args, msg):
    """Queue a returned (undeliverable) message for later inspection."""
    reply_code = args.read_short()
    reply_text = args.read_shortstr()
    exchange = args.read_shortstr()
    routing_key = args.read_shortstr()
    self.returned_messages.put(
        basic_return_t(reply_code, reply_text, exchange, routing_key, msg)
    )
Return a failed message
38,520
def create_build_process(process_type, working_dir, build_system,
                         package=None, vcs=None, ensure_latest=True,
                         skip_repo_errors=False, ignore_existing_tag=False,
                         verbose=False, quiet=False):
    """Create a BuildProcess instance of the requested plugin type.

    Raises BuildProcessError if `process_type` is not a known plugin.
    """
    from rez.plugin_managers import plugin_manager

    if process_type not in get_build_process_types():
        raise BuildProcessError("Unknown build process: %r" % process_type)

    cls = plugin_manager.get_plugin_class('build_process', process_type)
    return cls(working_dir,
               build_system,
               package=package,
               vcs=vcs,
               ensure_latest=ensure_latest,
               skip_repo_errors=skip_repo_errors,
               ignore_existing_tag=ignore_existing_tag,
               verbose=verbose,
               quiet=quiet)
Create a BuildProcess instance .
38,521
def visit_variants(self, func, variants=None, **kwargs):
    """Call `func(variant, **kwargs)` on each (selected) package variant.

    Returns (number of variants visited, list of per-variant results).
    Raises BuildError if `variants` names indices the package lacks.
    """
    if variants:
        present = set(range(self.package.num_variants))
        missing = set(variants) - present
        if missing:
            raise BuildError(
                "The package does not contain the variants: %s"
                % ", ".join(str(x) for x in sorted(missing)))

    results = []
    num_visited = 0
    for variant in self.package.iter_variants():
        if variants and variant.index not in variants:
            self._print_header(
                "Skipping variant %s (%s)..."
                % (variant.index, self._n_of_m(variant)))
            continue
        results.append(func(variant, **kwargs))
        num_visited += 1

    return num_visited, results
Iterate over variants and call a function on each .
38,522
def create_build_context(self, variant, build_type, build_path):
    """Resolve and save a build environment for `variant`.

    Returns (context, path to saved build.rxt). Raises
    BuildContextResolveError if the resolve fails.
    """
    request = variant.get_requires(build_requires=True,
                                   private_build_requires=True)

    requested = map(str, request)
    self._print("Resolving build environment: %s",
                ' '.join(map(quote, requested)))

    # Local builds may pull from local package paths; release builds
    # must resolve against non-local repositories only.
    if build_type == BuildType.local:
        packages_path = self.package.config.packages_path
    else:
        packages_path = self.package.config.nonlocal_packages_path

    if self.package.config.is_overridden("package_filter"):
        from rez.package_filter import PackageFilterList
        package_filter = PackageFilterList.from_pod(
            self.package.config.package_filter)
    else:
        package_filter = None

    context = ResolvedContext(request,
                              package_paths=packages_path,
                              package_filter=package_filter,
                              building=True)
    if self.verbose:
        context.print_info()

    # Save even a failed context so it can be inspected afterwards.
    rxt_filepath = os.path.join(build_path, "build.rxt")
    context.save(rxt_filepath)

    if context.status != ResolverStatus.solved:
        raise BuildContextResolveError(context)
    return context, rxt_filepath
Create a context to build the variant within .
38,523
def get_release_data(self):
    """Assemble release metadata (vcs, revision, changelog, previous release).

    When no vcs is configured, only a minimal dict is returned.
    """
    previous_package = self.get_previous_release()
    if previous_package:
        previous_version = previous_package.version
        previous_revision = previous_package.revision
    else:
        previous_version = None
        previous_revision = None

    if self.vcs is None:
        return dict(vcs="None", previous_version=previous_version)

    revision = None
    with self.repo_operation():
        revision = self.vcs.get_current_revision()

    changelog = self.get_changelog()

    # Truncate over-long changelogs, leaving room for the ellipsis.
    maxlen = config.max_package_changelog_chars
    if maxlen and changelog and len(changelog) > maxlen + 3:
        changelog = changelog[:maxlen] + "..."

    return dict(vcs=self.vcs.name(),
                revision=revision,
                changelog=changelog,
                previous_version=previous_version,
                previous_revision=previous_revision)
Get release data for this release .
38,524
def minimal_spanning_tree(graph, root=None):
    """Build a minimal spanning tree (forest if `root` is None and the
    graph is disconnected).

    Returns a dict mapping each node to its parent (None for roots).
    """
    visited = []
    spanning_tree = {}
    if root is not None:
        visited.append(root)
        nroot = root
        spanning_tree[root] = None
    else:
        nroot = 1  # any non-None sentinel to enter the loop

    while nroot is not None:
        ledge = _lightest_edge(graph, visited)
        # FIX: compare against None with `is`, not `==` — equality can be
        # overridden by the edge type and is the wrong test for "no edge".
        if ledge is None:
            if root is not None:
                break
            # Current component exhausted; start a new tree at an
            # unvisited node, if any remain.
            nroot = _first_unvisited(graph, visited)
            if nroot is not None:
                spanning_tree[nroot] = None
            visited.append(nroot)
        else:
            spanning_tree[ledge[1]] = ledge[0]
            visited.append(ledge[1])

    return spanning_tree
Minimal spanning tree .
38,525
def cut_value(graph, flow, cut):
    """Calculate the value of the cut `cut` under flow `flow`.

    `cut` maps each node to 0 (source side, S) or 1 (sink side, T); the
    value is the net flow crossing from S into T.
    """
    source_side = [node for node in cut.keys() if cut[node] == 0]
    sink_side = [node for node in cut.keys() if cut[node] == 1]

    value = 0
    for node in source_side:
        # Flow leaving S toward T counts positively...
        for neigh in graph.neighbors(node):
            if neigh in sink_side:
                value += flow[(node, neigh)]
        # ...flow entering S from T counts negatively.
        for inc in graph.incidents(node):
            if inc in sink_side:
                value -= flow[(inc, node)]
    return value
Calculate the value of a cut .
38,526
def cut_tree(igraph, caps=None):
    """Construct a Gomory-Hu cut tree by applying the algorithm of Gusfield.

    Returns a dict mapping tree edges (node, parent-node) to min-cut values.
    Code left untouched: the p/f swap sequence is order-sensitive.
    """
    # Work on a directed copy of the input graph.
    graph = digraph()
    graph.add_graph(igraph)

    # Default capacities are the input graph's edge weights.
    if not caps:
        caps = {}
        for edge in graph.edges():
            caps[edge] = igraph.edge_weight(edge)

    f = {}  # f[s]: min-cut value between n[s] and its tree parent
    n = {}  # index -> node mapping
    N = 0
    for node in graph.nodes():
        n[N] = node
        N = N + 1

    # p[i]: tree parent index of node i; node 0 is the initial root.
    p = {}.fromkeys(range(N), 0)
    p[0] = None

    for s in range(1, N):
        t = p[s]
        S = []
        # Min cut between s and its current parent t.
        (flow, cut) = maximum_flow(graph, n[s], n[t], caps)
        for i in range(N):
            if cut[n[i]] == 0:
                S.append(i)  # i is on s's side of the cut
        value = cut_value(graph, flow, cut)
        f[s] = value
        # Re-hang nodes on s's side that currently hang off t.
        for i in range(N):
            if i == s:
                continue
            if i in S and p[i] == t:
                p[i] = s
        # If t's parent is on s's side, s takes t's place in the tree.
        if p[t] in S:
            p[s] = p[t]
            p[t] = s
            f[s] = f[t]
            f[t] = value

    # Emit tree edges with their min-cut values.
    b = {}
    for i in range(1, N):
        b[(n[i], n[p[i]])] = f[i]
    return b
Construct a Gomory - Hu cut tree by applying the algorithm of Gusfield .
38,527
def locate(self, requirement, prereleases=False):
    """Find the most recent distribution matching `requirement`.

    Pre-releases are skipped unless `prereleases` is True. Returns the
    matching distribution (with download_urls/digests attached) or None.
    Raises DistlibException for an unparseable requirement.
    """
    result = None
    r = parse_requirement(requirement)
    if r is None:
        raise DistlibException('Not a valid requirement: %r' % requirement)
    scheme = get_scheme(self.scheme)
    self.matcher = matcher = scheme.matcher(r.requirement)
    logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__)
    versions = self.get_project(r.name)
    # > 2 presumably because 'urls' and 'digests' keys are always
    # present alongside real versions — TODO confirm against get_project.
    if len(versions) > 2:
        slist = []
        vcls = matcher.version_class
        for k in versions:
            if k in ('urls', 'digests'):
                continue  # metadata keys, not version strings
            try:
                if not matcher.match(k):
                    logger.debug('%s did not match %r', matcher, k)
                else:
                    if prereleases or not vcls(k).is_prerelease:
                        slist.append(k)
                    else:
                        logger.debug('skipping pre-release '
                                     'version %s of %s', k, matcher.name)
            except Exception:
                # Unparseable version strings are logged and skipped.
                logger.warning('error matching %s with %r', matcher, k)
                pass
        if len(slist) > 1:
            slist = sorted(slist, key=scheme.key)
        if slist:
            logger.debug('sorted list: %s', slist)
            version = slist[-1]  # highest matching version
            result = versions[version]
    if result:
        if r.extras:
            result.extras = r.extras
        result.download_urls = versions.get('urls', {}).get(version, set())
        d = {}
        sd = versions.get('digests', {})
        for url in result.download_urls:
            if url in sd:
                d[url] = sd[url]
        result.digests = d
    self.matcher = None
    return result
Find the most recent distribution which matches the given requirement .
38,528
def get_bind_modules(verbose=False):
    """Get available bind modules (Python 2 module — print statements).

    Returns a dict mapping bind module name to its file path; later
    search paths override earlier ones for duplicate names.
    """
    builtin_path = os.path.join(module_root_path, "bind")
    searchpaths = config.bind_module_path + [builtin_path]
    bindnames = {}
    for path in searchpaths:
        if verbose:
            print "searching %s..." % path
        if not os.path.isdir(path):
            continue
        for filename in os.listdir(path):
            fpath = os.path.join(path, filename)
            fname, ext = os.path.splitext(filename)
            # Only top-level .py files that are not private (_-prefixed).
            if os.path.isfile(fpath) and ext == ".py" and not fname.startswith('_'):
                bindnames[fname] = fpath
    return bindnames
Get available bind modules .
38,529
def find_bind_module(name, verbose=False):
    """Find the bind module matching `name` (Python 2 module).

    Returns the module's file path, or None. When `verbose`, prints
    fuzzy close matches on failure.
    """
    bindnames = get_bind_modules(verbose=verbose)
    bindfile = bindnames.get(name)
    if bindfile:
        return bindfile
    if not verbose:
        return None

    # No exact hit: show close matches to help the user.
    fuzzy_matches = get_close_pkgs(name, bindnames.keys())
    if fuzzy_matches:
        rows = [(x[0], bindnames[x[0]]) for x in fuzzy_matches]
        print "'%s' not found. Close matches:" % name
        print '\n'.join(columnise(rows))
    else:
        print "No matches."
    return None
Find the bind module matching the given name .
38,530
def bind_package(name, path=None, version_range=None, no_deps=False,
                 bind_args=None, quiet=False):
    """Bind software available on the current system as a rez package
    (Python 2 module — print statements).

    Binds `name` first, then (unless `no_deps`) its transitive
    requirements. Returns the list of installed variants.
    """
    pending = set([name])
    installed_variants = []
    installed_package_names = set()

    # Primary rounds raise on failure; dependency rounds only warn.
    primary = True
    while pending:
        pending_ = pending
        pending = set()

        # NOTE(review): exc_type is None on the first pass, so
        # `except None` matches nothing and primary failures propagate —
        # presumably intentional, but worth confirming.
        exc_type = None
        for name_ in pending_:
            try:
                variants_ = _bind_package(name_,
                                          path=path,
                                          version_range=version_range,
                                          bind_args=bind_args,
                                          quiet=quiet)
            except exc_type as e:
                print_error("Could not bind '%s': %s: %s"
                            % (name_, e.__class__.__name__, str(e)))
                continue

            installed_variants.extend(variants_)
            for variant in variants_:
                installed_package_names.add(variant.name)

            # Queue non-conflict requirements for the next round.
            if not no_deps:
                for variant in variants_:
                    for requirement in variant.requires:
                        if not requirement.conflict:
                            pending.add(requirement.name)

        # Subsequent rounds: best-effort, default settings.
        primary = False
        version_range = None
        bind_args = None
        exc_type = RezBindError

    if installed_variants and not quiet:
        print "The following packages were installed:"
        print _print_package_list(installed_variants)

    return installed_variants
Bind software available on the current system as a rez package .
38,531
def create_release_hook(name, source_path):
    """Instantiate a release hook plugin of the given type."""
    from rez.plugin_managers import plugin_manager
    return plugin_manager.create_instance('release_hook',
                                          name,
                                          source_path=source_path)
Return a new release hook of the given type .
38,532
def pre_build(self, user, install_path, variants=None, release_message=None,
              changelog=None, previous_version=None, previous_revision=None,
              **kwargs):
    """Pre-build hook; the default implementation does nothing."""
    pass
Pre - build hook .
38,533
def traversal(graph, node, order):
    """Graph traversal iterator starting at `node`.

    `order` must be 'pre' (pre-order) or 'post' (post-order).
    Raises ValueError for any other value — the original code fell
    through and hit an accidental NameError instead.
    """
    visited = {}
    if order == 'pre':
        pre, post = 1, 0
    elif order == 'post':
        pre, post = 0, 1
    else:
        raise ValueError("order must be 'pre' or 'post', got %r" % (order,))

    for each in _dfs(graph, visited, node, pre, post):
        yield each
Graph traversal iterator .
38,534
def is_zipfile(filename):
    """Quickly see if `filename` is a ZIP file by checking the magic number.

    Returns False on any IOError (e.g. missing file).
    """
    try:
        # FIX: use a context manager so the file is closed even if
        # _EndRecData raises; the original leaked the handle on error.
        with open(filename, "rb") as fpin:
            endrec = _EndRecData(fpin)
        if endrec:
            return True
    except IOError:
        pass
    return False
Quickly see if file is a ZIP file by checking the magic number .
38,535
def _EndRecData(fpin):
    """Return data from the End of Central Directory record, or None.

    The file pointer must be a seekable binary stream. Code left
    byte-for-byte: the offset arithmetic is intricate and depends on the
    module's struct constants.
    """
    # Determine file size.
    fpin.seek(0, 2)
    filesize = fpin.tell()
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except IOError:
        # File too small to hold an EOCD record.
        return None
    data = fpin.read()
    # Fast path: EOCD at the very end with an empty comment.
    if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
        endrec = struct.unpack(structEndArchive, data)
        endrec = list(endrec)
        endrec.append("")                       # empty archive comment
        endrec.append(filesize - sizeEndCentDir)  # EOCD offset
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)
    # Slow path: scan backwards through a possible archive comment
    # (at most 64 KiB per the ZIP spec).
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:
        recData = data[start:start + sizeEndCentDir]
        endrec = list(struct.unpack(structEndArchive, recData))
        comment = data[start + sizeEndCentDir:]
        # Accept only if the recorded comment length matches reality.
        if endrec[_ECD_COMMENT_SIZE] == len(comment):
            endrec.append(comment)
            endrec.append(maxCommentStart + start)
            return _EndRecData64(fpin, maxCommentStart + start - filesize,
                                 endrec)
    return
Return data from the End of Central Directory record or None .
38,536
def _GenerateCRCTable ( ) : poly = 0xedb88320 table = [ 0 ] * 256 for i in range ( 256 ) : crc = i for j in range ( 8 ) : if crc & 1 : crc = ( ( crc >> 1 ) & 0x7FFFFFFF ) ^ poly else : crc = ( ( crc >> 1 ) & 0x7FFFFFFF ) table [ i ] = crc return table
Generate a CRC - 32 table .
38,537
def _crc32 ( self , ch , crc ) : return ( ( crc >> 8 ) & 0xffffff ) ^ self . crctable [ ( crc ^ ord ( ch ) ) & 0xff ]
Compute the CRC32 primitive on one byte .
38,538
def readline(self, size=-1):
    """Read a line of at most approximately `size` bytes; a negative
    size reads a whole line (Python 2 code — uses sys.maxint).

    Reads are buffered in self.linebuffer; the stripped newline is kept
    in self.lastdiscard and a bare "\n" is appended to the result.
    """
    if size < 0:
        size = sys.maxint
    elif size == 0:
        return ''

    # Check the existing buffer for a newline first.
    nl, nllen = self._checkfornewline()

    if nl >= 0:
        # A newline is already buffered; don't read past `size`.
        nl = min(nl, size)
    else:
        # Keep reading (in small chunks) until a newline shows up or
        # the size budget is spent.
        size -= len(self.linebuffer)
        while nl < 0 and size > 0:
            buf = self.read(min(size, 100))
            if not buf:
                break
            self.linebuffer += buf
            size -= len(buf)
            nl, nllen = self._checkfornewline()

    if nl < 0:
        # No newline found: return everything buffered.
        s = self.linebuffer
        self.linebuffer = ''
        return s

    buf = self.linebuffer[:nl]
    self.lastdiscard = self.linebuffer[nl:nl + nllen]
    self.linebuffer = self.linebuffer[nl + nllen:]
    # Line terminators are normalized to "\n".
    return buf + "\n"
Read a line with approx . size . If size is negative read a whole line .
38,539
def _GetContents(self):
    """Read the archive directory, closing our own file handle if the
    format is bad (the caller's handle is left alone)."""
    try:
        self._RealGetContents()
    except BadZipfile:
        if not self._filePassed:
            self.fp.close()
            self.fp = None
        raise
Read the directory making sure we close the file if the format is bad .
38,540
def namelist(self):
    """Return a list of file names in the archive, in directory order."""
    return [entry.filename for entry in self.filelist]
Return a list of file names in the archive .
38,541
def printdir(self):
    """Print a table of contents for the zip file (Python 2 code —
    print statements)."""
    print "%-46s %19s %12s" % ("File Name", "Modified    ", "Size")
    for zinfo in self.filelist:
        # date_time is a (year, month, day, hour, minute, second) tuple.
        date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
        print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
Print a table of contents for the zip file .
38,542
def getinfo(self, name):
    """Return the ZipInfo instance for `name`; raise KeyError if absent."""
    info = self.NameToInfo.get(name)
    if info is None:
        raise KeyError('There is no item named %r in the archive' % name)
    return info
Return the instance of ZipInfo given name .
38,543
def extract(self, member, path=None, pwd=None):
    """Extract `member` (a name or ZipInfo) to `path` (default: cwd).

    Returns the path the member was extracted to.
    """
    info = member if isinstance(member, ZipInfo) else self.getinfo(member)
    target = os.getcwd() if path is None else path
    return self._extract_member(info, target, pwd)
Extract a member from the archive to the current working directory using its full name . Its file information is extracted as accurately as possible . member may be a filename or a ZipInfo object . You can specify a different directory using path .
38,544
def writepy(self, pathname, basename=""):
    """Add all .py files from `pathname` to the ZIP archive as compiled
    code (Python 2 code — print statements, old raise syntax).

    Directories containing __init__.py are added as packages
    (recursively); other directories contribute only their top-level
    .py files; a plain file must itself end in ".py".
    """
    dir, name = os.path.split(pathname)
    if os.path.isdir(pathname):
        initname = os.path.join(pathname, "__init__.py")
        if os.path.isfile(initname):
            # This is a package directory: extend the archive basename.
            if basename:
                basename = "%s/%s" % (basename, name)
            else:
                basename = name
            if self.debug:
                print "Adding package in", pathname, "as", basename
            # [0:-3] strips the ".py" suffix before compiling.
            fname, arcname = self._get_codename(initname[0:-3], basename)
            if self.debug:
                print "Adding", arcname
            self.write(fname, arcname)
            dirlist = os.listdir(pathname)
            dirlist.remove("__init__.py")
            # Recurse into subpackages; add sibling modules.
            for filename in dirlist:
                path = os.path.join(pathname, filename)
                root, ext = os.path.splitext(filename)
                if os.path.isdir(path):
                    if os.path.isfile(os.path.join(path, "__init__.py")):
                        self.writepy(path, basename)
                elif ext == ".py":
                    fname, arcname = self._get_codename(path[0:-3], basename)
                    if self.debug:
                        print "Adding", arcname
                    self.write(fname, arcname)
        else:
            # Plain directory: add its top-level modules only.
            if self.debug:
                print "Adding files from directory", pathname
            for filename in os.listdir(pathname):
                path = os.path.join(pathname, filename)
                root, ext = os.path.splitext(filename)
                if ext == ".py":
                    fname, arcname = self._get_codename(path[0:-3], basename)
                    if self.debug:
                        print "Adding", arcname
                    self.write(fname, arcname)
    else:
        if pathname[-3:] != ".py":
            raise RuntimeError, 'Files added with writepy() must end with ".py"'
        fname, arcname = self._get_codename(pathname[0:-3], basename)
        if self.debug:
            print "Adding file", arcname
        self.write(fname, arcname)
Add all files from pathname to the ZIP archive .
38,545
def get_fileobject(self, dir=None, **kwargs):
    """Create and return a temporary file object to write into.

    The temp file is created next to self._path by default (so a later
    atomic rename stays on the same filesystem) and opened with
    self._mode plus any extra io.open keyword arguments.
    """
    if dir is None:
        dir = os.path.normpath(os.path.dirname(self._path))
    fd, tmp_path = tempfile.mkstemp(dir=dir)
    # We only need the path; reopen via io.open below.
    os.close(fd)
    kwargs['mode'] = self._mode
    kwargs['file'] = tmp_path
    return io.open(**kwargs)
Return the temporary file to use .
38,546
def commit(self, f):
    """Atomically move the finished temp file `f` into self._path.

    Uses replace semantics when overwriting is allowed, move semantics
    (which fails if the target exists) otherwise.
    """
    move = replace_atomic if self._overwrite else move_atomic
    move(f.name, self._path)
Move the temporary file to the target location .
38,547
def read_pid_from_pidfile(pidfile_path):
    """Read the PID recorded in the named PID file.

    Returns the PID as an int, or None when the file cannot be opened
    or its first line is not a valid integer.
    """
    try:
        # FIX: context manager guarantees the file is closed even if
        # readline() raises; the original only closed on the happy path.
        with open(pidfile_path, 'r') as pidfile:
            line = pidfile.readline().strip()
    except IOError:
        return None

    try:
        return int(line)
    except ValueError:
        return None
Read the PID recorded in the named PID file .
38,548
def apply_saved_layout(self):
    """Restore splitter sizes saved under self.config_key.

    Call this after adding your child widgets. Returns True if a saved
    layout was found and applied, False otherwise.
    """
    num_widgets = self.config.get(self.config_key + "/num_widgets", int)
    if not num_widgets:
        return False

    sizes = [self.config.get("%s/size_%d" % (self.config_key, i), int)
             for i in range(num_widgets)]
    self.setSizes(sizes)
    return True
Call this after adding your child widgets .
38,549
def remove_nones(**kwargs):
    """Return a dict copy of the keyword arguments with None values removed."""
    # FIX: items() instead of the Python-2-only iteritems(), so this
    # helper works unchanged on both Python 2 and 3 (identical result).
    return dict((k, v) for k, v in kwargs.items() if v is not None)
Return dict copy with Nones removed .
38,550
def deep_update(dict1, dict2):
    """Perform a deep merge of `dict2` into `dict1` (modified in place).

    Nested dicts are merged recursively; ModifyList values are expanded
    against the existing value (or an empty list).
    """
    def flatten(v):
        # Expand ModifyList markers and recursively flatten dicts;
        # everything else passes through untouched.
        if isinstance(v, ModifyList):
            return v.apply([])
        elif isinstance(v, dict):
            return dict((k, flatten(v_)) for k, v_ in v.iteritems())
        else:
            return v

    def merge(v1, v2):
        if isinstance(v1, dict) and isinstance(v2, dict):
            deep_update(v1, v2)
            return v1
        elif isinstance(v2, ModifyList):
            v1 = flatten(v1)
            return v2.apply(v1)
        else:
            return flatten(v2)

    # Flatten values of dict1 that dict2 does not override.
    for k1, v1 in dict1.iteritems():
        if k1 not in dict2:
            dict1[k1] = flatten(v1)

    for k2, v2 in dict2.iteritems():
        # FIX: pass KeyError as the sentinel default. The original used
        # dict1.get(k2) (default None), so `v1 is KeyError` was never
        # true — missing keys and existing None values were both routed
        # through merge(), mishandling dict/ModifyList values and
        # conflating "absent" with "None".
        v1 = dict1.get(k2, KeyError)
        if v1 is KeyError:
            dict1[k2] = flatten(v2)
        else:
            dict1[k2] = merge(v1, v2)
Perform a deep merge of dict2 into dict1 .
38,551
def deep_del(data, fn):
    """Return a copy of dict `data` with every item removed for which
    fn(value) is true; nested dicts are filtered recursively."""
    # FIX: items() instead of the Python-2-only iteritems(), so this
    # helper works unchanged on both Python 2 and 3 (identical result).
    result = {}
    for k, v in data.items():
        if not fn(v):
            result[k] = deep_del(v, fn) if isinstance(v, dict) else v
    return result
Create dict copy with removed items .
38,552
def get_dict_diff_str(d1, d2, title):
    """Return the get_dict_diff result as a readable, titled string."""
    added, removed, changed = get_dict_diff(d1, d2)
    lines = [title]
    for label, keys in (("Added", added),
                        ("Removed", removed),
                        ("Changed", changed)):
        if keys:
            lines.append("%s attributes: %s"
                         % (label, ['.'.join(x) for x in keys]))
    return '\n'.join(lines)
Returns same as get_dict_diff but as a readable string .
38,553
def convert_dicts(d, to_class=AttrDictWrapper, from_class=dict):
    """Recursively convert dict / UserDict instances of `from_class`
    into `to_class`, leaving all other values unchanged."""
    converted = to_class()
    for key, value in d.iteritems():
        if isinstance(value, from_class):
            converted[key] = convert_dicts(value,
                                           to_class=to_class,
                                           from_class=from_class)
        else:
            converted[key] = value
    return converted
Recursively convert dict and UserDict types .
38,554
def package_loading(self):
    """Generator used as a timing context around package-loading code;
    accumulates elapsed wall time into self.package_load_time."""
    start = time.time()
    yield None
    self.package_load_time += time.time() - start
Use this around code in your package repository that is loading a package for example from file or cache .
38,555
def is_empty(self):
    """Return True if the repository contains no packages at all.

    Short-circuits on the first package found.
    """
    for family in self.iter_package_families():
        for _ in self.iter_packages(family):
            return False
    return True
Determine if the repository contains any packages .
38,556
def make_resource_handle(self, resource_key, **variables):
    """Create a ResourceHandle for this repository.

    Raises ResourceError when the supplied variables name a different
    repository type or location than this repository's own.
    """
    requested_type = variables.get("repository_type", self.name())
    if requested_type != self.name():
        raise ResourceError("repository_type mismatch - requested %r, "
                            "repository_type is %r"
                            % (variables["repository_type"], self.name()))
    variables["repository_type"] = self.name()

    requested_location = variables.get("location", self.location)
    if requested_location != self.location:
        raise ResourceError("location mismatch - requested %r, repository "
                            "location is %r"
                            % (variables["location"], self.location))
    variables["location"] = self.location

    resource_cls = self.pool.get_resource_class(resource_key)
    variables = resource_cls.normalize_variables(variables)
    return ResourceHandle(resource_key, variables)
Create a ResourceHandle
38,557
def get_repository(self, path):
    """Get a package repository for `path`.

    A bare path defaults to the "filesystem" repository type, whose
    location is normalised to an absolute path.
    """
    parts = path.split('@', 1)
    if len(parts) == 1:
        repo_type, location = "filesystem", parts[0]
    else:
        repo_type, location = parts

    if repo_type == "filesystem":
        location = os.path.abspath(location)

    return self._get_repository("%s@%s" % (repo_type, location))
Get a package repository .
38,558
def are_same(self, path_1, path_2):
    """Test whether path_1 and path_2 refer to the same repository.

    Identical path strings short-circuit; otherwise the resolved
    repositories' uids are compared.
    """
    if path_1 == path_2:
        return True
    uid_1 = self.get_repository(path_1).uid
    uid_2 = self.get_repository(path_2).uid
    return uid_1 == uid_2
Test that path_1 and path_2 refer to the same repository .
38,559
def create_transport(host, connect_timeout, ssl=False):
    """Select and create the appropriate _AbstractTransport subclass:
    SSLTransport when `ssl` is truthy, TCPTransport otherwise."""
    if ssl:
        return SSLTransport(host, connect_timeout, ssl)
    return TCPTransport(host, connect_timeout)
Given a few parameters from the Connection constructor select and create a subclass of _AbstractTransport .
38,560
def get_plugin_class(self, plugin_name):
    """Return the class registered under `plugin_name`.

    Raises RezPluginError for an unrecognised plugin name.
    """
    try:
        return self.plugin_classes[plugin_name]
    except KeyError:
        raise RezPluginError("Unrecognised %s plugin: '%s'"
                             % (self.pretty_type_name, plugin_name))
Returns the class registered under the given plugin name .
38,561
def get_plugin_module(self, plugin_name):
    """Return the module containing the plugin named `plugin_name`.

    Raises RezPluginError for an unrecognised plugin name.
    """
    try:
        return self.plugin_modules[plugin_name]
    except KeyError:
        raise RezPluginError("Unrecognised %s plugin: '%s'"
                             % (self.pretty_type_name, plugin_name))
Returns the module containing the plugin of the given name .
38,562
def config_schema(self):
    """Return the merged configuration data schema for this plugin type
    (Python 2 code — iteritems).

    Starts from the plugin-type entry of rez's _plugin_config_dict and
    deep-merges in each loaded plugin class's schema_dict.
    """
    from rez.config import _plugin_config_dict
    d = _plugin_config_dict.get(self.type_name, {})
    for name, plugin_class in self.plugin_classes.iteritems():
        if hasattr(plugin_class, "schema_dict") and plugin_class.schema_dict:
            d_ = {name: plugin_class.schema_dict}
            deep_update(d, d_)
    return dict_to_schema(d, required=True, modifier=expand_system_vars)
Returns the merged configuration data schema for this plugin type .
38,563
def get_plugin_class(self, plugin_type, plugin_name):
    """Return the class registered under `plugin_name` for the given
    plugin type."""
    plugin_type_obj = self._get_plugin_type(plugin_type)
    return plugin_type_obj.get_plugin_class(plugin_name)
Return the class registered under the given plugin name .
38,564
def get_plugin_module(self, plugin_type, plugin_name):
    """Return the module defining the class registered under
    `plugin_name` for the given plugin type."""
    plugin_type_obj = self._get_plugin_type(plugin_type)
    return plugin_type_obj.get_plugin_module(plugin_name)
Return the module defining the class registered under the given plugin name .
38,565
def create_instance(self, plugin_type, plugin_name, **instance_kwargs):
    """Create and return an instance of the named plugin."""
    plugin_type_obj = self._get_plugin_type(plugin_type)
    return plugin_type_obj.create_instance(plugin_name, **instance_kwargs)
Create and return an instance of the given plugin .
38,566
def get_summary_string(self):
    """Return a formatted table summarising loaded and failed plugins."""
    rows = [["PLUGIN TYPE", "NAME", "DESCRIPTION", "STATUS"],
            ["-----------", "----", "-----------", "------"]]
    for plugin_type in sorted(self.get_plugin_types()):
        type_name = plugin_type.replace('_', ' ')
        for name in sorted(self.get_plugins(plugin_type)):
            module = self.get_plugin_module(plugin_type, name)
            # Use the module docstring (if any) as the description.
            desc = (getattr(module, "__doc__", None) or '').strip()
            rows.append((type_name, name, desc, "loaded"))
        for name, reason in sorted(self.get_failed_plugins(plugin_type)):
            rows.append((type_name, name, '', "FAILED: %s" % reason))
    return '\n'.join(columnise(rows))
Get a formatted string summarising the plugins that were loaded .
38,567
def get_fragment(self, offset):
    """Return a short repr of the source at `offset` (the part causing
    a problem), with a trailing ellipsis if the source continues."""
    fragment_len = 10
    snippet = '%r' % (self.source[offset:offset + fragment_len])
    if offset + fragment_len < len(self.source):
        snippet += '...'
    return snippet
Get the part of the source which is causing a problem .
38,568
def evaluate(self, node, filename=None):
    """Evaluate a source string or AST node.

    `filename` is used when displaying parse errors. Raises SyntaxError
    with a source fragment when parsing fails or no handler exists for
    the node type.
    """
    if isinstance(node, string_types):
        # Keep the raw source so error messages can quote a fragment.
        self.source = node
        kwargs = {'mode': 'eval'}
        if filename:
            kwargs['filename'] = filename
        try:
            node = ast.parse(node, **kwargs)
        except SyntaxError as e:
            s = self.get_fragment(e.offset)
            raise SyntaxError('syntax error %s' % s)
    # Dispatch on the lowercased AST class name, e.g. "BinOp" -> "binop".
    node_type = node.__class__.__name__.lower()
    handler = self.get_handler(node_type)
    if handler is None:
        if self.source is None:
            s = '(source not available)'
        else:
            s = self.get_fragment(node.col_offset)
        raise SyntaxError("don't know how to evaluate %r %s" % (node_type, s))
    return handler(node)
Evaluate a source string or node using filename when displaying errors .
38,569
def recursive_repr ( func ) : repr_running = set ( ) @ wraps ( func ) def wrapper ( self ) : "Return ellipsis on recursive re-entry to function." key = id ( self ) , get_ident ( ) if key in repr_running : return '...' repr_running . add ( key ) try : return func ( self ) finally : repr_running . discard ( key ) return wrapper
Decorator to prevent infinite repr recursion .
38,570
def _reset ( self , load ) : values = reduce ( iadd , self . _lists , [ ] ) self . _clear ( ) self . _load = load self . _half = load >> 1 self . _dual = load << 1 self . _update ( values )
Reset sorted list load .
38,571
def _build_index(self):
    """Build the positional index for indexing the sorted list.

    The index is a flattened binary tree: leaves are the sublist
    lengths, each parent is the sum of its two children. `_offset` is
    the position of the first leaf within `_index`.
    """
    # Leaf row: length of each sublist.
    row0 = list(map(len, self._lists))

    if len(row0) == 1:
        self._index[:] = row0
        self._offset = 0
        return

    # Pair up adjacent leaves; both iterators share state so zip()
    # consumes elements two at a time.
    head = iter(row0)
    tail = iter(head)
    row1 = list(starmap(add, zip(head, tail)))

    # Odd leaf count: the last leaf is carried up unpaired.
    if len(row0) & 1:
        row1.append(row0[-1])

    if len(row1) == 1:
        self._index[:] = row1 + row0
        self._offset = 1
        return

    # Pad row1 to a power of two so the tree is complete.
    size = 2 ** (int(log_e(len(row1) - 1, 2)) + 1)
    row1.extend(repeat(0, size - len(row1)))
    tree = [row0, row1]

    # Keep summing pairs until a single root remains.
    while len(tree[-1]) > 1:
        head = iter(tree[-1])
        tail = iter(head)
        row = list(starmap(add, zip(head, tail)))
        tree.append(row)

    # Flatten root-first into _index (reduce with iadd extends in place).
    reduce(iadd, reversed(tree), self._index)
    self._offset = size * 2 - 1
Build an index for indexing the sorted list .
38,572
def irange_key(self, min_key=None, max_key=None, inclusive=(True, True),
               reverse=False):
    """Create an iterator of values between `min_key` and `max_key`.

    `inclusive` is a pair of booleans controlling whether each bound is
    included. `reverse` iterates from high to low.
    """
    _maxes = self._maxes
    if not _maxes:
        # Empty list: nothing to iterate.
        return iter(())
    _keys = self._keys

    # Locate the (sublist, index) position of the lower bound.
    if min_key is None:
        min_pos = 0
        min_idx = 0
    else:
        if inclusive[0]:
            min_pos = bisect_left(_maxes, min_key)
            if min_pos == len(_maxes):
                # min_key is beyond every stored key.
                return iter(())
            min_idx = bisect_left(_keys[min_pos], min_key)
        else:
            min_pos = bisect_right(_maxes, min_key)
            if min_pos == len(_maxes):
                return iter(())
            min_idx = bisect_right(_keys[min_pos], min_key)

    # Locate the (sublist, index) position of the upper bound.
    if max_key is None:
        max_pos = len(_maxes) - 1
        max_idx = len(_keys[max_pos])
    else:
        if inclusive[1]:
            max_pos = bisect_right(_maxes, max_key)
            if max_pos == len(_maxes):
                # max_key is beyond every stored key: clamp to the end.
                max_pos -= 1
                max_idx = len(_keys[max_pos])
            else:
                max_idx = bisect_right(_keys[max_pos], max_key)
        else:
            max_pos = bisect_left(_maxes, max_key)
            if max_pos == len(_maxes):
                max_pos -= 1
                max_idx = len(_keys[max_pos])
            else:
                max_idx = bisect_left(_keys[max_pos], max_key)

    return self._islice(min_pos, min_idx, max_pos, max_idx, reverse)
Create an iterator of values between min_key and max_key .
38,573
def view_graph(graph_str, parent=None, prune_to=None):
    """View a graph.

    Renders `graph_str` to an image file (cached per graph/prune
    combination) and displays it in an image viewer dialog.
    """
    from rezgui.dialogs.ImageViewerDialog import ImageViewerDialog
    from rez.config import config

    cache_key = hash((graph_str, prune_to))
    filepath = graph_file_lookup.get(cache_key)
    if filepath and not os.path.exists(filepath):
        # Cached image was deleted out from under us; regenerate.
        filepath = None

    if filepath is None:
        suffix = ".%s" % config.dot_image_format
        fd, filepath = tempfile.mkstemp(suffix=suffix, prefix="rez-graph-")
        os.close(fd)

        write_dlg = WriteGraphDialog(graph_str, filepath, parent,
                                     prune_to=prune_to)
        if not write_dlg.write_graph():
            return
        graph_file_lookup[cache_key] = filepath

    viewer = ImageViewerDialog(filepath, parent)
    viewer.exec_()
View a graph .
38,574
def select_version(self, version_range):
    """Select the latest versioned package in the given range.

    Returns the selected version, or None if no package falls within
    the range.
    """
    best_row = -1
    best_version = None
    for row_index, package in self.packages.iteritems():
        in_range = package.version in version_range
        if in_range and (best_version is None
                         or best_version < package.version):
            best_version = package.version
            best_row = row_index
    self.clearSelection()
    if best_row != -1:
        self.selectRow(best_row)
    return best_version
Select the latest versioned package in the given range .
38,575
def _fromset ( cls , values , key = None ) : sorted_set = object . __new__ ( cls ) sorted_set . _set = values sorted_set . __init__ ( key = key ) return sorted_set
Initialize sorted set from existing set .
38,576
def setup_parser_common(parser):
    """Parser setup common to both rez-build and rez-release.

    Adds build-process, build-system, variant selection and build-args
    options to `parser`. If exactly one build system is valid for the
    current working directory, its own CLI arguments are bound too.
    """
    from rez.build_process_ import get_build_process_types
    from rez.build_system import get_valid_build_systems

    process_types = get_build_process_types()
    parser.add_argument(
        "--process", type=str, choices=process_types, default="local",
        help="the build process to use (default: %(default)s).")

    # Detect which build systems apply to the package in the cwd.
    package = get_current_developer_package()
    clss = get_valid_build_systems(os.getcwd(), package=package)

    if clss:
        if len(clss) == 1:
            # Only one valid system: expose its specific CLI options.
            cls_ = clss[0]
            title = "%s build system arguments" % cls_.name()
            group = parser.add_argument_group(title)
            cls_.bind_cli(parser, group)
        types = [x.name() for x in clss]
    else:
        types = None

    parser.add_argument(
        "-b", "--build-system", dest="buildsys", choices=types,
        help="the build system to use. If not specified, it is detected. Set "
        "'build_system' or 'build_command' to specify the build system in the "
        "package itself.")
    parser.add_argument(
        "--variants", nargs='+', type=int, metavar="INDEX",
        help="select variants to build (zero-indexed).")
    parser.add_argument(
        "--ba", "--build-args", dest="build_args", metavar="ARGS",
        help="arguments to pass to the build system. Alternatively, list these "
        "after a '--'.")
    parser.add_argument(
        "--cba", "--child-build-args", dest="child_build_args", metavar="ARGS",
        help="arguments to pass to the child build system, if any. "
        "Alternatively, list these after a second '--'.")
Parser setup common to both rez - build and rez - release .
38,577
def scoped_format(txt, **objects):
    """Format a string with respect to a set of objects' attributes.

    Keyword args `pretty` and `expand` are popped off and forwarded to
    the formatter; the remaining kwargs form the format namespace.
    """
    pretty = objects.pop("pretty", RecursiveAttribute.format_pretty)
    expand = objects.pop("expand", RecursiveAttribute.format_expand)
    # NOTE(review): `attr` is never used below — possibly dead code, or
    # the constructor is relied on for validation side effects; confirm
    # against RecursiveAttribute before removing.
    attr = RecursiveAttribute(objects, read_only=True)
    formatter = scoped_formatter(**objects)
    return formatter.format(txt, pretty=pretty, expand=expand)
Format a string with respect to a set of objects attributes .
38,578
def to_dict(self):
    """Get an equivalent dict representation.

    Nested `RecursiveAttribute` values are converted recursively; all
    other values are passed through unchanged.
    """
    result = {}
    # `items()` instead of `iteritems()` so this works on Python 2 and 3.
    for key, value in self.__dict__["data"].items():
        if isinstance(value, RecursiveAttribute):
            result[key] = value.to_dict()
        else:
            result[key] = value
    return result
Get an equivalent dict representation .
38,579
def value(self, key, type_=None):
    """Get the value of a setting.

    If `type_` is given, the stored value is converted to it (None is
    passed through). Otherwise the setting's default determines the
    expected type, converting only on mismatch.
    """
    if type_ is not None:
        raw = self._value(key, None)
        return None if raw is None else self._convert_value(raw, type_)

    default = self._default_value(key)
    raw = self._value(key, default)
    if type(raw) == type(default):
        return raw
    return self._convert_value(raw, type(default))
Get the value of a setting .
38,580
def get_string_list(self, key):
    """Get a list of strings stored under `key` as a settings array."""
    count = self.beginReadArray(key)
    result = []
    for index in range(count):
        self.setArrayIndex(index)
        result.append(str(self._value("entry")))
    self.endArray()
    return result
Get a list of strings .
38,581
def prepend_string_list(self, key, value, max_length_key):
    """Prepend a fixed-length string list with a new string.

    `value` moves to the front, duplicates of it are dropped, and the
    list is truncated to the length stored under `max_length_key`.
    """
    limit = self.get(max_length_key)
    existing = [x for x in self.get_string_list(key) if x != value]
    strings = ([value] + existing)[:limit]

    self.beginWriteArray(key)
    for index, entry in enumerate(strings):
        self.setArrayIndex(index)
        self.setValue("entry", entry)
    self.endArray()
Prepend a fixed - length string list with a new string .
38,582
def insert(self, item, priority):
    """Insert `item` into the queue with the given `priority`.

    The pair is wrapped in a `HeapItem`, which presumably defines the
    heap ordering by priority — confirm against HeapItem's comparison.
    """
    heappush(self.heap, HeapItem(item, priority))
Insert item into the queue with the given priority .
38,583
def connected_components(graph):
    """Connected components.

    Returns the `visited` mapping produced by depth-first search, with
    each unvisited node starting a new component number (from 1).
    """
    # DFS recursion depth can approach the node count; raise the limit
    # while working and restore it before returning.
    saved_limit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, saved_limit))

    visited = {}
    component = 1
    for node in graph:
        if node not in visited:
            _dfs(graph, visited, component, node)
            component = component + 1

    setrecursionlimit(saved_limit)
    return visited
Find the connected components of the given graph, mapping each node to its component number .
38,584
def cut_edges(graph):
    """Return the cut-edges of the given graph.

    A cut edge, or bridge, is an edge whose removal increases the
    number of connected components in the graph.
    """
    # DFS recursion depth can approach the node count; raise the limit
    # while working and restore it before returning.
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))

    if 'hypergraph' == graph.__class__.__name__:
        # Fix: previously this early return skipped restoring the
        # recursion limit, leaking the raised limit to the caller.
        edges = _cut_hyperedges(graph)
        setrecursionlimit(recursionlimit)
        return edges

    pre = {}            # pre-ordering of visited nodes
    low = {}            # lowest pre-order reachable from each subtree
    spanning_tree = {}
    reply = []
    pre[None] = 0

    for each in graph:
        if each not in pre:
            spanning_tree[each] = None
            _cut_dfs(graph, spanning_tree, pre, low, reply, each)

    setrecursionlimit(recursionlimit)
    return reply
Return the cut - edges of the given graph . A cut edge or bridge is an edge of a graph whose removal increases the number of connected components in the graph .
38,585
def _cut_hyperedges(hypergraph):
    """Return the cut-hyperedges of the given hypergraph.

    Computes cut-nodes on the hypergraph's associated graph and keeps
    only the entries tagged 'h' (hyperedge nodes).
    """
    return [each[0] for each in cut_nodes(hypergraph.graph)
            if each[1] == 'h']
Return the cut - hyperedges of the given hypergraph .
38,586
def cut_nodes(graph):
    """Return the cut-nodes of the given graph.

    A cut node, or articulation point, is a node whose removal increases
    the number of connected components in the graph.
    """
    # DFS recursion depth can approach the node count; raise the limit
    # while working and restore it before returning.
    recursionlimit = getrecursionlimit()
    setrecursionlimit(max(len(graph.nodes()) * 2, recursionlimit))

    if 'hypergraph' == graph.__class__.__name__:
        # Fix: previously this early return skipped restoring the
        # recursion limit, leaking the raised limit to the caller.
        nodes = _cut_hypernodes(graph)
        setrecursionlimit(recursionlimit)
        return nodes

    pre = {}            # pre-ordering of visited nodes
    low = {}            # lowest pre-order reachable from each subtree
    reply = {}
    spanning_tree = {}
    pre[None] = 0

    # Create the spanning trees, calculating pre/low for each node.
    for each in graph:
        if each not in pre:
            spanning_tree[each] = None
            _cut_dfs(graph, spanning_tree, pre, low, [], each)

    # Find the cut nodes.
    for each in graph:
        if spanning_tree[each] is not None:
            # Non-root: cut node if some child's subtree cannot reach
            # above this node.
            for other in graph[each]:
                if low[other] >= pre[each] and spanning_tree[other] == each:
                    reply[each] = 1
        else:
            # Root of a spanning tree: cut node iff it has >= 2 children.
            children = 0
            for other in graph:
                if spanning_tree[other] == each:
                    children = children + 1
            if children >= 2:
                reply[each] = 1

    setrecursionlimit(recursionlimit)
    return list(reply.keys())
Return the cut - nodes of the given graph . A cut node or articulation point is a node of a graph whose removal increases the number of connected components in the graph .
38,587
def _cut_hypernodes(hypergraph):
    """Return the cut-nodes of the given hypergraph.

    Computes cut-nodes on the hypergraph's associated graph and keeps
    only the entries tagged 'n' (ordinary nodes).
    """
    return [each[0] for each in cut_nodes(hypergraph.graph)
            if each[1] == 'n']
Return the cut - nodes of the given hypergraph .
38,588
def del_edge(self, edge):
    """Remove an edge from the graph.

    For a non-loop edge both directions are removed, since the graph is
    stored with symmetric adjacency.
    """
    u, v = edge
    directions = [(u, v)] if u == v else [(u, v), (v, u)]
    for a, b in directions:
        self.node_neighbors[a].remove(b)
        self.del_edge_labeling((a, b))
Remove an edge from the graph .
38,589
def edge_weight(self, edge):
    """Get the weight of an edge.

    Installs and returns the default weight if none was set.
    """
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.WEIGHT_ATTRIBUTE_NAME,
                                 self.DEFAULT_WEIGHT)
Get the weight of an edge .
38,590
def set_edge_weight(self, edge, wt):
    """Set the weight of an edge.

    On undirected graphs the reverse direction is updated as well.
    """
    self.set_edge_properties(edge, weight=wt)
    if not self.DIRECTED:
        u, v = edge
        self.set_edge_properties((v, u), weight=wt)
Set the weight of an edge .
38,591
def edge_label(self, edge):
    """Get the label of an edge.

    Installs and returns the default label if none was set.
    """
    properties = self.get_edge_properties(edge)
    return properties.setdefault(self.LABEL_ATTRIBUTE_NAME,
                                 self.DEFAULT_LABEL)
Get the label of an edge .
38,592
def set_edge_label(self, edge, label):
    """Set the label of an edge.

    On undirected graphs the reverse direction is updated as well.
    """
    self.set_edge_properties(edge, label=label)
    if not self.DIRECTED:
        u, v = edge
        self.set_edge_properties((v, u), label=label)
Set the label of an edge .
38,593
def add_edge_attribute(self, edge, attr):
    """Add attribute to the given edge.

    On undirected graphs the reverse direction (for non-loop edges) is
    updated too. Attribute lists are rebound, not mutated in place.
    """
    self.edge_attr[edge] = self.edge_attributes(edge) + [attr]
    if not self.DIRECTED and edge[0] != edge[1]:
        reverse = (edge[1], edge[0])
        self.edge_attr[reverse] = self.edge_attributes(reverse) + [attr]
Add attribute to the given edge .
38,594
def add_node_attribute(self, node, attr):
    """Add attribute to the given node.

    Rebinds the attribute list rather than mutating it in place, so any
    previously handed-out list is unaffected.
    """
    current = self.node_attr[node]
    self.node_attr[node] = current + [attr]
Add attribute to the given node .
38,595
def activation_shell_code(self, shell=None):
    """Get shell code that should be run to activate this suite."""
    from rez.shells import create_shell
    from rez.rex import RexExecutor

    interpreter = create_shell(shell)
    rex = RexExecutor(interpreter=interpreter,
                      parent_variables=["PATH"],
                      shebang=False)
    # Prepending/appending the suite's tools dir is the activation step.
    rex.env.PATH.append(self.tools_path)
    return rex.get_output().strip()
Get shell code that should be run to activate this suite .
38,596
def context(self, name):
    """Get a context.

    Returns the cached context if present; otherwise loads it from the
    suite's saved .rxt file and caches it.
    """
    data = self._context(name)
    cached = data.get("context")
    if cached:
        return cached

    assert self.load_path
    path = os.path.join(self.load_path, "contexts", "%s.rxt" % name)
    loaded = ResolvedContext.load(path)
    data["context"] = loaded
    data["loaded"] = True
    return loaded
Get a context from the suite , loading it from its saved . rxt file on first access .
38,597
def add_context(self, name, context, prefix_char=None):
    """Add a context to the suite.

    Raises SuiteError if a context of that name already exists or the
    context is not successfully resolved. The context is copied before
    being stored, and the suite's tool table is rebuilt.
    """
    if name in self.contexts:
        raise SuiteError("Context already in suite: %r" % name)
    if not context.success:
        raise SuiteError("Context is not resolved: %r" % name)

    entry = dict(name=name,
                 context=context.copy(),
                 tool_aliases={},
                 hidden_tools=set(),
                 priority=self._next_priority,
                 prefix_char=prefix_char)
    self.contexts[name] = entry
    self._flush_tools()
Add a context to the suite .
38,598
def find_contexts(self, in_request=None, in_resolve=None):
    """Find contexts in the suite based on search criteria.

    `in_request` keeps only contexts whose request contains a package of
    that name. `in_resolve` (a request string or PackageRequest) keeps
    only contexts whose resolve is consistent with that request.
    """
    names = self.context_names

    if in_request:
        def _in_request(name):
            context = self.context(name)
            packages = set(x.name for x in context.requested_packages(True))
            return (in_request in packages)

        names = [x for x in names if _in_request(x)]

    if in_resolve:
        # Accept a plain request string as well as a PackageRequest.
        if isinstance(in_resolve, basestring):
            in_resolve = PackageRequest(in_resolve)

        def _in_resolve(name):
            context = self.context(name)
            variant = context.get_resolved_package(in_resolve.name)
            if variant:
                overlap = (variant.version in in_resolve.range)
                # Conflict requests match when the version is NOT in
                # range; normal requests match when it IS.
                return ((in_resolve.conflict and not overlap)
                        or (overlap and not in_resolve.conflict))
            else:
                # Package absent from the resolve: only a conflict
                # request is satisfied.
                return in_resolve.conflict

        names = [x for x in names if _in_resolve(x)]

    return names
Find contexts in the suite based on search criteria .
38,599
def remove_context(self, name):
    """Remove a context from the suite."""
    # Validates the context exists (presumably raises if missing —
    # confirm against _context) before removal.
    self._context(name)
    del self.contexts[name]
    self._flush_tools()
Remove a context from the suite .