idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
38,300
def get_config_vars(*args):
    """With no arguments, return a dict of all configuration variables
    relevant for the current platform; with arguments, return a list of
    values for the named variables (None for unknown names).

    The dict is built once and cached in the module-global _CONFIG_VARS.
    """
    global _CONFIG_VARS
    if _CONFIG_VARS is None:
        _CONFIG_VARS = {}
        # Normalized versions of prefix/exec_prefix are handy to have
        # around; distutils historically exposed these names.
        _CONFIG_VARS['prefix'] = _PREFIX
        _CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
        _CONFIG_VARS['py_version'] = _PY_VERSION
        _CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
        _CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
        _CONFIG_VARS['base'] = _PREFIX
        _CONFIG_VARS['platbase'] = _EXEC_PREFIX
        _CONFIG_VARS['projectbase'] = _PROJECT_BASE

        try:
            _CONFIG_VARS['abiflags'] = sys.abiflags
        except AttributeError:
            # sys.abiflags only exists on some platforms/versions
            _CONFIG_VARS['abiflags'] = ''

        if os.name in ('nt', 'os2'):
            _init_non_posix(_CONFIG_VARS)
        if os.name == 'posix':
            _init_posix(_CONFIG_VARS)

        if sys.version >= '2.6':
            _CONFIG_VARS['userbase'] = _getuserbase()

        # Always set 'srcdir'; normalize it if the makefile provided one.
        if 'srcdir' not in _CONFIG_VARS:
            _CONFIG_VARS['srcdir'] = _PROJECT_BASE
        else:
            _CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])

        # When running from an uninstalled build, make srcdir absolute
        # relative to the project base (unless we're already there).
        if _PYTHON_BUILD and os.name == "posix":
            base = _PROJECT_BASE
            try:
                cwd = os.getcwd()
            except OSError:
                cwd = None
            if (not os.path.isabs(_CONFIG_VARS['srcdir']) and base != cwd):
                srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
                _CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)

        if sys.platform == 'darwin':
            kernel_version = os.uname()[2]  # e.g. '8.4.3'
            major_version = int(kernel_version.split('.')[0])

            if major_version < 8:
                # OSX before 10.4: no universal binaries; strip -arch and
                # -isysroot flags recorded at build time.
                # NOTE: patterns are raw strings (they were plain strings
                # with \s/\w escapes, which is deprecated escape syntax).
                for key in ('LDFLAGS', 'BASECFLAGS', 'CFLAGS',
                            'PY_CFLAGS', 'BLDSHARED'):
                    flags = _CONFIG_VARS[key]
                    flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                    flags = re.sub('-isysroot [^ \t]*', ' ', flags)
                    _CONFIG_VARS[key] = flags
            else:
                # Allow the user to override architectures via ARCHFLAGS.
                if 'ARCHFLAGS' in os.environ:
                    arch = os.environ['ARCHFLAGS']
                    for key in ('LDFLAGS', 'BASECFLAGS', 'CFLAGS',
                                'PY_CFLAGS', 'BLDSHARED'):
                        flags = _CONFIG_VARS[key]
                        flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
                        flags = flags + ' ' + arch
                        _CONFIG_VARS[key] = flags

                # If the SDK used at build time no longer exists, drop the
                # -isysroot flags so compilation still works.
                CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
                m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
                if m is not None:
                    sdk = m.group(1)
                    if not os.path.exists(sdk):
                        for key in ('LDFLAGS', 'BASECFLAGS', 'CFLAGS',
                                    'PY_CFLAGS', 'BLDSHARED'):
                            flags = _CONFIG_VARS[key]
                            flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ',
                                           flags)
                            _CONFIG_VARS[key] = flags

    if args:
        vals = []
        for name in args:
            vals.append(_CONFIG_VARS.get(name))
        return vals
    else:
        return _CONFIG_VARS
With no arguments return a dictionary of all configuration variables relevant for the current platform .
38,301
def set_authors(data):
    """Add an 'authors' attribute based on repo contributions (best-effort)."""
    if "authors" in data:
        return  # already populated; nothing to do

    script = os.path.join(os.path.dirname(__file__), "get_committers.sh")
    proc = subprocess.Popen(["bash", script], stdout=subprocess.PIPE)
    out, _ = proc.communicate()
    if proc.returncode:
        return  # script failed; leave data untouched

    data["authors"] = [line.strip() for line in out.strip().split('\n')]
Add authors attribute based on repo contributions
38,302
def make_package(name, path, make_base=None, make_root=None,
                 skip_existing=True, warn_on_skip=True):
    """Make and install a package.

    Yields a PackageMaker for the caller to configure; on resume, installs
    each variant into `path`, optionally running make_base/make_root
    callbacks from the base/root directories.
    """
    maker = PackageMaker(name)
    yield maker

    package = maker.get_package()
    cwd = os.getcwd()  # kept for parity; retain_cwd handles restoration

    # Work out which variants actually need installing.
    if skip_existing:
        src_variants = []
        for variant in package.iter_variants():
            existing = variant.install(path, dry_run=True)
            if existing is None:
                src_variants.append(variant)
            else:
                maker.skipped_variants.append(existing)
                if warn_on_skip:
                    print_warning("Skipping installation: Package variant already "
                                  "exists: %s" % existing.uri)
    else:
        src_variants = package.iter_variants()

    with retain_cwd():
        for variant in src_variants:
            installed = variant.install(path)

            base = installed.base
            if make_base and base:
                if not os.path.exists(base):
                    os.makedirs(base)
                os.chdir(base)
                make_base(installed, base)

            root = installed.root
            if make_root and root:
                if not os.path.exists(root):
                    os.makedirs(root)
                os.chdir(root)
                make_root(installed, root)

            maker.installed_variants.append(installed)
Make and install a package .
38,303
def get_package(self):
    """Create the analogous package from this maker's data."""
    package_data = self._get_data()
    package_data = package_schema.validate(package_data)

    # Enforce any minimum rez version declared by the package definition.
    if "requires_rez_version" in package_data:
        ver = package_data.pop("requires_rez_version")
        if _rez_Version < ver:
            raise PackageMetadataError(
                "Failed reading package definition file: rez version >= %s "
                "needed (current version is %s)" % (ver, _rez_Version))

    # Build a one-package in-memory repository and pull the package out.
    version_str = package_data.get("version") or "_NO_VERSION"
    repo_data = {self.name: {version_str: package_data}}
    repo = create_memory_package_repository(repo_data)

    family_resource = repo.get_package_family(self.name)
    it = repo.iter_packages(family_resource)
    package_resource = it.next()  # py2 iterator protocol

    package = self.package_cls(package_resource)
    package.validate_data()
    return package
Create the analogous package .
38,304
def getTokensEndLoc():
    """Determine the end location of the parsed tokens.

    May only be called from within a parse action.
    """
    import inspect
    fstack = inspect.stack()
    try:
        # Walk up the call stack looking for the internal parse method.
        for frame_info in fstack[2:]:
            if frame_info[3] == "_parseNoCache":
                return frame_info[0].f_locals["loc"]
        else:
            # for-else: no parse frame found anywhere on the stack.
            raise ParseFatalException("incorrect usage of getTokensEndLoc - "
                                      "may only be called from within a parse action")
    finally:
        del fstack  # break reference cycles created by inspect.stack()
Method to be called from within a parse action to determine the end location of the parsed tokens .
38,305
def schema_keys(schema):
    """Get the string values of keys in a dict-based schema."""
    def _leaf(node):
        # Unwrap nested Schema objects down to the underlying value.
        while isinstance(node, Schema):
            node = node._schema
        return node

    dict_ = schema._schema
    assert isinstance(dict_, dict)

    keys = set()
    for key in dict_.iterkeys():
        leaf = _leaf(key)
        if isinstance(leaf, basestring):
            keys.add(leaf)
    return keys
Get the string values of keys in a dict - based schema .
38,306
def dict_to_schema(schema_dict, required, allow_custom_keys=True, modifier=None):
    """Convert a dict of Schemas into a Schema.

    String keys become Required/Optional depending on `required`; values
    are converted recursively, with `modifier` applied via Use/And.
    """
    if modifier:
        modifier = Use(modifier)

    def _convert(value):
        if isinstance(value, dict):
            converted = {}
            for key, child in value.iteritems():
                if isinstance(key, basestring):
                    key = Required(key) if required else Optional(key)
                converted[key] = _convert(child)
            if allow_custom_keys:
                # Any other string key maps through the modifier (or anything).
                converted[Optional(basestring)] = modifier or object
            return Schema(converted)
        if modifier:
            return And(value, modifier)
        return value

    return _convert(schema_dict)
Convert a dict of Schemas into a Schema .
38,307
def enter_diff_mode(self, context_model=None):
    """Enter diff mode.

    With no `context_model`, diffs against a snapshot of the current
    context model; otherwise diffs against the given model.
    """
    assert not self.diff_mode
    self.diff_mode = True

    if context_model is None:
        self.diff_from_source = True
        self.diff_context_model = self.context_model.copy()
    else:
        self.diff_from_source = False
        self.diff_context_model = context_model

    self.clear()
    self.setColumnCount(5)
    self.refresh()
Enter diff mode .
38,308
def leave_diff_mode(self):
    """Leave diff mode and restore the normal two-column view."""
    assert self.diff_mode
    self.diff_mode = False
    self.diff_from_source = False
    self.diff_context_model = None
    self.setColumnCount(2)
    self.refresh()
Leave diff mode .
38,309
def get_title(self):
    """Return a string suitable for titling a window containing this table."""
    def _title(context_model):
        context = context_model.context()
        if context is None:
            return "new context*"
        if context.load_path:
            title = os.path.basename(context.load_path)
        else:
            title = "new context"
        if context_model.is_modified():
            title += '*'
        return title

    if not self.diff_mode:
        return _title(self.context_model)

    diff_title = _title(self.diff_context_model)
    if self.diff_from_source:
        diff_title += "'"  # mark the diff target as a source snapshot
    return "%s %s %s" % (_title(self.context_model),
                         self.short_double_arrow, diff_title)
Returns a string suitable for titling a window containing this table .
38,310
def _color_level(str_, level):
    """Wrap `str_` with styling for the given message level.

    The styling is determined from the rez configuration.
    """
    fore, back, styles = _get_style_from_config(level)
    return _color(str_, fore, back, styles)
Return the string wrapped with the appropriate styling for the message level . The styling will be determined based on the rez configuration .
38,311
def _color(str_, fore_color=None, back_color=None, styles=None):
    """Return the string wrapped with the appropriate ANSI styling escape
    sequences; a no-op on Windows or when color is disabled."""
    if not config.get("color_enabled", False) or platform_.name == "windows":
        return str_

    _init_colorama()
    prefix = ""
    if fore_color:
        prefix += getattr(colorama.Fore, fore_color.upper(), '')
    if back_color:
        prefix += getattr(colorama.Back, back_color.upper(), '')
    for style in (styles or []):
        prefix += getattr(colorama.Style, style.upper(), '')
    return prefix + str_ + colorama.Style.RESET_ALL
Return the string wrapped with the appropriate styling escape sequences .
38,312
def late():
    """Decorator for functions in package.py that are evaluated lazily."""
    from rez.package_resources_ import package_rex_keys

    def decorated(fn):
        # Rex-bound functions are never late-bound; disallow the combination.
        if fn.__name__ in package_rex_keys:
            raise ValueError("Cannot use @late decorator on function '%s'"
                             % fn.__name__)
        setattr(fn, "_late", True)
        _add_decorator(fn, "late")
        return fn

    return decorated
Used by functions in package . py that are evaluated lazily .
38,313
def include(module_name, *module_names):
    """Decorator giving functions in package.py access to named modules."""
    def decorated(fn):
        _add_decorator(fn, "include",
                       nargs=[module_name] + list(module_names))
        return fn
    return decorated
Used by functions in package . py to have access to named modules .
38,314
def iter_package_families(paths=None):
    """Iterate over package families, in no particular order.

    Defaults to the configured packages path when `paths` is None.
    """
    for path in (paths or config.packages_path):
        repo = package_repository_manager.get_repository(path)
        for family_resource in repo.iter_package_families():
            yield PackageFamily(family_resource)
Iterate over package families in no particular order .
38,315
def iter_packages(name, range_=None, paths=None):
    """Iterate over Package instances, in no particular order.

    Duplicate (name, version) pairs across repositories are suppressed;
    a string `range_` is promoted to a VersionRange.
    """
    seen = set()
    for repo, family_resource in _get_families(name, paths):
        for package_resource in repo.iter_packages(family_resource):
            key = (package_resource.name, package_resource.version)
            if key in seen:
                continue
            seen.add(key)

            if range_:
                # Lazily promote a string range on first use.
                if isinstance(range_, basestring):
                    range_ = VersionRange(range_)
                if package_resource.version not in range_:
                    continue

            yield Package(package_resource)
Iterate over Package instances in no particular order .
38,316
def get_package(name, version, paths=None):
    """Get an exact version of a package, or None if not found."""
    if isinstance(version, basestring):
        range_ = VersionRange("==%s" % version)
    else:
        range_ = VersionRange.from_version(version, "==")

    try:
        return iter_packages(name, range_, paths).next()  # py2 iterator
    except StopIteration:
        return None
Get an exact version of a package .
38,317
def get_package_from_string(txt, paths=None):
    """Get a package given a string such as 'foo-1.2'."""
    obj = VersionedObject(txt)
    return get_package(obj.name, obj.version, paths=paths)
Get a package given a string .
38,318
def get_developer_package(path, format=None):
    """Create a developer package from the definition found at `path`."""
    # Imported locally to avoid a circular import at module load time.
    from rez.developer_package import DeveloperPackage
    return DeveloperPackage.from_path(path, format=format)
Create a developer package .
38,319
def create_package(name, data, package_cls=None):
    """Create a package given raw package data."""
    from rez.package_maker__ import PackageMaker
    return PackageMaker(name, data, package_cls=package_cls).get_package()
Create a package given package data .
38,320
def get_last_release_time(name, paths=None):
    """Return the most recent time this package was released.

    Returns 0 if any repository reports no release information.
    """
    max_time = 0
    for repo, family_resource in _get_families(name, paths):
        time_ = repo.get_last_release_time(family_resource)
        if time_ == 0:
            # One unknown repo makes the overall answer unknown.
            return 0
        max_time = max(max_time, time_)
    return max_time
Returns the most recent time this package was released .
38,321
def get_completions(prefix, paths=None, family_only=False):
    """Get autocompletion options given a prefix string."""
    # A leading '!' or '~' operator is stripped off and re-applied at the end.
    op = None
    if prefix and prefix[0] in ('!', '~'):
        if family_only:
            return set()
        op, prefix = prefix[0], prefix[1:]

    # A qualifier separator means a family name is already present.
    fam = None
    for ch in ('-', '@', '#'):
        if ch in prefix:
            if family_only:
                return set()
            fam = prefix.split(ch)[0]
            break

    words = set()
    if not fam:
        words = set(x.name for x in iter_package_families(paths=paths)
                    if x.name.startswith(prefix))
        if len(words) == 1:
            fam = iter(words).next()  # py2 iterator

    if family_only:
        return words

    if fam:
        it = iter_packages(fam, paths=paths)
        words.update(x.qualified_name for x in it
                     if x.qualified_name.startswith(prefix))

    if op:
        words = set(op + x for x in words)
    return words
Get autocompletion options given a prefix string .
38,322
def iter_packages(self):
    """Iterate over the packages within this family, in no particular order."""
    for package_resource in self.repository.iter_packages(self.resource):
        yield Package(package_resource)
Iterate over the packages within this family in no particular order .
38,323
def is_local(self):
    """Return True if the package is in the local package repository."""
    local_repo = package_repository_manager.get_repository(
        self.config.local_packages_path)
    return self.resource._repository.uid == local_repo.uid
Returns True if the package is in the local package repository
38,324
def print_info(self, buf=None, format_=FileFormat.yaml,
               skip_attributes=None, include_release=False):
    """Print the contents of the package to `buf` (stdout by default)."""
    data = self.validated_data().copy()

    # Present config as the raw dict from the package definition,
    # not the validated config object.
    data.pop("config", None)
    if self.config:
        if isinstance(self, Package):
            config_dict = self.data.get("config")
        else:
            config_dict = self.parent.data.get("config")
        data["config"] = config_dict

    if not include_release:
        skip_attributes = list(skip_attributes or []) + list(package_release_keys)

    buf = buf or sys.stdout
    dump_package_data(data, buf=buf, format_=format_,
                      skip_attributes=skip_attributes)
Print the contents of the package .
38,325
def qualified_name(self):
    """Get the qualified name of the package, e.g. 'foo-1.2'."""
    return str(VersionedObject.construct(self.name, self.version))
Get the qualified name of the package .
38,326
def parent(self):
    """Get the parent package family, or None if there is none."""
    family = self.repository.get_parent_package_family(self.resource)
    if not family:
        return None
    return PackageFamily(family)
Get the parent package family .
38,327
def iter_variants(self):
    """Iterate over the variants within this package, in index order."""
    for variant_resource in self.repository.iter_variants(self.resource):
        yield Variant(variant_resource, context=self.context, parent=self)
Iterate over the variants within this package in index order .
38,328
def get_variant(self, index=None):
    """Get the variant with the associated index, or None if not found."""
    matches = (v for v in self.iter_variants() if v.index == index)
    return next(matches, None)
Get the variant with the associated index .
38,329
def qualified_name(self):
    """Get the qualified name of the variant, e.g. 'foo-1.2[0]'."""
    if self.index is None:
        idxstr = ''
    else:
        idxstr = str(self.index)
    return "%s[%s]" % (self.qualified_package_name, idxstr)
Get the qualified name of the variant .
38,330
def parent(self):
    """Get the parent package, lazily cached on first access."""
    if self._parent is not None:
        return self._parent

    try:
        package_resource = self.repository.get_parent_package(self.resource)
        self._parent = Package(package_resource, context=self.context)
    except AttributeError as e:
        # Surface attribute problems as ValueError, preserving traceback.
        reraise(e, ValueError)

    return self._parent
Get the parent package .
38,331
def get_requires(self, build_requires=False, private_build_requires=False):
    """Get the requirements of the variant.

    Build and private-build requirements are appended when requested.
    """
    requires = self.requires or []
    if build_requires:
        requires = requires + (self.build_requires or [])
    if private_build_requires:
        requires = requires + (self.private_build_requires or [])
    return requires
Get the requirements of the variant .
38,332
def install(self, path, dry_run=False, overrides=None):
    """Install this variant into another package repository.

    Returns None when the repository produced no resource, self when the
    repository handed back this variant's own resource, otherwise a new
    Variant wrapping the installed resource.
    """
    repo = package_repository_manager.get_repository(path)
    resource = repo.install_variant(self.resource,
                                    dry_run=dry_run,
                                    overrides=overrides)
    if resource is None:
        return None
    if resource is self.resource:
        return self
    return Variant(resource)
Install this variant into another package repository .
38,333
def open_file_for_write(filepath, mode=None):
    """Write both to the given filepath and a tmpdir cache location.

    Yields an in-memory stream; on resume, the buffered content is written
    atomically to `filepath` and to a local cache copy.
    """
    stream = StringIO()
    yield stream
    content = stream.getvalue()

    filepath = os.path.realpath(filepath)
    tmpdir = tmpdir_manager.mkdtemp()
    cache_filepath = os.path.join(tmpdir, os.path.basename(filepath))

    debug_print("Writing to %s (local cache of %s)", cache_filepath, filepath)

    with atomic_write(filepath, overwrite=True) as f:
        f.write(content)
    if mode is not None:
        os.chmod(filepath, mode)

    with open(cache_filepath, 'w') as f:
        f.write(content)
    file_cache[filepath] = cache_filepath
Writes both to given filepath and tmpdir location .
38,334
def load_py(stream, filepath=None):
    """Load python-formatted data from a stream, with the configured
    package-build python paths temporarily on sys.path."""
    with add_sys_paths(config.package_definition_build_python_paths):
        return _load_py(stream, filepath=filepath)
Load python - formatted data from a stream .
38,335
def process_python_objects(data, filepath=None):
    """Replace certain values in the given package data dict.

    @early functions are evaluated immediately; @late and rex-key functions
    are wrapped as SourceCode objects; remaining functions, modules and
    dunder keys are stripped from the dict.
    """
    def _process(value):
        if isinstance(value, dict):
            for k, v in value.items():
                value[k] = _process(v)
            return value
        elif isfunction(value):
            func = value
            if hasattr(func, "_early"):
                # Evaluate now, inside a copied globals namespace that
                # exposes 'this' and the standard package objects.
                import types
                fn = types.FunctionType(func.func_code,
                                        func.func_globals.copy(),
                                        name=func.func_name,
                                        argdefs=func.func_defaults,
                                        closure=func.func_closure)
                fn.func_globals["this"] = EarlyThis(data)
                fn.func_globals.update(get_objects())

                spec = getargspec(func)
                args = spec.args or []
                if len(args) not in (0, 1):
                    raise ResourceError("@early decorated function must "
                                        "take zero or one args only")
                if args:
                    value_ = fn(data)
                else:
                    value_ = fn()
                # The result may itself contain functions/dicts.
                return _process(value_)
            elif hasattr(func, "_late"):
                return SourceCode(func=func, filepath=filepath,
                                  eval_as_function=True)
            elif func.__name__ in package_rex_keys:
                return SourceCode(func=func, filepath=filepath,
                                  eval_as_function=False)
            else:
                return func
        else:
            return value

    def _trim(value):
        # Drop modules, dunder keys and non-special functions from dicts.
        if isinstance(value, dict):
            for k, v in value.items():
                if isfunction(v):
                    if v.__name__ == "preprocess":
                        pass  # keep preprocess; it is applied separately
                    else:
                        del value[k]
                elif ismodule(v) or k.startswith("__"):
                    del value[k]
                else:
                    value[k] = _trim(v)
        return value

    data = _process(data)
    data = _trim(data)
    return data
Replace certain values in the given package data dict .
38,336
def load_yaml(stream, **kwargs):
    """Load yaml-formatted data from a stream.

    On error, rewrites '<string>' marks in the exception to the stream's
    real name, so the reported location points at the actual file.
    """
    content = stream.read()
    try:
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; prefer yaml.safe_load if input is untrusted.
        return yaml.load(content) or {}
    # 'as e' form: the legacy 'except Exception, e' comma syntax is a
    # SyntaxError on python 3 and inconsistent with the rest of this code.
    except Exception as e:
        if stream.name and stream.name != '<string>':
            for mark_name in ('context_mark', 'problem_mark'):
                mark = getattr(e, mark_name, None)
                if mark is None:
                    continue
                if getattr(mark, 'name') == '<string>':
                    mark.name = stream.name
        raise e
Load yaml - formatted data from a stream .
38,337
def _blocked ( self , args ) : reason = args . read_shortstr ( ) if self . on_blocked : return self . on_blocked ( reason )
RabbitMQ extension: handle a connection.blocked notification from the broker, forwarding the reason to the on_blocked callback.
38,338
def _x_secure_ok(self, response):
    """Send a security mechanism response (connection.secure-ok)."""
    writer = AMQPWriter()
    writer.write_longstr(response)
    self._send_method((10, 21), writer)
Security mechanism response
38,339
def _x_start_ok(self, client_properties, mechanism, response, locale):
    """Select security mechanism and locale (connection.start-ok)."""
    # Advertise client-side support for server capabilities we handle.
    for capability in ('consumer_cancel_notify', 'connection.blocked'):
        if self.server_capabilities.get(capability):
            if 'capabilities' not in client_properties:
                client_properties['capabilities'] = {}
            client_properties['capabilities'][capability] = True

    args = AMQPWriter()
    args.write_table(client_properties)
    args.write_shortstr(mechanism)
    args.write_longstr(response)
    args.write_shortstr(locale)
    self._send_method((10, 11), args)
Select security mechanism and locale
38,340
def _tune ( self , args ) : client_heartbeat = self . client_heartbeat or 0 self . channel_max = args . read_short ( ) or self . channel_max self . frame_max = args . read_long ( ) or self . frame_max self . method_writer . frame_max = self . frame_max self . server_heartbeat = args . read_short ( ) or 0 if self . server_heartbeat == 0 or client_heartbeat == 0 : self . heartbeat = max ( self . server_heartbeat , client_heartbeat ) else : self . heartbeat = min ( self . server_heartbeat , client_heartbeat ) if not self . client_heartbeat : self . heartbeat = 0 self . _x_tune_ok ( self . channel_max , self . frame_max , self . heartbeat )
Propose connection tuning parameters
38,341
def heartbeat_tick(self, rate=2):
    """Send heartbeat packets if necessary and fail if none have been
    received recently; call on the order of once per second."""
    if not self.heartbeat:
        return

    # Any traffic counts as a heartbeat, in either direction.
    sent_now = self.method_writer.bytes_sent
    recv_now = self.method_reader.bytes_recv
    if self.prev_sent is None or self.prev_sent != sent_now:
        self.last_heartbeat_sent = monotonic()
    if self.prev_recv is None or self.prev_recv != recv_now:
        self.last_heartbeat_received = monotonic()
    self.prev_sent, self.prev_recv = sent_now, recv_now

    # Time to send our own heartbeat?
    if monotonic() > self.last_heartbeat_sent + self.heartbeat:
        self.send_heartbeat()
        self.last_heartbeat_sent = monotonic()

    # Two missed heartbeat intervals means the peer is gone.
    if (self.last_heartbeat_received and
            self.last_heartbeat_received + 2 * self.heartbeat < monotonic()):
        raise ConnectionForced('Too many heartbeats missed')
Send heartbeat packets if necessary and fail if none have been received recently . This should be called frequently on the order of once per second .
38,342
def _x_tune_ok(self, channel_max, frame_max, heartbeat):
    """Send negotiated connection tuning parameters (connection.tune-ok)."""
    writer = AMQPWriter()
    writer.write_short(channel_max)
    writer.write_long(frame_max)
    writer.write_short(heartbeat or 0)
    self._send_method((10, 31), writer)
    self._wait_tune_ok = False
Negotiate connection tuning parameters
38,343
def parent_suite(self):
    """Get the current parent suite, or None if there is none."""
    context = self.context
    if context and context.parent_suite_path:
        return Suite.load(context.parent_suite_path)
    return None
Get the current parent suite .
38,344
def print_info(self, obj=None, buf=sys.stdout):
    """Print a status message about the given object.

    Returns True if anything was known about `obj` (or no obj was given).
    """
    if not obj:
        self._print_info(buf)
        return True

    found = False
    for fn in (self._print_tool_info,
               self._print_package_info,
               self._print_suite_info,
               self._print_context_info):
        matched = fn(obj, buf, found)
        found |= matched
        if matched:
            print >> buf, ''  # py2 print statement, as in the rest of file

    if not found:
        print >> buf, "Rez does not know what '%s' is" % obj
    return found
Print a status message about the given object .
38,345
def print_tools(self, pattern=None, buf=sys.stdout):
    """Print a list of visible tools, optionally filtered by glob `pattern`.

    Returns True if any tools were printed.
    """
    seen = set()
    rows = []

    # Tools from the active context take precedence.
    context = self.context
    if context:
        data = context.get_tools()
        conflicts = set(context.get_conflicting_tools().keys())
        for _, (variant, tools) in sorted(data.items()):
            pkg_str = variant.qualified_package_name
            for tool in tools:
                if pattern and not fnmatch(tool, pattern):
                    continue
                if tool in conflicts:
                    label = "(in conflict)"
                    color = critical
                else:
                    label = ''
                    color = None
                rows.append([tool, '-', pkg_str, "active context", label, color])
                seen.add(tool)

    # Then tools from each suite, skipping any already listed.
    for suite in self.suites:
        for tool, d in suite.get_tools().iteritems():
            if tool in seen:
                continue
            if pattern and not fnmatch(tool, pattern):
                continue

            label = []
            color = None

            # Warn if some other executable shadows the suite tool on PATH.
            path = which(tool)
            if path:
                path_ = os.path.join(suite.tools_path, tool)
                if path != path_:
                    label.append("(hidden by unknown tool '%s')" % path)
                    color = warning

            variant = d["variant"]
            if isinstance(variant, set):
                pkg_str = ", ".join(variant)
                label.append("(in conflict)")
                color = critical
            else:
                pkg_str = variant.qualified_package_name

            orig_tool = d["tool_name"]
            if orig_tool == tool:
                orig_tool = '-'

            label = ' '.join(label)
            source = ("context '%s' in suite '%s'"
                      % (d["context_name"], suite.load_path))
            rows.append([tool, orig_tool, pkg_str, source, label, color])
            seen.add(tool)

    _pr = Printer(buf)
    if not rows:
        _pr("No matching tools.")
        return False

    headers = [["TOOL", "ALIASING", "PACKAGE", "SOURCE", "", None],
               ["----", "--------", "-------", "------", "", None]]
    rows = headers + sorted(rows, key=lambda x: x[0].lower())
    print_colored_columns(_pr, rows)
    return True
Print a list of visible tools .
38,346
def from_path(cls, path, format=None):
    """Load a developer package from a path (file or directory)."""
    name = None
    data = None

    if format is None:
        formats = (FileFormat.py, FileFormat.yaml)
    else:
        formats = (format,)

    try:
        mode = os.stat(path).st_mode
    except (IOError, OSError):
        raise PackageMetadataError(
            "Path %r did not exist, or was not accessible" % path)
    is_dir = stat.S_ISDIR(mode)

    # Search the supported definition filenames/formats for package data.
    for name_ in config.plugins.package_repository.filesystem.package_filenames:
        for format_ in formats:
            if is_dir:
                filepath = os.path.join(path, "%s.%s"
                                        % (name_, format_.extension))
                exists = os.path.isfile(filepath)
            else:
                if format is None:
                    if os.path.splitext(path)[1] != format_.extension:
                        continue
                filepath = path
                exists = True

            if exists:
                data = load_from_file(filepath, format_,
                                      disable_memcache=True)
                break

        if data:
            name = data.get("name")
            # NOTE(review): with 'or', the isinstance clause is redundant --
            # this breaks for any non-None name; possibly 'and' was meant.
            # Preserved as-is to avoid a behavior change.
            if name is not None or isinstance(name, basestring):
                break

    if data is None:
        raise PackageMetadataError(
            "No package definition file found at %s" % path)

    if name is None or not isinstance(name, basestring):
        raise PackageMetadataError(
            "Error in %r - missing or non-string field 'name'" % filepath)

    package = create_package(name, data, package_cls=cls)

    # Preprocessing may replace both the package and its data.
    result = package._get_preprocessed(data)
    if result:
        package, data = result

    package.filepath = filepath

    # Gather the set of modules included via @include decorators.
    package.includes = set()

    def visit(d):
        for k, v in d.iteritems():
            if isinstance(v, SourceCode):
                package.includes |= (v.includes or set())
            elif isinstance(v, dict):
                visit(v)

    visit(data)

    package._validate_includes()
    return package
Load a developer package .
38,347
def dump_all(documents, stream=None, Dumper=Dumper, default_style=None,
             default_flow_style=None, canonical=None, indent=None, width=None,
             allow_unicode=None, line_break=None, encoding='utf-8',
             explicit_start=None, explicit_end=None, version=None, tags=None):
    """Serialize a sequence of Python objects into a YAML stream.

    If `stream` is None, return the produced string instead.
    """
    getvalue = None
    if stream is None:
        # No target stream: capture output and return it as a string.
        if encoding is None:
            from StringIO import StringIO
        else:
            from cStringIO import StringIO
        stream = StringIO()
        getvalue = stream.getvalue

    dumper = Dumper(stream, default_style=default_style,
                    default_flow_style=default_flow_style,
                    canonical=canonical, indent=indent, width=width,
                    allow_unicode=allow_unicode, line_break=line_break,
                    encoding=encoding, version=version, tags=tags,
                    explicit_start=explicit_start, explicit_end=explicit_end)
    try:
        dumper.open()
        for data in documents:
            dumper.represent(data)
        dumper.close()
    finally:
        dumper.dispose()

    if getvalue:
        return getvalue()
Serialize a sequence of Python objects into a YAML stream . If stream is None return the produced string instead .
38,348
def running_instances(self, context, process_name):
    """Get a list of entries for still-running instances of `process_name`
    launched under `context`."""
    handle = (id(context), process_name)
    procs = self.processes.get(handle, {}).itervalues()
    # poll() is None while a process is still running.
    return [entry for entry in procs if entry[0].poll() is None]
Get a list of running instances .
38,349
def get_public_methods(self):
    """Return a list of (name, method) pairs to expose in the rex API."""
    extras = [('getenv', self.getenv),
              ('expandvars', self.expandvars),
              ('defined', self.defined),
              ('undefined', self.undefined)]
    return self.get_action_methods() + extras
return a list of methods on this class which should be exposed in the rex API .
38,350
def apply_environ(self):
    """Apply accumulated changes to the target environ."""
    if self.manager is None:
        raise RezSystemError("You must call 'set_manager' on a Python rex "
                             "interpreter before using it.")
    self.target_environ.update(self.manager.environ)
Apply changes to target environ .
38,351
def formatted(self, func):
    """Return a copy of the string with non-literal parts passed through
    `func`; literal parts are left untouched."""
    other = EscapedString.__new__(EscapedString)
    other.strings = [(is_literal, value if is_literal else func(value))
                     for is_literal, value in self.strings]
    return other
Return the string with non - literal parts formatted .
38,352
def execute_code(self, code, filename=None, isolate=False):
    """Execute code within the execution context.

    With `isolate`, the globals are snapshotted before execution and
    restored afterwards.
    """
    def _run():
        self.compile_code(code=code, filename=filename,
                          exec_namespace=self.globals)

    if isolate:
        saved_globals = dict(self.globals)
        try:
            _run()
        finally:
            self.globals.clear()
            self.globals.update(saved_globals)
    else:
        _run()
Execute code within the execution context .
38,353
def execute_function(self, func, *nargs, **kwargs):
    """Execute a function object within the execution context."""
    import types

    # Rebuild the function over a globals dict that includes our context.
    fn = types.FunctionType(func.func_code,
                            func.func_globals.copy(),
                            name=func.func_name,
                            argdefs=func.func_defaults,
                            closure=func.func_closure)
    fn.func_globals.update(self.globals)

    # When configured, catch everything and re-raise as RexError with the
    # formatted traceback; RexErrors pass straight through.
    error_class = Exception if config.catch_rex_errors else None
    try:
        return fn(*nargs, **kwargs)
    except RexError:
        raise
    except error_class as e:
        from inspect import getfile
        stack = traceback.format_exc()
        filename = getfile(func)
        raise RexError("Failed to exec %s:\n\n%s" % (filename, stack))
Execute a function object within the execution context .
38,354
def get_output(self, style=OutputStyle.file):
    """Return the result of all previous calls to execute_code."""
    manager = self.manager
    return manager.get_output(style=style)
Returns the result of all previous calls to execute_code .
38,355
def configure(self, graph, spanning_tree):
    """Configure the filter with a graph and its spanning tree."""
    self.graph = graph
    self.spanning_tree = spanning_tree
Configure the filter .
38,356
def iter_packages(self, name, range_=None, paths=None):
    """Same as packages.iter_packages, but with this filter applied."""
    # Delegates to the module-level iter_packages, yielding only packages
    # not excluded by this filter.
    for package in iter_packages(name, range_, paths):
        if not self.excludes(package):
            yield package
Same as iter_packages in packages . py but also applies this filter .
38,357
def copy(self):
    """Return a shallow copy of the filter.

    The exclude/include rule dicts are copied; the rules they contain
    are shared with the original.
    """
    other = PackageFilter.__new__(PackageFilter)
    other._excludes = self._excludes.copy()
    other._includes = self._includes.copy()
    return other
Return a shallow copy of the filter .
38,358
def cost(self):
    """Get the approximate cost of this filter."""
    total = 0.0
    for family, rules in self._excludes.iteritems():
        family_cost = sum(x.cost() for x in rules)
        if family:
            # Family-specific rules apply less often, so weigh them less.
            family_cost = family_cost / float(10)
        total += family_cost
    return total
Get the approximate cost of this filter .
38,359
def add_filter(self, package_filter):
    """Add a filter to the list, keeping the list sorted by cost."""
    self.filters = sorted(self.filters + [package_filter],
                          key=lambda f: f.cost)
Add a filter to the list .
38,360
def copy(self):
    """Return a copy of the filter list; the filters themselves are
    copied too."""
    other = PackageFilterList.__new__(PackageFilterList)
    other.filters = [f.copy() for f in self.filters]
    return other
Return a copy of the filter list .
38,361
def parse_rule(cls, txt):
    """Parse a rule from a string such as 'glob(foo-*)'."""
    types = {"glob": GlobRule,
             "regex": RegexRule,
             "range": RangeRule,
             "before": TimestampRule,
             "after": TimestampRule}

    label, txt = Rule._parse_label(txt)
    if label is None:
        # No explicit label: globs contain '*', otherwise assume a range.
        label = "glob" if '*' in txt else "range"
    elif label not in types:
        raise ConfigurationError(
            "'%s' is not a valid package filter type" % label)

    rule_cls = types[label]
    txt_ = "%s(%s)" % (label, txt)

    try:
        rule = rule_cls._parse(txt_)
    except Exception as e:
        raise ConfigurationError(
            "Error parsing package filter '%s': %s: %s"
            % (txt_, e.__class__.__name__, str(e)))
    return rule
Parse a rule from a string .
38,362
def read_bit(self):
    """Read a single boolean value, refilling the 8-bit buffer from the
    input as needed (bits are consumed LSB first)."""
    if not self.bitcount:
        # Refill the bit buffer from the next input byte.
        self.bits = ord(self.input.read(1))
        self.bitcount = 8
    result = (self.bits & 1) == 1
    self.bits >>= 1
    self.bitcount -= 1
    return result
Read a single boolean value .
38,363
def read_long(self):
    """Read an unsigned 32-bit integer (big-endian)."""
    self.bitcount = self.bits = 0  # discard any pending bits
    return unpack('>I', self.input.read(4))[0]
Read an unsigned 32 - bit integer
38,364
def read_longlong(self):
    """Read an unsigned 64-bit integer (big-endian)."""
    self.bitcount = self.bits = 0  # discard any pending bits
    return unpack('>Q', self.input.read(8))[0]
Read an unsigned 64 - bit integer
38,365
def read_float(self):
    """Read a float value (a 64-bit big-endian double, despite the name)."""
    self.bitcount = self.bits = 0  # discard any pending bits
    return unpack('>d', self.input.read(8))[0]
Read float value .
38,366
def read_shortstr(self):
    """Read a short UTF-8 string, length-prefixed by one byte (<= 255 bytes)."""
    self.bitcount = self.bits = 0  # discard any pending bits
    slen = unpack('B', self.input.read(1))[0]
    return self.input.read(slen).decode('utf-8')
Read a short string that is stored in up to 255 bytes .
38,367
def _load_properties(self, raw_bytes):
    """
    Given the raw bytes containing the property-flags and property-list
    from a content-frame-header, parse and insert into a dictionary
    stored in this object as an attribute named 'properties'.
    """
    r = AMQPReader(raw_bytes)

    # Read the property-flag words.  Bit 0 of each 16-bit word appears
    # to be a continuation flag: while it is set, another word follows.
    flags = []
    while 1:
        flag_bits = r.read_short()
        flags.append(flag_bits)
        if flag_bits & 1 == 0:
            break

    shift = 0
    d = {}
    for key, proptype in self.PROPERTIES:
        if shift == 0:
            # Current flag word exhausted; move to the next one.  Bits
            # are consumed from 15 down to 1 (bit 0 is the continuation
            # flag and is never treated as a property flag).
            if not flags:
                break
            flag_bits, flags = flags[0], flags[1:]
            shift = 15
        if flag_bits & (1 << shift):
            # Property present: dispatch to the matching read_<type>
            # method on the reader.
            d[key] = getattr(r, 'read_' + proptype)()
        shift -= 1
    self.properties = d
Given the raw bytes containing the property-flags and property-list from a content-frame-header, parse them and insert the values into a dictionary stored in this object as an attribute named properties.
38,368
def create_executable_script(filepath, body, program=None):
    """Create an executable script.

    Args:
        filepath (str): File to create.
        body (str or callable): Contents of the script.  A callable is
            converted to its source code via rez SourceCode.
        program (str): Interpreter name for the shebang line; defaults
            to "python".
    """
    program = program or "python"

    if callable(body):
        from rez.utils.sourcecode import SourceCode
        body = SourceCode(func=body).source

    if not body.endswith('\n'):
        body += '\n'

    with open(filepath, 'w') as f:
        f.write("#!/usr/bin/env %s\n" % program)
        f.write(body)

    if os.name == "posix":
        # Mode 0555: read + execute for user, group and other.
        os.chmod(filepath,
                 stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH
                 | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
Create an executable script .
38,369
def create_forwarding_script(filepath, module, func_name, *nargs, **kwargs):
    """Create a forwarding script.

    Writes a yaml document describing the target module/function (plus
    optional args) and makes it executable under the '_rez_fwd'
    interpreter.
    """
    doc = dict(module=module, func_name=func_name)
    if nargs:
        doc["nargs"] = nargs
    if kwargs:
        doc["kwargs"] = kwargs

    content = dump_yaml(doc)
    create_executable_script(filepath, content, "_rez_fwd")
Create a forwarding script .
38,370
def dedup(seq):
    """Yield the items of `seq` in order, skipping any already seen."""
    seen = set()
    for item in seq:
        if item in seen:
            continue
        seen.add(item)
        yield item
Remove duplicates from a list while keeping order .
38,371
def find_last_sublist(list_, sublist):
    """Given a list, find the last occurrence of a sublist within it.

    Args:
        list_ (list): List to search.
        sublist (list): Sublist to look for.

    Returns:
        int or None: Index where the sublist last starts, or None if it
        is not present.  An empty sublist matches at every position, so
        len(list_) is returned (consistent with str.rfind("")).
    """
    n = len(sublist)
    if n == 0:
        # Bugfix: the original indexed list_[i] with i == len(list_) for
        # an empty sublist and raised IndexError.
        return len(list_)

    for i in reversed(range(len(list_) - n + 1)):
        # Cheap first-element check before the full slice comparison.
        if list_[i] == sublist[0] and list_[i:i + n] == sublist:
            return i
    return None
Given a list, find the last occurrence of a sublist within it.
38,372
def open(self, section_index=0):
    """Launch a help section.

    Args:
        section_index (int): Index into self._sections; each entry is a
            (label, uri-or-command) tuple.
    """
    uri = self._sections[section_index][1]
    if len(uri.split()) == 1:
        # A single token is treated as a URL.
        self._open_url(uri)
    else:
        # Multiple tokens are treated as a shell command.
        if self._verbose:
            print "running command: %s" % uri
        p = popen(uri, shell=True)
        p.wait()
Launch a help section .
38,373
def print_info(self, buf=None):
    """Print help sections to `buf` (defaults to stdout).

    Sections are shown with a 1-based index usable with open().
    """
    buf = buf or sys.stdout
    print >> buf, "Sections:"
    for i, section in enumerate(self._sections):
        # section is a (label, uri-or-command) tuple.
        print >> buf, " %s:\t%s (%s)" % (i + 1, section[0], section[1])
Print help sections .
38,374
def set_servers(self, servers):
    """Set the pool of servers used by this client."""
    hosts = []
    for spec in servers:
        hosts.append(_Host(
            spec, self.debug,
            dead_retry=self.dead_retry,
            socket_timeout=self.socket_timeout,
            flush_on_reconnect=self.flush_on_reconnect))
    self.servers = hosts
    self._init_buckets()
Set the pool of servers used by this client .
38,375
def get_stats(self, stat_args=None):
    """Get statistics from each of the servers.

    Args:
        stat_args: Additional arguments to pass to the memcache "stats"
            command.

    Returns:
        List of (server_name, stats_dict) tuples; unreachable servers
        are omitted.
    """
    data = []
    for s in self.servers:
        if not s.connect():
            continue

        if s.family == socket.AF_INET:
            name = '%s:%s (%s)' % (s.ip, s.port, s.weight)
        elif s.family == socket.AF_INET6:
            name = '[%s]:%s (%s)' % (s.ip, s.port, s.weight)
        else:
            name = 'unix:%s (%s)' % (s.address, s.weight)

        if stat_args:
            s.send_cmd('stats ' + stat_args)
        else:
            s.send_cmd('stats')

        serverData = {}
        data.append((name, serverData))
        readline = s.readline
        while 1:
            line = readline()
            if not line or line.strip() in ('END', 'RESET'):
                break
            stats = line.split(' ', 2)
            serverData[stats[1]] = stats[2]

    return data
Get statistics from each of the servers .
38,376
def delete_multi(self, keys, time=0, key_prefix=''):
    """Delete multiple keys in the memcache doing just one query.

    Returns:
        1 on success, 0 if any server failed.
    """
    self._statlog('delete_multi')

    server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(
        keys, key_prefix)

    dead_servers = []

    rc = 1
    for server in server_keys.iterkeys():
        # Batch all delete commands for this server into one send.
        bigcmd = []
        write = bigcmd.append
        if time != None:
            for key in server_keys[server]:
                write("delete %s %d\r\n" % (key, time))
        else:
            for key in server_keys[server]:
                write("delete %s\r\n" % key)
        try:
            server.send_cmds(''.join(bigcmd))
        except socket.error, msg:
            rc = 0
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
            dead_servers.append(server)

    # Don't read responses from servers that died during send.
    for server in dead_servers:
        del server_keys[server]

    for server, keys in server_keys.iteritems():
        try:
            for key in keys:
                server.expect("DELETED")
        except socket.error, msg:
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
            rc = 0
    return rc
Delete multiple keys in the memcache doing just one query .
38,377
def delete(self, key, time=0):
    """Deletes a key from the memcache.

    Returns:
        1 if the server replied DELETED or NOT_FOUND, 0 otherwise.
    """
    if self.do_check_key:
        self.check_key(key)
    server, key = self._get_server(key)
    if not server:
        return 0
    self._statlog('delete')
    if time != None and time != 0:
        cmd = "delete %s %d" % (key, time)
    else:
        cmd = "delete %s" % key

    try:
        server.send_cmd(cmd)
        line = server.readline()
        # NOT_FOUND also counts as success: the key is gone either way.
        if line and line.strip() in ['DELETED', 'NOT_FOUND']:
            return 1
        self.debuglog('Delete expected DELETED or NOT_FOUND, got: %s'
                      % repr(line))
    except socket.error, msg:
        if isinstance(msg, tuple):
            msg = msg[1]
        server.mark_dead(msg)
    return 0
Deletes a key from the memcache .
38,378
def add(self, key, val, time=0, min_compress_len=0):
    """Add new key with value.

    Delegates to the generic _set() command handler with the "add"
    command.
    """
    return self._set("add", key, val, time, min_compress_len)
Add new key with value .
38,379
def append(self, key, val, time=0, min_compress_len=0):
    """Append the value to the end of the existing key's value.

    Delegates to the generic _set() command handler with the "append"
    command.
    """
    return self._set("append", key, val, time, min_compress_len)
Append the value to the end of the existing key's value.
38,380
def prepend(self, key, val, time=0, min_compress_len=0):
    """Prepend the value to the beginning of the existing key's value.

    Delegates to the generic _set() command handler with the "prepend"
    command.
    """
    return self._set("prepend", key, val, time, min_compress_len)
Prepend the value to the beginning of the existing key's value.
38,381
def replace(self, key, val, time=0, min_compress_len=0):
    """Replace existing key with value.

    Delegates to the generic _set() command handler with the "replace"
    command.
    """
    return self._set("replace", key, val, time, min_compress_len)
Replace existing key with value .
38,382
def set(self, key, val, time=0, min_compress_len=0):
    """Unconditionally sets a key to a given value in the memcache.

    Delegates to the generic _set() command handler with the "set"
    command.
    """
    return self._set("set", key, val, time, min_compress_len)
Unconditionally sets a key to a given value in the memcache .
38,383
def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
    """Sets multiple keys in the memcache doing just one query.

    Returns:
        A list of original keys that failed to be stored (empty list on
        full success).
    """
    self._statlog('set_multi')

    server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(
        mapping.iterkeys(), key_prefix)

    dead_servers = []
    notstored = []  # original keys whose values were not stored

    # Batch all 'set' commands per server into one send.
    for server in server_keys.iterkeys():
        bigcmd = []
        write = bigcmd.append
        try:
            for key in server_keys[server]:
                store_info = self._val_to_store_info(
                    mapping[prefixed_to_orig_key[key]], min_compress_len)
                if store_info:
                    write("set %s %d %d %d\r\n%s\r\n" % (
                        key, store_info[0], time, store_info[1],
                        store_info[2]))
                else:
                    # _val_to_store_info returned nothing - presumably
                    # the value could not be converted for storage.
                    notstored.append(prefixed_to_orig_key[key])
            server.send_cmds(''.join(bigcmd))
        except socket.error, msg:
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
            dead_servers.append(server)

    # Don't read responses from servers that died during send.
    for server in dead_servers:
        del server_keys[server]

    if not server_keys:
        # Every server failed; nothing was stored.
        return (mapping.keys())

    # Read one STORED/other response per key, in send order.
    for server, keys in server_keys.iteritems():
        try:
            for key in keys:
                if server.readline() == 'STORED':
                    continue
                else:
                    notstored.append(prefixed_to_orig_key[key])
        except (_Error, socket.error), msg:
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
    return notstored
Sets multiple keys in the memcache doing just one query .
38,384
def get_multi(self, keys, key_prefix=''):
    """Retrieves multiple keys from the memcache doing just one query.

    Returns:
        Dictionary mapping original (unprefixed) keys to their values;
        missing keys are simply absent.
    """
    self._statlog('get_multi')

    server_keys, prefixed_to_orig_key = self._map_and_prefix_keys(
        keys, key_prefix)

    # Send one batched 'get' per server before reading anything back.
    dead_servers = []
    for server in server_keys.iterkeys():
        try:
            server.send_cmd("get %s" % " ".join(server_keys[server]))
        except socket.error, msg:
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
            dead_servers.append(server)

    # Don't read from servers that died during send.
    for server in dead_servers:
        del server_keys[server]

    retvals = {}
    for server in server_keys.iterkeys():
        try:
            # Responses are VALUE lines terminated by END.
            line = server.readline()
            while line and line != 'END':
                rkey, flags, rlen = self._expectvalue(server, line)
                if rkey is not None:
                    val = self._recv_value(server, flags, rlen)
                    retvals[prefixed_to_orig_key[rkey]] = val
                line = server.readline()
        except (_Error, socket.error), msg:
            if isinstance(msg, tuple):
                msg = msg[1]
            server.mark_dead(msg)
    return retvals
Retrieves multiple keys from the memcache doing just one query .
38,385
def readline(self, raise_exception=False):
    """Read a single line from the server, without the trailing CRLF.

    If raise_exception is set, raise _ConnectionDeadError if the read
    fails; otherwise return an empty string.
    """
    buf = self.buffer
    if self.socket:
        recv = self.socket.recv
    else:
        recv = lambda bufsize: ''

    while True:
        index = buf.find('\r\n')
        if index >= 0:
            break
        data = recv(4096)
        if not data:
            # Connection dropped mid-line.
            self.mark_dead('connection closed in readline()')
            if raise_exception:
                raise _ConnectionDeadError()
            return ''
        buf += data

    # Stash everything after the CRLF for the next call.
    self.buffer = buf[index + 2:]
    return buf[:index]
Read a line and return it . If raise_exception is set raise _ConnectionDeadError if the read fails otherwise return an empty string .
38,386
def create_repository(cls, repository_data):
    """Create a standalone, in-memory package repository from raw data."""
    # Use the data's id to give the repository a unique location string.
    location = "memory{%s}" % hex(id(repository_data))
    pool = ResourcePool(cache_size=None)
    repo = MemoryPackageRepository(location, pool)
    repo.data = repository_data
    return repo
Create a standalone in - memory repository .
38,387
def read_file(self, fileob):
    """Read the metadata values from a file object.

    The file is parsed with message_from_file, i.e. as a set of
    email-style headers.
    """
    msg = message_from_file(fileob)
    self._fields['Metadata-Version'] = msg['metadata-version']

    for field in _ALL_FIELDS:
        if field not in msg:
            continue
        if field in _LISTFIELDS:
            # Repeated headers become a list value.
            values = msg.get_all(field)
            if field in _LISTTUPLEFIELDS and values is not None:
                # Comma-separated entries become tuples.
                values = [tuple(value.split(',')) for value in values]
            self.set(field, values)
        else:
            value = msg[field]
            if value is not None and value != 'UNKNOWN':
                self.set(field, value)

    self.set_metadata_version()
Read the metadata values from a file object .
38,388
def check(self, strict=False):
    """Check if the metadata is compliant.

    If strict is True, raise MetadataMissingError if Name or Version
    are not provided.

    Returns:
        (missing, warnings) 2-tuple of lists of field names / messages.
    """
    self.set_metadata_version()

    missing, warnings = [], []

    for attr in ('Name', 'Version'):
        if attr not in self:
            missing.append(attr)

    if strict and missing != []:
        msg = 'missing required metadata: %s' % ', '.join(missing)
        raise MetadataMissingError(msg)

    # These fields are only ever reported as missing, never raised for.
    for attr in ('Home-page', 'Author'):
        if attr not in self:
            missing.append(attr)

    # The constraint checks below only apply to metadata version 1.2.
    if self['Metadata-Version'] != '1.2':
        return missing, warnings

    scheme = get_scheme(self.scheme)

    def are_valid_constraints(value):
        for v in value:
            # Strip any environment-marker suffix after ';' before
            # validating the matcher.
            if not scheme.is_valid_matcher(v.split(';')[0]):
                return False
        return True

    for fields, controller in ((_PREDICATE_FIELDS, are_valid_constraints),
                               (_VERSIONS_FIELDS,
                                scheme.is_valid_constraint_list),
                               (_VERSION_FIELDS,
                                scheme.is_valid_version)):
        for field in fields:
            value = self.get(field, None)
            if value is not None and not controller(value):
                warnings.append('Wrong value for %r: %s' % (field, value))

    return missing, warnings
Check if the metadata is compliant . If strict is True then raise if no Name or Version are provided
38,389
def create_pane(widgets, horizontal, parent_widget=None, compact=False,
                compact_spacing=2):
    """Create a widget containing an aligned set of widgets.

    Args:
        widgets: Iterable of entries; a widget is added directly, a
            (widget, int) tuple supplies a stretch factor, a plain int
            adds fixed spacing, and a falsey entry adds a stretch.
        horizontal (bool): Lay out horizontally if True, else vertically.
        parent_widget: Existing widget to install the layout on; a new
            QWidget is created if None.
        compact (bool): Use small spacing and margins.
        compact_spacing (int): Spacing/margin used when compact is set.

    Returns:
        The pane widget.
    """
    pane = parent_widget or QtGui.QWidget()
    layout_cls = QtGui.QHBoxLayout if horizontal else QtGui.QVBoxLayout
    layout = layout_cls()

    if compact:
        layout.setSpacing(compact_spacing)
        layout.setContentsMargins(compact_spacing, compact_spacing,
                                  compact_spacing, compact_spacing)

    for entry in widgets:
        stretch = 0
        if isinstance(entry, tuple):
            entry, stretch = entry

        if isinstance(entry, int):
            layout.addSpacing(entry)
        elif entry:
            layout.addWidget(entry, stretch)
        else:
            layout.addStretch()

    pane.setLayout(layout)
    return pane
Create a widget containing an aligned set of widgets .
38,390
def get_icon(name, as_qicon=False):
    """Return the named image as a QPixmap, or a QIcon if as_qicon is True.

    Falls back to the 'pink.png' placeholder when the named image does
    not exist.  Loaded pixmaps are cached in the module-level `icons`
    dict.
    """
    filename = name + ".png"
    icon = icons.get(filename)
    if not icon:
        icon_dir = os.path.join(os.path.dirname(__file__), "icons")
        filepath = os.path.join(icon_dir, filename)
        if not os.path.exists(filepath):
            filepath = os.path.join(icon_dir, "pink.png")
        icon = QtGui.QPixmap(filepath)
        icons[filename] = icon

    if as_qicon:
        return QtGui.QIcon(icon)
    return icon
Returns a QPixmap containing the given image or a QIcon if as_qicon is True
38,391
def interp_color(a, b, f):
    """Linearly interpolate between two colors.

    Args:
        a (QColor): Color returned at f == 0.
        b (QColor): Color returned at f == 1.
        f (float): Blend fraction.

    Returns:
        QColor: Per-channel blend of `a` and `b` (alpha is not blended).
    """
    channels = []
    for ca, cb in zip((a.redF(), a.greenF(), a.blueF()),
                      (b.redF(), b.greenF(), b.blueF())):
        channels.append(ca * (1 - f) + cb * f)
    return QtGui.QColor.fromRgbF(*channels)
Interpolate between two colors .
38,392
def create_toolbutton(entries, parent=None):
    """Create a toolbutton.

    Args:
        entries: List of (label, slot) tuples; the first entry becomes
            the button's default action.
        parent: Optional parent widget.

    Returns:
        (QToolButton, list-of-QAction) tuple.
    """
    btn = QtGui.QToolButton(parent)
    menu = QtGui.QMenu()
    actions = [add_menu_action(menu, label, slot)
               for label, slot in entries]

    btn.setPopupMode(QtGui.QToolButton.MenuButtonPopup)
    btn.setDefaultAction(actions[0])
    btn.setMenu(menu)
    return btn, actions
Create a toolbutton .
38,393
def get_page(self, url):
    """Get the HTML for an URL, possibly from an in-memory cache.

    Returns a Page instance, or None if the fetch failed or the content
    was not HTML.  Hosts that fail with URLError are remembered and
    skipped on later calls.
    """
    scheme, netloc, path, _, _, _ = urlparse(url)
    if scheme == 'file' and os.path.isdir(url2pathname(path)):
        # Local directory: fetch its index.html instead.
        url = urljoin(ensure_slash(url), 'index.html')

    if url in self._page_cache:
        result = self._page_cache[url]
        logger.debug('Returning %s from cache: %s', url, result)
    else:
        host = netloc.split(':', 1)[0]
        result = None
        if host in self._bad_hosts:
            # Host previously failed with URLError; don't retry it.
            logger.debug('Skipping %s due to bad host %s', url, host)
        else:
            req = Request(url, headers={'Accept-encoding': 'identity'})
            try:
                logger.debug('Fetching %s', url)
                resp = self.opener.open(req, timeout=self.timeout)
                logger.debug('Fetched %s', url)
                headers = resp.info()
                content_type = headers.get('Content-Type', '')
                if HTML_CONTENT_TYPE.match(content_type):
                    final_url = resp.geturl()
                    data = resp.read()
                    encoding = headers.get('Content-Encoding')
                    if encoding:
                        # Content-Encoding (e.g. compression); decoders
                        # maps the encoding name to a callable.
                        decoder = self.decoders[encoding]
                        data = decoder(data)
                    # Charset from Content-Type wins; default utf-8.
                    encoding = 'utf-8'
                    m = CHARSET.search(content_type)
                    if m:
                        encoding = m.group(1)
                    try:
                        data = data.decode(encoding)
                    except UnicodeError:
                        # latin-1 can decode any byte sequence.
                        data = data.decode('latin-1')
                    result = Page(data, final_url)
                    # Cache under the post-redirect URL too.
                    self._page_cache[final_url] = result
            except HTTPError as e:
                # 404s are expected while probing; don't log them.
                if e.code != 404:
                    logger.exception('Fetch failed: %s: %s', url, e)
            except URLError as e:
                logger.exception('Fetch failed: %s: %s', url, e)
                with self._lock:
                    self._bad_hosts.add(host)
            except Exception as e:
                logger.exception('Fetch failed: %s: %s', url, e)
            finally:
                # Cache the outcome (possibly None) for the request URL.
                self._page_cache[url] = result
    return result
Get the HTML for an URL possibly from an in - memory cache .
38,394
def get_reverse_dependency_tree(package_name, depth=None, paths=None,
                                build_requires=False,
                                private_build_requires=False):
    """Find packages that depend on the given package.

    Args:
        package_name (str): Root package family name.
        depth (int or None): Maximum dependency depth; None for no limit.
        paths: Package search paths.
        build_requires (bool): Include build requirements.
        private_build_requires (bool): Include private build requirements
            (applied to the root package only).

    Returns:
        2-tuple:
        - list of lists of str: Package names grouped by dependency
          depth (the first group is just [package_name]);
        - digraph: Graph with an edge from each dependent package to its
          dependency.
    """
    pkgs_list = [[package_name]]
    g = digraph()
    g.add_node(package_name)

    it = iter_package_families(paths)
    package_names = set(x.name for x in it)
    if package_name not in package_names:
        raise PackageFamilyNotFoundError(
            "No such package family %r" % package_name)

    if depth == 0:
        return pkgs_list, g

    # Build a reverse-dependency lookup: dependency name -> set of
    # family names whose latest package requires it.
    bar = ProgressBar("Searching", len(package_names))
    lookup = defaultdict(set)

    for i, package_name_ in enumerate(package_names):
        it = iter_packages(name=package_name_, paths=paths)
        packages = list(it)
        if not packages:
            continue

        # Only the latest version of each family is inspected.
        pkg = max(packages, key=lambda x: x.version)
        requires = []
        for variant in pkg.iter_variants():
            # Private build requires only apply to the root package.
            pbr = (private_build_requires and pkg.name == package_name)
            requires += variant.get_requires(
                build_requires=build_requires,
                private_build_requires=pbr)

        for req in requires:
            if not req.conflict:
                lookup[req.name].add(package_name_)

        bar.next()

    bar.finish()

    # Breadth-first walk outward from the root, one depth level per
    # iteration.
    n = 0
    consumed = set([package_name])
    working_set = set([package_name])

    node_color = "#F6F6F6"
    node_fontsize = 10
    node_attrs = [("fillcolor", node_color),
                  ("style", "filled"),
                  ("fontsize", node_fontsize)]

    while working_set and (depth is None or n < depth):
        working_set_ = set()

        for child in working_set:
            parents = lookup[child] - consumed
            working_set_.update(parents)
            consumed.update(parents)

            for parent in parents:
                g.add_node(parent, attrs=node_attrs)
                g.add_edge((parent, child))

        if working_set_:
            pkgs_list.append(sorted(list(working_set_)))

        working_set = working_set_
        n += 1

    return pkgs_list, g
Find packages that depend on the given package .
38,395
def get_plugins(package_name, paths=None):
    """Find packages that are plugins of the given package.

    Args:
        package_name (str): Name of the package to find plugins for.
        paths: Package search paths; None for default.

    Returns:
        list of str: Names of packages that declare themselves plugins
        of the given package.
    """
    pkg = get_latest_package(package_name, paths=paths, error=True)
    if not pkg.has_plugins:
        return []

    it = iter_package_families(paths)
    package_names = set(x.name for x in it)
    bar = ProgressBar("Searching", len(package_names))

    plugin_pkgs = []
    for package_name_ in package_names:
        bar.next()
        if package_name_ == package_name:
            continue  # not a plugin of itself

        plugin_pkg = get_latest_package(package_name_, paths=paths)

        # Bugfix: unlike the first call above, this one does not pass
        # error=True, so a family with no loadable package yields None;
        # the original code then crashed on `.plugin_for`.
        if plugin_pkg is None or not plugin_pkg.plugin_for:
            continue
        for plugin_for in plugin_pkg.plugin_for:
            if plugin_for == pkg.name:
                plugin_pkgs.append(package_name_)

    bar.finish()
    return plugin_pkgs
Find packages that are plugins of the given package .
38,396
def search(self, resources_request=None):
    """Search for resources.

    Args:
        resources_request (str): Resource request to parse; None to
            match everything.

    Returns:
        2-tuple:
        - str: Type of resource searched ("family", "package" or
          "variant");
        - list of ResourceSearchResult: Matching resources.
    """
    name_pattern, version_range = self._parse_request(resources_request)

    family_names = set(
        x.name for x in iter_package_families(paths=self.package_paths)
        if fnmatch.fnmatch(x.name, name_pattern))
    family_names = sorted(family_names)

    # Determine what type of resource to search for.  Without an
    # explicit type: a version range or a single matching family
    # implies packages, otherwise just list families.
    if self.resource_type:
        resource_type = self.resource_type
    elif version_range or len(family_names) == 1:
        resource_type = "package"
    else:
        resource_type = "family"

    if not family_names:
        return resource_type, []

    # Family search needs no per-package work - return immediately.
    if resource_type == "family":
        results = [ResourceSearchResult(x, "family")
                   for x in family_names]
        return "family", results

    results = []
    for name in family_names:
        it = iter_packages(name, version_range, paths=self.package_paths)
        packages = sorted(it, key=lambda x: x.version)
        if self.latest and packages:
            packages = [packages[-1]]

        for package in packages:
            # Apply timestamp filters and optional validation.  A
            # package that fails validation is still reported (with its
            # error message) when searching for packages.
            try:
                if package.timestamp:
                    if self.after_time and package.timestamp < self.after_time:
                        continue
                    if self.before_time and package.timestamp >= self.before_time:
                        continue

                if self.validate:
                    package.validate_data()
            except ResourceContentError as e:
                if resource_type == "package":
                    result = ResourceSearchResult(package, "package", str(e))
                    results.append(result)
                continue

            if resource_type == "package":
                result = ResourceSearchResult(package, "package")
                results.append(result)
                continue

            # resource_type == "variant": expand the package into its
            # variants, validating each if requested.
            try:
                for variant in package.iter_variants():
                    if self.validate:
                        try:
                            variant.validate_data()
                        except ResourceContentError as e:
                            result = ResourceSearchResult(
                                variant, "variant", str(e))
                            results.append(result)
                            continue

                    result = ResourceSearchResult(variant, "variant")
                    results.append(result)
            except ResourceContentError:
                # Variants could not be iterated; skip this package.
                continue

    return resource_type, results
Search for resources .
38,397
def print_search_results(self, search_results, buf=sys.stdout):
    """Print formatted search results to `buf`."""
    pr = Printer(buf)
    for txt, style in self.format_search_results(search_results):
        pr(txt, style)
Print formatted search results .
38,398
def format_search_results(self, search_results):
    """Format search results into a flat list of printable lines."""
    lines = []
    for result in search_results:
        lines.extend(self._format_search_result(result))
    return lines
Format search results .
38,399
def read(string):
    """Read a graph from a string in Dot language and return it.

    Nodes and edges specified in the input will be added to the current
    graph.

    Returns:
        A graph or digraph instance depending on the Dot graph type;
        hypergraphs are delegated to read_hypergraph().

    Raises:
        InvalidGraphType: If the Dot data is of an unknown graph type.
    """
    dotG = pydot.graph_from_dot_data(string)

    # Instantiate the matching graph class.
    if (dotG.get_type() == "graph"):
        G = graph()
    elif (dotG.get_type() == "digraph"):
        G = digraph()
    elif (dotG.get_type() == "hypergraph"):
        return read_hypergraph(string)
    else:
        raise InvalidGraphType

    # Add nodes and their attributes.
    for each_node in dotG.get_nodes():
        G.add_node(each_node.get_name())
        for each_attr_key, each_attr_val in each_node.get_attributes().items():
            G.add_node_attribute(each_node.get_name(),
                                 (each_attr_key, each_attr_val))

    # Add edges; endpoints not declared as nodes are added implicitly.
    for each_edge in dotG.get_edges():
        if not G.has_node(each_edge.get_source()):
            G.add_node(each_edge.get_source())
        if not G.has_node(each_edge.get_destination()):
            G.add_node(each_edge.get_destination())

        # Weight defaults to 1 and label to '' when absent.
        if 'weight' in each_edge.get_attributes().keys():
            _wt = each_edge.get_attributes()['weight']
        else:
            _wt = 1
        if 'label' in each_edge.get_attributes().keys():
            _label = each_edge.get_attributes()['label']
        else:
            _label = ''

        G.add_edge((each_edge.get_source(), each_edge.get_destination()),
                   wt=_wt, label=_label)

        # Remaining edge attributes are attached separately.
        for each_attr_key, each_attr_val in each_edge.get_attributes().items():
            if not each_attr_key in ['weight', 'label']:
                G.add_edge_attribute(
                    (each_edge.get_source(), each_edge.get_destination()),
                    (each_attr_key, each_attr_val))

    return G