idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
44,200
def find_ip(family=AF_INET, flavour="opendns"):
    """Find the publicly visible IP address of the current system.

    Queries a "what's my IP" DNS service over the given address family.

    :param family: AF_INET or AF_INET6
    :param flavour: key into the table of known detection services
        (currently only "opendns")
    :returns: the detected address as a string, or None
    """
    flavours = {
        "opendns": {
            AF_INET: {
                "@": ("resolver1.opendns.com", "resolver2.opendns.com"),
                "qname": "myip.opendns.com",
                "rdtype": "A",
            },
            AF_INET6: {
                "@": ("resolver1.ipv6-sandbox.opendns.com",
                      "resolver2.ipv6-sandbox.opendns.com"),
                "qname": "myip.opendns.com",
                "rdtype": "AAAA",
            },
        },
    }
    # BUG FIX: the flavour argument used to be ignored (hard-coded to
    # "opendns"); honour it so additional flavours can be added.
    flavour = flavours[flavour]
    resolver = dns.resolver.Resolver()
    resolver.nameservers = [next(iter(resolve(h, family=family)))
                            for h in flavour[family]["@"]]
    answers = resolver.query(qname=flavour[family]["qname"],
                             rdtype=flavour[family]["rdtype"])
    for rdata in answers:
        return rdata.address
    return None
Find the publicly visible IP address of the current system .
44,201
def detect(self):
    """Detect the WAN IP of the current process through DNS.

    Stores the detected address as the current value and returns it.
    """
    address = find_ip(family=self.opts_family)
    self.set_current_value(address)
    return address
Detect the WAN IP of the current process through DNS .
44,202
def parse_cmdline_args(args, classes):
    """Parse all updater and detector related arguments from args.

    :param args: parsed argparse namespace (must not be None)
    :param classes: plugin classes to gather init arguments for
    :returns: dict mapping prefix -> list of (name, initargs) tuples
    :raises ValueError: if args is None
    """
    if args is None:
        raise ValueError("args must not be None")
    parsed_args = {}
    for kls in classes:
        prefix = kls.configuration_key_prefix()
        name = kls.configuration_key
        # only gather options for plugins the user actually enabled
        if not getattr(args, "%s_%s" % (prefix, name), False):
            continue
        logging.debug("Gathering initargs for '%s.%s'", prefix, name)
        initargs = {}
        for arg_name in kls.init_argnames():
            value = getattr(args, "%s_%s_%s" % (prefix, name, arg_name))
            if value is not None:
                initargs[arg_name] = value
        parsed_args.setdefault(prefix, []).append((name, initargs))
    return parsed_args
Parse all updater and detector related arguments from args .
44,203
def register_arguments(cls, parser):
    """Register command line options for this plugin class.

    Adds an on/off flag plus one option per init argument; arguments
    that have defaults advertise them in the generated help text.
    """
    if hasattr(cls, "_dont_register_arguments"):
        return
    prefix = cls.configuration_key_prefix()
    cfgkey = cls.configuration_key
    parser.add_argument(
        "--%s-%s" % (prefix, cfgkey), action="store_true",
        dest="%s_%s" % (prefix, cfgkey), default=False,
        help="%s: %s" % (cls.__name__, cls.help()))
    args = cls.init_argnames()
    defaults = cls._init_argdefaults()
    # arguments without defaults come first in the argname list
    split = len(args) - len(defaults)
    for arg in args[:split]:
        parser.add_argument(
            "--%s-%s-%s" % (prefix, cfgkey, arg),
            dest="%s_%s_%s" % (prefix, cfgkey, arg), help="")
    for i, arg in enumerate(args[split:]):
        parser.add_argument(
            "--%s-%s-%s" % (prefix, cfgkey, arg),
            dest="%s_%s_%s" % (prefix, cfgkey, arg),
            default=defaults[i], help="default: %(default)s")
Register command line options .
44,204
def tag_patches_branch(package, local_patches_branch, patches_branch,
                       force=False, push=False):
    """Tag the local patches branch with this package's NVR.

    Optionally force-overwrites an existing tag and pushes it to the
    remote that the patches branch lives on.
    """
    version_release = specfile.Spec().get_vr(epoch=False)
    nvr_tag = '%s-%s' % (package, version_release)
    cmd = ['tag', nvr_tag, local_patches_branch]
    if force:
        cmd.append('-f')
    git(*cmd)
    if not push:
        print('Not pushing tag. Run "git push patches %s" by hand.' % nvr_tag)
        return
    # the remote name is everything before the first '/' of the branch
    patches_remote = patches_branch.partition('/')[0]
    git('push', patches_remote, nvr_tag)
Tag the local_patches_branch with this package's NVR.
44,205
def load_class(module_name, class_name):
    """Return the class object specified by module and class name.

    Emits an ImportWarning and returns None when the module cannot be
    imported.
    """
    try:
        plugmod = import_module(module_name)
    except Exception as exc:
        # BUG FIX: the message combined '%r' with repr(exc), rendering
        # the exception double-quoted; use %s with the explicit repr.
        warn("Importing built-in plugin %s.%s raised an exception: %s"
             % (module_name, class_name, repr(exc)), ImportWarning)
        return None
    else:
        return getattr(plugmod, class_name)
Return class object specified by module name and class name .
44,206
def find_class(name, classes):
    """Return the class in classes identified by configuration key name.

    Matching is case-insensitive; raises ValueError when nothing matches.
    """
    wanted = name.lower()
    for candidate in classes:
        if candidate.configuration_key == wanted:
            return candidate
    raise ValueError("No class named '%s' could be found" % wanted)
Return class in classes identified by configuration key name .
44,207
def decamel(string):
    """Split CamelCased words by inserting a space before inner capitals."""
    # \B ensures a capital at the very start of a word is left alone
    return re.sub(r'(\B[A-Z][a-z]*)', r' \1', string)
Split CamelCased words .
44,208
def decamel_to_snake(string):
    """Convert to snake_case: CamelCase -> camel_case.

    Space-separated words are joined with underscores; all-uppercase
    words are only lowercased, never split.
    """
    parts = []
    for word in string.split():
        parts.append(word.lower() if word.isupper() else decamel(word))
    return "_".join(snake(part) for part in parts)
Convert to lower case and join camel case words with underscores. CamelCase -> camel_case. Camel Case -> camel_case.
44,209
def info_file(distro=None):
    """Return the default distroinfo info file for a distro.

    Falls back to cfg['DISTRO'] when distro is not given.

    :raises exception.InvalidUsage: when no matching config option exists
    """
    if not distro:
        distro = cfg['DISTRO']
    option = distro.upper() + 'INFO_FILE'
    try:
        return cfg[option]
    except KeyError:
        raise exception.InvalidUsage(
            why="Couldn't find config option %s for distro: %s"
                % (option, distro))
Return default distroinfo info file
44,210
def get_distroinfo(distro=None):
    """Get a DistroInfo initialized from configuration.

    Prefers a git info repo (<DISTRO>INFO_REPO) and falls back to a
    raw URL (<DISTRO>INFO_RAW_URL); raises InvalidUsage when neither
    config option is present.
    """
    if not distro:
        distro = cfg['DISTRO']
    _info_file = info_file(distro)
    git_info_url_conf = distro.upper() + 'INFO_REPO'
    try:
        return DistroInfo(_info_file, remote_git_info=cfg[git_info_url_conf])
    except KeyError:
        pass
    remote_info_url_conf = distro.upper() + 'INFO_RAW_URL'
    try:
        return DistroInfo(_info_file, remote_info=cfg[remote_info_url_conf])
    except KeyError:
        raise exception.InvalidUsage(
            why="Couldn't find config option %s or %s for distro: %s"
                % (git_info_url_conf, remote_info_url_conf, distro))
Get DistroInfo initialized from configuration
44,211
def spec_fn(spec_dir='.'):
    """Return the filename of the single .spec file in spec_dir.

    :raises exception.SpecFileNotFound: when no .spec file is present
    :raises exception.MultipleSpecFilesFound: when several are present
    """
    # BUG FIX: the isfile() check used the bare name, so it tested
    # against the current working directory instead of spec_dir.
    specs = [f for f in os.listdir(spec_dir)
             if os.path.isfile(os.path.join(spec_dir, f))
             and f.endswith('.spec')]
    if not specs:
        raise exception.SpecFileNotFound()
    if len(specs) != 1:
        raise exception.MultipleSpecFilesFound()
    return specs[0]
Return the filename for a . spec file in this directory .
44,212
def get_patches_ignore_regex(self):
    """Return a compiled regex for filtering out patches, or None.

    Looks for a '# patches_ignore=<regex>' marker in the spec text;
    returns None when the marker is absent or the regex is invalid.
    """
    match = re.search(r'# *patches_ignore=([\w *.+?[\]|{,}\-_]+)', self.txt)
    if match is None:
        return None
    try:
        return re.compile(match.group(1))
    except Exception:
        # an unparsable pattern is treated the same as no marker
        return None
Returns a string representing a regex for filtering out patches
44,213
def get_vr(self, epoch=None):
    """Get a V-R string from .spec Version, Release and Epoch.

    :param epoch: True forces an epoch prefix (defaulting to '0'),
        False omits it, None includes it only when an Epoch tag exists.
    """
    version = self.get_tag('Version', expand_macros=True)
    found_epoch = None
    if epoch is None or epoch:
        try:
            found_epoch = self.get_tag('Epoch')
        except exception.SpecFileParseError:
            pass
    if epoch is None and found_epoch:
        epoch = True
    if epoch:
        version = '%s:%s' % (found_epoch or '0', version)
    release = self.get_tag('Release')
    # strip the trailing %{?dist} macro before expanding the rest
    release = re.sub(r'%\{?\??dist\}?$', '', release)
    release = self.expand_macro(release)
    if release:
        return '%s-%s' % (version, release)
    return version
get VR string from . spec Version Release and Epoch
44,214
def get_nvr(self, epoch=None):
    """Get an N-V-R string from .spec Name, Version, Release and Epoch."""
    return '%s-%s' % (self.get_tag('Name', expand_macros=True),
                      self.get_vr(epoch=epoch))
get NVR string from . spec Name Version Release and Epoch
44,215
def detect_ip(kind):
    """Detect a local IP address by "connecting" a UDP socket outward.

    No packets are actually sent; the kernel just picks a source
    address for the route to the outside host.

    :param kind: one of IPV4, IPV6_PUBLIC, IPV6_TMP, IPV6_ANY
    :raises ValueError: on an unknown kind
    :raises GetIpException: when detection is impossible
    """
    if kind not in (IPV4, IPV6_PUBLIC, IPV6_TMP, IPV6_ANY):
        raise ValueError("invalid kind specified")
    family = socket.AF_INET if kind == IPV4 else socket.AF_INET6
    sock = socket.socket(family, socket.SOCK_DGRAM)
    try:
        if kind in (IPV6_PUBLIC, IPV6_TMP):
            # ask the kernel for a public vs. temporary source address
            try:
                if kind == IPV6_PUBLIC:
                    preference = socket.IPV6_PREFER_SRC_PUBLIC
                elif kind == IPV6_TMP:
                    preference = socket.IPV6_PREFER_SRC_TMP
                sock.setsockopt(socket.IPPROTO_IPV6,
                                socket.IPV6_ADDR_PREFERENCES, preference)
            except socket.error as err:
                if err.errno == errno.ENOPROTOOPT:
                    raise GetIpException(
                        "Kernel doesn't support IPv6 address preference")
                else:
                    raise GetIpException(
                        "Unable to set IPv6 address preference: %s" % err)
        try:
            outside_ip = OUTSIDE_IPV4 if kind == IPV4 else OUTSIDE_IPV6
            # port 9 is the "discard" service
            sock.connect((outside_ip, 9))
        except (socket.error, socket.gaierror) as err:
            raise GetIpException(str(err))
        ip = sock.getsockname()[0]
    finally:
        sock.close()
    return ip
Detect IP address .
44,216
def setup_kojiclient(profile):
    """Set up and return an SSL-authenticated koji client session."""
    opts = koji.read_config(profile)
    # expand '~' in any string-valued option (certificate paths etc.)
    for key, val in opts.iteritems():
        opts[key] = os.path.expanduser(val) if type(val) is str else val
    kojiclient = koji.ClientSession(opts['server'], opts=opts)
    kojiclient.ssl_login(opts['cert'], None, opts['serverca'])
    return kojiclient
Setup koji client session
44,217
def retrieve_sources():
    """Retrieve sources using spectool; a no-op when it is unavailable."""
    spectool = find_executable('spectool')
    if not spectool:
        log.warn('spectool is not installed')
        return
    try:
        specfile = spec_fn()
    except Exception:
        # best effort: silently skip when no spec file can be located
        return
    # SECURITY/BUG FIX: invoke spectool without a shell so spec file
    # names containing spaces or shell metacharacters are passed intact.
    output = subprocess.check_output([spectool, "-g", specfile])
    log.warn(output)
Retrieve sources using spectool
44,218
def create_srpm(dist='el7'):
    """Create an srpm; requires sources in the local directory.

    :returns: path of the built source rpm, or None when no spec file
        can be parsed
    :raises RpmModuleNotAvailable: when the rpm python module is missing
    """
    if not RPM_AVAILABLE:
        raise RpmModuleNotAvailable()
    path = os.getcwd()
    try:
        specfile = spec_fn()
        spec = Spec(specfile)
    except Exception:
        return
    # NOTE(review): '_sourcedir' is set to '.{dist}' here but the
    # --define below overrides it with the real path — looks like a
    # copy/paste slip carried over from the original; verify.
    rpm.addMacro('_sourcedir', '.{}'.format(dist))
    rpm.addMacro('dist', '.{}'.format(dist))
    module_name = spec.get_tag('Name', True)
    version = spec.get_tag('Version', True)
    release = spec.get_tag('Release', True)
    srpm = os.path.join(path, "{}-{}-{}.src.rpm".format(
        module_name, version, release))
    if os.path.exists(srpm):
        log.warn('Srpm found, rewriting it.')
    # SECURITY/BUG FIX: build the command as an argument list and run
    # it without a shell, so paths with spaces/metacharacters survive.
    cmd = ['rpmbuild',
           '--define', 'dist .{}'.format(dist),
           '--define', '_sourcedir {}'.format(path),
           '--define', '_srcrpmdir {}'.format(path),
           '--nodeps', '-bs', specfile]
    output = subprocess.check_output(cmd)
    log.warn(output)
    srpm = output.split()[1]
    return srpm
Create an srpm Requires that sources are available in local directory
44,219
def compute_auth_key(userid, password):
    """Compute the authentication key for freedns.afraid.org.

    This is the SHA1 hex digest of "userid|password".
    """
    import sys
    if sys.version_info >= (3, 0):
        token = b"|".join((userid.encode("ascii"), password.encode("ascii")))
    else:
        token = "|".join((userid, password))
    return hashlib.sha1(token).hexdigest()
Compute the authentication key for freedns . afraid . org .
44,220
def records(credentials, url="https://freedns.afraid.org/api/"):
    """Yield the dynamic DNS records associated with this account."""
    req = requests.get(
        url,
        params={"action": "getdyndns", "sha": credentials.sha},
        headers=constants.REQUEST_HEADERS_DEFAULT,
        timeout=60)
    for line in req.text.splitlines():
        line = line.strip()
        if line:
            # each non-empty line is pipe-separated record data
            yield AfraidDynDNSRecord(*line.split("|"))
Yield the dynamic DNS records associated with this account .
44,221
def update(url):
    """Update a remote DNS record by requesting its update URL.

    :returns: the IP address reported in the response, or None when the
        response could not be parsed
    """
    req = requests.get(url, headers=constants.REQUEST_HEADERS_DEFAULT,
                       timeout=60)
    req.close()
    match = re.search(r"\b(?P<ip>(?:[0-9]{1,3}\.){3}[0-9]{1,3})\b", req.text)
    if match:
        return str(ipaddress(match.group("ip")))
    LOG.error("couldn't parse the server's response '%s'", req.text)
    return None
Update remote DNS record by requesting its special endpoint URL .
44,222
def sha(self):
    """Return the auth key, lazily computing and caching it."""
    cached = self._sha
    if cached is None:
        cached = compute_auth_key(self.userid, self.password)
        self._sha = cached
    return cached
Return sha lazily compute if not done yet .
44,223
def register_observer(self, observer, events=None):
    """Register a listener function.

    :param observer: callable invoked as observer(source, event, msg)
    :param events: event or sequence of events to subscribe to;
        None subscribes to all events
    """
    # normalize a single event into a one-element tuple
    if events is not None and not isinstance(events, (tuple, list)):
        events = (events,)
    if observer in self._observers:
        LOG.warning("Observer '%r' already registered, overwriting for events"
                    " %r", observer, events)
    self._observers[observer] = events
Register a listener function .
44,224
def notify_observers(self, event=None, msg=None):
    """Notify registered observers, dropping any that raise.

    Observers subscribed to specific events are only called when the
    event matches (or when either side is None, meaning "any").
    """
    for observer, subscribed in list(self._observers.items()):
        if (subscribed is not None and event is not None
                and event not in subscribed):
            continue
        try:
            observer(self, event, msg)
        except (Exception,) as ex:
            # a misbehaving observer is unregistered rather than fatal
            self.unregister_observer(observer)
            errmsg = ("Exception in message dispatch: Handler '{0}' "
                      "unregistered for event '{1}' "
                      .format(observer.__class__.__name__, event))
            LOG.error(errmsg, exc_info=ex)
Notify observers .
44,225
def detect(self):
    """Detect the IP address via the socket-based detector.

    Returns None (and logs the failure) when detection raises.
    """
    kind = IPV6_PUBLIC if self.opts_family == AF_INET6 else IPV4
    theip = None
    try:
        theip = detect_ip(kind)
    except GetIpException:
        LOG.exception("socket detector raised an exception:")
    self.set_current_value(theip)
    return theip
Detect the IP address .
44,226
def _detect(self):
    """Use the netifaces module to detect ifconfig information.

    Picks the first valid address on self.opts_iface (optionally
    restricted to self.netmask), stores it and returns it; None when
    nothing suitable is found.
    """
    theip = None
    family = (netifaces.AF_INET6 if self.opts_family == AF_INET6
              else netifaces.AF_INET)
    try:
        addrlist = netifaces.ifaddresses(self.opts_iface)[family]
    except ValueError as exc:
        LOG.error("netifaces choked while trying to get network interface"
                  " information for interface '%s'", self.opts_iface,
                  exc_info=exc)
    else:
        for pair in addrlist:
            try:
                detip = ipaddress(pair["addr"])
            except (TypeError, ValueError) as exc:
                LOG.debug("Found invalid IP '%s' on interface '%s'!?",
                          pair["addr"], self.opts_iface, exc_info=exc)
                continue
            # honour an optional netmask restriction
            if self.netmask is not None and detip not in self.netmask:
                continue
            theip = pair["addr"]
            break
    self.set_current_value(theip)
    return theip
Use the netifaces module to detect ifconfig information .
44,227
def clean_tempdir(context, scenario):
    """Remove the scenario's temporary directory if the test passed.

    Failed scenarios keep their tempdir around for inspection.
    """
    tempdir = getattr(context, 'tempdir', None)
    if not tempdir or scenario.status != 'passed':
        return
    shutil.rmtree(tempdir)
    del context.tempdir
Clean up temporary test dirs for passed tests .
44,228
def is_reserved_ip(self, ip):
    """Check whether ip lies in any reserved IPv4 address space."""
    candidate = ipaddress(ip)
    return any(candidate in ipnetwork(block)
               for block in self._reserved_netmasks)
Check if the given ip address is in a reserved ipv4 address space .
44,229
def random_public_ip(self):
    """Return a randomly generated, non-reserved public IPv4 address."""
    while True:
        candidate = random_ip()
        if not self.is_reserved_ip(candidate):
            return candidate
Return a randomly generated public IPv4 address .
44,230
def detect(self):
    """Detect an IP from the random-IP iterator and return it.

    Only the first generated address is consumed per call.
    """
    for theip in self.rips:
        ip_str = str(theip)
        LOG.debug("detected %s", ip_str)
        self.set_current_value(ip_str)
        return ip_str
Detect IP and return it .
44,231
def patches_base_ref(default=exception.CantGuess):
    """Return a git reference to the patches branch base.

    Derived from the .spec patches_base, falling back to the Version
    tag (plus milestone). When guessing fails, raises CantGuess unless
    an explicit default was supplied.
    """
    ref = None
    try:
        spec = specfile.Spec()
        ref, _ = spec.get_patches_base(expand_macros=True)
        if ref:
            ref, _ = tag2version(ref)
        else:
            ref = spec.get_tag('Version', expand_macros=True)
            milestone = spec.get_milestone()
            if milestone:
                ref += milestone
        if not ref:
            raise exception.CantGuess(msg="got empty .spec Version")
    except Exception as ex:
        if default is exception.CantGuess:
            raise exception.CantGuess(what="current package version",
                                      why=str(ex))
        return default
    tag_style = version_tag_style(ref)
    return version2tag(ref, tag_style=tag_style)
Return a git reference to patches branch base .
44,232
def display_listitems ( items , url ) : if ( len ( items ) == 2 and items [ 0 ] . get_label ( ) == '..' and items [ 1 ] . get_played ( ) ) : display_video ( items ) else : label_width = get_max_len ( item . get_label ( ) for item in items ) num_width = len ( str ( len ( items ) ) ) output = [ ] for i , item in enumerate ( items ) : output . append ( '[%s] %s (%s)' % ( str ( i ) . rjust ( num_width ) , item . get_label ( ) . ljust ( label_width ) , item . get_path ( ) ) ) line_width = get_max_len ( output ) output . append ( '-' * line_width ) header = [ '' , '=' * line_width , 'Current URL: %s' % url , '-' * line_width , '%s %s Path' % ( '#' . center ( num_width + 2 ) , 'Label' . ljust ( label_width ) ) , '-' * line_width , ] print '\n' . join ( header + output )
Displays a list of items along with the index to enable a user to select an item .
44,233
def display_video ( items ) : parent_item , played_item = items title_line = 'Playing Media %s (%s)' % ( played_item . get_label ( ) , played_item . get_path ( ) ) parent_line = '[0] %s (%s)' % ( parent_item . get_label ( ) , parent_item . get_path ( ) ) line_width = get_max_len ( [ title_line , parent_line ] ) output = [ '-' * line_width , title_line , '-' * line_width , parent_line , ] print '\n' . join ( output )
Prints a message for a playing video and displays the parent listitem .
44,234
def get_user_choice(items):
    """Return the item selected by the user, or None when 'q' is entered.

    Re-prompts on non-integer or out-of-range input.
    """
    choice = raw_input('Choose an item or "q" to quit: ')
    while choice != 'q':
        try:
            selected = items[int(choice)]
        except ValueError:
            choice = raw_input('You entered a non-integer. Choice must be an'
                               ' integer or "q": ')
        except IndexError:
            choice = raw_input('You entered an invalid integer. Choice must be'
                               ' from above url list or "q": ')
        else:
            print
            return selected
    return None
Returns the selected item from provided items or None if q was entered for quit .
44,235
def decode(data):
    """Decode bytes to unicode with BOM stripping and charset guessing."""
    if isinstance(data, unicode):
        return data
    # honour an explicit byte-order mark first
    for bom, encoding in UNICODE_BOMS:
        if data.startswith(bom):
            return data[len(bom):].decode(encoding, errors='ignore')
    # then try UTF-8, followed by the common single-byte encodings
    try:
        return data.decode('utf-8')
    except UnicodeDecodeError:
        pass
    for encoding in COMMON_ENCODINGS:
        try:
            return data.decode(encoding)
        except UnicodeDecodeError:
            pass
    # last resort: drop non-ASCII bytes entirely
    return NON_ASCII_FILTER.sub('', data).decode('ascii', errors='replace')
Decode data employing some charset detection and including unicode BOM stripping .
44,236
def get_context(self, line=1, column=0):
    """Return a (previous, current, next) tuple of context lines.

    Lines are dedented by their common leading whitespace and trimmed
    via _format_line; returns None when line is past the end of data,
    and ('', '', '') when every context line is blank.
    """
    line -= 1  # convert to a 0-based index
    datalen = len(self.data)
    if datalen <= line:
        return None
    build = [self.data[line]]
    build.insert(0, self.data[line - 1] if line > 0 else None)
    build.append(self.data[line + 1] if line < datalen - 1 else None)
    # find the smallest leading-whitespace count among non-blank lines
    leading_counts = []
    lstrip_count = INFINITY
    for text in build:
        if text is None or not text.strip():
            leading_counts.append(lstrip_count)
            continue
        ws_count = len(text) - len(text.lstrip())
        leading_counts.append(ws_count)
        if ws_count < lstrip_count:
            lstrip_count = ws_count
    if lstrip_count == INFINITY:
        return ('', '', '')
    for lnum in range(3):
        if not build[lnum]:
            continue
        text = build[lnum].strip()
        if not text:
            build[lnum] = ''
            continue
        text = self._format_line(text, column=column, rel_line=lnum)
        # re-indent relative to the common dedent level
        build[lnum] = '%s%s' % (' ' * (leading_counts[lnum] - lstrip_count),
                                text)
    return tuple(build)
Returns a tuple containing the context for a line
44,237
def _format_line(self, data, column=0, rel_line=1):
    """Trim a context line to an appropriate display length.

    Lines over 140 chars are elided around the column of interest for
    the current line (rel_line == 1), from the right for the previous
    line (0), and from the left for the next line (2).
    """
    line_length = len(data)
    if line_length > 140:
        if rel_line == 0:
            data = '... %s' % data[-140:]
        elif rel_line == 1:
            if column < 70:
                data = '%s ...' % data[:140]
            elif column > line_length - 70:
                data = '... %s' % data[-140:]
            else:
                data = '... %s ...' % data[column - 70:column + 70]
        elif rel_line == 2:
            data = '%s ...' % data[:140]
    return unicodehelper.decode(data)
Formats a line from the data to be the appropriate length
44,238
def get_line(self, position):
    """Return the 1-based line number containing string position.

    Positions past the end of the data report the last line.
    """
    datalen = len(self.data)
    # running count of characters consumed, one extra per newline
    count = len(self.data[0])
    line = 1
    while count < position and line < datalen:
        count += len(self.data[line]) + 1
        line += 1
    return line
Returns the line number that the given string position is found on
44,239
def load_addon_strings(addon, filename):
    """Load string resources from an XBMC strings XML file onto addon.

    Not an official XBMC method; exists to facilitate mocking the
    other methods when running outside of XBMC.
    """
    xml = parse(filename)
    addon._strings = dict(
        (tag.getAttribute('id'), tag.firstChild.data)
        for tag in xml.getElementsByTagName('string'))
This is not an official XBMC method; it is here to facilitate mocking up the other methods when running outside of XBMC.
44,240
def get_addon_id(addonxml):
    """Parse the addon id from the given addon.xml filename."""
    document = parse(addonxml)
    return document.getElementsByTagName('addon')[0].getAttribute('id')
Parses an addon id from the given addon . xml filename .
44,241
def get_addon_name(addonxml):
    """Parse the addon name from the given addon.xml filename."""
    document = parse(addonxml)
    return document.getElementsByTagName('addon')[0].getAttribute('name')
Parses an addon name from the given addon . xml filename .
44,242
def _create_dir ( path ) : try : os . makedirs ( path ) except OSError , exc : if exc . errno == errno . EEXIST : pass else : raise
Creates necessary directories for the given path or does nothing if the directories already exist .
44,243
def translatePath(path):
    """Map a special:// path onto a directory under the OS temp dir.

    Doesn't touch any real XBMC installation; creates the root dir
    under TEMP_DIR so callers can use the result immediately.
    """
    valid_dirs = ['xbmc', 'home', 'temp', 'masterprofile', 'profile',
                  'subtitles', 'userdata', 'database', 'thumbnails',
                  'recordings', 'screenshots', 'musicplaylists',
                  'videoplaylists', 'cdrips', 'skin']
    assert path.startswith('special://'), 'Not a valid special:// path.'
    parts = path.split('/')[2:]
    assert len(parts) > 1, 'Need at least a single root directory'
    assert parts[0] in valid_dirs, '%s is not a valid root dir.' % parts[0]
    _create_dir(os.path.join(TEMP_DIR, parts[0]))
    return os.path.join(TEMP_DIR, *parts)
Creates folders in the OS's temp directory. Doesn't touch any possible XBMC installation on the machine. Attempts to do as little work as possible to enable this function to work seamlessly.
44,244
def _parse_request(self, url=None, handle=None):
    """Build a Request from sys.argv the way XBMC invokes plugins.

    argv layout: [base_url, handle, query_string].
    """
    if url is None:
        url = sys.argv[0]
        # append the query string when one was passed
        if len(sys.argv) == 3:
            url += sys.argv[2]
    if handle is None:
        handle = sys.argv[1]
    return Request(url, handle)
Handles setup of the plugin state including request arguments handle mode .
44,245
def register_module(self, module, url_prefix):
    """Register a module with this plugin under url_prefix.

    Runs the module's deferred registration callbacks so url_for works
    for its routes.
    """
    module._plugin = self
    module._url_prefix = url_prefix
    for register in module._register_funcs:
        register(self, url_prefix)
Registers a module with a plugin . Requires a url_prefix that will then enable calls to url_for .
44,246
def cached_route(self, url_rule, name=None, options=None, TTL=None):
    """A decorator adding a route to a view and applying caching.

    url_rule/name/options are forwarded to route(); TTL, when given,
    is passed along to the caching decorator.
    """
    route_decorator = self.route(url_rule, name=name, options=options)
    cache_decorator = self.cached(TTL) if TTL else self.cached()

    def decorator(func):
        return route_decorator(cache_decorator(func))
    return decorator
A decorator to add a route to a view and also apply caching . The url_rule name and options arguments are the same arguments for the route function . The TTL argument if given will passed along to the caching decorator .
44,247
def run(self, test=False):
    """The main entry point for a plugin: dispatch the current request.

    Also flushes any unsynced storages to disk before returning.
    """
    self._request = self._parse_request()
    log.debug('Handling incoming request for %s', self.request.path)
    items = self._dispatch(self.request.path)
    # persist any storages opened while handling the request
    for storage in getattr(self, '_unsynced_storages', {}).values():
        log.debug('Saving a %s storage to disk at "%s"',
                  storage.file_format, storage.filename)
        storage.close()
    return items
The main entry point for a plugin .
44,248
def main():
    """Entry point for the console script: dispatch to a sub-command."""
    parser = OptionParser()
    if len(sys.argv) == 1:
        parser.set_usage(USAGE)
        parser.error('At least one command is required.')
    command = sys.argv[1]
    if command == '-h':
        parser.set_usage(USAGE)
        opts, args = parser.parse_args()
    if command not in COMMANDS.keys():
        parser.error('Invalid command')
    manager = COMMANDS[command]
    # let the command contribute its own options and usage text
    if hasattr(manager, 'option_list'):
        for opt_args, opt_kwargs in manager.option_list:
            parser.add_option(*opt_args, **opt_kwargs)
    if hasattr(manager, 'usage'):
        parser.set_usage(manager.usage)
    opts, args = parser.parse_args()
    manager.run(opts, args[1:])
The entry point for the console script xbmcswift2 .
44,249
def write(self, text):
    """Write text to the buffer, colorized or with color tags stripped.

    Returns self so calls can be chained.
    """
    if not self.no_color:
        text = self.colorize_text(text)
    else:
        # FIX: '\<' and '\>' are invalid escape sequences (a
        # DeprecationWarning, eventually a SyntaxError); '<' and '>'
        # need no escaping in a regex.
        text = re.sub(r'<<[A-Z]*?>>', '', text)
    text += '\n'
    self.buffer.write(text)
    return self
Uses curses to print in the fanciest way possible .
44,250
def _turn_sigterm_into_systemexit ( ) : try : import signal except ImportError : return def handle_term ( signo , frame ) : raise SystemExit signal . signal ( signal . SIGTERM , handle_term )
Attempts to turn a SIGTERM exception into a SystemExit exception .
44,251
def wsgiref_server_runner(wsgi_app, global_conf, **kw):
    """Entry point for wsgiref's WSGI server.

    Supports optional SSL (wsgiref.certfile/keyfile) and a threaded
    mode (wsgiref.threaded); serves forever.
    """
    from wsgiref.simple_server import make_server, WSGIServer
    host = kw.get('host', '0.0.0.0')
    port = int(kw.get('port', 8080))
    threaded = asbool(kw.get('wsgiref.threaded', False))
    server_class = WSGIServer
    certfile = kw.get('wsgiref.certfile')
    keyfile = kw.get('wsgiref.keyfile')
    scheme = 'http'
    if certfile and keyfile:
        import ssl

        class SecureWSGIServer(WSGIServer):
            def get_request(self):
                # wrap each accepted socket for TLS
                socket, client_address = WSGIServer.get_request(self)
                socket = ssl.wrap_socket(socket, server_side=True,
                                         certfile=certfile, keyfile=keyfile)
                return socket, client_address

        # secure default port differs from the plain-HTTP default
        port = int(kw.get('port', 4443))
        server_class = SecureWSGIServer
    if threaded:
        from SocketServer import ThreadingMixIn

        class GearboxWSGIServer(ThreadingMixIn, server_class):
            pass
        server_type = 'Threaded'
    else:
        class GearboxWSGIServer(server_class):
            pass
        server_type = 'Standard'
    server = make_server(host, port, wsgi_app,
                         server_class=GearboxWSGIServer)
    if certfile and keyfile:
        server_type += ' Secure'
        scheme += 's'
    ServeCommand.out('Starting %s HTTP server on %s://%s:%s'
                     % (server_type, scheme, host, port))
    server.serve_forever()
Entry point for wsgiref s WSGI server
44,252
def cherrypy_server_runner(app, global_conf=None, host='127.0.0.1',
                           port=None, ssl_pem=None, protocol_version=None,
                           numthreads=None, server_name=None, max=None,
                           request_queue_size=None, timeout=None):
    """Entry point for CherryPy's WSGI server; serves until interrupted.

    Returns the server object after KeyboardInterrupt/SystemExit.
    """
    is_ssl = False
    if ssl_pem:
        port = port or 4443
        is_ssl = True
    if not port:
        # allow "host:port" to be passed in the host argument
        if ':' in host:
            host, port = host.split(':', 1)
        else:
            port = 8080
    bind_addr = (host, int(port))
    kwargs = {}
    for var_name in ('numthreads', 'max', 'request_queue_size', 'timeout'):
        var = locals()[var_name]
        if var is not None:
            kwargs[var_name] = int(var)
    server = None
    try:
        # cheroot is the modern package name for the CherryPy server
        import cheroot.wsgi as wsgiserver
        server = wsgiserver.Server(bind_addr, app,
                                   server_name=server_name, **kwargs)
    except ImportError:
        from cherrypy import wsgiserver
        server = wsgiserver.CherryPyWSGIServer(bind_addr, app,
                                               server_name=server_name,
                                               **kwargs)
    server.ssl_certificate = server.ssl_private_key = ssl_pem
    if protocol_version:
        server.protocol = protocol_version
    try:
        protocol = is_ssl and 'https' or 'http'
        if host == '0.0.0.0':
            print('serving on 0.0.0.0:%s view at %s://127.0.0.1:%s'
                  % (port, protocol, port))
        else:
            print('serving on %s://%s:%s' % (protocol, host, port))
        server.start()
    except (KeyboardInterrupt, SystemExit):
        server.stop()
    return server
Entry point for CherryPy s WSGI server
44,253
def get_fixed_argv(self):
    """Get proper arguments for re-running the command.

    On Windows a bare .py argv[0] is prefixed with the interpreter.
    """
    argv = list(sys.argv)
    if sys.platform == 'win32' and argv[0].endswith('.py'):
        argv.insert(0, sys.executable)
    return argv
Get proper arguments for re - running the command .
44,254
def package_contents(self):
    """Return a dict of file information for the package, with caching."""
    if self.contents_cache:
        return self.contents_cache
    out_files = {}
    for info in self.zf.infolist():
        lower = info.filename.lower()
        out_files[info.filename] = {
            'name': info.filename,
            'size': info.file_size,
            'name_lower': lower,
            # crude extension detection: text after the last dot
            'extension': lower.split('.')[-1],
        }
    self.contents_cache = out_files
    return out_files
Returns a dictionary of file information
44,255
def write(self, name, data):
    """Write a blob of data to the XPI under the given archive name.

    StringIO objects are drained; anything else goes through to_utf8.
    """
    if isinstance(data, StringIO):
        payload = data.getvalue()
    else:
        payload = to_utf8(data)
    self.zf.writestr(name, payload)
Write a blob of data to the XPI manager .
44,256
def write_file(self, name, path=None):
    """Write the contents of a disk file into the XPI.

    :param name: archive name; also the disk path unless path is given
    """
    self.zf.write(path if path is not None else name, name)
Write the contents of a file from the disk to the XPI .
44,257
def enum(*args, **kwargs):
    """An enum class to mirror XBMC constants.

    Positional args become attrs mapping to themselves; kwargs are
    taken as-is. '_fields' lists all attribute names.
    """
    kwargs.update((arg, arg) for arg in args)
    # BUG FIX (py3): kwargs.keys() is a live view, so assigning it to
    # '_fields' made the list include '_fields' itself; snapshot first.
    kwargs['_fields'] = list(kwargs.keys())
    return type('Enum', (), kwargs)
An enum class to mirror XBMC constants. All args and kwargs.keys are added as attrs on the returned object.
44,258
def clean_dict(dct):
    """Return a copy of dct with None-valued items removed."""
    return {key: val for key, val in dct.items() if val is not None}
Returns a dict where items with a None value are removed
44,259
def pickle_dict(items):
    """Return a dict where non-string values are pickled.

    A '_pickled' key records a comma separated list of the keys whose
    values were pickled.
    """
    result = {}
    pickled_keys = []
    for key, val in items.items():
        if isinstance(val, basestring):
            result[key] = val
        else:
            pickled_keys.append(key)
            result[key] = pickle.dumps(val)
    if pickled_keys:
        result['_pickled'] = ','.join(pickled_keys)
    return result
Returns a new dictionary where values which aren't instances of basestring are pickled. Also, a new key, '_pickled', contains a comma separated list of keys corresponding to the pickled values.
44,260
def unpickle_args(items):
    """Unpickle list-valued request args flagged by the '_pickled' key.

    items maps keys to lists of values, as parsed query strings do.
    """
    pickled = items.pop('_pickled', None)
    if pickled is None:
        return items
    pickled_keys = pickled[0].split(',')
    result = {}
    for key, vals in items.items():
        if key in pickled_keys:
            result[key] = [pickle.loads(val) for val in vals]
        else:
            result[key] = vals
    return result
Takes a dict and unpickles values whose keys are found in _pickled key .
44,261
def unpickle_dict(items):
    """Reverse pickle_dict: unpickle the values named in '_pickled'."""
    pickled_keys = items.pop('_pickled', '').split(',')
    result = {}
    for key, val in items.items():
        result[key] = pickle.loads(val) if key in pickled_keys else val
    return result
Returns a dict pickled with pickle_dict
44,262
def download_page(url, data=None):
    """Return the response body for url.

    The optional data argument is passed directly to urlopen (making
    the request a POST when given).
    """
    conn = urllib2.urlopen(url, data)
    # FIX: close the connection even when read() raises, instead of
    # leaking it on error.
    try:
        return conn.read()
    finally:
        conn.close()
Returns the response for the given url . The optional data argument is passed directly to urlopen .
44,263
def load_commands(self, namespace):
    """Load all commands advertised on the given entry-point namespace.

    Underscores in entry point names become spaces when
    convert_underscores is enabled.
    """
    for ep in pkg_resources.iter_entry_points(namespace):
        LOG.debug('found command %r', ep.name)
        if self.convert_underscores:
            cmd_name = ep.name.replace('_', ' ')
        else:
            cmd_name = ep.name
        self.commands[cmd_name] = ep
    return
Load all the commands from an entrypoint
44,264
def find_command(self, argv):
    """Resolve argv to (command factory, command name, remaining args).

    Command names may span several argv words; an option encountered
    before any known command is an error.
    """
    search_args = argv[:]
    name = ''
    while search_args:
        if search_args[0].startswith('-'):
            name = '%s %s' % (name, search_args[0])
            raise ValueError('Invalid command %r' % name)
        next_val = search_args.pop(0)
        name = '%s %s' % (name, next_val) if name else next_val
        if name in self.commands:
            cmd_ep = self.commands[name]
            if hasattr(cmd_ep, 'resolve'):
                cmd_factory = cmd_ep.resolve()
            else:
                # old setuptools: load() may accept require=False
                arg_spec = inspect.getargspec(cmd_ep.load)
                if 'require' in arg_spec[0]:
                    cmd_factory = cmd_ep.load(require=False)
                else:
                    cmd_factory = cmd_ep.load()
            return (cmd_factory, name, search_args)
    raise ValueError('Unknown command %r' % next(iter(argv), ''))
Given an argument list find a command and return the processor and any remaining arguments .
44,265
def prepare_package(err, path, expectation=0, for_appversions=None,
                    timeout=-1):
    """Prepare a file-based package for validation.

    Dispatches on the file extension (.xml -> search provider test,
    .xpi/.jar -> package test) and records results on *err*.  When
    *timeout* is not -1, a SIGALRM-based itimer aborts validation by
    raising ValidationTimeout.
    """
    package = None
    try:
        if not os.path.isfile(path):
            err.error(('main', 'prepare_package', 'not_found'),
                      'The package could not be found')
            return
        package_extension = os.path.splitext(path)[1]
        package_extension = package_extension.lower()

        def timeout_handler(signum, frame):
            # Fired by the itimer below; unwinds into the except clause.
            raise validator.ValidationTimeout(timeout)

        if timeout != -1:
            signal.signal(signal.SIGALRM, timeout_handler)
            signal.setitimer(signal.ITIMER_REAL, timeout)
        if package_extension == '.xml':
            test_search(err, path, expectation)
        elif package_extension not in ('.xpi', '.jar'):
            err.error(('main', 'prepare_package', 'unrecognized'),
                      'The package is not of a recognized type.')
        else:
            package = open(path, 'rb')
            test_package(err, package, path, expectation, for_appversions)
        err.metadata['is_extension'] = err.detected_type == PACKAGE_EXTENSION
    except validator.ValidationTimeout:
        err.system_error(
            msg_id='validation_timeout',
            message='Validation has timed out',
            signing_severity='high',
            description=('Validation was unable to complete in the allotted '
                         'time. This is most likely due to the size or '
                         'complexity of your add-on.',
                         'This timeout has been logged, but please consider '
                         'filing an issue report here: '
                         'https://bit.ly/1POrYYU'),
            exc_info=sys.exc_info())
    except Exception:
        # Any other failure is reported as a generic system error.
        err.system_error(exc_info=sys.exc_info())
    finally:
        # Always disarm the timer and restore the default handler, even on
        # the error paths, so later code is not interrupted.
        if timeout != -1:
            signal.setitimer(signal.ITIMER_REAL, 0)
            signal.signal(signal.SIGALRM, signal.SIG_DFL)
        if package:
            package.close()
        decorator.cleanup()
Prepares a file - based package for validation .
44,266
def populate_chrome_manifest(err, xpi_package):
    """Load chrome.manifest (if present) and save it as a resource on *err*.

    Manifests linked via 'manifest' triples are followed recursively and
    their triples merged into the top-level manifest; a shared set guards
    against self-referential manifest cycles.
    """
    if 'chrome.manifest' in xpi_package:
        chrome_data = xpi_package.read('chrome.manifest')
        chrome = ChromeManifest(chrome_data, 'chrome.manifest')
        # Paths currently being expanded; membership means recursion.
        chrome_recursion_buster = set()

        def get_linked_manifest(path, from_path, from_chrome, from_triple):
            # Generator yielding triples from a linked manifest (and,
            # recursively, any manifests it links in turn).
            if path in chrome_recursion_buster:
                err.warning(
                    err_id=('submain', 'populate_chrome_manifest',
                            'recursion'),
                    warning='Linked manifest recursion detected.',
                    description='A chrome registration file links back to '
                                'itself. This can cause a multitude of '
                                'issues.',
                    filename=path)
                return
            if path not in xpi_package:
                err.notice(
                    err_id=('submain', 'populate_chrome_manifest', 'linkerr'),
                    notice='Linked manifest could not be found.',
                    description=('A linked manifest file could not be found '
                                 'in the package.',
                                 'Path: %s' % path),
                    filename=from_path,
                    line=from_triple['line'],
                    context=from_chrome.context)
                return
            chrome_recursion_buster.add(path)
            manifest = ChromeManifest(xpi_package.read(path), path)
            for triple in manifest.triples:
                yield triple
                if triple['subject'] == 'manifest':
                    subpath = triple['predicate']
                    # Resolve paths relative to the linking manifest.
                    if not subpath.startswith('/'):
                        subpath = '%s/%s' % ('/'.join(path.split('/')[:-1]),
                                             subpath)
                    subpath = subpath.lstrip('/')
                    for subtriple in get_linked_manifest(
                            subpath, path, manifest, triple):
                        yield subtriple
            chrome_recursion_buster.discard(path)

        chrome_recursion_buster.add('chrome.manifest')
        for extra_manifest in chrome.get_triples(subject='manifest'):
            for triple in get_linked_manifest(extra_manifest['predicate'],
                                              'chrome.manifest', chrome,
                                              extra_manifest):
                chrome.triples.append(triple)
        chrome_recursion_buster.discard('chrome.manifest')
        err.save_resource('chrome.manifest', chrome, pushable=True)
        # A second, non-pushable handle that survives push_state().
        err.save_resource('chrome.manifest_nopush', chrome, pushable=False)
Loads the chrome.manifest if it's present.
44,267
def detect_type(err, install_rdf=None, xpi_package=None):
    """Determine the add-on type from install.rdf / file extension.

    Prefers the <em:type> value from install.rdf; falls back to the package
    file extension.  Returns one of the PACKAGE_* constants or None when the
    type cannot be determined.
    """
    # Mapping of <em:type> string values to internal package constants.
    translated_types = {'2': PACKAGE_EXTENSION, '4': PACKAGE_THEME,
                        '8': PACKAGE_LANGPACK, '32': PACKAGE_MULTI,
                        '64': PACKAGE_DICTIONARY, '128': PACKAGE_EXTENSION,
                        '256': PACKAGE_EXTENSION, }
    if install_rdf is None:
        # Without install.rdf only a dictionary (.xpi) can be assumed.
        types = {'xpi': PACKAGE_DICTIONARY}
        err.notice(('typedetection', 'detect_type', 'missing_install_rdf'),
                   'install.rdf was not found.',
                   'The type should be determined by install.rdf if present. '
                   "If it isn't, we still need to know the type.")
        if xpi_package.extension in types:
            return types[xpi_package.extension]
        else:
            return None
    type_uri = install_rdf.uri('type')
    type_ = install_rdf.get_object(None, type_uri)
    # A dictionaries/ directory marks the package as a dictionary regardless
    # of (and possibly in conflict with) its declared <em:type>.
    if any(file_ for file_ in xpi_package
           if file_.startswith('dictionaries/')):
        if type_ != '64':
            err.error(('typedetection', 'dictionary_valid_type',
                       'invalid_em_type'),
                      'Invalid <em:type> value.',
                      'The package appears to be a dictionary but does not '
                      'have the correct <em:type> set in the install '
                      'manifest.')
        return PACKAGE_DICTIONARY
    if type_ is not None:
        if type_ in translated_types:
            err.save_resource('is_multipackage', type_ == '32',
                              pushable=True)
            return translated_types[type_]
        else:
            err.error(('typedetection', 'detect_type', 'invalid_em_type'),
                      'Invalid <em:type> value.',
                      'The only valid values for <em:type> are 2, 4, 8, and '
                      '32. Any other values are either invalid or '
                      'deprecated.',
                      'install.rdf')
            return
    else:
        err.notice(
            err_id=('typedetection', 'detect_type', 'no_em:type'),
            notice='No <em:type> element found in install.rdf',
            description="It isn't always required, but it is the most "
                        'reliable method for determining add-on type.',
            filename='install.rdf')
        # Fall back to guessing the <em:type> from the file extension.
        extensions = {'jar': '4', 'xpi': '2'}
        if xpi_package.extension in extensions:
            install_rdf_type = extensions[xpi_package.extension]
            return translated_types[install_rdf_type]
Determines the type of add - on being validated based on install . rdf file extension and other properties .
44,268
def setup_options(opts):
    """Apply logging-related command line options globally.

    --quiet raises the threshold to WARNING; --verbose lowers it to DEBUG
    (verbose wins when both are given, matching the original order).
    """
    if opts.quiet:
        logger.log.setLevel(logging.WARNING)
        logger.GLOBAL_LOG_LEVEL = logging.WARNING
    if opts.verbose:
        logger.log.setLevel(logging.DEBUG)
        logger.GLOBAL_LOG_LEVEL = logging.DEBUG
Takes any actions necessary based on command line options
44,269
def get_addon_module_name(addonxml_filename):
    """Return the addon's python module name from its addon.xml.

    Finds the extension node whose point is 'xbmc.python.pluginsource'
    and returns its 'library' filename without the '.py' suffix.  Exits
    with an error message when the file cannot be read or no pluginsource
    extension exists.
    """
    try:
        xml = ET.parse(addonxml_filename).getroot()
    except IOError:
        sys.exit('Cannot find an addon.xml file in the current working '
                 'directory. Please run this command from the root directory '
                 'of an addon.')
    try:
        # next() builtin instead of the Python-2-only generator .next()
        # method, so this also runs on Python 3.
        plugin_source = next(ext for ext in xml.findall('extension')
                             if ext.get('point') == 'xbmc.python.pluginsource')
    except StopIteration:
        sys.exit('ERROR, no pluginsource in addonxml')
    return plugin_source.get('library').split('.')[0]
Attempts to extract a module name for the given addon s addon . xml file . Looks for the xbmc . python . pluginsource extension node and returns the addon s filename without the . py suffix .
44,270
def once(plugin, parent_stack=None):
    """Run *plugin* a single time, display its items and return them."""
    plugin.clear_added_items()
    items = plugin.run()
    # When the plugin asked to update the current listing, the previous
    # parent entry no longer applies.
    if parent_stack and plugin._update_listing:
        del parent_stack[-1]
    if parent_stack:
        # Prepend the '..' navigation entry for the current parent.
        items.insert(0, parent_stack[-1])
    display_listitems(items, plugin.request.url)
    return items
A run mode for the CLI that runs the plugin once and exits .
44,271
def interactive(plugin):
    """Drive *plugin* in a loop, letting the user pick items until quitting."""
    def unplayed(listing):
        # Played items (media that was resolved) cannot be navigated into.
        return [entry for entry in listing if not entry.get_played()]

    items = unplayed(once(plugin))
    parent_stack = []
    selection = get_user_choice(items)
    while selection is not None:
        if parent_stack and selection == parent_stack[-1]:
            # The user chose the '..' entry: pop back up one level.
            parent_stack.pop()
        else:
            parent_stack.append(ListItem.from_dict(label='..',
                                                   path=plugin.request.url))
        patch_plugin(plugin, selection.get_path())
        items = unplayed(once(plugin, parent_stack=parent_stack))
        selection = get_user_choice(items)
A run mode for the CLI that runs the plugin in a loop based on user input .
44,272
def crawl(plugin):
    """Breadth-first crawl of every route reachable from the starting path.

    Each URL is visited at most once even if referenced repeatedly; the
    user is prompted between fetches via continue_or_quit().
    """
    visited = set()
    pending = set(item.get_path() for item in once(plugin))
    while pending and continue_or_quit():
        current = pending.pop()
        visited.add(current)
        patch_plugin(plugin, current)
        discovered = set(item.get_path() for item in once(plugin))
        pending.update(p for p in discovered if p not in visited)
Performs a breadth - first crawl of all possible routes from the starting path . Will only visit a URL once even if it is referenced multiple times in a plugin . Requires user interaction in between each fetch .
44,273
def run(opts, args):
    """Entry point for the 'run' command: execute a plugin from the CLI.

    The first positional arg may name a run mode (once/crawl/interactive);
    the next, if any, is the starting plugin URL.
    """
    setup_options(opts)
    mode = Modes.ONCE
    if args and hasattr(Modes, args[0].upper()):
        mode = getattr(Modes, args.pop(0).upper())
    url = args.pop(0) if args else None
    plugin_mgr = PluginManager.load_plugin_from_addonxml(mode, url)
    plugin_mgr.run()
The run method for the run command . Executes a plugin from the command line .
44,274
def run(self):
    """Execute the wrapped plugin in the mode parsed from the command line."""
    handle = 0
    dispatch = {
        Modes.ONCE: once,
        Modes.CRAWL: crawl,
        Modes.INTERACTIVE: interactive,
    }
    runner = dispatch[self.mode]
    patch_sysargv(self.url or 'plugin://%s/' % self.plugin.id, handle)
    return runner(self.plugin)
This method runs the plugin in the appropriate mode parsed from the command line options.
44,275
def conversation(self, name=None, **kwargs):
    """Create a new Conversation tied to this client, register and return it."""
    new_convo = Conversation(self, **kwargs)
    super().conversation(name, new_convo)
    return new_convo
Make a new conversation .
44,276
def validate(path, format='json', approved_applications=None,
             determined=True, listed=True, expectation=PACKAGE_ANY,
             for_appversions=None, overrides=None, timeout=-1,
             compat_test=False, **kw):
    """Perform validation in one easy step!

    Builds an ErrorBundle, loads the approved application/version table
    (from a JSON file path or an already-parsed dict), runs the package
    through submain.prepare_package and returns the formatted result.
    """
    bundle = ErrorBundle(listed=listed, determined=determined,
                         overrides=overrides,
                         for_appversions=for_appversions)
    bundle.save_resource('is_compat_test', compat_test)
    if approved_applications is None:
        # Default to the app_versions.json shipped next to this module.
        approved_applications = os.path.join(os.path.dirname(__file__),
                                             'app_versions.json')
    if isinstance(approved_applications, types.StringTypes):
        # Treated as a path to a JSON file.
        with open(approved_applications) as approved_apps:
            apps = json.load(approved_apps)
    elif isinstance(approved_applications, dict):
        apps = approved_applications
    else:
        raise ValueError('Unknown format for `approved_applications`.')
    # Replace the module-global table in place so existing references see it.
    constants.APPROVED_APPLICATIONS.clear()
    constants.APPROVED_APPLICATIONS.update(apps)
    submain.prepare_package(bundle, path, expectation,
                            for_appversions=for_appversions,
                            timeout=timeout)
    return format_result(bundle, format)
Perform validation in one easy step!
44,277
def system_error(self, msg_id=None, message=None, description=None,
                 validation_timeout=False, exc_info=None, **kw):
    """Report an unexpected validator exception as a front-of-list error.

    When *exc_info* is supplied the exception is logged; a
    ValidationTimeout surfacing anywhere but the dedicated timeout
    handler is re-raised so the caller's handler can deal with it.
    """
    if exc_info:
        if (isinstance(exc_info[1], validator.ValidationTimeout) and
                msg_id != 'validation_timeout'):
            raise exc_info[1]
        log.error('Unexpected error during validation: %s: %s'
                  % (exc_info[0].__name__, exc_info[1]),
                  exc_info=exc_info)
    full_id = ('validator', 'unexpected_exception')
    if msg_id:
        full_id += (msg_id,)
    self.error(full_id,
               message or 'An unexpected error has occurred.',
               description or ('Validation was unable to complete '
                               'successfully due '
                               'to an unexpected error.',
                               'The error has been logged, but please '
                               'consider '
                               'filing an issue report here: '
                               'https://bit.ly/1POrYYU'),
               tier=1, **kw)
    # error() appends; move the new message to the front of the list.
    self.errors.insert(0, self.errors.pop())
Add an error message for an unexpected exception in validator code and move it to the front of the error message list . If exc_info is supplied the error will be logged .
44,278
def drop_message(self, message):
    """Remove *message* from whichever message list currently holds it.

    Also decrements the signing summary counter for the message's
    severity, if it has one.  Returns True when the message was found
    and dropped, False otherwise.
    """
    for list_name in ('errors', 'warnings', 'notices'):
        messages = getattr(self, list_name)
        if message not in messages:
            continue
        messages.remove(message)
        if 'signing_severity' in message:
            self.signing_summary[message['signing_severity']] -= 1
        return True
    return False
Drop the given message object from the appropriate message list .
44,279
def set_tier(self, tier):
    """Record the current tier, raising the ending tier if this one is higher."""
    self.tier = tier
    self.ending_tier = max(self.ending_tier, tier)
Updates the tier and ending tier
44,280
def _save_message(self, stack, type_, message, context=None,
                  from_merge=False):
    """Store *message* in the given message *stack*.

    Tags the message with a uid and context, prefixes file paths with the
    current package stack, filters by app-version applicability, updates
    the compatibility summary and the nested message tree, and prints
    immediately when instant mode is on.
    """
    uid = uuid.uuid4().hex
    message['uid'] = uid
    if context is not None:
        if isinstance(context, tuple):
            message['context'] = context
        else:
            # A ContextGenerator: extract the snippet around line/column.
            message['context'] = (
                context.get_context(line=message['line'],
                                    column=message['column']))
    else:
        message['context'] = None
    if self.package_stack:
        # Inside a subpackage: report the full path of nested packages.
        if not isinstance(message['file'], list):
            message['file'] = [message['file']]
        message['file'] = self.package_stack + message['file']
    if message['for_appversions']:
        if not self.supports_version(message['for_appversions']):
            # Message targets app versions we are not validating against.
            if self.instant:
                print '(Instant error discarded)'
                self._print_message(type_, message, verbose=True)
            return
    elif self.version_requirements:
        # No explicit applicability: inherit the bundle-wide requirements.
        message['for_appversions'] = self.version_requirements
    stack.append(message)
    if message['tier'] is None:
        message['tier'] = self.tier
    if message['compatibility_type'] and not from_merge:
        self.compat_summary['%ss' % message['compatibility_type']] += 1
    if message['id']:
        # Walk/extend the nested message tree along the message id path,
        # bumping the per-type counter at every level.
        tree = self.message_tree
        last_id = None
        for eid in message['id']:
            if last_id is not None:
                tree = tree[last_id]
            if eid not in tree:
                tree[eid] = {'__errors': 0, '__warnings': 0,
                             '__notices': 0, '__messages': []}
            tree[eid]['__%s' % type_] += 1
            last_id = eid
        tree[last_id]['__messages'].append(uid)
    if self.instant:
        self._print_message(type_, message, verbose=True)
Stores a message in the appropriate message stack .
44,281
def failed(self, fail_on_warnings=True):
    """Whether validation failed: any errors, or any warnings when those count."""
    has_errors = bool(self.errors)
    warning_failure = fail_on_warnings and bool(self.warnings)
    return has_errors or warning_failure
Returns a boolean value describing whether the validation succeeded or not .
44,282
def get_resource(self, name):
    """Retrieve an object stored by another test, or False when absent.

    Plain resources take precedence over pushable ones.
    """
    for store in (self.resources, self.pushable_resources):
        if name in store:
            return store[name]
    return False
Retrieves an object that has been stored by another test .
44,283
def save_resource(self, name, resource, pushable=False):
    """Store *resource* under *name* so other tests can retrieve it.

    Pushable resources are saved/cleared when descending into subpackages.
    """
    target = self.pushable_resources if pushable else self.resources
    target[name] = resource
Saves an object such that it can be used by other tests .
44,284
def push_state(self, new_file=''):
    """Save the current error state before descending into a subpackage."""
    self.subpackages.append({
        'detected_type': self.detected_type,
        'message_tree': self.message_tree,
        'resources': self.pushable_resources,
        'metadata': self.metadata,
    })
    # Give the subpackage a clean slate; 'listed' carries over from the
    # parent metadata (read before the reassignment takes effect).
    self.message_tree = {}
    self.pushable_resources = {}
    self.metadata = {'requires_chrome': False,
                     'listed': self.metadata.get('listed'),
                     'validator_version': validator.__version__}
    self.package_stack.append(new_file)
Saves the current error state to parse subpackages
44,285
def pop_state(self):
    """Restore the last saved state and file the subpackage metadata away."""
    state = self.subpackages.pop()
    sub_metadata = self.metadata
    self.detected_type = state['detected_type']
    self.message_tree = state['message_tree']
    self.pushable_resources = state['resources']
    self.metadata = state['metadata']
    name = self.package_stack.pop()
    # Attach the subpackage's metadata to the restored parent metadata.
    self.metadata.setdefault('sub_packages', {})[name] = sub_metadata
Retrieves the last saved state and restores it .
44,286
def render_json(self):
    """Return a JSON summary of the validation operation."""
    type_names = {0: 'unknown', 1: 'extension', 2: 'theme', 3: 'dictionary',
                  4: 'langpack', 5: 'search', 8: 'webapp'}
    # Flatten all three stacks into one list, tagging each message with
    # its type.  Order (errors, warnings, notices) matches the original.
    messages = []
    for label, stack in (('error', self.errors),
                         ('warning', self.warnings),
                         ('notice', self.notices)):
        for msg in stack:
            msg['type'] = label
            messages.append(msg)
    output = {'detected_type': type_names[self.detected_type],
              'ending_tier': self.ending_tier,
              'success': not self.failed(),
              'messages': messages,
              'errors': len(self.errors),
              'warnings': len(self.warnings),
              'notices': len(self.notices),
              'message_tree': self.message_tree,
              'compatibility_summary': self.compat_summary,
              'signing_summary': self.signing_summary,
              'metadata': self.metadata}
    return json.dumps(output)
Returns a JSON summary of the validation operation .
44,287
def print_summary(self, verbose=False, no_color=False):
    """Render a human-readable summary of the validation run.

    Writes colored output (via OutputHandler markup tags) to an in-memory
    buffer and returns the accumulated text.
    """
    types = {0: 'Unknown', 1: 'Extension/Multi-Extension', 2: 'Full Theme',
             3: 'Dictionary', 4: 'Language Pack', 5: 'Search Provider',
             7: 'Subpackage', 8: 'App'}
    detected_type = types[self.detected_type]
    buffer = StringIO()
    self.handler = OutputHandler(buffer, no_color)
    self.handler.write('\n<<GREEN>>Summary:').write(
        '-' * 30).write(
        'Detected type: <<BLUE>>%s' % detected_type).write(
        '-' * 30)
    if self.failed():
        self.handler.write('<<BLUE>>Test failed! Errors:')
        for error in self.errors:
            self._print_message('<<RED>>Error:<<NORMAL>>\t',
                                error, verbose)
        for warning in self.warnings:
            self._print_message('<<YELLOW>>Warning:<<NORMAL>> ',
                                warning, verbose)
    else:
        self.handler.write('<<GREEN>>All tests succeeded!')
    if self.notices:
        for notice in self.notices:
            self._print_message(prefix='<<WHITE>>Notice:<<NORMAL>>\t',
                                message=notice, verbose=verbose)
    # Jetpack details only appear in verbose mode.
    if 'is_jetpack' in self.metadata and verbose:
        self.handler.write('\n')
        self.handler.write('<<GREEN>>Jetpack add-on detected.<<NORMAL>>\n'
                           'Identified files:')
        if 'jetpack_identified_files' in self.metadata:
            for filename, data in (
                    self.metadata['jetpack_identified_files'].items()):
                self.handler.write((' %s\n' % filename) +
                                   (' %s : %s' % data))
        if 'jetpack_unknown_files' in self.metadata:
            self.handler.write('Unknown files:')
            for filename in self.metadata['jetpack_unknown_files']:
                self.handler.write(' %s' % filename)
        self.handler.write('\n')
    if self.unfinished:
        self.handler.write('<<RED>>Validation terminated early')
        self.handler.write('Errors during validation are preventing '
                           'the validation process from completing.')
        self.handler.write('Use the <<YELLOW>>--determined<<NORMAL>> '
                           'flag to ignore these errors.')
        self.handler.write('\n')
    return buffer.getvalue()
Prints a summary of the validation process so far .
44,288
def _flatten_list(self, data):
    """Flatten arbitrarily nested lists/tuples of strings into one string.

    None becomes ''; non-string, non-sequence input falls through and
    implicitly returns None (preserved from the original).
    """
    if data is None:
        return ''
    if isinstance(data, types.StringTypes):
        return data
    if isinstance(data, (list, tuple)):
        return '\n'.join(self._flatten_list(item) for item in data)
Flattens nested lists into strings .
44,289
def _print_message(self, prefix, message, verbose=True):
    """Write one message to the output handler.

    *prefix* carries the colored type label; verbose mode appends the
    description, signing info, tier, file path, position and context.
    """
    output = ['\n', prefix, message['message']]
    if verbose:
        verbose_output = []
        if message['description']:
            verbose_output.append(
                self._flatten_list(message['description']))
        if message.get('signing_severity'):
            verbose_output.append(
                ('\tAutomated signing severity: %s'
                 % message['signing_severity']))
        if message.get('signing_help'):
            verbose_output.append(
                '\tSuggestions for passing automated signing:')
            verbose_output.append(
                self._flatten_list(message['signing_help']))
        verbose_output.append('\tTier:\t%d' % message['tier'])
        files = message['file']
        if files is not None and files != '':
            fmsg = '\tFile:\t%s'
            # A list means the message sits inside nested subpackages.
            if type(files) is list:
                if files[-1] == '':
                    files[-1] = '(none)'
                verbose_output.append(fmsg % ' > '.join(files))
            else:
                verbose_output.append(fmsg % files)
        if message['line']:
            verbose_output.append('\tLine:\t%s' % message['line'])
        if message['column'] and message['column'] != 0:
            verbose_output.append('\tColumn:\t%d' % message['column'])
        if message.get('context'):
            verbose_output.append('\tContext:')
            # None entries in the context render as a divider line.
            verbose_output.extend(
                [('\t> %s' % x if x is not None
                  else '\t>' + ('-' * 20))
                 for x in message['context']])
        output.append('\n')
        output.append('\n'.join(verbose_output))
    self.handler.write(u''.join(map(unicodehelper.decode, output)))
Prints a message and takes care of all sorts of nasty code
44,290
def supports_version(self, guid_set):
    """Whether a for_appversions-style GUID set matches our supported apps.

    Raises when called before install.rdf has populated
    self.supported_versions.
    """
    if self.supported_versions is None:
        raise Exception('Early compatibility test run before install.rdf '
                        'was parsed.')
    return self._compare_version(requirements=guid_set,
                                 support=self.supported_versions)
Returns whether a GUID set in for_appversions format is compatible with the current supported applications list.
44,291
def _compare_version ( self , requirements , support ) : for guid in requirements : if ( guid in support and any ( ( detected_version in requirements [ guid ] ) for detected_version in support [ guid ] ) ) : return True
Return whether there is an intersection between a support applications GUID set and a set of supported applications .
44,292
def discard_unused_messages(self, ending_tier):
    """Drop errors/warnings/notices whose tier is above *ending_tier*.

    The original removed items from each list while iterating it, which
    made the iterator skip the element following every removal — so two
    consecutive over-tier messages left one behind.  Rebuilding each
    list fixes that.
    """
    for stack in (self.errors, self.warnings, self.notices):
        # Slice-assign so external references to the list stay valid.
        stack[:] = [message for message in stack
                    if message['tier'] <= ending_tier]
Delete messages from errors warnings and notices whose tier is greater than the ending tier .
44,293
def migrator(state):
    """Drop tweak1-3 from the Cleverbot state and all of its conversations.

    Tweaks are lost across this migration; the (mutated) state is returned.
    """
    doomed = ('tweak1', 'tweak2', 'tweak3')
    cleverbot_state, conversations = state[0], state[1]
    for key in doomed:
        del cleverbot_state[key]
        for convo in conversations:
            if key in convo:
                del convo[key]
    return state
Tweaks will be lost for Cleverbot and its conversations .
44,294
def migrator(state):
    """Rebuild a Cleverbot from saved state.

    Conversations are recreated from their saved kwargs; nameless
    conversations are lost across this migration.
    """
    cleverbot_kwargs, conversations_kwargs = state
    client = Cleverbot(**cleverbot_kwargs)
    for convo_kwargs in conversations_kwargs:
        client.conversation(**convo_kwargs)
    return client
Nameless conversations will be lost .
44,295
def init_with_uid(self, uid):
    """Initialize from a bare UID; brain/catalog/instance load lazily."""
    self._uid = uid
    self._brain = self._catalog = self._instance = None
Initialize with an UID
44,296
def init_with_brain(self, brain):
    """Initialize from a catalog brain; the full instance loads lazily."""
    self._uid = api.get_uid(brain)
    self._brain = brain
    self._catalog = self.get_catalog_for(brain)
    self._instance = None
Initialize with a catalog brain
44,297
def init_with_instance(self, instance):
    """Initialize from a full content instance; the brain loads lazily."""
    self._uid = api.get_uid(instance)
    self._brain = None
    self._catalog = self.get_catalog_for(instance)
    self._instance = instance
Initialize with an instance object
44,298
def process_value(self, value):
    """Convert a catalog/publication value into a serializable form.

    UIDs and objects become SuperModels; strings are UTF-8 encoded;
    sequences and dicts are processed recursively; callables are called
    and their result processed.  Anything else passes through unchanged.
    """
    if api.is_uid(value):
        return self.to_super_model(value)
    elif api.is_object(value):
        return self.to_super_model(value)
    elif isinstance(value, basestring):
        # Normalize any text to a UTF-8 encoded bytestring.
        return safe_unicode(value).encode("utf-8")
    elif isinstance(value, DateTime):
        return value
    elif isinstance(value, (LazyMap, list, tuple)):
        return map(self.process_value, value)
    elif isinstance(value, (dict)):
        return {k: self.process_value(v) for k, v in value.iteritems()}
    elif safe_callable(value):
        # e.g. catalog metadata accessors: call, then process the result.
        return self.process_value(value())
    return value
Process publication value
44,299
def instance(self):
    """Content instance of the wrapped object, fetched lazily and cached."""
    if self._instance is not None:
        return self._instance
    # Waking up the object is expensive; only do it once.
    logger.debug("SuperModel::instance: *Wakup object*")
    self._instance = api.get_object(self.brain)
    return self._instance
Content instance of the wrapped object