idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
20,900
def getargvalues(frame):
    """Get information about arguments passed into a particular frame.

    Returns a tuple (args, varargs, varkw, locals) for the given frame.
    """
    args, varargs, varkw = getargs(frame.f_code)
    return args, varargs, varkw, frame.f_locals
Get information about arguments passed into a particular frame .
20,901
def strseq(object, convert, join=joinseq):
    """Recursively walk a sequence, stringifying each element."""
    # Lists and tuples recurse element-wise; anything else is converted directly.
    if type(object) in [types.ListType, types.TupleType]:
        return join(map(lambda o, c=convert, j=join: strseq(o, c, j), object))
    return convert(object)
Recursively walk a sequence stringifying each element .
20,902
def formatargspec(args, varargs=None, varkw=None, defaults=None,
                  formatarg=str,
                  formatvarargs=lambda name: '*' + name,
                  formatvarkw=lambda name: '**' + name,
                  formatvalue=lambda value: '=' + repr(value),
                  join=joinseq):
    """Format an argument spec from the 4 values returned by getargspec."""
    specs = []
    if defaults:
        # Defaults align with the *last* len(defaults) positional args.
        firstdefault = len(args) - len(defaults)
    for i in range(len(args)):
        spec = strseq(args[i], formatarg, join)
        if defaults and i >= firstdefault:
            spec = spec + formatvalue(defaults[i - firstdefault])
        specs.append(spec)
    if varargs:
        specs.append(formatvarargs(varargs))
    if varkw:
        specs.append(formatvarkw(varkw))
    return '(' + string.join(specs, ', ') + ')'
Format an argument spec from the 4 values returned by getargspec .
20,903
def formatargvalues(args, varargs, varkw, locals,
                    formatarg=str,
                    formatvarargs=lambda name: '*' + name,
                    formatvarkw=lambda name: '**' + name,
                    formatvalue=lambda value: '=' + repr(value),
                    join=joinseq):
    """Format an argument spec from the 4 values returned by getargvalues."""
    def convert(name, locals=locals, formatarg=formatarg, formatvalue=formatvalue):
        # Render "name=value" using the frame's locals.
        return formatarg(name) + formatvalue(locals[name])
    specs = []
    for i in range(len(args)):
        specs.append(strseq(args[i], convert, join))
    if varargs:
        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
    if varkw:
        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
    return '(' + string.join(specs, ', ') + ')'
Format an argument spec from the 4 values returned by getargvalues .
20,904
def getlineno(frame):
    """Get the line number from a frame object, allowing for optimization."""
    lineno = frame.f_lineno
    code = frame.f_code
    if hasattr(code, 'co_lnotab'):
        # Walk the (addr-delta, line-delta) byte pairs of co_lnotab until
        # the accumulated address passes the frame's last instruction.
        table = code.co_lnotab
        lineno = code.co_firstlineno
        addr = 0
        for i in range(0, len(table), 2):
            addr = addr + ord(table[i])
            if addr > frame.f_lasti:
                break
            lineno = lineno + ord(table[i + 1])
    return lineno
Get the line number from a frame object allowing for optimization .
20,905
def getinnerframes(tb, context=1):
    """Get a list of records for a traceback's frame and all lower frames.

    Each record is (frame,) + the tuple returned by getframeinfo.
    """
    framelist = []
    while tb:
        framelist.append((tb.tb_frame,) + getframeinfo(tb, context))
        tb = tb.tb_next
    return framelist
Get a list of records for a traceback s frame and all lower frames .
20,906
def convert(gr, raw_node):
    """Convert raw node information to a Node or Leaf instance."""
    type, value, context, children = raw_node
    if children or type in gr.number2symbol:
        # Collapse single-child interior nodes straight to the child.
        if len(children) == 1:
            return children[0]
        return Node(type, children, context=context)
    return Leaf(type, value, context=context)
Convert raw node information to a Node or Leaf instance .
20,907
def generate_matches(patterns, nodes):
    """Generator yielding matches for a sequence of patterns and nodes.

    Yields (count, results) pairs where count is how many nodes were
    consumed and results maps pattern names to matched nodes.
    """
    if not patterns:
        yield 0, {}
        return
    head, tail = patterns[0], patterns[1:]
    for c0, r0 in head.generate_matches(nodes):
        if not tail:
            yield c0, r0
            continue
        for c1, r1 in generate_matches(tail, nodes[c0:]):
            merged = {}
            merged.update(r0)
            merged.update(r1)
            yield c0 + c1, merged
Generator yielding matches for a sequence of patterns and nodes .
20,908
def replace(self, new):
    """Replace this node with a new one (or list of nodes) in the parent."""
    assert self.parent is not None, str(self)
    assert new is not None
    if not isinstance(new, list):
        new = [new]
    l_children = []
    found = False
    for ch in self.parent.children:
        if ch is self:
            # Must appear exactly once among the parent's children.
            assert not found, (self.parent.children, self, new)
            if new is not None:
                l_children.extend(new)
            found = True
        else:
            l_children.append(ch)
    assert found, (self.children, self, new)
    self.parent.changed()
    self.parent.children = l_children
    # Re-parent the replacements and detach ourselves.
    for x in new:
        x.parent = self.parent
    self.parent = None
Replace this node with a new one in the parent .
20,909
def get_lineno(self):
    """Return the line number which generated the invocant node.

    Returns None when no leaf can be reached (empty subtree).
    """
    node = self
    # Descend along first children until a Leaf is reached.
    while not isinstance(node, Leaf):
        if not node.children:
            return
        node = node.children[0]
    return node.lineno
Return the line number which generated the invocant node .
20,910
def remove(self):
    """Remove the node from the tree.

    Returns the position of the node in its parent's children before it
    was removed, or None when the node has no parent.
    """
    if self.parent:
        for i, node in enumerate(self.parent.children):
            if node is self:
                # Notify the parent of the mutation before detaching.
                self.parent.changed()
                del self.parent.children[i]
                self.parent = None
                return i
Remove the node from the tree . Returns the position of the node in its parent s children before it was removed .
20,911
def next_sibling(self):
    """The node immediately following the invocant in the parent's children.

    Returns None when there is no parent or no following sibling.
    """
    if self.parent is None:
        return None
    for idx, sibling in enumerate(self.parent.children):
        if sibling is self:
            try:
                return self.parent.children[idx + 1]
            except IndexError:
                return None
The node immediately following the invocant in their parent s children list . If the invocant does not have a next sibling it is None
20,912
def prev_sibling(self):
    """The node immediately preceding the invocant in the parent's children.

    Returns None when there is no parent or no preceding sibling.
    """
    if self.parent is None:
        return None
    for idx, sibling in enumerate(self.parent.children):
        if sibling is self:
            if idx == 0:
                return None
            return self.parent.children[idx - 1]
The node immediately preceding the invocant in their parent s children list . If the invocant does not have a previous sibling it is None .
20,913
def post_order(self):
    """Return a post-order iterator for the tree (children first, then self)."""
    for child in self.children:
        for node in child.post_order():
            yield node
    yield self
Return a post - order iterator for the tree .
20,914
def pre_order(self):
    """Return a pre-order iterator for the tree (self first, then children)."""
    yield self
    for child in self.children:
        for node in child.pre_order():
            yield node
Return a pre - order iterator for the tree .
20,915
def match(self, node, results=None):
    """Does this pattern exactly match a node?

    Returns True if it matches, False if not.  When results is a dict and
    the pattern has a name, the matched node is recorded under that name.
    """
    if self.type is not None and node.type != self.type:
        return False
    if self.content is not None:
        # Run the submatch against a scratch dict so a failed partial
        # match does not pollute the caller's results.
        r = None
        if results is not None:
            r = {}
        if not self._submatch(node, r):
            return False
        if r:
            results.update(r)
    if results is not None and self.name:
        results[self.name] = node
    return True
Does this pattern exactly match a node?
20,916
def generate_matches(self, nodes):
    """Generator yielding all matches for this pattern.

    Yields at most one (1, results) pair: this pattern consumes exactly
    one node when it matches the first node of the sequence.
    """
    r = {}
    if nodes and self.match(nodes[0], r):
        yield 1, r
Generator yielding all matches for this pattern .
20,917
def _submatch ( self , node , results = None ) : if self . wildcards : for c , r in generate_matches ( self . content , node . children ) : if c == len ( node . children ) : if results is not None : results . update ( r ) return True return False if len ( self . content ) != len ( node . children ) : return False for subpattern , child in zip ( self . content , node . children ) : if not subpattern . match ( child , results ) : return False return True
Match the pattern s content to the node s children .
20,918
def optimize(self):
    """Optimize certain stacked wildcard patterns."""
    subpattern = None
    if (self.content is not None
            and len(self.content) == 1
            and len(self.content[0]) == 1):
        subpattern = self.content[0][0]
    if self.min == 1 and self.max == 1:
        # A once-exactly wildcard of nothing is just a plain node pattern.
        if self.content is None:
            return NodePattern(name=self.name)
        if subpattern is not None and self.name == subpattern.name:
            return subpattern.optimize()
    # Collapse nested optional wildcards with compatible names.
    if (self.min <= 1 and isinstance(subpattern, WildcardPattern)
            and subpattern.min <= 1 and self.name == subpattern.name):
        return WildcardPattern(subpattern.content,
                               self.min * subpattern.min,
                               self.max * subpattern.max,
                               subpattern.name)
    return self
Optimize certain stacked wildcard patterns .
20,919
def generate_matches(self, nodes):
    """Generator yielding matches for a sequence of nodes.

    Yields (count, results) pairs where count is the number of nodes
    matched and results maps names to matched sub-sequences.
    """
    if self.content is None:
        # Unrestricted wildcard: any prefix within [min, max] matches.
        for count in xrange(self.min, 1 + min(len(nodes), self.max)):
            r = {}
            if self.name:
                r[self.name] = nodes[:count]
            yield count, r
    elif self.name == "bare_name":
        yield self._bare_name_matches(nodes)
    else:
        # The recursive matcher can blow the stack on deep trees; CPython
        # reports that as RuntimeError, so fall back to the iterative one.
        # Stderr is silenced while recursing to hide spurious warnings.
        if hasattr(sys, "getrefcount"):
            save_stderr = sys.stderr
            sys.stderr = StringIO()
        try:
            for count, r in self._recursive_matches(nodes, 0):
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        except RuntimeError:
            for count, r in self._iterative_matches(nodes):
                if self.name:
                    r[self.name] = nodes[:count]
                yield count, r
        finally:
            if hasattr(sys, "getrefcount"):
                sys.stderr = save_stderr
Generator yielding matches for a sequence of nodes .
20,920
def _iterative_matches(self, nodes):
    """Helper to iteratively yield the matches (avoids deep recursion)."""
    nodelen = len(nodes)
    if 0 >= self.min:
        # The empty match is always available for optional wildcards.
        yield 0, {}
    # Seed with all single-alternative matches.
    results = []
    for alt in self.content:
        for c, r in generate_matches(alt, nodes):
            yield c, r
            results.append((c, r))
    # Breadth-first extension: grow each partial match by one alternative.
    while results:
        new_results = []
        for c0, r0 in results:
            if c0 < nodelen and c0 <= self.max:
                for alt in self.content:
                    for c1, r1 in generate_matches(alt, nodes[c0:]):
                        if c1 > 0:
                            merged = {}
                            merged.update(r0)
                            merged.update(r1)
                            yield c0 + c1, merged
                            new_results.append((c0 + c1, merged))
        results = new_results
Helper to iteratively yield the matches .
20,921
def _bare_name_matches ( self , nodes ) : count = 0 r = { } done = False max = len ( nodes ) while not done and count < max : done = True for leaf in self . content : if leaf [ 0 ] . match ( nodes [ count ] , r ) : count += 1 done = False break r [ self . name ] = nodes [ : count ] return count , r
Special optimized matcher for bare_name .
20,922
def _recursive_matches(self, nodes, count):
    """Helper to recursively yield the matches."""
    assert self.content is not None
    if count >= self.min:
        # Enough repetitions so far: the empty continuation is valid.
        yield 0, {}
    if count < self.max:
        for alt in self.content:
            for c0, r0 in generate_matches(alt, nodes):
                for c1, r1 in self._recursive_matches(nodes[c0:], count + 1):
                    merged = {}
                    merged.update(r0)
                    merged.update(r1)
                    yield c0 + c1, merged
Helper to recursively yield the matches .
20,923
def xreload(mod):
    """Reload a module in place, updating classes, methods and functions.

    Returns True when any change was applied.
    """
    r = Reload(mod)
    r.apply()
    found_change = r.found_change
    r = None  # drop the Reload object before clearing trace caches
    pydevd_dont_trace.clear_trace_filter_cache()
    return found_change
Reload a module in place updating classes methods and functions .
20,924
def _update(self, namespace, name, oldobj, newobj, is_class_namespace=False):
    """Update oldobj, if possible in place, with newobj.

    Dispatches on the type of newobj; errors are reported and swallowed so
    that one failed item does not abort the whole reload.
    """
    try:
        notify_info2('Updating: ', oldobj)
        if oldobj is newobj:
            # Same object (e.g. something imported): nothing to do.
            return
        if type(oldobj) is not type(newobj):
            notify_error('Type of: %s changed... Skipping.' % (oldobj,))
            return
        if isinstance(newobj, types.FunctionType):
            self._update_function(oldobj, newobj)
            return
        if isinstance(newobj, types.MethodType):
            self._update_method(oldobj, newobj)
            return
        if isinstance(newobj, classmethod):
            self._update_classmethod(oldobj, newobj)
            return
        if isinstance(newobj, staticmethod):
            self._update_staticmethod(oldobj, newobj)
            return
        # Old-style classes exist only where types.ClassType is defined (Py2).
        if hasattr(types, 'ClassType'):
            classtype = (types.ClassType, type)
        else:
            classtype = type
        if isinstance(newobj, classtype):
            self._update_class(oldobj, newobj)
            return
        if hasattr(newobj, '__metaclass__') and hasattr(newobj, '__class__') and newobj.__metaclass__ == newobj.__class__:
            self._update_class(oldobj, newobj)
            return
        if namespace is not None:
            # Plain attribute whose value changed: delegate to the user's
            # __xreload_old_new__ hook when one is provided.
            if oldobj != newobj and str(oldobj) != str(newobj) and repr(oldobj) != repr(newobj):
                xreload_old_new = None
                if is_class_namespace:
                    xreload_old_new = getattr(namespace, '__xreload_old_new__', None)
                    if xreload_old_new is not None:
                        self.found_change = True
                        xreload_old_new(name, oldobj, newobj)
                elif '__xreload_old_new__' in namespace:
                    xreload_old_new = namespace['__xreload_old_new__']
                    xreload_old_new(namespace, name, oldobj, newobj)
                    self.found_change = True
    except:
        notify_error('Exception found when updating %s. Proceeding for other items.' % (name,))
        pydev_log.exception()
Update oldobj if possible in place with newobj .
20,925
def _update_function(self, oldfunc, newfunc):
    """Update a function object in place and return it."""
    oldfunc.__doc__ = newfunc.__doc__
    oldfunc.__dict__.update(newfunc.__dict__)
    # Py3 exposes __code__; very old Py2 used func_code.
    try:
        newfunc.__code__
        attr_name = '__code__'
    except AttributeError:
        newfunc.func_code
        attr_name = 'func_code'
    old_code = getattr(oldfunc, attr_name)
    new_code = getattr(newfunc, attr_name)
    if not code_objects_equal(old_code, new_code):
        notify_info0('Updated function code:', oldfunc)
        setattr(oldfunc, attr_name, new_code)
        self.found_change = True
    try:
        oldfunc.__defaults__ = newfunc.__defaults__
    except AttributeError:
        oldfunc.func_defaults = newfunc.func_defaults
    return oldfunc
Update a function object .
20,926
def _update_method ( self , oldmeth , newmeth ) : if hasattr ( oldmeth , 'im_func' ) and hasattr ( newmeth , 'im_func' ) : self . _update ( None , None , oldmeth . im_func , newmeth . im_func ) elif hasattr ( oldmeth , '__func__' ) and hasattr ( newmeth , '__func__' ) : self . _update ( None , None , oldmeth . __func__ , newmeth . __func__ ) return oldmeth
Update a method object .
20,927
def _update_class(self, oldclass, newclass):
    """Update a class object in place."""
    olddict = oldclass.__dict__
    newdict = newclass.__dict__
    oldnames = set(olddict)
    newnames = set(newdict)
    # Brand-new attributes are copied over wholesale.
    for name in newnames - oldnames:
        setattr(oldclass, name, newdict[name])
        notify_info0('Added:', name, 'to', oldclass)
        self.found_change = True
    # Shared attributes are updated recursively.
    for name in (oldnames & newnames) - set(['__dict__', '__doc__']):
        self._update(oldclass, name, olddict[name], newdict[name], is_class_namespace=True)
    old_bases = getattr(oldclass, '__bases__', None)
    new_bases = getattr(newclass, '__bases__', None)
    if str(old_bases) != str(new_bases):
        notify_error('Changing the hierarchy of a class is not supported. %s may be inconsistent.' % (oldclass,))
    self._handle_namespace(oldclass, is_class_namespace=True)
Update a class object .
20,928
def _update_classmethod ( self , oldcm , newcm ) : self . _update ( None , None , oldcm . __get__ ( 0 ) , newcm . __get__ ( 0 ) )
Update a classmethod .
20,929
def _update_staticmethod ( self , oldsm , newsm ) : self . _update ( None , None , oldsm . __get__ ( 0 ) , newsm . __get__ ( 0 ) )
Update a staticmethod .
20,930
def make_save_locals_impl():
    """Factory for the save_locals_impl method.

    This may seem like a complicated pattern, but it is essential that the
    implementation is chosen at module load time: inner imports performed
    later could deadlock the debugger because the importer lock and the
    debugger lock may be taken in different order on different threads.
    Returns a callable taking a frame, or None when no mechanism exists.
    """
    # PyPy exposes a dedicated builtin for writing locals back to the frame.
    try:
        if '__pypy__' in sys.builtin_module_names:
            import __pypy__
            save_locals = __pypy__.locals_to_fast
    except:
        pass
    else:
        if '__pypy__' in sys.builtin_module_names:
            def save_locals_pypy_impl(frame):
                save_locals(frame)
            return save_locals_pypy_impl
    # CPython: use the (undocumented) C API via ctypes.
    try:
        import ctypes
        locals_to_fast = ctypes.pythonapi.PyFrame_LocalsToFast
    except:
        pass
    else:
        def save_locals_ctypes_impl(frame):
            locals_to_fast(ctypes.py_object(frame), ctypes.c_int(0))
        return save_locals_ctypes_impl
    return None
Factory for the save_locals_impl method . This may seem like a complicated pattern but it is essential that the method is created at module load time . Inner imports after module load time would cause an occasional debugger deadlock due to the importer lock and debugger lock being taken in different order in different threads .
20,931
def Assign(target, source):
    """Build an assignment statement node: target = source."""
    if not isinstance(target, list):
        target = [target]
    if not isinstance(source, list):
        # A single source gets a leading space so it reads "x = y".
        source.prefix = u" "
        source = [source]
    return Node(syms.atom,
                target + [Leaf(token.EQUAL, u"=", prefix=u" ")] + source)
Build an assignment statement
20,932
def Call(func_name, args=None, prefix=None):
    """Build a function call node: func_name(args)."""
    node = Node(syms.power, [func_name, ArgList(args)])
    if prefix is not None:
        node.prefix = prefix
    return node
A function call
20,933
def Subscript(index_node):
    """Build a subscript trailer node: [index_node]."""
    return Node(syms.trailer, [Leaf(token.LBRACE, u"["),
                               index_node,
                               Leaf(token.RBRACE, u"]")])
A numeric or string subscript
20,934
def is_tuple(node):
    """Does the node represent a tuple literal?"""
    # The empty tuple is just "()".
    if isinstance(node, Node) and node.children == [LParen(), RParen()]:
        return True
    # Otherwise expect exactly: "(" <contents> ")".
    return (isinstance(node, Node)
            and len(node.children) == 3
            and isinstance(node.children[0], Leaf)
            and isinstance(node.children[1], Node)
            and isinstance(node.children[2], Leaf)
            and node.children[0].value == u"("
            and node.children[2].value == u")")
Does the node represent a tuple literal?
20,935
def is_list(node):
    """Does the node represent a list literal?"""
    # Expect "[" ... "]" with at least the two bracket leaves.
    return (isinstance(node, Node)
            and len(node.children) > 1
            and isinstance(node.children[0], Leaf)
            and isinstance(node.children[-1], Leaf)
            and node.children[0].value == u"["
            and node.children[-1].value == u"]")
Does the node represent a list literal?
20,936
def attr_chain(obj, attr):
    """Follow an attribute chain.

    Starting from obj, repeatedly read the attribute named attr and yield
    each value until a falsy one (e.g. None) terminates the chain.  The
    starting object itself is not yielded.
    """
    # Renamed the local from "next" to avoid shadowing the builtin next().
    link = getattr(obj, attr)
    while link:
        yield link
        link = getattr(link, attr)
Follow an attribute chain .
20,937
def is_probably_builtin(node):
    """Check that something isn't an attribute or function name etc."""
    prev = node.prev_sibling
    if prev is not None and prev.type == token.DOT:
        # Attribute lookup, not a bare name.
        return False
    parent = node.parent
    if parent.type in (syms.funcdef, syms.classdef):
        return False
    if parent.type == syms.expr_stmt and parent.children[0] is node:
        # The name is being assigned to.
        return False
    if parent.type == syms.parameters or (
            parent.type == syms.typedargslist and (
                (prev is not None and prev.type == token.COMMA)
                or parent.children[0] is node)):
        # It's a parameter name in a function definition.
        return False
    return True
Check that something isn t an attribute or function name etc .
20,938
def find_root(node):
    """Find the top level namespace (the file_input node)."""
    # Walk up the parent chain until the file_input root is reached.
    while node.type != syms.file_input:
        node = node.parent
        if not node:
            raise ValueError("root found before file_input node was found.")
    return node
Find the top level namespace .
20,939
def does_tree_import(package, name, node):
    """Returns true if name is imported from package at the top level of
    the tree which node belongs to.

    To cover the case of "import foo", use None for the package and "foo"
    for the name.
    """
    binding = find_binding(name, find_root(node), package)
    return bool(binding)
Returns true if name is imported from package at the top level of the tree which node belongs to . To cover the case of an import like import foo use None for the package and foo for the name .
20,940
def touch_import(package, name, node):
    """Works like does_tree_import, but adds an import statement if it was
    not imported yet."""
    def is_import_stmt(node):
        return (node.type == syms.simple_stmt and node.children
                and is_import(node.children[0]))

    root = find_root(node)
    if does_tree_import(package, name, root):
        return

    # Figure out where to insert the new import: right after the last
    # existing import in the first contiguous import block.
    insert_pos = offset = 0
    for idx, node in enumerate(root.children):
        if not is_import_stmt(node):
            continue
        for offset, node2 in enumerate(root.children[idx:]):
            if not is_import_stmt(node2):
                break
        insert_pos = idx + offset
        break

    # No imports at all: insert after the module docstring, if any.
    if insert_pos == 0:
        for idx, node in enumerate(root.children):
            if (node.type == syms.simple_stmt and node.children
                    and node.children[0].type == token.STRING):
                insert_pos = idx + 1
                break

    if package is None:
        import_ = Node(syms.import_name, [
            Leaf(token.NAME, u"import"),
            Leaf(token.NAME, name, prefix=u" ")])
    else:
        import_ = FromImport(package, [Leaf(token.NAME, name, prefix=u" ")])

    children = [import_, Newline()]
    root.insert_child(insert_pos, Node(syms.simple_stmt, children))
Works like does_tree_import but adds an import statement if it was not imported .
20,941
def find_binding(name, node, package=None):
    """Returns the node which binds variable name, otherwise None.

    If optional argument package is supplied, only imports will be
    returned.
    """
    for child in node.children:
        ret = None
        if child.type == syms.for_stmt:
            if _find(name, child.children[1]):
                return child
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n:
                ret = n
        elif child.type in (syms.if_stmt, syms.while_stmt):
            n = find_binding(name, make_suite(child.children[-1]), package)
            if n:
                ret = n
        elif child.type == syms.try_stmt:
            n = find_binding(name, make_suite(child.children[2]), package)
            if n:
                ret = n
            else:
                # Scan the except/else/finally clauses: each suite follows
                # a COLON leaf (i enumerates from child index 3, so the
                # suite after the colon is at absolute index i + 4).
                for i, kid in enumerate(child.children[3:]):
                    if kid.type == token.COLON and kid.value == ":":
                        n = find_binding(name, make_suite(child.children[i + 4]), package)
                        if n:
                            ret = n
        elif child.type in _def_syms and child.children[1].value == name:
            ret = child
        elif _is_import_binding(child, name, package):
            ret = child
        elif child.type == syms.simple_stmt:
            ret = find_binding(name, child, package)
        elif child.type == syms.expr_stmt:
            if _find(name, child.children[0]):
                ret = child
        if ret:
            # When a package filter is given, only import bindings count.
            if not package:
                return ret
            if is_import(ret):
                return ret
    return None
Returns the node which binds variable name otherwise None . If optional argument package is supplied only imports will be returned . See test cases for examples .
20,942
def execute_console_command(frame, thread_id, frame_id, line, buffer_output=True):
    """Fetch an interactive console instance from the cache and push the
    received command to the console.

    Creates and returns a ConsoleMessage carrying the console output.
    """
    console_message = ConsoleMessage()
    interpreter = get_interactive_console(thread_id, frame_id, frame, console_message)
    more, output_messages, error_messages = interpreter.push(line, frame, buffer_output)
    console_message.update_more(more)
    for message in output_messages:
        console_message.add_console_message(CONSOLE_OUTPUT, message)
    for message in error_messages:
        console_message.add_console_message(CONSOLE_ERROR, message)
    return console_message
fetch an interactive console instance from the cache and push the received command to the console .
20,943
def add_console_message(self, message_type, message):
    """Add messages to the console_messages list, one entry per
    non-blank line of the given message."""
    for line in message.split("\n"):
        if line.strip():
            self.console_messages.append((message_type, line))
add messages in the console_messages list
20,944
def push(self, line, frame, buffer_output=True):
    """Change built-in stdout and stderr methods by the new custom
    IOBuf objects, execute add_exec, then restore the original streams.

    Returns (more, stdout_lines, stderr_lines).
    """
    self.__buffer_output = buffer_output
    more = False
    if buffer_output:
        original_stdout = sys.stdout
        original_stderr = sys.stderr
    try:
        try:
            self.frame = frame
            if buffer_output:
                out = sys.stdout = IOBuf()
                err = sys.stderr = IOBuf()
            more = self.add_exec(line)
        except Exception:
            exc = get_exception_traceback_str()
            if buffer_output:
                err.buflist.append("Internal Error: %s" % (exc,))
            else:
                sys.stderr.write("Internal Error: %s\n" % (exc,))
    finally:
        # Drop frame references and restore the original streams.
        self.frame = None
        frame = None
        if buffer_output:
            sys.stdout = original_stdout
            sys.stderr = original_stderr
    if buffer_output:
        return more, out.buflist, err.buflist
    return more, [], []
Change built - in stdout and stderr methods by the new custom StdMessage . execute the InteractiveConsole . push . Change the stdout and stderr back be the original built - ins
20,945
def to_dict(self):
    """Convert a structure into a Python native type."""
    ctx = Context()
    ContextFlags = self.ContextFlags
    ctx['ContextFlags'] = ContextFlags
    # Copy only the register groups selected by ContextFlags.
    if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS:
        for key in self._ctx_debug:
            ctx[key] = getattr(self, key)
    if (ContextFlags & CONTEXT_FLOATING_POINT) == CONTEXT_FLOATING_POINT:
        ctx['FloatSave'] = self.FloatSave.to_dict()
    if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS:
        for key in self._ctx_segs:
            ctx[key] = getattr(self, key)
    if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER:
        for key in self._ctx_int:
            ctx[key] = getattr(self, key)
    if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL:
        for key in self._ctx_ctrl:
            ctx[key] = getattr(self, key)
    if (ContextFlags & CONTEXT_EXTENDED_REGISTERS) == CONTEXT_EXTENDED_REGISTERS:
        er = [self.ExtendedRegisters[index]
              for index in compat.xrange(0, MAXIMUM_SUPPORTED_EXTENSION)]
        ctx['ExtendedRegisters'] = tuple(er)
    return ctx
Convert a structure into a Python native type .
20,946
def _get_normal_name ( orig_enc ) : enc = orig_enc [ : 12 ] . lower ( ) . replace ( "_" , "-" ) if enc == "utf-8" or enc . startswith ( "utf-8-" ) : return "utf-8" if enc in ( "latin-1" , "iso-8859-1" , "iso-latin-1" ) or enc . startswith ( ( "latin-1-" , "iso-8859-1-" , "iso-latin-1-" ) ) : return "iso-8859-1" return orig_enc
Imitates get_normal_name in tokenizer . c .
20,947
def get_lines(command):
    """Run a command and return its output as stripped, decoded lines."""
    stdout = get_output(command)
    return [line.strip().decode('utf-8') for line in stdout.splitlines()]
Run a command and return lines of output
20,948
def git_hook(strict=False):
    """Git pre-commit hook to check staged files for isort errors.

    Returns the error count when strict, otherwise always 0.
    """
    diff_cmd = "git diff-index --cached --name-only --diff-filter=ACMRTUXB HEAD"
    files_modified = get_lines(diff_cmd)
    errors = 0
    for filename in files_modified:
        if filename.endswith('.py'):
            # Check the staged contents, not the working-tree file.
            staged_cmd = "git show :%s" % filename
            staged_contents = get_output(staged_cmd)
            sort = SortImports(file_path=filename,
                               file_contents=staged_contents.decode(),
                               check=True)
            if sort.incorrectly_sorted:
                errors += 1
    return errors if strict else 0
Git pre - commit hook to check staged files for isort errors
20,949
def send_header(self, keyword, value):
    """Send a MIME header."""
    # HTTP/0.9 has no headers at all.
    if self.request_version != 'HTTP/0.9':
        self.wfile.write("%s: %s\r\n" % (keyword, value))
    # Track keep-alive state from any Connection header sent.
    if keyword.lower() == 'connection':
        if value.lower() == 'close':
            self.close_connection = 1
        elif value.lower() == 'keep-alive':
            self.close_connection = 0
Send a MIME header .
20,950
def diff_texts(a, b, filename):
    """Return a unified diff of two strings, labelled original/refactored."""
    return difflib.unified_diff(a.splitlines(), b.splitlines(),
                                filename, filename,
                                "(original)", "(refactored)",
                                lineterm="")
Return a unified diff of two strings .
20,951
def create_inputhook_qt5(mgr, app=None):
    """Create an input hook for running the Qt5 application event loop.

    Returns (app, inputhook) where inputhook pumps Qt events while the
    interpreter waits for terminal input.
    """
    # Reuse an existing QApplication, creating one only as a last resort.
    if app is None:
        app = QtCore.QCoreApplication.instance()
    if app is None:
        from PyQt5 import QtWidgets
        app = QtWidgets.QApplication([" "])

    ip = InteractiveShell.instance()
    if hasattr(ip, '_inputhook_qt5'):
        # Already installed for this shell: hand back the cached hook.
        return app, ip._inputhook_qt5

    def inputhook_qt5():
        try:
            allow_CTRL_C()
            app = QtCore.QCoreApplication.instance()
            if not app:
                return 0
            app.processEvents(QtCore.QEventLoop.AllEvents, 300)
            if not stdin_ready():
                # Poll stdin with a timer-driven local event loop so Qt
                # stays responsive while we wait for input.
                timer = QtCore.QTimer()
                event_loop = QtCore.QEventLoop()
                timer.timeout.connect(event_loop.quit)
                while not stdin_ready():
                    timer.start(50)
                    event_loop.exec_()
                    timer.stop()
        except KeyboardInterrupt:
            global got_kbdint, sigint_timer
            ignore_CTRL_C()
            got_kbdint = True
            mgr.clear_inputhook()
            if os.name == 'posix':
                # Re-deliver SIGINT shortly so the shell prints its prompt.
                pid = os.getpid()
                if not sigint_timer:
                    sigint_timer = threading.Timer(.01, os.kill,
                                                   args=[pid, signal.SIGINT])
                    sigint_timer.start()
            else:
                print("\nKeyboardInterrupt - Ctrl-C again for new prompt")
        except:
            ignore_CTRL_C()
            from traceback import print_exc
            print_exc()
            print("Got exception from inputhook_qt5, unregistering.")
            mgr.clear_inputhook()
        finally:
            allow_CTRL_C()
        return 0

    def preprompthook_qt5(ishell):
        global got_kbdint, sigint_timer
        if sigint_timer:
            sigint_timer.cancel()
            sigint_timer = None
        if got_kbdint:
            mgr.set_inputhook(inputhook_qt5)
        got_kbdint = False

    ip._inputhook_qt5 = inputhook_qt5
    ip.set_hook('pre_prompt_hook', preprompthook_qt5)
    return app, inputhook_qt5
Create an input hook for running the Qt5 application event loop .
20,952
def from_dict(cls, ctx):
    """Instance a new structure from a Python native type.

    Mirrors to_dict: only the register groups selected by ContextFlags
    are copied from the ctx mapping into the new structure.
    """
    ctx = Context(ctx)
    s = cls()
    ContextFlags = ctx['ContextFlags']
    s.ContextFlags = ContextFlags
    for key in cls._others:
        if key != 'VectorRegister':
            setattr(s, key, ctx[key])
        else:
            # Split each 128-bit integer into an M128A (High/Low halves).
            w = ctx[key]
            v = (M128A * len(w))()
            i = 0
            for x in w:
                y = M128A()
                y.High = x >> 64
                y.Low = x - (x >> 64)
                v[i] = y
                i += 1
            setattr(s, key, v)
    if (ContextFlags & CONTEXT_CONTROL) == CONTEXT_CONTROL:
        for key in cls._control:
            setattr(s, key, ctx[key])
    if (ContextFlags & CONTEXT_INTEGER) == CONTEXT_INTEGER:
        for key in cls._integer:
            setattr(s, key, ctx[key])
    if (ContextFlags & CONTEXT_SEGMENTS) == CONTEXT_SEGMENTS:
        for key in cls._segments:
            setattr(s, key, ctx[key])
    if (ContextFlags & CONTEXT_DEBUG_REGISTERS) == CONTEXT_DEBUG_REGISTERS:
        for key in cls._debug:
            setattr(s, key, ctx[key])
    if (ContextFlags & CONTEXT_MMX_REGISTERS) == CONTEXT_MMX_REGISTERS:
        xmm = s.FltSave.xmm
        for key in cls._mmx:
            # BUG FIX: previously this loop reused the stale `x` left over
            # from the VectorRegister loop above, so every MMX register got
            # the same value.  Read the per-key value as intended.
            x = ctx[key]
            y = M128A()
            # NOTE(review): `x - (x >> 64)` matches the VectorRegister
            # branch above but looks suspect as a low-64-bit extraction —
            # confirm against the producing to_dict side.
            y.High = x >> 64
            y.Low = x - (x >> 64)
            setattr(xmm, key, y)
    return s
Instance a new structure from a Python native type .
20,953
def do_exit(*args):
    """Force the whole process to exit.

    We have to override the exit because calling sys.exit will only
    actually exit the main thread, and as we're in an XML-RPC server
    that won't work.
    """
    try:
        # Under Jython, use the JVM-level exit.
        import java.lang.System
        java.lang.System.exit(1)
    except ImportError:
        if len(args) == 1:
            os._exit(args[0])
        else:
            os._exit(0)
We have to override the exit because calling sys . exit will only actually exit the main thread and as we re in a Xml - rpc server that won t work .
20,954
def console_exec(thread_id, frame_id, expression, dbg):
    """Execute an expression in the given frame's context.

    Returns False in case the expression is partially correct (or has
    been executed), True when more input is needed.
    """
    frame = dbg.find_frame(thread_id, frame_id)
    is_multiline = expression.count('@LINE@') > 1
    expression = str(expression.replace('@LINE@', '\n'))
    # Merge globals and locals so the expression sees both scopes.
    updated_globals = {}
    updated_globals.update(frame.f_globals)
    updated_globals.update(frame.f_locals)
    if IPYTHON:
        need_more = exec_code(CodeFragment(expression), updated_globals, frame.f_locals, dbg)
        if not need_more:
            pydevd_save_locals.save_locals(frame)
        return need_more
    interpreter = ConsoleWriter()
    if not is_multiline:
        try:
            code = compile_command(expression)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1: definitely wrong input.
            interpreter.showsyntaxerror()
            return False
        if code is None:
            # Case 2: incomplete input — ask for more.
            return True
    else:
        code = expression
    # Case 3: complete input — execute it.
    try:
        Exec(code, updated_globals, frame.f_locals)
    except SystemExit:
        raise
    except:
        interpreter.showtraceback()
    else:
        pydevd_save_locals.save_locals(frame)
    return False
returns False in case expression is partially correct
20,955
def CustomAddressIterator(memory_map, condition):
    """Generator that iterates through a memory map, yielding every
    address of each region block accepted by the given condition."""
    for mbi in memory_map:
        if condition(mbi):
            address = mbi.BaseAddress
            max_addr = address + mbi.RegionSize
            while address < max_addr:
                yield address
                address = address + 1
Generator function that iterates through a memory map filtering memory region blocks by any given condition .
20,956
def pageSize(cls):
    """Try to get the pageSize value on runtime.

    Falls back to the conventional 4 KiB page when the Win32 query is
    unavailable or fails.
    """
    try:
        try:
            pageSize = win32.GetSystemInfo().dwPageSize
        except WindowsError:
            pageSize = 0x1000
    except NameError:
        # Not on Windows at all (win32 / WindowsError missing).
        pageSize = 0x1000
    # Cache on the class so later lookups skip this probe.
    cls.pageSize = pageSize
    return pageSize
Try to get the pageSize value on runtime .
20,957
def get_buffer_size_in_pages(cls, address, size):
    """Get the number of pages in use by the given buffer."""
    if size < 0:
        # Negative size means the buffer extends backwards from address.
        size = -size
        address = address - size
    begin, end = cls.align_address_range(address, address + size)
    return int(float(end - begin) / float(cls.pageSize))
Get the number of pages in use by the given buffer .
20,958
def do_ranges_intersect(begin, end, old_begin, old_end):
    """Determine if the two given memory address ranges intersect.

    Ranges are half-open: [begin, end) against [old_begin, old_end).
    """
    # Either range starts inside the other, or one contains the other's edge.
    return ((old_begin <= begin < old_end)
            or (old_begin < end <= old_end)
            or (begin <= old_begin < end)
            or (begin < old_end <= end))
Determine if the two given memory address ranges intersect .
20,959
def clear_bp(cls, ctx, register):
    """Clears a hardware breakpoint.

    Disables the slot's bits in Dr7 and zeroes its address register.
    """
    ctx['Dr7'] &= cls.clearMask[register]
    ctx['Dr%d' % register] = 0
Clears a hardware breakpoint .
20,960
def set_bp ( cls , ctx , register , address , trigger , watch ) : Dr7 = ctx [ 'Dr7' ] Dr7 |= cls . enableMask [ register ] orMask , andMask = cls . triggerMask [ register ] [ trigger ] Dr7 &= andMask Dr7 |= orMask orMask , andMask = cls . watchMask [ register ] [ watch ] Dr7 &= andMask Dr7 |= orMask ctx [ 'Dr7' ] = Dr7 ctx [ 'Dr%d' % register ] = address
Sets a hardware breakpoint .
20,961
def find_slot(cls, ctx):
    """Finds an empty slot to set a hardware breakpoint.

    Returns the first slot index whose enable bits are clear in Dr7,
    or None when all slots are occupied.
    """
    Dr7 = ctx['Dr7']
    for slot, mask in enumerate(cls.enableMask):
        if (Dr7 & mask) == 0:
            return slot
    return None
Finds an empty slot to set a hardware breakpoint .
20,962
def run(self, graminit_h, graminit_c):
    """Load the grammar tables from the text files written by pgen."""
    self.parse_graminit_h(graminit_h)
    self.parse_graminit_c(graminit_c)
    self.finish_off()
Load the grammar tables from the text files written by pgen .
20,963
def complete_from_dir(directory):
    """This is necessary so that we get the imports from the same directory
    where the file we are completing is located."""
    global currDirModule
    # Drop the previously-registered directory before installing the new one.
    if currDirModule is not None:
        if len(sys.path) > 0 and sys.path[0] == currDirModule:
            del sys.path[0]
    currDirModule = directory
    sys.path.insert(0, directory)
This is necessary so that we get the imports from the same directory where the file we are completing is located .
20,964
def handle_request(self):
    """Handle one request, blocking for at most the configured timeout.

    The effective timeout is the smaller of the socket's own timeout
    and self.timeout; when nothing becomes readable in time,
    handle_timeout() is called instead of processing a request.
    """
    timeout = self.socket.gettimeout()
    if timeout is None:
        timeout = self.timeout
    elif self.timeout is not None:
        timeout = min(timeout, self.timeout)
    ready, _, _ = select.select([self], [], [], timeout)
    if not ready:
        self.handle_timeout()
        return
    self._handle_request_noblock()
Handle one request possibly blocking .
20,965
def handle_error(self, request, client_address):
    """Report an exception raised while processing a request.

    Prints a banner and a traceback to stdout and carries on; may be
    overridden by subclasses for custom error reporting.
    """
    # NOTE: Python 2 print statements — this module targets Python 2.
    print '-' * 40
    print 'Exception happened during processing of request from',
    print client_address
    import traceback
    traceback.print_exc()
    print '-' * 40
Handle an error gracefully . May be overridden .
20,966
def collect_children(self):
    """Internal routine to reap child processes that have exited.

    While the number of live children is at or above max_children,
    blocks until some child exits; afterwards polls the remaining
    children without blocking and forgets the finished ones.
    """
    if self.active_children is None:
        # No child has ever been forked; nothing to reap.
        return
    while len(self.active_children) >= self.max_children:
        try:
            # Block until any child exits, to get back under the limit.
            pid, status = os.waitpid(0, 0)
        except os.error:
            pid = None
        if pid not in self.active_children:
            continue
        self.active_children.remove(pid)
    # NOTE(review): removing entries from the list being iterated below
    # can skip the element after each removal — TODO confirm intended.
    for child in self.active_children:
        try:
            # WNOHANG: poll without blocking; pid is 0 while still running.
            pid, status = os.waitpid(child, os.WNOHANG)
        except os.error:
            pid = None
        if not pid:
            continue
        try:
            self.active_children.remove(pid)
        except ValueError, e:  # Python 2 except syntax
            raise ValueError('%s. x=%d and list=%r' % (e.message, pid,
                                                       self.active_children))
Internal routine to wait for children that have exited .
20,967
def process_request(self, request, client_address):
    """Fork a child process to handle the request.

    The parent records the child's pid and closes its copy of the
    request; the child services the request and terminates via
    os._exit so it never returns into the server loop.
    """
    self.collect_children()  # reap finished children first
    pid = os.fork()
    if pid:
        # Parent process: remember the child and drop our request handle.
        if self.active_children is None:
            self.active_children = []
        self.active_children.append(pid)
        self.close_request(request)
        return
    else:
        # Child process: serve the request, then _exit (0 = success,
        # 1 = error) without running parent cleanup handlers.
        try:
            self.finish_request(request, client_address)
            self.shutdown_request(request)
            os._exit(0)
        except:
            try:
                self.handle_error(request, client_address)
                self.shutdown_request(request)
            finally:
                os._exit(1)
Fork a new subprocess to process the request .
20,968
def process_request_thread(self, request, client_address):
    """Same as in BaseServer, but runs in this worker's thread.

    On failure the exception is routed through handle_error; the
    request is shut down on both the success and the failure path.
    """
    try:
        self.finish_request(request, client_address)
        self.shutdown_request(request)
    except:
        # Bare except on purpose: any error is reported, never raised
        # out of the thread.
        self.handle_error(request, client_address)
        self.shutdown_request(request)
Same as in BaseServer but as a thread .
20,969
def get_options():
    """Return acceptable Qt APIs in decreasing order of preference.

    An already-loaded binding wins outright; old matplotlib versions
    force the default PyQt API; when QT_API is unset, matplotlib's
    preference (or a built-in default list) is used. Returns None when
    QT_API is set, leaving the choice to the environment.
    """
    loaded = loaded_api()
    if loaded is not None:
        return [loaded]

    mpl = sys.modules.get('matplotlib')
    if mpl is not None and not check_version(mpl.__version__, '1.0.2'):
        # matplotlib older than 1.0.2 only supports the default PyQt API.
        return [QT_API_PYQT_DEFAULT]

    if os.environ.get('QT_API') is None:
        fallback = [QT_API_PYQT_DEFAULT, QT_API_PYSIDE, QT_API_PYQT5]
        return matplotlib_options(mpl) or fallback

    # QT_API is set: honour it elsewhere; nothing to suggest here.
    return None
Return a list of acceptable QT APIs in decreasing order of preference
20,970
def tabs_or_spaces(physical_line, indent_char):
    r"""Never mix tabs and spaces for indentation.

    E101: indentation contains mixed spaces and tabs
    """
    indent = INDENT_REGEX.match(physical_line).group(1)
    mismatch = next(
        (i for i, ch in enumerate(indent) if ch != indent_char), None)
    if mismatch is not None:
        return mismatch, "E101 indentation contains mixed spaces and tabs"
Never mix tabs and spaces.
20,971
def tabs_obsolete(physical_line):
    r"""For new projects, spaces-only indentation is strongly recommended.

    W191: indentation contains tabs
    """
    indent = INDENT_REGEX.match(physical_line).group(1)
    tab_pos = indent.find('\t')
    if tab_pos != -1:
        return tab_pos, "W191 indentation contains tabs"
r For new projects spaces - only are strongly recommended over tabs .
20,972
def trailing_whitespace(physical_line):
    r"""Trailing whitespace is superfluous.

    W291: trailing whitespace on a non-blank line
    W293: whitespace on an otherwise blank line
    """
    # Strip the line terminator (and a stray form feed) before checking.
    for terminator in ('\n', '\r', '\x0c'):
        physical_line = physical_line.rstrip(terminator)
    stripped = physical_line.rstrip(' \t\v')
    if stripped != physical_line:
        if stripped:
            return len(stripped), "W291 trailing whitespace"
        return 0, "W293 blank line contains whitespace"
r Trailing whitespace is superfluous .
20,973
def trailing_blank_lines(physical_line, lines, line_number, total_lines):
    r"""Trailing blank lines are superfluous; files should end with a newline.

    W391: blank line at end of file
    W292: no newline at end of file
    """
    if line_number != total_lines:
        # Only the very last physical line can trigger these checks.
        return
    stripped_last_line = physical_line.rstrip()
    if not stripped_last_line:
        return 0, "W391 blank line at end of file"
    if stripped_last_line == physical_line:
        # No terminator was stripped, so the file lacks a final newline.
        return len(physical_line), "W292 no newline at end of file"
r Trailing blank lines are superfluous .
20,974
def maximum_line_length(physical_line, max_line_length, multiline, noqa):
    r"""Limit all lines to a maximum of max_line_length characters.

    E501: line too long

    Long unsplittable chunks (e.g. a URL in a comment or docstring) are
    tolerated; on Python 2 the length is measured in decoded characters.
    """
    line = physical_line.rstrip()
    length = len(line)
    if length <= max_line_length or noqa:
        return
    chunks = line.split()
    unsplittable = ((len(chunks) == 1 and multiline) or
                    (len(chunks) == 2 and chunks[0] == '#'))
    if unsplittable and length - len(chunks[-1]) < max_line_length - 7:
        return
    if hasattr(line, 'decode'):  # Python 2: count characters, not bytes
        try:
            length = len(line.decode('utf-8'))
        except UnicodeError:
            pass
    if length > max_line_length:
        return (max_line_length, "E501 line too long "
                "(%d > %d characters)" % (length, max_line_length))
r Limit all lines to a maximum of 79 characters .
20,975
def blank_lines(logical_line, blank_lines, indent_level, line_number,
                blank_before, previous_logical,
                previous_unindented_logical_line, previous_indent_level,
                lines):
    r"""Separate top-level function and class definitions with two blank lines.

    E301: expected 1 blank line, found 0
    E302: expected 2 blank lines
    E303: too many blank lines
    E304: blank lines found after function decorator
    E305: expected 2 blank lines after class or function definition
    E306: expected 1 blank line before a nested definition
    """
    if line_number < 3 and not previous_logical:
        # Don't expect blank lines before the first line of the file.
        return
    if previous_logical.startswith('@'):
        if blank_lines:
            yield 0, "E304 blank lines found after function decorator"
    elif blank_lines > 2 or (indent_level and blank_lines == 2):
        yield 0, "E303 too many blank lines (%d)" % blank_lines
    elif STARTSWITH_TOP_LEVEL_REGEX.match(logical_line):
        if indent_level:
            if not (blank_before or previous_indent_level < indent_level or
                    DOCSTRING_REGEX.match(previous_logical)):
                # Walk backwards to find out whether this def is nested
                # inside another def (E306) or is a plain method (E301).
                ancestor_level = indent_level
                nested = False
                for line in lines[line_number - 2::-1]:
                    if line.strip() and expand_indent(line) < ancestor_level:
                        ancestor_level = expand_indent(line)
                        nested = line.lstrip().startswith('def ')
                    if nested or ancestor_level == 0:
                        break
                if nested:
                    yield 0, "E306 expected 1 blank line before a " \
                        "nested definition, found 0"
                else:
                    yield 0, "E301 expected 1 blank line, found 0"
        elif blank_before != 2:
            yield 0, "E302 expected 2 blank lines, found %d" % blank_before
    elif (logical_line and not indent_level and blank_before != 2 and
          previous_unindented_logical_line.startswith(('def ', 'class '))):
        yield 0, "E305 expected 2 blank lines after " \
            "class or function definition, found %d" % blank_before
r Separate top - level function and class definitions with two blank lines .
20,976
def whitespace_around_keywords(logical_line):
    r"""Avoid extraneous whitespace around keywords.

    E271/E272: multiple spaces after/before keyword
    E273/E274: tab after/before keyword
    """
    for match in KEYWORD_REGEX.finditer(logical_line):
        before, after = match.groups()
        checks = (
            (1, before, "E274 tab before keyword",
             "E272 multiple spaces before keyword"),
            (2, after, "E273 tab after keyword",
             "E271 multiple spaces after keyword"),
        )
        for group, gap, tab_msg, multi_msg in checks:
            if '\t' in gap:
                yield match.start(group), tab_msg
            elif len(gap) > 1:
                yield match.start(group), multi_msg
r Avoid extraneous whitespace around keywords .
20,977
def missing_whitespace(logical_line):
    r"""Each comma, semicolon or colon should be followed by whitespace.

    E231: missing whitespace after ',', ';' or ':'
    (slices and one-element tuples are exempt)
    """
    line = logical_line
    for index in range(len(line) - 1):
        char = line[index]
        if char not in ',;:' or line[index + 1] in WHITESPACE:
            continue
        before = line[:index]
        if char == ':' and before.count('[') > before.count(']') and \
                before.rfind('{') < before.rfind('['):
            continue  # slice syntax, e.g. x[1:2]
        if char == ',' and line[index + 1] == ')':
            continue  # one-element tuple, e.g. (3,)
        yield index, "E231 missing whitespace after '%s'" % char
r Each comma semicolon or colon should be followed by whitespace .
20,978
def indentation(logical_line, previous_logical, indent_char,
                indent_level, previous_indent_level):
    r"""Use 4 spaces per indentation level.

    E111/E114: indentation is not a multiple of four
    E112/E115: expected an indented block
    E113/E116: unexpected indentation
    (the higher code of each pair applies when the line is a comment)
    """
    is_comment = not logical_line
    c = 3 if is_comment else 0
    tmpl = "E11%d %s (comment)" if is_comment else "E11%d %s"
    if indent_level % 4:
        yield 0, tmpl % (1 + c, "indentation is not a multiple of four")
    indent_expect = previous_logical.endswith(':')
    if indent_expect and indent_level <= previous_indent_level:
        yield 0, tmpl % (2 + c, "expected an indented block")
    elif indent_level > previous_indent_level and not indent_expect:
        yield 0, tmpl % (3 + c, "unexpected indentation")
r Use 4 spaces per indentation level .
20,979
def whitespace_around_operator(logical_line):
    r"""Avoid extraneous whitespace around an operator.

    E221/E222: multiple spaces before/after operator
    E223/E224: tab before/after operator
    """
    for match in OPERATOR_REGEX.finditer(logical_line):
        before, after = match.groups()
        for group, gap, side in ((1, before, 'before'), (2, after, 'after')):
            if '\t' in gap:
                code = 'E223' if side == 'before' else 'E224'
                yield match.start(group), "%s tab %s operator" % (code, side)
            elif len(gap) > 1:
                code = 'E221' if side == 'before' else 'E222'
                yield (match.start(group),
                       "%s multiple spaces %s operator" % (code, side))
r Avoid extraneous whitespace around an operator .
20,980
def missing_whitespace_around_operator(logical_line, tokens):
    r"""Surround operators with a single space on either side.

    E225: missing whitespace around operator
    E226: missing whitespace around arithmetic operator
    E227: missing whitespace around bitwise or shift operator
    E228: missing whitespace around modulo operator
    """
    parens = 0
    need_space = False
    prev_type = tokenize.OP
    prev_text = prev_end = None
    for token_type, text, start, end, line in tokens:
        if token_type in SKIP_COMMENTS:
            continue
        if text in ('(', 'lambda'):
            parens += 1
        elif text == ')':
            parens -= 1
        if need_space:
            if start != prev_end:
                # Found a (probably) needed space.
                if need_space is not True and not need_space[1]:
                    yield (need_space[0],
                           "E225 missing whitespace around operator")
                need_space = False
            elif text == '>' and prev_text in ('<', '-'):
                # Tolerate "->" and the old "<>" operator.
                pass
            else:
                if need_space is True or need_space[1]:
                    # A needed trailing space was not found.
                    yield prev_end, "E225 missing whitespace around operator"
                elif prev_text != '**':
                    code, optype = 'E226', 'arithmetic'
                    if prev_text == '%':
                        code, optype = 'E228', 'modulo'
                    elif prev_text not in ARITHMETIC_OP:
                        code, optype = 'E227', 'bitwise or shift'
                    yield (need_space[0], "%s missing whitespace "
                           "around %s operator" % (code, optype))
                need_space = False
        elif token_type == tokenize.OP and prev_end is not None:
            if text == '=' and parens:
                # Allow keyword args or defaults: foo(bar=None).
                pass
            elif text in WS_NEEDED_OPERATORS:
                need_space = True
            elif text in UNARY_OPERATORS:
                # Check for negative number, decorator, or keyword argument.
                if (prev_text in '}])' if prev_type == tokenize.OP
                        else prev_text not in KEYWORDS):
                    need_space = None
            elif text in WS_OPTIONAL_OPERATORS:
                need_space = None
            if need_space is None:
                # Surrounding space is optional, but must be consistent;
                # remember where the operator started and whether a space
                # preceded it.
                need_space = (prev_end, start != prev_end)
            elif need_space and start == prev_end:
                # A needed opening space was not found.
                yield prev_end, "E225 missing whitespace around operator"
                need_space = False
        prev_type = token_type
        prev_text = text
        prev_end = end
r Surround operators with a single space on either side .
20,981
def whitespace_around_comma(logical_line):
    r"""Avoid extraneous whitespace after a comma (or colon/semicolon).

    E241: multiple spaces after separator
    E242: tab after separator
    """
    for m in WHITESPACE_AFTER_COMMA_REGEX.finditer(logical_line):
        found = m.start() + 1
        separator = m.group()[0]
        if '\t' in m.group():
            yield found, "E242 tab after '%s'" % separator
        else:
            yield found, "E241 multiple spaces after '%s'" % separator
r Avoid extraneous whitespace after a comma or a colon .
20,982
def whitespace_around_named_parameter_equals(logical_line, tokens):
    r"""Don't use spaces around the '=' sign in function arguments.

    E251: unexpected spaces around keyword / parameter equals
    (annotated arguments in a 'def' header are exempt)
    """
    parens = 0
    no_space = False
    prev_end = None
    annotated_func_arg = False
    in_def = bool(STARTSWITH_DEF_REGEX.match(logical_line))
    message = "E251 unexpected spaces around keyword / parameter equals"
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.NL:
            continue
        if no_space:
            no_space = False
            if start != prev_end:
                # Space found after the '=' of a keyword argument.
                yield (prev_end, message)
        if token_type == tokenize.OP:
            if text in '([':
                parens += 1
            elif text in ')]':
                parens -= 1
            elif in_def and text == ':' and parens == 1:
                # Annotated top-level argument: spaces around '=' are fine.
                annotated_func_arg = True
            elif parens and text == ',' and parens == 1:
                annotated_func_arg = False
            elif parens and text == '=' and not annotated_func_arg:
                no_space = True
                if start != prev_end:
                    # Space found before the '=' of a keyword argument.
                    yield (prev_end, message)
            if not parens:
                annotated_func_arg = False
        prev_end = end
r Don t use spaces around the = sign in function arguments .
20,983
def whitespace_before_comment(logical_line, tokens):
    r"""Separate inline comments by at least two spaces.

    E261: at least two spaces before inline comment
    E262: inline comment should start with '# '
    E265: block comment should start with '# '
    E266: too many leading '#' for block comment
    """
    prev_end = (0, 0)
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            # Anything before the '#' on this physical line makes it inline.
            inline_comment = line[:start[1]].strip()
            if inline_comment:
                if prev_end[0] == start[0] and start[1] < prev_end[1] + 2:
                    yield (prev_end,
                           "E261 at least two spaces before inline comment")
            symbol, sp, comment = text.partition(' ')
            # bad_prefix is the first char after the leading '#'s when the
            # comment does not start exactly with '#' or '#:'.
            bad_prefix = symbol not in '#:' and (symbol.lstrip('#')[:1] or '#')
            if inline_comment:
                if bad_prefix or comment[:1] in WHITESPACE:
                    yield start, "E262 inline comment should start with '# '"
            elif bad_prefix and (bad_prefix != '!' or start[0] > 1):
                # Shebang lines ('#!') are only allowed on the first line.
                if bad_prefix != '#':
                    yield start, "E265 block comment should start with '# '"
                elif comment:
                    yield start, "E266 too many leading '#' for block comment"
        elif token_type != tokenize.NL:
            prev_end = end
r Separate inline comments by at least two spaces .
20,984
def imports_on_separate_lines(logical_line):
    r"""Place imports on separate lines.

    E401: multiple imports on one line
    """
    if not logical_line.startswith('import '):
        return
    comma = logical_line.find(',')
    if comma > -1 and ';' not in logical_line[:comma]:
        yield comma, "E401 multiple imports on one line"
r Place imports on separate lines .
20,985
def module_imports_on_top_of_file(logical_line, indent_level, checker_state,
                                  noqa):
    r"""Place imports at the top of the file.

    E402: module level import not at top of file

    Module docstrings, dunder assignments, and try/except around
    conditional imports are allowed before imports.
    """
    def is_string_literal(line):
        # Skip string prefixes (u/U/b/B then r/R) before the quote.
        if line[0] in 'uUbB':
            line = line[1:]
        if line and line[0] in 'rR':
            line = line[1:]
        return line and (line[0] == '"' or line[0] == "'")

    allowed_try_keywords = ('try', 'except', 'else', 'finally')

    if indent_level or not logical_line or noqa:
        # Only bare, top-level, non-noqa lines can violate E402.
        return

    line = logical_line
    if line.startswith(('import ', 'from ')):
        if checker_state.get('seen_non_imports', False):
            yield 0, "E402 module level import not at top of file"
    elif re.match(DUNDER_REGEX, line):
        # Dunder assignments such as __version__ are tolerated.
        return
    elif any(line.startswith(kw) for kw in allowed_try_keywords):
        return
    elif is_string_literal(line):
        # The first string literal is assumed to be the module docstring.
        if checker_state.get('seen_docstring', False):
            checker_state['seen_non_imports'] = True
        else:
            checker_state['seen_docstring'] = True
    else:
        checker_state['seen_non_imports'] = True
r Place imports at the top of the file .
20,986
def explicit_line_join(logical_line, tokens):
    r"""Avoid explicit line joins (backslashes) between brackets.

    E502: the backslash is redundant between brackets
    """
    prev_start = prev_end = parens = 0
    in_comment = False
    join_pos = None  # (row, col) of the last trailing backslash seen
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            in_comment = True
        if start[0] != prev_start and parens and join_pos and not in_comment:
            yield join_pos, "E502 the backslash is redundant between brackets"
        if end[0] != prev_end:
            # Physical line changed: remember a trailing backslash, if any.
            if line.rstrip('\r\n').endswith('\\'):
                join_pos = (end[0], len(line.splitlines()[-1]) - 1)
            else:
                join_pos = None
            prev_start = prev_end = end[0]
        else:
            prev_start = start[0]
        if token_type == tokenize.OP:
            if text in '([{':
                parens += 1
            elif text in ')]}':
                parens -= 1
r Avoid explicit line join between brackets .
20,987
def break_around_binary_operator(logical_line, tokens):
    r"""Avoid breaks before binary operators.

    W503: line break before binary operator
    """
    def is_binary_operator(token_type, text):
        # Punctuation and '%'/'~' are excluded from this check.
        return ((token_type == tokenize.OP or text in ['and', 'or']) and
                text not in "()[]{},:.;@=%~")

    line_break = False
    unary_context = True
    previous_token_type = None
    previous_text = None
    for token_type, text, start, end, line in tokens:
        if token_type == tokenize.COMMENT:
            continue
        if ('\n' in text or '\r' in text) and token_type != tokenize.STRING:
            line_break = True
        else:
            if (line_break and not unary_context and
                    is_binary_operator(token_type, text) and
                    not is_binary_operator(previous_token_type,
                                           previous_text)):
                yield start, "W503 line break before binary operator"
            # After an opener or separator the next operator is unary.
            unary_context = text in '([{,;'
            line_break = False
        # NB: previous_* must also be updated for newline tokens.
        previous_token_type = token_type
        previous_text = text
r Avoid breaks before binary operators .
20,988
def comparison_to_singleton(logical_line, noqa):
    r"""Comparisons to singletons should use 'is' or 'is not'.

    E711: comparison to None
    E712: comparison to True or False
    """
    match = not noqa and COMPARE_SINGLETON_REGEX.search(logical_line)
    if not match:
        return
    singleton = match.group(1) or match.group(3)
    same = (match.group(2) == '==')

    msg = "'if cond is %s:'" % (('' if same else 'not ') + singleton)
    if singleton in ('None',):
        code = 'E711'
    else:
        code = 'E712'
        nonzero = ((singleton == 'True' and same) or
                   (singleton == 'False' and not same))
        msg += " or 'if %scond:'" % ('' if nonzero else 'not ')
    yield match.start(2), ("%s comparison to %s should be %s" %
                           (code, singleton, msg))
r Comparison to singletons should use is or is not .
20,989
def comparison_negative(logical_line):
    r"""Negative comparisons should use 'not in' and 'is not'.

    E713: test for membership should be 'not in'
    E714: test for object identity should be 'is not'
    """
    match = COMPARE_NEGATIVE_REGEX.search(logical_line)
    if not match:
        return
    pos = match.start(1)
    if match.group(2) == 'in':
        yield pos, "E713 test for membership should be 'not in'"
    else:
        yield pos, "E714 test for object identity should be 'is not'"
r Negative comparison should be done using not in and is not .
20,990
def bare_except(logical_line, noqa):
    r"""When catching exceptions, mention specific exceptions when possible.

    E722: do not use a bare 'except:' clause
    """
    if noqa:
        return
    match = re.compile(r"except\s*:").match(logical_line)
    if match:
        yield match.start(), "E722 do not use bare except'"
r When catching exceptions mention specific exceptions whenever possible .
20,991
def ambiguous_identifier(logical_line, tokens):
    r"""Never use the characters 'l', 'O' or 'I' as variable names.

    E741: ambiguous variable name
    E742: ambiguous class definition
    E743: ambiguous function definition
    """
    idents_to_avoid = ('l', 'O', 'I')
    prev_type, prev_text, prev_start, prev_end, __ = tokens[0]
    for token_type, text, start, end, line in tokens[1:]:
        ident = pos = None
        # Identifier on the left of an assignment / augmented assignment.
        if token_type == tokenize.OP and '=' in text:
            if prev_text in idents_to_avoid:
                ident, pos = prev_text, prev_start
        # Identifier bound by 'as', 'global' or 'nonlocal'.
        if prev_text in ('as', 'global', 'nonlocal'):
            if text in idents_to_avoid:
                ident, pos = text, start
        if prev_text == 'class':
            if text in idents_to_avoid:
                yield start, "E742 ambiguous class definition '%s'" % text
        if prev_text == 'def':
            if text in idents_to_avoid:
                yield start, "E743 ambiguous function definition '%s'" % text
        if ident:
            yield pos, "E741 ambiguous variable name '%s'" % ident
        prev_text = text
        prev_start = start
r Never use the characters l O or I as variable names .
20,992
def expand_indent(line):
    r"""Return the effective indentation, expanding tabs to 8-column stops.

    Tabs advance to the next multiple of 8; any other non-space character
    ends the indentation.
    """
    if '\t' not in line:
        # Fast path: pure-space indentation.
        return len(line) - len(line.lstrip())
    width = 0
    for ch in line:
        if ch == '\t':
            width = width // 8 * 8 + 8
        elif ch == ' ':
            width += 1
        else:
            break
    return width
r Return the amount of indentation .
20,993
def parse_udiff(diff, patterns=None, parent='.'):
    """Return a dictionary of matching lines.

    Maps each changed file (joined with *parent* and filtered through
    *patterns*) to the set of line numbers touched by the diff hunks.
    """
    rv = {}
    path = nrows = None
    for line in diff.splitlines():
        if nrows:
            # Inside a hunk: count every line that is not a removal.
            if line[:1] != '-':
                nrows -= 1
            continue
        if line[:3] == '@@ ':
            hunk_match = HUNK_REGEX.match(line)
            (row, nrows) = [int(g or '1') for g in hunk_match.groups()]
            rv[path].update(range(row, row + nrows))
        elif line[:3] == '+++':
            path = line[4:].split('\t', 1)[0]
            # Git diffs prefix the new file name with 'b/'.
            if path[:2] == 'b/':
                path = path[2:]
            rv[path] = set()
    return dict([(os.path.join(parent, rel_path), rows)
                 for (rel_path, rows) in rv.items()
                 if rows and filename_match(rel_path, patterns)])
Return a dictionary of matching lines .
20,994
def normalize_paths(value, parent=os.curdir):
    """Parse a comma-separated list of paths.

    Entries containing '/' are made absolute relative to *parent*;
    trailing slashes are removed. A list *value* is returned unchanged,
    and a falsy one yields an empty list.
    """
    if not value:
        return []
    if isinstance(value, list):
        return value
    paths = []
    for raw in value.split(','):
        raw = raw.strip()
        if '/' in raw:
            raw = os.path.abspath(os.path.join(parent, raw))
        paths.append(raw.rstrip('/'))
    return paths
Parse a comma - separated list of paths .
20,995
def filename_match(filename, patterns, default=True):
    """Check if *patterns* contains a pattern matching *filename*.

    When *patterns* is empty or unspecified, return *default*.
    """
    if not patterns:
        return default
    return any(fnmatch(filename, pat) for pat in patterns)
Check if patterns contains a pattern that matches filename .
20,996
def register_check(check, codes=None):
    """Register a new check object in the module-level _checks registry.

    Functions whose first parameter is 'physical_line' or 'logical_line'
    are keyed by that parameter name; classes taking (self, tree) are
    registered as tree checkers. When *codes* is omitted for a function,
    the error codes are harvested from its docstring.
    """
    def _add_check(check, kind, codes, args):
        # Merge codes into an existing entry, or create a new one.
        if check in _checks[kind]:
            _checks[kind][check][0].extend(codes or [])
        else:
            _checks[kind][check] = (codes or [''], args)

    if inspect.isfunction(check):
        args = _get_parameters(check)
        if args and args[0] in ('physical_line', 'logical_line'):
            if codes is None:
                # Fall back to the error codes listed in the docstring.
                codes = ERRORCODE_REGEX.findall(check.__doc__ or '')
            _add_check(check, args[0], codes, args)
    elif inspect.isclass(check):
        if _get_parameters(check.__init__)[:2] == ['self', 'tree']:
            _add_check(check, 'tree', codes, None)
Register a new check object .
20,997
def init_checks_registry():
    """Register every check function defined at this module's top level."""
    module = inspect.getmodule(register_check)
    for name, func in inspect.getmembers(module, inspect.isfunction):
        register_check(func)
Register all globally visible functions .
20,998
def read_config(options, args, arglist, parser):
    """Read and parse configurations.

    Precedence (lowest to highest): the user configuration file, the
    project configuration found by walking up from the common prefix
    of *args*, the --config file given on the command line, and finally
    the command-line options themselves.
    """
    config = RawConfigParser()

    cli_conf = options.config

    local_dir = os.curdir

    if USER_CONFIG and os.path.isfile(USER_CONFIG):
        if options.verbose:
            print('user configuration: %s' % USER_CONFIG)
        config.read(USER_CONFIG)

    # Walk upwards from the arguments' common prefix looking for a
    # project configuration file.
    parent = tail = args and os.path.abspath(os.path.commonprefix(args))
    while tail:
        if config.read(os.path.join(parent, fn) for fn in PROJECT_CONFIG):
            local_dir = parent
            if options.verbose:
                print('local configuration: in %s' % parent)
            break
        (parent, tail) = os.path.split(parent)

    if cli_conf and os.path.isfile(cli_conf):
        if options.verbose:
            print('cli configuration: %s' % cli_conf)
        config.read(cli_conf)

    pycodestyle_section = None
    if config.has_section(parser.prog):
        pycodestyle_section = parser.prog
    elif config.has_section('pep8'):
        # Legacy section name, kept for backwards compatibility.
        pycodestyle_section = 'pep8'
        warnings.warn('[pep8] section is deprecated. Use [pycodestyle].')

    if pycodestyle_section:
        option_list = dict([(o.dest, o.type or o.action)
                            for o in parser.option_list])

        # First, read the default values.
        (new_options, __) = parser.parse_args([])

        # Second, apply the configuration file options on top of them.
        for opt in config.options(pycodestyle_section):
            if opt.replace('_', '-') not in parser.config_options:
                print(" unknown option '%s' ignored" % opt)
                continue
            if options.verbose > 1:
                print(" %s = %s" % (opt,
                                    config.get(pycodestyle_section, opt)))
            normalized_opt = opt.replace('-', '_')
            opt_type = option_list[normalized_opt]
            # Coerce the raw string according to the option's declared type.
            if opt_type in ('int', 'count'):
                value = config.getint(pycodestyle_section, opt)
            elif opt_type in ('store_true', 'store_false'):
                value = config.getboolean(pycodestyle_section, opt)
            else:
                value = config.get(pycodestyle_section, opt)
                if normalized_opt == 'exclude':
                    value = normalize_paths(value, local_dir)
            setattr(new_options, normalized_opt, value)

        # Third, overwrite with the command-line options.
        (options, __) = parser.parse_args(arglist, values=new_options)
    options.doctest = options.testsuite = False
    return options
Read and parse configurations .
20,999
def _parse_multi_options ( options , split_token = ',' ) : r if options : return [ o . strip ( ) for o in options . split ( split_token ) if o . strip ( ) ] else : return options
Split on the separator, strip whitespace, and discard empty entries.