idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
225,100 | def v_unique_name_leaf_list ( ctx , stmt ) : if not stmt . i_config : return seen = [ ] for defval in stmt . i_default : if defval in seen : err_add ( ctx . errors , stmt . pos , 'DUPLICATE_DEFAULT' , ( defval ) ) else : seen . append ( defval ) | Make sure config true leaf - lists do not have duplicate defaults | 87 | 12 |
225,101 | def v_reference_choice ( ctx , stmt ) : d = stmt . search_one ( 'default' ) if d is not None : m = stmt . search_one ( 'mandatory' ) if m is not None and m . arg == 'true' : err_add ( ctx . errors , stmt . pos , 'DEFAULT_AND_MANDATORY' , ( ) ) ptr = attrsearch ( d . arg , 'arg' , stmt . i_children ) if ptr is None : err_add ( ctx . errors , d . pos , 'DEFAULT_CASE_NOT_FOUND' , d . arg ) else : # make sure there are no mandatory nodes in the default case def chk_no_defaults ( s ) : for c in s . i_children : if c . keyword in ( 'leaf' , 'choice' ) : m = c . search_one ( 'mandatory' ) if m is not None and m . arg == 'true' : err_add ( ctx . errors , c . pos , 'MANDATORY_NODE_IN_DEFAULT_CASE' , ( ) ) elif c . keyword in ( 'list' , 'leaf-list' ) : m = c . search_one ( 'min-elements' ) if m is not None and int ( m . arg ) > 0 : err_add ( ctx . errors , c . pos , 'MANDATORY_NODE_IN_DEFAULT_CASE' , ( ) ) elif c . keyword == 'container' : p = c . search_one ( 'presence' ) if p == None or p . arg == 'false' : chk_no_defaults ( c ) chk_no_defaults ( ptr ) | Make sure that the default case exists | 395 | 7 |
225,102 | def v_reference_leaf_leafref ( ctx , stmt ) : if ( hasattr ( stmt , 'i_leafref' ) and stmt . i_leafref is not None and stmt . i_leafref_expanded is False ) : path_type_spec = stmt . i_leafref not_req_inst = not ( path_type_spec . require_instance ) x = validate_leafref_path ( ctx , stmt , path_type_spec . path_spec , path_type_spec . path_ , accept_non_config_target = not_req_inst ) if x is None : return ptr , expanded_path , path_list = x path_type_spec . i_target_node = ptr path_type_spec . i_expanded_path = expanded_path path_type_spec . i_path_list = path_list stmt . i_leafref_expanded = True if ptr is not None : chk_status ( ctx , stmt , ptr ) stmt . i_leafref_ptr = ( ptr , path_type_spec . pos ) | Verify that all leafrefs in a leaf or leaf - list have correct path | 250 | 17 |
225,103 | def has_type ( type , names ) : if type . arg in names : return type for t in type . search ( 'type' ) : # check all union's member types r = has_type ( t , names ) if r is not None : return r if not hasattr ( type , 'i_typedef' ) : return None if ( type . i_typedef is not None and hasattr ( type . i_typedef , 'i_is_circular' ) and type . i_typedef . i_is_circular == False ) : t = type . i_typedef . search_one ( 'type' ) if t is not None : return has_type ( t , names ) return None | Return type with name if type has name as one of its base types and name is in the names list . otherwise return None . | 161 | 26 |
225,104 | def search_typedef ( stmt , name ) : mod = stmt . i_orig_module while stmt is not None : if name in stmt . i_typedefs : t = stmt . i_typedefs [ name ] if ( mod is not None and mod != t . i_orig_module and t . i_orig_module . keyword == 'submodule' ) : # make sure this submodule is included if mod . search_one ( 'include' , t . i_orig_module . arg ) is None : return None return t stmt = stmt . parent return None | Search for a typedef in scope First search the hierarchy then the module and its submodules . | 135 | 19 |
225,105 | def search_grouping ( stmt , name ) : mod = stmt . i_orig_module while stmt is not None : if name in stmt . i_groupings : g = stmt . i_groupings [ name ] if ( mod is not None and mod != g . i_orig_module and g . i_orig_module . keyword == 'submodule' ) : # make sure this submodule is included if mod . search_one ( 'include' , g . i_orig_module . arg ) is None : return None return g stmt = stmt . parent return None | Search for a grouping in scope First search the hierarchy then the module and its submodules . | 130 | 18 |
225,106 | def is_submodule_included ( src , tgt ) : if tgt is None or not hasattr ( tgt , 'i_orig_module' ) : return True if ( tgt . i_orig_module . keyword == 'submodule' and src . i_orig_module != tgt . i_orig_module and src . i_orig_module . i_modulename == tgt . i_orig_module . i_modulename ) : if src . i_orig_module . search_one ( 'include' , tgt . i_orig_module . arg ) is None : return False return True | Check that the tgt s submodule is included by src if they belong to the same module . | 139 | 20 |
225,107 | def mk_path_str ( stmt , with_prefixes = False , prefix_onchange = False , prefix_to_module = False , resolve_top_prefix_to_module = False ) : resolved_names = mk_path_list ( stmt ) xpath_elements = [ ] last_prefix = None for index , resolved_name in enumerate ( resolved_names ) : module_name , prefix , node_name = resolved_name xpath_element = node_name if with_prefixes or ( prefix_onchange and prefix != last_prefix ) : new_prefix = prefix if ( prefix_to_module or ( index == 0 and resolve_top_prefix_to_module ) ) : new_prefix = module_name xpath_element = '%s:%s' % ( new_prefix , node_name ) xpath_elements . append ( xpath_element ) last_prefix = prefix return '/%s' % '/' . join ( xpath_elements ) | Returns the XPath path of the node . with_prefixes indicates whether or not to prefix every node . | 220 | 22 |
225,108 | def get_xpath ( stmt , qualified = False , prefix_to_module = False ) : return mk_path_str ( stmt , with_prefixes = qualified , prefix_onchange = True , prefix_to_module = prefix_to_module ) | Gets the XPath of the statement . Unless qualified = True does not include prefixes unless the prefix changes mid - XPath . | 58 | 27 |
225,109 | def get_qualified_type ( stmt ) : type_obj = stmt . search_one ( 'type' ) fq_type_name = None if type_obj : if getattr ( type_obj , 'i_typedef' , None ) : # If type_obj has typedef, substitute. # Absolute module:type instead of prefix:type type_obj = type_obj . i_typedef type_name = type_obj . arg if check_primitive_type ( type_obj ) : # Doesn't make sense to qualify a primitive..I think. fq_type_name = type_name else : type_module = type_obj . i_orig_module . arg fq_type_name = '%s:%s' % ( type_module , type_name ) return fq_type_name | Gets the qualified top - level type of the node . This enters the typedef if defined instead of using the prefix to ensure absolute distinction . | 185 | 29 |
225,110 | def get_primitive_type ( stmt ) : type_obj = stmt . search_one ( 'type' ) type_name = getattr ( type_obj , 'arg' , None ) typedef_obj = getattr ( type_obj , 'i_typedef' , None ) if typedef_obj : type_name = get_primitive_type ( typedef_obj ) elif type_obj and not check_primitive_type ( type_obj ) : raise Exception ( '%s is not a primitive! Incomplete parse tree?' % type_name ) return type_name | Recurses through the typedefs and returns the most primitive YANG type defined . | 132 | 17 |
225,111 | def search ( self , keyword , children = None , arg = None ) : if children is None : children = self . substmts return [ ch for ch in children if ( ch . keyword == keyword and ( arg is None or ch . arg == arg ) ) ] | Return list of receiver s substmts with keyword . | 56 | 11 |
225,112 | def search_one ( self , keyword , arg = None , children = None ) : if children is None : children = self . substmts for ch in children : if ch . keyword == keyword and ( arg is None or ch . arg == arg ) : return ch return None | Return receiver s substmt with keyword and optionally arg . | 58 | 11 |
225,113 | def main_module ( self ) : if self . i_module . keyword == "submodule" : return self . i_module . i_ctx . get_module ( self . i_module . i_including_modulename ) return self . i_module | Return the main module to which the receiver belongs . | 57 | 10 |
225,114 | def add_prefix ( prefix , s ) : # tokenize the XPath expression toks = xpath_lexer . scan ( s ) # add default prefix to unprefixed names toks2 = [ _add_prefix ( prefix , tok ) for tok in toks ] # build a string of the patched expression ls = [ x . value for x in toks2 ] return '' . join ( ls ) | Add prefix to all unprefixed names in s | 88 | 9 |
225,115 | def chk_date_arg ( s ) : if re_date . search ( s ) is None : return False comp = s . split ( '-' ) try : dt = datetime . date ( int ( comp [ 0 ] ) , int ( comp [ 1 ] ) , int ( comp [ 2 ] ) ) return True except Exception as e : return False | Checks if the string s is a valid date string . | 77 | 12 |
225,116 | def chk_enum_arg ( s ) : if len ( s ) == 0 or s [ 0 ] . isspace ( ) or s [ - 1 ] . isspace ( ) : return False else : return True | Checks if the string s is a valid enum string . | 46 | 12 |
225,117 | def chk_fraction_digits_arg ( s ) : try : v = int ( s ) if v >= 1 and v <= 18 : return True else : return False except ValueError : return False | Checks if the string s is a valid fraction - digits argument . | 44 | 14 |
225,118 | def combine ( self , patch ) : exclusive = set ( [ "config" , "default" , "mandatory" , "presence" , "min-elements" , "max-elements" ] ) kws = set ( [ s . keyword for s in self . plist ] ) & exclusive add = [ n for n in patch . plist if n . keyword not in kws ] self . plist . extend ( add ) | Add patch . plist to self . plist . | 95 | 11 |
225,119 | def serialize ( self ) : res = '<?xml version="1.0" encoding="UTF-8"?>' for ns in self . namespaces : self . top_grammar . attr [ "xmlns:" + self . namespaces [ ns ] ] = ns res += self . top_grammar . start_tag ( ) for ch in self . top_grammar . children : res += ch . serialize ( ) res += self . tree . serialize ( ) for d in self . global_defs : res += self . global_defs [ d ] . serialize ( ) for i in self . identities : res += self . identities [ i ] . serialize ( ) return res + self . top_grammar . end_tag ( ) | Return the string representation of the receiver . | 164 | 8 |
225,120 | def setup_top ( self ) : self . top_grammar = SchemaNode ( "grammar" ) self . top_grammar . attr = { "xmlns" : "http://relaxng.org/ns/structure/1.0" , "datatypeLibrary" : "http://www.w3.org/2001/XMLSchema-datatypes" } self . tree = SchemaNode ( "start" ) | Create top - level elements of the hybrid schema . | 100 | 10 |
225,121 | def create_roots ( self , yam ) : self . local_grammar = SchemaNode ( "grammar" ) self . local_grammar . attr = { "ns" : yam . search_one ( "namespace" ) . arg , "nma:module" : self . module . arg } src_text = "YANG module '%s'" % yam . arg revs = yam . search ( "revision" ) if len ( revs ) > 0 : src_text += " revision %s" % self . current_revision ( revs ) self . dc_element ( self . local_grammar , "source" , src_text ) start = SchemaNode ( "start" , self . local_grammar ) self . data = SchemaNode ( "nma:data" , start , interleave = True ) self . data . occur = 2 self . rpcs = SchemaNode ( "nma:rpcs" , start , interleave = False ) self . notifications = SchemaNode ( "nma:notifications" , start , interleave = False ) | Create the top - level structure for module yam . | 245 | 11 |
225,122 | def yang_to_xpath ( self , xpe ) : if self . gg_level : pref = "$pref:" else : pref = self . prefix_stack [ - 1 ] + ":" toks = xpath_lexer . scan ( xpe ) prev = None res = "" for tok in toks : if ( tok . type == "SLASH" and prev not in ( "DOT" , "DOTDOT" , "RPAREN" , "RBRACKET" , "name" , "wildcard" , "prefix_test" ) ) : res += "$root" elif tok . type == "name" and ":" not in tok . value : res += pref res += tok . value if tok . type != "_whitespace" : prev = tok . type return res | Transform YANG s xpath to a form suitable for Schematron . | 184 | 15 |
225,123 | def register_identity ( self , id_stmt ) : bst = id_stmt . search_one ( "base" ) if bst : bder = self . identity_deps . setdefault ( bst . i_identity , [ ] ) bder . append ( id_stmt ) | Register id_stmt with its base identity if any . | 68 | 12 |
225,124 | def add_derived_identity ( self , id_stmt ) : p = self . add_namespace ( id_stmt . main_module ( ) ) if id_stmt not in self . identities : # add named pattern def self . identities [ id_stmt ] = SchemaNode . define ( "__%s_%s" % ( p , id_stmt . arg ) ) parent = self . identities [ id_stmt ] if id_stmt in self . identity_deps : parent = SchemaNode . choice ( parent , occur = 2 ) for i in self . identity_deps [ id_stmt ] : parent . subnode ( self . add_derived_identity ( i ) ) idval = SchemaNode ( "value" , parent , p + ":" + id_stmt . arg ) idval . attr [ "type" ] = "QName" res = SchemaNode ( "ref" ) res . attr [ "name" ] = self . identities [ id_stmt ] . attr [ "name" ] return res | Add pattern def for id_stmt and all derived identities . | 238 | 13 |
225,125 | def preload_defs ( self ) : for d in ( self . module . search ( "grouping" ) + self . module . search ( "typedef" ) ) : uname , dic = self . unique_def_name ( d ) self . install_def ( uname , d , dic ) | Preload all top - level definitions . | 70 | 8 |
225,126 | def add_prefix ( self , name , stmt ) : if self . gg_level : return name pref , colon , local = name . partition ( ":" ) if colon : return ( self . module_prefixes [ stmt . i_module . i_prefixes [ pref ] [ 0 ] ] + ":" + local ) else : return self . prefix_stack [ - 1 ] + ":" + pref | Return name prepended with correct prefix . | 89 | 8 |
225,127 | def dc_element ( self , parent , name , text ) : if self . dc_uri in self . namespaces : dcel = SchemaNode ( self . namespaces [ self . dc_uri ] + ":" + name , text = text ) parent . children . insert ( 0 , dcel ) | Add DC element name containing text to parent . | 65 | 9 |
225,128 | def get_default ( self , stmt , refd ) : if refd [ "default" ] : return refd [ "default" ] defst = stmt . search_one ( "default" ) if defst : return defst . arg return None | Return default value for stmt node . | 56 | 8 |
225,129 | def add_patch ( self , pset , augref ) : try : path = [ self . add_prefix ( c , augref ) for c in augref . arg . split ( "/" ) if c ] except KeyError : # augment of a module that's not among input modules return car = path [ 0 ] patch = Patch ( path [ 1 : ] , augref ) if car in pset : sel = [ x for x in pset [ car ] if patch . path == x . path ] if sel : sel [ 0 ] . combine ( patch ) else : pset [ car ] . append ( patch ) else : pset [ car ] = [ patch ] | Add patch corresponding to augref to pset . | 146 | 10 |
225,130 | def apply_augments ( self , auglist , p_elem , pset ) : for a in auglist : par = a . parent if a . search_one ( "when" ) is None : wel = p_elem else : if p_elem . interleave : kw = "interleave" else : kw = "group" wel = SchemaNode ( kw , p_elem , interleave = p_elem . interleave ) wel . occur = p_elem . occur if par . keyword == "uses" : self . handle_substmts ( a , wel , pset ) continue if par . keyword == "submodule" : mnam = par . i_including_modulename else : mnam = par . arg if self . prefix_stack [ - 1 ] == self . module_prefixes [ mnam ] : self . handle_substmts ( a , wel , pset ) else : self . prefix_stack . append ( self . module_prefixes [ mnam ] ) self . handle_substmts ( a , wel , pset ) self . prefix_stack . pop ( ) | Handle substatements of augments from auglist . | 252 | 11 |
225,131 | def current_revision ( self , r_stmts ) : cur = max ( [ [ int ( p ) for p in r . arg . split ( "-" ) ] for r in r_stmts ] ) return "%4d-%02d-%02d" % tuple ( cur ) | Pick the most recent revision date . | 66 | 7 |
225,132 | def install_def ( self , name , dstmt , def_map , interleave = False ) : delem = SchemaNode . define ( name , interleave = interleave ) delem . attr [ "name" ] = name def_map [ name ] = delem if def_map is self . global_defs : self . gg_level += 1 self . handle_substmts ( dstmt , delem ) if def_map is self . global_defs : self . gg_level -= 1 | Install definition name into the appropriate dictionary . | 115 | 8 |
225,133 | def rng_annotation ( self , stmt , p_elem ) : ext = stmt . i_extension prf , extkw = stmt . raw_keyword ( modname , rev ) = stmt . i_module . i_prefixes [ prf ] prefix = self . add_namespace ( statements . modulename_to_module ( self . module , modname , rev ) ) eel = SchemaNode ( prefix + ":" + extkw , p_elem ) argst = ext . search_one ( "argument" ) if argst : if argst . search_one ( "yin-element" , "true" ) : SchemaNode ( prefix + ":" + argst . arg , eel , stmt . arg ) else : eel . attr [ argst . arg ] = stmt . arg self . handle_substmts ( stmt , eel ) | Append YIN representation of extension statement stmt . | 203 | 11 |
225,134 | def propagate_occur ( self , node , value ) : while node . occur < value : node . occur = value if node . name == "define" : break node = node . parent | Propagate occurence value to node and its ancestors . | 40 | 12 |
225,135 | def process_patches ( self , pset , stmt , elem , altname = None ) : if altname : name = altname else : name = stmt . arg new_pset = { } augments = [ ] refine_dict = dict . fromkeys ( ( "presence" , "default" , "mandatory" , "min-elements" , "max-elements" ) ) for p in pset . pop ( self . add_prefix ( name , stmt ) , [ ] ) : if p . path : head = p . pop ( ) if head in new_pset : new_pset [ head ] . append ( p ) else : new_pset [ head ] = [ p ] else : for refaug in p . plist : if refaug . keyword == "augment" : augments . append ( refaug ) else : for s in refaug . substmts : if s . keyword == "description" : self . description_stmt ( s , elem , None ) elif s . keyword == "reference" : self . reference_stmt ( s , elem , None ) elif s . keyword == "must" : self . must_stmt ( s , elem , None ) elif s . keyword == "config" : self . nma_attribute ( s , elem ) elif refine_dict . get ( s . keyword , False ) is None : refine_dict [ s . keyword ] = s . arg return ( refine_dict , augments , new_pset ) | Process patches for data node name from pset . | 337 | 10 |
225,136 | def lookup_expand ( self , stmt , names ) : if not names : return [ ] todo = [ stmt ] while todo : pst = todo . pop ( ) for sub in pst . substmts : if sub . keyword in self . schema_nodes : qname = self . qname ( sub ) if qname in names : names . remove ( qname ) par = sub . parent while hasattr ( par , "d_ref" ) : # par must be grouping par . d_ref . d_expand = True par = par . d_ref . parent if not names : return [ ] # all found elif sub . keyword == "uses" : g = sub . i_grouping g . d_ref = sub todo . append ( g ) return names | Find schema nodes under stmt also in used groupings . | 174 | 12 |
225,137 | def type_with_ranges ( self , tchain , p_elem , rangekw , gen_data ) : ranges = self . get_ranges ( tchain , rangekw ) if not ranges : return p_elem . subnode ( gen_data ( ) ) if len ( ranges ) > 1 : p_elem = SchemaNode . choice ( p_elem ) p_elem . occur = 2 for r in ranges : d_elem = gen_data ( ) for p in self . range_params ( r , rangekw ) : d_elem . subnode ( p ) p_elem . subnode ( d_elem ) | Handle types with range or length restrictions . | 145 | 8 |
225,138 | def get_ranges ( self , tchain , kw ) : ( lo , hi ) = ( "min" , "max" ) ran = None for t in tchain : rstmt = t . search_one ( kw ) if rstmt is None : continue parts = [ p . strip ( ) for p in rstmt . arg . split ( "|" ) ] ran = [ [ i . strip ( ) for i in p . split ( ".." ) ] for p in parts ] if ran [ 0 ] [ 0 ] != 'min' : lo = ran [ 0 ] [ 0 ] if ran [ - 1 ] [ - 1 ] != 'max' : hi = ran [ - 1 ] [ - 1 ] if ran is None : return None if len ( ran ) == 1 : return [ ( lo , hi ) ] else : return [ ( lo , ran [ 0 ] [ - 1 ] ) ] + ran [ 1 : - 1 ] + [ ( ran [ - 1 ] [ 0 ] , hi ) ] | Return list of ranges defined in tchain . | 221 | 9 |
225,139 | def handle_stmt ( self , stmt , p_elem , pset = { } ) : if self . debug > 0 : sys . stderr . write ( "Handling '%s %s'\n" % ( util . keyword_to_str ( stmt . raw_keyword ) , stmt . arg ) ) try : method = self . stmt_handler [ stmt . keyword ] except KeyError : if isinstance ( stmt . keyword , tuple ) : try : method = self . ext_handler [ stmt . keyword [ 0 ] ] [ stmt . keyword [ 1 ] ] except KeyError : method = self . rng_annotation method ( stmt , p_elem ) return else : raise error . EmitError ( "Unknown keyword %s - this should not happen.\n" % stmt . keyword ) method ( stmt , p_elem , pset ) | Run handler method for statement stmt . | 200 | 8 |
225,140 | def handle_substmts ( self , stmt , p_elem , pset = { } ) : for sub in stmt . substmts : self . handle_stmt ( sub , p_elem , pset ) | Handle all substatements of stmt . | 52 | 9 |
225,141 | def nma_attribute ( self , stmt , p_elem , pset = None ) : att = "nma:" + stmt . keyword if att not in p_elem . attr : p_elem . attr [ att ] = stmt . arg | Map stmt to a NETMOD - specific attribute . | 60 | 11 |
225,142 | def type_stmt ( self , stmt , p_elem , pset ) : typedef = stmt . i_typedef if typedef and not stmt . i_is_derived : # just ref uname , dic = self . unique_def_name ( typedef ) if uname not in dic : self . install_def ( uname , typedef , dic ) SchemaNode ( "ref" , p_elem ) . set_attr ( "name" , uname ) defst = typedef . search_one ( "default" ) if defst : dic [ uname ] . default = defst . arg occur = 1 else : occur = dic [ uname ] . occur if occur > 0 : self . propagate_occur ( p_elem , occur ) return chain = [ stmt ] tdefault = None while typedef : type_ = typedef . search_one ( "type" ) chain . insert ( 0 , type_ ) if tdefault is None : tdef = typedef . search_one ( "default" ) if tdef : tdefault = tdef . arg typedef = type_ . i_typedef if tdefault and p_elem . occur == 0 : p_elem . default = tdefault self . propagate_occur ( p_elem , 1 ) self . type_handler [ chain [ 0 ] . arg ] ( chain , p_elem ) | Handle type statement . | 315 | 4 |
225,143 | def choice_type ( self , tchain , p_elem ) : elem = SchemaNode . choice ( p_elem , occur = 2 ) self . handle_substmts ( tchain [ 0 ] , elem ) | Handle enumeration and union types . | 52 | 7 |
225,144 | def mapped_type ( self , tchain , p_elem ) : SchemaNode ( "data" , p_elem ) . set_attr ( "type" , self . datatype_map [ tchain [ 0 ] . arg ] ) | Handle types that are simply mapped to RELAX NG . | 55 | 11 |
225,145 | def numeric_type ( self , tchain , p_elem ) : typ = tchain [ 0 ] . arg def gen_data ( ) : elem = SchemaNode ( "data" ) . set_attr ( "type" , self . datatype_map [ typ ] ) if typ == "decimal64" : fd = tchain [ 0 ] . search_one ( "fraction-digits" ) . arg SchemaNode ( "param" , elem , "19" ) . set_attr ( "name" , "totalDigits" ) SchemaNode ( "param" , elem , fd ) . set_attr ( "name" , "fractionDigits" ) return elem self . type_with_ranges ( tchain , p_elem , "range" , gen_data ) | Handle numeric types . | 183 | 4 |
225,146 | def add_stmt ( stmt , arg_rules ) : ( arg , rules ) = arg_rules stmt_map [ stmt ] = ( arg , rules ) | Use by plugins to add grammar for an extension statement . | 37 | 11 |
225,147 | def add_to_stmts_rules ( stmts , rules ) : def is_rule_less_than ( ra , rb ) : rka = ra [ 0 ] rkb = rb [ 0 ] if not util . is_prefixed ( rkb ) : # old rule is non-prefixed; append new rule after return False if not util . is_prefixed ( rka ) : # old rule prefixed, but new rule is not, insert return True # both are prefixed, compare modulename return rka [ 0 ] < rkb [ 0 ] for s in stmts : ( arg , rules0 ) = stmt_map [ s ] for r in rules : i = 0 while i < len ( rules0 ) : if is_rule_less_than ( r , rules0 [ i ] ) : rules0 . insert ( i , r ) break i += 1 if i == len ( rules0 ) : rules0 . insert ( i , r ) | Use by plugins to add extra rules to the existing rules for a statement . | 213 | 15 |
225,148 | def chk_module_statements ( ctx , module_stmt , canonical = False ) : return chk_statement ( ctx , module_stmt , top_stmts , canonical ) | Validate the statement hierarchy according to the grammar . | 44 | 10 |
225,149 | def chk_statement ( ctx , stmt , grammar , canonical = False ) : n = len ( ctx . errors ) if canonical == True : canspec = grammar else : canspec = [ ] _chk_stmts ( ctx , stmt . pos , [ stmt ] , None , ( grammar , canspec ) , canonical ) return n == len ( ctx . errors ) | Validate stmt according to grammar . | 86 | 8 |
225,150 | def sort_canonical ( keyword , stmts ) : try : ( _arg_type , subspec ) = stmt_map [ keyword ] except KeyError : return stmts res = [ ] # keep the order of data definition statements and case keep = [ s [ 0 ] for s in data_def_stmts ] + [ 'case' ] for ( kw , _spec ) in flatten_spec ( subspec ) : # keep comments before a statement together with that statement comments = [ ] for s in stmts : if s . keyword == '_comment' : comments . append ( s ) elif s . keyword == kw and kw not in keep : res . extend ( comments ) comments = [ ] res . append ( s ) else : comments = [ ] # then copy all other statements (extensions) res . extend ( [ stmt for stmt in stmts if stmt not in res ] ) return res | Sort all stmts in the canonical order defined by keyword . Return the sorted list . The stmt list is not modified . If keyword does not have a canonical order the list is returned as is . | 204 | 41 |
225,151 | def scan ( s ) : line = 1 linepos = 1 pos = 0 toks = [ ] while pos < len ( s ) : matched = False for ( tokname , r ) in patterns : m = r . match ( s , pos ) if m is not None : # found a matching token v = m . group ( 0 ) prec = _preceding_token ( toks ) if tokname == 'STAR' and prec is not None and _is_special ( prec ) : # XPath 1.0 spec, 3.7 special rule 1a # interpret '*' as a wildcard tok = XPathTok ( 'wildcard' , v , line , linepos ) elif ( tokname == 'name' and prec is not None and not _is_special ( prec ) and v in operators ) : # XPath 1.0 spec, 3.7 special rule 1b # interpret the name as an operator tok = XPathTok ( operators [ v ] , v , line , linepos ) elif tokname == 'name' : # check if next token is '(' if re_open_para . match ( s , pos + len ( v ) ) : # XPath 1.0 spec, 3.7 special rule 2 if v in node_types : # XPath 1.0 spec, 3.7 special rule 2a tok = XPathTok ( 'node_type' , v , line , linepos ) else : # XPath 1.0 spec, 3.7 special rule 2b tok = XPathTok ( 'function_name' , v , line , linepos ) # check if next token is '::' elif re_axis . match ( s , pos + len ( v ) ) : # XPath 1.0 spec, 3.7 special rule 3 if v in axes : tok = XPathTok ( 'axis' , v , line , linepos ) else : e = "unknown axis %s" % v raise XPathError ( e , line , linepos ) else : tok = XPathTok ( 'name' , v , line , linepos ) else : tok = XPathTok ( tokname , v , line , linepos ) if tokname == '_whitespace' : n = v . count ( '\n' ) if n > 0 : line = line + n linepos = len ( v ) - v . rfind ( '\n' ) else : linepos += len ( v ) else : linepos += len ( v ) pos += len ( v ) toks . append ( tok ) matched = True break if matched == False : # no patterns matched raise XPathError ( 'syntax error' , line , linepos ) return toks | Return a list of tokens or throw SyntaxError on failure . | 594 | 13 |
225,152 | def yang_modules ( self ) : res = { } for c in self . capabilities : m = c . parameters . get ( "module" ) if m is None or m in res : continue res [ m ] = c . parameters . get ( "revision" ) return res . items ( ) | Return a list of advertised YANG module names with revisions . | 64 | 12 |
225,153 | def get_features ( self , yam ) : mcap = [ c for c in self . capabilities if c . parameters . get ( "module" , None ) == yam ] [ 0 ] if not mcap . parameters . get ( "features" ) : return [ ] return mcap . parameters [ "features" ] . split ( "," ) | Return list of features declared for module yam . | 75 | 10 |
225,154 | def registered_capabilities ( self ) : return dict ( [ ( CAPABILITIES [ c . id ] , c ) for c in self . capabilities if c . id in CAPABILITIES ] ) | Return dictionary of non - YANG capabilities . | 41 | 9 |
225,155 | def listsdelete ( x , xs ) : i = xs . index ( x ) return xs [ : i ] + xs [ ( i + 1 ) : ] | Return a new list with x removed from xs | 37 | 10 |
225,156 | def unique_prefixes ( context ) : res = { } for m in context . modules . values ( ) : if m . keyword == "submodule" : continue prf = new = m . i_prefix suff = 0 while new in res . values ( ) : suff += 1 new = "%s%x" % ( prf , suff ) res [ m ] = new return res | Return a dictionary with unique prefixes for modules in context . | 82 | 12 |
225,157 | def preprocess_files ( self , prefix ) : if prefix is None : return files = ( "bin/yang2dsdl" , "man/man1/yang2dsdl.1" , "pyang/plugins/jsonxsl.py" ) regex = re . compile ( "^(.*)/usr/local(.*)$" ) for f in files : inf = open ( f ) cnt = inf . readlines ( ) inf . close ( ) ouf = open ( f , "w" ) for line in cnt : mo = regex . search ( line ) if mo is None : ouf . write ( line ) else : ouf . write ( mo . group ( 1 ) + prefix + mo . group ( 2 ) + "\n" ) ouf . close ( ) | Change the installation prefix where necessary . | 171 | 7 |
225,158 | def add_module ( self , ref , text , format = None , expect_modulename = None , expect_revision = None , expect_failure_error = True ) : if format == None : format = util . guess_format ( text ) if format == 'yin' : p = yin_parser . YinParser ( ) else : p = yang_parser . YangParser ( ) module = p . parse ( self , ref , text ) if module is None : return None if expect_modulename is not None : if not re . match ( syntax . re_identifier , expect_modulename ) : error . err_add ( self . errors , module . pos , 'FILENAME_BAD_MODULE_NAME' , ( ref , expect_modulename , syntax . identifier ) ) elif expect_modulename != module . arg : if expect_failure_error : error . err_add ( self . errors , module . pos , 'BAD_MODULE_NAME' , ( module . arg , ref , expect_modulename ) ) return None else : error . err_add ( self . errors , module . pos , 'WBAD_MODULE_NAME' , ( module . arg , ref , expect_modulename ) ) latest_rev = util . get_latest_revision ( module ) if expect_revision is not None : if not re . match ( syntax . re_date , expect_revision ) : error . err_add ( self . errors , module . pos , 'FILENAME_BAD_REVISION' , ( ref , expect_revision , 'YYYY-MM-DD' ) ) elif expect_revision != latest_rev : if expect_failure_error : error . err_add ( self . errors , module . pos , 'BAD_REVISION' , ( latest_rev , ref , expect_revision ) ) return None else : error . err_add ( self . errors , module . pos , 'WBAD_REVISION' , ( latest_rev , ref , expect_revision ) ) if module . arg not in self . revs : self . revs [ module . arg ] = [ ] revs = self . revs [ module . arg ] revs . append ( ( latest_rev , None ) ) return self . add_parsed_module ( module ) | Parse a module text and add the module data to the context | 521 | 13 |
225,159 | def del_module ( self , module ) : rev = util . get_latest_revision ( module ) del self . modules [ ( module . arg , rev ) ] | Remove a module from the context | 36 | 6 |
225,160 | def get_module ( self , modulename , revision = None ) : if revision is None and modulename in self . revs : ( revision , _handle ) = self . _get_latest_rev ( self . revs [ modulename ] ) if revision is not None : if ( modulename , revision ) in self . modules : return self . modules [ ( modulename , revision ) ] else : return None | Return the module if it exists in the context | 92 | 9 |
225,161 | def init ( plugindirs = [ ] ) : # initialize the builtin plugins from . translators import yang , yin , dsdl yang . pyang_plugin_init ( ) yin . pyang_plugin_init ( ) dsdl . pyang_plugin_init ( ) # initialize installed plugins for ep in pkg_resources . iter_entry_points ( group = 'pyang.plugin' ) : plugin_init = ep . load ( ) plugin_init ( ) # search for plugins in std directories (plugins directory first) basedir = os . path . split ( sys . modules [ 'pyang' ] . __file__ ) [ 0 ] plugindirs . insert ( 0 , basedir + "/transforms" ) plugindirs . insert ( 0 , basedir + "/plugins" ) # add paths from env pluginpath = os . getenv ( 'PYANG_PLUGINPATH' ) if pluginpath is not None : plugindirs . extend ( pluginpath . split ( os . pathsep ) ) syspath = sys . path for plugindir in plugindirs : sys . path = [ plugindir ] + syspath try : fnames = os . listdir ( plugindir ) except OSError : continue modnames = [ ] for fname in fnames : if ( fname . startswith ( ".#" ) or fname . startswith ( "__init__.py" ) or fname . endswith ( "_flymake.py" ) or fname . endswith ( "_flymake.pyc" ) ) : pass elif fname . endswith ( ".py" ) : modname = fname [ : - 3 ] if modname not in modnames : modnames . append ( modname ) elif fname . endswith ( ".pyc" ) : modname = fname [ : - 4 ] if modname not in modnames : modnames . append ( modname ) for modname in modnames : pluginmod = __import__ ( modname ) try : pluginmod . pyang_plugin_init ( ) except AttributeError as s : print ( pluginmod . __dict__ ) raise AttributeError ( pluginmod . __file__ + ': ' + str ( s ) ) sys . path = syspath | Initialize the plugin framework | 505 | 5 |
225,162 | def split_qname ( qname ) : res = qname . split ( YinParser . ns_sep ) if len ( res ) == 1 : # no namespace return None , res [ 0 ] else : return res | Split qname into namespace URI and local name | 47 | 9 |
225,163 | def check_attr ( self , pos , attrs ) : for at in attrs : ( ns , local_name ) = self . split_qname ( at ) if ns is None : error . err_add ( self . ctx . errors , pos , 'UNEXPECTED_ATTRIBUTE' , local_name ) elif ns == yin_namespace : error . err_add ( self . ctx . errors , pos , 'UNEXPECTED_ATTRIBUTE' , "{" + at ) | Check for unknown attributes . | 114 | 5 |
225,164 | def search_definition ( self , module , keyword , arg ) : r = module . search_one ( keyword , arg ) if r is not None : return r for i in module . search ( 'include' ) : modulename = i . arg m = self . ctx . search_module ( i . pos , modulename ) if m is not None : r = m . search_one ( keyword , arg ) if r is not None : return r return None | Search for a defintion with keyword name Search the module and its submodules . | 100 | 17 |
225,165 | def emit_path_arg ( keywordstr , arg , fd , indent , max_line_len , line_len , eol ) : quote = '"' arg = escape_str ( arg ) if not ( need_new_line ( max_line_len , line_len , arg ) ) : fd . write ( " " + quote + arg + quote ) return False ## FIXME: we should split the path on '/' and '[]' into multiple lines ## and then print each line num_chars = max_line_len - line_len if num_chars <= 0 : # really small max_line_len; we give up fd . write ( " " + quote + arg + quote ) return False while num_chars > 2 and arg [ num_chars - 1 : num_chars ] . isalnum ( ) : num_chars -= 1 fd . write ( " " + quote + arg [ : num_chars ] + quote ) arg = arg [ num_chars : ] keyword_cont = ( ( len ( keywordstr ) - 1 ) * ' ' ) + '+' while arg != '' : line_len = len ( "%s%s %s%s%s%s" % ( indent , keyword_cont , quote , arg , quote , eol ) ) num_chars = len ( arg ) - ( line_len - max_line_len ) while num_chars > 2 and arg [ num_chars - 1 : num_chars ] . isalnum ( ) : num_chars -= 1 fd . write ( '\n' + indent + keyword_cont + " " + quote + arg [ : num_chars ] + quote ) arg = arg [ num_chars : ] | Heuristically pretty print a path argument | 385 | 8 |
225,166 | def emit_arg ( keywordstr , stmt , fd , indent , indentstep , max_line_len , line_len ) : arg = escape_str ( stmt . arg ) lines = arg . splitlines ( True ) if len ( lines ) <= 1 : if len ( arg ) > 0 and arg [ - 1 ] == '\n' : arg = arg [ : - 1 ] + r'\n' if ( stmt . keyword in _force_newline_arg or need_new_line ( max_line_len , line_len , arg ) ) : fd . write ( '\n' + indent + indentstep + '"' + arg + '"' ) return True else : fd . write ( ' "' + arg + '"' ) return False else : need_nl = False if stmt . keyword in _force_newline_arg : need_nl = True elif len ( keywordstr ) > 8 : # Heuristics: multi-line after a "long" keyword looks better # than after a "short" keyword (compare 'when' and 'error-message') need_nl = True else : for line in lines : if need_new_line ( max_line_len , line_len + 1 , line ) : need_nl = True break if need_nl : fd . write ( '\n' + indent + indentstep ) prefix = indent + indentstep else : fd . write ( ' ' ) prefix = indent + len ( keywordstr ) * ' ' + ' ' fd . write ( '"' + lines [ 0 ] ) for line in lines [ 1 : - 1 ] : if line [ 0 ] == '\n' : fd . write ( '\n' ) else : fd . write ( prefix + ' ' + line ) # write last line fd . write ( prefix + ' ' + lines [ - 1 ] ) if lines [ - 1 ] [ - 1 ] == '\n' : # last line ends with a newline, indent the ending quote fd . write ( prefix + '"' ) else : fd . write ( '"' ) return True | Heuristically pretty print the argument string with double quotes | 461 | 11 |
225,167 | def process_children ( self , node , elem , module , path , omit = [ ] ) : for ch in node . i_children : if ch not in omit and ( ch . i_config or self . doctype == "data" ) : self . node_handler . get ( ch . keyword , self . ignore ) ( ch , elem , module , path ) | Proceed with all children of node . | 80 | 8 |
225,168 | def container ( self , node , elem , module , path ) : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return if self . annots : pres = node . search_one ( "presence" ) if pres is not None : nel . append ( etree . Comment ( " presence: %s " % pres . arg ) ) self . process_children ( node , nel , newm , path ) | Create a sample container element and proceed with its children . | 107 | 11 |
225,169 | def leaf ( self , node , elem , module , path ) : if node . i_default is None : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return if self . annots : nel . append ( etree . Comment ( " type: %s " % node . search_one ( "type" ) . arg ) ) elif self . defaults : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return nel . text = str ( node . i_default_str ) | Create a sample leaf element . | 140 | 6 |
225,170 | def anyxml ( self , node , elem , module , path ) : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return if self . annots : nel . append ( etree . Comment ( " anyxml " ) ) | Create a sample anyxml element . | 67 | 7 |
225,171 | def list ( self , node , elem , module , path ) : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return for kn in node . i_key : self . node_handler . get ( kn . keyword , self . ignore ) ( kn , nel , newm , path ) self . process_children ( node , nel , newm , path , node . i_key ) minel = node . search_one ( "min-elements" ) self . add_copies ( node , elem , nel , minel ) if self . annots : self . list_comment ( node , nel , minel ) | Create sample entries of a list . | 156 | 7 |
225,172 | def leaf_list ( self , node , elem , module , path ) : nel , newm , path = self . sample_element ( node , elem , module , path ) if path is None : return minel = node . search_one ( "min-elements" ) self . add_copies ( node , elem , nel , minel ) self . list_comment ( node , nel , minel ) | Create sample entries of a leaf - list . | 94 | 9 |
225,173 | def sample_element ( self , node , parent , module , path ) : if path is None : return parent , module , None elif path == [ ] : # GO ON pass else : if node . arg == path [ 0 ] : path = path [ 1 : ] else : return parent , module , None res = etree . SubElement ( parent , node . arg ) mm = node . main_module ( ) if mm != module : res . attrib [ "xmlns" ] = self . ns_uri [ mm ] module = mm return res , module , path | Create element under parent . | 121 | 5 |
225,174 | def add_copies ( self , node , parent , elem , minel ) : rep = 0 if minel is None else int ( minel . arg ) - 1 for i in range ( rep ) : parent . append ( copy . deepcopy ( elem ) ) | Add appropriate number of elem copies to parent . | 58 | 10 |
225,175 | def list_comment ( self , node , elem , minel ) : lo = "0" if minel is None else minel . arg maxel = node . search_one ( "max-elements" ) hi = "" if maxel is None else maxel . arg elem . insert ( 0 , etree . Comment ( " # entries: %s..%s " % ( lo , hi ) ) ) if node . keyword == 'list' : elem . insert ( 0 , etree . Comment ( " # keys: " + "," . join ( [ k . arg for k in node . i_key ] ) ) ) | Add list annotation to elem . | 138 | 7 |
225,176 | def slugify ( value , separator = '-' , max_length = 0 , word_boundary = False , entities = True , decimal = True , hexadecimal = True ) : value = normalize ( 'NFKD' , to_string ( value , 'utf-8' , 'ignore' ) ) if unidecode : value = unidecode ( value ) # character entity reference if entities : value = CHAR_ENTITY_REXP . sub ( lambda m : chr ( name2codepoint [ m . group ( 1 ) ] ) , value ) # decimal character reference if decimal : try : value = DECIMAL_REXP . sub ( lambda m : chr ( int ( m . group ( 1 ) ) ) , value ) except Exception : pass # hexadecimal character reference if hexadecimal : try : value = HEX_REXP . sub ( lambda m : chr ( int ( m . group ( 1 ) , 16 ) ) , value ) except Exception : pass value = value . lower ( ) value = REPLACE1_REXP . sub ( '' , value ) value = REPLACE2_REXP . sub ( '-' , value ) # remove redundant - value = REMOVE_REXP . sub ( '-' , value ) . strip ( '-' ) # smart truncate if requested if max_length > 0 : value = smart_truncate ( value , max_length , word_boundary , '-' ) if separator != '-' : value = value . replace ( '-' , separator ) return value | Normalizes string removes non - alpha characters and converts spaces to separator character | 337 | 15 |
225,177 | def smart_truncate ( value , max_length = 0 , word_boundaries = False , separator = ' ' ) : value = value . strip ( separator ) if not max_length : return value if len ( value ) < max_length : return value if not word_boundaries : return value [ : max_length ] . strip ( separator ) if separator not in value : return value [ : max_length ] truncated = '' for word in value . split ( separator ) : if word : next_len = len ( truncated ) + len ( word ) + len ( separator ) if next_len <= max_length : truncated += '{0}{1}' . format ( word , separator ) if not truncated : truncated = value [ : max_length ] return truncated . strip ( separator ) | Truncate a string | 183 | 5 |
225,178 | def get ( self ) : if self . _current : return self . _resume ( self . _current , False ) else : return self . _get ( None ) | Called by the protocol consumer | 36 | 6 |
225,179 | def pack_pipeline ( self , commands ) : return b'' . join ( starmap ( lambda * args : b'' . join ( self . _pack_command ( args ) ) , ( a for a , _ in commands ) ) ) | Packs pipeline commands into bytes . | 53 | 7 |
225,180 | def pypi_release ( self ) : meta = self . distribution . metadata pypi = ServerProxy ( self . pypi_index_url ) releases = pypi . package_releases ( meta . name ) if releases : return next ( iter ( sorted ( releases , reverse = True ) ) ) | Get the latest pypi release | 67 | 7 |
225,181 | def is_mainthread ( thread = None ) : thread = thread if thread is not None else current_thread ( ) return isinstance ( thread , threading . _MainThread ) | Check if thread is the main thread . | 38 | 8 |
225,182 | def process_data ( name = None ) : ct = current_process ( ) if not hasattr ( ct , '_pulsar_local' ) : ct . _pulsar_local = { } loc = ct . _pulsar_local return loc . get ( name ) if name else loc | Fetch the current process local data dictionary . | 70 | 9 |
225,183 | def thread_data ( name , value = NOTHING , ct = None ) : ct = ct or current_thread ( ) if is_mainthread ( ct ) : loc = process_data ( ) elif not hasattr ( ct , '_pulsar_local' ) : ct . _pulsar_local = loc = { } else : loc = ct . _pulsar_local if value is not NOTHING : if name in loc : if loc [ name ] is not value : raise RuntimeError ( '%s is already available on this thread' % name ) else : loc [ name ] = value return loc . get ( name ) | Set or retrieve an attribute name from thread ct . | 147 | 11 |
225,184 | def parse_address ( netloc , default_port = 8000 ) : if isinstance ( netloc , tuple ) : if len ( netloc ) != 2 : raise ValueError ( 'Invalid address %s' % str ( netloc ) ) return netloc # netloc = native_str ( netloc ) auth = None # Check if auth is available if '@' in netloc : auth , netloc = netloc . split ( '@' ) if netloc . startswith ( "unix:" ) : host = netloc . split ( "unix:" ) [ 1 ] return '%s@%s' % ( auth , host ) if auth else host # get host if '[' in netloc and ']' in netloc : host = netloc . split ( ']' ) [ 0 ] [ 1 : ] . lower ( ) elif ':' in netloc : host = netloc . split ( ':' ) [ 0 ] . lower ( ) elif netloc == "" : host = "0.0.0.0" else : host = netloc . lower ( ) # get port netloc = netloc . split ( ']' ) [ - 1 ] if ":" in netloc : port = netloc . split ( ':' , 1 ) [ 1 ] if not port . isdigit ( ) : raise ValueError ( "%r is not a valid port number." % port ) port = int ( port ) else : port = default_port return ( '%s@%s' % ( auth , host ) if auth else host , port ) | Parse an internet address netloc and return a tuple with host and port . | 333 | 16 |
225,185 | def is_socket_closed ( sock ) : # pragma nocover if not sock : return True try : if not poll : # pragma nocover if not select : return False try : return bool ( select ( [ sock ] , [ ] , [ ] , 0.0 ) [ 0 ] ) except socket . error : return True # This version is better on platforms that support it. p = poll ( ) p . register ( sock , POLLIN ) for ( fno , ev ) in p . poll ( 0.0 ) : if fno == sock . fileno ( ) : # Either data is buffered (bad), or the connection is dropped. return True except Exception : return True | Check if socket sock is closed . | 148 | 7 |
225,186 | def close_socket ( sock ) : if sock : try : sock . shutdown ( socket . SHUT_RDWR ) except Exception : pass try : sock . close ( ) except Exception : pass | Shutdown and close the socket . | 40 | 7 |
225,187 | async def rpc_server_info ( self , request ) : info = await send ( 'arbiter' , 'info' ) info = self . extra_server_info ( request , info ) try : info = await info except TypeError : pass return info | Return a dictionary of information regarding the server and workers . | 57 | 11 |
225,188 | def bind ( self , callback ) : handlers = self . _handlers if self . _self is None : raise RuntimeError ( '%s already fired, cannot add callbacks' % self ) if handlers is None : handlers = [ ] self . _handlers = handlers handlers . append ( callback ) | Bind a callback to this event . | 63 | 7 |
225,189 | def unbind ( self , callback ) : handlers = self . _handlers if handlers : filtered_callbacks = [ f for f in handlers if f != callback ] removed_count = len ( handlers ) - len ( filtered_callbacks ) if removed_count : self . _handlers = filtered_callbacks return removed_count return 0 | Remove a callback from the list | 72 | 6 |
225,190 | def fire ( self , exc = None , data = None ) : o = self . _self if o is not None : handlers = self . _handlers if self . _onetime : self . _handlers = None self . _self = None if handlers : if exc is not None : for hnd in handlers : hnd ( o , exc = exc ) elif data is not None : for hnd in handlers : hnd ( o , data = data ) else : for hnd in handlers : hnd ( o ) if self . _waiter : if exc : self . _waiter . set_exception ( exc ) else : self . _waiter . set_result ( data if data is not None else o ) self . _waiter = None | Fire the event | 163 | 3 |
225,191 | def fire_event ( self , name , exc = None , data = None ) : if self . _events and name in self . _events : self . _events [ name ] . fire ( exc = exc , data = data ) | Fire event at name if it is registered | 49 | 8 |
225,192 | def bind_events ( self , events ) : evs = self . _events if evs and events : for event in evs . values ( ) : if event . name in events : event . bind ( events [ event . name ] ) | Register all known events found in events key - valued parameters . | 51 | 12 |
225,193 | def _get_args_for_reloading ( ) : rv = [ sys . executable ] py_script = sys . argv [ 0 ] if os . name == 'nt' and not os . path . exists ( py_script ) and os . path . exists ( py_script + '.exe' ) : py_script += '.exe' rv . append ( py_script ) rv . extend ( sys . argv [ 1 : ] ) return rv | Returns the executable . This contains a workaround for windows if the executable is incorrectly reported to not have the . exe extension which can cause bugs on reloading . | 101 | 32 |
225,194 | def restart_with_reloader ( self ) : while True : LOGGER . info ( 'Restarting with %s reloader' % self . name ) args = _get_args_for_reloading ( ) new_environ = os . environ . copy ( ) new_environ [ PULSAR_RUN_MAIN ] = 'true' exit_code = subprocess . call ( args , env = new_environ , close_fds = False ) if exit_code != EXIT_CODE : return exit_code | Spawn a new Python interpreter with the same arguments as this one | 120 | 12 |
225,195 | def kill ( self , sig ) : if self . is_alive ( ) and self . _loop : self . _loop . call_soon_threadsafe ( self . _loop . stop ) | Invoke the stop on the event loop method . | 42 | 10 |
225,196 | def encode ( self , method , uri ) : if not self . username or not self . password : return o = self . options qop = o . get ( 'qop' ) realm = o . get ( 'realm' ) nonce = o . get ( 'nonce' ) entdig = None p_parsed = urlparse ( uri ) path = p_parsed . path if p_parsed . query : path += '?' + p_parsed . query ha1 = self . ha1 ( realm , self . password ) ha2 = self . ha2 ( qop , method , path ) if qop == 'auth' : if nonce == self . last_nonce : self . nonce_count += 1 else : self . nonce_count = 1 ncvalue = '%08x' % self . nonce_count s = str ( self . nonce_count ) . encode ( 'utf-8' ) s += nonce . encode ( 'utf-8' ) s += time . ctime ( ) . encode ( 'utf-8' ) s += os . urandom ( 8 ) cnonce = sha1 ( s ) . hexdigest ( ) [ : 16 ] noncebit = "%s:%s:%s:%s:%s" % ( nonce , ncvalue , cnonce , qop , ha2 ) respdig = self . KD ( ha1 , noncebit ) elif qop is None : respdig = self . KD ( ha1 , "%s:%s" % ( nonce , ha2 ) ) else : # XXX handle auth-int. return base = ( 'username="%s", realm="%s", nonce="%s", uri="%s", ' 'response="%s"' % ( self . username , realm , nonce , path , respdig ) ) opaque = o . get ( 'opaque' ) if opaque : base += ', opaque="%s"' % opaque if entdig : base += ', digest="%s"' % entdig base += ', algorithm="%s"' % self . algorithm if qop : base += ', qop=%s, nc=%s, cnonce="%s"' % ( qop , ncvalue , cnonce ) return 'Digest %s' % base | Called by the client to encode Authentication header . | 513 | 10 |
225,197 | def handle_wsgi_error ( environ , exc ) : if isinstance ( exc , tuple ) : exc_info = exc exc = exc [ 1 ] else : exc_info = True request = wsgi_request ( environ ) request . cache . handle_wsgi_error = True old_response = request . cache . pop ( 'response' , None ) response = request . response if old_response : response . content_type = old_response . content_type logger = request . logger # if isinstance ( exc , HTTPError ) : response . status_code = exc . code or 500 else : response . status_code = getattr ( exc , 'status' , 500 ) response . headers . update ( getattr ( exc , 'headers' , None ) or ( ) ) status = response . status_code if status >= 500 : logger . critical ( '%s - @ %s.\n%s' , exc , request . first_line , dump_environ ( environ ) , exc_info = exc_info ) else : log_wsgi_info ( logger . warning , environ , response . status , exc ) if has_empty_content ( status , request . method ) or status in REDIRECT_CODES : response . content_type = None response . content = None else : request . cache . pop ( 'html_document' , None ) renderer = environ . get ( 'error.handler' ) or render_error try : content = renderer ( request , exc ) except Exception : logger . critical ( 'Error while rendering error' , exc_info = True ) response . content_type = 'text/plain' content = 'Critical server error' if content is not response : response . content = content return response | The default error handler while serving a WSGI request . | 379 | 11 |
225,198 | def render_error ( request , exc ) : cfg = request . get ( 'pulsar.cfg' ) debug = cfg . debug if cfg else False response = request . response if not response . content_type : content_type = request . get ( 'default.content_type' ) response . content_type = request . content_types . best_match ( as_tuple ( content_type or DEFAULT_RESPONSE_CONTENT_TYPES ) ) content_type = None if response . content_type : content_type = response . content_type . split ( ';' ) [ 0 ] is_html = content_type == 'text/html' if debug : msg = render_error_debug ( request , exc , is_html ) else : msg = escape ( error_messages . get ( response . status_code ) or exc ) if is_html : msg = textwrap . dedent ( """ <h1>{0[reason]}</h1> {0[msg]} <h3>{0[version]}</h3> """ ) . format ( { "reason" : response . status , "msg" : msg , "version" : request . environ [ 'SERVER_SOFTWARE' ] } ) # if content_type == 'text/html' : doc = HtmlDocument ( title = response . status ) doc . head . embedded_css . append ( error_css ) doc . body . append ( Html ( 'div' , msg , cn = 'pulsar-error' ) ) return doc . to_bytes ( request ) elif content_type in JSON_CONTENT_TYPES : return json . dumps ( { 'status' : response . status_code , 'message' : msg } ) else : return '\n' . join ( msg ) if isinstance ( msg , ( list , tuple ) ) else msg | Default renderer for errors . | 413 | 6 |
225,199 | def render_error_debug ( request , exception , is_html ) : error = Html ( 'div' , cn = 'well well-lg' ) if is_html else [ ] for trace in format_traceback ( exception ) : counter = 0 for line in trace . split ( '\n' ) : if line . startswith ( ' ' ) : counter += 1 line = line [ 2 : ] if line : if is_html : line = Html ( 'p' , escape ( line ) , cn = 'text-danger' ) if counter : line . css ( { 'margin-left' : '%spx' % ( 20 * counter ) } ) error . append ( line ) if is_html : error = Html ( 'div' , Html ( 'h1' , request . response . status ) , error ) return error | Render the exception traceback | 189 | 5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.