idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
37,100
def getSecret(name, version="", region=None, table="credential-store",
              context=None, dynamodb=None, kms=None, **kwargs):
    """Fetch and decrypt the secret called `name` from the DynamoDB store.

    When `version` is empty, the newest stored version is used (descending
    query on the range key, consistent read). Raises ItemNotFound when no
    matching item exists. `dynamodb`/`kms` clients are built lazily from a
    session created with **kwargs unless supplied by the caller.
    `context` is the KMS encryption context (defaults to empty).
    """
    if not context:
        context = {}
    if dynamodb is None or kms is None:
        session = get_session(**kwargs)
        if dynamodb is None:
            dynamodb = session.resource('dynamodb', region_name=region)
        if kms is None:
            kms = session.client('kms', region_name=region)
    secrets = dynamodb.Table(table)
    if version == "":
        # Latest version: newest item for this name (Limit=1, reverse scan).
        response = secrets.query(
            Limit=1,
            ScanIndexForward=False,
            ConsistentRead=True,
            KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name))
        if response["Count"] == 0:
            raise ItemNotFound("Item {'name': '%s'} couldn't be found." % name)
        material = response["Items"][0]
    else:
        response = secrets.get_item(Key={"name": name, "version": version})
        if "Item" not in response:
            raise ItemNotFound(
                "Item {'name': '%s', 'version': '%s'} couldn't be found." % (name, version))
        material = response["Item"]
    key_service = KeyService(kms, None, context)
    # Decrypt with the legacy AES-CTR scheme used by this store.
    return open_aes_ctr_legacy(key_service, material)
Fetch and decrypt the secret called `name`.
37,101
def createDdbTable(region=None, table="credential-store", **kwargs):
    """Create the secret-store table in DynamoDB in the specified region.

    No-op (with a message) if the table already exists. Blocks until the
    table is created, then tags it with Name=credstash.
    """
    session = get_session(**kwargs)
    dynamodb = session.resource("dynamodb", region_name=region)
    if table in (t.name for t in dynamodb.tables.all()):
        print("Credential Store table already exists")
        return
    print("Creating table...")
    # Composite key: secret name (hash) + version (range).
    dynamodb.create_table(
        TableName=table,
        KeySchema=[{"AttributeName": "name", "KeyType": "HASH", },
                   {"AttributeName": "version", "KeyType": "RANGE", }],
        AttributeDefinitions=[{"AttributeName": "name", "AttributeType": "S", },
                              {"AttributeName": "version", "AttributeType": "S", }, ],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1, })
    print("Waiting for table to be created...")
    client = session.client("dynamodb", region_name=region)
    response = client.describe_table(TableName=table)
    client.get_waiter("table_exists").wait(TableName=table)
    print("Adding tag...")
    client.tag_resource(ResourceArn=response["Table"]["TableArn"],
                        Tags=[{'Key': "Name", 'Value': "credstash"}, ])
    print("Table has been created. "
          "Go read the README about how to create your KMS key")
create the secret store table in DDB in the specified region
37,102
def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST):
    """Encrypt `secret` using the key service.

    Decrypt with the companion `open_aes_ctr_legacy`. Returns a dict of
    base64-encoded wrapped key and ciphertext, a hex-encoded HMAC, and the
    digest name used for the HMAC.
    """
    # 64 bytes: first half for AES, second half for the HMAC key.
    key, encoded_key = key_service.generate_key_data(64)
    ciphertext, hmac = _seal_aes_ctr(secret, key, LEGACY_NONCE, digest_method,)
    return {
        'key': b64encode(encoded_key).decode('utf-8'),
        'contents': b64encode(ciphertext).decode('utf-8'),
        'hmac': codecs.encode(hmac, "hex_codec"),
        'digest': digest_method,
    }
Encrypts secret using the key service . You can decrypt with the companion method open_aes_ctr_legacy .
37,103
def check_status(self):
    """Check the RabbitMQ service by opening and closing a broker connection.

    Reads BROKER_URL from django settings; records a ServiceUnavailable
    error on the health-check instance for each failure mode.
    """
    logger.debug("Checking for a broker_url on django settings...")
    broker_url = getattr(settings, "BROKER_URL", None)
    logger.debug("Got %s as the broker_url. Connecting to rabbit...",
                 broker_url)
    logger.debug("Attempting to connect to rabbit...")
    try:
        with Connection(broker_url) as conn:
            conn.connect()
    except ConnectionRefusedError as e:
        self.add_error(ServiceUnavailable(
            "Unable to connect to RabbitMQ: Connection was refused."), e)
    except AccessRefused as e:
        self.add_error(ServiceUnavailable(
            "Unable to connect to RabbitMQ: Authentication error."), e)
    except IOError as e:
        self.add_error(ServiceUnavailable("IOError"), e)
    except Exception as e:
        # Fixed: was `except BaseException`, which also swallowed
        # KeyboardInterrupt and SystemExit. Exception keeps the catch-all
        # for real runtime errors only.
        self.add_error(ServiceUnavailable("Unknown error"), e)
    else:
        logger.debug("Connection estabilished. RabbitMQ is healthy.")
Check RabbitMQ service by opening and closing a broker channel .
37,104
def from_string(cls, value):
    """Return a single instance parsed from the given Accept-header token."""
    match = cls.pattern.search(value)
    if match is None:
        raise ValueError('"%s" is not a valid media type' % value)
    mime = match.group('mime_type')
    weight = match.group('weight')
    try:
        return cls(mime, float(weight or 1))
    except ValueError:
        # Weight was unparsable as a float: fall back to the raw token.
        return cls(value)
Return single instance parsed from given accept header string .
37,105
def parse_header(cls, value='*/*'):
    """Parse an HTTP Accept header; yield instances sorted by weight (desc)."""
    tokens = (token.strip() for token in value.split(','))
    parsed = [cls.from_string(tok) for tok in tokens if tok]
    yield from sorted(parsed, reverse=True)
Parse HTTP accept header and return instances sorted by weight .
37,106
def convert_to_timezone_naive(time_to_freeze):
    """Convert a possibly timezone-aware datetime to a naive UTC datetime."""
    if not time_to_freeze.tzinfo:
        return time_to_freeze
    utc_equivalent = time_to_freeze - time_to_freeze.utcoffset()
    return utc_equivalent.replace(tzinfo=None)
Converts a potentially timezone - aware datetime to be a naive UTC datetime
37,107
def move_to(self, target_datetime):
    """Move the frozen date to the given `target_datetime`."""
    target = _parse_time_to_freeze(target_datetime)
    self.tick(delta=target - self.time_to_freeze)
Moves frozen date to the given target_datetime
37,108
def process_module(self, yam):
    """Process data nodes, RPCs and notifications in a single module."""
    for ann in yam.search(("ietf-yang-metadata", "annotation")):
        self.process_annotation(ann)
    # Iterate over a copy: rpc/notification children are removed from
    # yam.i_children as they are handled.
    for ch in yam.i_children[:]:
        if ch.keyword == "rpc":
            self.process_rpc(ch)
        elif ch.keyword == "notification":
            self.process_notification(ch)
        else:
            continue
        yam.i_children.remove(ch)
    self.process_children(yam, "//nc:*", 1)
Process data nodes RPCs and notifications in a single module .
37,109
def process_annotation(self, ann):
    """Process one metadata annotation: emit an XSLT template for it."""
    # Annotations are matched as attributes, hence the "@" prefix.
    tmpl = self.xsl_template("@" + self.qname(ann))
    ET.SubElement(tmpl, "param", name="level", select="0")
    ct = self.xsl_calltemplate("leaf", tmpl)
    ET.SubElement(ct, "with-param", name="level", select="$level")
    self.xsl_withparam("nsid", ann.i_module.i_modulename + ":", ct)
    self.type_param(ann, ct)
Process metadata annotation .
37,110
def process_rpc(self, rpc):
    """Process input and output parts of `rpc`."""
    p = "/nc:rpc/" + self.qname(rpc)
    tmpl = self.xsl_template(p)
    inp = rpc.search_one("input")
    if inp is not None:
        ct = self.xsl_calltemplate("rpc-input", tmpl)
        self.xsl_withparam("nsid", rpc.i_module.i_modulename + ":", ct)
        self.process_children(inp, p, 2)
    outp = rpc.search_one("output")
    if outp is not None:
        # Output children live under the rpc-reply element.
        self.process_children(outp, "/nc:rpc-reply", 1)
Process input and output parts of rpc .
37,111
def process_notification(self, ntf):
    """Process event notification `ntf`."""
    p = "/en:notification/" + self.qname(ntf)
    tmpl = self.xsl_template(p)
    ct = self.xsl_calltemplate("container", tmpl)
    self.xsl_withparam("level", "1", ct)
    # NOTE(review): nsid is only emitted when the notification is named
    # "eventTime" — presumably to disambiguate from the standard
    # eventTime leaf; confirm against the plugin's stylesheet.
    if ntf.arg == "eventTime":
        self.xsl_withparam("nsid", ntf.i_module.i_modulename + ":", ct)
    self.process_children(ntf, p, 2)
Process event notification ntf .
37,112
def process_children(self, node, path, level, parent=None):
    """Process all children of `node`, emitting one XSLT template each."""
    # choice/case are transparent: their children belong to `node`'s
    # namespace context, so remember the real data parent.
    data_parent = parent if parent else node
    chs = node.i_children
    for ch in chs:
        if ch.keyword in ["choice", "case"]:
            self.process_children(ch, path, level, node)
            continue
        p = path + "/" + self.qname(ch)
        tmpl = self.xsl_template(p)
        ct = self.xsl_calltemplate(ch.keyword, tmpl)
        self.xsl_withparam("level", "%d" % level, ct)
        # Emit a namespace id only when crossing a module boundary.
        if (data_parent.i_module is None
                or ch.i_module.i_modulename != data_parent.i_module.i_modulename):
            self.xsl_withparam("nsid", ch.i_module.i_modulename + ":", ct)
        if ch.keyword in ["leaf", "leaf-list"]:
            self.type_param(ch, ct)
        elif ch.keyword != "anyxml":
            # Lists add an extra nesting level (entry wrapper).
            offset = 2 if ch.keyword == "list" else 1
            self.process_children(ch, p, level + offset)
Process all children of node .
37,113
def type_param(self, node, ct):
    """Resolve the type of a leaf or leaf-list node for JSON output.

    Emits a "type" with-param (and "options" for unions) on `ct`.
    """
    types = self.get_types(node)
    ftyp = types[0]
    if len(types) == 1:
        # Single (non-union) type: map to its JSON class or "other".
        if ftyp in type_class:
            jtyp = type_class[ftyp]
        else:
            jtyp = "other"
        self.xsl_withparam("type", jtyp, ct)
    elif ftyp in ["string", "enumeration", "bits", "binary",
                  "identityref", "instance-identifier"]:
        # Union whose first member is string-like: everything encodes
        # as a JSON string.
        self.xsl_withparam("type", "string", ct)
    else:
        # General union: collect the distinct member classes.
        opts = []
        for t in types:
            if t in union_class:
                ut = union_class[t]
            elif t in ["int64", "uint64"] or t.startswith("decimal@"):
                ut = t
            else:
                ut = "other"
            if ut not in opts:
                opts.append(ut)
                if ut == "other":
                    # "other" subsumes the rest; stop collecting.
                    break
                if ut == "decimal" and "integer" not in opts:
                    opts.append("integer")
        self.xsl_withparam("type", "union", ct)
        # Trailing comma is part of the options encoding.
        self.xsl_withparam("options", ",".join(opts) + ",", ct)
Resolve the type of a leaf or leaf - list node for JSON .
37,114
def xsl_text(self, text, parent):
    """Construct an XSLT `text` element containing `text`."""
    element = ET.SubElement(parent, "text")
    element.text = text
    return element
Construct an XSLT text element containing text .
37,115
def xsl_withparam(self, name, value, parent):
    """Construct an XSLT `with-param` element with the given name/value."""
    param = ET.SubElement(parent, "with-param", name=name)
    param.text = value
    return param
Construct an XSLT with - param element .
37,116
def element(cls, name, parent=None, interleave=None, occur=0):
    """Create an element node."""
    node = cls("element", parent, interleave=interleave)
    node.occur = occur
    node.attr["name"] = name
    return node
Create an element node .
37,117
def leaf_list(cls, name, parent=None, interleave=None):
    """Create a `_list_` node for a leaf-list."""
    node = cls("_list_", parent, interleave=interleave)
    node.attr["name"] = name
    # A leaf-list has no keys and no upper bound by default.
    for attrname, val in (("keys", None), ("minEl", "0"),
                          ("maxEl", None), ("occur", 3)):
        setattr(node, attrname, val)
    return node
Create _list_ node for a leaf - list .
37,118
def list(cls, name, parent=None, interleave=None):
    """Create a `_list_` node for a list (a keyed leaf-list node)."""
    node = cls.leaf_list(name, parent, interleave=interleave)
    node.keymap = {}
    node.keys = []
    return node
Create _list_ node for a list .
37,119
def choice(cls, parent=None, occur=0):
    """Create a choice node."""
    node = cls("choice", parent)
    node.default_case = None
    node.occur = occur
    return node
Create choice node .
37,120
def define(cls, name, parent=None, interleave=False):
    """Create a define node."""
    node = cls("define", parent, interleave=interleave)
    node.attr["name"] = name
    node.occur = 0
    return node
Create define node .
37,121
def adjust_interleave(self, interleave):
    """Inherit interleave status from the parent when undefined.

    Fixed: `interleave == None` replaced with the identity test
    `interleave is None` (PEP 8; `None` is a singleton).
    """
    if interleave is None and self.parent:
        self.interleave = self.parent.interleave
    else:
        self.interleave = interleave
Inherit interleave status from parent if undefined .
37,122
def subnode(self, node):
    """Attach `node` as a child of the receiver and refresh its interleave."""
    node.parent = self
    self.children.append(node)
    node.adjust_interleave(node.interleave)
Make `node` the receiver's child.
37,123
def annot(self, node):
    """Add `node` as an annotation of the receiver."""
    node.parent = self
    self.annots.append(node)
Add node as an annotation of the receiver .
37,124
def start_tag(self, alt=None, empty=False):
    """Return the XML start tag for the receiver.

    `alt` overrides the element name; `empty` emits a self-closing tag
    whose text carries a trailing "%s" placeholder for later interpolation.
    """
    if alt:
        name = alt
    else:
        name = self.name
    result = "<" + name
    for it in self.attr:
        # '%' is doubled so the result survives a later %-format pass.
        result += ' %s="%s"' % (it, escape(self.attr[it],
                                           {'"': "&quot;", '%': "%%"}))
    if empty:
        return result + "/>%s"
    else:
        return result + ">"
Return XML start tag for the receiver .
37,125
def end_tag(self, alt=None):
    """Return the XML end tag for the receiver (or for `alt` if given)."""
    name = alt if alt else self.name
    return "</%s>" % name
Return XML end tag for the receiver .
37,126
def serialize(self, occur=None):
    """Return the RELAX NG representation of the receiver and its subtree."""
    # Per-element formats carry a "%s" slot that receives the escaped
    # text plus serialized children.
    fmt = self.ser_format.get(self.name, SchemaNode._default_format)
    return fmt(self, occur) % (escape(self.text) + self.serialize_children())
Return RELAX NG representation of the receiver and subtree .
37,127
def _default_format ( self , occur ) : if self . text or self . children : return self . start_tag ( ) + "%s" + self . end_tag ( ) return self . start_tag ( empty = True )
Return the default serialization format .
37,128
def _define_format(self, occur):
    """Return the serialization format for a define node."""
    if hasattr(self, "default"):
        self.attr["nma:default"] = self.default
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    # Annotations are escaped ('%' doubled) so the later %-format pass
    # leaves them intact.
    return (self.start_tag() + self.serialize_annots().replace("%", "%%")
            + middle + self.end_tag())
Return the serialization format for a define node .
37,129
def _element_format(self, occur):
    """Return the serialization format for an element node."""
    if occur:
        occ = occur
    else:
        occ = self.occur
    if occ == 1:
        # Optional with a default, or implicitly created otherwise.
        if hasattr(self, "default"):
            self.attr["nma:default"] = self.default
        else:
            self.attr["nma:implicit"] = "true"
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    fmt = (self.start_tag() + self.serialize_annots().replace("%", "%%")
           + middle + self.end_tag())
    # Note operator precedence: mandatory (occ == 2), OR parent is a
    # choice, OR (parent is a case with this as its only child).
    if (occ == 2 or self.parent.name == "choice"
            or self.parent.name == "case" and len(self.parent.children) == 1):
        return fmt
    else:
        return "<optional>" + fmt + "</optional>"
Return the serialization format for an element node .
37,130
def _list_format(self, occur):
    """Return the serialization format for a `_list_` node."""
    if self.keys:
        self.attr["nma:key"] = " ".join(self.keys)
        # Keys are serialized first, as mandatory (occur=2) elements.
        keys = ''.join([self.keymap[k].serialize(occur=2) for k in self.keys])
    else:
        keys = ""
    if self.maxEl:
        self.attr["nma:max-elements"] = self.maxEl
    if int(self.minEl) == 0:
        ord_ = "zeroOrMore"
    else:
        ord_ = "oneOrMore"
        if int(self.minEl) > 1:
            self.attr["nma:min-elements"] = self.minEl
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    return ("<" + ord_ + ">" + self.start_tag("element")
            + (self.serialize_annots() + keys).replace("%", "%%")
            + middle + self.end_tag("element") + "</" + ord_ + ">")
Return the serialization format for a _list_ node .
37,131
def _choice_format ( self , occur ) : middle = "%s" if self . rng_children ( ) else "<empty/>%s" fmt = self . start_tag ( ) + middle + self . end_tag ( ) if self . occur != 2 : return "<optional>" + fmt + "</optional>" else : return fmt
Return the serialization format for a choice node .
37,132
def _case_format ( self , occur ) : if self . occur == 1 : self . attr [ "nma:implicit" ] = "true" ccnt = len ( self . rng_children ( ) ) if ccnt == 0 : return "<empty/>%s" if ccnt == 1 or not self . interleave : return self . start_tag ( "group" ) + "%s" + self . end_tag ( "group" ) return ( self . start_tag ( "interleave" ) + "%s" + self . end_tag ( "interleave" ) )
Return the serialization format for a case node .
37,133
def process_children(self, node, parent, pmod):
    """Process all children of `node` except rpc and notification.

    Fills `parent` (a dict) with name -> [keyword, ...] entries; container
    and list children recurse, lists also record their key names.
    Fixed: removed the unused local `modname` assignment.
    """
    for ch in node.i_children:
        if ch.keyword in ["rpc", "notification"]:
            continue
        if ch.keyword in ["choice", "case"]:
            # Transparent nodes: their children belong to `parent`.
            self.process_children(ch, parent, pmod)
            continue
        if ch.i_module.i_modulename == pmod:
            nmod = pmod
            nodename = ch.arg
        else:
            # Foreign module: qualify the name.
            nmod = ch.i_module.i_modulename
            nodename = "%s:%s" % (nmod, ch.arg)
        ndata = [ch.keyword]
        if ch.keyword == "container":
            ndata.append({})
            self.process_children(ch, ndata[1], nmod)
        elif ch.keyword == "list":
            ndata.append({})
            self.process_children(ch, ndata[1], nmod)
            ndata.append([(k.i_module.i_modulename, k.arg)
                          for k in ch.i_key])
        elif ch.keyword in ["leaf", "leaf-list"]:
            ndata.append(self.base_type(ch.search_one("type")))
        parent[nodename] = ndata
Process all children of node except rpc and notification .
37,134
def base_type(self, type):
    """Return the base type of `type`, following leafrefs and typedefs.

    decimal64 returns [name, fraction-digits]; union returns
    [name, [member base types]]; otherwise the type name string.
    (Note: the parameter shadows the builtin `type`; kept for
    interface compatibility.)
    """
    # Walk leafref targets and typedef chains down to a built-in type.
    while 1:
        if type.arg == "leafref":
            node = type.i_type_spec.i_target_node
        elif type.i_typedef is None:
            break
        else:
            node = type.i_typedef
        type = node.search_one("type")
    if type.arg == "decimal64":
        return [type.arg, int(type.search_one("fraction-digits").arg)]
    elif type.arg == "union":
        return [type.arg, [self.base_type(x) for x in type.i_type_spec.types]]
    else:
        return type.arg
Return the base type of type .
37,135
def skip(self):
    """Skip whitespace (and comments unless keep_comments) and track offset."""
    buflen = len(self.buf)
    while True:
        self.buf = self.buf.lstrip()
        if self.buf == '':
            # Line exhausted: pull the next one and restart the strip.
            self.readline()
            buflen = len(self.buf)
        else:
            self.offset += (buflen - len(self.buf))
            break
    if not self.keep_comments:
        if self.buf[0] == '/':
            if self.buf[1] == '/':
                # Line comment: discard the rest of the line.
                self.readline()
                return self.skip()
            elif self.buf[1] == '*':
                # Block comment: consume lines until the closing "*/".
                i = self.buf.find('*/')
                while i == -1:
                    self.readline()
                    i = self.buf.find('*/')
                self.set_buf(i + 2)
                return self.skip()
Skip whitespace and count position
37,136
def parse(self, ctx, ref, text):
    """Parse the string `text` containing a YANG statement.

    Returns the parsed statement, or None on error (errors are
    accumulated in ctx.errors).
    """
    self.ctx = ctx
    self.pos = error.Position(ref)
    self.top = None
    try:
        self.tokenizer = YangTokenizer(text, self.pos, ctx.errors,
                                       ctx.max_line_len, ctx.keep_comments,
                                       not ctx.lax_quote_checks)
        stmt = self._parse_statement(None)
    except error.Abort:
        return None
    except error.Eof as e:
        error.err_add(self.ctx.errors, self.pos, 'EOF_ERROR', ())
        return None
    # A well-formed input is fully consumed: peek() must now raise Eof.
    try:
        self.tokenizer.peek()
    except error.Eof:
        return stmt
    except:
        # Any other tokenizer failure here is still trailing garbage;
        # fall through to the error below.
        pass
    error.err_add(self.ctx.errors, self.pos, 'TRAILING_GARBAGE', ())
    return None
Parse the string text containing a YANG statement .
37,137
def add_validation_phase(phase, before=None, after=None):
    """Add a validation phase to the framework.

    Inserts before/after the named phase when found; appends otherwise.
    """
    for idx, existing in enumerate(_validation_phases):
        if existing == before:
            _validation_phases.insert(idx, phase)
            return
        if existing == after:
            _validation_phases.insert(idx + 1, phase)
            return
    _validation_phases.append(phase)
Add a validation phase to the framework .
37,138
def add_validation_fun(phase, keywords, f):
    """Add a validation function `f` for `keywords` in `phase`.

    If a function is already registered for (phase, keyword), it is
    composed so both run (old first, then `f`).
    """
    for keyword in keywords:
        if (phase, keyword) in _validation_map:
            oldf = _validation_map[(phase, keyword)]

            # Fixed: bind oldf as a default argument. Closures capture
            # variables late, so without this every composed wrapper
            # created in this loop would call the *last* oldf seen when
            # several keywords already had registered functions.
            def newf(ctx, s, oldf=oldf):
                oldf(ctx, s)
                f(ctx, s)
            _validation_map[(phase, keyword)] = newf
        else:
            _validation_map[(phase, keyword)] = f
Add a validation function to some phase in the framework .
37,139
def v_init_extension(ctx, stmt):
    """Find the module name of the prefix and set stmt.keyword accordingly."""
    prefix, identifier = stmt.raw_keyword
    modname, revision = prefix_to_modulename_and_revision(
        stmt.i_module, prefix, stmt.pos, ctx.errors)
    stmt.keyword = (modname, identifier)
    stmt.i_extension_modulename = modname
    stmt.i_extension_revision = revision
    # Resolved later, in the type phase.
    stmt.i_extension = None
find the modulename of the prefix and set stmt . keyword
37,140
def v_grammar_unique_defs(ctx, stmt):
    """Verify that all typedefs and groupings are unique.

    Called for every statement. Stores typedefs in stmt.i_typedefs and
    groupings in stmt.i_groupings; at the top level also features,
    identities and extensions. Fixed: the loop variable no longer shadows
    the builtin `dict`.
    """
    defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs),
            ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)]
    if stmt.parent is None:
        # Module/submodule level: these constructs exist only at the top.
        defs.extend([('feature', 'FEATURE_ALREADY_DEFINED', stmt.i_features),
                     ('identity', 'IDENTITY_ALREADY_DEFINED',
                      stmt.i_identities),
                     ('extension', 'EXTENSION_ALREADY_DEFINED',
                      stmt.i_extensions)])
    for (keyword, errcode, definitions) in defs:
        for definition in stmt.search(keyword):
            if definition.arg in definitions:
                other = definitions[definition.arg]
                err_add(ctx.errors, definition.pos, errcode,
                        (definition.arg, other.pos))
            else:
                definitions[definition.arg] = definition
Verify that all typedefs and groupings are unique. Called for every statement; stores typedefs in stmt.i_typedefs and groupings in stmt.i_groupings.
37,141
def v_type_extension(ctx, stmt):
    """Verify that the extension matches the extension definition."""
    (modulename, identifier) = stmt.keyword
    revision = stmt.i_extension_revision
    module = modulename_to_module(stmt.i_module, modulename, revision)
    if module is None:
        return
    if identifier not in module.i_extensions:
        # Fall back to the original module — the extension may be
        # defined there (e.g. in a submodule context).
        if module.i_modulename == stmt.i_orig_module.i_modulename:
            if identifier not in stmt.i_orig_module.i_extensions:
                err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED',
                        (identifier, module.arg))
                return
            else:
                stmt.i_extension = stmt.i_orig_module.i_extensions[identifier]
        else:
            err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED',
                    (identifier, module.arg))
            return
    else:
        stmt.i_extension = module.i_extensions[identifier]
    # The use must agree with the definition about taking an argument.
    ext_arg = stmt.i_extension.search_one('argument')
    if stmt.arg is not None and ext_arg is None:
        err_add(ctx.errors, stmt.pos, 'EXTENSION_ARGUMENT_PRESENT',
                identifier)
    elif stmt.arg is None and ext_arg is not None:
        err_add(ctx.errors, stmt.pos, 'EXTENSION_NO_ARGUMENT_PRESENT',
                identifier)
verify that the extension matches the extension definition
37,142
def v_type_if_feature(ctx, stmt, no_error_report=False):
    """Verify that the referenced feature(s) exist.

    Evaluates the if-feature expression; when it is false, the parent
    statement is scheduled for pruning. YANG 1 only allows a plain
    identifier-ref (no and/or/not expressions).
    """
    stmt.i_feature = None
    expr = syntax.parse_if_feature_expr(stmt.arg)
    if stmt.i_module.i_version == '1':
        # YANG 1: expression syntax is not allowed.
        if type(expr) != type(''):
            err_add(ctx.errors, stmt.pos, 'BAD_VALUE',
                    (stmt.arg, 'identifier-ref'))
            return

    def eval(expr):
        # Leaves are plain feature names; inner nodes are (op, a, b).
        if type(expr) == type(''):
            return has_feature(expr)
        else:
            (op, op1, op2) = expr
            if op == 'not':
                return not eval(op1)
            elif op == 'and':
                return eval(op1) and eval(op2)
            elif op == 'or':
                return eval(op1) or eval(op2)

    def has_feature(name):
        # raises Abort on fatal lookup errors to stop evaluation
        found = None
        if name.find(":") == -1:
            prefix = None
        else:
            [prefix, name] = name.split(':', 1)
        if prefix is None or stmt.i_module.i_prefix == prefix:
            # Reference to a feature in the local module.
            pmodule = stmt.i_module
        else:
            pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos,
                                       ctx.errors)
            if pmodule is None:
                raise Abort
        if name in pmodule.i_features:
            f = pmodule.i_features[name]
            if prefix is None and not is_submodule_included(stmt, f):
                pass
            else:
                found = pmodule.i_features[name]
                v_type_feature(ctx, found)
                if pmodule.i_modulename in ctx.features:
                    # The command line may restrict enabled features.
                    if name not in ctx.features[pmodule.i_modulename]:
                        return False
        if found is None and no_error_report == False:
            err_add(ctx.errors, stmt.pos, 'FEATURE_NOT_FOUND',
                    (name, pmodule.arg))
            raise Abort
        return found is not None

    try:
        if eval(expr) == False:
            # Feature disabled: mark the parent subtree for pruning.
            if stmt.parent not in stmt.i_module.i_prune:
                stmt.i_module.i_prune.append(stmt.parent)
    except Abort:
        pass
verify that the referenced feature exists .
37,143
def v_type_base(ctx, stmt, no_error_report=False):
    """Verify that the referenced identity exists; set stmt.i_identity."""
    name = stmt.arg
    stmt.i_identity = None
    if name.find(":") == -1:
        prefix = None
    else:
        [prefix, name] = name.split(':', 1)
    if prefix is None or stmt.i_module.i_prefix == prefix:
        # Reference to an identity in the local module.
        pmodule = stmt.i_module
    else:
        pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos,
                                   ctx.errors)
        if pmodule is None:
            return
    if name in pmodule.i_identities:
        i = pmodule.i_identities[name]
        # Unprefixed references must come from an included submodule.
        if prefix is None and not is_submodule_included(stmt, i):
            pass
        else:
            stmt.i_identity = i
            v_type_identity(ctx, stmt.i_identity)
    if stmt.i_identity is None and no_error_report == False:
        err_add(ctx.errors, stmt.pos, 'IDENTITY_NOT_FOUND',
                (name, pmodule.arg))
verify that the referenced identity exists .
37,144
def v_unique_name_defintions(ctx, stmt):
    """Make sure that all top-level definitions in a module are unique.

    (Name kept as-is — "defintions" typo is part of the public API.)
    Checks included submodules' statements against the typedefs and
    groupings already collected on `stmt`.
    """
    defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs),
            ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)]

    def f(s):
        # Report a clash between `s` and a previously collected definition.
        for (keyword, errcode, dict) in defs:
            if s.keyword == keyword and s.arg in dict:
                err_add(ctx.errors, dict[s.arg].pos, errcode,
                        (s.arg, s.pos))

    for i in stmt.search('include'):
        submodulename = i.arg
        subm = ctx.get_module(submodulename)
        if subm is not None:
            for s in subm.substmts:
                for ss in s.substmts:
                    iterate_stmt(ss, f)
Make sure that all top - level definitions in a module are unique
37,145
def v_unique_name_children(ctx, stmt):
    """Make sure that each child of `stmt` has a unique name."""
    def sort_pos(p1, p2):
        # Order two positions so the earlier line comes first.
        if p1.line < p2.line:
            return (p1, p2)
        else:
            return (p2, p1)

    dict = {}
    chs = stmt.i_children

    def check(c):
        # Children are keyed by (module name, arg): same name in
        # different modules is allowed.
        key = (c.i_module.i_modulename, c.arg)
        if key in dict:
            dup = dict[key]
            (minpos, maxpos) = sort_pos(c.pos, dup.pos)
            pos = chk_uses_pos(c, maxpos)
            err_add(ctx.errors, pos, 'DUPLICATE_CHILD_NAME',
                    (stmt.arg, stmt.pos, c.arg, minpos))
        else:
            dict[key] = c
        # Names inside choice/case share the parent's namespace.
        if c.keyword == 'choice':
            for case in c.i_children:
                for cc in case.i_children:
                    check(cc)

    for c in chs:
        check(c)
Make sure that each child of stmt has a unique name
37,146
def v_unique_name_leaf_list(ctx, stmt):
    """Make sure config-true leaf-lists do not have duplicate defaults."""
    if not stmt.i_config:
        return
    seen = []
    for defval in stmt.i_default:
        if defval in seen:
            err_add(ctx.errors, stmt.pos, 'DUPLICATE_DEFAULT', (defval))
            continue
        seen.append(defval)
Make sure config-true leaf-lists do not have duplicate defaults.
37,147
def v_reference_choice(ctx, stmt):
    """Make sure that the default case exists and is legal."""
    d = stmt.search_one('default')
    if d is not None:
        # A choice cannot be both mandatory and have a default.
        m = stmt.search_one('mandatory')
        if m is not None and m.arg == 'true':
            err_add(ctx.errors, stmt.pos, 'DEFAULT_AND_MANDATORY', ())
        ptr = attrsearch(d.arg, 'arg', stmt.i_children)
        if ptr is None:
            err_add(ctx.errors, d.pos, 'DEFAULT_CASE_NOT_FOUND', d.arg)
        else:
            def chk_no_defaults(s):
                # The default case must not contain mandatory nodes
                # (recursing through non-presence containers).
                for c in s.i_children:
                    if c.keyword in ('leaf', 'choice'):
                        m = c.search_one('mandatory')
                        if m is not None and m.arg == 'true':
                            err_add(ctx.errors, c.pos,
                                    'MANDATORY_NODE_IN_DEFAULT_CASE', ())
                    elif c.keyword in ('list', 'leaf-list'):
                        m = c.search_one('min-elements')
                        if m is not None and int(m.arg) > 0:
                            err_add(ctx.errors, c.pos,
                                    'MANDATORY_NODE_IN_DEFAULT_CASE', ())
                    elif c.keyword == 'container':
                        p = c.search_one('presence')
                        if p == None or p.arg == 'false':
                            chk_no_defaults(c)
            chk_no_defaults(ptr)
Make sure that the default case exists
37,148
def v_reference_leaf_leafref(ctx, stmt):
    """Verify that leafrefs in a leaf or leaf-list have a correct path."""
    if (hasattr(stmt, 'i_leafref') and stmt.i_leafref is not None
            and stmt.i_leafref_expanded is False):
        path_type_spec = stmt.i_leafref
        # require-instance false allows pointing at non-config targets.
        not_req_inst = not (path_type_spec.require_instance)
        x = validate_leafref_path(ctx, stmt,
                                  path_type_spec.path_spec,
                                  path_type_spec.path_,
                                  accept_non_config_target=not_req_inst)
        if x is None:
            return
        ptr, expanded_path, path_list = x
        path_type_spec.i_target_node = ptr
        path_type_spec.i_expanded_path = expanded_path
        path_type_spec.i_path_list = path_list
        stmt.i_leafref_expanded = True
        if ptr is not None:
            chk_status(ctx, stmt, ptr)
            stmt.i_leafref_ptr = (ptr, path_type_spec.pos)
Verify that all leafrefs in a leaf or leaf - list have correct path
37,149
def has_type(type, names):
    """Return the type if `type` has one of `names` as a base type, else None.

    (The parameter shadows the builtin `type`; kept for interface
    compatibility.)
    """
    if type.arg in names:
        return type
    # Union members are nested `type` substatements.
    for t in type.search('type'):
        r = has_type(t, names)
        if r is not None:
            return r
    if not hasattr(type, 'i_typedef'):
        return None
    # Follow the typedef chain, but only if it is known non-circular.
    if (type.i_typedef is not None
            and hasattr(type.i_typedef, 'i_is_circular')
            and type.i_typedef.i_is_circular == False):
        t = type.i_typedef.search_one('type')
        if t is not None:
            return has_type(t, names)
    return None
Return type with name if type has name as one of its base types and name is in the names list . otherwise return None .
37,150
def search_typedef(stmt, name):
    """Search for typedef `name` in scope, walking up the statement tree.

    A typedef defined in a submodule is visible only if that submodule
    is included by the referencing module.
    """
    mod = stmt.i_orig_module
    node = stmt
    while node is not None:
        if name in node.i_typedefs:
            t = node.i_typedefs[name]
            if (mod is not None and mod != t.i_orig_module
                    and t.i_orig_module.keyword == 'submodule'):
                if mod.search_one('include', t.i_orig_module.arg) is None:
                    return None
            return t
        node = node.parent
    return None
Search for a typedef in scope First search the hierarchy then the module and its submodules .
37,151
def search_grouping(stmt, name):
    """Search for grouping `name` in scope, walking up the statement tree.

    A grouping defined in a submodule is visible only if that submodule
    is included by the referencing module.
    """
    mod = stmt.i_orig_module
    node = stmt
    while node is not None:
        if name in node.i_groupings:
            g = node.i_groupings[name]
            if (mod is not None and mod != g.i_orig_module
                    and g.i_orig_module.keyword == 'submodule'):
                if mod.search_one('include', g.i_orig_module.arg) is None:
                    return None
            return g
        node = node.parent
    return None
Search for a grouping in scope First search the hierarchy then the module and its submodules .
37,152
def is_submodule_included(src, tgt):
    """Check that tgt's submodule is included by src when they belong to
    the same module; True otherwise (including when tgt is untracked)."""
    if tgt is None or not hasattr(tgt, 'i_orig_module'):
        return True
    tmod = tgt.i_orig_module
    smod = src.i_orig_module
    sibling_submodule = (tmod.keyword == 'submodule'
                         and smod != tmod
                         and smod.i_modulename == tmod.i_modulename)
    if sibling_submodule and smod.search_one('include', tmod.arg) is None:
        return False
    return True
Check that the tgt s submodule is included by src if they belong to the same module .
37,153
def mk_path_str(stmt, with_prefixes=False, prefix_onchange=False,
                prefix_to_module=False, resolve_top_prefix_to_module=False):
    """Return the XPath path of the node.

    with_prefixes prefixes every element; prefix_onchange only prefixes
    where the prefix differs from the previous element; prefix_to_module
    (or resolve_top_prefix_to_module for the first element) substitutes
    the module name for the prefix.
    """
    elements = []
    last_prefix = None
    for index, (module_name, prefix, node_name) in enumerate(mk_path_list(stmt)):
        element = node_name
        if with_prefixes or (prefix_onchange and prefix != last_prefix):
            shown = prefix
            if prefix_to_module or (index == 0 and resolve_top_prefix_to_module):
                shown = module_name
            element = '%s:%s' % (shown, node_name)
        elements.append(element)
        last_prefix = prefix
    return '/%s' % '/'.join(elements)
Returns the XPath path of the node . with_prefixes indicates whether or not to prefix every node .
37,154
def get_xpath(stmt, qualified=False, prefix_to_module=False):
    """Get the XPath of the statement.

    Unless qualified=True, prefixes appear only where the prefix changes
    mid-XPath; prefix_to_module replaces prefixes with module names.
    """
    return mk_path_str(stmt, with_prefixes=qualified, prefix_onchange=True,
                       prefix_to_module=prefix_to_module)
Gets the XPath of the statement . Unless qualified = True does not include prefixes unless the prefix changes mid - XPath .
37,155
def get_qualified_type(stmt):
    """Get the qualified top-level type of the node.

    Enters the typedef (if any) instead of using the prefix, to ensure
    absolute distinction. Returns None when the node has no type.
    """
    type_obj = stmt.search_one('type')
    if not type_obj:
        return None
    typedef = getattr(type_obj, 'i_typedef', None)
    if typedef:
        type_obj = typedef
    type_name = type_obj.arg
    if check_primitive_type(type_obj):
        return type_name
    return '%s:%s' % (type_obj.i_orig_module.arg, type_name)
Gets the qualified top - level type of the node . This enters the typedef if defined instead of using the prefix to ensure absolute distinction .
37,156
def get_primitive_type(stmt):
    """Recurse through typedefs and return the most primitive YANG type.

    Raises Exception when a non-primitive type has no typedef to follow
    (an incomplete parse tree).
    """
    type_obj = stmt.search_one('type')
    type_name = getattr(type_obj, 'arg', None)
    typedef_obj = getattr(type_obj, 'i_typedef', None)
    if typedef_obj:
        return get_primitive_type(typedef_obj)
    if type_obj and not check_primitive_type(type_obj):
        raise Exception('%s is not a primitive! Incomplete parse tree?'
                        % type_name)
    return type_name
Recurses through the typedefs and returns the most primitive YANG type defined .
37,157
def search(self, keyword, children=None, arg=None):
    """Return the receiver's substatements matching keyword (and arg)."""
    pool = self.substmts if children is None else children
    matches = []
    for ch in pool:
        if ch.keyword == keyword and (arg is None or ch.arg == arg):
            matches.append(ch)
    return matches
Return a list of the receiver's substmts with the given keyword.
37,158
def search_one(self, keyword, arg=None, children=None):
    """Return the first substmt with keyword (and optionally arg), or None."""
    pool = self.substmts if children is None else children
    return next((ch for ch in pool
                 if ch.keyword == keyword and (arg is None or ch.arg == arg)),
                None)
Return the receiver's substmt with the given keyword and, optionally, arg.
37,159
def main_module(self):
    """Return the main module to which the receiver belongs."""
    mod = self.i_module
    if mod.keyword != "submodule":
        return mod
    # Submodules resolve through their including module.
    return mod.i_ctx.get_module(mod.i_including_modulename)
Return the main module to which the receiver belongs .
37,160
def add_prefix(prefix, s):
    """Add `prefix` to all unprefixed names in `s`."""
    prefixed = (_add_prefix(prefix, tok) for tok in xpath_lexer.scan(s))
    return ''.join(tok.value for tok in prefixed)
Add prefix to all unprefixed names in s
37,161
def chk_date_arg(s):
    """Check whether `s` is a valid YYYY-MM-DD date string.

    Fixed: dropped the unused local binding of the constructed date and
    the unused exception alias; the constructor is called only for its
    range validation (month 1-12, day within month, ...).
    """
    if re_date.search(s) is None:
        return False
    comp = s.split('-')
    try:
        datetime.date(int(comp[0]), int(comp[1]), int(comp[2]))
        return True
    except Exception:
        return False
Checks if the string s is a valid date string .
37,162
def chk_enum_arg(s):
    """Check whether `s` is a valid enum string: non-empty, with no
    leading or trailing whitespace."""
    return bool(s) and not s[0].isspace() and not s[-1].isspace()
Checks if the string s is a valid enum string .
37,163
def chk_fraction_digits_arg(s):
    """Return True iff `s` is a valid fraction-digits argument (1..18)."""
    try:
        value = int(s)
    except ValueError:
        return False
    return 1 <= value <= 18
Checks if the string s is a valid fraction - digits argument .
37,164
def combine(self, patch):
    """Extend `self.plist` with statements from `patch.plist`.

    Statements with an exclusive keyword (config, default, ...) are only
    taken from the patch when the receiver does not define them already.
    """
    exclusive = set(["config", "default", "mandatory", "presence",
                     "min-elements", "max-elements"])
    already = set(s.keyword for s in self.plist) & exclusive
    self.plist.extend(p for p in patch.plist if p.keyword not in already)
Add patch . plist to self . plist .
37,165
def serialize(self):
    """Return the string representation of the receiver (full schema)."""
    # Declare every collected namespace on the top-level grammar element.
    for uri in self.namespaces:
        self.top_grammar.attr["xmlns:" + self.namespaces[uri]] = uri
    parts = ['<?xml version="1.0" encoding="UTF-8"?>']
    parts.append(self.top_grammar.start_tag())
    for child in self.top_grammar.children:
        parts.append(child.serialize())
    parts.append(self.tree.serialize())
    for name in self.global_defs:
        parts.append(self.global_defs[name].serialize())
    for ident in self.identities:
        parts.append(self.identities[ident].serialize())
    parts.append(self.top_grammar.end_tag())
    return "".join(parts)
Return the string representation of the receiver .
37,166
def setup_top(self):
    """Create the top-level elements of the hybrid schema."""
    grammar = SchemaNode("grammar")
    grammar.attr = {
        "xmlns": "http://relaxng.org/ns/structure/1.0",
        "datatypeLibrary": "http://www.w3.org/2001/XMLSchema-datatypes",
    }
    self.top_grammar = grammar
    self.tree = SchemaNode("start")
Create top - level elements of the hybrid schema .
37,167
def create_roots(self, yam):
    """Create the top-level schema structure for module `yam`."""
    self.local_grammar = SchemaNode("grammar")
    self.local_grammar.attr = {
        "ns": yam.search_one("namespace").arg,
        "nma:module": self.module.arg,
    }
    source = "YANG module '%s'" % yam.arg
    revisions = yam.search("revision")
    if len(revisions) > 0:
        source += " revision %s" % self.current_revision(revisions)
    self.dc_element(self.local_grammar, "source", source)
    start = SchemaNode("start", self.local_grammar)
    # Data nodes may interleave; rpcs and notifications are ordered.
    self.data = SchemaNode("nma:data", start, interleave=True)
    self.data.occur = 2
    self.rpcs = SchemaNode("nma:rpcs", start, interleave=False)
    self.notifications = SchemaNode("nma:notifications", start,
                                    interleave=False)
Create the top - level structure for module yam .
37,168
def yang_to_xpath(self, xpe):
    """Transform YANG's XPath `xpe` to a form suitable for Schematron.

    Absolute paths are anchored at $root and unprefixed names get the
    current prefix (or the $pref variable inside global definitions).
    """
    pref = "$pref:" if self.gg_level else self.prefix_stack[-1] + ":"
    # Token types after which a '/' is a path separator, not a path start.
    non_initial = ("DOT", "DOTDOT", "RPAREN", "RBRACKET",
                   "name", "wildcard", "prefix_test")
    out = ""
    prev = None
    for tok in xpath_lexer.scan(xpe):
        if tok.type == "SLASH" and prev not in non_initial:
            out += "$root"
        elif tok.type == "name" and ":" not in tok.value:
            out += pref
        out += tok.value
        if tok.type != "_whitespace":
            prev = tok.type
    return out
Transform YANG s xpath to a form suitable for Schematron .
37,169
def register_identity(self, id_stmt):
    """Register `id_stmt` with its base identity, if it has one."""
    base = id_stmt.search_one("base")
    if base:
        self.identity_deps.setdefault(base.i_identity, []).append(id_stmt)
Register id_stmt with its base identity if any .
37,170
def add_derived_identity(self, id_stmt):
    """Add a pattern definition for `id_stmt` and all derived identities.

    Returns a <ref> schema node pointing at the definition.
    """
    prefix = self.add_namespace(id_stmt.main_module())
    if id_stmt not in self.identities:
        self.identities[id_stmt] = SchemaNode.define(
            "__%s_%s" % (prefix, id_stmt.arg))
    parent = self.identities[id_stmt]
    if id_stmt in self.identity_deps:
        # Derived identities become alternatives in a choice.
        parent = SchemaNode.choice(parent, occur=2)
        for dep in self.identity_deps[id_stmt]:
            parent.subnode(self.add_derived_identity(dep))
    value = SchemaNode("value", parent, prefix + ":" + id_stmt.arg)
    value.attr["type"] = "QName"
    ref = SchemaNode("ref")
    ref.attr["name"] = self.identities[id_stmt].attr["name"]
    return ref
Add pattern def for id_stmt and all derived identities .
37,171
def preload_defs(self):
    """Preload all top-level grouping and typedef definitions."""
    for defn in (self.module.search("grouping")
                 + self.module.search("typedef")):
        uname, dic = self.unique_def_name(defn)
        self.install_def(uname, defn, dic)
Preload all top - level definitions .
37,172
def add_prefix(self, name, stmt):
    """Return `name` prepended with the correct prefix for `stmt`'s module."""
    if self.gg_level:
        # Inside a global definition prefixes are resolved at reference time.
        return name
    pref, colon, local = name.partition(":")
    if colon:
        module_name = stmt.i_module.i_prefixes[pref][0]
        return self.module_prefixes[module_name] + ":" + local
    # Unprefixed: `pref` holds the whole name; use the current prefix.
    return self.prefix_stack[-1] + ":" + pref
Return name prepended with correct prefix .
37,173
def dc_element(self, parent, name, text):
    """Prepend a Dublin Core element `name` containing `text` to `parent`.

    No-op unless the DC namespace has been declared.
    """
    if self.dc_uri in self.namespaces:
        elem = SchemaNode(self.namespaces[self.dc_uri] + ":" + name,
                          text=text)
        parent.children.insert(0, elem)
Add DC element name containing text to parent .
37,174
def get_default(self, stmt, refd):
    """Return the default value for `stmt`.

    A refined default (in `refd`) takes precedence over the node's own
    `default` substatement; None when neither exists.
    """
    if refd["default"]:
        return refd["default"]
    dst = stmt.search_one("default")
    return dst.arg if dst else None
Return default value for stmt node .
37,175
def add_patch(self, pset, augref):
    """Add a patch corresponding to `augref` to `pset`.

    Patches are keyed by the first component of the target path; a patch
    whose remaining path equals an existing one is merged into it.
    """
    try:
        path = [self.add_prefix(comp, augref)
                for comp in augref.arg.split("/") if comp]
    except KeyError:
        return  # unresolvable prefix - ignore this patch
    head = path[0]
    patch = Patch(path[1:], augref)
    if head not in pset:
        pset[head] = [patch]
        return
    same = [p for p in pset[head] if p.path == patch.path]
    if same:
        same[0].combine(patch)
    else:
        pset[head].append(patch)
Add patch corresponding to augref to pset .
37,176
def apply_augments(self, auglist, p_elem, pset):
    """Handle substatements of augments from `auglist` under `p_elem`."""
    for aug in auglist:
        par = aug.parent
        if aug.search_one("when") is None:
            wel = p_elem
        else:
            # A conditional augment gets its own wrapper element.
            kw = "interleave" if p_elem.interleave else "group"
            wel = SchemaNode(kw, p_elem, interleave=p_elem.interleave)
            wel.occur = p_elem.occur
        if par.keyword == "uses":
            self.handle_substmts(aug, wel, pset)
            continue
        if par.keyword == "submodule":
            mnam = par.i_including_modulename
        else:
            mnam = par.arg
        if self.prefix_stack[-1] == self.module_prefixes[mnam]:
            self.handle_substmts(aug, wel, pset)
        else:
            # Switch prefix context for the augmenting module.
            self.prefix_stack.append(self.module_prefixes[mnam])
            self.handle_substmts(aug, wel, pset)
            self.prefix_stack.pop()
Handle substatements of augments from auglist .
37,177
def current_revision(self, r_stmts):
    """Pick the most recent revision date among `r_stmts`."""
    # Compare revisions as [year, month, day] integer triples.
    latest = max([int(part) for part in r.arg.split("-")] for r in r_stmts)
    return "%4d-%02d-%02d" % tuple(latest)
Pick the most recent revision date .
37,178
def install_def(self, name, dstmt, def_map, interleave=False):
    """Install the definition of `dstmt` as `name` into `def_map`."""
    delem = SchemaNode.define(name, interleave=interleave)
    delem.attr["name"] = name
    def_map[name] = delem
    is_global = def_map is self.global_defs
    if is_global:
        self.gg_level += 1  # entering global-grammar scope
    self.handle_substmts(dstmt, delem)
    if is_global:
        self.gg_level -= 1
Install definition name into the appropriate dictionary .
37,179
def rng_annotation(self, stmt, p_elem):
    """Append the YIN representation of extension statement `stmt`."""
    ext = stmt.i_extension
    prf, extkw = stmt.raw_keyword
    (modname, rev) = stmt.i_module.i_prefixes[prf]
    prefix = self.add_namespace(
        statements.modulename_to_module(self.module, modname, rev))
    eel = SchemaNode(prefix + ":" + extkw, p_elem)
    argst = ext.search_one("argument")
    if argst:
        if argst.search_one("yin-element", "true"):
            # Argument carried as a child element.
            SchemaNode(prefix + ":" + argst.arg, eel, stmt.arg)
        else:
            # Argument carried as an attribute.
            eel.attr[argst.arg] = stmt.arg
    self.handle_substmts(stmt, eel)
Append YIN representation of extension statement stmt .
37,180
def propagate_occur(self, node, value):
    """Propagate occurrence `value` to `node` and its ancestors.

    Stops at the first node that already has occur >= value, or just
    after updating a named-pattern define.
    """
    while node.occur < value:
        node.occur = value
        if node.name == "define":
            break
        node = node.parent
Propagate the occurrence value to the node and its ancestors.
37,181
def process_patches(self, pset, stmt, elem, altname=None):
    """Process patches for data node `stmt` (or `altname`) from `pset`.

    Returns a triple (refine_dict, augments, new_pset): collected
    refinements, augment statements, and patches re-filed for children.
    """
    name = altname if altname else stmt.arg
    new_pset = {}
    augments = []
    refine_dict = dict.fromkeys(("presence", "default", "mandatory",
                                 "min-elements", "max-elements"))
    for patch in pset.pop(self.add_prefix(name, stmt), []):
        if patch.path:
            # Patch targets a descendant - re-file it under the next head.
            head = patch.pop()
            new_pset.setdefault(head, []).append(patch)
            continue
        for refaug in patch.plist:
            if refaug.keyword == "augment":
                augments.append(refaug)
                continue
            for sub in refaug.substmts:
                if sub.keyword == "description":
                    self.description_stmt(sub, elem, None)
                elif sub.keyword == "reference":
                    self.reference_stmt(sub, elem, None)
                elif sub.keyword == "must":
                    self.must_stmt(sub, elem, None)
                elif sub.keyword == "config":
                    self.nma_attribute(sub, elem)
                elif refine_dict.get(sub.keyword, False) is None:
                    # Only tracked keywords, and only the first occurrence.
                    refine_dict[sub.keyword] = sub.arg
    return (refine_dict, augments, new_pset)
Process patches for data node name from pset .
37,182
def lookup_expand(self, stmt, names):
    """Find schema nodes named in `names` under `stmt`, following groupings.

    Groupings on the path to a found node are marked for expansion via
    their `d_ref`/`d_expand` attributes.  Returns the names not found.
    """
    if not names:
        return []
    todo = [stmt]
    while todo:
        pst = todo.pop()
        for sub in pst.substmts:
            if sub.keyword in self.schema_nodes:
                qname = self.qname(sub)
                if qname in names:
                    names.remove(qname)
                    # Mark every referring `uses` up the chain for expansion.
                    par = sub.parent
                    while hasattr(par, "d_ref"):
                        par.d_ref.d_expand = True
                        par = par.d_ref.parent
                    if not names:
                        return []
            elif sub.keyword == "uses":
                grp = sub.i_grouping
                grp.d_ref = sub
                todo.append(grp)
    return names
Find schema nodes under stmt also in used groupings .
37,183
def type_with_ranges(self, tchain, p_elem, rangekw, gen_data):
    """Handle types with range or length restrictions.

    `gen_data` is a factory producing a fresh data schema node.
    """
    ranges = self.get_ranges(tchain, rangekw)
    if not ranges:
        return p_elem.subnode(gen_data())
    if len(ranges) > 1:
        # Disjoint ranges become alternatives of a choice.
        p_elem = SchemaNode.choice(p_elem)
        p_elem.occur = 2
    for rng in ranges:
        d_elem = gen_data()
        for param in self.range_params(rng, rangekw):
            d_elem.subnode(param)
        p_elem.subnode(d_elem)
Handle types with range or length restrictions .
37,184
def get_ranges(self, tchain, kw):
    """Return the list of (lo, hi) ranges defined for `kw` along `tchain`.

    Later restrictions in the typedef chain override earlier ones;
    'min'/'max' endpoints inherit the bounds seen so far.  Returns None
    when no restriction is present.
    """
    lo, hi = "min", "max"
    ran = None
    for typ in tchain:
        rstmt = typ.search_one(kw)
        if rstmt is None:
            continue
        parts = [p.strip() for p in rstmt.arg.split("|")]
        ran = [[b.strip() for b in p.split("..")] for p in parts]
        if ran[0][0] != 'min':
            lo = ran[0][0]
        if ran[-1][-1] != 'max':
            hi = ran[-1][-1]
    if ran is None:
        return None
    if len(ran) == 1:
        return [(lo, hi)]
    return [(lo, ran[0][-1])] + ran[1:-1] + [(ran[-1][0], hi)]
Return list of ranges defined in tchain .
37,185
def handle_stmt(self, stmt, p_elem, pset=None):
    """Run the handler method for statement `stmt` under `p_elem`.

    Unknown extension keywords fall back to the generic YIN annotation
    handler; unknown core keywords raise EmitError.
    """
    # Was `pset={}`: a shared mutable default that handlers mutate
    # (e.g. process_patches pops from it), leaking state across calls.
    if pset is None:
        pset = {}
    if self.debug > 0:
        sys.stderr.write("Handling '%s %s'\n" %
                         (util.keyword_to_str(stmt.raw_keyword), stmt.arg))
    try:
        method = self.stmt_handler[stmt.keyword]
    except KeyError:
        if isinstance(stmt.keyword, tuple):
            # (module, keyword): an extension statement.
            try:
                method = self.ext_handler[stmt.keyword[0]][stmt.keyword[1]]
            except KeyError:
                method = self.rng_annotation
            method(stmt, p_elem)
            return
        else:
            raise error.EmitError(
                "Unknown keyword %s - this should not happen.\n"
                % stmt.keyword)
    method(stmt, p_elem, pset)
Run handler method for statement stmt .
37,186
def handle_substmts(self, stmt, p_elem, pset=None):
    """Handle all substatements of `stmt` under `p_elem`."""
    # Was `pset={}`: a shared mutable default dict that downstream
    # handlers mutate, leaking patches across unrelated calls.
    if pset is None:
        pset = {}
    for sub in stmt.substmts:
        self.handle_stmt(sub, p_elem, pset)
Handle all substatements of stmt .
37,187
def nma_attribute(self, stmt, p_elem, pset=None):
    """Map `stmt` to a NETMOD-specific attribute on `p_elem`.

    An already-present attribute is left untouched.
    """
    att = "nma:" + stmt.keyword
    if att not in p_elem.attr:
        p_elem.attr[att] = stmt.arg
Map stmt to a NETMOD - specific attribute .
37,188
def type_stmt(self, stmt, p_elem, pset):
    """Handle a type statement."""
    typedef = stmt.i_typedef
    if typedef and not stmt.i_is_derived:
        # Bare typedef reference: emit/ref a named pattern definition.
        uname, dic = self.unique_def_name(typedef)
        if uname not in dic:
            self.install_def(uname, typedef, dic)
        SchemaNode("ref", p_elem).set_attr("name", uname)
        defst = typedef.search_one("default")
        if defst:
            dic[uname].default = defst.arg
            occur = 1
        else:
            occur = dic[uname].occur
        if occur > 0:
            self.propagate_occur(p_elem, occur)
        return
    # Derived/restricted type: walk the typedef chain, collecting the
    # type statements and the closest default value.
    chain = [stmt]
    tdefault = None
    while typedef:
        type_ = typedef.search_one("type")
        chain.insert(0, type_)
        if tdefault is None:
            tdef = typedef.search_one("default")
            if tdef:
                tdefault = tdef.arg
        typedef = type_.i_typedef
    if tdefault and p_elem.occur == 0:
        p_elem.default = tdefault
        self.propagate_occur(p_elem, 1)
    self.type_handler[chain[0].arg](chain, p_elem)
Handle type statement .
37,189
def choice_type(self, tchain, p_elem):
    """Handle enumeration and union types."""
    alternatives = SchemaNode.choice(p_elem, occur=2)
    self.handle_substmts(tchain[0], alternatives)
Handle enumeration and union types .
37,190
def mapped_type(self, tchain, p_elem):
    """Handle types that map directly to a RELAX NG datatype."""
    data = SchemaNode("data", p_elem)
    data.set_attr("type", self.datatype_map[tchain[0].arg])
Handle types that are simply mapped to RELAX NG .
37,191
def numeric_type(self, tchain, p_elem):
    """Handle numeric types."""
    typ = tchain[0].arg

    def gen_data():
        elem = SchemaNode("data").set_attr("type", self.datatype_map[typ])
        if typ == "decimal64":
            # decimal64 requires explicit digit facets.
            fd = tchain[0].search_one("fraction-digits").arg
            SchemaNode("param", elem, "19").set_attr("name", "totalDigits")
            SchemaNode("param", elem, fd).set_attr("name", "fractionDigits")
        return elem

    self.type_with_ranges(tchain, p_elem, "range", gen_data)
Handle numeric types .
37,192
def add_stmt(stmt, arg_rules):
    """Used by plugins to add grammar for an extension statement."""
    arg, rules = arg_rules
    stmt_map[stmt] = (arg, rules)
Used by plugins to add grammar for an extension statement.
37,193
def add_to_stmts_rules(stmts, rules):
    """Used by plugins to add extra rules to a statement's existing rules.

    Each new rule is inserted before the first existing rule it precedes,
    keeping unprefixed keywords ahead of prefixed ones.
    """
    def rule_precedes(ra, rb):
        # Unprefixed keywords sort before prefixed ones; prefixed
        # keywords are ordered by module prefix.
        rka, rkb = ra[0], rb[0]
        if not util.is_prefixed(rkb):
            return False
        if not util.is_prefixed(rka):
            return True
        return rka[0] < rkb[0]

    for s in stmts:
        (_arg, rules0) = stmt_map[s]
        for r in rules:
            i = 0
            while i < len(rules0):
                if rule_precedes(r, rules0[i]):
                    rules0.insert(i, r)
                    break
                i += 1
            if i == len(rules0):
                rules0.insert(i, r)  # belongs at the end
Used by plugins to add extra rules to the existing rules for a statement.
37,194
def chk_module_statements(ctx, module_stmt, canonical=False):
    """Validate the statement hierarchy of `module_stmt` against the grammar.

    Return True when no new errors were recorded in `ctx`.
    """
    return chk_statement(ctx, module_stmt, top_stmts, canonical)
Validate the statement hierarchy according to the grammar .
37,195
def chk_statement(ctx, stmt, grammar, canonical=False):
    """Validate `stmt` according to `grammar`.

    Return True when validation added no errors to `ctx`.
    """
    errors_before = len(ctx.errors)
    canspec = grammar if canonical == True else []
    _chk_stmts(ctx, stmt.pos, [stmt], None, (grammar, canspec), canonical)
    return errors_before == len(ctx.errors)
Validate stmt according to grammar .
37,196
def sort_canonical(keyword, stmts):
    """Sort `stmts` in the canonical order defined for `keyword`.

    Return a new sorted list; `stmts` is not modified.  If `keyword` has
    no canonical order, the list is returned as is.
    """
    try:
        (_arg_type, subspec) = stmt_map[keyword]
    except KeyError:
        return stmts
    res = []
    # Data-definition statements (and 'case') keep their relative order.
    keep = [s[0] for s in data_def_stmts] + ['case']
    for (kw, _spec) in flatten_spec(subspec):
        comments = []
        for s in stmts:
            if s.keyword == '_comment':
                # Comments travel with the statement that follows them.
                comments.append(s)
            elif s.keyword == kw and kw not in keep:
                res.extend(comments)
                comments = []
                res.append(s)
            else:
                comments = []
    # Anything not covered by the canonical spec goes at the end, in order.
    res.extend([stmt for stmt in stmts if stmt not in res])
    return res
Sort all stmts in the canonical order defined by keyword . Return the sorted list . The stmt list is not modified . If keyword does not have a canonical order the list is returned as is .
37,197
def scan(s):
    """Tokenize the XPath expression `s`.

    Return a list of XPathTok, raising XPathError on lexical failure.
    Line/column positions are tracked for error reporting.
    """
    line = 1
    linepos = 1
    pos = 0
    toks = []
    while pos < len(s):
        matched = False
        for (tokname, r) in patterns:
            m = r.match(s, pos)
            if m is None:
                continue
            v = m.group(0)
            prec = _preceding_token(toks)
            if tokname == 'STAR' and prec is not None and _is_special(prec):
                tok = XPathTok('wildcard', v, line, linepos)
            elif (tokname == 'name' and prec is not None
                  and not _is_special(prec) and v in operators):
                # A name in operator position is an operator (e.g. 'div').
                tok = XPathTok(operators[v], v, line, linepos)
            elif tokname == 'name':
                if re_open_para.match(s, pos + len(v)):
                    # name followed by '(' is a node type or a function.
                    if v in node_types:
                        tok = XPathTok('node_type', v, line, linepos)
                    else:
                        tok = XPathTok('function_name', v, line, linepos)
                elif re_axis.match(s, pos + len(v)):
                    if v in axes:
                        tok = XPathTok('axis', v, line, linepos)
                    else:
                        e = "unknown axis %s" % v
                        raise XPathError(e, line, linepos)
                else:
                    tok = XPathTok('name', v, line, linepos)
            else:
                tok = XPathTok(tokname, v, line, linepos)
            # Maintain line/column counters.
            if tokname == '_whitespace':
                n = v.count('\n')
                if n > 0:
                    line = line + n
                    linepos = len(v) - v.rfind('\n')
                else:
                    linepos += len(v)
            else:
                linepos += len(v)
            pos += len(v)
            toks.append(tok)
            matched = True
            break
        if not matched:
            raise XPathError('syntax error', line, linepos)
    return toks
Return a list of tokens or throw SyntaxError on failure .
37,198
def yang_modules(self):
    """Return advertised YANG module names with their revisions.

    The first advertisement of a module wins; duplicates are ignored.
    """
    mods = {}
    for cap in self.capabilities:
        name = cap.parameters.get("module")
        if name is None or name in mods:
            continue
        mods[name] = cap.parameters.get("revision")
    return mods.items()
Return a list of advertised YANG module names with revisions .
37,199
def get_features(self, yam):
    """Return the list of features declared for module `yam`.

    Assumes `yam` is among the advertised capabilities (IndexError
    otherwise, matching the original behavior).
    """
    mcap = [c for c in self.capabilities
            if c.parameters.get("module", None) == yam][0]
    features = mcap.parameters.get("features")
    if not features:
        return []
    return features.split(",")
Return list of features declared for module yam .