idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
225,000
def _generate_identifier_name(self, columns, prefix="", max_size=30):
    """Generate an identifier from a list of column names, capped at max_size.

    A CRC32 hex digest of each column name is concatenated, so the result
    is deterministic for a given column list.
    """
    # Renamed the accumulator from ``hash`` (shadowed the builtin) and
    # replaced string += in a loop with a single join.
    # NOTE(review): ``encode`` is a project helper, presumably str -> bytes.
    digest = "".join(
        "%x" % binascii.crc32(encode(str(column))) for column in columns
    )
    return (prefix + "_" + digest)[:max_size]
Generates an identifier from a list of column names obeying a certain string length .
71
17
225,001
def only_trashed(cls):
    """Get a new query builder that only includes soft-deleted records."""
    model = cls()
    deleted_at = model.get_qualified_deleted_at_column()
    builder = model.new_query_without_scope(SoftDeletingScope())
    return builder.where_not_null(deleted_at)
Get a new query builder that only includes soft deletes
60
11
225,002
def connection(self, name=None):
    """Get a database connection instance, creating and caching it on first use."""
    name, type = self._parse_connection_name(name)
    if name in self._connections:
        return self._connections[name]
    logger.debug("Initiating connection %s" % name)
    conn = self._make_connection(name)
    self._set_connection_for_type(conn, type)
    self._connections[name] = self._prepare(conn)
    return self._connections[name]
Get a database connection instance
102
5
225,003
def apply(self, builder, model):
    """Apply the soft-delete scope to the given query builder."""
    deleted_at = model.get_qualified_deleted_at_column()
    builder.where_null(deleted_at)
    self.extend(builder)
Apply the scope to a given query builder .
37
9
225,004
def _on_delete ( self , builder ) : column = self . _get_deleted_at_column ( builder ) return builder . update ( { column : builder . get_model ( ) . fresh_timestamp ( ) } )
The delete replacement function .
51
5
225,005
def _get_deleted_at_column ( self , builder ) : if len ( builder . get_query ( ) . joins ) > 0 : return builder . get_model ( ) . get_qualified_deleted_at_column ( ) else : return builder . get_model ( ) . get_deleted_at_column ( )
Get the deleted at column for the builder .
74
9
225,006
def _restore ( self , builder ) : builder . with_trashed ( ) return builder . update ( { builder . get_model ( ) . get_deleted_at_column ( ) : None } )
The restore extension .
46
4
225,007
def has_table(self, table):
    """Determine whether the given table exists in the database."""
    sql = self._grammar.compile_table_exists()
    prefixed = self._connection.get_table_prefix() + table
    rows = self._connection.select(sql, [prefixed])
    return len(rows) > 0
Determine if the given table exists .
61
9
225,008
def has_column(self, table, column):
    """Determine whether the table has the given column (case-insensitive)."""
    needle = column.lower()
    haystack = [c.lower() for c in self.get_column_listing(table)]
    return needle in haystack
Determine if the given table has a given column .
48
12
225,009
def table(self, table):
    """Modify a table on the schema.

    Generator/context-manager body: yields a blueprint for the caller to
    mutate, then builds it.  The original wrapped both halves in
    ``try/except ... raise`` blocks that only re-raised — dead handlers
    (and an unused ``e``), removed here with identical behavior.
    """
    blueprint = self._create_blueprint(table)
    yield blueprint
    self._build(blueprint)
Modify a table on the schema .
43
8
225,010
def rename(self, from_, to):
    """Rename a table on the schema."""
    bp = self._create_blueprint(from_)
    bp.rename(to)
    self._build(bp)
Rename a table on the schema .
37
8
225,011
def _write_migration ( self , creator , name , table , create , path ) : file_ = os . path . basename ( creator . create ( name , path , table , create ) ) return file_
Write the migration file to disk .
46
7
225,012
def compile_delete(self, query):
    """Compile a delete statement into SQL."""
    table = self.wrap_table(query.from__)
    if isinstance(query.wheres, list):
        wheres = self._compile_wheres(query)
    else:
        wheres = ""
    if query.joins:
        # multi-table form: DELETE <t> FROM <t> <joins> <wheres>
        joins = " %s" % self._compile_joins(query, query.joins)
        sql = "DELETE %s FROM %s%s %s" % (table, table, joins, wheres)
    else:
        sql = "DELETE FROM %s %s" % (table, wheres)
    sql = sql.strip()
    if query.orders:
        sql += " %s" % self._compile_orders(query, query.orders)
    if query.limit_:
        sql += " %s" % self._compile_limit(query, query.limit_)
    return sql
Compile a delete statement into SQL
193
7
225,013
def _check_config ( self ) : current_path = os . path . relpath ( os . getcwd ( ) ) accepted_files = [ "orator.yml" , "orator.py" ] for accepted_file in accepted_files : config_file = os . path . join ( current_path , accepted_file ) if os . path . exists ( config_file ) : if self . _handle_config ( config_file ) : return True return False
Check presence of default config files .
104
7
225,014
def _handle_config(self, config_file):
    """Load a config file and set up the database resolver from it."""
    config = self._get_config(config_file)
    databases = config.get("databases", config.get("DATABASES", {}))
    self.resolver = DatabaseManager(databases)
    return True
Check and handle a config file .
62
7
225,015
def log(self, file, batch):
    """Record that a migration was run, with its batch number."""
    self.table().insert(migration=file, batch=batch)
Log that a migration was run .
38
7
225,016
def create_repository(self):
    """Create the migration repository data store.

    The migrations table keeps track of which migrations have already run;
    it stores the migration file's name alongside its batch ID.
    """
    schema = self.get_connection().get_schema_builder()
    with schema.create(self._table) as table:
        table.string("migration")
        table.integer("batch")
Create the migration repository data store .
102
7
225,017
def repository_exists(self):
    """Determine whether the migration repository table exists."""
    builder = self.get_connection().get_schema_builder()
    return builder.has_table(self._table)
Determine if the repository exists .
39
8
225,018
def create(self, name, path, table=None, create=False):
    """Create a new migration at the given path and return its full path.

    Ensures the target directory exists (creating an ``__init__.py`` so
    migrations are importable as a package), then renders the appropriate
    stub into the new migration file.
    """
    path = self._get_path(name, path)
    if not os.path.exists(os.path.dirname(path)):
        mkdir_p(os.path.dirname(path))
    parent = os.path.join(os.path.dirname(path), "__init__.py")
    if not os.path.exists(parent):
        # touch the package marker file
        with open(parent, "w"):
            pass
    stub = self._get_stub(table, create)
    with open(path, "w") as fh:
        fh.write(self._populate_stub(name, stub, table))
    return path
Create a new migration at the given path .
163
9
225,019
def _get_stub(self, table, create):
    """Pick the migration stub template for the requested operation."""
    if table is None:
        return BLANK_STUB
    return CREATE_STUB if create else UPDATE_STUB
Get the migration stub template
47
5
225,020
def get_quoted_local_columns(self, platform):
    """Return the quoted names of the referencing (local) columns."""
    return [
        column.get_quoted_name(platform)
        for column in self._local_column_names.values()
    ]
Returns the quoted representation of the referencing table column names the foreign key constraint is associated with .
54
18
225,021
def get_quoted_foreign_columns(self, platform):
    """Return the quoted names of the referenced (foreign) columns."""
    return [
        column.get_quoted_name(platform)
        for column in self._foreign_column_names.values()
    ]
Returns the quoted representation of the referenced table column names the foreign key constraint is associated with .
54
18
225,022
def _on_event ( self , event ) : if self . has_option ( event ) : on_event = self . get_option ( event ) . upper ( ) if on_event not in [ "NO ACTION" , "RESTRICT" ] : return on_event return False
Returns the referential action for a given database operation on the referenced table the foreign key constraint is associated with .
62
23
225,023
def run(self, path, pretend=False):
    """Run the outstanding migrations found at the given path."""
    self._notes = []
    files = self._get_migration_files(path)
    already_ran = self._repository.get_ran()
    pending = [f for f in files if f not in already_ran]
    self.run_migration_list(path, pending, pretend)
Run the outstanding migrations for a given path .
78
10
225,024
def run_migration_list(self, path, migrations, pretend=False):
    """Run a list of migrations as a single batch."""
    if not migrations:
        self._note("<info>Nothing to migrate</info>")
        return
    batch = self._repository.get_next_batch_number()
    for migration in migrations:
        self._run_up(path, migration, batch, pretend)
Run a list of migrations .
80
7
225,025
def reset(self, path, pretend=False):
    """Roll back all currently applied migrations; return how many there were."""
    self._notes = []
    migrations = sorted(self._repository.get_ran(), reverse=True)
    if not migrations:
        self._note("<info>Nothing to rollback.</info>")
    else:
        for migration in migrations:
            self._run_down(path, {"migration": migration}, pretend)
    return len(migrations)
Rolls all of the currently applied migrations back .
99
11
225,026
def _get_migration_files ( self , path ) : files = glob . glob ( os . path . join ( path , "[0-9]*_*.py" ) ) if not files : return [ ] files = list ( map ( lambda f : os . path . basename ( f ) . replace ( ".py" , "" ) , files ) ) files = sorted ( files ) return files
Get all of the migration files in a given path .
86
11
225,027
def _compile_update_columns ( self , values ) : columns = [ ] for key , value in values . items ( ) : columns . append ( "%s = %s" % ( self . wrap ( key ) , self . parameter ( value ) ) ) return ", " . join ( columns )
Compile the columns for the update statement
65
8
225,028
def _compile_update_from ( self , query ) : if not query . joins : return "" froms = [ ] for join in query . joins : froms . append ( self . wrap_table ( join . table ) ) if len ( froms ) : return " FROM %s" % ", " . join ( froms ) return ""
Compile the from clause for an update with a join .
74
12
225,029
def _compile_update_wheres ( self , query ) : base_where = self . _compile_wheres ( query ) if not query . joins : return base_where join_where = self . _compile_update_join_wheres ( query ) if not base_where . strip ( ) : return "WHERE %s" % self . _remove_leading_boolean ( join_where ) return "%s %s" % ( base_where , join_where )
Compile the additional where clauses for updates with joins .
106
11
225,030
def _compile_update_join_wheres ( self , query ) : join_wheres = [ ] for join in query . joins : for clause in join . clauses : join_wheres . append ( self . _compile_join_constraints ( clause ) ) return " " . join ( join_wheres )
Compile the join clauses for an update .
71
9
225,031
def compile_insert_get_id(self, query, values, sequence=None):
    """Compile an INSERT ... RETURNING statement for fetching the new ID."""
    column = sequence if sequence is not None else "id"
    return "%s RETURNING %s" % (
        self.compile_insert(query, values),
        self.wrap(column),
    )
Compile an insert and get ID statement into SQL .
60
11
225,032
def qmark(cls, query):
    """Convert a qmark-placeholder query into %s format style.

    "??" escapes to a literal "?", "%" is doubled for %-formatting, and a
    lone "?" becomes the "%s" placeholder.
    """
    def replace(match):
        token = match.group(0)
        if token == "??":
            return "?"
        if token == "%":
            return "%%"
        return "%s"
    return cls.RE_QMARK.sub(replace, query)
Convert a qmark query into format style .
70
10
225,033
def touch(self):
    """Touch all related models by updating their updated_at timestamp."""
    column = self.get_related().get_updated_at_column()
    timestamp = self.get_related().fresh_timestamp()
    self.raw_update({column: timestamp})
Touch all of the related models for the relationship .
50
10
225,034
def raw_update(self, attributes=None):
    """Run a raw update against the base query; no-op when there is no query."""
    attributes = {} if attributes is None else attributes
    if self._query is None:
        return None
    return self._query.update(attributes)
Run a raw update against the base query .
40
9
225,035
def wrap(self, value):
    """Wrap the given value using the parent model's query grammar."""
    grammar = self._parent.new_query().get_query().get_grammar()
    return grammar.wrap(value)
Wrap the given value with the parent s query grammar .
37
12
225,036
def load(template):
    """Try to parse as JSON first, then YAML; return (data, format).

    If both parsers fail, re-raise the original JSON error.
    """
    try:
        return load_json(template), "json"
    except ValueError as json_error:
        try:
            return load_yaml(template), "yaml"
        except Exception:
            raise json_error
Try to guess the input format
51
6
225,037
def dump_yaml(data, clean_up=False, long_form=False):
    """Serialize data to YAML using the project's custom dumper."""
    dumper = get_dumper(clean_up, long_form)
    return yaml.dump(
        data,
        Dumper=dumper,
        default_flow_style=False,
        allow_unicode=True,
    )
Output some YAML
61
5
225,038
def to_json(template, clean_up=False):
    """Assume the input is YAML and convert it to JSON."""
    data = load_yaml(template)
    return dump_json(clean(data) if clean_up else data)
Assume the input is YAML and convert to JSON
41
12
225,039
def to_yaml(template, clean_up=False, long_form=False):
    """Assume the input is JSON and convert it to YAML."""
    data = load_json(template)
    return dump_yaml(clean(data) if clean_up else data, clean_up, long_form)
Assume the input is JSON and convert to YAML
56
12
225,040
def flip(template, in_format=None, out_format=None, clean_up=False,
         no_flip=False, long_form=False):
    """Figure out the input format and convert to the opposing output format.

    ``no_flip`` keeps the output format the same as the input format.
    """
    # Do we need to figure out the input format?
    if not in_format:
        # Load the template as JSON?
        if (out_format == "json" and no_flip) or (out_format == "yaml" and not no_flip):
            in_format = "json"
        elif (out_format == "yaml" and no_flip) or (out_format == "json" and not no_flip):
            in_format = "yaml"

    # Load the data
    if in_format == "json":
        data = load_json(template)
    elif in_format == "yaml":
        data = load_yaml(template)
    else:
        data, in_format = load(template)

    # Clean up?
    if clean_up:
        data = clean(data)

    # Figure out the output format
    if not out_format:
        if (in_format == "json" and no_flip) or (in_format == "yaml" and not no_flip):
            out_format = "json"
        else:
            out_format = "yaml"

    # Finished!
    if out_format == "json":
        # BUGFIX: the original compared sys.version[0] (first character of the
        # version *string*) to "3"; use sys.version_info for a robust check.
        if sys.version_info[0] == 3:
            return dump_json(data)
        # Python 2: callers expect encoded bytes
        return dump_json(data).encode('utf-8')
    return dump_yaml(data, clean_up, long_form)
Figure out the input format and convert the data to the opposing output format
338
14
225,041
def convert_join(value):
    """Fix a Fn::Join, converting it to Fn::Sub where possible ;)"""
    if not isinstance(value, list) or len(value) != 2:
        # Cowardly refuse
        return value
    sep, parts = value[0], value[1]
    if isinstance(parts, six.string_types):
        return parts
    if not isinstance(parts, list):
        # This looks tricky, just return the join as it was
        return {
            "Fn::Join": value,
        }
    plain_string = True
    args = ODict()
    new_parts = []
    for part in parts:
        part = clean(part)
        if isinstance(part, dict):
            plain_string = False
            if "Ref" in part:
                new_parts.append("${{{}}}".format(part["Ref"]))
            elif "Fn::GetAtt" in part:
                params = part["Fn::GetAtt"]
                new_parts.append("${{{}}}".format(".".join(params)))
            else:
                # reuse an existing Sub parameter when this part was seen before
                for key, val in args.items():
                    # we want to bail if a conditional can evaluate to AWS::NoValue
                    if isinstance(val, dict):
                        if "Fn::If" in val and "AWS::NoValue" in str(val["Fn::If"]):
                            return {
                                "Fn::Join": value,
                            }
                    if val == part:
                        param_name = key
                        break
                else:
                    param_name = "Param{}".format(len(args) + 1)
                    args[param_name] = part
                new_parts.append("${{{}}}".format(param_name))
        elif isinstance(part, six.string_types):
            # escape literal ${ so Sub does not interpolate it
            new_parts.append(part.replace("${", "${!"))
        else:
            # Doing something weird; refuse
            return {"Fn::Join": value}
    source = sep.join(new_parts)
    if plain_string:
        return source
    if args:
        return ODict((("Fn::Sub", [source, args]),))
    return ODict((("Fn::Sub", source),))
Fix a Join ; )
461
5
225,042
def map_representer(dumper, value):
    """Deal with !Ref-style function format and OrderedDict."""
    value = ODict(value.items())
    if len(value.keys()) == 1:
        key = list(value.keys())[0]
        if key in CONVERTED_SUFFIXES:
            return fn_representer(dumper, key, value[key])
        if key.startswith(FN_PREFIX):
            # strip the "Fn::" prefix for the short-form tag
            return fn_representer(dumper, key[4:], value[key])
    return dumper.represent_mapping(TAG_MAP, value, flow_style=False)
Deal with !Ref style function format and OrderedDict
131
12
225,043
def multi_constructor(loader, tag_suffix, node):
    """Deal with !Ref-style function format."""
    if tag_suffix not in UNCONVERTED_SUFFIXES:
        tag_suffix = "{}{}".format(FN_PREFIX, tag_suffix)
    constructor = None
    if tag_suffix == "Fn::GetAtt":
        constructor = construct_getatt
    elif isinstance(node, yaml.ScalarNode):
        constructor = loader.construct_scalar
    elif isinstance(node, yaml.SequenceNode):
        constructor = loader.construct_sequence
    elif isinstance(node, yaml.MappingNode):
        constructor = loader.construct_mapping
    else:
        raise Exception("Bad tag: !{}".format(tag_suffix))
    return ODict(((tag_suffix, constructor(node)),))
Deal with !Ref style function format
186
7
225,044
def construct_getatt(node):
    """Reconstruct a !GetAtt tag into a list of its dotted components."""
    raw = node.value
    if isinstance(raw, six.text_type):
        return raw.split(".", 1)
    if isinstance(raw, list):
        return [item.value for item in raw]
    raise ValueError("Unexpected node type: {}".format(type(raw)))
Reconstruct !GetAtt into a list
85
9
225,045
def construct_mapping(self, node, deep=False):
    """Construct a YAML mapping as an ODict to preserve key order."""
    mapping = ODict()
    for key_node, value_node in node.value:
        k = self.construct_object(key_node, deep=deep)
        mapping[k] = self.construct_object(value_node, deep=deep)
    return mapping
Use ODict for maps
74
5
225,046
def main(ctx, **kwargs):
    """CLI entry point for AWS CloudFormation Template Flip.

    Converts templates between JSON and YAML, inferring the input format
    from the file extension when not given explicitly.
    """
    in_format = kwargs.pop('in_format')
    out_format = kwargs.pop('out_format') or kwargs.pop('out_flag')
    no_flip = kwargs.pop('no_flip')
    clean = kwargs.pop('clean')
    long_form = kwargs.pop('long')
    input_file = kwargs.pop('input')
    output_file = kwargs.pop('output')
    if not in_format:
        # infer the input format from the file extension
        if input_file.name.endswith(".json"):
            in_format = "json"
        elif input_file.name.endswith(".yaml") or input_file.name.endswith(".yml"):
            in_format = "yaml"
    if input_file.name == "<stdin>" and sys.stdin.isatty():
        # no piped input: show help instead of blocking on a TTY read
        click.echo(ctx.get_help())
        ctx.exit()
    try:
        output_file.write(flip(
            input_file.read(),
            in_format=in_format,
            out_format=out_format,
            clean_up=clean,
            no_flip=no_flip,
            long_form=long_form
        ))
    except Exception as e:
        raise click.ClickException("{}".format(e))
AWS CloudFormation Template Flip is a tool that converts AWS CloudFormation templates between JSON and YAML formats making use of the YAML format s short function syntax where possible .
321
39
225,047
def updateVersions(region="us-east-1", table="credential-store"):
    """Scan the credential-store and normalize integer version fields.

    Every credential whose version is a plain integer is rewritten in the
    zero-padded string format; the old item is deleted afterwards.
    """
    dynamodb = boto3.resource('dynamodb', region_name=region)
    secrets = dynamodb.Table(table)
    response = secrets.scan(
        ProjectionExpression="#N, version, #K, contents, hmac",
        ExpressionAttributeNames={"#N": "name", "#K": "key"},
    )
    items = response["Items"]
    for old_item in items:
        if isInt(old_item['version']):
            new_item = copy.copy(old_item)
            new_item['version'] = credstash.paddedInt(new_item['version'])
            if new_item['version'] != old_item['version']:
                secrets.put_item(Item=new_item)
                secrets.delete_item(Key={
                    'name': old_item['name'],
                    'version': old_item['version'],
                })
        else:
            # BUGFIX: was a Python 2 `print` statement — a SyntaxError on Python 3
            print("Skipping item: %s, %s" % (old_item['name'], old_item['version']))
do a full - table scan of the credential - store and update the version format of every credential if it is an integer
273
24
225,048
def paddedInt(i):
    """Return str(i) left-padded with zeros up to PAD_LEN digits."""
    text = str(i)
    padding = PAD_LEN - len(text)
    # negative padding yields an empty prefix, leaving text unchanged
    return "0" * padding + text
return a string that contains i left - padded with 0 s up to PAD_LEN digits
41
20
225,049
def getHighestVersion(name, region=None, table="credential-store", **kwargs):
    """Return the highest version of `name` in the table, or 0 if absent."""
    session = get_session(**kwargs)
    secrets = session.resource('dynamodb', region_name=region).Table(table)
    response = secrets.query(
        Limit=1,
        ScanIndexForward=False,
        ConsistentRead=True,
        KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name),
        ProjectionExpression="version",
    )
    if response["Count"] == 0:
        return 0
    return response["Items"][0]["version"]
Return the highest version of name in the table
157
9
225,050
def clean_fail(func):
    """Decorator that exits cleanly on a failed AWS call.

    Catches botocore.exceptions.ClientError (raised e.g. when targeting a
    region that isn't set up — see `credstash setup`), prints the error to
    stderr and exits with status 1.
    """
    import functools

    @functools.wraps(func)  # IMPROVEMENT: preserve the wrapped function's metadata
    def func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except botocore.exceptions.ClientError as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)
    return func_wrapper
A decorator to cleanly exit on a failed call to AWS . catch a botocore . exceptions . ClientError raised from an action . This sort of error is raised if you are targeting a region that isn t set up ( see credstash setup .
74
53
225,051
def listSecrets(region=None, table="credential-store", **kwargs):
    """Do a full-table scan of the credential-store.

    Returns the name, version and comment of every credential, following
    pagination via LastEvaluatedKey.
    """
    session = get_session(**kwargs)
    dynamodb = session.resource('dynamodb', region_name=region)
    secrets = dynamodb.Table(table)
    last_evaluated_key = True  # sentinel meaning "first page"
    items = []
    while last_evaluated_key:
        params = dict(
            ProjectionExpression="#N, version, #C",
            ExpressionAttributeNames={"#N": "name", "#C": "comment"}
        )
        if last_evaluated_key is not True:
            params['ExclusiveStartKey'] = last_evaluated_key
        response = secrets.scan(**params)
        last_evaluated_key = response.get('LastEvaluatedKey')
        # will set last evaluated key to a number
        items.extend(response['Items'])
    return items
do a full - table scan of the credential - store and return the names and versions of every credential
207
20
225,052
def putSecret(name, secret, version="", kms_key="alias/credstash",
              region=None, table="credential-store", context=None,
              digest=DEFAULT_DIGEST, comment="", **kwargs):
    """Put a secret called `name` into the secret-store, protected by `kms_key`.

    The item is sealed with the legacy AES-CTR format and written with a
    conditional expression so an existing name/version is never overwritten.
    """
    if not context:
        context = {}
    session = get_session(**kwargs)
    kms = session.client('kms', region_name=region)
    key_service = KeyService(kms, kms_key, context)
    sealed = seal_aes_ctr_legacy(
        key_service,
        secret,
        digest_method=digest,
    )
    dynamodb = session.resource('dynamodb', region_name=region)
    secrets = dynamodb.Table(table)
    data = {
        'name': name,
        'version': paddedInt(version),
    }
    if comment:
        data['comment'] = comment
    data.update(sealed)
    return secrets.put_item(Item=data, ConditionExpression=Attr('name').not_exists())
put a secret called name into the secret - store protected by the key kms_key
242
18
225,053
def getAllSecrets(version="", region=None, table="credential-store",
                  context=None, credential=None, session=None, **kwargs):
    """Fetch and decrypt all secrets, returned as a {name: plaintext} dict."""
    if session is None:
        session = get_session(**kwargs)
    dynamodb = session.resource('dynamodb', region_name=region)
    kms = session.client('kms', region_name=region)
    secrets = listSecrets(region, table, **kwargs)

    # Only return the secrets that match the pattern in `credential`
    # This already works out of the box with the CLI get action,
    # but that action doesn't support wildcards when using as library
    if credential and WILDCARD_CHAR in credential:
        names = set(expand_wildcard(credential, [x["name"] for x in secrets]))
    else:
        names = set(x["name"] for x in secrets)

    # decrypt in parallel; one thread per name up to the pool cap
    pool = ThreadPool(min(len(names), THREAD_POOL_MAX_SIZE))
    results = pool.map(
        lambda credential: getSecret(credential, version, region, table, context, dynamodb, kms, **kwargs),
        names)
    pool.close()
    pool.join()
    return dict(zip(names, results))
fetch and decrypt all secrets
292
6
225,054
def getSecret(name, version="", region=None, table="credential-store",
              context=None, dynamodb=None, kms=None, **kwargs):
    """Fetch and decrypt the secret called `name`.

    Raises ItemNotFound when no matching item (or version) exists.
    """
    if not context:
        context = {}

    # Can we cache
    if dynamodb is None or kms is None:
        session = get_session(**kwargs)
        if dynamodb is None:
            dynamodb = session.resource('dynamodb', region_name=region)
        if kms is None:
            kms = session.client('kms', region_name=region)

    secrets = dynamodb.Table(table)

    if version == "":
        # do a consistent fetch of the credential with the highest version
        response = secrets.query(Limit=1,
                                 ScanIndexForward=False,
                                 ConsistentRead=True,
                                 KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name))
        if response["Count"] == 0:
            raise ItemNotFound("Item {'name': '%s'} couldn't be found." % name)
        material = response["Items"][0]
    else:
        response = secrets.get_item(Key={"name": name, "version": version})
        if "Item" not in response:
            raise ItemNotFound(
                "Item {'name': '%s', 'version': '%s'} couldn't be found." % (name, version))
        material = response["Item"]

    key_service = KeyService(kms, None, context)
    return open_aes_ctr_legacy(key_service, material)
fetch and decrypt the secret called name
363
8
225,055
def createDdbTable(region=None, table="credential-store", **kwargs):
    """Create the secret-store table in DynamoDB in the specified region."""
    session = get_session(**kwargs)
    dynamodb = session.resource("dynamodb", region_name=region)
    if table in (t.name for t in dynamodb.tables.all()):
        print("Credential Store table already exists")
        return
    print("Creating table...")
    dynamodb.create_table(
        TableName=table,
        KeySchema=[
            {"AttributeName": "name", "KeyType": "HASH",},
            {"AttributeName": "version", "KeyType": "RANGE",}
        ],
        AttributeDefinitions=[
            {"AttributeName": "name", "AttributeType": "S",},
            {"AttributeName": "version", "AttributeType": "S",},
        ],
        ProvisionedThroughput={"ReadCapacityUnits": 1, "WriteCapacityUnits": 1,}
    )
    print("Waiting for table to be created...")
    client = session.client("dynamodb", region_name=region)
    response = client.describe_table(TableName=table)
    client.get_waiter("table_exists").wait(TableName=table)
    print("Adding tag...")
    client.tag_resource(
        ResourceArn=response["Table"]["TableArn"],
        Tags=[{'Key': "Name", 'Value': "credstash"},]
    )
    print("Table has been created. "
          "Go read the README about how to create your KMS key")
create the secret store table in DDB in the specified region
390
12
225,056
def seal_aes_ctr_legacy(key_service, secret, digest_method=DEFAULT_DIGEST):
    """Encrypt `secret` using the key service.

    You can decrypt with the companion method `open_aes_ctr_legacy`.
    """
    # generate a 64 byte key.
    # Half will be for data encryption, the other half for HMAC
    key, encoded_key = key_service.generate_key_data(64)
    ciphertext, hmac = _seal_aes_ctr(
        secret, key, LEGACY_NONCE, digest_method,
    )
    return {
        'key': b64encode(encoded_key).decode('utf-8'),
        'contents': b64encode(ciphertext).decode('utf-8'),
        'hmac': codecs.encode(hmac, "hex_codec"),
        'digest': digest_method,
    }
Encrypts secret using the key service . You can decrypt with the companion method open_aes_ctr_legacy .
179
26
225,057
def check_status(self):
    """Check RabbitMQ health by opening and closing a broker channel."""
    logger.debug("Checking for a broker_url on django settings...")
    broker_url = getattr(settings, "BROKER_URL", None)
    logger.debug("Got %s as the broker_url. Connecting to rabbit...", broker_url)
    logger.debug("Attempting to connect to rabbit...")
    try:
        # conn is used as a context to release opened resources later
        with Connection(broker_url) as conn:
            conn.connect()  # exceptions may be raised upon calling connect
    except ConnectionRefusedError as e:
        self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Connection was refused."), e)
    except AccessRefused as e:
        self.add_error(ServiceUnavailable("Unable to connect to RabbitMQ: Authentication error."), e)
    except IOError as e:
        self.add_error(ServiceUnavailable("IOError"), e)
    except BaseException as e:
        self.add_error(ServiceUnavailable("Unknown error"), e)
    else:
        logger.debug("Connection estabilished. RabbitMQ is healthy.")
Check RabbitMQ service by opening and closing a broker channel .
253
12
225,058
def from_string(cls, value):
    """Return a single instance parsed from an Accept-header token."""
    match = cls.pattern.search(value)
    if match is None:
        raise ValueError('"%s" is not a valid media type' % value)
    try:
        weight = float(match.group('weight') or 1)
        return cls(match.group('mime_type'), weight)
    except ValueError:
        return cls(value)
Return single instance parsed from given accept header string .
88
10
225,059
def parse_header(cls, value='*/*'):
    """Parse an HTTP Accept header, yielding instances sorted by weight."""
    tokens = (token.strip() for token in value.split(','))
    instances = (cls.from_string(token) for token in tokens if token)
    yield from sorted(instances, reverse=True)
Parse HTTP accept header and return instances sorted by weight .
56
12
225,060
def convert_to_timezone_naive(time_to_freeze):
    """Convert a possibly timezone-aware datetime to a naive UTC datetime."""
    if time_to_freeze.tzinfo:
        utc_offset = time_to_freeze.utcoffset()
        time_to_freeze = (time_to_freeze - utc_offset).replace(tzinfo=None)
    return time_to_freeze
Converts a potentially timezone - aware datetime to be a naive UTC datetime
80
17
225,061
def move_to(self, target_datetime):
    """Move the frozen time to the given target datetime."""
    target = _parse_time_to_freeze(target_datetime)
    self.tick(delta=target - self.time_to_freeze)
Moves frozen date to the given target_datetime
56
11
225,062
def process_module(self, yam):
    """Process data nodes, RPCs and notifications in a single module."""
    for ann in yam.search(("ietf-yang-metadata", "annotation")):
        self.process_annotation(ann)
    # iterate over a copy: rpc/notification children are removed in-place
    for ch in yam.i_children[:]:
        if ch.keyword == "rpc":
            self.process_rpc(ch)
        elif ch.keyword == "notification":
            self.process_notification(ch)
        else:
            continue
        yam.i_children.remove(ch)
    self.process_children(yam, "//nc:*", 1)
Process data nodes RPCs and notifications in a single module .
129
12
225,063
def process_annotation(self, ann):
    """Process a metadata annotation."""
    tmpl = self.xsl_template("@" + self.qname(ann))
    ET.SubElement(tmpl, "param", name="level", select="0")
    ct = self.xsl_calltemplate("leaf", tmpl)
    ET.SubElement(ct, "with-param", name="level", select="$level")
    self.xsl_withparam("nsid", ann.i_module.i_modulename + ":", ct)
    self.type_param(ann, ct)
Process metadata annotation .
145
4
225,064
def process_rpc(self, rpc):
    """Process input and output parts of an rpc statement."""
    p = "/nc:rpc/" + self.qname(rpc)
    tmpl = self.xsl_template(p)
    inp = rpc.search_one("input")
    if inp is not None:
        ct = self.xsl_calltemplate("rpc-input", tmpl)
        self.xsl_withparam("nsid", rpc.i_module.i_modulename + ":", ct)
        self.process_children(inp, p, 2)
    outp = rpc.search_one("output")
    if outp is not None:
        self.process_children(outp, "/nc:rpc-reply", 1)
Process input and output parts of rpc .
168
9
225,065
def process_notification(self, ntf):
    """Process an event notification."""
    p = "/en:notification/" + self.qname(ntf)
    tmpl = self.xsl_template(p)
    ct = self.xsl_calltemplate("container", tmpl)
    self.xsl_withparam("level", "1", ct)
    if ntf.arg == "eventTime":  # local name collision
        self.xsl_withparam("nsid", ntf.i_module.i_modulename + ":", ct)
    self.process_children(ntf, p, 2)
Process event notification ntf .
140
6
225,066
def process_children(self, node, path, level, parent=None):
    """Process all children of `node`, generating a template for each."""
    data_parent = parent if parent else node
    chs = node.i_children
    for ch in chs:
        if ch.keyword in ["choice", "case"]:
            # transparent nodes: recurse with the same path and level
            self.process_children(ch, path, level, node)
            continue
        p = path + "/" + self.qname(ch)
        tmpl = self.xsl_template(p)
        ct = self.xsl_calltemplate(ch.keyword, tmpl)
        self.xsl_withparam("level", "%d" % level, ct)
        if (data_parent.i_module is None or
                ch.i_module.i_modulename != data_parent.i_module.i_modulename):
            self.xsl_withparam("nsid", ch.i_module.i_modulename + ":", ct)
        if ch.keyword in ["leaf", "leaf-list"]:
            self.type_param(ch, ct)
        elif ch.keyword != "anyxml":
            # lists nest one level deeper than containers
            offset = 2 if ch.keyword == "list" else 1
            self.process_children(ch, p, level + offset)
Process all children of node .
274
6
225,067
def type_param(self, node, ct):
    """Resolve the type of a leaf or leaf-list node for JSON encoding."""
    types = self.get_types(node)
    ftyp = types[0]
    if len(types) == 1:
        if ftyp in type_class:
            jtyp = type_class[ftyp]
        else:
            jtyp = "other"
        self.xsl_withparam("type", jtyp, ct)
    elif ftyp in ["string", "enumeration", "bits", "binary",
                  "identityref", "instance-identifier"]:
        self.xsl_withparam("type", "string", ct)
    else:
        # union: collect the distinct member type classes
        # NOTE(review): nesting of the break/decimal checks reconstructed from
        # a flattened token stream — confirm against upstream pyang source.
        opts = []
        for t in types:
            if t in union_class:
                ut = union_class[t]
            elif t in ["int64", "uint64"] or t.startswith("decimal@"):
                ut = t
            else:
                ut = "other"
            if ut not in opts:
                opts.append(ut)
                if ut == "other":
                    break
                if ut == "decimal" and "integer" not in opts:
                    opts.append("integer")
        self.xsl_withparam("type", "union", ct)
        self.xsl_withparam("options", ",".join(opts) + ",", ct)
Resolve the type of a leaf or leaf - list node for JSON .
298
15
225,068
def xsl_text(self, text, parent):
    """Construct an XSLT <text> element containing the given text."""
    elem = ET.SubElement(parent, "text")
    elem.text = text
    return elem
Construct an XSLT text element containing text .
33
10
225,069
def xsl_withparam(self, name, value, parent):
    """Construct an XSLT <with-param> element with the given name and value."""
    elem = ET.SubElement(parent, "with-param", name=name)
    elem.text = value
    return elem
Construct an XSLT with - param element .
42
10
225,070
def element(cls, name, parent=None, interleave=None, occur=0):
    """Create an element schema node with the given name."""
    node = cls("element", parent, interleave=interleave)
    node.attr["name"] = name
    node.occur = occur
    return node
Create an element node .
57
5
225,071
def leaf_list(cls, name, parent=None, interleave=None):
    """Create a _list_ node representing a leaf-list named `name`."""
    node = cls("_list_", parent, interleave=interleave)
    node.attr["name"] = name
    node.occur = 3
    node.keys = None
    node.maxEl = None
    node.minEl = "0"
    return node
Create _list_ node for a leaf - list .
75
11
225,072
def list(cls, name, parent=None, interleave=None):
    """Create a _list_ node representing a list named `name`."""
    node = cls.leaf_list(name, parent, interleave=interleave)
    node.keymap = {}
    node.keys = []
    return node
Create _list_ node for a list .
52
9
225,073
def choice(cls, parent=None, occur=0):
    """Create a choice node with no default case."""
    node = cls("choice", parent)
    node.default_case = None
    node.occur = occur
    return node
Create choice node .
40
4
225,074
def define(cls, name, parent=None, interleave=False):
    """Create a define node named `name`."""
    node = cls("define", parent, interleave=interleave)
    node.attr["name"] = name
    node.occur = 0
    return node
Create define node .
53
4
225,075
def adjust_interleave(self, interleave):
    """Set the node's interleave status, inheriting from the parent when
    undefined.

    If `interleave` is None and the node has a parent, the parent's
    interleave status is copied; otherwise `interleave` is stored as-is.
    """
    # `is None` rather than `== None`: only a truly undefined status
    # inherits; an explicit False must be kept.
    if interleave is None and self.parent:
        self.interleave = self.parent.interleave
    else:
        self.interleave = interleave
Inherit interleave status from parent if undefined .
42
11
225,076
def subnode(self, node):
    """Attach `node` as a child of the receiver."""
    node.parent = self
    self.children.append(node)
    # re-derive the child's interleave status now that it has a parent
    node.adjust_interleave(node.interleave)
Make node receiver s child .
34
6
225,077
def annot(self, node):
    """Attach `node` as an annotation of the receiver."""
    node.parent = self
    self.annots.append(node)
Add node as an annotation of the receiver .
22
9
225,078
def start_tag(self, alt=None, empty=False):
    """Return the XML start tag for the receiver.

    `alt` overrides the element name; `empty` yields an empty-element tag
    followed by a '%s' serialization placeholder.
    """
    tag = alt if alt else self.name
    pieces = ["<" + tag]
    for key in self.attr:
        # '%' is doubled so the result survives later %-formatting
        pieces.append(' %s="%s"' % (key, escape(self.attr[key],
                                                {'"': "&quot;", '%': "%%"})))
    body = "".join(pieces)
    return body + "/>%s" if empty else body + ">"
Return XML start tag for the receiver .
104
8
225,079
def end_tag(self, alt=None):
    """Return the XML end tag for the receiver (or for `alt` if given)."""
    return "</%s>" % (alt if alt else self.name)
Return XML end tag for the receiver .
34
8
225,080
def serialize(self, occur=None):
    """Return the RELAX NG (XML) representation of the receiver and subtree.

    Looks up the keyword-specific formatter in the class-level `ser_format`
    map (falling back to the generic `_default_format`), then substitutes
    the escaped node text plus the serialized children into the formatter's
    trailing '%s' slot.
    """
    fmt = self.ser_format.get(self.name, SchemaNode._default_format)
    return fmt(self, occur) % (escape(self.text) + self.serialize_children())
Return RELAX NG representation of the receiver and subtree .
59
12
225,081
def _default_format ( self , occur ) : if self . text or self . children : return self . start_tag ( ) + "%s" + self . end_tag ( ) return self . start_tag ( empty = True )
Return the default serialization format .
51
7
225,082
def _define_format(self, occur):
    """Serialization format for a define node.

    A recorded default value is surfaced as an nma:default attribute; the
    content slot is the children's order wrapper (`_chorder`) or an
    empty-element placeholder when there are no RNG children.  Literal '%'
    in annotations is doubled to survive the later %-formatting pass.
    """
    if hasattr(self, "default"):
        self.attr["nma:default"] = self.default
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    return (self.start_tag() + self.serialize_annots().replace("%", "%%") +
            middle + self.end_tag())
Return the serialization format for a define node .
103
10
225,083
def _element_format(self, occur):
    """Serialization format for an element node.

    The effective occurrence code is the `occur` argument when given,
    otherwise the node's own.  Code 1 marks an optional node with a
    default (nma:default) or an implicit node (nma:implicit).  The result
    is wrapped in <optional> unless the node is mandatory (code 2) or sits
    directly under a choice / single-child case, where optionality is
    expressed by the parent.
    """
    if occur:
        occ = occur
    else:
        occ = self.occur
    if occ == 1:
        if hasattr(self, "default"):
            self.attr["nma:default"] = self.default
        else:
            self.attr["nma:implicit"] = "true"
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    # '%' in annotations is doubled to survive later %-formatting
    fmt = (self.start_tag() + self.serialize_annots().replace("%", "%%") +
           middle + self.end_tag())
    if (occ == 2 or self.parent.name == "choice" or
            self.parent.name == "case" and len(self.parent.children) == 1):
        return fmt
    else:
        return "<optional>" + fmt + "</optional>"
Return the serialization format for an element node .
194
10
225,084
def _list_format(self, occur):
    """Serialization format for a _list_ node (list or leaf-list).

    List keys are recorded in an nma:key attribute and serialized first
    (with mandatory occurrence).  min/max element counts select the
    zeroOrMore/oneOrMore wrapper and emit nma:min-elements /
    nma:max-elements annotations as needed.
    """
    if self.keys:
        self.attr["nma:key"] = " ".join(self.keys)
        # key leafs are serialized as mandatory (occur=2) elements
        keys = ''.join([self.keymap[k].serialize(occur=2) for k in self.keys])
    else:
        keys = ""
    if self.maxEl:
        self.attr["nma:max-elements"] = self.maxEl
    if int(self.minEl) == 0:
        ord_ = "zeroOrMore"
    else:
        ord_ = "oneOrMore"
        if int(self.minEl) > 1:
            self.attr["nma:min-elements"] = self.minEl
    middle = self._chorder() if self.rng_children() else "<empty/>%s"
    # '%' in annotations/keys is doubled to survive later %-formatting
    return ("<" + ord_ + ">" + self.start_tag("element") +
            (self.serialize_annots() + keys).replace("%", "%%") +
            middle + self.end_tag("element") + "</" + ord_ + ">")
Return the serialization format for a _list_ node .
253
12
225,085
def _choice_format ( self , occur ) : middle = "%s" if self . rng_children ( ) else "<empty/>%s" fmt = self . start_tag ( ) + middle + self . end_tag ( ) if self . occur != 2 : return "<optional>" + fmt + "</optional>" else : return fmt
Return the serialization format for a choice node .
73
10
225,086
def _case_format ( self , occur ) : if self . occur == 1 : self . attr [ "nma:implicit" ] = "true" ccnt = len ( self . rng_children ( ) ) if ccnt == 0 : return "<empty/>%s" if ccnt == 1 or not self . interleave : return self . start_tag ( "group" ) + "%s" + self . end_tag ( "group" ) return ( self . start_tag ( "interleave" ) + "%s" + self . end_tag ( "interleave" ) )
Return the serialization format for a case node .
130
10
225,087
def process_children(self, node, parent, pmod):
    """Fill `parent` (a dict) with entries for the data children of `node`.

    rpc/notification subtrees are skipped; choice/case are transparent and
    their children appear directly under the current parent.  Each entry
    maps the node name — prefixed with its module name when it differs from
    `pmod` — to a list [keyword, ...] where containers and lists carry a
    nested dict of their children, lists additionally carry their key list,
    and leafs carry their resolved base type.
    """
    for ch in node.i_children:
        if ch.keyword in ["rpc", "notification"]:
            continue
        if ch.keyword in ["choice", "case"]:
            # transparent schema nodes: descend into the same parent dict
            self.process_children(ch, parent, pmod)
            continue
        if ch.i_module.i_modulename == pmod:
            nmod = pmod
            nodename = ch.arg
        else:
            # foreign module: qualify the name with its module name
            nmod = ch.i_module.i_modulename
            nodename = "%s:%s" % (nmod, ch.arg)
        ndata = [ch.keyword]
        if ch.keyword == "container":
            ndata.append({})
            self.process_children(ch, ndata[1], nmod)
        elif ch.keyword == "list":
            ndata.append({})
            self.process_children(ch, ndata[1], nmod)
            ndata.append([(k.i_module.i_modulename, k.arg)
                          for k in ch.i_key])
        elif ch.keyword in ["leaf", "leaf-list"]:
            ndata.append(self.base_type(ch.search_one("type")))
        # (an unused local `modname` assignment was removed here)
        parent[nodename] = ndata
Process all children of node except rpc and notification .
305
11
225,088
def base_type(self, type):
    """Return the base (built-in) type of `type`.

    Follows leafref targets and typedef chains to the underlying built-in
    type.  decimal64 is returned as [name, fraction-digits]; union is
    returned as [name, [resolved member types]]; everything else as the
    plain type name string.
    """
    while 1:
        if type.arg == "leafref":
            # follow the leafref to its target leaf's type
            node = type.i_type_spec.i_target_node
        elif type.i_typedef is None:
            # reached a built-in type
            break
        else:
            # unwind one level of typedef indirection
            node = type.i_typedef
        type = node.search_one("type")
    if type.arg == "decimal64":
        return [type.arg, int(type.search_one("fraction-digits").arg)]
    elif type.arg == "union":
        return [type.arg, [self.base_type(x) for x in type.i_type_spec.types]]
    else:
        return type.arg
Return the base type of type .
156
7
225,089
def skip(self):
    """Skip whitespace (and, unless comments are kept, comments), adding
    the number of characters skipped on the current line to self.offset.

    Reads further input lines as needed via self.readline().
    """
    buflen = len(self.buf)
    while True:
        self.buf = self.buf.lstrip()
        if self.buf == '':
            # line exhausted: fetch the next one and restart the count
            self.readline()
            buflen = len(self.buf)
        else:
            # account for the whitespace stripped from this line
            self.offset += (buflen - len(self.buf))
            break
    # do not keep comments in the syntax tree
    if not self.keep_comments:
        # skip line comment
        if self.buf[0] == '/':
            if self.buf[1] == '/':
                self.readline()
                return self.skip()
            # skip block comment
            elif self.buf[1] == '*':
                i = self.buf.find('*/')
                while i == -1:
                    # terminator not on this line; keep reading
                    self.readline()
                    i = self.buf.find('*/')
                self.set_buf(i + 2)
                return self.skip()
    # NOTE(review): self.buf[1] assumes at least one character follows '/';
    # presumably lines keep their trailing newline so this holds — confirm
    # against readline().
Skip whitespace and count position
197
6
225,090
def parse(self, ctx, ref, text):
    """Parse the string `text` containing a single YANG statement.

    Returns the parsed statement tree, or None on error; errors are
    reported through ctx.errors.  `ref` identifies the source for error
    positions.
    """
    self.ctx = ctx
    self.pos = error.Position(ref)
    self.top = None
    try:
        self.tokenizer = YangTokenizer(text, self.pos, ctx.errors,
                                       ctx.max_line_len, ctx.keep_comments,
                                       not ctx.lax_quote_checks)
        stmt = self._parse_statement(None)
    except error.Abort:
        return None
    except error.Eof:
        error.err_add(self.ctx.errors, self.pos, 'EOF_ERROR', ())
        return None
    try:
        # we expect an error.Eof at this point; everything else is an error
        self.tokenizer.peek()
    except error.Eof:
        return stmt
    except Exception:
        # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any other failure means trailing garbage
        pass
    error.err_add(self.ctx.errors, self.pos, 'TRAILING_GARBAGE', ())
    return None
Parse the string text containing a YANG statement .
213
11
225,091
def add_validation_phase(phase, before=None, after=None):
    """Register `phase` in the global validation-phase list.

    The phase is inserted immediately before `before` or immediately after
    `after`, whichever is encountered first while scanning the list; if
    neither anchor is found, it is appended at the end.
    """
    for pos, existing in enumerate(_validation_phases):
        if existing == before:
            _validation_phases.insert(pos, phase)
            return
        if existing == after:
            _validation_phases.insert(pos + 1, phase)
            return
    # no anchor found - append at the end
    _validation_phases.append(phase)
Add a validation phase to the framework .
100
8
225,092
def add_validation_fun(phase, keywords, f):
    """Add validation function `f` for each keyword in `keywords` at `phase`.

    If a function is already registered for a (phase, keyword) pair, the
    new function is chained after the existing one.
    """
    for keyword in keywords:
        if (phase, keyword) in _validation_map:
            oldf = _validation_map[(phase, keyword)]
            # BUGFIX: bind `oldf` as a default argument.  The original
            # closure captured the loop variable `oldf` by reference, so
            # when several keywords already had entries, every chained
            # function ended up calling the *last* iteration's `oldf`.
            def newf(ctx, s, oldf=oldf):
                oldf(ctx, s)
                f(ctx, s)
            _validation_map[(phase, keyword)] = newf
        else:
            _validation_map[(phase, keyword)] = f
Add a validation function to some phase in the framework .
104
11
225,093
def v_init_extension(ctx, stmt):
    """Resolve the prefix of an extension statement and rewrite
    stmt.keyword as (modulename, identifier).

    The extension's module name and revision are recorded on the statement;
    stmt.i_extension itself is left unresolved (None) for a later phase.
    """
    (prefix, identifier) = stmt.raw_keyword
    (modname, revision) = prefix_to_modulename_and_revision(
        stmt.i_module, prefix, stmt.pos, ctx.errors)
    stmt.keyword = (modname, identifier)
    stmt.i_extension_modulename = modname
    stmt.i_extension_revision = revision
    stmt.i_extension = None
find the modulename of the prefix and set stmt . keyword
113
14
225,094
def v_grammar_unique_defs(ctx, stmt):
    """Verify that typedefs and groupings under `stmt` are uniquely named,
    recording each definition in the corresponding stmt.i_* map.

    For top-level statements (stmt.parent is None) the check additionally
    covers features, identities and extensions.  Duplicates are reported
    via err_add with a keyword-specific error code.
    """
    checks = [
        ('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs),
        ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings),
    ]
    if stmt.parent is None:
        # these constructs can only appear at the top level
        checks += [
            ('feature', 'FEATURE_ALREADY_DEFINED', stmt.i_features),
            ('identity', 'IDENTITY_ALREADY_DEFINED', stmt.i_identities),
            ('extension', 'EXTENSION_ALREADY_DEFINED', stmt.i_extensions),
        ]
    for keyword, errcode, registry in checks:
        for definition in stmt.search(keyword):
            if definition.arg in registry:
                previous = registry[definition.arg]
                err_add(ctx.errors, definition.pos, errcode,
                        (definition.arg, previous.pos))
            else:
                registry[definition.arg] = definition
Verify that all typedefs and groupings are unique. Called for every statement. Stores typedefs in stmt.i_typedefs and groupings in stmt.i_groupings.
255
41
225,095
def v_type_extension(ctx, stmt):
    """Verify that the extension statement matches an extension definition,
    and that an argument is given exactly when the definition declares one.

    On success stmt.i_extension points at the definition; otherwise an
    EXTENSION_NOT_DEFINED (or argument-mismatch) error is reported.
    """
    (modulename, identifier) = stmt.keyword
    revision = stmt.i_extension_revision
    module = modulename_to_module(stmt.i_module, modulename, revision)
    if module is None:
        return
    if identifier not in module.i_extensions:
        if module.i_modulename == stmt.i_orig_module.i_modulename:
            # extension defined in current submodule
            if identifier not in stmt.i_orig_module.i_extensions:
                err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED',
                        (identifier, module.arg))
                return
            else:
                stmt.i_extension = stmt.i_orig_module.i_extensions[identifier]
        else:
            err_add(ctx.errors, stmt.pos, 'EXTENSION_NOT_DEFINED',
                    (identifier, module.arg))
            return
    else:
        stmt.i_extension = module.i_extensions[identifier]
    # check the argument against the extension's 'argument' substatement
    ext_arg = stmt.i_extension.search_one('argument')
    if stmt.arg is not None and ext_arg is None:
        err_add(ctx.errors, stmt.pos, 'EXTENSION_ARGUMENT_PRESENT',
                identifier)
    elif stmt.arg is None and ext_arg is not None:
        err_add(ctx.errors, stmt.pos, 'EXTENSION_NO_ARGUMENT_PRESENT',
                identifier)
verify that the extension matches the extension definition
358
9
225,096
def v_type_if_feature ( ctx , stmt , no_error_report = False ) : stmt . i_feature = None # Verify the argument type expr = syntax . parse_if_feature_expr ( stmt . arg ) if stmt . i_module . i_version == '1' : # version 1 allows only a single value as if-feature if type ( expr ) != type ( '' ) : err_add ( ctx . errors , stmt . pos , 'BAD_VALUE' , ( stmt . arg , 'identifier-ref' ) ) return def eval ( expr ) : if type ( expr ) == type ( '' ) : return has_feature ( expr ) else : ( op , op1 , op2 ) = expr if op == 'not' : return not eval ( op1 ) elif op == 'and' : return eval ( op1 ) and eval ( op2 ) elif op == 'or' : return eval ( op1 ) or eval ( op2 ) def has_feature ( name ) : # raises Abort if the feature is not defined # returns True if we compile with the feature # returns False if we compile without the feature found = None if name . find ( ":" ) == - 1 : prefix = None else : [ prefix , name ] = name . split ( ':' , 1 ) if prefix is None or stmt . i_module . i_prefix == prefix : # check local features pmodule = stmt . i_module else : # this is a prefixed name, check the imported modules pmodule = prefix_to_module ( stmt . i_module , prefix , stmt . pos , ctx . errors ) if pmodule is None : raise Abort if name in pmodule . i_features : f = pmodule . i_features [ name ] if prefix is None and not is_submodule_included ( stmt , f ) : pass else : found = pmodule . i_features [ name ] v_type_feature ( ctx , found ) if pmodule . i_modulename in ctx . features : if name not in ctx . features [ pmodule . i_modulename ] : return False if found is None and no_error_report == False : err_add ( ctx . errors , stmt . pos , 'FEATURE_NOT_FOUND' , ( name , pmodule . arg ) ) raise Abort return found is not None # Evaluate the if-feature expression, and verify that all # referenced features exist. try : if eval ( expr ) == False : # prune the parent. 
# since the parent can have more than one if-feature # statement, we must check if the parent # already has been scheduled for removal if stmt . parent not in stmt . i_module . i_prune : stmt . i_module . i_prune . append ( stmt . parent ) except Abort : pass
Verify that the referenced feature exists.
634
8
225,097
def v_type_base(ctx, stmt, no_error_report=False):
    """Verify that the identity referenced by a base statement exists.

    On success stmt.i_identity points at the definition; otherwise an
    IDENTITY_NOT_FOUND error is reported unless `no_error_report` is set.
    """
    # Find the identity
    name = stmt.arg
    stmt.i_identity = None
    if name.find(":") == -1:
        prefix = None
    else:
        [prefix, name] = name.split(':', 1)
    if prefix is None or stmt.i_module.i_prefix == prefix:
        # check local identities
        pmodule = stmt.i_module
    else:
        # this is a prefixed name, check the imported modules
        pmodule = prefix_to_module(stmt.i_module, prefix, stmt.pos,
                                   ctx.errors)
        if pmodule is None:
            return
    if name in pmodule.i_identities:
        i = pmodule.i_identities[name]
        if prefix is None and not is_submodule_included(stmt, i):
            # the defining submodule is not included; treat as not found
            pass
        else:
            stmt.i_identity = i
            v_type_identity(ctx, stmt.i_identity)
    if stmt.i_identity is None and no_error_report == False:
        err_add(ctx.errors, stmt.pos, 'IDENTITY_NOT_FOUND',
                (name, pmodule.arg))
verify that the referenced identity exists .
278
8
225,098
def v_unique_name_defintions(ctx, stmt):
    """Make sure that top-level typedef/grouping definitions in the module
    do not clash with definitions in its included submodules.

    (The misspelled name is kept for backward compatibility with callers.)
    """
    defs = [('typedef', 'TYPE_ALREADY_DEFINED', stmt.i_typedefs),
            ('grouping', 'GROUPING_ALREADY_DEFINED', stmt.i_groupings)]
    def f(s):
        # report s if it redefines a name already recorded on the module
        for (keyword, errcode, dict) in defs:
            if s.keyword == keyword and s.arg in dict:
                err_add(ctx.errors, dict[s.arg].pos, errcode,
                        (s.arg, s.pos))
    for i in stmt.search('include'):
        submodulename = i.arg
        subm = ctx.get_module(submodulename)
        if subm is not None:
            # walk every statement nested below the submodule's top level
            for s in subm.substmts:
                for ss in s.substmts:
                    iterate_stmt(ss, f)
Make sure that all top-level definitions in a module are unique.
217
13
225,099
def v_unique_name_children(ctx, stmt):
    """Make sure that each child of stmt has a unique name.

    Data nodes inside choice/case are checked against the same namespace
    as their enclosing parent.  Duplicates are reported at the later of
    the two positions, pointing back to the earlier definition.
    """
    def sort_pos(p1, p2):
        # return the two positions ordered by line number
        if p1.line < p2.line:
            return (p1, p2)
        else:
            return (p2, p1)
    dict = {}
    chs = stmt.i_children
    def check(c):
        # children are keyed by (module name, node name)
        key = (c.i_module.i_modulename, c.arg)
        if key in dict:
            dup = dict[key]
            (minpos, maxpos) = sort_pos(c.pos, dup.pos)
            # if the duplicate comes in via a uses, report at the uses
            pos = chk_uses_pos(c, maxpos)
            err_add(ctx.errors, pos, 'DUPLICATE_CHILD_NAME',
                    (stmt.arg, stmt.pos, c.arg, minpos))
        else:
            dict[key] = c
        # also check all data nodes in the cases
        if c.keyword == 'choice':
            for case in c.i_children:
                for cc in case.i_children:
                    check(cc)
    for c in chs:
        check(c)
Make sure that each child of stmt has a unique name
240
12