idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
37,000
def _compile_create_encoding ( self , sql , connection , blueprint ) : charset = blueprint . charset or connection . get_config ( "charset" ) if charset : sql += " DEFAULT CHARACTER SET %s" % charset collation = blueprint . collation or connection . get_config ( "collation" ) if collation : sql += " COLLATE %s" % collation return sql
Append the character set specifications to a command .
37,001
def _get_path ( self , name ) : path = self . option ( "path" ) if path is None : path = self . _get_seeders_path ( ) return os . path . join ( path , "%s.py" % name )
Get the destination class path .
37,002
def update(self, _attributes=None, **attributes):
    """Perform an update on all the related models."""
    if _attributes is not None:
        attributes.update(_attributes)
    # Keep the updated_at column fresh when the related model uses timestamps.
    if self._related.uses_timestamps():
        key = self.get_related_updated_at()
        attributes[key] = self._related.fresh_timestamp()
    return self._query.update(attributes)
Perform an update on all the related models .
37,003
def get_dialect(self):
    """Return the SQLAlchemy database dialect class corresponding to this
    URL's driver name."""
    # "driver+dialect" entries are registered with a dot separator.
    name = self.drivername.replace("+", ".") if "+" in self.drivername else self.drivername
    cls = registry.load(name)
    # Some entry points expose a wrapper with a ``dialect`` attribute.
    dialect_attr = getattr(cls, "dialect", None)
    if isinstance(dialect_attr, type) and issubclass(dialect_attr, Dialect):
        return dialect_attr
    return cls
Return the SQLAlchemy database dialect class corresponding to this URL's driver name.
37,004
def translate_connect_args(self, names=None, **kw):
    """Translate url attributes into a dictionary of connection arguments.

    Bug fix: the original declared ``names=[]`` (a shared mutable default)
    and ``pop(0)``-ed items off it, mutating the caller's list and leaking
    consumed names across calls that relied on the default.

    :param names: optional positional renames applied to the attributes in
                  order (host, database, username, password, port).
    :param kw: keyword renames applied per attribute name.
    """
    # Work on a private copy so the caller's list is never mutated.
    names = list(names) if names else []
    translated = {}
    for sname in ("host", "database", "username", "password", "port"):
        if names:
            name = names.pop(0)
        elif sname in kw:
            name = kw[sname]
        else:
            name = sname
        # Only include attributes that are actually set on the URL.
        if name is not None and getattr(self, sname, False):
            translated[name] = getattr(self, sname)
    return translated
Translate url attributes into a dictionary of connection arguments .
37,005
def get_check_declaration_sql(self, definition):
    """Obtains DBMS specific SQL code portion needed to set a CHECK
    constraint declaration, to be used in statements like CREATE TABLE."""
    constraints = []
    for field, spec in definition.items():
        if isinstance(spec, basestring):
            # A raw string is used verbatim as the check expression.
            constraints.append("CHECK (%s)" % spec)
            continue
        if "min" in spec:
            constraints.append("CHECK (%s >= %s)" % (field, spec["min"]))
        if "max" in spec:
            constraints.append("CHECK (%s <= %s)" % (field, spec["max"]))
    return ", ".join(constraints)
Obtains DBMS specific SQL code portion needed to set a CHECK constraint declaration to be used in statements like CREATE TABLE .
37,006
def get_unique_constraint_declaration_sql(self, name, index):
    """Obtains DBMS specific SQL code portion needed to set a unique
    constraint declaration, to be used in statements like CREATE TABLE."""
    columns = index.get_quoted_columns(self)
    if not columns:
        raise DBALException('Incomplete definition. "columns" required.')
    identifier = Identifier(name)
    return "CONSTRAINT %s UNIQUE (%s)%s" % (
        identifier.get_quoted_name(self),
        self.get_index_field_declaration_list_sql(columns),
        self.get_partial_index_sql(index),
    )
Obtains DBMS specific SQL code portion needed to set a unique constraint declaration to be used in statements like CREATE TABLE .
37,007
def get_foreign_key_declaration_sql(self, foreign_key):
    """Obtain DBMS specific SQL code portion needed to set the FOREIGN KEY
    constraint of a field declaration, for statements like CREATE TABLE."""
    # Base declaration followed by the platform-specific options.
    return (
        self.get_foreign_key_base_declaration_sql(foreign_key)
        + self.get_advanced_foreign_key_options_sql(foreign_key)
    )
Obtain DBMS specific SQL code portion needed to set the FOREIGN KEY constraint of a field declaration to be used in statements like CREATE TABLE .
37,008
def get_advanced_foreign_key_options_sql(self, foreign_key):
    """Returns the FOREIGN KEY query section dealing with non-standard
    options such as ON UPDATE / ON DELETE referential actions."""
    parts = []
    # ON UPDATE is only emitted when the platform supports it.
    if self.supports_foreign_key_on_update() and foreign_key.has_option("on_update"):
        parts.append(
            " ON UPDATE %s"
            % self.get_foreign_key_referential_action_sql(foreign_key.get_option("on_update"))
        )
    if foreign_key.has_option("on_delete"):
        parts.append(
            " ON DELETE %s"
            % self.get_foreign_key_referential_action_sql(foreign_key.get_option("on_delete"))
        )
    return "".join(parts)
Returns the FOREIGN KEY query section dealing with non-standard options, such as MATCH, INITIALLY DEFERRED, ON UPDATE, etc.
37,009
def get_foreign_key_referential_action_sql(self, action):
    """Returns the given referential action in uppercase if valid,
    otherwise throws an exception."""
    normalized = action.upper()
    valid_actions = ("CASCADE", "SET NULL", "NO ACTION", "RESTRICT", "SET DEFAULT")
    if normalized not in valid_actions:
        raise DBALException("Invalid foreign key action: %s" % normalized)
    return normalized
Returns the given referential action in uppercase if valid otherwise throws an exception .
37,010
def get_foreign_key_base_declaration_sql(self, foreign_key):
    """Obtains DBMS specific SQL code portion needed to set the FOREIGN KEY
    constraint of a field declaration, for statements like CREATE TABLE."""
    sql = ""
    if foreign_key.get_name():
        sql += "CONSTRAINT %s " % foreign_key.get_quoted_name(self)
    sql += "FOREIGN KEY ("
    # Validate the definition before rendering the column lists.
    if not foreign_key.get_local_columns():
        raise DBALException('Incomplete definition. "local" required.')
    if not foreign_key.get_foreign_columns():
        raise DBALException('Incomplete definition. "foreign" required.')
    if not foreign_key.get_foreign_table_name():
        raise DBALException('Incomplete definition. "foreign_table" required.')
    sql += "%s) REFERENCES %s (%s)" % (
        ", ".join(foreign_key.get_quoted_local_columns(self)),
        foreign_key.get_quoted_foreign_table_name(self),
        ", ".join(foreign_key.get_quoted_foreign_columns(self)),
    )
    return sql
Obtains DBMS specific SQL code portion needed to set the FOREIGN KEY constraint of a field declaration to be used in statements like CREATE TABLE .
37,011
def get_column_declaration_list_sql(self, fields):
    """Gets declaration of a number of fields in bulk."""
    return ", ".join(
        self.get_column_declaration_sql(name, field)
        for name, field in fields.items()
    )
Gets declaration of a number of fields in bulk .
37,012
def get_create_index_sql(self, index, table):
    """Returns the SQL to create an index on a table on this platform."""
    if isinstance(table, Table):
        table = table.get_quoted_name(self)
    name = index.get_quoted_name(self)
    columns = index.get_quoted_columns(self)
    if not columns:
        raise DBALException('Incomplete definition. "columns" required.')
    # Primary keys are created through ALTER TABLE, not CREATE INDEX.
    if index.is_primary():
        return self.get_create_primary_key_sql(index, table)
    query = "CREATE %sINDEX %s ON %s" % (
        self.get_create_index_sql_flags(index),
        name,
        table,
    )
    query += " (%s)%s" % (
        self.get_index_field_declaration_list_sql(columns),
        self.get_partial_index_sql(index),
    )
    return query
Returns the SQL to create an index on a table on this platform .
37,013
def get_create_primary_key_sql(self, index, table):
    """Returns the SQL to create an unnamed primary key constraint."""
    column_list = self.get_index_field_declaration_list_sql(index.get_quoted_columns(self))
    return "ALTER TABLE %s ADD PRIMARY KEY (%s)" % (table, column_list)
Returns the SQL to create an unnamed primary key constraint .
37,014
def get_create_foreign_key_sql(self, foreign_key, table):
    """Returns the SQL to create a new foreign key."""
    if isinstance(table, Table):
        table = table.get_quoted_name(self)
    return "ALTER TABLE %s ADD %s" % (
        table,
        self.get_foreign_key_declaration_sql(foreign_key),
    )
Returns the SQL to create a new foreign key .
37,015
def get_drop_table_sql(self, table):
    """Returns the SQL snippet to drop an existing table."""
    if isinstance(table, Table):
        table = table.get_quoted_name(self)
    return "DROP TABLE %s" % table
Returns the SQL snippet to drop an existing table .
37,016
def get_drop_index_sql(self, index, table=None):
    """Returns the SQL to drop an index from a table.

    NOTE(review): ``table`` is accepted but unused here — presumably
    overridden by platforms that require the table name; confirm.
    """
    if isinstance(index, Index):
        index = index.get_quoted_name(self)
    return "DROP INDEX %s" % index
Returns the SQL to drop an index from a table .
37,017
def _get_create_table_sql(self, table_name, columns, options=None):
    """Returns the SQL used to create a table."""
    options = options or {}
    column_list_sql = self.get_column_declaration_list_sql(columns)
    if options.get("unique_constraints"):
        for name, definition in options["unique_constraints"].items():
            column_list_sql += ", %s" % self.get_unique_constraint_declaration_sql(name, definition)
    if options.get("primary"):
        column_list_sql += ", PRIMARY KEY(%s)" % ", ".join(options["primary"])
    if options.get("indexes"):
        for index, definition in options["indexes"]:
            column_list_sql += ", %s" % self.get_index_declaration_sql(index, definition)
    query = "CREATE TABLE %s (%s" % (table_name, column_list_sql)
    # Append CHECK constraints derived from the column definitions, if any.
    check = self.get_check_declaration_sql(columns)
    if check:
        query += ", %s" % check
    query += ")"
    sql = [query]
    # Foreign keys are emitted as separate ALTER TABLE statements.
    if options.get("foreign_keys"):
        for definition in options["foreign_keys"]:
            sql.append(self.get_create_foreign_key_sql(definition, table_name))
    return sql
Returns the SQL used to create a table .
37,018
def quote_identifier(self, string):
    """Quotes a string so that it can be safely used as a table or column
    name, even if it is a reserved word of the platform. Identifier chains
    separated by dots are detected and quoted independently."""
    if "." not in string:
        return self.quote_single_identifier(string)
    return ".".join(self.quote_single_identifier(part) for part in string.split("."))
Quotes a string so that it can be safely used as a table or column name even if it is a reserved word of the platform . This also detects identifier chains separated by dot and quotes them independently .
37,019
def _detect_database_platform ( self ) : version = self . _get_database_platform_version ( ) if version is not None : self . _platform = self . _create_database_platform_for_version ( version ) else : self . _platform = self . get_dbal_platform ( )
Detects and sets the database platform .
37,020
def _check_for_more_pages ( self ) : self . _has_more = len ( self . _items ) > self . per_page self . _items = self . _items [ 0 : self . per_page ]
Check for more pages . The last item will be sliced off .
37,021
def diff_index(self, index1, index2):
    """Finds the difference between the indexes index1 and index2.

    Returns False when the two indexes fulfill each other (no difference),
    True otherwise."""
    equivalent = index1.is_fullfilled_by(index2) and index2.is_fullfilled_by(index1)
    return not equivalent
Finds the difference between the indexes index1 and index2 .
37,022
def call(self, klass):
    """Seed the given connection from the given class."""
    self._resolve(klass).run()
    # Report progress only when attached to a console command.
    if self._command:
        self._command.line("<info>Seeded:</info> <fg=cyan>%s</>" % klass.__name__)
Seed the given connection from the given class .
37,023
def _resolve ( self , klass ) : resolver = None if self . _resolver : resolver = self . _resolver elif self . _command : resolver = self . _command . resolver instance = klass ( ) instance . set_connection_resolver ( resolver ) if self . _command : instance . set_command ( self . _command ) return instance
Resolve an instance of the given seeder klass .
37,024
def select(self, *columns):
    """Set the columns to be selected."""
    # Default to selecting everything.
    self.columns = list(columns) if columns else ["*"]
    return self
Set the columns to be selected
37,025
def select_raw(self, expression, bindings=None):
    """Add a new raw select expression to the query."""
    self.add_select(QueryExpression(expression))
    if bindings:
        self.add_binding(bindings, "select")
    return self
Add a new raw select expression to the query
37,026
def select_sub(self, query, as_):
    """Add a subselect expression to the query."""
    if isinstance(query, QueryBuilder):
        bindings = query.get_bindings()
        query = query.to_sql()
    elif isinstance(query, basestring):
        bindings = []
    else:
        raise ArgumentError("Invalid subselect")
    return self.select_raw("(%s) AS %s" % (query, self._grammar.wrap(as_)), bindings)
Add a subselect expression to the query
37,027
def add_select(self, *column):
    """Add a new select column to the query."""
    # Appending an empty tuple is a no-op, so no special-casing is needed.
    self.columns += list(column)
    return self
Add a new select column to query
37,028
def left_join_where(self, table, one, operator, two):
    """Add a left join where clause to the query."""
    return self.join_where(table, one, operator, two, "left")
Add a left join where clause to the query
37,029
def right_join(self, table, one=None, operator=None, two=None):
    """Add a right join to the query."""
    # A pre-built JoinClause carries its own type flag.
    if isinstance(table, JoinClause):
        table.type = "right"
    return self.join(table, one, operator, two, "right")
Add a right join to the query
37,030
def right_join_where(self, table, one, operator, two):
    """Add a right join where clause to the query."""
    return self.join_where(table, one, operator, two, "right")
Add a right join where clause to the query
37,031
def group_by(self, *columns):
    """Add a group by clause to the query."""
    self.groups.extend(columns)
    return self
Add a group by clause to the query
37,032
def having_raw(self, sql, bindings=None, boolean="and"):
    """Add a raw having clause to the query."""
    self.havings.append({"type": "raw", "sql": sql, "boolean": boolean})
    self.add_binding(bindings, "having")
    return self
Add a raw having clause to the query
37,033
def order_by(self, column, direction="asc"):
    """Add an order by clause to the query."""
    # Union queries keep their ordering in a separate list.
    target = self.union_orders if self.unions else self.orders
    # Anything that is not explicitly "asc" is treated as descending.
    normalized = "asc" if direction.lower() == "asc" else "desc"
    target.append({"column": column, "direction": normalized})
    return self
Add an order by clause to the query
37,034
def order_by_raw(self, sql, bindings=None):
    """Add a raw order by clause to the query."""
    self.orders.append({"type": "raw", "sql": sql})
    self.add_binding([] if bindings is None else bindings, "order")
    return self
Add a raw order by clause to the query
37,035
def get(self, columns=None):
    """Execute the query as a select statement."""
    if not columns:
        columns = ["*"]
    # Temporarily install the requested columns, then restore the originals.
    original = self.columns
    if not original:
        self.columns = columns
    results = self._processor.process_select(self, self._run_select())
    self.columns = original
    return Collection(results)
Execute the query as a select statement
37,036
def _run_select ( self ) : return self . _connection . select ( self . to_sql ( ) , self . get_bindings ( ) , not self . _use_write_connection )
Run the query as a select statement against the connection .
37,037
def exists(self):
    """Determine if any rows exist for the current query."""
    # Probe with LIMIT 1 and restore the previous limit afterwards.
    saved_limit = self.limit_
    found = self.limit(1).count() > 0
    self.limit(saved_limit)
    return found
Determine if any rows exist for the current query .
37,038
def count(self, *columns):
    """Retrieve the count result of the query."""
    # DISTINCT counts use the currently selected columns by default.
    if not columns and self.distinct_:
        columns = self.columns
    if not columns:
        columns = ["*"]
    return int(self.aggregate("count", *columns))
Retrieve the count result of the query
37,039
def aggregate(self, func, *columns):
    """Execute an aggregate function against the database."""
    if not columns:
        columns = ["*"]
    self.aggregate_ = {"function": func, "columns": columns}
    # Preserve and restore the column selection around the aggregate run.
    previous_columns = self.columns
    results = self.get(*columns).all()
    self.aggregate_ = None
    self.columns = previous_columns
    if len(results) > 0:
        # Normalize keys to lowercase so "AGGREGATE"/"aggregate" both match.
        row = dict((k.lower(), v) for k, v in results[0].items())
        return row["aggregate"]
Execute an aggregate function against the database
37,040
def insert(self, _values=None, **values):
    """Insert a new record into the database."""
    if not values and not _values:
        return True
    # Normalize input to a list of records.
    if isinstance(_values, list):
        values = _values
    else:
        if _values is not None:
            values.update(_values)
        values = [values]
    # Sort keys so the column order matches across all records.
    for i, record in enumerate(values):
        values[i] = OrderedDict(sorted(record.items()))
    bindings = [v for record in values for v in record.values()]
    sql = self._grammar.compile_insert(self, values)
    return self._connection.insert(sql, self._clean_bindings(bindings))
Insert a new record into the database
37,041
def insert_get_id(self, values, sequence=None):
    """Insert a new record and get the value of the primary key."""
    # Sort keys so bindings line up with the compiled column order.
    ordered = OrderedDict(sorted(values.items()))
    sql = self._grammar.compile_insert_get_id(self, ordered, sequence)
    bindings = self._clean_bindings(ordered.values())
    return self._processor.process_insert_get_id(self, sql, bindings, sequence)
Insert a new record and get the value of the primary key
37,042
def truncate(self):
    """Run a truncate statement on the table."""
    # Some platforms compile truncation into several statements.
    for sql, bindings in self._grammar.compile_truncate(self).items():
        self._connection.statement(sql, bindings)
Run a truncate statement on the table
37,043
def _clean_bindings(self, bindings):
    """Remove all of the expressions from the bindings."""
    # Raw expressions are inlined into the SQL, so they carry no binding.
    return [b for b in bindings if not isinstance(b, QueryExpression)]
Remove all of the expressions from bindings
37,044
def merge(self, query):
    """Merge the current query with another query builder.

    Bug fix: the original set ``self.offset_ = None`` when the other query
    had an offset, discarding it instead of copying it over (compare the
    symmetric handling of ``limit_``, ``union_limit`` and ``union_offset``).
    """
    self.columns += query.columns
    self.joins += query.joins
    self.wheres += query.wheres
    self.groups += query.groups
    self.havings += query.havings
    self.orders += query.orders
    self.distinct_ = query.distinct_
    if self.columns:
        # Deduplicate the merged column list.
        self.columns = Collection(self.columns).unique().all()
    if query.limit_:
        self.limit_ = query.limit_
    if query.offset_:
        # Was: self.offset_ = None — copy the merged query's offset instead.
        self.offset_ = query.offset_
    self.unions += query.unions
    if query.union_limit:
        self.union_limit = query.union_limit
    if query.union_offset:
        self.union_offset = query.union_offset
    self.union_orders += query.union_orders
    self.merge_bindings(query)
Merge current query with another .
37,045
def _set_name ( self , name ) : if self . _is_identifier_quoted ( name ) : self . _quoted = True name = self . _trim_quotes ( name ) if "." in name : parts = name . split ( "." , 1 ) self . _namespace = parts [ 0 ] name = parts [ 1 ] self . _name = name
Sets the name of this asset .
37,046
def _generate_identifier_name(self, columns, prefix="", max_size=30):
    """Generates an identifier from a list of column names obeying a
    certain string length."""
    # CRC32 of each column name, hex-encoded, keeps the name deterministic.
    digest = "".join("%x" % binascii.crc32(encode(str(column))) for column in columns)
    return (prefix + "_" + digest)[:max_size]
Generates an identifier from a list of column names obeying a certain string length .
37,047
def only_trashed(cls):
    """Get a new query builder that only includes soft deletes."""
    instance = cls()
    column = instance.get_qualified_deleted_at_column()
    query = instance.new_query_without_scope(SoftDeletingScope())
    # Soft-deleted rows are exactly those with a non-null deleted_at.
    return query.where_not_null(column)
Get a new query builder that only includes soft deletes
37,048
def connection(self, name=None):
    """Get a database connection instance."""
    name, type = self._parse_connection_name(name)
    # Lazily create and cache connections by name.
    if name not in self._connections:
        logger.debug("Initiating connection %s" % name)
        conn = self._make_connection(name)
        self._set_connection_for_type(conn, type)
        self._connections[name] = self._prepare(conn)
    return self._connections[name]
Get a database connection instance
37,049
def apply(self, builder, model):
    """Apply the scope to a given query builder."""
    # Exclude soft-deleted rows, then install the scope's extensions.
    builder.where_null(model.get_qualified_deleted_at_column())
    self.extend(builder)
Apply the scope to a given query builder .
37,050
def _on_delete ( self , builder ) : column = self . _get_deleted_at_column ( builder ) return builder . update ( { column : builder . get_model ( ) . fresh_timestamp ( ) } )
The delete replacement function .
37,051
def _get_deleted_at_column ( self , builder ) : if len ( builder . get_query ( ) . joins ) > 0 : return builder . get_model ( ) . get_qualified_deleted_at_column ( ) else : return builder . get_model ( ) . get_deleted_at_column ( )
Get the deleted at column for the builder .
37,052
def _restore ( self , builder ) : builder . with_trashed ( ) return builder . update ( { builder . get_model ( ) . get_deleted_at_column ( ) : None } )
The restore extension .
37,053
def has_table(self, table):
    """Determine if the given table exists."""
    sql = self._grammar.compile_table_exists()
    # The connection's table prefix is part of the physical table name.
    prefixed = self._connection.get_table_prefix() + table
    return len(self._connection.select(sql, [prefixed])) > 0
Determine if the given table exists .
37,054
def has_column(self, table, column):
    """Determine if the given table has a given column (case-insensitive)."""
    listing = [c.lower() for c in self.get_column_listing(table)]
    return column.lower() in listing
Determine if the given table has a given column .
37,055
def table(self, table):
    """Modify a table on the schema.

    Generator used as a context manager: yields a blueprint for the caller
    to mutate, then builds it on exit.

    Cleanup: the original wrapped both the yield and the build in
    ``try/except Exception: raise`` blocks that re-raised unchanged —
    dead code with identical semantics, removed here.
    """
    blueprint = self._create_blueprint(table)
    yield blueprint
    self._build(blueprint)
Modify a table on the schema .
37,056
def rename(self, from_, to):
    """Rename a table on the schema."""
    blueprint = self._create_blueprint(from_)
    blueprint.rename(to)
    self._build(blueprint)
Rename a table on the schema .
37,057
def _write_migration ( self , creator , name , table , create , path ) : file_ = os . path . basename ( creator . create ( name , path , table , create ) ) return file_
Write the migration file to disk .
37,058
def compile_delete(self, query):
    """Compile a delete statement into SQL."""
    table = self.wrap_table(query.from__)
    wheres = self._compile_wheres(query) if isinstance(query.wheres, list) else ""
    if query.joins:
        # Multi-table delete: "DELETE <t> FROM <t> <joins> <wheres>".
        joins = " %s" % self._compile_joins(query, query.joins)
        sql = "DELETE %s FROM %s%s %s" % (table, table, joins, wheres)
    else:
        sql = "DELETE FROM %s %s" % (table, wheres)
    sql = sql.strip()
    if query.orders:
        sql += " %s" % self._compile_orders(query, query.orders)
    if query.limit_:
        sql += " %s" % self._compile_limit(query, query.limit_)
    return sql
Compile a delete statement into SQL
37,059
def _check_config(self):
    """Check presence of default config files in the current directory."""
    current_path = os.path.relpath(os.getcwd())
    for candidate in ("orator.yml", "orator.py"):
        config_file = os.path.join(current_path, candidate)
        # First readable & valid config wins.
        if os.path.exists(config_file) and self._handle_config(config_file):
            return True
    return False
Check presence of default config files .
37,060
def _handle_config(self, config_file):
    """Check and handle a config file."""
    config = self._get_config(config_file)
    # Accept either lowercase or uppercase database section names.
    databases = config.get("databases", config.get("DATABASES", {}))
    self.resolver = DatabaseManager(databases)
    return True
Check and handle a config file .
37,061
def log(self, file, batch):
    """Log that a migration was run."""
    self.table().insert(migration=file, batch=batch)
Log that a migration was run .
37,062
def create_repository(self):
    """Create the migration repository data store."""
    schema = self.get_connection().get_schema_builder()
    # The repository table tracks which migration ran in which batch.
    with schema.create(self._table) as table:
        table.string("migration")
        table.integer("batch")
Create the migration repository data store .
37,063
def repository_exists(self):
    """Determine if the repository exists."""
    builder = self.get_connection().get_schema_builder()
    return builder.has_table(self._table)
Determine if the repository exists .
37,064
def create(self, name, path, table=None, create=False):
    """Create a new migration at the given path."""
    path = self._get_path(name, path)
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
        mkdir_p(directory)
    # Ensure the target directory is an importable package.
    package_init = os.path.join(directory, "__init__.py")
    if not os.path.exists(package_init):
        with open(package_init, "w"):
            pass
    stub = self._get_stub(table, create)
    with open(path, "w") as fh:
        fh.write(self._populate_stub(name, stub, table))
    return path
Create a new migration at the given path .
37,065
def _get_stub(self, table, create):
    """Get the migration stub template."""
    if table is None:
        return BLANK_STUB
    # Table given: pick between creating a new table or updating one.
    return CREATE_STUB if create else UPDATE_STUB
Get the migration stub template
37,066
def get_quoted_local_columns(self, platform):
    """Returns the quoted representation of the referencing table column
    names the foreign key constraint is associated with."""
    return [col.get_quoted_name(platform) for col in self._local_column_names.values()]
Returns the quoted representation of the referencing table column names the foreign key constraint is associated with .
37,067
def get_quoted_foreign_columns(self, platform):
    """Returns the quoted representation of the referenced table column
    names the foreign key constraint is associated with."""
    return [col.get_quoted_name(platform) for col in self._foreign_column_names.values()]
Returns the quoted representation of the referenced table column names the foreign key constraint is associated with .
37,068
def _on_event ( self , event ) : if self . has_option ( event ) : on_event = self . get_option ( event ) . upper ( ) if on_event not in [ "NO ACTION" , "RESTRICT" ] : return on_event return False
Returns the referential action for a given database operation on the referenced table the foreign key constraint is associated with .
37,069
def run(self, path, pretend=False):
    """Run the outstanding migrations for a given path."""
    self._notes = []
    files = self._get_migration_files(path)
    # Only run migrations that have not already been recorded as ran.
    already_ran = self._repository.get_ran()
    pending = [f for f in files if f not in already_ran]
    self.run_migration_list(path, pending, pretend)
Run the outstanding migrations for a given path .
37,070
def run_migration_list(self, path, migrations, pretend=False):
    """Run a list of migrations."""
    if not migrations:
        self._note("<info>Nothing to migrate</info>")
        return
    # All migrations in one run share the same batch number.
    batch = self._repository.get_next_batch_number()
    for migration in migrations:
        self._run_up(path, migration, batch, pretend)
Run a list of migrations .
37,071
def reset(self, path, pretend=False):
    """Rolls all of the currently applied migrations back."""
    self._notes = []
    # Roll back newest-first.
    migrations = sorted(self._repository.get_ran(), reverse=True)
    count = len(migrations)
    if count == 0:
        self._note("<info>Nothing to rollback.</info>")
    else:
        for migration in migrations:
            self._run_down(path, {"migration": migration}, pretend)
    return count
Rolls all of the currently applied migrations back .
37,072
def _get_migration_files ( self , path ) : files = glob . glob ( os . path . join ( path , "[0-9]*_*.py" ) ) if not files : return [ ] files = list ( map ( lambda f : os . path . basename ( f ) . replace ( ".py" , "" ) , files ) ) files = sorted ( files ) return files
Get all of the migration files in a given path .
37,073
def _compile_update_columns ( self , values ) : columns = [ ] for key , value in values . items ( ) : columns . append ( "%s = %s" % ( self . wrap ( key ) , self . parameter ( value ) ) ) return ", " . join ( columns )
Compile the columns for the update statement
37,074
def _compile_update_from ( self , query ) : if not query . joins : return "" froms = [ ] for join in query . joins : froms . append ( self . wrap_table ( join . table ) ) if len ( froms ) : return " FROM %s" % ", " . join ( froms ) return ""
Compile the from clause for an update with a join .
37,075
def _compile_update_wheres ( self , query ) : base_where = self . _compile_wheres ( query ) if not query . joins : return base_where join_where = self . _compile_update_join_wheres ( query ) if not base_where . strip ( ) : return "WHERE %s" % self . _remove_leading_boolean ( join_where ) return "%s %s" % ( base_where , join_where )
Compile the additional where clauses for updates with joins .
37,076
def _compile_update_join_wheres ( self , query ) : join_wheres = [ ] for join in query . joins : for clause in join . clauses : join_wheres . append ( self . _compile_join_constraints ( clause ) ) return " " . join ( join_wheres )
Compile the join clauses for an update .
37,077
def compile_insert_get_id(self, query, values, sequence=None):
    """Compile an insert-and-get-ID statement into SQL."""
    # The primary key column defaults to "id".
    sequence = sequence or "id"
    return "%s RETURNING %s" % (
        self.compile_insert(query, values),
        self.wrap(sequence),
    )
Compile an insert and get ID statement into SQL .
37,078
def qmark(cls, query):
    """Convert a qmark query into format style."""
    def _substitute(match):
        token = match.group(0)
        if token == "??":
            # Escaped question mark stays a literal "?".
            return "?"
        if token == "%":
            # Literal percent must be doubled for %-formatting.
            return "%%"
        return "%s"
    return cls.RE_QMARK.sub(_substitute, query)
Convert a qmark query into format style .
37,079
def touch(self):
    """Touch all of the related models for the relationship."""
    related = self.get_related()
    column = related.get_updated_at_column()
    self.raw_update({column: related.fresh_timestamp()})
Touch all of the related models for the relationship .
37,080
def raw_update(self, attributes=None):
    """Run a raw update against the base query.

    Returns None when there is no underlying query."""
    attributes = {} if attributes is None else attributes
    if self._query is not None:
        return self._query.update(attributes)
Run a raw update against the base query .
37,081
def wrap(self, value):
    """Wrap the given value with the parent's query grammar."""
    grammar = self._parent.new_query().get_query().get_grammar()
    return grammar.wrap(value)
Wrap the given value with the parent's query grammar.
37,082
def load(template):
    """Try to guess the input format: JSON first, YAML as a fallback.

    If both fail, the original JSON ValueError is raised."""
    try:
        return load_json(template), "json"
    except ValueError as json_error:
        try:
            return load_yaml(template), "yaml"
        except Exception:
            # Surface the JSON error, which is usually more informative.
            raise json_error
Try to guess the input format
37,083
def dump_yaml(data, clean_up=False, long_form=False):
    """Output some YAML."""
    return yaml.dump(
        data,
        Dumper=get_dumper(clean_up, long_form),
        default_flow_style=False,
        allow_unicode=True,
    )
Output some YAML
37,084
def to_json(template, clean_up=False):
    """Assume the input is YAML and convert to JSON."""
    data = load_yaml(template)
    if clean_up:
        data = clean(data)
    return dump_json(data)
Assume the input is YAML and convert to JSON
37,085
def to_yaml(template, clean_up=False, long_form=False):
    """Assume the input is JSON and convert to YAML."""
    data = load_json(template)
    if clean_up:
        data = clean(data)
    return dump_yaml(data, clean_up, long_form)
Assume the input is JSON and convert to YAML
37,086
def flip(template, in_format=None, out_format=None, clean_up=False, no_flip=False, long_form=False):
    """Figure out the input format and convert the data to the opposing
    output format (or the same format when no_flip is set)."""
    # Infer the input format from the requested output and the flip flag.
    if not in_format:
        if (out_format == "json" and no_flip) or (out_format == "yaml" and not no_flip):
            in_format = "json"
        elif (out_format == "yaml" and no_flip) or (out_format == "json" and not no_flip):
            in_format = "yaml"
    if in_format == "json":
        data = load_json(template)
    elif in_format == "yaml":
        data = load_yaml(template)
    else:
        data, in_format = load(template)
    if clean_up:
        data = clean(data)
    # Infer the output format from the (possibly detected) input format.
    if not out_format:
        if (in_format == "json" and no_flip) or (in_format == "yaml" and not no_flip):
            out_format = "json"
        else:
            out_format = "yaml"
    if out_format == "json":
        if sys.version[0] == "3":
            return dump_json(data)
        # Python 2: return encoded bytes.
        return dump_json(data).encode('utf-8')
    return dump_yaml(data, clean_up, long_form)
Figure out the input format and convert the data to the opposing output format
37,087
def convert_join(value):
    """Convert an Fn::Join into an Fn::Sub (or a plain string) when possible.

    Falls back to returning the original Fn::Join wrapper whenever a part
    cannot be expressed safely in Sub syntax."""
    if not isinstance(value, list) or len(value) != 2:
        return value
    sep, parts = value[0], value[1]
    if isinstance(parts, six.string_types):
        return parts
    if not isinstance(parts, list):
        return {
            "Fn::Join": value,
        }
    plain_string = True
    args = ODict()
    new_parts = []
    for part in parts:
        part = clean(part)
        if isinstance(part, dict):
            plain_string = False
            if "Ref" in part:
                new_parts.append("${{{}}}".format(part["Ref"]))
            elif "Fn::GetAtt" in part:
                params = part["Fn::GetAtt"]
                new_parts.append("${{{}}}".format(".".join(params)))
            else:
                # Reuse an existing Sub parameter for identical parts.
                for key, val in args.items():
                    if isinstance(val, dict):
                        # Bail out if a conditional may evaluate to AWS::NoValue.
                        if "Fn::If" in val and "AWS::NoValue" in str(val["Fn::If"]):
                            return {
                                "Fn::Join": value,
                            }
                    if val == part:
                        param_name = key
                        break
                else:
                    param_name = "Param{}".format(len(args) + 1)
                    args[param_name] = part
                new_parts.append("${{{}}}".format(param_name))
        elif isinstance(part, six.string_types):
            # Escape literal ${ so Sub does not interpret it.
            new_parts.append(part.replace("${", "${!"))
        else:
            return {"Fn::Join": value}
    source = sep.join(new_parts)
    if plain_string:
        return source
    if args:
        return ODict((("Fn::Sub", [source, args]),))
    return ODict((("Fn::Sub", source),))
Fix a Join ; )
37,088
def map_representer(dumper, value):
    """Deal with !Ref style function format and OrderedDict."""
    value = ODict(value.items())
    # Single-key maps may be intrinsic functions rendered with short syntax.
    if len(value.keys()) == 1:
        key = list(value.keys())[0]
        if key in CONVERTED_SUFFIXES:
            return fn_representer(dumper, key, value[key])
        if key.startswith(FN_PREFIX):
            # Strip the "Fn::" prefix for the short form.
            return fn_representer(dumper, key[4:], value[key])
    return dumper.represent_mapping(TAG_MAP, value, flow_style=False)
Deal with !Ref style function format and OrderedDict
37,089
def multi_constructor(loader, tag_suffix, node):
    """Deal with !Ref style function format."""
    # Most short-form tags map to "Fn::<name>"; a few keep their bare name.
    if tag_suffix not in UNCONVERTED_SUFFIXES:
        tag_suffix = "{}{}".format(FN_PREFIX, tag_suffix)
    if tag_suffix == "Fn::GetAtt":
        constructor = construct_getatt
    elif isinstance(node, yaml.ScalarNode):
        constructor = loader.construct_scalar
    elif isinstance(node, yaml.SequenceNode):
        constructor = loader.construct_sequence
    elif isinstance(node, yaml.MappingNode):
        constructor = loader.construct_mapping
    else:
        raise Exception("Bad tag: !{}".format(tag_suffix))
    return ODict(((tag_suffix, constructor(node)),))
Deal with !Ref style function format
37,090
def construct_getatt(node):
    """Reconstruct !GetAtt into a list."""
    if isinstance(node.value, six.text_type):
        # "Resource.Attribute" shorthand: split on the first dot only.
        return node.value.split(".", 1)
    if isinstance(node.value, list):
        return [item.value for item in node.value]
    raise ValueError("Unexpected node type: {}".format(type(node.value)))
Reconstruct !GetAtt into a list
37,091
def construct_mapping(self, node, deep=False):
    """Use ODict for maps to preserve key order."""
    mapping = ODict()
    for key_node, value_node in node.value:
        key = self.construct_object(key_node, deep=deep)
        mapping[key] = self.construct_object(value_node, deep=deep)
    return mapping
Use ODict for maps
37,092
def main(ctx, **kwargs):
    """
    AWS CloudFormation Template Flip is a tool that converts AWS
    CloudFormation templates between JSON and YAML formats, making use
    of the YAML format's short function syntax where possible.
    """
    in_format = kwargs.pop('in_format')
    out_format = kwargs.pop('out_format') or kwargs.pop('out_flag')
    no_flip = kwargs.pop('no_flip')
    clean_up = kwargs.pop('clean')
    long_form = kwargs.pop('long')
    source = kwargs.pop('input')
    destination = kwargs.pop('output')

    # Guess the input format from the file extension when not given.
    if not in_format:
        if source.name.endswith(".json"):
            in_format = "json"
        elif source.name.endswith((".yaml", ".yml")):
            in_format = "yaml"

    # Interactive invocation with nothing piped on stdin: show help, stop.
    if source.name == "<stdin>" and sys.stdin.isatty():
        click.echo(ctx.get_help())
        ctx.exit()

    try:
        converted = flip(
            source.read(),
            in_format=in_format,
            out_format=out_format,
            clean_up=clean_up,
            no_flip=no_flip,
            long_form=long_form,
        )
        destination.write(converted)
    except Exception as e:
        raise click.ClickException("{}".format(e))
AWS CloudFormation Template Flip is a tool that converts AWS CloudFormation templates between JSON and YAML formats, making use of the YAML format's short function syntax where possible.
37,093
def updateVersions(region="us-east-1", table="credential-store"):
    """
    Do a full-table scan of the credential-store and update the version
    format of every credential whose version is an integer, rewriting it
    as a zero-padded string so versions sort correctly.
    """
    dynamodb = boto3.resource('dynamodb', region_name=region)
    secrets = dynamodb.Table(table)

    # Scan pages until DynamoDB stops returning LastEvaluatedKey; a single
    # scan call only returns up to 1 MB of items, not the whole table.
    scan_args = dict(
        ProjectionExpression="#N, version, #K, contents, hmac",
        ExpressionAttributeNames={"#N": "name", "#K": "key"},
    )
    response = secrets.scan(**scan_args)
    items = response["Items"]
    while "LastEvaluatedKey" in response:
        response = secrets.scan(ExclusiveStartKey=response["LastEvaluatedKey"],
                                **scan_args)
        items.extend(response["Items"])

    for old_item in items:
        if isInt(old_item['version']):
            new_item = copy.copy(old_item)
            new_item['version'] = credstash.paddedInt(new_item['version'])
            if new_item['version'] != old_item['version']:
                # The version is part of the key, so migrate by writing the
                # padded row and deleting the old one.
                secrets.put_item(Item=new_item)
                secrets.delete_item(Key={'name': old_item['name'],
                                         'version': old_item['version']})
        else:
            # print() works on both Python 2 and 3; the old print-statement
            # form was a SyntaxError under Python 3.
            print("Skipping item: %s, %s" % (old_item['name'],
                                             old_item['version']))
Do a full-table scan of the credential-store and update the version format of every credential whose version is an integer.
37,094
def paddedInt(i):
    """
    Return str(i) left-padded with '0' characters up to PAD_LEN digits.
    Values already PAD_LEN characters or longer are returned unpadded.
    """
    # '0>' pads on the left with zeroes without any sign handling, exactly
    # matching manual '0' * pad concatenation.
    return "{:0>{width}}".format(str(i), width=PAD_LEN)
Return a string that contains i left-padded with 0s up to PAD_LEN digits.
37,095
def getHighestVersion(name, region=None, table="credential-store", **kwargs):
    """
    Return the highest version of `name` in the table, or 0 when the
    credential does not exist.
    """
    session = get_session(**kwargs)
    secrets = session.resource('dynamodb', region_name=region).Table(table)

    # Descending key order plus Limit=1 fetches only the newest version.
    response = secrets.query(
        Limit=1,
        ScanIndexForward=False,
        ConsistentRead=True,
        KeyConditionExpression=boto3.dynamodb.conditions.Key("name").eq(name),
        ProjectionExpression="version",
    )

    if response["Count"] != 0:
        return response["Items"][0]["version"]
    return 0
Return the highest version of name in the table
37,096
def clean_fail(func):
    """
    A decorator to cleanly exit on a failed call to AWS.

    Catches a botocore.exceptions.ClientError raised from the wrapped
    action (e.g. when targeting a region that isn't set up; see
    `credstash setup`), prints the error to stderr, and exits with
    status 1 instead of dumping a traceback.
    """
    # Local import keeps this block self-contained; functools is stdlib.
    import functools

    @functools.wraps(func)  # preserve the wrapped function's name/docstring
    def func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except botocore.exceptions.ClientError as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)
    return func_wrapper
A decorator to cleanly exit on a failed call to AWS. Catches a botocore.exceptions.ClientError raised from an action. This sort of error is raised if you are targeting a region that isn't set up (see `credstash setup`).
37,097
def listSecrets(region=None, table="credential-store", **kwargs):
    """
    Do a full-table scan of the credential-store and return the names,
    versions, and comments of every credential.
    """
    session = get_session(**kwargs)
    secrets = session.resource('dynamodb', region_name=region).Table(table)

    items = []
    start_key = None
    while True:
        scan_args = dict(
            ProjectionExpression="#N, version, #C",
            ExpressionAttributeNames={"#N": "name", "#C": "comment"},
        )
        if start_key is not None:
            # Resume the scan from where the previous page stopped.
            scan_args['ExclusiveStartKey'] = start_key

        response = secrets.scan(**scan_args)
        items.extend(response['Items'])

        start_key = response.get('LastEvaluatedKey')
        if not start_key:
            break

    return items
do a full - table scan of the credential - store and return the names and versions of every credential
37,098
def putSecret(name, secret, version="", kms_key="alias/credstash",
              region=None, table="credential-store", context=None,
              digest=DEFAULT_DIGEST, comment="", **kwargs):
    """
    Put a secret called `name` into the secret-store, protected by the
    key `kms_key`.
    """
    context = context or {}
    session = get_session(**kwargs)

    # Encrypt and authenticate the plaintext via KMS-backed key material.
    kms = session.client('kms', region_name=region)
    sealed = seal_aes_ctr_legacy(
        KeyService(kms, kms_key, context),
        secret,
        digest_method=digest,
    )

    item = {
        'name': name,
        'version': paddedInt(version),
    }
    if comment:
        item['comment'] = comment
    item.update(sealed)

    secrets = session.resource('dynamodb', region_name=region).Table(table)

    # The condition makes the write fail instead of silently overwriting
    # an existing name/version row.
    return secrets.put_item(Item=item,
                            ConditionExpression=Attr('name').not_exists())
put a secret called name into the secret - store protected by the key kms_key
37,099
def getAllSecrets(version="", region=None, table="credential-store",
                  context=None, credential=None, session=None, **kwargs):
    """
    Fetch and decrypt all secrets.

    Returns a dict mapping credential name -> decrypted value.  When
    `credential` contains a wildcard character, only names matching the
    pattern are fetched.
    """
    if session is None:
        session = get_session(**kwargs)
    dynamodb = session.resource('dynamodb', region_name=region)
    kms = session.client('kms', region_name=region)
    secrets = listSecrets(region, table, **kwargs)

    # Deduplicate names (the table stores one row per version); keep a
    # list so the zip() below pairs names with results deterministically.
    if credential and WILDCARD_CHAR in credential:
        names = list(set(expand_wildcard(credential,
                                         [x["name"] for x in secrets])))
    else:
        names = list(set(x["name"] for x in secrets))

    if not names:
        # ThreadPool(0) raises ValueError, so short-circuit an empty store.
        return {}

    pool = ThreadPool(min(len(names), THREAD_POOL_MAX_SIZE))
    results = pool.map(
        lambda credential: getSecret(credential, version, region, table,
                                     context, dynamodb, kms, **kwargs),
        names)
    pool.close()
    pool.join()

    return dict(zip(names, results))
fetch and decrypt all secrets