idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
29,800
def generate_categories ( ) : code_points_ranges = [ ] iso_15924_aliases = [ ] categories = [ ] match = re . compile ( r'([0-9A-F]+)(?:\.\.([0-9A-F]+))?\W+(\w+)\s*#\s*(\w+)' , re . UNICODE ) url = 'ftp://ftp.unicode.org/Public/UNIDATA/Scripts.txt' file = get ( url ) for line in file : p = re . findall ( match , line ) ...
Generates the categories JSON data file from the unicode specification .
29,801
def docstring_to_markdown ( docstring ) : new_docstring_lst = [ ] for idx , line in enumerate ( docstring . split ( '\n' ) ) : line = line . strip ( ) if set ( line ) in ( { '-' } , { '=' } ) : new_docstring_lst [ idx - 1 ] = '**%s**' % new_docstring_lst [ idx - 1 ] elif line . startswith ( '>>>' ) : line = ' %s' % ...
Convert a Python object s docstring to markdown
29,802
def object_to_markdownpage ( obj_name , obj , s = '' ) : s += '## %s\n' % obj_name sig = str ( inspect . signature ( obj ) ) . replace ( '(self, ' , '(' ) s += '\n*%s%s*\n\n' % ( obj_name , sig ) doc = str ( inspect . getdoc ( obj ) ) ds = docstring_to_markdown ( doc ) s += '\n' . join ( ds ) if inspect . isclass ( obj...
Generate the markdown documentation of a Python object .
29,803
def import_package ( rel_path_to_package , package_name ) : try : curr_dir = os . path . dirname ( os . path . realpath ( __file__ ) ) except NameError : curr_dir = os . path . dirname ( os . path . realpath ( os . getcwd ( ) ) ) package_path = os . path . join ( curr_dir , rel_path_to_package ) if package_path not in ...
Imports a python package into the current namespace .
29,804
def get_functions_and_classes ( package ) : classes , functions = [ ] , [ ] for name , member in inspect . getmembers ( package ) : if not name . startswith ( '_' ) : if inspect . isclass ( member ) : classes . append ( [ name , member ] ) elif inspect . isfunction ( member ) : functions . append ( [ name , member ] ) ...
Return lists of functions and classes from a package .
29,805
def generate_api_docs ( package , api_dir , clean = False , printlog = True ) : if printlog : print ( '\n\nGenerating Module Files\n%s\n' % ( 50 * '=' ) ) prefix = package . __name__ + "." if clean : if os . path . isdir ( api_dir ) : shutil . rmtree ( api_dir ) api_docs = { } for importer , pkg_name , is_pkg in pkguti...
Generate a module level API documentation of a python package .
29,806
def summarize_methdods_and_functions ( api_modules , out_dir , printlog = False , clean = True , str_above_header = '' ) : if printlog : print ( '\n\nGenerating Subpackage Files\n%s\n' % ( 50 * '=' ) ) if clean : if os . path . isdir ( out_dir ) : shutil . rmtree ( out_dir ) if not os . path . isdir ( out_dir ) : os . ...
Generates subpackage - level summary files .
29,807
def fetch_pdb ( self , pdb_code ) : self . pdb_path , self . pdb_text = self . _fetch_pdb ( pdb_code ) self . _df = self . _construct_df ( pdb_lines = self . pdb_text . splitlines ( True ) ) return self
Fetches PDB file contents from the Protein Databank at rcsb . org .
29,808
def get ( self , s , df = None , invert = False , records = ( 'ATOM' , 'HETATM' ) ) : if isinstance ( records , str ) : warnings . warn ( 'Using a string as `records` argument is ' 'deprecated and will not be supported in future' ' versions. Please use a tuple or' ' other iterable instead' , DeprecationWarning ) record...
Filter PDB DataFrames by properties
29,809
def impute_element ( self , records = ( 'ATOM' , 'HETATM' ) , inplace = False ) : if inplace : t = self . df else : t = self . df . copy ( ) for d in self . df : t [ d ] = self . df [ d ] . copy ( ) for sec in records : t [ sec ] [ 'element_symbol' ] = t [ sec ] [ [ 'atom_name' , 'element_symbol' ] ] . apply ( lambda x...
Impute element_symbol from atom_name section .
29,810
def rmsd ( df1 , df2 , s = None , invert = False ) : if df1 . shape [ 0 ] != df2 . shape [ 0 ] : raise AttributeError ( 'DataFrames have unequal lengths' ) get_dict = PandasPdb . _init_get_dict ( ) if s : if s not in get_dict . keys ( ) : raise AttributeError ( 's must be in ' '%s or None' % get_dict . keys ( ) ) df1 =...
Compute the Root Mean Square Deviation between molecules .
29,811
def _init_get_dict ( ) : get_dict = { 'main chain' : PandasPdb . _get_mainchain , 'hydrogen' : PandasPdb . _get_hydrogen , 'c-alpha' : PandasPdb . _get_calpha , 'carbon' : PandasPdb . _get_carbon , 'heavy' : PandasPdb . _get_heavy } return get_dict
Initialize dictionary for filter operations .
29,812
def _read_pdb ( path ) : r_mode = 'r' openf = open if path . endswith ( '.gz' ) : r_mode = 'rb' openf = gzip . open with openf ( path , r_mode ) as f : txt = f . read ( ) if path . endswith ( '.gz' ) : if sys . version_info [ 0 ] >= 3 : txt = txt . decode ( 'utf-8' ) else : txt = txt . encode ( 'ascii' ) return path , ...
Read PDB file from local drive .
29,813
def _fetch_pdb ( pdb_code ) : txt = None url = 'http://www.rcsb.org/pdb/files/%s.pdb' % pdb_code . lower ( ) try : response = urlopen ( url ) txt = response . read ( ) if sys . version_info [ 0 ] >= 3 : txt = txt . decode ( 'utf-8' ) else : txt = txt . encode ( 'ascii' ) except HTTPError as e : print ( 'HTTP Error %s' ...
Load PDB file from rcsb . org .
29,814
def _parse_header_code ( self ) : code , header = '' , '' if 'OTHERS' in self . df : header = ( self . df [ 'OTHERS' ] [ self . df [ 'OTHERS' ] [ 'record_name' ] == 'HEADER' ] ) if not header . empty : header = header [ 'entry' ] . values [ 0 ] s = header . split ( ) if s : code = s [ - 1 ] . lower ( ) return header , ...
Extract header information and PDB code .
29,815
def _get_mainchain ( df , invert ) : if invert : mc = df [ ( df [ 'atom_name' ] != 'C' ) & ( df [ 'atom_name' ] != 'O' ) & ( df [ 'atom_name' ] != 'N' ) & ( df [ 'atom_name' ] != 'CA' ) ] else : mc = df [ ( df [ 'atom_name' ] == 'C' ) | ( df [ 'atom_name' ] == 'O' ) | ( df [ 'atom_name' ] == 'N' ) | ( df [ 'atom_name' ...
Return only main chain atom entries from a DataFrame
29,816
def amino3to1 ( self , record = 'ATOM' , residue_col = 'residue_name' , fillna = '?' ) : tmp = self . df [ record ] cmp = 'placeholder' indices = [ ] for num , ind in zip ( tmp [ 'residue_number' ] , np . arange ( tmp . shape [ 0 ] ) ) : if num != cmp : indices . append ( ind ) cmp = num transl = tmp . iloc [ indices ]...
Creates 1 - letter amino acid codes from DataFrame
29,817
def to_pdb ( self , path , records = None , gz = False , append_newline = True ) : if gz : openf = gzip . open w_mode = 'wt' else : openf = open w_mode = 'w' if not records : records = self . df . keys ( ) dfs = { r : self . df [ r ] . copy ( ) for r in records if not self . df [ r ] . empty } for r in dfs . keys ( ) :...
Write record DataFrames to a PDB file or gzipped PDB file .
29,818
def split_multimol2 ( mol2_path ) : r if mol2_path . endswith ( '.gz' ) : open_file = gzip . open read_mode = 'rb' else : open_file = open read_mode = 'r' check = { 'rb' : b'@<TRIPOS>MOLECULE' , 'r' : '@<TRIPOS>MOLECULE' } with open_file ( mol2_path , read_mode ) as f : mol2 = [ '' , [ ] ] while True : try : line = nex...
r Splits a multi - mol2 file into individual Mol2 file contents .
29,819
def _load_mol2 ( self , mol2_lines , mol2_code , columns ) : if columns is None : col_names = COLUMN_NAMES col_types = COLUMN_TYPES else : col_names , col_types = [ ] , [ ] for i in range ( len ( columns ) ) : col_names . append ( columns [ i ] [ 0 ] ) col_types . append ( columns [ i ] [ 1 ] ) try : self . mol2_text =...
Load mol2 contents into assert_raise_message instance
29,820
def read_mol2_from_list ( self , mol2_lines , mol2_code , columns = None ) : r self . _load_mol2 ( mol2_lines , mol2_code , columns ) return self
r Reads Mol2 file from a list into DataFrames
29,821
def _get_atomsection ( mol2_lst ) : started = False for idx , s in enumerate ( mol2_lst ) : if s . startswith ( '@<TRIPOS>ATOM' ) : first_idx = idx + 1 started = True elif started and s . startswith ( '@<TRIPOS>' ) : last_idx_plus1 = idx break return mol2_lst [ first_idx : last_idx_plus1 ]
Returns atom section from mol2 provided as list of strings
29,822
def rmsd ( df1 , df2 , heavy_only = True ) : if df1 . shape [ 0 ] != df2 . shape [ 0 ] : raise AttributeError ( 'DataFrames have unequal lengths' ) if heavy_only : d1 = df1 [ df1 [ 'atom_type' ] != 'H' ] d2 = df2 [ df2 [ 'atom_type' ] != 'H' ] else : d1 , d2 = df1 , df2 total = ( ( d1 [ 'x' ] . values - d2 [ 'x' ] . va...
Compute the Root Mean Square Deviation between molecules
29,823
def distance ( self , xyz = ( 0.00 , 0.00 , 0.00 ) ) : return np . sqrt ( np . sum ( self . df [ [ 'x' , 'y' , 'z' ] ] . subtract ( xyz , axis = 1 ) ** 2 , axis = 1 ) )
Computes Euclidean distance between atoms in self . df and a 3D point .
29,824
def parse_yaml_file ( self , path ) : with open ( path , 'r' ) as fp : data = yaml . safe_load ( fp ) if not data : return { } def traverse ( namespace , d ) : cfg = { } for key , val in d . items ( ) : if isinstance ( d [ key ] , dict ) : cfg . update ( traverse ( namespace + [ key ] , d [ key ] ) ) else : if not isin...
Parse yaml file at path and return a dict .
29,825
def import_class ( clspath ) : modpath , clsname = split_clspath ( clspath ) __import__ ( modpath ) module = sys . modules [ modpath ] return getattr ( module , clsname )
Given a clspath returns the class .
29,826
def upper_lower_none ( arg ) : if not arg : return arg arg = arg . strip ( ) . lower ( ) if arg in [ 'upper' , 'lower' ] : return arg raise ValueError ( 'argument must be "upper", "lower" or None' )
Validate arg value as upper lower or None .
29,827
def setup ( app ) : app . add_domain ( EverettDomain ) app . add_directive ( 'autocomponent' , AutoComponentDirective ) return { 'version' : __version__ , 'parallel_read_safe' : True , 'parallel_write_safe' : True }
Register domain and directive in Sphinx .
29,828
def handle_signature ( self , sig , signode ) : if sig != 'Configuration' : signode . clear ( ) signode += addnodes . desc_annotation ( 'component ' , 'component ' ) if '.' in sig : modname , clsname = sig . rsplit ( '.' , 1 ) else : modname , clsname = '' , sig if modname : signode += addnodes . desc_addname ( modname...
Create a signature for this thing .
29,829
def add_target_and_index ( self , name , sig , signode ) : targetname = '%s-%s' % ( self . objtype , name ) if targetname not in self . state . document . ids : signode [ 'names' ] . append ( targetname ) signode [ 'ids' ] . append ( targetname ) signode [ 'first' ] = ( not self . names ) self . state . document . note...
Add a target and index for this thing .
29,830
def add_line ( self , line , source , * lineno ) : self . result . append ( line , source , * lineno )
Add a line to the result
29,831
def generate_docs ( self , clspath , more_content ) : obj = import_class ( clspath ) sourcename = 'docstring of %s' % clspath all_options = [ ] indent = ' ' config = obj . get_required_config ( ) if config . options : for option in config : if 'namespace' in self . options : namespaced_key = self . options [ 'namesp...
Generate documentation for this configman class
29,832
def qualname ( thing ) : parts = [ ] mod = inspect . getmodule ( thing ) if mod and mod . __name__ not in ( '__main__' , '__builtin__' , 'builtins' ) : parts . append ( mod . __name__ ) if hasattr ( thing , '__qualname__' ) : parts . append ( thing . __qualname__ ) return '.' . join ( parts ) if inspect . ismodule ( th...
Return the dot name for a given thing .
29,833
def parse_bool ( val ) : true_vals = ( 't' , 'true' , 'yes' , 'y' , '1' , 'on' ) false_vals = ( 'f' , 'false' , 'no' , 'n' , '0' , 'off' ) val = val . lower ( ) if val in true_vals : return True if val in false_vals : return False raise ValueError ( '"%s" is not a valid bool value' % val )
Parse a bool value .
29,834
def parse_env_file ( envfile ) : data = { } for line_no , line in enumerate ( envfile ) : line = line . strip ( ) if not line or line . startswith ( '#' ) : continue if '=' not in line : raise ConfigurationError ( 'Env file line missing = operator (line %s)' % ( line_no + 1 ) ) k , v = line . split ( '=' , 1 ) k = k . ...
Parse the content of an iterable of lines as . env .
29,835
def parse_class ( val ) : module , class_name = val . rsplit ( '.' , 1 ) module = importlib . import_module ( module ) try : return getattr ( module , class_name ) except AttributeError : raise ValueError ( '"%s" is not a valid member of %s' % ( class_name , qualname ( module ) ) )
Parse a string imports the module and returns the class .
29,836
def generate_uppercase_key ( key , namespace = None ) : if namespace : namespace = [ part for part in listify ( namespace ) if part ] key = '_' . join ( namespace + [ key ] ) key = key . upper ( ) return key
Given a key and a namespace generates a final uppercase key .
29,837
def get_key_from_envs ( envs , key ) : if hasattr ( envs , 'get' ) : envs = [ envs ] for env in envs : if key in env : return env [ key ] return NO_VALUE
Return the value of a key from the given dict respecting namespaces .
29,838
def with_options ( self , component ) : options = component . get_required_config ( ) component_name = _get_component_name ( component ) return BoundConfig ( self . _get_base_config ( ) , component_name , options )
Apply options component options to this configuration .
29,839
def decorate ( self , fun ) : @ wraps ( fun ) def _decorated ( * args , ** kwargs ) : self . push_config ( ) try : return fun ( * args , ** kwargs ) finally : self . pop_config ( ) return _decorated
Decorate a function for overriding configuration .
29,840
def add_option ( self , key , default = NO_VALUE , alternate_keys = NO_VALUE , doc = '' , parser = str , ** meta ) : option = Option ( key , default , alternate_keys , doc , parser , meta ) self . options [ key ] = option
Add an option to the group .
29,841
def update ( self , new_options ) : for option in new_options : if option . key in self . options : del self . options [ option . key ] self . options [ option . key ] = option
Update this ConfigOptions using data from another .
29,842
def get_required_config ( cls ) : options = ConfigOptions ( ) for cls in reversed ( cls . __mro__ ) : try : options . update ( cls . required_config ) except AttributeError : pass return options
Roll up configuration options for this class and parent classes .
29,843
def get_runtime_config ( self , namespace = None ) : namespace = namespace or [ ] cfg = getattr ( self , 'config' , None ) if cfg is None or not isinstance ( cfg , BoundConfig ) : return for key , opt in self . get_required_config ( ) . options . items ( ) : yield ( namespace , key , self . config ( key , raise_error =...
Roll up the runtime config for this class and all children .
29,844
def parse_ini_file ( self , path ) : cfgobj = ConfigObj ( path , list_values = False ) def extract_section ( namespace , d ) : cfg = { } for key , val in d . items ( ) : if isinstance ( d [ key ] , dict ) : cfg . update ( extract_section ( namespace + [ key ] , d [ key ] ) ) else : cfg [ '_' . join ( namespace + [ key ...
Parse ini file at path and return dict .
29,845
def team ( self , team , simple = False ) : return Team ( self . _get ( 'team/%s%s' % ( self . team_key ( team ) , '/simple' if simple else '' ) ) )
Get data on a single specified team .
29,846
def team_events ( self , team , year = None , simple = False , keys = False ) : if year : if keys : return self . _get ( 'team/%s/events/%s/keys' % ( self . team_key ( team ) , year ) ) else : return [ Event ( raw ) for raw in self . _get ( 'team/%s/events/%s%s' % ( self . team_key ( team ) , year , '/simple' if simple...
Get team events a team has participated in .
29,847
def team_awards ( self , team , year = None , event = None ) : if event : return [ Award ( raw ) for raw in self . _get ( 'team/%s/event/%s/awards' % ( self . team_key ( team ) , event ) ) ] else : if year : return [ Award ( raw ) for raw in self . _get ( 'team/%s/awards/%s' % ( self . team_key ( team ) , year ) ) ] el...
Get list of awards team has received .
29,848
def team_matches ( self , team , event = None , year = None , simple = False , keys = False ) : if event : if keys : return self . _get ( 'team/%s/event/%s/matches/keys' % ( self . team_key ( team ) , event ) ) else : return [ Match ( raw ) for raw in self . _get ( 'team/%s/event/%s/matches%s' % ( self . team_key ( tea...
Get list of matches team has participated in .
29,849
def team_media ( self , team , year = None , tag = None ) : return [ Media ( raw ) for raw in self . _get ( 'team/%s/media%s%s' % ( self . team_key ( team ) , ( '/tag/%s' % tag ) if tag else '' , ( '/%s' % year ) if year else '' ) ) ]
Get media for a given team .
29,850
def team_robots ( self , team ) : return [ Robot ( raw ) for raw in self . _get ( 'team/%s/robots' % self . team_key ( team ) ) ]
Get data about a team s robots .
29,851
def team_districts ( self , team ) : return [ District ( raw ) for raw in self . _get ( 'team/%s/districts' % self . team_key ( team ) ) ]
Get districts a team has competed in .
29,852
def team_profiles ( self , team ) : return [ Profile ( raw ) for raw in self . _get ( 'team/%s/social_media' % self . team_key ( team ) ) ]
Get team s social media profiles linked on their TBA page .
29,853
def team_status ( self , team , event ) : return Status ( self . _get ( 'team/%s/event/%s/status' % ( self . team_key ( team ) , event ) ) )
Get status of a team at an event .
29,854
def events ( self , year , simple = False , keys = False ) : if keys : return self . _get ( 'events/%s/keys' % year ) else : return [ Event ( raw ) for raw in self . _get ( 'events/%s%s' % ( year , '/simple' if simple else '' ) ) ]
Get a list of events in a given year .
29,855
def event ( self , event , simple = False ) : return Event ( self . _get ( 'event/%s%s' % ( event , '/simple' if simple else '' ) ) )
Get basic information about an event .
29,856
def event_teams ( self , event , simple = False , keys = False ) : if keys : return self . _get ( 'event/%s/teams/keys' % event ) else : return [ Team ( raw ) for raw in self . _get ( 'event/%s/teams%s' % ( event , '/simple' if simple else '' ) ) ]
Get list of teams at an event .
29,857
def event_matches ( self , event , simple = False , keys = False ) : if keys : return self . _get ( 'event/%s/matches/keys' % event ) else : return [ Match ( raw ) for raw in self . _get ( 'event/%s/matches%s' % ( event , '/simple' if simple else '' ) ) ]
Get list of matches played at an event .
29,858
def match ( self , key = None , year = None , event = None , type = 'qm' , number = None , round = None , simple = False ) : if key : return Match ( self . _get ( 'match/%s%s' % ( key , '/simple' if simple else '' ) ) ) else : return Match ( self . _get ( 'match/{year}{event}_{type}{number}{round}{simple}' . format ( y...
Get data on a match .
29,859
def district_events ( self , district , simple = False , keys = False ) : if keys : return self . _get ( 'district/%s/events/keys' % district ) else : return [ Event ( raw ) for raw in self . _get ( 'district/%s/events%s' % ( district , '/simple' if simple else '' ) ) ]
Return list of events in a given district .
29,860
def district_teams ( self , district , simple = False , keys = False ) : if keys : return self . _get ( 'district/%s/teams/keys' % district ) else : return [ Team ( raw ) for raw in self . _get ( 'district/%s/teams' % district ) ]
Get list of teams in the given district .
29,861
def update_trusted ( self , auth_id , auth_secret , event_key ) : self . session . headers . update ( { 'X-TBA-Auth-Id' : auth_id } ) self . auth_secret = auth_secret self . event_key = event_key
Set Trusted API ID and Secret and the event key they are assigned to .
29,862
def delete_event_matches ( self , data = None ) : return self . _post ( 'event/%s/matches/delete_all' if data is None else 'event/%s/matches/delete' , json . dumps ( self . event_key ) if data is None else json . dumps ( data ) )
Delete an event s matches on The Blue Alliance .
29,863
def from_where ( cls , where ) : if where . conjunction : return Conjunction . from_clause ( where ) else : return cls . from_clause ( where [ 0 ] )
Factory method for creating the top - level expression
29,864
def _factory ( cls , constraints , op ) : pieces = [ ] for i , constraint in enumerate ( constraints ) : pieces . append ( constraint ) if i != len ( constraints ) - 1 : pieces . append ( op ) return cls ( pieces )
Factory for joining constraints with a single conjunction
29,865
def remove_index ( self , index ) : query = [ ] remainder = [ ] for i in range ( 0 , len ( self . pieces ) , 2 ) : const = self . pieces [ i ] if const . hash_field == index . hash_key : query . append ( const ) elif index . range_key is not None and const . range_field == index . range_key : query . append ( const ) e...
This one takes some explanation . When we do a query with a WHERE statement it may end up being a scan and it may end up being a query . If it is a query we need to remove the hash and range key constraints from the expression and return that as the query_constraints . The remaining constraints if any are returned as t...
29,866
def default ( value ) : if isinstance ( value , Decimal ) : primative = float ( value ) if int ( primative ) == primative : return int ( primative ) else : return primative elif isinstance ( value , set ) : return list ( value ) elif isinstance ( value , Binary ) : return b64encode ( value . value ) raise TypeError ( "...
Default encoder for JSON
29,867
def add_query_kwargs ( kwargs , visitor , constraints , index ) : ( query_const , filter_const ) = constraints . remove_index ( index ) kwargs [ "key_condition_expr" ] = query_const . build ( visitor ) if filter_const : kwargs [ "filter" ] = filter_const . build ( visitor ) if index . name != "TABLE" : kwargs [ "index"...
Construct KeyConditionExpression and FilterExpression
29,868
def iter_insert_items ( tree ) : if tree . list_values : keys = tree . attrs for values in tree . list_values : if len ( keys ) != len ( values ) : raise SyntaxError ( "Values '%s' do not match attributes " "'%s'" % ( values , keys ) ) yield dict ( zip ( keys , map ( resolve , values ) ) ) elif tree . map_values : for ...
Iterate over the items to insert from an INSERT statement
29,869
def connect ( self , * args , ** kwargs ) : self . connection = DynamoDBConnection . connect ( * args , ** kwargs ) self . _session = kwargs . get ( "session" ) if self . _session is None : self . _session = botocore . session . get_session ( )
Proxy to DynamoDBConnection . connect .
29,870
def connection ( self , connection ) : if connection is not None : connection . subscribe ( "capacity" , self . _on_capacity_data ) connection . default_return_capacity = True if self . _connection is not None : connection . unsubscribe ( "capacity" , self . _on_capacity_data ) self . _connection = connection self . _c...
Change the dynamo connection
29,871
def cloudwatch_connection ( self ) : if self . _cloudwatch_connection is None : conn = self . _session . create_client ( "cloudwatch" , self . connection . region ) self . _cloudwatch_connection = conn return self . _cloudwatch_connection
Lazy create a connection to cloudwatch
29,872
def _format_explain ( self ) : lines = [ ] for ( command , kwargs ) in self . _call_list : lines . append ( command + " " + pformat ( kwargs ) ) return "\n" . join ( lines )
Format the results of an EXPLAIN
29,873
def _pretty_format ( self , statement , result ) : if result is None : return "Success" ret = result if statement . action in ( "SELECT" , "SCAN" ) : if statement . save_file : filename = statement . save_file [ 0 ] if filename [ 0 ] in [ '"' , "'" ] : filename = unwrap ( filename ) ret = "Saved %d record%s to %s" % ( ...
Format the return value of a query for humans
29,874
def describe_all ( self , refresh = True ) : tables = self . connection . list_tables ( ) descs = [ ] for tablename in tables : descs . append ( self . describe ( tablename , refresh ) ) return descs
Describe all tables in the connected region
29,875
def execute ( self , commands , pretty_format = False ) : tree = parser . parseString ( commands ) self . consumed_capacities = [ ] self . _analyzing = False self . _query_rate_limit = None for statement in tree : try : result = self . _run ( statement ) except ExplainSignal : return self . _format_explain ( ) if prett...
Parse and run a DQL string
29,876
def _run ( self , tree ) : if tree . throttle : limiter = self . _parse_throttle ( tree . table , tree . throttle ) self . _query_rate_limit = limiter del tree [ "throttle" ] return self . _run ( tree ) if tree . action == "SELECT" : return self . _select ( tree , self . allow_select_scan ) elif tree . action == "SCAN"...
Run a query from a parse tree
29,877
def _parse_throttle ( self , tablename , throttle ) : amount = [ ] desc = self . describe ( tablename ) throughputs = [ desc . read_throughput , desc . write_throughput ] for value , throughput in zip ( throttle [ 1 : ] , throughputs ) : if value == "*" : amount . append ( 0 ) elif value [ - 1 ] == "%" : amount . appen...
Parse a throttle statement and return a RateLimit
29,878
def _on_capacity_data ( self , conn , command , kwargs , response , capacity ) : if self . _analyzing : self . consumed_capacities . append ( ( command , capacity ) ) if self . _query_rate_limit is not None : self . _query_rate_limit . on_capacity ( conn , command , kwargs , response , capacity ) elif self . rate_limit...
Log the received consumed capacity data
29,879
def _on_throttle ( self , conn , command , kwargs , response , capacity , seconds ) : LOG . info ( "Throughput limit exceeded during %s. " "Sleeping for %d second%s" , command , seconds , plural ( seconds ) , )
Print out a message when the query is throttled
29,880
def _explain ( self , tree ) : self . _explaining = True self . _call_list = [ ] old_call = self . connection . call def fake_call ( command , ** kwargs ) : if command == "describe_table" : return old_call ( command , ** kwargs ) self . _call_list . append ( ( command , kwargs ) ) raise ExplainSignal self . connection ...
Set up the engine to do a dry run of a query
29,881
def _iter_where_in ( self , tree ) : desc = self . describe ( tree . table , require = True ) for keypair in tree . keys_in : yield desc . primary_key ( * map ( resolve , keypair ) )
Iterate over the KEYS IN and generate primary keys
29,882
def _query_and_op ( self , tree , table , method_name , method_kwargs ) : result = [ ] if tree . keys_in : if tree . using : raise SyntaxError ( "Cannot use USING with KEYS IN" ) keys = self . _iter_where_in ( tree ) else : visitor = Visitor ( self . reserved_words ) ( action , kwargs , _ ) = self . _build_query ( tabl...
Query the table and perform an operation on each item
29,883
def _delete ( self , tree ) : tablename = tree . table table = self . describe ( tablename , require = True ) kwargs = { } visitor = Visitor ( self . reserved_words ) if tree . where : constraints = ConstraintExpression . from_where ( tree . where ) kwargs [ "condition" ] = constraints . build ( visitor ) kwargs [ "exp...
Run a DELETE statement
29,884
def _update ( self , tree ) : tablename = tree . table table = self . describe ( tablename , require = True ) kwargs = { } if tree . returns : kwargs [ "returns" ] = "_" . join ( tree . returns ) else : kwargs [ "returns" ] = "NONE" visitor = Visitor ( self . reserved_words ) updates = UpdateExpression . from_update ( ...
Run an UPDATE statement
29,885
def _parse_global_index ( self , clause , attrs ) : index_type , name = clause [ : 2 ] name = resolve ( name ) def get_key ( field , data_type = None ) : if field in attrs : key = attrs [ field ] if data_type is not None : if TYPES [ data_type ] != key . data_type : raise SyntaxError ( "Key %r %s already declared with ...
Parse a global index clause and return a GlobalIndex
29,886
def _insert ( self , tree ) : tablename = tree . table count = 0 kwargs = { } batch = self . connection . batch_write ( tablename , ** kwargs ) with batch : for item in iter_insert_items ( tree ) : batch . put ( item ) count += 1 return count
Run an INSERT statement
29,887
def _drop ( self , tree ) : tablename = tree . table kwargs = { } try : ret = self . connection . delete_table ( tablename , ** kwargs ) except DynamoDBError as e : if e . kwargs [ "Code" ] == "ResourceNotFoundException" and tree . exists : return False raise return True
Run a DROP statement
29,888
def _update_throughput ( self , tablename , read , write , index ) : def get_desc ( ) : desc = self . describe ( tablename , refresh = True , require = True ) if index is not None : return desc . global_indexes [ index ] return desc desc = get_desc ( ) def num_or_star ( value ) : return 0 if value == "*" else resolve (...
Update the throughput on a table or index
29,889
def _alter ( self , tree ) : if tree . throughput : [ read , write ] = tree . throughput index = None if tree . index : index = tree . index self . _update_throughput ( tree . table , read , write , index ) elif tree . drop_index : updates = [ IndexUpdate . delete ( tree . drop_index [ 0 ] ) ] try : self . connection ....
Run an ALTER statement
29,890
def _dump ( self , tree ) : schema = [ ] if tree . tables : for table in tree . tables : desc = self . describe ( table , refresh = True , require = True ) schema . append ( desc . schema ) else : for table in self . describe_all ( ) : schema . append ( table . schema ) return "\n\n" . join ( schema )
Run a DUMP statement
29,891
def _load ( self , tree ) : filename = tree . load_file [ 0 ] if filename [ 0 ] in [ '"' , "'" ] : filename = unwrap ( filename ) if not os . path . exists ( filename ) : raise Exception ( "No such file %r" % filename ) batch = self . connection . batch_write ( tree . table ) count = 0 with batch : remainder , ext = os...
Run a LOAD statement
29,892
def execute ( self , fragment , pretty_format = True ) : self . fragments = ( self . fragments + "\n" + fragment ) . lstrip ( ) try : line_parser . parseString ( self . fragments ) except ParseException : pass else : self . last_query = self . fragments . strip ( ) self . fragments = "" return super ( FragmentEngine , ...
Run or aggregate a query fragment
29,893
def pformat_exc ( self , exc ) : lines = [ ] try : pre_nl = self . last_query . rindex ( "\n" , 0 , exc . loc ) + 1 except ValueError : pre_nl = 0 try : post_nl = self . last_query . index ( "\n" , exc . loc ) except ValueError : post_nl = len ( self . last_query ) lines . append ( self . last_query [ : post_nl ] ) lin...
Format an exception message for the last query s parse error
29,894
def from_update ( cls , update ) : expressions = [ ] if update . set_expr : expressions . append ( UpdateSetMany . from_clause ( update . set_expr ) ) if update . remove_expr : expressions . append ( UpdateRemove . from_clause ( update . remove_expr ) ) if update . add_expr : expressions . append ( UpdateAdd . from_cla...
Factory for creating an Update expression
29,895
def to_index ( self , index_type , index_name , includes = None ) : return IndexField ( self . name , self . data_type , index_type , index_name , includes )
Create an index field from this field
29,896
def from_description ( cls , description , attrs ) : hash_key = None range_key = None index_type = description [ "Projection" ] [ "ProjectionType" ] includes = description [ "Projection" ] . get ( "NonKeyAttributes" ) for data in description [ "KeySchema" ] : name = data [ "AttributeName" ] if name not in attrs : conti...
Create an object from a dynamo3 response
29,897
def pformat ( self , consumed_capacity = None ) : consumed_capacity = consumed_capacity or { } lines = [ ] parts = [ "GLOBAL" , self . index_type , "INDEX" , self . name ] if self . status != "ACTIVE" : parts . insert ( 0 , "[%s]" % self . status ) lines . append ( " " . join ( parts ) ) lines . append ( " items: {0:,...
Pretty format for insertion into table pformat
29,898
def schema ( self ) : if self . status == "DELETING" : return "" parts = [ "GLOBAL" , self . index_type , "INDEX" ] parts . append ( "('%s', %s," % ( self . name , self . hash_key . name ) ) if self . range_key : parts . append ( "%s," % self . range_key . name ) if self . includes : parts . append ( "[%s]," % ", " . j...
The DQL fragment for constructing this index
29,899
def get_matching_indexes ( self , possible_hash , possible_range ) : matches = [ index for index in self . iter_query_indexes ( ) if index . hash_key in possible_hash ] range_matches = [ index for index in matches if index . range_key in possible_range ] if range_matches : return range_matches return matches
Get all indexes that could be queried on using a set of keys .