idx int64 0 251k | question stringlengths 53 3.53k | target stringlengths 5 1.23k | len_question int64 20 893 | len_target int64 3 238 |
|---|---|---|---|---|
6,400 | def access_storage_view ( name , * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:storage:view' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , 'name' : name , } ) | Shows ACL for the specified collection . | 73 | 8 |
6,401 | def access_storage_create ( name , * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:storage:create' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , 'name' : name , } ) | Creates new ACL for the specified collection . | 73 | 9 |
6,402 | def access_storage_edit ( name , cid , uid , perm , * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:storage:edit' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , 'name' : name , 'cids' : cid , 'uids' : uid , 'perm' : perm , } ) | Edits ACL for the specified collection . | 103 | 8 |
6,403 | def access_storage_rm ( name , yes , * * kwargs ) : if name is None : if not yes : click . confirm ( 'Are you sure you want to remove all ACL?' , abort = True ) ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:storage:rm' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , 'name' : name , } ) | Remove ACL for the specified collection . | 104 | 7 |
6,404 | def access_list ( * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:list' , * * { 'unicorn' : ctx . repo . create_secure_service ( 'unicorn' ) , } ) | Shows services for which there are ACL specified . | 63 | 10 |
6,405 | def access_view ( name , * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'access:view' , * * { 'unicorn' : ctx . repo . create_secure_service ( 'unicorn' ) , 'service' : name , } ) | Shows ACL for the specified service . | 71 | 8 |
6,406 | def keyring_view ( * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'keyring:view' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , } ) | View saved public keys . | 63 | 5 |
6,407 | def keyring_remove ( key , yes , * * kwargs ) : if key is None : if not yes : click . confirm ( 'Are you sure you want to remove all keys?' , abort = True ) ctx = Context ( * * kwargs ) ctx . execute_action ( 'keyring:remove' , * * { 'key' : key , 'storage' : ctx . repo . create_secure_service ( 'storage' ) , } ) | Removes a public key from the keyring . | 102 | 10 |
6,408 | def keyring_edit ( * * kwargs ) : ctx = Context ( * * kwargs ) ctx . timeout = None ctx . execute_action ( 'keyring:edit' , * * { 'storage' : ctx . repo . create_secure_service ( 'storage' ) , } ) | Edits interactively the keyring . | 69 | 8 |
6,409 | def keyring_refresh ( * * kwargs ) : ctx = Context ( * * kwargs ) ctx . execute_action ( 'keyring:refresh' , * * { 'tvm' : ctx . repo . create_secure_service ( 'tvm' ) , } ) | Refresh the keyring in the cocaine - runtime . | 67 | 11 |
6,410 | def evaluate_parameter_sets ( self ) : #parameter_interpreter = ParameterInterpreter(self.modelInstance) #parameter_interpreter.evaluate_parameter_sets() self . parameter_interpreter = LcoptParameterSet ( self . modelInstance ) self . modelInstance . evaluated_parameter_sets = self . parameter_interpreter . evaluated_parameter_sets self . modelInstance . bw2_export_params = self . parameter_interpreter . bw2_export_params | This takes the parameter sets of the model instance and evaluates any formulas using the parameter values to create a fixed full set of parameters for each parameter set in the model | 118 | 32 |
6,411 | def create_parameter_map ( self ) : names = self . modelInstance . names db = self . modelInstance . database [ 'items' ] parameter_map = { } def get_names_index ( my_thing ) : return [ i for i , x in enumerate ( names ) if x == my_thing ] [ 0 ] for k , this_item in db . items ( ) : if this_item [ 'type' ] == 'process' : production_id = [ x [ 'input' ] for x in this_item [ 'exchanges' ] if x [ 'type' ] == 'production' ] [ 0 ] input_ids = [ x [ 'input' ] for x in this_item [ 'exchanges' ] if x [ 'type' ] == 'technosphere' ] production_index = get_names_index ( db [ production_id ] [ 'name' ] ) input_indexes = [ get_names_index ( db [ x ] [ 'name' ] ) for x in input_ids ] parameter_ids = [ 'n_p_{}_{}' . format ( x , production_index ) for x in input_indexes ] parameter_map_items = { ( input_ids [ n ] , k ) : parameter_ids [ n ] for n , x in enumerate ( input_ids ) } #check = [self.modelInstance.params[x]['description'] for x in parameter_ids] #print(check) #print(parameter_map_items) parameter_map . update ( parameter_map_items ) self . parameter_map = parameter_map | Creates a parameter map which takes a tuple of the exchange from and exchange to codes and returns the parameter name for that exchange | 352 | 25 |
6,412 | def get_config ( override = None ) : # Set location config_path = os . path . expanduser ( '~' ) + '/.dbschema.yml' if override : config_path = override # Check if the config file exists check_exists ( config_path ) # Load config with open ( config_path ) as f : # use safe_load instead load config = yaml . safe_load ( f ) return config | Get config file | 95 | 3 |
6,413 | def check_exists ( path , type = 'file' ) : if type == 'file' : if not os . path . isfile ( path ) : raise RuntimeError ( 'The file `%s` does not exist.' % path ) else : if not os . path . isdir ( path ) : raise RuntimeError ( 'The folder `%s` does not exist.' % path ) return True | Check if a file or a folder exists | 86 | 8 |
6,414 | def get_connection ( engine , host , user , port , password , database , ssl = { } ) : if engine == 'mysql' : # Connection return get_mysql_connection ( host , user , port , password , database , ssl ) elif engine == 'postgresql' : # Connection return get_pg_connection ( host , user , port , password , database , ssl ) else : raise RuntimeError ( '`%s` is not a valid engine.' % engine ) | Returns a PostgreSQL or MySQL connection | 107 | 7 |
6,415 | def parse_statements ( queries_input , engine ) : queries = [ ] query = '' sql_delimiter = ';' # Possible delimiters used in PostgreSQL functions postgres_func_delimiters = [ '$$' , '##' ] # Split input by lines lines = queries_input . strip ( ) . split ( '\n' ) for k , line in enumerate ( lines ) : # Strip line line = line . strip ( ) # Skip empty lines and comments if not line or line . startswith ( '--' ) : continue # Detect new SQL delimiter if engine == 'mysql' and line . upper ( ) . startswith ( 'DELIMITER ' ) : sql_delimiter = line . split ( ) [ 1 ] continue elif engine == 'postgresql' and [ delimiter for delimiter in postgres_func_delimiters if 'AS ' + delimiter in line . upper ( ) ] : sql_delimiter = line . split ( ) [ - 1 ] # Ensure that we leave 'AS [DELIMITER]' query += line + '\n' continue # Statement is not finished if sql_delimiter not in line and k != len ( lines ) - 1 : # Append line query += line + '\n' else : # Statement is finished # Replace non default delimiter if sql_delimiter != ';' and engine == 'mysql' and line . endswith ( sql_delimiter ) : line = line . replace ( sql_delimiter , ';' ) queries . append ( query + line ) query = '' return queries | Parse input and return a list of SQL statements | 358 | 10 |
6,416 | def run_migration ( connection , queries , engine ) : # Execute query with connection . cursor ( ) as cursorMig : # Parse statements queries = parse_statements ( queries , engine ) for query in queries : cursorMig . execute ( query ) connection . commit ( ) return True | Apply a migration to the SQL server | 63 | 7 |
6,417 | def save_migration ( connection , basename ) : # Prepare query sql = "INSERT INTO migrations_applied (name, date) VALUES (%s, NOW())" # Run with connection . cursor ( ) as cursor : cursor . execute ( sql , ( basename , ) ) connection . commit ( ) return True | Save a migration in migrations_applied table | 69 | 10 |
6,418 | def delete_migration ( connection , basename ) : # Prepare query sql = "DELETE FROM migrations_applied WHERE name = %s" # Run with connection . cursor ( ) as cursor : cursor . execute ( sql , ( basename , ) ) connection . commit ( ) return True | Delete a migration in migrations_applied table | 63 | 10 |
6,419 | def get_migrations_applied ( engine , connection ) : try : # Get cursor based on engine if engine == 'postgresql' : cursor = connection . cursor ( cursor_factory = psycopg2 . extras . RealDictCursor ) else : cursor = connection . cursor ( ) sql = "SELECT id, name, date FROM migrations_applied" cursor . execute ( sql ) rows = cursor . fetchall ( ) # print (rows); return rows except psycopg2 . ProgrammingError : raise RuntimeError ( 'The table `migrations_applied` is missing. Please refer to the project documentation at https://github.com/gabfl/dbschema.' ) except pymysql . err . ProgrammingError : raise RuntimeError ( 'The table `migrations_applied` is missing. Please refer to the project documentation at https://github.com/gabfl/dbschema.' ) | Get list of migrations already applied | 202 | 7 |
6,420 | def apply_migrations ( engine , connection , path ) : # Get migrations applied migrations_applied = get_migrations_applied ( engine , connection ) # print(migrationsApplied) # Get migrations folder for file in get_migrations_files ( path ) : # Set vars basename = os . path . basename ( os . path . dirname ( file ) ) # Skip migrations if they are already applied if is_applied ( migrations_applied , basename ) : continue # Get migration source source = get_migration_source ( file ) # print (source); # Run migration run_migration ( connection , source , engine ) # Save migration save_migration ( connection , basename ) # Log print ( ' -> Migration `%s` applied' % ( basename ) ) # Log print ( ' * Migrations applied' ) return True | Apply all migrations in a chronological order | 196 | 8 |
6,421 | def rollback_migration ( engine , connection , path , migration_to_rollback ) : # Get migrations applied migrations_applied = get_migrations_applied ( engine , connection ) # Ensure that the migration was previously applied if not is_applied ( migrations_applied , migration_to_rollback ) : raise RuntimeError ( '`%s` is not in the list of previously applied migrations.' % ( migration_to_rollback ) ) # Rollback file file = path + migration_to_rollback + '/down.sql' # Ensure that the file exists check_exists ( file ) # Set vars basename = os . path . basename ( os . path . dirname ( file ) ) # Get migration source source = get_migration_source ( file ) # print (source); # Run migration rollback run_migration ( connection , source , engine ) # Delete migration delete_migration ( connection , basename ) # Log print ( ' -> Migration `%s` has been rolled back' % ( basename ) ) return True | Rollback a migration | 234 | 4 |
6,422 | def get_ssl ( database ) : # Set available keys per engine if database [ 'engine' ] == 'postgresql' : keys = [ 'sslmode' , 'sslcert' , 'sslkey' , 'sslrootcert' , 'sslcrl' , 'sslcompression' ] else : keys = [ 'ssl_ca' , 'ssl_capath' , 'ssl_cert' , 'ssl_key' , 'ssl_cipher' , 'ssl_check_hostname' ] # Loop thru keys ssl = { } for key in keys : value = database . get ( key , None ) if value is not None : ssl [ key ] = value return ssl | Returns SSL options for the selected engine | 150 | 7 |
6,423 | def apply ( config_override = None , tag_override = None , rollback = None , skip_missing = None ) : # Load config config = get_config ( config_override ) databases = config [ 'databases' ] # If we are rolling back, ensure that we have a database tag if rollback and not tag_override : raise RuntimeError ( 'To rollback a migration you need to specify the database tag with `--tag`' ) for tag in sorted ( databases ) : # If a tag is specified, skip other tags if tag_override and tag_override != tag : continue # Set vars engine = databases [ tag ] . get ( 'engine' , 'mysql' ) host = databases [ tag ] . get ( 'host' , 'localhost' ) port = databases [ tag ] . get ( 'port' , 3306 ) user = databases [ tag ] [ 'user' ] password = databases [ tag ] [ 'password' ] db = databases [ tag ] [ 'db' ] path = add_slash ( databases [ tag ] [ 'path' ] ) pre_migration = databases [ tag ] . get ( 'pre_migration' , None ) post_migration = databases [ tag ] . get ( 'post_migration' , None ) # Check if the migration path exists if skip_missing : try : check_exists ( path , 'dir' ) except RuntimeError : continue else : check_exists ( path , 'dir' ) # Get database connection connection = get_connection ( engine , host , user , port , password , db , get_ssl ( databases [ tag ] ) ) # Run pre migration queries if pre_migration : run_migration ( connection , pre_migration , engine ) if rollback : print ( ' * Rolling back %s (`%s` on %s)' % ( tag , db , engine ) ) rollback_migration ( engine , connection , path , rollback ) else : print ( ' * Applying migrations for %s (`%s` on %s)' % ( tag , db , engine ) ) apply_migrations ( engine , connection , path ) # Run post migration queries if post_migration : run_migration ( connection , post_migration , engine ) return True | Look thru migrations and apply them | 494 | 7 |
6,424 | def resolve ( self , authorization : http . Header ) : if authorization is None : return None scheme , token = authorization . split ( ) if scheme . lower ( ) != 'basic' : return None username , password = base64 . b64decode ( token ) . decode ( 'utf-8' ) . split ( ':' ) user = authenticate ( username = username , password = password ) return user | Determine the user associated with a request using HTTP Basic Authentication . | 85 | 14 |
6,425 | def resolve ( self , authorization : http . Header ) : from django_apistar . authentication . models import Token if authorization is None : return None scheme , token = authorization . split ( ) if scheme . lower ( ) != 'bearer' : return None try : user = Token . objects . get ( key = token ) . user except Token . DoesNotExist : return None return user | Determine the user associated with a request using Token Authentication . | 82 | 13 |
6,426 | def loop ( self ) : if not self . _loop : self . _loop = IOLoop . current ( ) return self . _loop return self . _loop | Lazy event loop initialization | 35 | 5 |
6,427 | def execute_action ( self , action_name , * * options ) : assert action_name in NG_ACTIONS , 'wrong action - {0}' . format ( action_name ) action = NG_ACTIONS [ action_name ] self . loop . run_sync ( lambda : action . execute ( * * options ) , timeout = self . timeout ) | Execute action with specified options . | 77 | 7 |
6,428 | def json2excel ( items , keys , filename , page_size = 60000 ) : wb = xlwt . Workbook ( ) rowindex = 0 sheetindex = 0 for item in items : if rowindex % page_size == 0 : sheetname = "%02d" % sheetindex ws = wb . add_sheet ( sheetname ) rowindex = 0 sheetindex += 1 colindex = 0 for key in keys : ws . write ( rowindex , colindex , key ) colindex += 1 rowindex += 1 colindex = 0 for key in keys : v = item . get ( key , "" ) if type ( v ) == list : v = ',' . join ( v ) if type ( v ) == set : v = ',' . join ( v ) ws . write ( rowindex , colindex , v ) colindex += 1 rowindex += 1 logging . debug ( filename ) wb . save ( filename ) | max_page_size is 65000 because we output old excel . xls format | 202 | 17 |
6,429 | def check_production_parameters_exist ( self ) : for k , v in self . modelInstance . parameter_sets . items ( ) : for p_id in self . modelInstance . production_params . keys ( ) : if v . get ( p_id ) : #print('{} already exists'.format(p_id)) pass else : #print('No production parameter called {} - setting it to 1'.format(p_id)) v [ p_id ] = 1.0 for p_id in self . modelInstance . allocation_params . keys ( ) : if v . get ( p_id ) : #print('{} already exists'.format(p_id)) pass else : #print('No production parameter called {} - setting it to 1'.format(p_id)) v [ p_id ] = 1.0 | old versions of models won t have produciton parameters leading to ZeroDivision errors and breaking things | 181 | 20 |
6,430 | def _create_symbol ( self , id , symbolobj ) : result = { 'id' : symbolobj . name , 'isPoint' : False , 'isStroke' : False , 'isFill' : False } matcher = Mapfile . _SYMBOL_NAME_REGEXP . match ( symbolobj . name ) if matcher : result [ 'name' ] = matcher . group ( 2 ) for c in matcher . group ( 1 ) : field = Mapfile . _STYLE_CHAR2NAME [ c ] result [ field ] = True else : result [ 'name' ] = symbolobj . name result [ 'isPoint' ] = result [ 'isStroke' ] = result [ 'isFill' ] = True return result | Creates the JSON representation of a symbol | 166 | 8 |
6,431 | def translate ( pua , composed = True ) : from . encoder import PUAComposedEncoder from . encoder import PUADecomposedEncoder if composed : JamoEncoder = PUAComposedEncoder else : JamoEncoder = PUADecomposedEncoder encoder = JamoEncoder ( ) return encoder . encode ( pua , final = True ) | Convert a unicode string with Hanyang - PUA codes to a Syllable - Initial - Peak - Final encoded unicode string . | 84 | 30 |
6,432 | def codes2unicode ( codes , composed = True ) : pua = u'' . join ( unichr ( code ) for code in codes ) return translate ( pua , composed = composed ) | Convert Hanyang - PUA code iterable to Syllable - Initial - Peak - Final encoded unicode string . | 42 | 26 |
6,433 | def read ( self , len = 1024 ) : return self . _with_retry ( functools . partial ( self . _read_attempt , len ) , self . gettimeout ( ) ) | read up to len bytes and return them or empty string on EOF | 43 | 14 |
6,434 | def connect ( self , address ) : tout = _timeout ( self . gettimeout ( ) ) while 1 : self . _wait_event ( tout . now , write = True ) err = self . _connect ( address , tout . now ) if err in ( errno . EINPROGRESS , errno . EALREADY , errno . EWOULDBLOCK ) : continue if err : raise socket . error ( err , errno . errorcode [ err ] ) return 0 | connects to the address and wraps the connection in an SSL context | 106 | 13 |
6,435 | def accept ( self ) : while 1 : try : sock , addr = self . _sock . accept ( ) return ( type ( self ) ( sock , keyfile = self . keyfile , certfile = self . certfile , server_side = True , cert_reqs = self . cert_reqs , ssl_version = self . ssl_version , ca_certs = self . ca_certs , do_handshake_on_connect = self . do_handshake_on_connect , suppress_ragged_eofs = self . suppress_ragged_eofs , ciphers = self . ciphers ) , addr ) except socket . error , exc : if exc . args [ 0 ] not in ( errno . EAGAIN , errno . EWOULDBLOCK ) : raise sys . exc_clear ( ) self . _wait_event ( self . gettimeout ( ) ) | accept a connection attempt from a remote client | 199 | 8 |
6,436 | def makefile ( self , mode = 'r' , bufsize = - 1 ) : sockfile = gsock . SocketFile . __new__ ( gsock . SocketFile ) gfiles . FileBase . __init__ ( sockfile ) sockfile . _sock = self sockfile . mode = mode if bufsize > 0 : sockfile . CHUNKSIZE = bufsize return sockfile | return a file - like object that operates on the ssl connection | 86 | 13 |
6,437 | def run ( cont , util , shell , argv = None ) : cont . fetch_and_import ( "setup/python/setup.py" ) . run ( cont , util , shell , argv ) cmake_cont = cont . fetch_and_import ( "setup/cmake/setup.py" ) . run ( cont , util , shell , argv ) # Now that the cmake container is set up, use its execute method # to find out where cmake scripts are actually installed. # # The answer might be obvious from just using "which" on OS X and # Linux, but on Windows, the binaries are symlinked into another # directory - we need the true install location. with cont . in_temp_cache_dir ( ) : with open ( "cmroot.cmake" , "w" ) as root_script : root_script . write ( "message (${CMAKE_ROOT})" ) install_path = bytearray ( ) def steal_output ( process , outputs ) : """Steal output from container executor.""" install_path . extend ( outputs [ 1 ] . read ( ) . strip ( ) ) return process . wait ( ) cmake_cont . execute ( cont , steal_output , "cmake" , "-P" , root_script . name ) # If we're on linux, then the returned path is going to be # relative to the container, so make that explicit, since we're # not running the tests inside the container. if platform . system ( ) == "Linux" : root_fs_path_bytes = cmake_cont . root_fs_path ( ) install_path = os . path . join ( root_fs_path_bytes . decode ( "utf-8" ) , install_path . decode ( "utf-8" ) [ 1 : ] ) install_path = os . path . normpath ( install_path ) else : install_path = install_path . decode ( "utf-8" ) util . overwrite_environment_variable ( shell , "CMAKE_INSTALL_PATH" , install_path ) | Set up language runtimes and pass control to python project script . | 457 | 13 |
6,438 | def get_content ( pattern , string , tag = 'content' ) : output = [ ] for match in re . finditer ( pattern , string ) : output . append ( match . group ( tag ) ) return output | Finds the content tag from a pattern in the provided string | 46 | 12 |
6,439 | def extract ( pattern , string , * , assert_equal = False , one = False , condense = False , default = None , default_if_multiple = True , default_if_none = True ) : if isinstance ( pattern , str ) : output = get_content ( pattern , string ) else : # Must be a linear container output = [ ] for p in pattern : output += get_content ( p , string ) output = process_output ( output , one = one , condense = condense , default = default , default_if_multiple = default_if_multiple , default_if_none = default_if_none ) if assert_equal : assert_output ( output , assert_equal ) else : return output | Used to extract a given regex pattern from a string given several options | 156 | 13 |
6,440 | def extractRuntime ( runtime_dirs ) : names = [ str ( item ) for name in runtime_dirs for item in os . listdir ( name ) ] string = '\n' . join ( names ) result = extract ( RUNTIME_PATTERN , string , condense = True ) return result | Used to find the correct static lib name to pass to gcc | 66 | 12 |
6,441 | def extractVersion ( string , default = '?' ) : return extract ( VERSION_PATTERN , string , condense = True , default = default , one = True ) | Extracts a three digit standard format version number | 37 | 10 |
6,442 | def render_secrets ( config_path , secret_path , ) : with open ( secret_path , 'r' ) as s_fh : secret_ini = anyconfig . load ( s_fh , ac_parser = 'ini' ) with open ( config_path , 'r' ) as c_fh : raw_cfg = c_fh . read ( ) rendered_cfg = anytemplate . renders ( raw_cfg , secret_ini , at_engine = 'jinja2' ) p_config = ProsperConfig ( config_path ) local_config = configparser . ConfigParser ( ) local_config . optionxform = str local_config . read_string ( rendered_cfg ) p_config . local_config = local_config return p_config | combine a jinja template with a secret . ini file | 169 | 14 |
6,443 | def check_value ( config , section , option , jinja_pattern = JINJA_PATTERN , ) : value = config [ section ] [ option ] if re . match ( jinja_pattern , value ) : return None return value | try to figure out if value is valid or jinja2 template value | 54 | 15 |
6,444 | def read_config ( config_filepath , logger = logging . getLogger ( 'ProsperCommon' ) , ) : config_parser = configparser . ConfigParser ( interpolation = ExtendedInterpolation ( ) , allow_no_value = True , delimiters = ( '=' ) , inline_comment_prefixes = ( '#' ) ) logger . debug ( 'config_filepath=%s' , config_filepath ) with open ( config_filepath , 'r' ) as filehandle : config_parser . read_file ( filehandle ) return config_parser | fetch and parse config file | 127 | 6 |
6,445 | def get_local_config_filepath ( config_filepath , force_local = False , ) : local_config_name = path . basename ( config_filepath ) . split ( '.' ) [ 0 ] + '_local.cfg' local_config_filepath = path . join ( path . split ( config_filepath ) [ 0 ] , local_config_name ) real_config_filepath = '' if path . isfile ( local_config_filepath ) or force_local : #if _local.cfg version exists, use it instead real_config_filepath = local_config_filepath else : #else use tracked default real_config_filepath = config_filepath return real_config_filepath | helper for finding local filepath for config | 161 | 9 |
6,446 | def get_option ( self , section_name , key_name , args_option = None , args_default = None , ) : if args_option != args_default and args_option is not None : self . logger . debug ( '-- using function args' ) return args_option section_info = section_name + '.' + key_name option = None try : option = check_value ( self . local_config , section_name , key_name ) self . logger . debug ( '-- using local config' ) if option : return option except ( KeyError , configparser . NoOptionError , configparser . NoSectionError ) : self . logger . debug ( '`%s` not found in local config' , section_info ) try : option = check_value ( self . global_config , section_name , key_name ) self . logger . debug ( '-- using global config' ) if option : return option except ( KeyError , configparser . NoOptionError , configparser . NoSectionError ) : self . logger . warning ( '`%s` not found in global config' , section_info ) env_option = get_value_from_environment ( section_name , key_name , logger = self . logger ) if env_option : self . logger . debug ( '-- using environment value' ) return env_option self . logger . debug ( '-- using default argument' ) return args_default | evaluates the requested option and returns the correct value | 308 | 10 |
6,447 | def SetCredentials ( api_username , api_passwd ) : global V2_API_USERNAME global V2_API_PASSWD global _V2_ENABLED _V2_ENABLED = True V2_API_USERNAME = api_username V2_API_PASSWD = api_passwd | Establish API username and password associated with APIv2 commands . | 72 | 13 |
6,448 | def get_session ( username , password , default_endpoints = clc . defaults , cert = None ) : if cert is None : cert = API . _ResourcePath ( 'clc/cacert.pem' ) session = requests . Session ( ) request = session . request ( "POST" , "{}/v2/authentication/login" . format ( default_endpoints . ENDPOINT_URL_V2 ) , data = { "username" : username , "password" : password } , verify = cert ) data = request . json ( ) if request . status_code == 200 : token = data [ 'bearerToken' ] alias = data [ 'accountAlias' ] location = data [ 'locationAlias' ] elif request . status_code == 400 : raise Exception ( "Invalid V2 API login. {}" . format ( data [ 'message' ] ) ) else : raise Exception ( "Error logging into V2 API. Response code {}. message {}" . format ( request . status_code , data [ 'message' ] ) ) return { 'username' : username , 'password' : password , 'http_session' : session , 'token' : token , 'alias' : alias , 'location' : location } | Start a session with the given parameters | 270 | 7 |
6,449 | def set_id ( self , pid ) : if self . type == 'KAF' : return self . node . set ( 'pid' , pid ) elif self . type == 'NAF' : return self . node . set ( 'id' , pid ) | Set the property identifier | 57 | 4 |
6,450 | def to_kaf ( self ) : if self . type == 'NAF' : ##convert all the properties for node in self . node . findall ( 'properties/property' ) : node . set ( 'pid' , node . get ( 'id' ) ) del node . attrib [ 'id' ] | Converts the element to NAF | 69 | 7 |
6,451 | def to_naf ( self ) : if self . type == 'KAF' : ##convert all the properties for node in self . node . findall ( 'properties/property' ) : node . set ( 'id' , node . get ( 'pid' ) ) del node . attrib [ 'pid' ] | Converts the element to KAF | 69 | 7 |
6,452 | def get_properties ( self ) : node_prop = self . node . find ( 'properties' ) if node_prop is not None : obj_properties = Cproperties ( node_prop , self . type ) for prop in obj_properties : yield prop | Iterator that returns all the properties of the layuer | 54 | 10 |
6,453 | def remove_properties ( self ) : node_prop = self . node . find ( 'properties' ) if node_prop is not None : self . node . remove ( node_prop ) | Removes the property layer if exists | 40 | 7 |
6,454 | def get_id ( self ) : if self . type == 'NAF' : return self . node . get ( 'id' ) elif self . type == 'KAF' : return self . node . get ( 'mid' ) | Returns the term identifier | 51 | 4 |
6,455 | def set_id ( self , i ) : if self . type == 'NAF' : self . node . set ( 'id' , i ) elif self . type == 'KAF' : self . node . set ( 'mid' , i ) | Sets the identifier for the term | 55 | 7 |
6,456 | def add_external_reference ( self , ext_ref ) : ext_refs_node = self . node . find ( 'externalReferences' ) if ext_refs_node is None : ext_refs_obj = CexternalReferences ( ) self . node . append ( ext_refs_obj . get_node ( ) ) else : ext_refs_obj = CexternalReferences ( ext_refs_node ) ext_refs_obj . add_external_reference ( ext_ref ) | Adds an external reference object to the markable | 110 | 9 |
6,457 | def get_external_references ( self ) : for ext_ref_node in self . node . findall ( 'externalReferences' ) : ext_refs_obj = CexternalReferences ( ext_ref_node ) for ref in ext_refs_obj : yield ref | Iterator that returns all the external references of the markable | 60 | 11 |
6,458 | def add_external_reference ( self , markable_id , external_ref ) : if markable_id in self . idx : markable_obj = Cterm ( self . idx [ markable_id ] , self . type ) markable_obj . add_external_reference ( external_ref ) else : print ( '{markable_id} not in self.idx' . format ( * * locals ( ) ) ) | Adds an external reference for the given markable | 96 | 9 |
6,459 | def remove_markables ( self , list_mark_ids ) : nodes_to_remove = set ( ) for markable in self : if markable . get_id ( ) in list_mark_ids : nodes_to_remove . add ( markable . get_node ( ) ) #For removing the previous comment prv = markable . get_node ( ) . getprevious ( ) if prv is not None : nodes_to_remove . add ( prv ) for node in nodes_to_remove : self . node . remove ( node ) | Removes a list of markables from the layer | 121 | 10 |
6,460 | def getinfo ( ee_obj , n = 4 ) : output = None for i in range ( 1 , n ) : try : output = ee_obj . getInfo ( ) except ee . ee_exception . EEException as e : if 'Earth Engine memory capacity exceeded' in str ( e ) : logging . info ( ' Resending query ({}/10)' . format ( i ) ) logging . debug ( ' {}' . format ( e ) ) sleep ( i ** 2 ) else : raise e if output : break # output = ee_obj.getInfo() return output | Make an exponential back off getInfo call on an Earth Engine object | 128 | 13 |
6,461 | def constant_image_value ( image , crs = 'EPSG:32613' , scale = 1 ) : return getinfo ( ee . Image ( image ) . reduceRegion ( reducer = ee . Reducer . first ( ) , scale = scale , geometry = ee . Geometry . Rectangle ( [ 0 , 0 , 10 , 10 ] , crs , False ) ) ) | Extract the output value from a calculation done with constant images | 86 | 12 |
6,462 | def date_0utc ( date ) : return ee . Date . fromYMD ( date . get ( 'year' ) , date . get ( 'month' ) , date . get ( 'day' ) ) | Get the 0 UTC date for a date | 47 | 8 |
6,463 | def get_version ( here_path , default_version = DEFAULT_VERSION , ) : if 'site-packages' in here_path : # Running as dependency return _version_from_file ( here_path ) if os . environ . get ( 'TRAVIS_TAG' ) : # Running on Travis-CI: trumps all if not TEST_MODE : # pragma: no cover return os . environ . get ( 'TRAVIS_TAG' ) . replace ( 'v' , '' ) else : warnings . warn ( 'Travis detected, but TEST_MODE enabled' , exceptions . ProsperVersionTestModeWarning ) try : current_tag = _read_git_tags ( default_version = default_version ) except Exception : # pragma: no cover return _version_from_file ( here_path ) # TODO: if #steps from tag root, increment minor # TODO: check if off main branch and add name to prerelease with open ( os . path . join ( here_path , 'version.txt' ) , 'w' ) as v_fh : # save version info somewhere static v_fh . write ( current_tag ) return current_tag | tries to resolve version number | 258 | 6 |
6,464 | def _read_git_tags ( default_version = DEFAULT_VERSION , git_command = ( 'git' , 'tag' ) , ) : try : current_tags = check_output ( git_command ) . splitlines ( ) except Exception : # pragma: no cover raise if not current_tags [ 0 ] : warnings . warn ( 'Unable to resolve current version' , exceptions . ProsperDefaultVersionWarning ) return default_version latest_version = semantic_version . Version ( default_version ) for tag in current_tags : tag_str = decode ( tag , 'utf-8' ) . replace ( 'v' , '' ) try : tag_ver = semantic_version . Version ( tag_str ) except Exception : # pragma: no cover continue # invalid tags ok, but no release if tag_ver > latest_version : latest_version = tag_ver return str ( latest_version ) | tries to find current git tag | 196 | 7 |
def _version_from_file(
        path_to_version,
        default_version=DEFAULT_VERSION,
):
    """Read the version from ``version.txt`` (for PyPI-installed copies).

    Falls back to ``default_version`` (with a warning) when the file is
    missing.
    """
    version_filepath = os.path.join(path_to_version, 'version.txt')

    if not os.path.isfile(version_filepath):
        warnings.warn(
            'Unable to resolve current version',
            exceptions.ProsperDefaultVersionWarning)
        return default_version

    with open(version_filepath, 'r') as v_fh:
        return v_fh.read()
def _configure_common(
        self,
        prefix,
        fallback_level,
        fallback_format,
        handler_name,
        handler,
        custom_args=''
):
    """Shared wiring used by every ``configure_*_logger`` helper.

    Reads level/format overrides from the LOGGING config section, attaches
    the formatter to the handler, registers the handler on the logger and
    records it in ``log_info``/``log_handlers``.
    """
    ## Retrieve settings from config ##
    log_level = self.config.get_option(
        'LOGGING', prefix + 'log_level', None, fallback_level)
    log_format_name = self.config.get_option(
        'LOGGING', prefix + 'log_format', None, None)
    if log_format_name:
        log_format = ReportingFormats[log_format_name].value
    else:
        log_format = fallback_format
    # should work even if no {custom_args}
    log_format = log_format.format(custom_args=custom_args)

    ## Attach handlers/formatter ##
    handler.setFormatter(logging.Formatter(log_format))
    handler.setLevel(log_level)
    self.logger.addHandler(handler)
    # make sure logger level is not lower than handler level
    if not self.logger.isEnabledFor(logging.getLevelName(log_level)):
        self.logger.setLevel(log_level)

    ## Save info about handler created ##
    self.log_info.append(handler_name + ' @ ' + str(log_level))
    self.log_handlers.append(handler)
def configure_default_logger(
        self,
        log_freq='midnight',
        log_total=30,
        log_level='INFO',
        log_format=ReportingFormats.DEFAULT.value,
        custom_args=''
):
    """Default rotating-file logger that every Prosper script should use.

    Rotation frequency and retention can be overridden via the LOGGING
    config section.
    """
    ## Override defaults if required ##
    log_freq = self.config.get_option('LOGGING', 'log_freq', None, log_freq)
    log_total = self.config.get_option('LOGGING', 'log_total', None, log_total)

    ## Set up log file handles/name ##
    log_abspath = path.join(self.log_path, self.log_name + '.log')
    general_handler = TimedRotatingFileHandler(
        log_abspath,
        when=log_freq,
        interval=1,
        backupCount=int(log_total)
    )

    self._configure_common(
        '', log_level, log_format, 'default', general_handler,
        custom_args=custom_args
    )
def configure_discord_logger(
        self,
        discord_webhook=None,
        discord_recipient=None,
        log_level='ERROR',
        log_format=ReportingFormats.PRETTY_PRINT.value,
        custom_args=''
):
    """Attach a Discord webhook handler — an easy way to alert humans.

    Webhook, recipient and level may all be overridden via the LOGGING
    config section; warns instead of raising when the webhook is unusable.
    """
    # Override defaults if required #
    discord_webhook = self.config.get_option(
        'LOGGING', 'discord_webhook', None, discord_webhook)
    discord_recipient = self.config.get_option(
        'LOGGING', 'discord_recipient', None, discord_recipient)
    log_level = self.config.get_option(
        'LOGGING', 'discord_level', None, log_level)

    # Actually build discord logging handler #
    discord_obj = DiscordWebhook()
    discord_obj.webhook(discord_webhook)

    # vv TODO vv: Test review #
    if not discord_obj.can_query:
        warnings.warn('Unable to execute webhook', exceptions.WebhookCreateFailed)
        return

    discord_handler = HackyDiscordHandler(discord_obj, discord_recipient)
    self._configure_common(
        'discord_', log_level, log_format, 'Discord', discord_handler,
        custom_args=custom_args
    )
def configure_slack_logger(
        self,
        slack_webhook=None,
        log_level='ERROR',
        log_format=ReportingFormats.SLACK_PRINT.value,
        custom_args=''
):
    """Attach a Slack webhook handler — an easy way to alert humans.

    Webhook and level may be overridden via the LOGGING config section.
    """
    # Override defaults if required #
    slack_webhook = self.config.get_option(
        'LOGGING', 'slack_webhook', None, slack_webhook)
    log_level = self.config.get_option(
        'LOGGING', 'slack_level', None, log_level)

    # Actually build slack logging handler #
    # vv TODO vv: Test review #
    slack_handler = HackySlackHandler(slack_webhook)

    self._configure_common(
        'slack_', log_level, log_format, 'Slack', slack_handler,
        custom_args=custom_args
    )
def configure_hipchat_logger(
        self,
        hipchat_webhook=None,
        log_level='ERROR',
        log_format=ReportingFormats.PRETTY_PRINT.value,
        custom_args=''
):
    """Attach a HipChat webhook handler — an easy way to alert humans.

    Webhook and level may be overridden via the LOGGING config section.
    """
    # Override defaults if required #
    hipchat_webhook = self.config.get_option(
        'LOGGING', 'hipchat_webhook', None, hipchat_webhook)
    log_level = self.config.get_option(
        'LOGGING', 'hipchat_level', None, log_level)

    # Actually build HipChat logging handler #
    # vv TODO vv: Test review #
    # BUGFIX: the previous `except Exception as error_msg: raise error_msg`
    # added no handling and re-raised through a fresh statement, truncating
    # the original traceback; let exceptions propagate naturally.
    hipchat_handler = HackyHipChatHandler(hipchat_webhook)
    self._configure_common(
        'hipchat_', log_level, log_format, 'HipChat', hipchat_handler,
        custom_args=custom_args
    )
def webhook(self, webhook_url):
    """Load this object from a full webhook URL.

    Validates the URL against the expected format and stores the extracted
    server id and API key via ``api_keys``.

    Raises:
        Exception: when the URL is empty or malformed.
    """
    if not webhook_url:
        raise Exception('Url can not be None')

    matched = re.match(self.__webhook_url_format, webhook_url)
    if not matched:
        raise Exception(
            'Invalid url format, looking for: ' + self.__webhook_url_format)

    server_id, api_key = matched.group(1), matched.group(2)
    self.api_keys(int(server_id), api_key)
def emit(self, record):  # pragma: no cover
    """Required logging.Handler hook: format and ship one record.

    Wraps tracebacks in a Discord code block, trims the message to the
    Discord size limit, and appends the alert recipient on CRITICAL.
    """
    if record.exc_text:
        # recast traceback to a Discord code block
        record.exc_text = '```python\n{0}\n```'.format(record.exc_text)

    log_msg = self.format(record)
    if len(log_msg) + self.alert_length > DISCORD_MESSAGE_LIMIT:
        keep = DISCORD_MESSAGE_LIMIT - DISCORD_PAD_SIZE
        log_msg = log_msg[:keep]

    if self.alert_recipient and record.levelno == logging.CRITICAL:
        log_msg = log_msg + '\n' + str(self.alert_recipient)

    self.send_msg_to_webhook(log_msg)
def send_msg_to_webhook(self, message):
    """POST one message to the Discord webhook (separated for testing).

    Failures are reported as warnings rather than raised, so logging can
    never crash the host application.
    """
    payload = {'content': message}
    header = {'Content-Type': 'application/json'}
    try:
        response = requests.post(self.api_url, headers=header, json=payload)
        response.raise_for_status()
    except Exception as error_msg:  # pragma: no cover
        warning_msg = (
            'EXCEPTION: UNABLE TO COMMIT LOG MESSAGE' +
            '\n\texception={0}'.format(repr(error_msg)) +
            '\n\tmessage={0}'.format(message)
        )
        warnings.warn(warning_msg, exceptions.WebhookFailedEmitWarning)
def decorate(self, record):
    """Build Slack attachment metadata for a log record.

    Colors ERROR as 'warning' and CRITICAL as 'danger' (Slack builtins);
    lower levels get no color. The text/fallback summarize the record's
    origin.
    """
    attachments = {}

    ## Set color by severity
    if record.levelno >= logging.CRITICAL:
        attachments['color'] = 'danger'  # builtin
    elif record.levelno >= logging.ERROR:
        attachments['color'] = 'warning'  # builtin

    ## Log text
    attach_text = '{levelname}: {name} {module}.{funcName}:{lineno}'.format(
        levelname=record.levelname,
        name=record.name,
        module=record.module,
        funcName=record.funcName,
        lineno=record.lineno)

    attachments['text'] = attach_text
    attachments['fallback'] = attach_text
    return attachments
def send_msg_to_webhook(self, json_payload, log_msg):
    """Push one message (with attachment payload) out to the Slack webhook.

    Failures are reported as warnings rather than raised, so logging can
    never crash the host application.
    """
    if SILENCE_OVERRIDE:  # pragma: no cover
        return

    payload = {
        'text': log_msg,
        'attachments': [json_payload]
    }
    header = {'Content-Type': 'application/json'}
    try:
        response = requests.post(self.webhook_url, headers=header, json=payload)
        response.raise_for_status()
    except Exception as error_msg:  # pragma: no cover
        warning_msg = (
            'EXCEPTION: UNABLE TO COMMIT LOG MESSAGE' +
            '\n\texception={0}'.format(repr(error_msg)) +
            '\n\tmessage={0}'.format(log_msg)
        )
        warnings.warn(warning_msg, exceptions.WebhookFailedEmitWarning)
def decorate(self, record):
    """Build HipChat-specific payload values for a log record.

    Maps the record's level to a HipChat color and decides whether the
    room should be notified (ERROR and above).
    """
    color = 'gray'
    if record.levelno == logging.WARNING:
        color = 'yellow'
    if record.levelno == logging.INFO:
        color = 'green'
    if record.levelno == logging.DEBUG:
        color = 'gray'
    if record.levelno >= logging.ERROR:
        color = 'red'

    # BUGFIX: the original wrote `nofiy = True` (typo), so `notify` stayed
    # False and error-level messages never pinged the room.
    notify = False
    if record.levelno >= logging.ERROR:
        notify = True

    payload = {
        'color': color,
        'notify': notify,
        'message_format': 'text'
    }
    return payload
def GetNetworks(alias=None, location=None):
    """Get the list of Networks mapped to the account in a datacenter.

    ``alias`` and ``location`` default to the account's configured values.
    Returns the network list on success (StatusCode 0); otherwise None.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()

    response = clc.v1.API.Call(
        'post', 'Network/GetAccountNetworks',
        {'AccountAlias': alias, 'Location': location})
    if int(response['StatusCode']) == 0:
        return response['Networks']
def GetNetworkDetails(network, alias=None, location=None):
    """Get the details for a Network and its IP addresses.

    ``alias`` and ``location`` default to the account's configured values.
    Returns the IP address list on success (StatusCode 0); otherwise None.
    """
    if alias is None:
        alias = clc.v1.Account.GetAlias()
    if location is None:
        location = clc.v1.Account.GetLocation()

    response = clc.v1.API.Call(
        'post', 'Network/GetNetworkDetails',
        {'AccountAlias': alias, 'Location': location, 'Name': network})
    if int(response['StatusCode']) == 0:
        return response['NetworkDetails']['IPAddresses']
def get_variable_from_exception(exception, variable_name):
    """Grab a variable from the closest frame in the active exception's stack.

    Walks the traceback frames (innermost first) and returns the first
    local binding named ``variable_name``. The ``exception`` argument is
    accepted for API symmetry but is not inspected directly.

    Raises:
        KeyError: when no frame holds the variable.
    """
    for frame in reversed(trace()):
        try:
            # From http://stackoverflow.com/a/9059407/6461688
            frame_variable = frame[0].f_locals[variable_name]
        except KeyError:
            pass
        else:
            return frame_variable
    else:
        # BUGFIX: the message used a comma instead of %-interpolation, so
        # the variable name never appeared in the raised error text.
        raise KeyError("Variable '%s' not in any stack frames" % variable_name)
def force_delete_key(address):
    """Delete the key from the keyring plus the Key and Address DB rows."""
    record = Address.objects.get(address=address)
    record.key.delete()
    record.delete()
def __wrap_accepted_val(self, value):
    """Ensure an accepted value is represented as a list (if not already)."""
    if isinstance(value, tuple):
        return list(value)
    if isinstance(value, list):
        return value
    return [value]
def __validate_args(self, func_name, args, kwargs):
    """Compare the value of each required argument with its accepted values.

    For each declared argument, the value is resolved from positional
    ``args``, then keyword ``kwargs``; missing non-optional arguments raise
    InvalidArgumentNumberError. A value is valid when any accepted entry —
    a Validator (or bound-method Validator), a type, or a literal — matches.

    Raises:
        InvalidArgumentNumberError: when a required argument is missing.
        ArgumentValidationError: when a value matches no accepted entry.
    """
    from pyvalid.validators import Validator
    for i, (arg_name, accepted_values) in enumerate(self.accepted_args):
        # Resolve the actual value: positional first, then keyword.
        if i < len(args):
            value = args[i]
        else:
            if arg_name in kwargs:
                value = kwargs[arg_name]
            elif i in self.optional_args:
                # Optional and absent: nothing to validate.
                continue
            else:
                raise InvalidArgumentNumberError(func_name)
        is_valid = False
        for accepted_val in accepted_values:
            # A validator may be a Validator instance or a bound method
            # whose underlying function is a Validator.
            is_validator = (
                isinstance(accepted_val, Validator) or
                (
                    isinstance(accepted_val, MethodType) and
                    hasattr(accepted_val, '__func__') and
                    isinstance(accepted_val.__func__, Validator)
                )
            )
            if is_validator:
                is_valid = accepted_val(value)
            elif isinstance(accepted_val, type):
                # Type entries validate by isinstance.
                is_valid = isinstance(value, accepted_val)
            else:
                # Anything else validates by equality.
                is_valid = value == accepted_val
            if is_valid:
                break
        if not is_valid:
            ord_num = self.__ordinal(i + 1)
            raise ArgumentValidationError(
                ord_num, func_name, value, accepted_values)
def __ordinal(self, num):
    """Return the ordinal of an integer as a string: 1 -> '1st', 2 -> '2nd'."""
    # 11th-19th are irregular and always take 'th'.
    if 10 <= num % 100 < 20:
        return str(num) + 'th'
    suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(num % 10, 'th')
    return '{}{}'.format(num, suffix)
def get_file(
        self, name, save_to,
        add_to_cache=True,
        force_refresh=False,
        _lock_exclusive=False):
    """Retrieve the file identified by ``name`` and save it to ``save_to``.

    Args:
        name: file name, possibly carrying a version (see ``split_name``).
        save_to: destination path for the downloaded file.
        add_to_cache: store a remotely fetched file in the local store too.
        force_refresh: go to the remote store even for versioned names.
        _lock_exclusive: internal — retry with an exclusive lock so the
            fetched file can safely be written into the local cache.

    Returns:
        The versioned name of the retrieved file.

    Raises:
        FiletrackerError: when the file is available in no store.
    """
    uname, version = split_name(name)

    lock = None
    if self.local_store:
        # Guard local-store access: shared for reads, exclusive when we
        # intend to write the fetched file into the cache.
        lock = self.lock_manager.lock_for(uname)
        if _lock_exclusive:
            lock.lock_exclusive()
        else:
            lock.lock_shared()
    else:
        # Without a local store there is nowhere to cache into.
        add_to_cache = False

    t = time.time()
    logger.debug(' downloading %s', name)
    try:
        if not self.remote_store or (
                version is not None and not force_refresh):
            # Prefer the local copy; on error fall through to remote
            # (or re-raise when there is no remote to fall back on).
            try:
                if self.local_store and self.local_store.exists(name):
                    return self.local_store.get_file(name, save_to)
            except Exception:
                if self.remote_store:
                    logger.warning(
                        "Error getting '%s' from local store",
                        name, exc_info=True)
                else:
                    raise
        if self.remote_store:
            if not _lock_exclusive and add_to_cache:
                # Caching needs an exclusive lock: release the shared one
                # and restart the whole operation exclusively.
                if lock:
                    lock.unlock()
                return self.get_file(
                    name, save_to, add_to_cache, _lock_exclusive=True)
            vname = self.remote_store.get_file(name, save_to)
            if add_to_cache:
                self._add_to_cache(vname, save_to)
            return vname
        raise FiletrackerError("File not available: %s" % name)
    finally:
        if lock:
            lock.close()
        logger.debug(' processed %s in %.2fs', name, time.time() - t)
def get_stream(self, name, force_refresh=False, serve_from_cache=False):
    """Retrieve the file identified by ``name`` in streaming mode.

    Args:
        name: file name, possibly carrying a version (see ``split_name``).
        force_refresh: go to the remote store even for versioned names.
        serve_from_cache: pull the file into the local store first and
            stream it from there.

    Raises:
        FiletrackerError: when the file is available in no store.
    """
    uname, version = split_name(name)

    lock = None
    if self.local_store:
        # Shared lock: streaming only reads from the local store here.
        lock = self.lock_manager.lock_for(uname)
        lock.lock_shared()

    try:
        if not self.remote_store or (
                version is not None and not force_refresh):
            # Prefer the local copy; on error fall through to remote
            # (or re-raise when there is no remote to fall back on).
            try:
                if self.local_store and self.local_store.exists(name):
                    return self.local_store.get_stream(name)
            except Exception:
                if self.remote_store:
                    logger.warning(
                        "Error getting '%s' from local store",
                        name, exc_info=True)
                else:
                    raise
        if self.remote_store:
            if self.local_store and serve_from_cache:
                # Pin down the exact version so the cached entry matches
                # what the remote would have served.
                if version is None:
                    version = self.remote_store.file_version(name)
                    if version:
                        name = versioned_name(uname, version)
                if force_refresh or not self.local_store.exists(name):
                    (stream, vname) = self.remote_store.get_stream(name)
                    name = self.local_store.add_stream(vname, stream)
                return self.local_store.get_stream(name)
            return self.remote_store.get_stream(name)
        raise FiletrackerError("File not available: %s" % name)
    finally:
        if lock:
            lock.close()
def file_version(self, name):
    """Return the newest available version number of the file.

    Prefers the remote store when one is configured.
    """
    store = self.remote_store if self.remote_store else self.local_store
    return store.file_version(name)
def file_size(self, name, force_refresh=False):
    """Return the size of the file.

    Prefers the local store for versioned names unless ``force_refresh``
    is set; falls back to the remote store on local errors.

    Raises:
        FiletrackerError: when the file is available in no store.
    """
    uname, version = split_name(name)

    started = time.time()
    logger.debug(' querying size of %s', name)
    try:
        prefer_local = not self.remote_store or (
            version is not None and not force_refresh)
        if prefer_local:
            try:
                if self.local_store and self.local_store.exists(name):
                    return self.local_store.file_size(name)
            except Exception:
                if self.remote_store:
                    logger.warning(
                        "Error getting '%s' from local store",
                        name, exc_info=True)
                else:
                    raise
        if self.remote_store:
            return self.remote_store.file_size(name)
        raise FiletrackerError("File not available: %s" % name)
    finally:
        logger.debug(' processed %s in %.2fs', name, time.time() - started)
def put_file(
        self, name, filename,
        to_local_store=True,
        to_remote_store=True,
        compress_hint=True):
    """Add file ``filename`` to the filetracker under ``name``.

    Writes to the local and/or remote store as requested (each falls back
    to the other when only one store is configured) and returns the
    versioned name reported by the store that was written last.

    Raises:
        ValueError: when both destination flags are False.
    """
    if not to_local_store and not to_remote_store:
        raise ValueError(
            "Neither to_local_store nor to_remote_store set "
            "in a call to filetracker.Client.put_file")

    check_name(name)

    lock = None
    if self.local_store:
        lock = self.lock_manager.lock_for(name)
        lock.lock_exclusive()

    try:
        store_locally = (to_local_store or not self.remote_store) \
            and self.local_store
        store_remotely = (to_remote_store or not self.local_store) \
            and self.remote_store
        if store_locally:
            versioned_name = self.local_store.add_file(name, filename)
        if store_remotely:
            versioned_name = self.remote_store.add_file(
                name, filename, compress_hint=compress_hint)
    finally:
        if lock:
            lock.close()

    return versioned_name
def delete_file(self, name):
    """Delete the file identified by ``name`` along with its metadata.

    Removes it from the local store (under an exclusive lock) and then
    from the remote store, whichever of the two are configured.
    """
    if self.local_store:
        lock = self.lock_manager.lock_for(name)
        lock.lock_exclusive()
        try:
            self.local_store.delete_file(name)
        finally:
            lock.close()
    if self.remote_store:
        self.remote_store.delete_file(name)
def list_local_files(self):
    """Return a list of all files kept in the local store (empty if none)."""
    if not self.local_store:
        return []
    return list(self.local_store.list_files())
def load_checkers():
    """Import every module in the ``checkers`` sub-package.

    Loading the modules lets each checker register itself.
    """
    checkers_dir = os.path.join(__path__[0], 'checkers')
    for loader, module_name, _ in pkgutil.iter_modules([checkers_dir]):
        loader.find_module(module_name).load_module(module_name)
def check(operations, loud=False):
    """Run every checker operation and collect the results.

    Args:
        operations: iterable of dicts, each with a 'checker' key naming a
            registered checker plus that checker's keyword arguments.
        loud: when True, pretty-print each result to stderr.

    Returns:
        (bool, list): overall success flag and the per-check roll call.

    Raises:
        CheckerNotFoundError: when an operation names an unknown checker.
    """
    if not CHECKERS:
        load_checkers()

    roll_call = []
    everything_ok = True

    if loud and operations:
        title = "Preflyt Checklist"
        sys.stderr.write("{}\n{}\n".format(title, "=" * len(title)))

    for operation in operations:
        if operation.get('checker') not in CHECKERS:
            raise CheckerNotFoundError(operation)
        checker_cls = CHECKERS[operation['checker']]
        checker_kwargs = {key: val for key, val in operation.items()
                          if key != 'checker'}
        success, message = checker_cls(**checker_kwargs).check()
        if not success:
            everything_ok = False
        roll_call.append(
            {"check": operation, "success": success, "message": message})
        if loud:
            sys.stderr.write(
                " {}\n".format(pformat_check(success, operation, message)))

    return everything_ok, roll_call
def verify(operations, loud=False):
    """Run the checks and raise when any of them failed.

    Returns the roll call on success.

    Raises:
        CheckFailedException: carrying the roll call of all checks.
    """
    everything_ok, roll_call = check(operations, loud=loud)
    if everything_ok:
        return roll_call
    raise CheckFailedException(roll_call)
def deci2sexa(deci, pre=3, trunc=False, lower=None, upper=None, b=False,
              upper_trim=False):
    """Convert a decimal number into sexagesimal components.

    Returns:
        tuple: ``(sign, hd, mm, ss)`` where sign is +1 or -1, hd/mm are
        ints and ss is the seconds rounded (or truncated) to ``pre``
        digits. When both bounds are given the input is normalized first;
        with ``upper_trim`` an hd equal to ``upper`` wraps to ``lower``.
    """
    if lower is not None and upper is not None:
        deci = normalize(deci, lower=lower, upper=upper, b=b)

    sign = -1 if deci < 0 else 1
    magnitude = abs(deci)

    hd, frac = divmod(magnitude, 1)
    mm, frac = divmod(frac * 60.0, 1)
    sf = frac * 60.0

    # Scale seconds to the requested precision before rounding/truncating.
    fp = 10 ** pre
    if trunc:
        ss = int(divmod(sf * fp, 1)[0])
    else:
        ss = int(round(sf * fp, 0))

    # Carry overflow: seconds hitting 60 at this precision roll into
    # minutes, which may in turn roll into the hours/degrees field.
    if ss == 60 * fp:
        mm += 1
        ss = 0
    if mm == 60:
        hd += 1
        mm = 0

    hd = int(hd)
    mm = int(mm)

    if lower is not None and upper is not None and upper_trim:
        # For example 24h0m0s => 0h0m0s.
        if hd == upper:
            hd = int(lower)

    # Exact zero is always positive.
    if hd == 0 and mm == 0 and ss == 0:
        sign = 1

    ss /= float(fp)
    # hd and mm parts are integer values but of type float
    return (sign, hd, mm, ss)
def sexa2deci(sign, hd, mm, ss, todeg=False):
    """Combine sexagesimal components into a decimal number.

    Args:
        sign: +1 or -1.
        hd, mm, ss: hours/degrees, minutes and seconds.
        todeg: convert the result from hours to degrees via ``h2d``.

    Raises:
        ValueError: when ``sign`` is neither -1 nor 1.
    """
    if sign not in (-1, 1):
        raise ValueError("Sign has to be -1 or 1.")

    total = hd / 1.0 + mm / 60.0 + ss / 3600.0
    result = sign * total

    if todeg:
        result = h2d(result)

    return result
def fmt_angle(val, s1=" ", s2=" ", s3="", pre=3, trunc=False,
              lower=None, upper=None, b=False, upper_trim=False):
    """Return a sexagesimal string for an angle given in degrees or hours.

    ``s1``/``s2``/``s3`` are the separators after the first, second and
    third fields; the remaining keywords are forwarded to ``deci2sexa``.
    """
    components = deci2sexa(val, pre=pre, trunc=trunc, lower=lower,
                           upper=upper, upper_trim=upper_trim, b=b)

    # Width of the seconds field: two digits plus, when pre > 0, the
    # decimal point and `pre` decimals.
    left_digits_plus_deci_point = 3 if pre > 0 else 2
    sec_fmt = "{3:0" + "{0}.{1}".format(
        pre + left_digits_plus_deci_point, pre) + "f}" + s3
    template = "{0}{1:02d}" + s1 + "{2:02d}" + s2 + sec_fmt

    sign_char = "-" if components[0] < 0 else "+"
    return template.format(sign_char, *components[1:])
def pposition(hd, details=False):
    """Parse a string into an angular position.

    The string must contain either 2 numbers (two decimal angles) or
    6 numbers (two sexagesimal triples).

    Args:
        hd: input string.
        details: when True return a dict with raw parse details instead
            of a plain ``(x, y)`` tuple.

    Raises:
        ValueError: when the string does not hold 2 or 6 numbers.
    """
    # :TODO: split two angles based on user entered separator and process
    # each part separately.
    # Split at any run of characters other than digits, ".", "-", and "+".
    # BUGFIX: the original pattern used `*`, which also matches the empty
    # string; since Python 3.7 re.split() honours empty matches and would
    # split between every character. Using `+` and dropping empty tokens
    # restores the intended tokenization.
    p = [token for token in re.split(r"[^\d\-+.]+", hd) if token]

    if len(p) not in [2, 6]:
        raise ValueError("Input must contain either 2 or 6 numbers.")

    # Two floating point numbers if string has 2 numbers.
    if len(p) == 2:
        x, y = float(p[0]), float(p[1])
        if details:
            numvals = 2
            raw_x = p[0]
            raw_y = p[1]
    # Two sexagesimal numbers if string has 6 numbers.
    elif len(p) == 6:
        x_p = phmsdms(" ".join(p[:3]))
        x = sexa2deci(x_p['sign'], *x_p['vals'])
        y_p = phmsdms(" ".join(p[3:]))
        y = sexa2deci(y_p['sign'], *y_p['vals'])
        if details:
            raw_x = x_p
            raw_y = y_p
            numvals = 6

    if details:
        result = dict(x=x, y=y, numvals=numvals, raw_x=raw_x, raw_y=raw_y)
    else:
        result = x, y

    return result
def sep(a1, b1, a2, b2):
    """Angular separation between two points on a unit sphere.

    Computed via atan2 of the cross-product modulus over the dot product
    of the two unit vectors; results below the tolerance collapse to 0.
    """
    # Tolerance to decide if the calculated separation is zero.
    tol = 1e-15

    v1 = CartesianVector.from_spherical(1.0, a1, b1)
    v2 = CartesianVector.from_spherical(1.0, a2, b2)

    dot_product = v1.dot(v2)
    cross_modulus = v1.cross(v2).mod
    angle = math.atan2(cross_modulus, dot_product)

    return 0.0 if abs(angle) < tol else angle
def normalize_sphere(alpha, delta):
    """Normalize the angles (in degrees) of a point on a sphere.

    Round-trips through a Cartesian unit vector and returns the
    normalized (alpha, delta) pair converted back to degrees.
    """
    vec = CartesianVector.from_spherical(
        r=1.0, alpha=d2r(alpha), delta=d2r(delta))
    angles = vec.normalized_angles
    return r2d(angles[0]), r2d(angles[1])
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.