idx int64 0 252k | question stringlengths 48 5.28k | target stringlengths 5 1.23k |
|---|---|---|
9,600 | def ensure_newline ( self ) : DECTCEM_SHOW = '\033[?25h' AT_END = DECTCEM_SHOW + '\n' if not self . _cursor_at_newline : self . write ( AT_END ) self . _cursor_at_newline = True | use before any custom printing when using the progress iter to ensure your print statement starts on a new line instead of at the end of a progress line |
9,601 | def _get_timethresh_heuristics ( self ) : if self . length > 1E5 : time_thresh = 2.5 elif self . length > 1E4 : time_thresh = 2.0 elif self . length > 1E3 : time_thresh = 1.0 else : time_thresh = 0.5 return time_thresh | reasonably decent heuristics for how much time to wait before updating progress . |
9,602 | def load_code ( name , base_path = None , recurse = False ) : if '/' in name : return load_location ( name , base_path , module = False ) return importer . import_code ( name , base_path , recurse = recurse ) | Load executable code from a URL or a path |
9,603 | def load ( name , base_path = None ) : if '/' in name : return load_location ( name , base_path , module = True ) return importer . import_symbol ( name , base_path ) | Load a module from a URL or a path |
9,604 | def extend ( path = None , cache = None ) : if path is None : path = config . PATH try : path = path . split ( ':' ) except : pass sys . path . extend ( [ library . to_path ( p , cache ) for p in path ] ) | Extend sys . path by a list of git paths . |
9,605 | def extender ( path = None , cache = None ) : old_path = sys . path [ : ] extend ( path , cache = None ) try : yield finally : sys . path = old_path | A context that temporarily extends sys . path and reverts it after the context is complete . |
9,606 | def add ( self , child ) : if isinstance ( child , Case ) : self . add_case ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the conditional derived variable . |
9,607 | def add ( self , child ) : if isinstance ( child , Action ) : self . add_action ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the event handler . |
9,608 | def add ( self , child ) : if isinstance ( child , StateVariable ) : self . add_state_variable ( child ) elif isinstance ( child , DerivedVariable ) : self . add_derived_variable ( child ) elif isinstance ( child , ConditionalDerivedVariable ) : self . add_conditional_derived_variable ( child ) elif isinstance ( child ... | Adds a typed child object to the behavioral object . |
9,609 | def add ( self , child ) : if isinstance ( child , Regime ) : self . add_regime ( child ) else : Behavioral . add ( self , child ) | Adds a typed child object to the dynamics object . |
9,610 | def create_bioset_lookup ( lookupdb , spectrafns , set_names ) : unique_setnames = set ( set_names ) lookupdb . store_biosets ( ( ( x , ) for x in unique_setnames ) ) set_id_map = lookupdb . get_setnames ( ) mzmlfiles = ( ( os . path . basename ( fn ) , set_id_map [ setname ] ) for fn , setname in zip ( spectrafns , se... | Fills lookup database with biological set names |
9,611 | def get_modpath_from_modname ( modname , prefer_pkg = False , prefer_main = False ) : from os . path import dirname , basename , join , exists initname = '__init__.py' mainname = '__main__.py' if modname in sys . modules : modpath = sys . modules [ modname ] . __file__ . replace ( '.pyc' , '.py' ) else : import pkgutil... | Same as get_modpath but doesn't import directly |
9,612 | def check_module_installed ( modname ) : import pkgutil if '.' in modname : parts = modname . split ( '.' ) base = parts [ 0 ] submods = parts [ 1 : ] loader = pkgutil . find_loader ( base ) if loader is not None : submods return True loader = pkgutil . find_loader ( modname ) is_installed = loader is not None return i... | Check if a python module is installed without attempting to import it . Note that if modname indicates a child module the parent module is always loaded . |
9,613 | def import_module_from_fpath ( module_fpath ) : r from os . path import basename , splitext , isdir , join , exists , dirname , split import platform if isdir ( module_fpath ) : module_fpath = join ( module_fpath , '__init__.py' ) print ( 'module_fpath = {!r}' . format ( module_fpath ) ) if not exists ( module_fpath ) ... | r imports module from a file path |
9,614 | def print_locals ( * args , ** kwargs ) : from utool import util_str from utool import util_dbg from utool import util_dict locals_ = util_dbg . get_parent_frame ( ) . f_locals keys = kwargs . get ( 'keys' , None if len ( args ) == 0 else [ ] ) to_print = { } for arg in args : varname = util_dbg . get_varname_from_loca... | Prints local variables in function . |
9,615 | def _extract_archive ( archive_fpath , archive_file , archive_namelist , output_dir , force_commonprefix = True , prefix = None , dryrun = False , verbose = not QUIET , overwrite = None ) : if prefix is not None : output_dir = join ( output_dir , prefix ) util_path . ensurepath ( output_dir ) archive_basename , ext = s... | archive_fpath = zip_fpath archive_file = zip_file |
9,616 | def open_url_in_browser ( url , browsername = None , fallback = False ) : r import webbrowser print ( '[utool] Opening url=%r in browser' % ( url , ) ) if browsername is None : browser = webbrowser . open ( url ) else : browser = get_prefered_browser ( pref_list = [ browsername ] , fallback = fallback ) return browser ... | r Opens a url in the specified or default browser |
9,617 | def url_read ( url , verbose = True ) : r if url . find ( '://' ) == - 1 : url = 'http://' + url if verbose : print ( 'Reading data from url=%r' % ( url , ) ) try : file_ = _urllib . request . urlopen ( url ) except IOError : raise data = file_ . read ( ) file_ . close ( ) return data | r Directly reads data from url |
9,618 | def url_read_text ( url , verbose = True ) : r data = url_read ( url , verbose ) text = data . decode ( 'utf8' ) return text | r Directly reads text data from url |
9,619 | def clean_dropbox_link ( dropbox_url ) : cleaned_url = dropbox_url . replace ( 'www.dropbox' , 'dl.dropbox' ) postfix_list = [ '?dl=0' ] for postfix in postfix_list : if cleaned_url . endswith ( postfix ) : cleaned_url = cleaned_url [ : - 1 * len ( postfix ) ] return cleaned_url | Dropbox links should be en masse downloaded from dl . dropbox |
9,620 | def grab_selenium_chromedriver ( redownload = False ) : r import utool as ut import os import stat chromedriver_dpath = ut . ensuredir ( ut . truepath ( '~/bin' ) ) chromedriver_fpath = join ( chromedriver_dpath , 'chromedriver' ) if not ut . checkpath ( chromedriver_fpath ) or redownload : assert chromedriver_dpath in... | r Automatically download selenium chrome driver if needed |
9,621 | def grab_selenium_driver ( driver_name = None ) : from selenium import webdriver if driver_name is None : driver_name = 'firefox' if driver_name . lower ( ) == 'chrome' : grab_selenium_chromedriver ( ) return webdriver . Chrome ( ) elif driver_name . lower ( ) == 'firefox' : return webdriver . Firefox ( ) else : raise ... | pip install selenium - U |
9,622 | def grab_file_url ( file_url , appname = 'utool' , download_dir = None , delay = None , spoof = False , fname = None , verbose = True , redownload = False , check_hash = False ) : r file_url = clean_dropbox_link ( file_url ) if fname is None : fname = basename ( file_url ) if download_dir is None : download_dir = util_... | r Downloads a file and returns the local path of the file . |
9,623 | def grab_zipped_url ( zipped_url , ensure = True , appname = 'utool' , download_dir = None , force_commonprefix = True , cleanup = False , redownload = False , spoof = False ) : r zipped_url = clean_dropbox_link ( zipped_url ) zip_fname = split ( zipped_url ) [ 1 ] data_name = split_archive_ext ( zip_fname ) [ 0 ] if d... | r downloads and unzips the url |
9,624 | def scp_pull ( remote_path , local_path = '.' , remote = 'localhost' , user = None ) : r import utool as ut if user is not None : remote_uri = user + '@' + remote + ':' + remote_path else : remote_uri = remote + ':' + remote_path scp_exe = 'scp' scp_args = ( scp_exe , '-r' , remote_uri , local_path ) ut . cmd ( scp_arg... | r wrapper for scp |
9,625 | def list_remote ( remote_uri , verbose = False ) : remote_uri1 , remote_dpath = remote_uri . split ( ':' ) if not remote_dpath : remote_dpath = '.' import utool as ut out = ut . cmd ( 'ssh' , remote_uri1 , 'ls -l %s' % ( remote_dpath , ) , verbose = verbose ) import re split_lines = [ re . split ( r'\s+' , t ) for t in... | remote_uri = user |
9,626 | def rsync ( src_uri , dst_uri , exclude_dirs = [ ] , port = 22 , dryrun = False ) : r from utool import util_cplat rsync_exe = 'rsync' rsync_options = '-avhzP' rsync_options += ' -e "ssh -p %d"' % ( port , ) if len ( exclude_dirs ) > 0 : exclude_tup = [ '--exclude ' + dir_ for dir_ in exclude_dirs ] exclude_opts = ' ' ... | r Wrapper for rsync |
9,627 | def get_cache ( self , namespace , query_hash , length , start , end ) : query = 'SELECT start, value FROM gauged_cache WHERE namespace = ? ' 'AND hash = ? AND length = ? AND start BETWEEN ? AND ?' cursor = self . cursor cursor . execute ( query , ( namespace , query_hash , length , start , end ) ) return tuple ( curso... | Get a cached value for the specified date range and query |
9,628 | def review ( cls , content , log , parent , window_icon ) : dlg = DlgReview ( content , log , parent , window_icon ) if dlg . exec_ ( ) : return dlg . ui . edit_main . toPlainText ( ) , dlg . ui . edit_log . toPlainText ( ) return None , None | Reviews the final bug report . |
9,629 | def get_version ( ) : version_desc = open ( os . path . join ( os . path . abspath ( APISettings . VERSION_FILE ) ) ) version_file = version_desc . read ( ) try : version = re . search ( r"version=['\"]([^'\"]+)['\"]" , version_file ) . group ( 1 ) return version except FileNotFoundError : Shell . fail ( 'File not foun... | Return version from setup . py |
9,630 | def set_version ( old_version , new_version ) : try : if APISettings . DEBUG : Shell . debug ( '* ' + old_version + ' + new_version ) return True for line in fileinput . input ( os . path . abspath ( APISettings . VERSION_FILE ) , inplace = True ) : print ( line . replace ( old_version , new_version ) , end = '' ) She... | Write new version into VERSION_FILE |
9,631 | def set_major ( self ) : old_version = self . get_version ( ) new_version = str ( int ( old_version . split ( '.' , 5 ) [ 0 ] ) + 1 ) + '.0.0' self . set_version ( old_version , new_version ) | Increment the major number of project |
9,632 | def set_minor ( self ) : old_version = self . get_version ( ) new_version = str ( int ( old_version . split ( '.' , 5 ) [ 0 ] ) ) + '.' + str ( int ( old_version . split ( '.' , 5 ) [ 1 ] ) + 1 ) + '.0' self . set_version ( old_version , new_version ) | Increment the minor number of project |
9,633 | def set_patch ( self , pre_release_tag = '' ) : current_version = self . get_version ( ) current_patch = self . get_patch_version ( current_version ) current_pre_release_tag = self . get_current_pre_release_tag ( current_patch ) current_RELEASE_SEPARATOR = self . get_current_RELEASE_SEPARATOR ( current_patch ) new_patc... | Increment the patch number of project |
9,634 | def flush ( self ) : ( slice_ , self . __buffer ) = ( self . __buffer , '' ) self . __size = 0 return slice_ | Return all buffered data and clear the stack . |
9,635 | def __send_hello ( self ) : _logger . debug ( "Saying hello: [%s]" , self ) self . __c . send ( nsq . config . protocol . MAGIC_IDENTIFIER ) | Initiate the handshake . |
9,636 | def __sender ( self ) : while ( self . __ignore_quit is True or self . __nice_quit_ev . is_set ( ) is False ) and self . __force_quit_ev . is_set ( ) is False : try : ( command , parts ) = self . __outgoing_q . get ( block = False ) except gevent . queue . Empty : gevent . sleep ( nsq . config . client . WRITE_THROTTLE... | Send - loop . |
9,637 | def __receiver ( self ) : while ( self . __ignore_quit is True or self . __nice_quit_ev . is_set ( ) is False ) and self . __force_quit_ev . is_set ( ) is False : try : self . __read_frame ( ) except errno . EAGAIN : gevent . sleep ( nsq . config . client . READ_THROTTLE_S ) self . __receive_thread_ev . set ( ) | Receive - loop . |
9,638 | def run ( self ) : while self . __nice_quit_ev . is_set ( ) is False : self . __connect ( ) _logger . info ( "Connection re-connect loop has terminated: %s" , self . __mc ) | Connect the server and maintain the connection . This shall not return until a connection has been determined to absolutely not be available . |
9,639 | def save ( obj , filename , protocol = 4 ) : with open ( filename , 'wb' ) as f : pickle . dump ( obj , f , protocol = protocol ) | Serialize an object to disk using pickle protocol . |
9,640 | def load_json ( filename , ** kwargs ) : with open ( filename , 'r' , encoding = 'utf-8' ) as f : return json . load ( f , ** kwargs ) | Load a JSON object from the specified file . |
9,641 | def save_json ( obj , filename , ** kwargs ) : with open ( filename , 'w' , encoding = 'utf-8' ) as f : json . dump ( obj , f , ** kwargs ) | Save an object as a JSON file . |
9,642 | def load_lines ( filename ) : with open ( filename , 'r' , encoding = 'utf-8' ) as f : return [ line . rstrip ( '\n' ) for line in f . readlines ( ) ] | Load a text file as an array of lines . |
9,643 | def save_lines ( lines , filename ) : with open ( filename , 'w' , encoding = 'utf-8' ) as f : f . write ( '\n' . join ( lines ) ) | Save an array of lines to a file . |
9,644 | def add ( self , child ) : if isinstance ( child , Component ) : self . add_child ( child ) else : raise ModelError ( 'Unsupported child element' ) | Adds a typed child object to the component . |
9,645 | def write_peps ( self , peps , reverse_seqs ) : if reverse_seqs : peps = [ ( x [ 0 ] [ : : - 1 ] , ) for x in peps ] cursor = self . get_cursor ( ) cursor . executemany ( 'INSERT INTO known_searchspace(seqs) VALUES (?)' , peps ) self . conn . commit ( ) | Writes peps to db . We can reverse to be able to look up peptides that have some amino acids missing at the N - terminal . This way we can still use the index . |
9,646 | def send_http_request ( self , app : str , service : str , version : str , method : str , entity : str , params : dict ) : host , port , node_id , service_type = self . _registry_client . resolve ( service , version , entity , HTTP ) url = 'http://{}:{}{}' . format ( host , port , params . pop ( 'path' ) ) http_keys = ... | A convenience method that allows you to send a well formatted http request to another service |
9,647 | def install_except_hook ( except_hook = _hooks . except_hook ) : if not _backends : raise ValueError ( 'no backends found, you must at least install one ' 'backend before calling this function' ) global _except_hook _except_hook = _hooks . QtExceptHook ( except_hook ) | Install an except hook that will show the crash report dialog when an unhandled exception has occurred . |
9,648 | def show_report_dialog ( window_title = 'Report an issue...' , window_icon = None , traceback = None , issue_title = '' , issue_description = '' , parent = None , modal = None , include_log = True , include_sys_info = True ) : if not _backends : raise ValueError ( 'no backends found, you must at least install one ' 'ba... | Show the issue report dialog manually . |
9,649 | def middleware ( self , args ) : if self . url [ ( len ( self . url ) - 1 ) ] == ( self . url_ , self . controller , dict ( method = self . method , request_type = self . request_type , middleware = None ) ) : self . url . pop ( ) self . url . append ( ( self . url_ , self . controller , dict ( method = self . method ,... | Appends a Middleware to the route which is to be executed before the route runs |
9,650 | def get ( self , url , controller ) : self . request_type = 'GET' controller_class , controller_method = self . __return_controller__ ( controller ) self . controller = controller_class self . method = controller_method self . url_ = url self . url . append ( ( url , controller_class , dict ( method = controller_method... | Gets the Controller and adds the route controller and method to the url list for GET request |
9,651 | def to_bytes ( value ) : if isinstance ( value , unicode ) : return value . encode ( 'utf8' ) elif not isinstance ( value , str ) : return str ( value ) return value | Get a byte array representing the value |
9,652 | def table_repr ( columns , rows , data , padding = 2 ) : padding = ' ' * padding column_lengths = [ len ( column ) for column in columns ] for row in rows : for i , column in enumerate ( columns ) : item = str ( data [ row ] [ column ] ) column_lengths [ i ] = max ( len ( item ) , column_lengths [ i ] ) max_row_length ... | Generate a table for cli output |
9,653 | def get_proteins_for_db ( fastafn ) : objects = { } for record in parse_fasta ( fastafn ) : objects [ parse_protein_identifier ( record ) ] = record return ( ( ( acc , ) for acc in list ( objects ) ) , ( ( acc , str ( record . seq ) ) for acc , record in objects . items ( ) ) , ( ( acc , get_uniprot_evidence_level ( re... | Runs through fasta file and returns proteins accession nrs sequences and evidence levels for storage in lookup DB . Duplicate accessions in fasta are accepted and removed by keeping only the last one . |
9,654 | def get_uniprot_evidence_level ( header ) : header = header . split ( ) for item in header : item = item . split ( '=' ) try : if item [ 0 ] == 'PE' : return 5 - int ( item [ 1 ] ) except IndexError : continue return - 1 | Returns uniprot protein existence evidence level for a fasta header . Evidence levels are 1 - 5 but we return 5 - x since sorting still demands that higher is better . |
9,655 | def run ( self ) : self . pre_run ( ) first = True while self . runnable : self . pre_call_message ( ) if first : self . pre_first_call_message ( ) message , payload = self . listener . get ( ) getattr ( self , message ) ( payload ) if first : first = False self . post_first_call_message ( ) self . post_call_message ( ... | Run our loop and any defined hooks ... |
9,656 | def count_multiplicities ( times , tmax = 20 ) : n = times . shape [ 0 ] mtp = np . ones ( n , dtype = '<i4' ) cid = np . zeros ( n , '<i4' ) idx0 = 0 _mtp = 1 _cid = 0 t0 = times [ idx0 ] for i in range ( 1 , n ) : dt = times [ i ] - t0 if dt > tmax : mtp [ idx0 : i ] = _mtp cid [ idx0 : i ] = _cid _mtp = 0 _cid += 1 ... | Calculate an array of multiplicities and corresponding coincidence IDs |
9,657 | def build_machine ( lines ) : if lines == [ ] : raise SyntaxError ( 'Empty file' ) else : machine = Machine ( lines [ 0 ] . split ( ) ) for line in lines [ 1 : ] : if line . strip ( ) != '' : machine . add_state ( line ) machine . check ( ) return machine | Build machine from list of lines . |
9,658 | def add_state ( self , string ) : parsed_string = string . split ( ) if len ( parsed_string ) > 0 : state , rules = parsed_string [ 0 ] , parsed_string [ 1 : ] if len ( rules ) != len ( self . alphabet ) : raise SyntaxError ( 'Wrong count of rules ({cur}/{exp}): {string}' . format ( cur = len ( rules ) , exp = len ( se... | Add state and rules to machine . |
9,659 | def check ( self ) : has_term = False if self . START_STATE not in self . states : raise SyntaxError ( 'Undefined start rule' ) for state in self . states : for rule in self . states [ state ] : if rule is not None : if rule [ 2 ] == self . TERM_STATE : has_term = True elif rule [ 2 ] not in self . states : raise Synta... | Check semantic rules . |
9,660 | def init_tape ( self , string ) : for char in string : if char not in self . alphabet and not char . isspace ( ) and char != self . EMPTY_SYMBOL : raise RuntimeError ( 'Invalid symbol: "' + char + '"' ) self . check ( ) self . state = self . START_STATE self . head = 0 self . tape = { } for i in range ( len ( string ) ... | Init system values . |
9,661 | def get_tape ( self ) : result = '' for i in range ( min ( self . tape ) , max ( self . tape ) + 1 ) : symbol = self . tape [ i ] if self . tape [ i ] != self . EMPTY_SYMBOL else ' ' result += symbol return result . strip ( ) | Get content of tape . |
9,662 | def execute_once ( self ) : symbol = self . tape . get ( self . head , self . EMPTY_SYMBOL ) index = self . alphabet . index ( symbol ) rule = self . states [ self . state ] [ index ] if rule is None : raise RuntimeError ( 'Unexpected symbol: ' + symbol ) self . tape [ self . head ] = rule [ 0 ] if rule [ 1 ] == 'L' : ... | One step of execution . |
9,663 | def compile ( self ) : result = TEMPLATE result += 'machine = Machine(' + repr ( self . alphabet ) + ')\n' for state in self . states : repr_state = state [ 0 ] for rule in self . states [ state ] : repr_state += ' ' + ( ',' . join ( rule ) if rule is not None else '-' ) result += ( "machine.add_state({repr_state})\n" ... | Return python code for create and execute machine . |
9,664 | def get_missing_services ( self , services ) : required_services = set ( services ) provided_services = set ( self . _services . keys ( ) ) missing_services = required_services . difference ( provided_services ) return sorted ( missing_services ) | Check if all required services are provided |
9,665 | def _drain ( self , cycles = None ) : log . info ( "Now draining..." ) if not cycles : log . info ( "No cycle count, the pipeline may be drained forever." ) if self . calibration : log . info ( "Setting up the detector calibration." ) for module in self . modules : module . detector = self . calibration . get_detector ... | Activate the pump and let the flow go . |
9,666 | def _check_service_requirements ( self ) : missing = self . services . get_missing_services ( self . required_services . keys ( ) ) if missing : self . log . critical ( "Following services are required and missing: {}" . format ( ', ' . join ( missing ) ) ) return False return True | Final comparison of provided and required modules |
9,667 | def drain ( self , cycles = None ) : if not self . _check_service_requirements ( ) : self . init_timer . stop ( ) return self . finish ( ) if self . anybar : self . anybar . change ( "orange" ) self . init_timer . stop ( ) log . info ( "Trapping CTRL+C and starting to drain." ) signal . signal ( signal . SIGINT , self ... | Execute _drain while trapping KeyboardInterrupt |
9,668 | def _handle_ctrl_c ( self , * args ) : if self . anybar : self . anybar . change ( "exclamation" ) if self . _stop : print ( "\nForced shutdown..." ) raise SystemExit if not self . _stop : hline = 42 * '=' print ( '\n' + hline + "\nGot CTRL+C, waiting for current cycle...\n" "Press CTRL+C again if you're in hurry!\n" +... | Handle the keyboard interrupts . |
9,669 | def get ( self , name , default = None ) : value = self . parameters . get ( name ) self . _processed_parameters . append ( name ) if value is None : return default return value | Return the value of the requested parameter or default if None . |
9,670 | def require ( self , name ) : value = self . get ( name ) if value is None : raise TypeError ( "{0} requires the parameter '{1}'." . format ( self . __class__ , name ) ) return value | Return the value of the requested parameter or raise an error . |
9,671 | def _check_unused_parameters ( self ) : all_params = set ( self . parameters . keys ( ) ) processed_params = set ( self . _processed_parameters ) unused_params = all_params - processed_params - RESERVED_ARGS if unused_params : self . log . warning ( "The following parameters were ignored: {}" . format ( ', ' . join ( s... | Check if any of the parameters passed in are ignored |
9,672 | def open_file ( self , filename ) : try : if filename . endswith ( '.gz' ) : self . blob_file = gzip . open ( filename , 'rb' ) else : self . blob_file = open ( filename , 'rb' ) except TypeError : log . error ( "Please specify a valid filename." ) raise SystemExit except IOError as error_message : log . error ( error_... | Open the file with filename |
9,673 | def parse ( cls , date_string ) : try : date = dateparser . parse ( date_string ) if date . tzinfo is None : date = dateparser . parse ( date_string , tzinfos = cls . tzd ) return date except Exception : raise ValueError ( "Could not parse date string!" ) | Parse any time string . Use a custom timezone matching if the original matching does not pull one out . |
9,674 | def epsg_code ( geojson ) : if isinstance ( geojson , dict ) : if 'crs' in geojson : urn = geojson [ 'crs' ] [ 'properties' ] [ 'name' ] . split ( ':' ) if 'EPSG' in urn : try : return int ( urn [ - 1 ] ) except ( TypeError , ValueError ) : return None return None | get the EPSG code from the CRS system |
9,675 | def convert_coordinates ( coords , origin , wgs84 , wrapped ) : if isinstance ( coords , list ) or isinstance ( coords , tuple ) : try : if isinstance ( coords [ 0 ] , list ) or isinstance ( coords [ 0 ] , tuple ) : return [ convert_coordinates ( list ( c ) , origin , wgs84 , wrapped ) for c in coords ] elif isinstance... | Convert coordinates from one crs to another |
9,676 | def to_latlon ( geojson , origin_espg = None ) : if isinstance ( geojson , dict ) : if origin_espg : code = origin_espg else : code = epsg_code ( geojson ) if code : origin = Proj ( init = 'epsg:%s' % code ) wgs84 = Proj ( init = 'epsg:4326' ) wrapped = test_wrap_coordinates ( geojson [ 'coordinates' ] , origin , wgs84... | Convert a given geojson to wgs84 . The original epsg must be included inside the crs tag of geojson |
9,677 | def camelcase_underscore ( name ) : s1 = re . sub ( '(.)([A-Z][a-z]+)' , r'\1_\2' , name ) return re . sub ( '([a-z0-9])([A-Z])' , r'\1_\2' , s1 ) . lower ( ) | Convert camelcase names to underscore |
9,678 | def get_tiles_list ( element ) : tiles = { } for el in element : g = ( el . findall ( './/Granules' ) or el . findall ( './/Granule' ) ) [ 0 ] name = g . attrib [ 'granuleIdentifier' ] name_parts = name . split ( '_' ) mgs = name_parts [ - 2 ] tiles [ mgs ] = name return tiles | Returns the list of all tile names from Product_Organisation element in metadata . xml |
9,679 | def metadata_to_dict ( metadata ) : tree = etree . parse ( metadata ) root = tree . getroot ( ) meta = OrderedDict ( ) keys = [ 'SPACECRAFT_NAME' , 'PRODUCT_STOP_TIME' , 'Cloud_Coverage_Assessment' , 'PROCESSING_LEVEL' , 'PRODUCT_TYPE' , 'PROCESSING_BASELINE' , 'SENSING_ORBIT_NUMBER' , 'SENSING_ORBIT_DIRECTION' , 'PROD... | Looks at metadata . xml file of sentinel product and extracts useful keys Returns a python dict |
9,680 | def get_tile_geometry ( path , origin_espg , tolerance = 500 ) : with rasterio . open ( path ) as src : b = src . bounds tile_shape = Polygon ( [ ( b [ 0 ] , b [ 1 ] ) , ( b [ 2 ] , b [ 1 ] ) , ( b [ 2 ] , b [ 3 ] ) , ( b [ 0 ] , b [ 3 ] ) , ( b [ 0 ] , b [ 1 ] ) ] ) tile_geojson = mapping ( tile_shape ) image = src . ... | Calculate the data and tile geometry for sentinel - 2 tiles |
9,681 | def tile_metadata ( tile , product , geometry_check = None ) : grid = 'T{0}{1}{2}' . format ( pad ( tile [ 'utmZone' ] , 2 ) , tile [ 'latitudeBand' ] , tile [ 'gridSquare' ] ) meta = OrderedDict ( { 'tile_name' : product [ 'tiles' ] [ grid ] } ) logger . info ( '%s Processing tile %s' % ( threading . current_thread ( ... | Generate metadata for a given tile |
9,682 | def load_markov ( argv , stdin ) : if len ( argv ) > 3 : with open ( argv [ 3 ] ) as input_file : return Algorithm ( input_file . readlines ( ) ) else : return Algorithm ( stdin . readlines ( ) ) | Load and return markov algorithm . |
9,683 | def load_turing ( argv , stdin ) : if len ( argv ) > 3 : with open ( argv [ 3 ] ) as input_file : return build_machine ( input_file . readlines ( ) ) else : return build_machine ( stdin . readlines ( ) ) | Load and return turing machine . |
9,684 | def main ( argv , stdin , stdout ) : if len ( argv ) > 1 and argv [ 1 : 3 ] == [ "compile" , "markov" ] : algo = load_markov ( argv , stdin ) print ( algo . compile ( ) , file = stdout ) elif len ( argv ) == 4 and argv [ 1 : 3 ] == [ "run" , "markov" ] : algo = load_markov ( argv , stdin ) for line in stdin : print ( a... | Execute when user call turingmarkov . |
9,685 | def detectors ( regex = None , sep = '\t' , temporary = False ) : db = DBManager ( temporary = temporary ) dt = db . detectors if regex is not None : try : re . compile ( regex ) except re . error : log . error ( "Invalid regex!" ) return dt = dt [ dt [ 'OID' ] . str . contains ( regex ) | dt [ 'CITY' ] . str . contain... | Print the detectors table |
9,686 | def get_product_metadata_path ( product_name ) : string_date = product_name . split ( '_' ) [ - 1 ] date = datetime . datetime . strptime ( string_date , '%Y%m%dT%H%M%S' ) path = 'products/{0}/{1}/{2}/{3}' . format ( date . year , date . month , date . day , product_name ) return { product_name : { 'metadata' : '{0}/{1... | gets a single products metadata |
9,687 | def get_products_metadata_path ( year , month , day ) : products = { } path = 'products/{0}/{1}/{2}/' . format ( year , month , day ) for key in bucket . objects . filter ( Prefix = path ) : product_path = key . key . replace ( path , '' ) . split ( '/' ) name = product_path [ 0 ] if name not in products : products [ n... | Get paths to multiple products metadata |
9,688 | def start ( backdate = None ) : if f . s . cum : raise StartError ( "Already have stamps, can't start again (must reset)." ) if f . t . subdvsn_awaiting or f . t . par_subdvsn_awaiting : raise StartError ( "Already have subdivisions, can't start again (must reset)." ) if f . t . stopped : raise StoppedError ( "Timer al... | Mark the start of timing overwriting the automatic start data written on import or the automatic start at the beginning of a subdivision . |
9,689 | def stamp ( name , backdate = None , unique = None , keep_subdivisions = None , quick_print = None , un = None , ks = None , qp = None ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot stamp stopped timer." ) if f . t . paused : raise PausedError ( "Cannot stamp paused timer." ) if backdate is None : ... | Mark the end of a timing interval . |
9,690 | def stop ( name = None , backdate = None , unique = None , keep_subdivisions = None , quick_print = None , un = None , ks = None , qp = None ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Timer already stopped." ) if backdate is None : t_stop = t else : if f . t is f . root : raise BackdateError ( "Cannot... | Mark the end of timing . Optionally performs a stamp hence accepts the same arguments . |
9,691 | def pause ( ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot pause stopped timer." ) if f . t . paused : raise PausedError ( "Timer already paused." ) f . t . paused = True f . t . tmp_total += t - f . t . start_t f . t . start_t = None f . t . last_t = None return t | Pause the timer preventing subsequent time from accumulating in the total . Renders the timer inactive disabling other timing commands . |
9,692 | def resume ( ) : t = timer ( ) if f . t . stopped : raise StoppedError ( "Cannot resume stopped timer." ) if not f . t . paused : raise PausedError ( "Cannot resume timer that is not paused." ) f . t . paused = False f . t . start_t = t f . t . last_t = t return t | Resume a paused timer re - activating it . Subsequent time accumulates in the total . |
9,693 | def collapse_times ( ) : orig_ts = f . timer_stack orig_ls = f . loop_stack copy_ts = _copy_timer_stack ( ) copy_ls = copy . deepcopy ( f . loop_stack ) f . timer_stack = copy_ts f . loop_stack = copy_ls f . refresh_shortcuts ( ) while ( len ( f . timer_stack ) > 1 ) or f . t . in_loop : _collapse_subdivision ( ) timer... | Make copies of everything assign to global shortcuts so functions work on them extract the times then restore the running stacks . |
9,694 | def create_plate ( self , plate_id , description , meta_data_id , values , complement , parent_plate ) : with switch_db ( PlateDefinitionModel , db_alias = 'hyperstream' ) : try : p = PlateDefinitionModel . objects . get ( plate_id = plate_id ) if p : logging . info ( "Plate with id {} already exists" . format ( plate_... | Create a new plate and commit it to the database |
9,695 | def timed_loop ( name = None , rgstr_stamps = None , save_itrs = SET [ 'SI' ] , loop_end_stamp = None , end_stamp_unique = SET [ 'UN' ] , keep_prev_subdivisions = SET [ 'KS' ] , keep_end_subdivisions = SET [ 'KS' ] , quick_print = SET [ 'QP' ] ) : return TimedLoop ( name = name , rgstr_stamps = rgstr_stamps , save_itrs... | Instantiate a TimedLoop object for measuring loop iteration timing data . Can be used with either for or while loops . |
9,696 | def timed_for ( iterable , name = None , rgstr_stamps = None , save_itrs = SET [ 'SI' ] , loop_end_stamp = None , end_stamp_unique = SET [ 'UN' ] , keep_prev_subdivisions = SET [ 'KS' ] , keep_end_subdivisions = SET [ 'KS' ] , quick_print = SET [ 'QP' ] ) : return TimedFor ( iterable , name = name , rgstr_stamps = rgst... | Instantiate a TimedLoop object for measuring for loop iteration timing data . Can be used only on for loops . |
9,697 | def write_calibration ( calib , f , loc ) : for i , node in enumerate ( [ p + '_' + s for p in [ 'pos' , 'dir' ] for s in 'xyz' ] ) : h5loc = loc + '/' + node ca = f . get_node ( h5loc ) ca . append ( calib [ : , i ] ) du = f . get_node ( loc + '/du' ) du . append ( calib [ : , 7 ] . astype ( 'u1' ) ) floor = f . get_n... | Write calibration set to file |
9,698 | def initialise_arrays ( group , f ) : for node in [ 'pos_x' , 'pos_y' , 'pos_z' , 'dir_x' , 'dir_y' , 'dir_z' , 'du' , 'floor' , 't0' ] : if node in [ 'floor' , 'du' ] : atom = U1_ATOM else : atom = F4_ATOM f . create_earray ( group , node , atom , ( 0 , ) , filters = FILTERS ) | Create EArrays for calibrated hits |
9,699 | def blob_counter ( self ) : import aa from ROOT import EventFile try : event_file = EventFile ( self . filename ) except Exception : raise SystemExit ( "Could not open file" ) num_blobs = 0 for event in event_file : num_blobs += 1 return num_blobs | Create a blob counter . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.