idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
53,300
def validate_pair(ob: Any) -> bool:
    """Does the object have length 2?

    BUG FIX: ``len()`` raises TypeError (not ValueError) for objects
    with no length, so the original's except clause never caught that
    case and the exception propagated.  Such objects now simply count
    as "not a pair".
    """
    try:
        length = len(ob)
    except TypeError:
        # e.g. an int or a generator: cannot be a pair
        return False
    if length != 2:
        log.warning("Unexpected result: {!r}", ob)
        return False
    return True
Does the object have length 2?
53,301
def clean_message(message: Message, topmost: bool = False) -> Message:
    """Clean a message of all its binary parts.

    Recurses into multipart messages (except external-body containers);
    binary leaf parts are gutted unless this is the outermost message.
    """
    if message.is_multipart():
        if message.get_content_type() != 'message/external-body':
            payload = message.get_payload()
            payload[:] = map(clean_message, payload)
    elif message_is_binary(message) and not topmost:
        message = gut_message(message)
    return message
Clean a message of all its binary parts .
53,302
def is_password_valid(plaintextpw: str, storedhash: str) -> bool:
    """Check whether a plaintext password matches a stored bcrypt hash.

    ``None`` inputs are treated as empty strings.  A malformed stored
    hash yields False rather than raising.

    BUG FIX: modern bcrypt releases require ``bytes`` arguments; str
    inputs raise TypeError, which the original neither converted nor
    caught.
    """
    storedhash = "" if storedhash is None else str(storedhash)
    plaintextpw = "" if plaintextpw is None else str(plaintextpw)
    pw = plaintextpw.encode("utf-8")
    stored = storedhash.encode("utf-8")
    try:
        rehashed = bcrypt.hashpw(pw, stored)
    except (ValueError, TypeError):
        # Invalid/empty salt in storedhash: cannot match anything.
        return False
    return rehashed == stored
Checks if a plaintext password matches a stored hash .
53,303
def version(self):
    """Compute the version identifier for this functional node.

    Hashes the function's bytecode and referenced names; closed-over
    values are mixed in when ``closure_fingerprint`` is specified.
    """
    try:
        code = self.func.__call__.__code__
    except AttributeError:
        # Plain functions: __call__ is a method-wrapper without __code__.
        code = self.func.__code__
    hasher = md5()
    hasher.update(code.co_code)
    hasher.update(str(code.co_names).encode())
    try:
        closure = self.func.__closure__
    except AttributeError:
        closure = None
    if closure is not None and self.closure_fingerprint is not None:
        cell_values = dict((name, cell.cell_contents)
                           for name, cell in zip(code.co_freevars, closure))
        hasher.update(self.closure_fingerprint(cell_values).encode())
    return hasher.hexdigest()
Compute the version identifier for this functional node using the func code and local names . Optionally also allow closed - over variable values to affect the version number when closure_fingerprint is specified
53,304
def mapper_init(self):
    # Build the helpers used by the mapper: a cached county lookup
    # (spatial index over the counties GeoJSON) and a word extractor.
    self.counties = CachedCountyLookup(precision=GEOHASH_PRECISION)
    self.extractor = WordExtractor()
Download counties geojson from S3 and build spatial index and cache
53,305
def run(self):
    """Top-level runner: load State/County GeoJSON files into MongoDB."""
    logging.info("Starting GeoJSON MongoDB loading process.")
    mongo_kwargs = {"uri": self.mongo,
                    "db": self.db,
                    "collection": self.collection}
    self.load(self.source, **mongo_kwargs)
    logging.info("Finished loading {0} into MongoDB".format(self.source))
Top level runner to load State and County GeoJSON files into Mongo DB
53,306
def load(self, geojson, uri=None, db=None, collection=None):
    """Load a GeoJSON file into a MongoDB collection."""
    logging.info("Mongo URI: {0}".format(uri))
    logging.info("Mongo DB: {0}".format(db))
    logging.info("Mongo Collection: {0}".format(collection))
    logging.info("Geojson File to be loaded: {0}".format(geojson))
    sink = MongoGeo(db=db, collection=collection, uri=uri)
    # Stream each GeoJSON feature into Mongo via the sink's insert.
    GeoJSONLoader().load(geojson, sink.insert)
Load geojson file into mongodb instance
53,307
def get_version():
    """Return a formatted version string built from VERSION and UPDATED.

    Falls back to VERSION alone, then the localized UPDATED date, then
    an empty string.
    """
    updated_is_date = isinstance(UPDATED, (date, datetime))
    if VERSION and UPDATED and updated_is_date:
        return FORMAT_STRING.format(version=VERSION, updated=UPDATED)
    if VERSION:
        return VERSION
    if UPDATED and updated_is_date:
        return localize(UPDATED)
    return ""
Return formatted version string .
53,308
def process_file(filename: str,
                 filetypes: List[str],
                 move_to: str,
                 delete_if_not_specified_file_type: bool,
                 show_zip_output: bool) -> None:
    """Deal with one OpenXML file, including potentially corrupted ones.

    Recognized files may be moved to *move_to*; unrecognized ones may be
    deleted on request.  Any exception is logged and re-raised so the
    parent process sees the failure.
    """
    try:
        reader = CorruptedOpenXmlReader(filename,
                                        show_zip_output=show_zip_output)
        if reader.file_type not in filetypes:
            log.info("Unrecognized or unwanted contents: " + filename)
            if delete_if_not_specified_file_type:
                log.info("Deleting: " + filename)
                os.remove(filename)
        else:
            log.info("Found {}: {}", reader.description, filename)
            if move_to:
                dest_file = os.path.join(move_to, os.path.basename(filename))
                _, ext = os.path.splitext(dest_file)
                # Normalize the extension to what the reader suggests.
                if ext != reader.suggested_extension():
                    dest_file += reader.suggested_extension()
                reader.move_to(destination_filename=dest_file)
    except Exception as e:
        log.critical("Uncaught error in subprocess: {!r}\n{}",
                     e, traceback.format_exc())
        raise
Deals with an OpenXML including if it is potentially corrupted .
53,309
def should_build(self, fpath, meta):
    """Decide whether *fpath* needs rebuilding.

    Only layouts tagged as incremental may be skipped, and then only
    when the file's mtime is unchanged since the previous build.
    """
    layout = meta.get('layout', self.default_template)
    if layout in self.inc_layout:
        # mtime is only consulted for incremental layouts.
        return self.prev_mtime.get(fpath, 0) != os.path.getmtime(fpath)
    return True
Checks if the file should be built or not Only skips layouts which are tagged as INCREMENTAL Rebuilds only those files with mtime changed since previous build
53,310
def clusterQueues(self):
    # Return a dict of queues in the cluster and the servers running
    # them: queue name -> list of {'host': ..., 'uuid': ...}.
    # NOTE(review): Twisted inlineCallbacks-style generator -- each
    # `yield` awaits a Deferred; the result is delivered via returnValue.
    servers = yield self.getClusterServers()
    queues = {}
    for sname in servers:
        # Per-server queue list and uuid live under rhumba.server.<name>.*
        qs = yield self.get('rhumba.server.%s.queues' % sname)
        uuid = yield self.get('rhumba.server.%s.uuid' % sname)
        qs = json.loads(qs)
        for q in qs:
            if q not in queues:
                queues[q] = []
            queues[q].append({'host': sname, 'uuid': uuid})
    defer.returnValue(queues)
Return a dict of queues in cluster and servers running them
53,311
def close(self):
    """Close all HTTP connections.

    Returns a Deferred that fires once the pool's cached connections
    have actually disappeared from the reactor.

    BUG FIX: the original scanned ``reactor.getReaders()`` twice and
    never looked at writers, so lingering write-side FDs were missed.
    """
    def validate_client(client):
        # Match the client FD against our configured host/port.
        host, port = client.addr
        parsed_url = urlparse(self._hostname)
        return host == parsed_url.hostname and port == parsed_url.port

    def _check_fds(_):
        fds = set(reactor.getReaders() + reactor.getWriters())
        if not [fd for fd in fds
                if isinstance(fd, Client) and validate_client(fd)]:
            return
        # Still open: poll again on the next reactor turn.
        return deferLater(reactor, 0, _check_fds, None)

    pool = self._async_http_client_params["pool"]
    return pool.closeCachedConnections().addBoth(_check_fds)
close all http connections . returns a deferred that fires once they re all closed .
53,312
def extract_line(geom, dem, **kwargs):
    # Extract a linear feature (e.g. an elevation profile) from a
    # rasterio geospatial dataset.  Returns an (n, 3) array of x, y, z.
    kwargs.setdefault('masked', True)
    # NOTE(review): coords_in is computed but never used.
    coords_in = coords_array(geom)
    # World -> pixel coordinates via the inverse affine transform.
    f = lambda *x: ~dem.transform * x
    px = transform(f, geom)
    # Optionally densify the line so it is sampled every `subdivide`
    # pixel interval.
    interval = kwargs.pop('subdivide', 1)
    if interval is not None:
        px = subdivide(px, interval=interval)
    # Pixel -> world again for the (possibly densified) vertices.
    f = lambda *x: dem.transform * (x[0], x[1])
    geom = transform(f, px)
    coords_px = coords_array(px)
    # Pixel-aligned read window covering the whole line
    # (row/col order, hence the [::-1] reversals).
    mins = N.floor(coords_px.min(axis=0))
    maxs = N.ceil(coords_px.max(axis=0))
    window = tuple((int(mn), int(mx))
                   for mn, mx in zip(mins[::-1], maxs[::-1]))
    # Shift pixel coordinates into window-local space before sampling.
    aff = Affine.translation(*(-mins))
    f = lambda *x: aff * x
    px_to_extract = transform(f, px)
    band = dem.read(1, window=window, **kwargs)
    # Bilinear interpolation of the raster at each vertex; x/y columns
    # are then replaced with world coordinates.
    extracted = bilinear(band, px_to_extract)
    coords = coords_array(extracted)
    coords[:, :2] = coords_array(geom)
    return coords
Extract a linear feature from a rasterio geospatial dataset .
53,313
def fork(executable, args=(), env=None, path=None, timeout=3600):
    """Spawn *executable* and return a Deferred with a timeout.

    The Deferred fires with the process result, or errs out after
    *timeout* seconds.

    BUG FIX: ``env`` previously defaulted to a mutable ``{}`` shared
    between calls; it now defaults to ``None`` and an empty dict is
    created per call.
    """
    if env is None:
        env = {}
    d = defer.Deferred()
    protocol = ProcessProtocol(d, timeout)
    reactor.spawnProcess(protocol, executable,
                         (executable,) + tuple(args), env, path)
    return d
Fork a process, providing a Deferred wrapper with a timeout.
53,314
def number_to_dp(number: Optional[float],
                 dp: int,
                 default: Optional[str] = "",
                 en_dash_for_minus: bool = True) -> str:
    """Format *number* to *dp* decimal places.

    Returns *default* for None and the infinity symbol for +/-inf.
    Minus signs are rendered as UTF-8 en dashes unless disabled.
    """
    if number is None:
        return default
    if number == float("inf"):
        return u"∞"
    if number == float("-inf"):
        text = u"-∞"
    else:
        text = u"{:.{precision}f}".format(number, precision=dp)
    return text.replace("-", u"–") if en_dash_for_minus else text
Format number to dp decimal places optionally using a UTF - 8 en dash for minus signs .
53,315
def debug_form_contents(form: cgi.FieldStorage,
                        to_stderr: bool = True,
                        to_logger: bool = False) -> None:
    """Write the keys and values of a CGI form to stderr and/or a logger.

    BUG FIX: each entry written to stderr is now newline-terminated;
    previously all entries ran together on a single line.
    """
    for k in form.keys():
        text = "{0} = {1}".format(k, form.getvalue(k))
        if to_stderr:
            sys.stderr.write(text + "\n")
        if to_logger:
            log.info(text)
Writes the keys and values of a CGI form to stderr .
53,316
def cgi_method_is_post(environ: Dict[str, str]) -> bool:
    """Was the CGI request method POST, per the CGI environment dict?"""
    method = environ.get("REQUEST_METHOD", None)
    return bool(method) and method.upper() == "POST"
Determines if the CGI method was POST given the CGI environment .
53,317
def get_cgi_parameter_str_or_none(form: cgi.FieldStorage,
                                  key: str) -> Optional[str]:
    """String parameter from a CGI form; None when absent or empty."""
    value = get_cgi_parameter_str(form, key)
    # "" and None both collapse to None.
    return value if value else None
Extracts a string parameter from a CGI form or None if the key doesn t exist or the string is zero - length .
53,318
def get_cgi_parameter_list(form: cgi.FieldStorage, key: str) -> List[str]:
    """Every value submitted under *key* in the CGI form."""
    values = form.getlist(key)
    return values
Extracts a list of values all with the same key from a CGI form .
53,319
def get_cgi_parameter_bool(form: cgi.FieldStorage, key: str) -> bool:
    """True iff the parameter's string value is "1"."""
    raw = get_cgi_parameter_str(form, key)
    return is_1(raw)
Extracts a boolean parameter from a CGI form on the assumption that 1 is True and everything else is False .
53,320
def get_cgi_parameter_int(form: cgi.FieldStorage, key: str) -> Optional[int]:
    """Integer value of the parameter, or None when absent/unparsable."""
    raw = get_cgi_parameter_str(form, key)
    return get_int_or_none(raw)
Extracts an integer parameter from a CGI form or None if the key is absent or the string value is not convertible to int .
53,321
def get_cgi_parameter_float(form: cgi.FieldStorage,
                            key: str) -> Optional[float]:
    """Float value of the parameter, or None when absent/unparsable."""
    raw = get_cgi_parameter_str(form, key)
    return get_float_or_none(raw)
Extracts a float parameter from a CGI form or None if the key is absent or the string value is not convertible to float .
53,322
def get_cgi_parameter_file(form: cgi.FieldStorage,
                           key: str) -> Optional[bytes]:
    """Contents of an uploaded file field, or None if none was uploaded."""
    _, filecontents = get_cgi_parameter_filename_and_file(form, key)
    return filecontents
Extracts a file s contents from a file input in a CGI form or None if no such file was uploaded .
53,323
def cgi_parameter_exists(form: cgi.FieldStorage, key: str) -> bool:
    """Does the CGI form contain *key*?"""
    return get_cgi_parameter_str(form, key) is not None
Does a CGI form contain the key?
53,324
def getenv_escaped(key: str, default: Optional[str] = None) -> Optional[str]:
    """Return an environment variable's value, HTML-escaped, or None.

    BUG FIX: ``cgi.escape`` was deprecated in Python 3.2 and removed in
    3.8; ``html.escape(..., quote=False)`` escapes the same characters
    (& < >) and is the documented replacement.
    """
    import html  # local import keeps this fix self-contained
    value = os.getenv(key, default)
    return html.escape(value, quote=False) if value is not None else None
Returns an environment variable s value CGI - escaped or None .
53,325
def get_png_data_url(blob: Optional[bytes]) -> str:
    """Convert a PNG blob into a data: URL embedding the image.

    NOTE(review): despite the Optional hint, passing None raises inside
    b64encode -- confirm callers never pass None.
    """
    encoded = base64.b64encode(blob).decode('ascii')
    return BASE64_PNG_URL_PREFIX + encoded
Converts a PNG blob into a local URL encapsulating the PNG .
53,326
def print_result_for_plain_cgi_script_from_tuple(
        contenttype_headers_content: WSGI_TUPLE_TYPE,
        status: str = '200 OK') -> None:
    """Unpack a (contenttype, headers, content) tuple and print it."""
    contenttype, headers, content = contenttype_headers_content
    print_result_for_plain_cgi_script(contenttype, headers, content, status)
Writes HTTP result to stdout .
53,327
def print_result_for_plain_cgi_script(contenttype: str,
                                      headers: TYPE_WSGI_RESPONSE_HEADERS,
                                      content: bytes,
                                      status: str = '200 OK') -> None:
    """Write an HTTP response (headers then body) to stdout.

    BUG FIX: *content* is bytes, but ``sys.stdout.write`` accepts only
    str in Python 3; the body is now written to the underlying binary
    buffer (after flushing the text layer so ordering is preserved).
    """
    headers = [("Status", status),
               ("Content-Type", contenttype),
               ("Content-Length", str(len(content)))] + headers
    sys.stdout.write("\n".join([h[0] + ": " + h[1] for h in headers]) + "\n\n")
    sys.stdout.flush()
    sys.stdout.buffer.write(content)
Writes HTTP request result to stdout .
53,328
def wsgi_simple_responder(
        result: Union[str, bytes],
        handler: Callable[[Union[str, bytes]], WSGI_TUPLE_TYPE],
        start_response: TYPE_WSGI_START_RESPONSE,
        status: str = '200 OK',
        extraheaders: TYPE_WSGI_RESPONSE_HEADERS = None) -> TYPE_WSGI_APP_RESULT:
    """Simple WSGI app: delegate to *handler*, emit headers, return body."""
    contenttype, extraheaders2, output = handler(result)
    response_headers = [('Content-Type', contenttype),
                        ('Content-Length', str(len(output)))]
    response_headers.extend(extraheaders or [])
    if extraheaders2 is not None:
        response_headers.extend(extraheaders2)
    start_response(status, response_headers)
    return [output]
Simple WSGI app .
53,329
def bold_if_not_blank(x: Optional[str]) -> str:
    """HTML-embolden content unless blank.

    NOTE(review): for None this returns the literal text "None"
    (unbolded), mirroring the original behaviour -- confirm intended.
    """
    return u"{}".format(x) if x is None else u"<b>{}</b>".format(x)
HTML - emboldens content unless blank .
53,330
def make_urls_hyperlinks(text: str) -> str:
    """Add hyperlinks to text that appears to contain URLs or e-mails.

    BUG FIX: the URL pattern was truncated to a bare ``r`` in the
    original (a NameError at runtime); a conventional http(s) matcher
    is supplied here.
    """
    find_url = r'(https?://[^\s<>"]+)'
    replace_url = r'<a href="\1">\1</a>'
    find_email = re.compile(r'([.\w\-]+@(\w[\w\-]+\.)+[\w\-]+)')
    replace_email = r'<a href="mailto:\1">\1</a>'
    text = re.sub(find_url, replace_url, text)
    text = re.sub(find_email, replace_email, text)
    return text
Adds hyperlinks to text that appears to contain URLs .
53,331
def rst_underline(heading: str, underline_char: str) -> str:
    """Underline a heading RST-style with *underline_char*."""
    assert "\n" not in heading
    assert len(underline_char) == 1
    underline = underline_char * len(heading)
    return heading + "\n" + underline
Underlines a heading for RST files .
53,332
def write_if_allowed(filename: str,
                     content: str,
                     overwrite: bool = False,
                     mock: bool = False) -> None:
    """Write *content* to *filename* if permitted.

    Refuses to overwrite existing files unless *overwrite* is set; in
    *mock* mode, logs what would happen but writes nothing.
    """
    if not overwrite and exists(filename):
        fail("File exists, not overwriting: {!r}".format(filename))
    if not mock:
        mkdir_p(dirname(filename))
    log.info("Writing to {!r}", filename)
    if mock:
        log.warning("Skipping writes as in mock mode")
        return
    with open(filename, "wt") as outfile:
        outfile.write(content)
Writes the contents to a file if permitted .
53,333
def rst_filename_rel_autodoc_index(self, index_filename: str) -> str:
    """Path of our target RST file relative to *index_filename*'s directory.

    Used so the index can refer to the RST file.
    """
    base_dir = dirname(abspath(expanduser(index_filename)))
    return relpath(self.target_rst_filename, start=base_dir)
Returns the filename of the target RST file relative to a specified index file . Used to make the index refer to the RST .
53,334
def python_module_name(self) -> str:
    """Dotted Python module name for this file, or "" if not Python."""
    if not self.is_python:
        return ""
    # Strip the extension, then turn path separators into dots.
    stem = splitext(self.source_filename_rel_python_root)[0]
    return ".".join(stem.split(sep))
Returns the name of the Python module that this instance refers to in dotted Python module notation or a blank string if it doesn t .
53,335
def write_rst(self,
              prefix: str = "",
              suffix: str = "",
              heading_underline_char: str = "=",
              method: AutodocMethod = None,
              overwrite: bool = False,
              mock: bool = False) -> None:
    """Render this file's RST and write it to the target filename."""
    rendered = self.rst_content(prefix=prefix,
                                suffix=suffix,
                                heading_underline_char=heading_underline_char,
                                method=method)
    write_if_allowed(self.target_rst_filename, rendered,
                     overwrite=overwrite, mock=mock)
Writes the RST file to our destination RST filename making any necessary directories .
53,336
def add_source_files(
        self,
        source_filenames_or_globs: Union[str, List[str]],
        method: AutodocMethod = None,
        recursive: bool = None,
        source_rst_title_style_python: bool = None,
        pygments_language_override: Dict[str, str] = None) -> None:
    # Add source files (or globs) to the index, creating one
    # FileToAutodocument entry per resolved filename.
    if not source_filenames_or_globs:
        return
    # None means "inherit the index-level default" for each option.
    if method is None:
        method = self.method
    if recursive is None:
        recursive = self.recursive
    if source_rst_title_style_python is None:
        source_rst_title_style_python = self.source_rst_title_style_python
    if pygments_language_override is None:
        pygments_language_override = self.pygments_language_override
    final_filenames = self.get_sorted_source_files(
        source_filenames_or_globs, recursive=recursive)
    for source_filename in final_filenames:
        self.files_to_index.append(FileToAutodocument(
            source_filename=source_filename,
            project_root_dir=self.project_root_dir,
            python_package_root_dir=self.python_package_root_dir,
            target_rst_filename=self.specific_file_rst_filename(
                source_filename),
            method=method,
            source_rst_title_style_python=source_rst_title_style_python,
            pygments_language_override=pygments_language_override,
        ))
Adds source files to the index .
53,337
def filename_matches_glob(filename: str, globtext: str) -> bool:
    """Glob-match *filename* against *globtext*, full path then basename.

    glob.glob handles exclusion poorly and needs rooted patterns; also
    matching the basename lets patterns like ``*.pyc`` apply anywhere.
    """
    if fnmatch(filename, globtext):
        log.debug("{!r} matches {!r}", filename, globtext)
        return True
    bname = basename(filename)
    if fnmatch(bname, globtext):
        log.debug("{!r} matches {!r}", bname, globtext)
        return True
    return False
The glob . glob function doesn t do exclusion very well . We don t want to have to specify root directories for exclusion patterns . We don t want to have to trawl a massive set of files to find exclusion files . So let s implement a glob match .
53,338
def should_exclude(self, filename) -> bool:
    """Should *filename* be excluded, per our skip globs?"""
    # any() short-circuits on the first matching pattern, like the
    # original early return.
    return any(self.filename_matches_glob(filename, pattern)
               for pattern in self.skip_globs)
Should we exclude this file from consideration?
53,339
def specific_file_rst_filename(self, source_filename: str) -> str:
    """Map a source filename to its corresponding RST filename."""
    rel_to_code_root = relative_filename_within_dir(source_filename,
                                                    self.highest_code_dir)
    result = join(self.autodoc_rst_root_dir,
                  dirname(rel_to_code_root),
                  basename(source_filename) + EXT_RST)
    log.debug("Source {!r} -> RST {!r}", source_filename, result)
    return result
Gets the RST filename corresponding to a source filename . See the help for the constructor for more details .
53,340
def write_index_and_rst_files(self, overwrite: bool = False,
                              mock: bool = False) -> None:
    """Write each entry's RST (recursing into sub-indexes), then the index."""
    for entry in self.files_to_index:
        if isinstance(entry, FileToAutodocument):
            entry.write_rst(
                prefix=self.rst_prefix,
                suffix=self.rst_suffix,
                heading_underline_char=self.source_rst_heading_underline_char,
                overwrite=overwrite,
                mock=mock,
            )
        elif isinstance(entry, AutodocIndex):
            # Nested index: recurse.
            entry.write_index_and_rst_files(overwrite=overwrite, mock=mock)
        else:
            fail("Unknown thing in files_to_index: {!r}".format(entry))
    self.write_index(overwrite=overwrite, mock=mock)
Writes both the individual RST files and the index .
53,341
def write_index(self, overwrite: bool = False, mock: bool = False) -> None:
    """Write the index file, respecting overwrite/mock settings."""
    write_if_allowed(self.index_filename,
                     self.index_content(),
                     overwrite=overwrite,
                     mock=mock)
Writes the index file if permitted .
53,342
def basic_animation(frames=100, interval=30):
    """Plot a basic sine wave with oscillating amplitude."""
    fig = plt.figure()
    ax = plt.axes(xlim=(0, 10), ylim=(-2, 2))
    line, = ax.plot([], [], lw=2)
    xs = np.linspace(0, 10, 1000)

    def init():
        # Start from an empty line.
        line.set_data([], [])
        return line,

    def animate(frame):
        phase = frame * 0.02 * np.pi
        ys = np.cos(phase) * np.sin(xs - phase)
        line.set_data(xs, ys)
        return line,

    return animation.FuncAnimation(fig, animate, init_func=init,
                                   frames=frames, interval=interval)
Plot a basic sine wave with oscillating amplitude
53,343
def lorenz_animation(N_trajectories=20, rseed=1, frames=200, interval=30):
    # Plot a 3D visualization of the dynamics of the Lorenz system:
    # N_trajectories random initial points integrated through the
    # Lorenz ODEs, drawn as trails plus moving head markers.
    from scipy import integrate
    from mpl_toolkits.mplot3d import Axes3D
    from matplotlib.colors import cnames

    def lorentz_deriv(coords, t0, sigma=10., beta=8. / 3, rho=28.0):
        # Standard Lorenz derivatives (dx/dt, dy/dt, dz/dt).
        x, y, z = coords
        return [sigma * (y - x), x * (rho - z) - y, x * y - beta * z]

    # Random initial conditions in [-15, 15)^3.
    np.random.seed(rseed)
    x0 = -15 + 30 * np.random.random((N_trajectories, 3))
    t = np.linspace(0, 2, 500)
    x_t = np.asarray([integrate.odeint(lorentz_deriv, x0i, t)
                      for x0i in x0])
    fig = plt.figure()
    ax = fig.add_axes([0, 0, 1, 1], projection='3d')
    ax.axis('off')
    # One color per trajectory; each gets a trail line and a point.
    colors = plt.cm.jet(np.linspace(0, 1, N_trajectories))
    lines = sum([ax.plot([], [], [], '-', c=c) for c in colors], [])
    pts = sum([ax.plot([], [], [], 'o', c=c, ms=4) for c in colors], [])
    ax.set_xlim((-25, 25))
    ax.set_ylim((-35, 35))
    ax.set_zlim((5, 55))
    ax.view_init(30, 0)

    def init():
        for line, pt in zip(lines, pts):
            line.set_data([], [])
            line.set_3d_properties([])
            pt.set_data([], [])
            pt.set_3d_properties([])
        return lines + pts

    def animate(i):
        # Step two integration samples per animation frame, wrapping.
        i = (2 * i) % x_t.shape[1]
        for line, pt, xi in zip(lines, pts, x_t):
            x, y, z = xi[:i + 1].T
            line.set_data(x, y)
            line.set_3d_properties(z)
            pt.set_data(x[-1:], y[-1:])
            pt.set_3d_properties(z[-1:])
        # Slowly rotate the camera as the animation runs.
        ax.view_init(30, 0.3 * i)
        fig.canvas.draw()
        return lines + pts

    return animation.FuncAnimation(fig, animate, init_func=init,
                                   frames=frames, interval=interval)
Plot a 3D visualization of the dynamics of the Lorenz system
53,344
def _included_frames(frame_list, frame_format):
    """Render the INCLUDED_FRAMES template; frame_list is filenames."""
    return INCLUDED_FRAMES.format(Nframes=len(frame_list),
                                  frame_dir=os.path.dirname(frame_list[0]),
                                  frame_format=frame_format)
frame_list should be a list of filenames
53,345
def _embedded_frames ( frame_list , frame_format ) : template = ' frames[{0}] = "data:image/{1};base64,{2}"\n' embedded = "\n" for i , frame_data in enumerate ( frame_list ) : embedded += template . format ( i , frame_format , frame_data . replace ( '\n' , '\\\n' ) ) return embedded
frame_list should be a list of base64 - encoded png files
53,346
def remove_noise(time, magnitude, error, error_limit=3, std_limit=5):
    """Drop noisy photometric points.

    A point is kept when its error is below ``error_limit`` times the
    mean error AND it lies within ``std_limit`` standard deviations of
    the mean magnitude.  Returns filtered (time, magnitude, error)
    numpy arrays.

    IMPROVEMENT: the original per-point Python loop is replaced with an
    equivalent vectorized numpy mask.
    """
    time = np.asarray(time)
    data = np.asarray(magnitude)
    error = np.asarray(error)
    error_mean = np.mean(error)
    # `or 1` guards against an all-zero error column.
    error_tolerance = error_limit * (error_mean or 1)
    data_mean = np.mean(data)
    data_std = np.std(data)
    keep = ((error < error_tolerance) &
            (np.absolute(data - data_mean) / data_std < std_limit))
    return time[keep], data[keep], error[keep]
Points within std_limit standard deviations from the mean and with errors greater than error_limit times the error mean are considered as noise and thus are eliminated .
53,347
def align(time, time2, magnitude, magnitude2, error, error2):
    """Synchronize two light-curves to their common observation times.

    Missing error arrays are treated as zeros.  Returns the shared
    times plus both magnitude and error series restricted to them.
    """
    if error is None:
        error = np.zeros(time.shape)
    if error2 is None:
        error2 = np.zeros(time2.shape)
    first = pd.DataFrame({"mag": magnitude, "error": error}, index=time)
    second = pd.DataFrame({"mag": magnitude2, "error": error2}, index=time2)
    # Join with the shorter series on the left, remembering the swap so
    # outputs can be returned in the caller's order.
    swapped = len(time) > len(time2)
    if swapped:
        first, second = second, first
    merged = first.join(second, how="inner", rsuffix='2')
    new_time = merged.index.values
    mag_a, mag_b = merged.mag.values, merged.mag2.values
    err_a, err_b = merged.error.values, merged.error2.values
    if swapped:
        mag_a, mag_b = mag_b, mag_a
        err_a, err_b = err_b, err_a
    return new_time, mag_a, mag_b, err_a, err_b
Synchronizes the light - curves in the two different bands .
53,348
def load_OGLE3_catalog():
    """Return the full list of OGLE-3 variable stars as a DataFrame.

    MODERNIZATION: ``pd.read_table`` is deprecated (removed in pandas
    2.x); ``pd.read_csv(..., sep="\\t")`` is the documented equivalent.
    """
    with bz2.BZ2File(CATALOG_PATH) as bz2fp, warnings.catch_warnings():
        warnings.simplefilter("ignore")
        df = pd.read_csv(bz2fp, sep="\t", skiprows=6)
    # The catalogue header spells the id column "# ID"; normalize it.
    df.rename(columns={"# ID": "ID"}, inplace=True)
    return df
Return the full list of variables stars of OGLE - 3 as a DataFrame
53,349
def fetch_OGLE3(ogle3_id, data_home=None, metadata=None,
                download_if_missing=True):
    # Retrieve a light curve from the OGLE-3 database: download (unless
    # cached) a tarball of per-band .dat files, parse the I and V bands
    # when present, and wrap everything in a Data object.
    store_path = _get_OGLE3_data_home(data_home)
    file_path = os.path.join(store_path, "{}.tar".format(ogle3_id))
    # Band -> expected member path inside the tarball.
    members = {"I": "./{}.I.dat".format(ogle3_id),
               "V": "./{}.V.dat".format(ogle3_id)}
    if download_if_missing:
        url = URL.format(ogle3_id)
        base.fetch(url, file_path)
    bands = []
    data = {}
    with tarfile.TarFile(file_path) as tfp:
        members_names = tfp.getnames()
        for band_name, member_name in members.items():
            if member_name in members_names:
                member = tfp.getmember(member_name)
                src = tfp.extractfile(member)
                # Columns: time, magnitude, error.
                lc = _check_dim(np.loadtxt(src))
                data[band_name] = {"time": lc[:, 0],
                                   "magnitude": lc[:, 1],
                                   "error": lc[:, 2]}
                bands.append(band_name)
    if metadata:
        # Replace the truthy `metadata` flag with this star's row from
        # the full catalogue.
        cat = load_OGLE3_catalog()
        metadata = cat[cat.ID == ogle3_id].iloc[0].to_dict()
        del cat
    return Data(id=ogle3_id, metadata=metadata, ds_name="OGLE-III",
                description=DESCR, bands=bands, data=data)
Retrieve a light curve from the OGLE-3 database.
53,350
def sort_by_dependencies(exts, retry=None):
    # Calculate the feature-extractor resolution order: an extractor is
    # emitted only once every feature it depends on has been produced
    # by an earlier extractor.
    sorted_ext, features_from_sorted = [], set()
    pending = [(e, 0) for e in exts]
    # Each extractor may be requeued at most `retry` times before the
    # dependency graph is declared unsatisfiable.
    retry = len(exts) * 100 if retry is None else retry
    while pending:
        ext, cnt = pending.pop(0)
        if not isinstance(ext, Extractor) and not issubclass(ext, Extractor):
            msg = "Only Extractor instances are allowed. Found {}."
            raise TypeError(msg.format(type(ext)))
        deps = ext.get_dependencies()
        if deps.difference(features_from_sorted):
            # Unmet dependencies: push to the back and try again later.
            if cnt + 1 > retry:
                msg = "Maximun retry ({}) to sort achieved from extractor {}."
                raise RuntimeError(msg.format(retry, type(ext)))
            pending.append((ext, cnt + 1))
        else:
            sorted_ext.append(ext)
            features_from_sorted.update(ext.get_features())
    return tuple(sorted_ext)
Calculate the Feature Extractor Resolution Order .
53,351
def getSignificance(wk1, wk2, nout, ofac):
    """Peak false-alarm probabilities for a periodogram.

    The lower the probability, the more significant the peak.
    NOTE(review): ``wk1`` is unused here -- presumably kept for API
    symmetry with the periodogram routine; confirm.
    """
    decay = exp(-wk2)
    n_eff = 2.0 * (nout) / ofac
    sig = n_eff * decay
    # For non-tiny values, use the exact (1 - (1-p)^M) form instead of
    # the first-order approximation M*p.
    large = (sig > 0.01).nonzero()
    sig[large] = 1.0 - (1.0 - decay[large]) ** n_eff
    return sig
Returns the peak false-alarm probabilities; the lower the probability, the more significant the peak.
53,352
def fetch(url, dest, force=False):
    # Retrieve data from `url` and store it into the file `dest`.
    # Returns (cached, dest): cached is True when an existing file was
    # reused and no download was attempted.
    cached = True
    if force or not os.path.exists(dest):
        cached = False
        r = requests.get(url, stream=True)
        # NOTE(review): non-200 responses are silently ignored (no file
        # written, no error raised) and the response object is never
        # closed -- confirm this best-effort behaviour is intended.
        if r.status_code == 200:
            with open(dest, 'wb') as f:
                for chunk in r.iter_content(1024):
                    f.write(chunk)
    return cached, dest
Retrieve data from an url and store it into dest .
53,353
def create_random(magf, magf_params, errf, errf_params,
                  timef=np.linspace, timef_params=None, size=DEFAULT_SIZE,
                  id=None, ds_name=DS_NAME, description=DESCRIPTION,
                  bands=BANDS, metadata=METADATA):
    # Generate a synthetic light-curve Data object from arbitrary
    # generator callables: magf/errf/timef produce the magnitude, error
    # and time arrays for each band; *_params are their kwargs.
    # NOTE(review): `id` shadows the builtin; kept for API compatibility.
    timef_params = (
        {"start": 0., "stop": 1.}
        if timef_params is None else
        timef_params.copy())
    timef_params.update(num=size)
    # Copy before mutating so callers' dicts are untouched.
    magf_params = magf_params.copy()
    magf_params.update(size=size)
    errf_params = errf_params.copy()
    errf_params.update(size=size)
    data = {}
    for band in bands:
        data[band] = {"time": timef(**timef_params),
                      "magnitude": magf(**magf_params),
                      "error": errf(**errf_params)}
    return Data(id=id, ds_name=ds_name, description=description,
                bands=bands, metadata=metadata, data=data)
Generate a data with any given random function .
53,354
def create_normal(mu=0., sigma=1., mu_err=0., sigma_err=1.,
                  seed=None, **kwargs):
    """Synthetic data with Gaussian magnitudes and Gaussian errors."""
    rng = np.random.RandomState(seed)
    return create_random(
        magf=rng.normal,
        magf_params={"loc": mu, "scale": sigma},
        errf=rng.normal,
        errf_params={"loc": mu_err, "scale": sigma_err},
        **kwargs)
Generate a data with magnitudes that follows a Gaussian distribution . Also their errors are gaussian .
53,355
def create_uniform(low=0., high=1., mu_err=0., sigma_err=1.,
                   seed=None, **kwargs):
    """Synthetic data with uniform magnitudes and Gaussian errors."""
    rng = np.random.RandomState(seed)
    return create_random(
        magf=rng.uniform,
        magf_params={"low": low, "high": high},
        errf=rng.normal,
        errf_params={"loc": mu_err, "scale": sigma_err},
        **kwargs)
Generate a data with magnitudes that follows a uniform distribution ; the error instead are gaussian .
53,356
def create_periodic(mu_err=0., sigma_err=1., seed=None, **kwargs):
    # Generate synthetic data with periodic (sinusoidal) magnitude
    # variability and Gaussian errors.
    random = np.random.RandomState(seed)
    size = kwargs.get("size", DEFAULT_SIZE)
    times, mags, errors = [], [], []
    # Pre-generate one (time, error, magnitude) triple per band ...
    for b in kwargs.get("bands", BANDS):
        time = 100 * random.rand(size)
        error = random.normal(size=size, loc=mu_err, scale=sigma_err)
        # NOTE(review): the per-point error is used as the amplitude of
        # an extra randn noise term on the signal -- confirm this noise
        # model is intended.
        mag = np.sin(2 * np.pi * time) + error * random.randn(size)
        times.append(time)
        errors.append(error)
        mags.append(mag)
    # ... then hand them to create_random through iterators so that
    # each band consumes the next pre-built array.
    times, mags, errors = iter(times), iter(mags), iter(errors)
    return create_random(
        magf=lambda **k: next(mags), magf_params={},
        errf=lambda **k: next(errors), errf_params={},
        timef=lambda **k: next(times), timef_params={}, **kwargs)
Generate a data with magnitudes with periodic variability distribution ; the error instead are gaussian .
53,357
def pdf_single(z, N, normalization, dH=1, dK=3):
    """Probability density function for the Lomb-Scargle periodogram.

    Only the dK - dH == 2 case is implemented.  Raises ValueError for
    an unknown normalization.
    """
    if dK - dH != 2:
        raise NotImplementedError("Degrees of freedom != 2")
    Nk = N - dK
    if normalization == 'psd':
        return np.exp(-z)
    if normalization == 'standard':
        return 0.5 * Nk * (1 - z) ** (0.5 * Nk - 1)
    if normalization == 'model':
        return 0.5 * Nk * (1 + z) ** (-0.5 * Nk - 1)
    if normalization == 'log':
        return 0.5 * Nk * np.exp(-0.5 * Nk * z)
    raise ValueError("normalization='{0}' is not recognized"
                     "".format(normalization))
Probability density function for Lomb - Scargle periodogram
53,358
def cdf_single(z, N, normalization, dH=1, dK=3):
    """Cumulative distribution for the Lomb-Scargle periodogram."""
    fap = fap_single(z, N, normalization=normalization, dH=dH, dK=dK)
    return 1 - fap
Cumulative distribution for the Lomb - Scargle periodogram
53,359
def fap_simple(Z, fmax, t, y, dy, normalization='standard'):
    """False-alarm probability from an estimated count of independent
    frequencies (N_eff = fmax * baseline)."""
    n_obs = len(t)
    baseline = max(t) - min(t)
    n_eff = fmax * baseline
    p_single = cdf_single(Z, n_obs, normalization=normalization)
    return 1 - p_single ** n_eff
False Alarm Probability based on estimated number of indep frequencies
53,360
def fap_davies(Z, fmax, t, y, dy, normalization='standard'):
    """Davies upper bound to the false-alarm probability."""
    single = fap_single(Z, len(t), normalization=normalization)
    tau = tau_davies(Z, fmax, t, y, dy, normalization=normalization)
    return single + tau
Davies upper - bound to the false alarm probability
53,361
def fap_baluev(Z, fmax, t, y, dy, normalization='standard'):
    """Alias-free (Baluev) approximation to the false-alarm probability."""
    single_cdf = cdf_single(Z, len(t), normalization)
    tau = tau_davies(Z, fmax, t, y, dy, normalization=normalization)
    return 1 - single_cdf * np.exp(-tau)
Alias - free approximation to false alarm probability
53,362
def false_alarm_probability(Z, fmax, t, y, dy, normalization,
                            method='baluev', method_kwds=None):
    """Approximate the false-alarm probability by the chosen method."""
    if method not in METHODS:
        raise ValueError("Unrecognized method: {0}".format(method))
    fap_func = METHODS[method]
    kwds = method_kwds or {}
    return fap_func(Z, fmax, t, y, dy, normalization, **kwds)
Approximate the False Alarm Probability
53,363
def anim_to_html(anim, fps=None, embed_frames=True, default_mode='loop'):
    """Generate (and cache on the object) an HTML rendering of *anim*.

    BUG FIX: the temporary HTML file is now read via a context manager;
    the original leaked the handle from ``open(f.name).read()``.
    """
    if fps is None and hasattr(anim, '_interval'):
        # Convert interval (ms per frame) to frames per second.
        fps = 1000. / anim._interval
    plt.close(anim._fig)
    if hasattr(anim, "_html_representation"):
        return anim._html_representation
    with _NameOnlyTemporaryFile(suffix='.html') as f:
        anim.save(f.name, writer=HTMLWriter(fps=fps,
                                            embed_frames=embed_frames,
                                            default_mode=default_mode))
        with open(f.name) as html_file:
            html = html_file.read()
    anim._html_representation = html
    return html
Generate HTML representation of the animation
53,364
def display_animation(anim, **kwargs):
    """Display the animation via an IPython HTML object."""
    from IPython.display import HTML
    rendered = anim_to_html(anim, **kwargs)
    return HTML(rendered)
Display the animation with an IPython HTML object
53,365
def indent(s, c=" ", n=4):
    """Indent each line of *s* with the character *c* repeated *n* times."""
    prefix = c * n
    return "\n".join(prefix + line for line in s.splitlines())
Indent the string s with the character c n times .
53,366
def generate_date_tail_boost_queries(field, timedeltas_and_boosts, relative_to=None):
    """Generate a list of RangeQueries usable to boost the scores of more recent documents.

    Parameters:
        field: name of the date field the range queries filter on.
        timedeltas_and_boosts: mapping ``timedelta -> boost``; each timedelta
            is subtracted from ``relative_to`` to form an absolute cutoff date.
        relative_to: reference datetime, defaulting to ``datetime.datetime.now()``.

    Returns:
        list of RangeQuery objects, one per date interval with a boost > 0.
    """
    relative_to = relative_to or datetime.datetime.now()
    # Map each cutoff date to its boost (later duplicates of a date win).
    times = {}
    for timedelta, boost in timedeltas_and_boosts.items():
        date = (relative_to - timedelta).date()
        times[date] = boost
    # Oldest cutoff first so adjacent entries bound each interval.
    times = sorted(times.items(), key=lambda i: i[0])
    queries = []
    for (x, time) in enumerate(times):
        kwargs = {"field": field, "boost": time[1]}
        if x == 0:
            kwargs["lte"] = time[0]
        else:
            kwargs["gt"] = time[0]
        # NOTE(review): reconstructed nesting — this upper bound is applied to
        # every non-final interval; confirm against the original layout.
        if x < len(times) - 1:
            kwargs["lte"] = times[x + 1][0]
        # Zero/negative boosts produce no query at all.
        if kwargs["boost"] > 0:
            q = RangeQuery()
            q.add_range(**kwargs)
            queries.append(q)
    return queries
Generate a list of RangeQueries usable to boost the scores of more recent documents .
53,367
def batch_iterable(iterable, count):
    """Yield successive lists of up to ``count`` items from ``iterable``.

    Yields nothing when ``count`` is zero or negative; the final batch may
    be shorter than ``count``.
    """
    if count <= 0:
        return
    batch = []
    for element in iterable:
        batch.append(element)
        if len(batch) == count:
            yield batch
            batch = []
    if batch:
        yield batch
Yield batches of count items from the given iterable .
53,368
def validate_nb(nb):
    """Return True when the notebook JSON is nbformat 4 with a Python kernel."""
    if nb['nbformat'] != 4:
        return False
    kernelspec = nb.get('metadata', {}).get('kernelspec', {})
    return kernelspec.get('language', '').lower() == 'python'
Validate that given notebook JSON is importable
53,369
def filter_ast(module_ast):
    """Filters a given module ast, removing non-whitelisted nodes.

    Keeps nodes whose type is in ALLOWED_NODES, plus assignments whose
    named targets are all upper-case (constant-style).  Mutates and
    returns ``module_ast``.
    """
    def node_predicate(node):
        # Whitelisted node types pass unconditionally.
        for an in ALLOWED_NODES:
            if isinstance(node, an):
                return True
        if isinstance(node, ast.Assign):
            # Targets without an ``id`` (e.g. tuple/attribute targets) are
            # vetted recursively; named targets must be upper-case.
            return all([node_predicate(t)
                        for t in node.targets if not hasattr(t, 'id')]) and \
                all([t.id.isupper()
                     for t in node.targets if hasattr(t, 'id')])
        return False
    module_ast.body = [n for n in module_ast.body if node_predicate(n)]
    return module_ast
Filters a given module ast removing non - whitelisted nodes
53,370
def code_from_ipynb(nb, markdown=False):
    """Get the Python code for a given notebook.

    Code-cell sources are concatenated verbatim after PREAMBLE; markdown
    cells are rendered as ``#`` comments, but only when ``markdown`` is
    True.  (Fix: the original accepted a ``markdown`` flag but ignored it,
    always including markdown cells.)
    """
    code = PREAMBLE
    for cell in nb['cells']:
        if cell['cell_type'] == 'code':
            code += ''.join(cell['source'])
        elif markdown and cell['cell_type'] == 'markdown':
            code += '\n# ' + '# '.join(cell['source'])
        code += '\n\n'
    return code
Get the code for a given notebook
53,371
def _get_paths ( self , fullname ) : real_path = os . path . join ( * fullname [ len ( self . package_prefix ) : ] . split ( '.' ) ) for base_path in sys . path : if base_path == '' : base_path = os . getcwd ( ) path = os . path . join ( base_path , real_path ) yield path + '.ipynb' yield path + '.py' yield os . path . join ( path , '__init__.ipynb' ) yield os . path . join ( path , '__init__.py' )
Generate ordered list of paths we should look for fullname module in
53,372
def find_spec(self, fullname, path, target=None):
    """Claim modules under ``self.package_prefix``.

    Returns a ModuleSpec for the first candidate path that exists on disk;
    implicitly returns None for foreign names or missing files.
    """
    if not fullname.startswith(self.package_prefix):
        return
    for candidate in self._get_paths(fullname):
        if not os.path.exists(candidate):
            continue
        is_pkg = candidate.endswith(('__init__.ipynb', '__init__.py'))
        return ModuleSpec(
            name=fullname,
            loader=self.loader_class(fullname, candidate),
            origin=candidate,
            is_package=is_pkg,
        )
Claims modules that are under ipynb . fs
53,373
def coroutine(func):
    """Decorator for priming generator-based coroutines.

    The wrapped function's generator is advanced to its first ``yield`` so
    callers may ``send`` to it immediately.
    """
    @wraps(func)
    def primed(*args, **kwargs):
        gen = func(*args, **kwargs)
        gen.send(None)  # advance to the first yield
        return gen
    return primed
Decorator for priming generator - based coroutines .
53,374
async def ticker(delay, to):
    """Asynchronously yield the integers 0..to-1, sleeping ``delay`` seconds after each."""
    for value in range(to):
        yield value
        await asyncio.sleep(delay)
Yield the numbers 0 through ``to`` - 1, pausing ``delay`` seconds after each .
53,375
def rprint(sep='\n', end='\n', file=sys.stdout, flush=False):
    """A coroutine sink which prints received items to ``file``.

    The first item is written bare; every later item is preceded by ``sep``.
    ``end`` is written when the coroutine is closed.
    """
    def _flush_maybe():
        if flush:
            file.flush()
    try:
        file.write(str((yield)))
        _flush_maybe()
        while True:
            item = yield
            file.write(sep)
            file.write(str(item))
            _flush_maybe()
    except GeneratorExit:
        file.write(end)
        _flush_maybe()
A coroutine sink which prints received items to a file ( stdout by default ) .
53,376
def iterable_source(iterable, target):
    """Feed each item of ``iterable`` into the ``target`` coroutine.

    When the target stops accepting (raises StopIteration on send), the
    unsent item is pushed back onto the remaining stream and that stream is
    returned; otherwise an empty iterator is returned.
    """
    iterator = iter(iterable)
    while True:
        try:
            item = next(iterator)
        except StopIteration:
            return empty_iter()
        try:
            target.send(item)
        except StopIteration:
            return prepend(item, iterator)
Convert an iterable into a stream of events .
53,377
def poisson_source(rate, iterable, target):
    """Send items to ``target`` at exponentially distributed intervals.

    ``rate`` is the expected number of events per unit time; it must be
    positive.  Returns the unconsumed remainder of the stream when the
    target stops accepting, otherwise an empty iterator.
    """
    if rate <= 0.0:
        raise ValueError("poisson_source rate {} is not positive".format(rate))
    iterator = iter(iterable)
    while True:
        try:
            item = next(iterator)
        except StopIteration:
            return empty_iter()
        sleep(random.expovariate(rate))
        try:
            target.send(item)
        except StopIteration:
            return prepend(item, iterator)
Send events at random times with exponentially distributed inter - arrival intervals ( a Poisson process ) .
53,378
def compose(f, *fs):
    """Compose functions right to left: compose(f, g, h)(x) == f(g(h(x)))."""
    functions = (f,) + fs

    def composed(*args, **kwargs):
        # Rightmost function receives the raw arguments ...
        result = functions[-1](*args, **kwargs)
        # ... and each remaining function wraps the previous result.
        for fn in reversed(functions[:-1]):
            result = fn(result)
        return result
    return composed
Compose functions right to left .
53,379
def reducing(reducer, init=UNSET):
    """Create a reducing transducer with the given reducer and optional seed."""
    step_reducer = reducer

    def transduce(downstream):
        return Reducing(downstream, step_reducer, init)
    return transduce
Create a reducing transducer with the given reducer .
53,380
def scanning(reducer, init=UNSET):
    """Create a scanning (running-reduction) transducer with optional seed."""
    step_reducer = reducer

    def transduce(downstream):
        return Scanning(downstream, step_reducer, init)
    return transduce
Create a scanning reducer .
53,381
def taking(n):
    """Create a transducer which passes through only the first ``n`` items."""
    if n < 0:
        raise ValueError("Cannot take fewer than zero ({}) items".format(n))

    def transduce(downstream):
        return Taking(downstream, n)
    return transduce
Create a transducer which takes the first n items
53,382
def dropping(n):
    """Create a transducer which discards the first ``n`` items."""
    if n < 0:
        raise ValueError("Cannot drop fewer than zero ({}) items".format(n))

    def transduce(downstream):
        return Dropping(downstream, n)
    return transduce
Create a transducer which drops the first n items
53,383
def batching(size):
    """Create a transducer which groups items into non-overlapping batches."""
    if size < 1:
        raise ValueError("batching() size must be at least 1")

    def transduce(downstream):
        return Batching(downstream, size)
    return transduce
Create a transducer which produces non - overlapping batches .
53,384
def windowing(size, padding=UNSET, window_type=tuple):
    """Create a transducer producing a moving window of ``size`` items.

    ``padding`` (when given) fills partial windows; ``window_type`` is the
    container constructor used for each window.
    """
    if size < 1:
        raise ValueError("windowing() size {} is not at least 1".format(size))

    def transduce(downstream):
        return Windowing(downstream, size, padding, window_type)
    return transduce
Create a transducer which produces a moving window over items .
53,385
def first(predicate=None):
    """Create a transducer which yields the first matching item then terminates.

    A ``predicate`` of None matches every item.
    """
    pred = true if predicate is None else predicate

    def transduce(downstream):
        return First(downstream, pred)
    return transduce
Create a transducer which obtains the first item then terminates .
53,386
def last(predicate=None):
    """Create a transducer which yields the last matching item.

    A ``predicate`` of None matches every item.
    """
    pred = true if predicate is None else predicate

    def transduce(downstream):
        return Last(downstream, pred)
    return transduce
Create a transducer which obtains the last item .
53,387
def element_at(index):
    """Create a transducer which yields only the item at ``index``."""
    if index < 0:
        raise IndexError("element_at used with illegal index {}".format(index))

    def transduce(downstream):
        return ElementAt(downstream, index)
    return transduce
Create a transducer which obtains the item at the specified index .
53,388
def compile_sources(files, CompilerRunner_=None, destdir=None, cwd=None,
                    keep_dir_struct=False, per_file_kwargs=None, **kwargs):
    """Compile source code files to object files.

    Parameters
    ----------
    files : iterable of source file paths.
    CompilerRunner_ : class, optional; forwarded to src2obj (which infers
        a runner from the extension when None).
    destdir : path, optional; output directory, created when missing.
    cwd : path, optional; working directory for compilation (default '.').
    keep_dir_struct : bool; keep each source's directory in the name.
    per_file_kwargs : dict, optional; per-file overrides keyed by path,
        ``Glob`` pattern or ``ArbitraryDepthGlob`` filename.
    **kwargs : default keyword arguments passed to src2obj for every file.

    Returns
    -------
    list of object-file paths (one per source).
    """
    # Expand glob-style keys into a concrete path -> kwargs mapping.
    _per_file_kwargs = {}
    if per_file_kwargs is not None:
        for k, v in per_file_kwargs.items():
            if isinstance(k, Glob):
                for path in glob.glob(k.pathname):
                    _per_file_kwargs[path] = v
            elif isinstance(k, ArbitraryDepthGlob):
                for path in glob_at_depth(k.filename, cwd):
                    _per_file_kwargs[path] = v
            else:
                _per_file_kwargs[k] = v
    destdir = destdir or '.'
    if not os.path.isdir(destdir):
        if os.path.exists(destdir):
            raise IOError("{} is not a directory".format(destdir))
        else:
            make_dirs(destdir)
    if cwd is None:
        cwd = '.'
    # Mirror the sources into destdir (only when newer) before compiling.
    for f in files:
        copy(f, destdir, only_update=True, dest_is_dir=True)
    dstpaths = []
    for f in files:
        # NOTE(review): name/ext appear unused — src2obj derives the object
        # name itself; confirm before removing.
        if keep_dir_struct:
            name, ext = os.path.splitext(f)
        else:
            name, ext = os.path.splitext(os.path.basename(f))
        file_kwargs = kwargs.copy()
        file_kwargs.update(_per_file_kwargs.get(f, {}))
        dstpaths.append(src2obj(f, CompilerRunner_, cwd=cwd, **file_kwargs))
    return dstpaths
Compile source code files to object files .
53,389
def link(obj_files, out_file=None, shared=False, CompilerRunner_=None,
         cwd=None, cplus=False, fort=False, **kwargs):
    """Link object files into an executable or shared library.

    Parameters
    ----------
    obj_files : list of object-file paths.
    out_file : path, optional; derived from the last object file when None.
    shared : bool; build a shared library (adds '-shared' and suffix).
    CompilerRunner_ : class, optional; chosen from ``fort``/``cplus`` when
        not given.
    cwd : path, optional.
    cplus, fort : bool; select C++ / mixed-Fortran linking.
    **kwargs : runner options; 'flags' and 'run_linker' are consumed here.

    Returns
    -------
    Absolute path of the linked output file.
    """
    if out_file is None:
        out_file, ext = os.path.splitext(os.path.basename(obj_files[-1]))
        if shared:
            # Platform-specific shared-library suffix.
            out_file += sharedext
    if not CompilerRunner_:
        if fort:
            # Mixed Fortran/C(++) links need vendor-specific extra kwargs.
            CompilerRunner_, extra_kwargs, vendor = get_mixed_fort_c_linker(
                vendor=kwargs.get('vendor', None),
                metadir=kwargs.get('metadir', None),
                cplus=cplus,
                cwd=cwd,
            )
            for k, v in extra_kwargs.items():
                expand_collection_in_dict(kwargs, k, v)
        else:
            if cplus:
                CompilerRunner_ = CppCompilerRunner
            else:
                CompilerRunner_ = CCompilerRunner
    flags = kwargs.pop('flags', [])
    if shared:
        if '-shared' not in flags:
            flags.append('-shared')
        # macOS: defer undefined-symbol resolution to load time.
        dl_flag = '-undefined dynamic_lookup'
        if sys.platform == 'darwin' and dl_flag not in flags:
            flags.append(dl_flag)
    run_linker = kwargs.pop('run_linker', True)
    if not run_linker:
        raise ValueError("link(..., run_linker=False)!?")
    out_file = get_abspath(out_file, cwd=cwd)
    runner = CompilerRunner_(obj_files, out_file, flags, cwd=cwd, **kwargs)
    runner.run()
    return out_file
Link object files .
53,390
def simple_cythonize(src, destdir=None, cwd=None, logger=None,
                     full_module_name=None, only_update=False, **cy_kwargs):
    """Generates a C (or C++) file from a Cython source file.

    Parameters
    ----------
    src : path of the '.pyx' (or '.py') source.
    destdir : path, optional; destination directory (default '.').
    cwd : path, optional; directory to run Cython in (default '.').
    logger : logging.Logger, optional.
    full_module_name : str, optional; passed to Cython's compile().
    only_update : bool; skip work when the target is newer than ``src``.
    **cy_kwargs : Cython CompilationOptions; 'cplus'=True yields '.cpp'.

    Returns
    -------
    Path of the generated C/C++ file.
    """
    from Cython.Compiler.Main import (
        default_options, CompilationOptions
    )
    from Cython.Compiler.Main import compile as cy_compile
    assert src.lower().endswith('.pyx') or src.lower().endswith('.py')
    cwd = cwd or '.'
    destdir = destdir or '.'
    ext = '.cpp' if cy_kwargs.get('cplus', False) else '.c'
    c_name = os.path.splitext(os.path.basename(src))[0] + ext
    dstfile = os.path.join(destdir, c_name)
    if only_update:
        if not missing_or_other_newer(dstfile, src, cwd=cwd):
            msg = '{0} newer than {1}, did not re-cythonize.'.format(dstfile, src)
            if logger:
                logger.info(msg)
            else:
                print(msg)
            return dstfile
    # cwd is always truthy here ('.' fallback above), so chdir always runs.
    if cwd:
        ori_dir = os.getcwd()
    else:
        ori_dir = '.'
    os.chdir(cwd)
    try:
        cy_options = CompilationOptions(default_options)
        cy_options.__dict__.update(cy_kwargs)
        if logger:
            logger.info("Cythonizing {0} to {1}".format(src, dstfile))
        cy_result = cy_compile([src], cy_options, full_module_name=full_module_name)
        if cy_result.num_errors > 0:
            raise ValueError("Cython compilation failed.")
        # Cython writes next to the source; relocate when destdir differs.
        if os.path.abspath(os.path.dirname(src)) != os.path.abspath(destdir):
            if os.path.exists(dstfile):
                os.unlink(dstfile)
            shutil.move(os.path.join(os.path.dirname(src), c_name), destdir)
    finally:
        # Always restore the original working directory.
        os.chdir(ori_dir)
    return dstfile
Generates a C file from a Cython source file .
53,391
def src2obj(srcpath, CompilerRunner_=None, objpath=None, only_update=False,
            cwd=None, out_ext=None, inc_py=False, **kwargs):
    """Compiles a source code file to an object file.

    Files ending with .pyx are assumed to be cython files and are
    dispatched to pyx2obj.

    Parameters
    ----------
    srcpath : path of the source file.
    CompilerRunner_ : class, optional; inferred from the extension when None.
    objpath : path, optional; defaults near the source ('.' for absolute
        source paths).
    only_update : bool; skip compilation when the object file is current.
    cwd : path, optional.
    out_ext : str, optional object-file extension (platform default).
    inc_py : bool; append the Python include dir to include_dirs.
    **kwargs : forwarded to the compiler runner.

    Returns
    -------
    Path of the object file.
    """
    name, ext = os.path.splitext(os.path.basename(srcpath))
    if objpath is None:
        if os.path.isabs(srcpath):
            objpath = '.'
        else:
            objpath = os.path.dirname(srcpath)
    # NOTE(review): reconstructed nesting — guards against objpath == ''.
    objpath = objpath or '.'
    out_ext = out_ext or objext
    if os.path.isdir(objpath):
        objpath = os.path.join(objpath, name + out_ext)
    include_dirs = kwargs.pop('include_dirs', [])
    if inc_py:
        from distutils.sysconfig import get_python_inc
        py_inc_dir = get_python_inc()
        if py_inc_dir not in include_dirs:
            include_dirs.append(py_inc_dir)
    # Cython sources take a different compilation path entirely.
    if ext.lower() == '.pyx':
        return pyx2obj(srcpath, objpath=objpath, include_dirs=include_dirs,
                       cwd=cwd, only_update=only_update, **kwargs)
    if CompilerRunner_ is None:
        CompilerRunner_, std = extension_mapping[ext.lower()]
        if 'std' not in kwargs:
            kwargs['std'] = std
    # Compilation to an object file must not invoke the linker.
    run_linker = kwargs.pop('run_linker', False)
    if run_linker:
        raise CompilationError("src2obj called with run_linker=True")
    if only_update:
        if not missing_or_other_newer(objpath, srcpath, cwd=cwd):
            msg = "Found {0}, did not recompile.".format(objpath)
            if kwargs.get('logger', None):
                kwargs['logger'].info(msg)
            else:
                print(msg)
            return objpath
    runner = CompilerRunner_([srcpath], objpath,
                             include_dirs=include_dirs,
                             run_linker=run_linker, cwd=cwd, **kwargs)
    runner.run()
    return objpath
Compiles a source code file to an object file . Files ending with . pyx assumed to be cython files and are dispatched to pyx2obj .
53,392
def compile_link_import_strings(codes, build_dir=None, **kwargs):
    """Dump provided source strings to disk, then compile, link and import them.

    Parameters
    ----------
    codes : iterable of (filename, source-string) pairs.
    build_dir : path, optional; an existing directory to build in
        (a fresh temporary directory is created when omitted).
    **kwargs : forwarded to compile_link_import_py_ext.  ``logger=True``
        installs a default DEBUG logger; ``only_update`` (default True)
        skips rewriting sources whose MD5 is unchanged.

    Returns
    -------
    Whatever compile_link_import_py_ext returns for the written sources.
    """
    build_dir = build_dir or tempfile.mkdtemp()
    if not os.path.isdir(build_dir):
        raise OSError("Non-existent directory: ", build_dir)
    source_files = []
    if kwargs.get('logger', False) is True:
        import logging
        logging.basicConfig(level=logging.DEBUG)
        kwargs['logger'] = logging.getLogger()
    only_update = kwargs.get('only_update', True)
    for name, code_ in codes:
        dest = os.path.join(build_dir, name)
        differs = True
        md5_in_mem = md5_of_string(code_.encode('utf-8')).hexdigest()
        if only_update and os.path.exists(dest):
            if os.path.exists(dest + '.md5'):
                # Fix: close the read handle (the original leaked it).
                with open(dest + '.md5', 'rt') as fh:
                    md5_on_disk = fh.read()
            else:
                md5_on_disk = md5_of_file(dest).hexdigest()
            differs = md5_on_disk != md5_in_mem
        if not only_update or differs:
            # Fix: use context managers; the original left both write
            # handles unclosed.
            with open(dest, 'wt') as fh:
                fh.write(code_)
            with open(dest + '.md5', 'wt') as fh:
                fh.write(md5_in_mem)
        source_files.append(dest)
    return compile_link_import_py_ext(source_files, build_dir=build_dir, **kwargs)
Creates a temporary directory and dumps compiles and links provided source code .
53,393
def arguments(self):
    """Dictionary providing the list of arguments for every benchmark category.

    Accepts the deprecated 'arguments' key (with a warning) or the newer
    'options' key; a non-dict value is expanded to the same value for every
    category.  Returns None when neither key is present.
    """
    if 'arguments' in self.attributes:
        LOGGER.warning("WARNING: 'arguments' use in OSU yaml configuration file is deprecated. Please use 'options'!")
        value = self.attributes['arguments']
    elif 'options' in self.attributes:
        value = self.attributes['options']
    else:
        return None
    if isinstance(value, dict):
        return value
    return {category: value for category in self.categories}
Dictionary providing the list of arguments for every benchmark
53,394
def _ ( obj ) : tz_offset = obj . utcoffset ( ) if not tz_offset or tz_offset == UTC_ZERO : iso_datetime = obj . strftime ( '%Y-%m-%dT%H:%M:%S.%fZ' ) else : iso_datetime = obj . isoformat ( ) return iso_datetime
ISO 8601 format . Interprets naive datetime as UTC with zulu suffix .
53,395
def get_row_generator(self, ref, cache=None):
    """Return a row generator for a reference.

    Parameters
    ----------
    ref : object understood by ``rowgenerators.get_generator``.
    cache : unused; kept for interface compatibility.

    Raises
    ------
    GenerateError
        When no generator can be constructed for ``ref``.
    """
    # Fix: removed the unused ``from inspect import isgenerator`` import.
    from rowgenerators import get_generator
    g = get_generator(ref)
    if not g:
        raise GenerateError("Cant figure out how to generate rows from {} ref: {}".format(type(ref), ref))
    return g
Return a row generator for a reference
53,396
def create_key_filter(properties: Dict[str, list]) -> List[Tuple]:
    """Generate all (key, value) pairs for each key in ``properties``.

    Fix: the original returned a lazy one-shot ``chain`` object while being
    annotated ``List[Tuple]``; the result is now materialized so it matches
    the declared return type and can be iterated repeatedly.
    """
    pairs = (product([key], values) for key, values in properties.items())
    return list(chain.from_iterable(pairs))
Generate combinations of key value pairs for each key in properties .
53,397
def create_indexer(indexes: list):
    """Create an indexer function plucking the given positions from a sequence.

    The indexer always returns a tuple, even for a single index (unlike a
    bare single-argument itemgetter).
    """
    if len(indexes) > 1:
        return itemgetter(*indexes)
    only = indexes[0]
    return lambda seq: (seq[only],)
Create indexer function to pluck values from list .
53,398
def including(self, sequence) -> Generator:
    """Yield only the elements of ``sequence`` whose index is in the filter set."""
    for element in sequence:
        if self.indexer(element) in self.predicates:
            yield element
Include the sequence elements matching the filter set .
53,399
def excluding(self, sequence) -> Generator:
    """Yield only the elements of ``sequence`` whose index is NOT in the filter set."""
    for element in sequence:
        if self.indexer(element) not in self.predicates:
            yield element
Exclude the sequence elements matching the filter set .