idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
24,400
def build_tree(self):
    """Build the element tree, converting all fields to Elements.

    Idempotent: returns immediately once ``self.built`` is set.  Fields
    are appended to ``self.doc_root`` according to their kind (nested
    XmlModel, list of fields/models/strings, or plain field attached to
    the root or to a named parent node).
    """
    if self.built:
        return
    self.doc_root = self.root.element()
    for key in self.sorted_fields():
        if key not in self._fields:
            continue
        field = self._fields[key]
        if field != self.root:
            if isinstance(field, XmlModel):
                # Nested model: build recursively, optionally dropping
                # empty subtrees.
                field.build_tree()
                if (self.drop_empty and field.drop_empty
                        and len(field.doc_root) == 0):
                    continue
                self.doc_root.append(field.doc_root)
            elif isinstance(field, list):
                for item in field:
                    if isinstance(item, XmlField):
                        ele = item.element()
                        if self.drop_empty and len(ele) == 0:
                            continue
                        self.doc_root.append(ele)
                    elif isinstance(item, XmlModel):
                        item.build_tree()
                        if self.drop_empty and len(item.doc_root) == 0:
                            continue
                        self.doc_root.append(item.doc_root)
                    elif isinstance(item, (six.text_type, six.string_types)):
                        # Raw XML string: parse and attach.
                        ele = etree.fromstring(clean_xml(item))
                        self.doc_root.append(ele)
                        # NOTE(review): rebinding the loop variable has no
                        # effect on the list -- presumably dead code; confirm.
                        item = None
            elif (field.parent or self.root.name) == self.root.name:
                # Field hangs off the root (explicitly or by default).
                ele = field.element()
                if self.drop_empty and len(ele) == 0 and not ele.text:
                    continue
                ele = field.element(parent=self.doc_root)
            else:
                # Attach under the first descendant matching field.parent.
                nodes = [n for n in self.doc_root.iterdescendants(tag=field.parent)]
                if nodes:
                    ele = field.element()
                    if (self.drop_empty and len(ele) == 0
                            and not ele.text):
                        continue
                    ele = field.element(parent=nodes[0])
    self.built = True
Builds the tree with all the fields converted to Elements
24,401
def _preserve_settings ( method : T . Callable ) -> T . Callable : @ functools . wraps ( method ) def _wrapper ( old : "ObservableProperty" , handler : T . Callable ) -> "ObservableProperty" : new = method ( old , handler ) new . event = old . event new . observable = old . observable return new return _wrapper
Decorator that ensures ObservableProperty - specific attributes are kept when using methods to change deleter getter or setter .
24,402
def _trigger_event(self, holder: T.Any, alt_name: str, action: str,
                   *event_args: T.Any) -> None:
    """Trigger ``<action>_<name>`` on the associated Observable.

    *holder* is the object this property belongs to; *alt_name* is used
    as the event name when ``self.event`` is unset; *event_args* are
    passed through to the registered handlers.
    """
    if isinstance(self.observable, Observable):
        target = self.observable
    elif isinstance(self.observable, str):
        # The observable was given as an attribute name on the holder.
        target = getattr(holder, self.observable)
    elif isinstance(holder, Observable):
        target = holder
    else:
        raise TypeError(
            "This ObservableProperty is no member of an Observable "
            "object. Specify where to find the Observable object for "
            "triggering events with the observable keyword argument "
            "when initializing the ObservableProperty.")
    event_name = self.event if self.event is not None else alt_name
    target.trigger("{}_{}".format(action, event_name), *event_args)
Triggers an event on the associated Observable object . The Holder is the object this property is a member of alt_name is used as the event name when self . event is not set action is prepended to the event name and event_args are passed through to the registered event handlers .
24,403
def create_with(cls, event: str = None,
                observable: T.Union[str, Observable] = None) -> T.Callable[..., "ObservableProperty"]:
    """Return a partial application of ObservableProperty with *event*
    and *observable* preset, usable as a property factory."""
    return functools.partial(cls, event=event, observable=observable)
Creates a partial application of ObservableProperty with event and observable preset .
24,404
def get_all_handlers(self) -> T.Dict[str, T.List[T.Callable]]:
    """Return a dict mapping each event name to a copy of its handler list."""
    return {event: list(handlers)
            for event, handlers in self._events.items()}
Returns a dict with event names as keys and lists of registered handlers as values .
24,405
def get_handlers(self, event: str) -> T.List[T.Callable]:
    """Return a copy of the handler list registered for *event*."""
    registered = self._events.get(event, [])
    return [handler for handler in registered]
Returns a list of handlers registered for the given event .
24,406
def is_registered(self, event: str, handler: T.Callable) -> bool:
    """Return whether *handler* is registered for *event*."""
    registered = self._events.get(event, [])
    # Mirrors the `in` operator: identity short-circuit, then equality.
    return any(h is handler or h == handler for h in registered)
Returns whether the given handler is registered for the given event .
24,407
def on(self, event: str, *handlers: T.Callable) -> T.Callable:
    """Register one or more handlers for *event*.

    Usable directly (``obs.on('x', fn)``) or as a decorator
    (``@obs.on('x')``); returns the first handler either way.
    """
    def _register(*funcs: T.Callable) -> T.Callable:
        self._events[event].extend(funcs)
        return funcs[0]
    if not handlers:
        return _register
    return _register(*handlers)
Registers one or more handlers to a specified event . This method may as well be used as a decorator for the handler .
24,408
def once(self, event: str, *handlers: T.Callable) -> T.Callable:
    """Register handlers for *event* that are removed after the first
    trigger.  Usable directly or as a decorator."""
    def _make_oneshot(*funcs: T.Callable) -> T.Callable:
        def _oneshot(*args: T.Any, **kw: T.Any) -> None:
            # Unsubscribe before running so re-triggering from inside a
            # handler cannot fire it twice.
            self.off(event, _oneshot)
            for fn in funcs:
                fn(*args, **kw)
        return _oneshot
    if handlers:
        return self.on(event, _make_oneshot(*handlers))
    return lambda fn: self.on(event, _make_oneshot(fn))
Registers one or more handlers to a specified event but removes them when the event is first triggered . This method may as well be used as a decorator for the handler .
24,409
def trigger(self, event: str, *args: T.Any, **kw: T.Any) -> bool:
    """Run all handlers subscribed to *event*.

    Returns True when at least one handler was executed, False otherwise.
    """
    handlers = list(self._events.get(event, []))
    for handler in handlers:
        handler(*args, **kw)
    return bool(handlers)
Triggers all handlers which are subscribed to an event . Returns True when there were callbacks to execute False otherwise .
24,410
def connection(profile_name='default', api_key=None):
    """Connect to DataPoint with the given API key or profile name.

    When *api_key* is None the key is read from the profile file for
    *profile_name*; raises ValueError if that profile does not exist.
    Returns a Manager bound to the key.
    """
    if api_key is None:
        profile_fname = datapoint.profile.API_profile_fname(profile_name)
        if not os.path.exists(profile_fname):
            raise ValueError(
                'Profile not found in {}. Please install your API \n'
                'key with datapoint.profile.install_API_key('
                '"<YOUR-KEY>")'.format(profile_fname))
        with open(profile_fname) as fh:
            # BUG FIX: readlines() returned a *list* of lines, which is
            # not a usable key; read the file as one stripped string.
            api_key = fh.read().strip()
    return Manager(api_key=api_key)
Connect to DataPoint with the given API key profile name .
24,411
def elements(self):
    """Return the entries of ``ct`` that are datapoint Element instances."""
    return [entry[1] for entry in ct
            if isinstance(entry[1], datapoint.Element.Element)]
Return a list of the elements which are not None
24,412
def __retry_session(self, retries=10, backoff_factor=0.3,
                    status_forcelist=(500, 502, 504), session=None):
    """Return a requests Session with retry/backoff mounted.

    Reuses *session* when given, otherwise creates a new one.  Both the
    http and https adapters share the same retry policy.
    """
    sess = session or requests.Session()
    retry_policy = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry_policy)
    for scheme in ('http://', 'https://'):
        sess.mount(scheme, adapter)
    return sess
Retry the connection using requests if it fails . Use this as a wrapper to request from datapoint
24,413
def __call_api(self, path, params=None, api_url=FORECAST_URL):
    """Call the DataPoint API via requests and return the decoded JSON.

    The stored API key is merged into the query parameters.  Raises
    APIException when the body is not JSON, and a generic Exception
    carrying the service's error message on non-200 status codes.
    """
    if not params:
        params = dict()
    payload = {'key': self.api_key}
    payload.update(params)
    url = "%s/%s" % (api_url, path)
    sess = self.__retry_session()
    # NOTE(review): 1 second is a very aggressive timeout -- confirm.
    req = sess.get(url, params=payload, timeout=1)
    try:
        data = req.json()
    except ValueError:
        raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
    self.call_response = data
    if req.status_code != 200:
        # Use the first error-ish field present in the response body.
        msg = [data[m] for m in ("message", "error_message", "status")
               if m in data][0]
        raise Exception(msg)
    return data
Call the datapoint api using the requests module
24,414
def _get_wx_units ( self , params , name ) : units = "" for param in params : if str ( name ) == str ( param [ 'name' ] ) : units = param [ 'units' ] return units
Given the Wx array returned from datapoint and an element name, return the units for that element .
24,415
def _visibility_to_text ( self , distance ) : if not isinstance ( distance , ( int , long ) ) : raise ValueError ( "Distance must be an integer not" , type ( distance ) ) if distance < 0 : raise ValueError ( "Distance out of bounds, should be 0 or greater" ) if 0 <= distance < 1000 : return 'VP' elif 1000 <= distance < 4000 : return 'PO' elif 4000 <= distance < 10000 : return 'MO' elif 10000 <= distance < 20000 : return 'GO' elif 20000 <= distance < 40000 : return 'VG' else : return 'EX'
Convert observed visibility in metres to text used in forecast
24,416
def get_forecast_sites(self):
    """Return the list of forecast Site objects.

    The site list is fetched from the API at most once per
    ``forecast_sites_update_time`` seconds; otherwise the cached list
    from the previous request is returned.
    """
    time_now = time()
    if (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time or self.forecast_sites_last_request is None:
        data = self.__call_api("sitelist/")
        sites = list()
        for jsoned in data['Locations']['Location']:
            site = Site()
            site.name = jsoned['name']
            site.id = jsoned['id']
            site.latitude = jsoned['latitude']
            site.longitude = jsoned['longitude']
            # Optional attributes: only present for some locations.
            if 'region' in jsoned:
                site.region = jsoned['region']
            if 'elevation' in jsoned:
                site.elevation = jsoned['elevation']
            if 'unitaryAuthArea' in jsoned:
                site.unitaryAuthArea = jsoned['unitaryAuthArea']
            if 'nationalPark' in jsoned:
                site.nationalPark = jsoned['nationalPark']
            site.api_key = self.api_key
            sites.append(site)
        # Refresh the cache and its timestamp.
        self.forecast_sites_last_request = sites
        self.forecast_sites_last_update = time_now
    else:
        sites = self.forecast_sites_last_request
    return sites
This function returns a list of Site objects .
24,417
def get_nearest_site(self, latitude=None, longitude=None):
    """Deprecated alias: delegate to get_nearest_forecast_site()."""
    warn('This function is deprecated. Use get_nearest_forecast_site() instead',
         DeprecationWarning, stacklevel=2)
    return self.get_nearest_forecast_site(latitude, longitude)
Deprecated . This function returns nearest Site object to the specified coordinates .
24,418
def get_nearest_forecast_site(self, latitude=None, longitude=None):
    """Return the forecast Site nearest to the given coordinates.

    Prints an error and returns False when either coordinate is
    missing; raises APIException when no site lies within 30 km.
    """
    # BUG FIX: the longitude check previously printed
    # 'ERROR: No latitude given.'
    if longitude is None:
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False
    nearest = False
    distance = None
    sites = self.get_forecast_sites()
    for site in sites:
        new_distance = self._distance_between_coords(
            float(site.longitude), float(site.latitude),
            float(longitude), float(latitude))
        if distance is None or new_distance < distance:
            distance = new_distance
            nearest = site
    if distance > 30:
        raise APIException("There is no site within 30km.")
    return nearest
This function returns the nearest Site object to the specified coordinates .
24,419
def get_observation_sites(self):
    """Return a list of Site objects for which observations are available.

    Results are cached and refreshed at most once per
    ``observation_sites_update_time`` seconds.
    """
    if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
        self.observation_sites_last_update = time()
        data = self.__call_api("sitelist/", None, OBSERVATION_URL)
        sites = list()
        for jsoned in data['Locations']['Location']:
            site = Site()
            site.name = jsoned['name']
            site.id = jsoned['id']
            site.latitude = jsoned['latitude']
            site.longitude = jsoned['longitude']
            # Optional attributes: only present for some locations.
            if 'region' in jsoned:
                site.region = jsoned['region']
            if 'elevation' in jsoned:
                site.elevation = jsoned['elevation']
            if 'unitaryAuthArea' in jsoned:
                site.unitaryAuthArea = jsoned['unitaryAuthArea']
            if 'nationalPark' in jsoned:
                site.nationalPark = jsoned['nationalPark']
            site.api_key = self.api_key
            sites.append(site)
        self.observation_sites_last_request = sites
    else:
        # BUG FIX: was `observation_self.sites_last_request`, a NameError
        # whenever the cached branch was taken.
        sites = self.observation_sites_last_request
    return sites
This function returns a list of Site objects for which observations are available .
24,420
def get_nearest_observation_site(self, latitude=None, longitude=None):
    """Return the nearest Site supporting observations.

    Prints an error and returns False when either coordinate is
    missing; raises APIException when no site lies within 20 km.
    """
    if longitude is None:
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False
    nearest = False
    distance = None
    sites = self.get_observation_sites()
    for site in sites:
        new_distance = self._distance_between_coords(
            float(site.longitude), float(site.latitude),
            float(longitude), float(latitude))
        if distance is None or new_distance < distance:
            distance = new_distance
            nearest = site
    if distance > 20:
        # BUG FIX: message said "30km" although the threshold is 20 km.
        raise APIException("There is no site within 20km.")
    return nearest
This function returns the nearest Site to the specified coordinates that supports observations
24,421
def call_api(self, path, **kwargs):
    """Call the DataPoint API at *path*, adding the stored key if absent.

    Raises via ``raise_for_status`` on non-OK responses; returns the
    decoded JSON body otherwise.
    """
    kwargs.setdefault('key', self.api_key)
    response = requests.get('{0}{1}'.format(self.base_url, path), params=kwargs)
    if response.status_code != requests.codes.ok:
        response.raise_for_status()
    return response.json()
Call datapoint api
24,422
def get_all_regions(self):
    """Request the list of regions from DataPoint as Site objects.

    Regions rarely change, so the response is cached for
    ``regions_update_time`` seconds to minimise API requests.
    """
    # Serve from cache while it is still fresh.
    if (time() - self.regions_last_update) < self.regions_update_time:
        return self.regions_last_request
    response = self.call_api(self.all_regions_path)
    regions = []
    for location in response['Locations']['Location']:
        entry = Site()
        entry.id = location['@id']
        entry.region = location['@name']
        entry.name = REGION_NAMES[location['@name']]
        regions.append(entry)
    self.regions_last_update = time()
    self.regions_last_request = regions
    return regions
Request a list of regions from Datapoint . Returns each Region as a Site object . Regions rarely change so we cache the response for one hour to minimise requests to API .
24,423
def now(self):
    """Return just the current timestep from this forecast.

    Returns the latest timestep not later than the current minute of
    the day; falls back to the last timestep of day 0 when "now" is
    within 4 hours (14400 s) past it, and False otherwise.
    """
    now = None
    d = datetime.datetime.now(tz=self.days[0].date.tzinfo)
    for_total_seconds = d - d.replace(hour=0, minute=0, second=0, microsecond=0)
    # Minutes since midnight; timestep names are minute offsets.
    msm = for_total_seconds.total_seconds() / 60
    if self.days[0].date.strftime("%Y-%m-%dZ") == d.strftime("%Y-%m-%dZ"):
        for timestep in self.days[0].timesteps:
            if timestep.name > msm:
                break
            now = timestep
        return now
    elif abs(self.days[0].timesteps[-1].date - d).total_seconds() < 14400:
        # Just past the last timestep of the day: use it as "now".
        timestep_to_return = self.days[0].timesteps[-1]
        return timestep_to_return
    else:
        return False
Function to return just the current timestep from this forecast
24,424
def future(self, in_days=None, in_hours=None, in_minutes=None, in_seconds=None):
    """Return the first timestep at or after the requested future moment.

    Days select the day index; hours/minutes/seconds feed a timedelta
    from the current UTC time.  Prints an error and returns False when
    the requested day is outside the forecast range.
    """
    future = None
    dd, hh, mm, ss = [0 for i in range(4)]
    if (in_days != None):
        dd = dd + in_days
    if (in_hours != None):
        hh = hh + in_hours
    if (in_minutes != None):
        mm = mm + in_minutes
    if (in_seconds != None):
        ss = ss + in_seconds
    dnow = datetime.datetime.utcnow()
    d = dnow + datetime.timedelta(hours=hh, minutes=mm, seconds=ss)
    for_total_seconds = d - d.replace(hour=0, minute=0, second=0, microsecond=0)
    # Minutes since midnight of the target moment.
    try:
        msm = for_total_seconds.total_seconds() / 60.
    except:
        # Presumably a fallback for interpreters lacking
        # timedelta.total_seconds -- confirm.
        msm = self.timedelta_total_seconds(for_total_seconds) / 60.
    if (dd < len(self.days)):
        for timestep in self.days[dd].timesteps:
            if timestep.name >= msm:
                future = timestep
                return future
    else:
        print('ERROR: requested date is outside the forecast range selected,' + str(len(self.days)))
        return False
Function to return a future timestep
24,425
def install_API_key(api_key, profile_name='default'):
    """Write *api_key* into the profile file for *profile_name*,
    creating the profile directory when missing."""
    fname = API_profile_fname(profile_name)
    profile_dir = os.path.dirname(fname)
    if not os.path.isdir(profile_dir):
        os.makedirs(profile_dir)
    with open(fname, 'w') as fh:
        fh.write(api_key)
Put the given API key into the given profile name .
24,426
def is_namedtuple(type_: Type[Any]) -> bool:
    """Return True for classes generated with typing.NamedTuple.

    NOTE(review): ``_field_types`` was removed in Python 3.9, so on
    newer interpreters this check rejects typing.NamedTuple classes --
    verify against the supported Python versions.
    """
    return _issubclass(type_, tuple) and hasattr(type_, '_field_types') and hasattr(type_, '_fields')
Generated with typing . NamedTuple
24,427
def uniontypes(type_: Type[Any]) -> Set[Type[Any]]:
    """Return the member types of a Union.

    Raises ValueError for non-Unions and AttributeError when the typing
    API of the running interpreter is not recognised.
    """
    if not is_union(type_):
        raise ValueError('Not a Union: ' + str(type_))
    # Attribute name differs between typing API versions.
    for attr in ('__args__', '__union_params__'):
        if hasattr(type_, attr):
            return set(getattr(type_, attr))
    raise AttributeError('The typing API for this Python version is unknown')
Returns the types of a Union .
24,428
def index(self, value: Any) -> int:
    """Return the index of the first handler whose condition matches
    *value*; raise TypedloadValueError when none does."""
    for position, (condition, _) in enumerate(self.handlers):
        try:
            matched = condition(value)
        except:
            # Condition errors are treated as "no match" unless the
            # dumper is configured to surface them.
            if self.raiseconditionerrors:
                raise
            matched = False
        if matched:
            return position
    raise TypedloadValueError('Unable to dump %s' % value, value=value)
Returns the index in the handlers list that matches the given value .
24,429
def dump(self, value: Any) -> Any:
    """Dump the typed *value* into its untyped equivalent using the
    first matching handler."""
    handler_func = self.handlers[self.index(value)][1]
    return handler_func(self, value)
Dump the typed data structure into its untyped equivalent .
24,430
def _forwardrefload(l: Loader, value: Any, type_: type) -> Any:
    """Resolve a ForwardRef by name via the loader's frefs cache, then
    load *value* into the resolved type."""
    if l.frefs is None:
        raise TypedloadException('ForwardRef resolving is disabled for the loader', value=value, type_=type_)
    tname = type_.__forward_arg__
    resolved = l.frefs.get(tname)
    if resolved is None:
        raise TypedloadValueError("ForwardRef '%s' unknown" % tname, value=value, type_=type_)
    return l.load(value, resolved,
                  annotation=Annotation(AnnotationType.FORWARDREF, tname))
This resolves a ForwardRef .
24,431
def _basicload ( l : Loader , value : Any , type_ : type ) -> Any : if type ( value ) != type_ : if l . basiccast : try : return type_ ( value ) except ValueError as e : raise TypedloadValueError ( str ( e ) , value = value , type_ = type_ ) except TypeError as e : raise TypedloadTypeError ( str ( e ) , value = value , type_ = type_ ) except Exception as e : raise TypedloadException ( str ( e ) , value = value , type_ = type_ ) else : raise TypedloadValueError ( 'Not of type %s' % type_ , value = value , type_ = type_ ) return value
This converts a value into a basic type .
24,432
def _unionload ( l : Loader , value , type_ ) -> Any : try : args = uniontypes ( type_ ) except AttributeError : raise TypedloadAttributeError ( 'The typing API for this Python version is unknown' ) if type ( value ) in args . intersection ( l . basictypes ) : return value exceptions = [ ] for t in args : try : return l . load ( value , t , annotation = Annotation ( AnnotationType . UNION , t ) ) except Exception as e : exceptions . append ( e ) raise TypedloadValueError ( 'Value could not be loaded into %s' % type_ , value = value , type_ = type_ , exceptions = exceptions )
Loads a value into a union .
24,433
def _enumload ( l : Loader , value , type_ ) -> Enum : try : return type_ ( value ) except : pass for _ , t in get_type_hints ( type_ ) . items ( ) : try : return type_ ( l . load ( value , t ) ) except : pass raise TypedloadValueError ( 'Value could not be loaded into %s' % type_ , value = value , type_ = type_ )
This loads something into an Enum .
24,434
def _noneload ( l : Loader , value , type_ ) -> None : if value is None : return None raise TypedloadValueError ( 'Not None' , value = value , type_ = type_ )
Loads a value that can only be None, so it fails if it isn't
24,435
def index(self, type_: Type[T]) -> int:
    """Return the index of the first handler whose condition accepts
    *type_*; raise ValueError when none matches."""
    for position, (condition, _) in enumerate(self.handlers):
        try:
            accepted = condition(type_)
        except:
            # Treat condition errors as "no match" unless configured
            # to surface them.
            if self.raiseconditionerrors:
                raise
            accepted = False
        if accepted:
            return position
    raise ValueError('No matching condition found')
Returns the index in the handlers list that matches the given type .
24,436
def load(self, value: Any, type_: Type[T], *, annotation: Optional[Annotation] = None) -> T:
    """Load *value* into the typed data structure *type_*.

    Named types are remembered in ``self.frefs`` so ForwardRefs can be
    resolved by name later.  Handler failures get the current frame
    prepended to their trace before re-raising.
    """
    try:
        index = self.index(type_)
    except ValueError:
        raise TypedloadTypeError('Cannot deal with value of type %s' % type_, value=value, type_=type_)
    # Register the type by name for later ForwardRef resolution.
    if self.frefs is not None and hasattr(type_, '__name__'):
        tname = type_.__name__
        if tname not in self.frefs:
            self.frefs[tname] = type_
    func = self.handlers[index][1]
    try:
        return func(self, value, type_)
    except Exception as e:
        # NOTE(review): assumes handlers raise only TypedloadException
        # subclasses; `assert` disappears under -O -- confirm intent.
        assert isinstance(e, TypedloadException)
        e.trace.insert(0, TraceItem(value, type_, annotation))
        raise e
Loads value into the typed data structure .
24,437
def get_data(city: Optional[str]) -> Dict[str, Any]:
    """Query the Yahoo weather API for *city* and return the 'channel'
    section of the response."""
    request = urllib.request.Request(get_url(city))
    with urllib.request.urlopen(request) as handle:
        raw = handle.read()
    parsed = json.loads(raw.decode('ascii'))
    return parsed['query']['results']['channel']
Use the Yahoo weather API to get weather information
24,438
def load ( value : Any , type_ : Type [ T ] , ** kwargs ) -> T : from . import dataloader loader = dataloader . Loader ( ** kwargs ) return loader . load ( value , type_ )
Quick function call to load data into a type .
24,439
def dump ( value : Any , ** kwargs ) -> Any : from . import datadumper dumper = datadumper . Dumper ( ** kwargs ) return dumper . dump ( value )
Quick function to dump a data structure into something that is compatible with json or other programs and languages .
24,440
def attrload ( value : Any , type_ : Type [ T ] , ** kwargs ) -> T : from . import dataloader from . plugins import attrload as loadplugin loader = dataloader . Loader ( ** kwargs ) loadplugin . add2loader ( loader ) return loader . load ( value , type_ )
Quick function call to load data supporting the attr module in addition to the default ones .
24,441
def attrdump ( value : Any , ** kwargs ) -> Any : from . import datadumper from . plugins import attrdump as dumpplugin dumper = datadumper . Dumper ( ** kwargs ) dumpplugin . add2dumper ( dumper ) return dumper . dump ( value )
Quick function to do a dump that supports the attr module .
24,442
def on_panic(etype, value, tb):
    """Handle an unhandled error in a goroutine: log the traceback and
    exit the process with status 1."""
    # BUG FIX: format_exception returns a *list* of lines; join them so
    # the log shows a readable traceback rather than a list repr.
    _logging.critical(''.join(_traceback.format_exception(etype, value, tb)))
    _be.propagate_exc(SystemExit, 1)
Called when there is an unhandled error in a goroutine . By default logs and exits the process .
24,443
def stdout_to_results(s):
    """Parse the multi-line stdout of a benchmark process into a list
    of BenchmarkResult instances (one per whitespace-separated row)."""
    rows = s.strip().split('\n')
    return [BenchmarkResult(*row.split()) for row in rows]
Turns the multi - line output of a benchmark process into a sequence of BenchmarkResult instances .
24,444
def benchmark_process_and_backend(exe, backend):
    """Run the benchmark module under *exe* with GOLESS_BACKEND set to
    *backend* and return its BenchmarkResults."""
    env = dict(os.environ, GOLESS_BACKEND=backend)
    return get_benchproc_results([exe, '-m', 'benchmark'], env=env)
Returns BenchmarkResults for a given executable and backend .
24,445
def insert_seperator_results(results):
    """Yield *results*, inserting a blank separator row whenever the
    benchmark name changes, for visual grouping."""
    blank_row = BenchmarkResult(*[' ' * width for width in COLUMN_WIDTHS])
    previous = None
    for result in results:
        if previous is not None and previous != result.benchmark:
            yield blank_row
        previous = result.benchmark
        yield result
Given a sequence of BenchmarkResults, return a new sequence where a separator BenchmarkResult has been placed between differing benchmarks to provide a visual difference .
24,446
def parse(self) -> typing.Union[list, dict, None]:
    """Parse the BYML and return the root node with all children.

    Returns None for documents whose root-node offset is zero.
    """
    root_node_offset = self._read_u32(12)
    if root_node_offset == 0:
        # Empty document: no root node.
        return None
    node_type = self._data[root_node_offset]
    if not _is_container_type(node_type):
        raise ValueError("Invalid root node: expected array or dict, got type 0x%x" % node_type)
    # NOTE(review): passes 12 (the offset of the root pointer), not
    # root_node_offset -- presumably _parse_node dereferences it; confirm.
    return self._parse_node(node_type, 12)
Parse the BYML and get the root node with all children .
24,447
def check_permission(self, request):
    """Return True when every configured permission class allows
    *request* (i.e. whether this field may be shown)."""
    for permission in self.permission_classes:
        if not permission.has_permission(request):
            return False
    return True
Check this field s permissions to determine whether or not it may be shown .
24,448
def build_github_url(repo, branch=None, path='requirements.txt', token=None):
    """Build a raw.githubusercontent.com URL for *path* inside *repo*.

    *repo* may be a full GitHub URL or an 'owner/name' slug; the repo's
    default branch is looked up when *branch* is not given, and *token*
    is appended as a query parameter when present.
    """
    slug = re.sub(r"^http(s)?://github.com/", "", repo).strip('/')
    effective_path = path or 'requirements.txt'
    effective_branch = branch or get_default_branch(slug)
    url = 'https://raw.githubusercontent.com/{}/{}/{}'.format(
        slug, effective_branch, effective_path)
    if token:
        return '{}?token={}'.format(url, token)
    return url
Builds a URL to a file inside a Github repository .
24,449
def get_default_branch(repo):
    """Return the repo's default branch name, falling back to 'master'
    when the GitHub API request fails."""
    url = "{}/repos/{}".format(GITHUB_API_BASE, repo)
    response = requests.get(url)
    if response.status_code != 200:
        return 'master'
    return json.loads(response.text)['default_branch']
returns the name of the default branch of the repo
24,450
def get_requirements_file_from_url(url):
    """Fetch the requirements file at *url*; return its text as a
    StringIO, or an empty StringIO on any non-200 response."""
    response = requests.get(url)
    body = response.text if response.status_code == 200 else ""
    return StringIO(body)
fetches the requirements from the url
24,451
def longest_one_seg_prefix(self, word):
    """Return the longest IPA Unicode prefix of *word*, or '' when the
    word does not start with a valid segment."""
    match = self.seg_regex.match(word)
    return match.group(0) if match else ''
Return longest IPA Unicode prefix of word
24,452
def filter_segs(self, segs):
    """Given a list of strings, return only those that are valid
    segments in their entirety."""
    def _is_whole_seg(seg):
        m = self.seg_regex.match(seg)
        return bool(m and m.group(0) == seg)
    return [seg for seg in segs if _is_whole_seg(seg)]
Given list of strings return only those which are valid segments .
24,453
def validate_line(self, line):
    """Validate a Unicode IPA string relative to panphon.

    Consumes the line segment-by-segment; any position that is neither
    a valid segment nor whitespace/punctuation is reported to stderr
    and skipped one character at a time so validation continues.
    """
    line0 = line
    pos = 0
    while line:
        seg_m = self.ft.seg_regex.match(line)
        wsp_m = self.ws_punc_regex.match(line)
        if seg_m:
            length = len(seg_m.group(0))
            line = line[length:]
            pos += length
        elif wsp_m:
            length = len(wsp_m.group(0))
            line = line[length:]
            pos += length
        else:
            # Invalid character: report with its position in the
            # original line, then skip it.
            msg = 'IPA not valid at position {} in "{}".'.format(pos, line0.strip())
            print(msg, file=sys.stderr)
            line = line[1:]
            pos += 1
Validate Unicode IPA string relative to panphon .
24,454
def segment_text(text, seg_regex=SEG_REGEX):
    """Yield each IPA segment found in *text*, in order of occurrence."""
    for match in seg_regex.finditer(text):
        yield match.group(0)
Return an iterator of segments in the text .
24,455
def fts_match(self, features, segment):
    """Return whether *features* is a subset of *segment*'s features.

    Returns None when the segment is unknown.
    """
    wanted = set(features)
    if not self.seg_known(segment):
        return None
    return wanted <= self.fts(segment)
Answer question are ft_mask s features a subset of ft_seg?
24,456
def longest_one_seg_prefix(self, word):
    """Return the longest prefix of *word* that is a known segment,
    searching from the longest possible length downwards."""
    for length in range(self.longest_seg, 0, -1):
        prefix = word[:length]
        if prefix in self.seg_dict:
            return prefix
    return ''
Return longest Unicode IPA prefix of a word
24,457
def validate_word(self, word):
    """Return True iff *word* consists exhaustively of valid IPA
    segments, consuming it greedily from the left."""
    remaining = word
    while remaining:
        m = self.seg_regex.match(remaining)
        if not m:
            return False
        remaining = remaining[len(m.group(0)):]
    return True
Returns True if word consists exhaustively of valid IPA segments
24,458
def segs(self, word):
    """Return the list of segments (the 'all' capture group) found in
    *word*."""
    return [match.group('all')
            for match in self.seg_regex.finditer(word)]
Returns a list of segments from a word
24,459
def word_fts(self, word):
    """Return the featural analysis of *word*: one feature set per
    segment."""
    return [self.fts(seg) for seg in self.segs(word)]
Return featural analysis of word
24,460
def filter_string(self, word):
    """Return *word* with everything except legal IPA segments removed."""
    return ''.join(m.group(0) for m in self.seg_regex.finditer(word))
Return a string like the input but containing only legal IPA segments
24,461
def fts_intersection(self, segs):
    """Return the set of features shared by every valid segment in
    *segs* (invalid segments are filtered out first)."""
    feature_sets = [self.fts(seg) for seg in self.filter_segs(segs)]
    return reduce(lambda acc, nxt: acc & nxt, feature_sets)
Return the features shared by segs
24,462
def fts_match_any(self, fts, inv):
    """Return True if any segment in the inventory *inv* matches the
    feature mask *fts*."""
    # Idiom fix: generator instead of materializing a list, letting
    # any() short-circuit on the first match.
    return any(self.fts_match(fts, s) for s in inv)
Return True if any segment in inv matches the features in fts
24,463
def fts_match_all(self, fts, inv):
    """Return True if every segment in the inventory *inv* matches the
    feature mask *fts*."""
    # Idiom fix: generator instead of materializing a list, letting
    # all() short-circuit on the first failure.
    return all(self.fts_match(fts, s) for s in inv)
Return True if all segments in inv matches the features in fts
24,464
def fts_contrast2(self, fs, ft_name, inv):
    """Return True if the inventory contains a pair of segments that
    both carry the features in *fs* and differ exactly (and only) in
    feature *ft_name*."""
    # Restrict to inventory segments carrying all requested features.
    inv_fts = [self.fts(x) for x in inv if set(fs) <= self.fts(x)]
    for a in inv_fts:
        for b in inv_fts:
            if a != b:
                diff = a ^ b
                # A minimal contrast: the symmetric difference holds
                # exactly two (value, name) pairs, both for ft_name.
                if len(diff) == 2:
                    if all([nm == ft_name for (_, nm) in diff]):
                        return True
    return False
Return True if there is a segment in inv that contrasts in feature ft_name .
24,465
def fts_count(self, fts, inv):
    """Return how many segments in inventory *inv* match the feature
    mask *fts*."""
    return sum(1 for s in inv if self.fts_match(fts, s))
Return the count of segments in an inventory matching a given feature mask .
24,466
def match_pattern(self, pat, word):
    """Fixed-width pattern match.

    Returns the word's feature sets when *pat* has the same length and
    every pattern element is a subset of the corresponding segment's
    features; returns None otherwise.
    """
    segs = self.word_fts(word)
    if len(pat) != len(segs):
        return None
    for wanted, actual in zip(pat, segs):
        if not set(wanted) <= actual:
            return None
    return segs
Implements fixed - width pattern matching .
24,467
def compile_regex_from_str(self, ft_str):
    """Compile a regex matching strings of segments for a bracketed
    feature-mask sequence such as '[+syl][-voi]'.

    Each bracketed mask becomes one alternation group of all segments
    matching that mask.
    """
    alternations = []
    for m in re.finditer(r'\[([^]]+)\]', ft_str):
        matching_segs = self.all_segs_matching_fts(fts(m.group(1)))
        alternations.append('({})'.format('|'.join(matching_segs)))
    return re.compile(''.join(alternations))
Given a string describing features masks for a sequence of segments return a regex matching the corresponding strings .
24,468
def segment_to_vector(self, seg):
    """Return the feature values of *seg* as a list ordered by the
    canonical feature-name order in ``self.names``."""
    value_by_name = {name: value for (value, name) in self.fts(seg)}
    return [value_by_name[name] for name in self.names]
Given a Unicode IPA segment, return a list of feature specifications in canonical order .
24,469
def word_to_vector_list(self, word, numeric=False, xsampa=False):
    """Return one feature vector per segment of the Unicode IPA *word*.

    X-SAMPA input is converted to IPA first when *xsampa* is set;
    *numeric* maps feature values to their numeric representation.
    """
    if xsampa:
        word = self.xsampa.convert(word)
    tensor = [self.segment_to_vector(seg) for seg in self.segs(word)]
    return self.tensor_to_numeric(tensor) if numeric else tensor
Return a list of feature vectors given a Unicode IPA word .
24,470
def clown_strike_ioc(self, ioc):
    """Perform a Clown Strike lookup on an IoC and emit the response.

    NOTE(review): sends the IoC in the request *body* of a GET
    (``data=``) rather than as a query parameter -- confirm this is
    what the API expects.
    """
    r = requests.get('http://threatbutt.io/api', data='ioc={0}'.format(ioc))
    self._output(r.text)
Performs Clown Strike lookup on an IoC .
24,471
def bespoke_md5(self, md5):
    """Perform a Bespoke MD5 lookup on *md5* and emit the response text."""
    response = requests.post('http://threatbutt.io/api/md5/{0}'.format(md5))
    self._output(response.text)
Performs Bespoke MD5 lookup on an MD5 .
24,472
def sonority_from_fts(self, seg):
    """Given a segment as features, return its sonority on a 1-9 scale.

    The score is computed by a fixed decision tree over feature tests;
    leaves are sonority values (higher = more sonorous).
    """
    def match(m):
        # True when the feature expression m matches seg.
        return self.fm.match(fts(m), seg)
    # Decision tree: each BoolTree picks its first branch when the
    # feature test holds, the second otherwise.
    minusHi = BoolTree(match('-hi'), 9, 8)
    minusNas = BoolTree(match('-nas'), 6, 5)
    plusVoi1 = BoolTree(match('+voi'), 4, 3)
    plusVoi2 = BoolTree(match('+voi'), 2, 1)
    plusCont = BoolTree(match('+cont'), plusVoi1, plusVoi2)
    plusSon = BoolTree(match('+son'), minusNas, plusCont)
    minusCons = BoolTree(match('-cons'), 7, plusSon)
    plusSyl = BoolTree(match('+syl'), minusHi, minusCons)
    return plusSyl.get_value()
Given a segment as features returns the sonority on a scale of 1 to 9 .
24,473
def from_dict(cls, d):
    """Create a cache hierarchy from a dictionary description.

    Returns (hierarchy, caches_by_name, main_memory).  The first-level
    cache is the single cache no other cache refers to; the last-level
    caches are found by walking the load_from and store_to chains.
    """
    main_memory = MainMemory()
    caches = {}
    referred_caches = set()
    # First pass: instantiate each cache (linking keys stripped from
    # the kwargs) and record which caches are referred to by others.
    for name, conf in d.items():
        caches[name] = Cache(name=name,
                             **{k: v for k, v in conf.items()
                                if k not in ['store_to', 'load_from', 'victims_to']})
        if 'store_to' in conf:
            referred_caches.add(conf['store_to'])
        if 'load_from' in conf:
            referred_caches.add(conf['load_from'])
        if 'victims_to' in conf:
            referred_caches.add(conf['victims_to'])
    # Second pass: wire up the store/load/victim links.
    for name, conf in d.items():
        if 'store_to' in conf and conf['store_to'] is not None:
            caches[name].set_store_to(caches[conf['store_to']])
        if 'load_from' in conf and conf['load_from'] is not None:
            caches[name].set_load_from(caches[conf['load_from']])
        if 'victims_to' in conf and conf['victims_to'] is not None:
            caches[name].set_victims_to(caches[conf['victims_to']])
    # The first level is the unique cache nobody refers to.
    first_level = set(d.keys()) - referred_caches
    assert len(first_level) == 1, "Unable to find first cache level."
    first_level = caches[list(first_level)[0]]
    # Walk the load chain to its end.
    last_level_load = c = first_level
    while c is not None:
        last_level_load = c
        c = c.load_from
    assert last_level_load is not None, "Unable to find last cache level."
    # Walk the store chain to its end.
    last_level_store = c = first_level
    while c is not None:
        last_level_store = c
        c = c.store_to
    assert last_level_store is not None, "Unable to find last cache level."
    # Main memory backs the last level of each chain.
    main_memory.load_to(last_level_load)
    main_memory.store_from(last_level_store)
    return cls(first_level, main_memory), caches, main_memory
Create cache hierarchy from dictionary .
24,474
def load(self, addr, length=1):
    """Load one or more addresses.

    :param addr: single address, an iterable of addresses, or None (no-op).
    :param length: number of bytes per access.
    """
    if addr is None:
        return
    if isinstance(addr, Iterable):
        self.first_level.iterload(addr, length=length)
    else:
        self.first_level.load(addr, length=length)
Load one or more addresses .
24,475
def store(self, addr, length=1, non_temporal=False):
    """Store one or more addresses.

    :param addr: single address, an iterable of addresses, or None (no-op).
    :param length: number of bytes per access.
    :param non_temporal: unsupported; raises ValueError when True.
    """
    if non_temporal:
        raise ValueError("non_temporal stores are not yet supported")
    if addr is None:
        return
    if isinstance(addr, Iterable):
        self.first_level.iterstore(addr, length=length)
    else:
        self.first_level.store(addr, length=length)
Store one or more adresses .
24,476
def loadstore(self, addrs, length=1):
    """Load and store addresses in the order given.

    :param addrs: iterable of addresses; anything non-iterable is rejected.
    :param length: number of bytes per access.
    :raises ValueError: if `addrs` is not iterable.
    """
    if not isinstance(addrs, Iterable):
        raise ValueError("addr must be iteratable")
    self.first_level.loadstore(addrs, length=length)
Load and store addresses in the order given.
24,477
def print_stats(self, header=True, file=None):
    """Pretty print stats table.

    :param header: if True, print a column-header line first.
    :param file: output stream; defaults to the *current* sys.stdout.
    """
    if file is None:
        # Resolve lazily: the original default `file=sys.stdout` was bound at
        # definition time, ignoring later redirections of sys.stdout.
        file = sys.stdout
    if header:
        print("CACHE {:*^18} {:*^18} {:*^18} {:*^18} {:*^18}".format(
            "HIT", "MISS", "LOAD", "STORE", "EVICT"), file=file)
    for s in self.stats():
        # Bug fix: dropped the unused hard-coded `HIT_bytes=2342` keyword the
        # original passed; the format string only reads keys from `s`.
        print("{name:>5} {HIT_count:>6} ({HIT_byte:>8}B) {MISS_count:>6} ({MISS_byte:>8}B) "
              "{LOAD_count:>6} ({LOAD_byte:>8}B) {STORE_count:>6} "
              "({STORE_byte:>8}B) {EVICT_count:>6} ({EVICT_byte:>8}B)".format(**s),
              file=file)
Pretty print stats table .
24,478
def levels(self, with_mem=True):
    """Return cache levels, optionally including main memory.

    Walks the load_from chain from the first level, also yielding victim
    and store targets that are not reachable via load_from.
    """
    cache = self.first_level
    while cache is not None:
        yield cache
        victims = cache.victims_to
        if victims is not None and victims != cache.load_from:
            yield victims
        store = cache.store_to
        if store is not None and store != cache.load_from and store != victims:
            yield store
        cache = cache.load_from
    if with_mem:
        yield self.main_memory
Return cache levels optionally including main memory .
24,479
def count_invalid_entries(self):
    """Sum of all invalid entry counts from cache levels (main memory excluded)."""
    # Generator expression instead of sum([...]): no throwaway list (PERF401).
    return sum(c.count_invalid_entries() for c in self.levels(with_mem=False))
Sum of all invalid entry counts from cache levels .
24,480
def set_load_from(self, load_from):
    """Update load_from in Cache and backend.

    :param load_from: next-level Cache to load from, or None for a last-level cache.
    """
    assert load_from is None or isinstance(load_from, Cache), \
        "load_from needs to be None or a Cache object."
    assert load_from is None or load_from.cl_size <= self.cl_size, \
        "cl_size may only increase towards main memory."
    self.load_from = load_from
    # Bug fix: the original unconditionally dereferenced load_from.backend,
    # which raised AttributeError for the None case the asserts explicitly allow.
    self.backend.load_from = load_from.backend if load_from is not None else None
Update load_from in Cache and backend .
24,481
def set_store_to(self, store_to):
    """Update store_to in Cache and backend.

    :param store_to: next-level Cache to store to, or None for a last-level cache.
    """
    assert store_to is None or isinstance(store_to, Cache), \
        "store_to needs to be None or a Cache object."
    assert store_to is None or store_to.cl_size <= self.cl_size, \
        "cl_size may only increase towards main memory."
    self.store_to = store_to
    # Bug fix: the original unconditionally dereferenced store_to.backend,
    # which raised AttributeError for the None case the asserts explicitly allow.
    self.backend.store_to = store_to.backend if store_to is not None else None
Update store_to in Cache and backend .
24,482
def set_victims_to(self, victims_to):
    """Update victims_to in Cache and backend.

    :param victims_to: Cache to evict victims to, or None.
    """
    # Message fix: the original said "store_to needs to be ...".
    assert victims_to is None or isinstance(victims_to, Cache), \
        "victims_to needs to be None or a Cache object."
    # Message fix: the check is equality (==), not monotonic growth, so the
    # original "may only increase" message was misleading.
    assert victims_to is None or victims_to.cl_size == self.cl_size, \
        "cl_size of victim cache must match."
    self.victims_to = victims_to
    # Bug fix: the original unconditionally dereferenced victims_to.backend,
    # which raised AttributeError for the None case the asserts explicitly allow.
    self.backend.victims_to = victims_to.backend if victims_to is not None else None
Update victims_to in Cache and backend .
24,483
def load_to(self, last_level_load):
    """Set level where to load from.

    :param last_level_load: last-level Cache (its .load_from must be None).
    """
    assert isinstance(last_level_load, Cache), \
        "last_level needs to be a Cache object."
    assert last_level_load.load_from is None, \
        "last_level_load must be a last level cache (.load_from is None)."
    self.last_level_load = last_level_load
Set level where to load from .
24,484
def store_from(self, last_level_store):
    """Set level where to store to.

    :param last_level_store: last-level Cache (its .store_to must be None).
    """
    assert isinstance(last_level_store, Cache), \
        "last_level needs to be a Cache object."
    assert last_level_store.store_to is None, \
        "last_level_store must be a last level cache (.store_to is None)."
    self.last_level_store = last_level_store
Set level where to store to .
24,485
def list(self):
    """Returns the list representation of this Key.

    The result is cached on first use.
    """
    # NOTE: method name shadows the builtin `list`; kept for API compatibility.
    if not self._list:
        # Bug fix: under Python 3 `map` returns a one-shot iterator, so caching
        # it meant every call after the first full iteration saw an empty
        # sequence. Materialize a real list instead.
        self._list = [Namespace(part) for part in self._string.split('/')]
    return self._list
Returns the list representation of this Key .
24,486
def instance(self, other):
    """Returns an `instance` Key, by appending a name to the namespace."""
    name = str(other)
    assert '/' not in name
    return Key('%s:%s' % (self, name))
Returns an instance Key by appending a name to the namespace .
24,487
def isAncestorOf(self, other):
    """Returns whether this Key is an ancestor of `other`.

    :raises TypeError: if `other` is not a Key.
    """
    if not isinstance(other, Key):
        raise TypeError('%s is not of type %s' % (other, Key))
    # Ancestry == the other key's string starts with ours plus a separator.
    return other._string.startswith(self._string + '/')
Returns whether this Key is an ancestor of other .
24,488
def isDescendantOf(self, other):
    """Returns whether this Key is a descendant of `other`.

    :raises TypeError: if `other` is not a Key.
    """
    if not isinstance(other, Key):
        raise TypeError('%s is not of type %s' % (other, Key))
    # Descendant-of is the mirror of ancestor-of.
    return other.isAncestorOf(self)
Returns whether this Key is a descendant of other .
24,489
def ensure_directory_exists(directory):
    """Ensures `directory` exists. May make directory and intermediate dirs.

    :raises RuntimeError: if `directory` exists but is a regular file.
    """
    # EAFP: attempt creation directly instead of check-then-create, which had
    # a TOCTOU race (the directory could appear between exists() and makedirs()).
    try:
        os.makedirs(directory)
    except OSError:
        if os.path.isfile(directory):
            raise RuntimeError('Path %s is a file, not a directory.' % directory)
        if not os.path.isdir(directory):
            # Genuine creation failure (e.g. permissions) — propagate.
            raise
Ensures directory exists . May make directory and intermediate dirs . Raises RuntimeError if directory is a file .
24,490
def relative_path(self, key):
    """Returns the relative path for given `key`.

    Instance separators (':') become path separators, the leading '/' is
    stripped, and the path is lower-cased for case-insensitive stores.
    """
    rel = str(key).replace(':', '/')[1:]
    if not self.case_sensitive:
        rel = rel.lower()
    return os.path.normpath(rel)
Returns the relative path for given key
24,491
def path(self, key):
    """Returns the path for given `key`, rooted at this store's root_path."""
    rel = self.relative_path(key)
    return os.path.join(self.root_path, rel)
Returns the path for given key
24,492
def object_path(self, key):
    """Return the object path for `key`, rooted at this store's root_path."""
    rel = self.relative_object_path(key)
    return os.path.join(self.root_path, rel)
return the object path for key .
24,493
def _write_object(self, path, value):
    """Write out `value` to the file at `path`, creating parent dirs as needed."""
    parent = os.path.dirname(path)
    ensure_directory_exists(parent)
    with open(path, 'w') as outfile:
        outfile.write(value)
write out object to file at path
24,494
def _read_object(self, path):
    """Read in and return the object from the file at `path`.

    Returns None if the file does not exist; raises RuntimeError if `path`
    is a directory.
    """
    if os.path.isdir(path):
        raise RuntimeError('%s is a directory, not a file.' % path)
    if not os.path.exists(path):
        return None
    with open(path) as infile:
        return infile.read()
read in object from file at path
24,495
def get(self, key):
    """Return the object named by `key`, or None if it does not exist."""
    return self._read_object(self.object_path(key))
Return the object named by key or None if it does not exist .
24,496
def query(self, query):
    """Returns an iterable of objects matching criteria expressed in `query`.

    Reads every file in the directory named by `query.key` (minus entries in
    `self.ignore_list`) and hands them to the query object lazily.
    """
    directory = self.path(query.key)
    if not os.path.exists(directory):
        return query([])
    names = set(os.listdir(directory)) - set(self.ignore_list)
    full_paths = (os.path.join(directory, n) for n in names)
    return query(self._read_object_gen(full_paths))
Returns an iterable of objects matching criteria expressed in `query`. FSDatastore.query queries all the .obj files within the directory specified by `query.key`.
24,497
def contains(self, key):
    """Returns whether the object named by `key` exists.

    Optimized to only check whether the backing file exists (isfile implies
    existence, so a separate exists() check is unnecessary).
    """
    return os.path.isfile(self.object_path(key))
Returns whether the object named by key exists . Optimized to only check whether the file object exists .
24,498
def _collection(self, key):
    """Returns the namespace collection for `key`, creating it if missing."""
    # setdefault replaces the original `if not collection in ...` membership
    # test (E713 idiom) and its double dict lookup; behavior is identical.
    return self._items.setdefault(str(key.path), {})
Returns the namespace collection for key .
24,499
def query(self, query):
    """Returns an iterable of objects matching criteria expressed in `query`.

    Looks up the in-memory collection named by `query.key`; an unknown key
    queries the empty sequence.
    """
    collection = str(query.key)
    if collection not in self._items:
        return query([])
    return query(self._items[collection].values())
Returns an iterable of objects matching criteria expressed in query