idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
24,300
def __call_api(self, path, params=None, api_url=FORECAST_URL):
    """Call the Datapoint API using the requests module and return JSON.

    Raises APIException when the response body is not valid JSON (often an
    incorrect API key); raises Exception with the API's message on non-200.
    """
    if not params:
        params = dict()
    payload = {'key': self.api_key}
    payload.update(params)
    url = "%s/%s" % (api_url, path)
    # A 1-second timeout is used: it was derived from ten times the mean of
    # 100 measured connection times to Datapoint (rounded up). It could be
    # exposed to users in the functions which need to call the API.
    # The wrapper __retry_session returns a requests.Session object, which
    # has a .get() just like requests.get(), so usage is unchanged.
    sess = self.__retry_session()
    req = sess.get(url, params=payload, timeout=1)
    try:
        data = req.json()
    except ValueError:
        raise APIException("DataPoint has not returned any data, this could be due to an incorrect API key")
    self.call_response = data
    if req.status_code != 200:
        # Pick whichever error field the API happened to populate.
        msg = [data[m] for m in ("message", "error_message", "status") if m in data][0]
        raise Exception(msg)
    return data
Call the datapoint api using the requests module
299
10
24,301
def _get_wx_units ( self , params , name ) : units = "" for param in params : if str ( name ) == str ( param [ 'name' ] ) : units = param [ 'units' ] return units
Give the Wx array returned from datapoint and an element name and return the units for that element .
49
22
24,302
def _visibility_to_text ( self , distance ) : if not isinstance ( distance , ( int , long ) ) : raise ValueError ( "Distance must be an integer not" , type ( distance ) ) if distance < 0 : raise ValueError ( "Distance out of bounds, should be 0 or greater" ) if 0 <= distance < 1000 : return 'VP' elif 1000 <= distance < 4000 : return 'PO' elif 4000 <= distance < 10000 : return 'MO' elif 10000 <= distance < 20000 : return 'GO' elif 20000 <= distance < 40000 : return 'VG' else : return 'EX'
Convert observed visibility in metres to text used in forecast
136
11
24,303
def get_forecast_sites(self):
    """Return a list of Site objects for which forecasts are available,
    refreshing the cached list when it is stale or missing."""
    time_now = time()
    stale = (time_now - self.forecast_sites_last_update) > self.forecast_sites_update_time
    if stale or self.forecast_sites_last_request is None:
        data = self.__call_api("sitelist/")
        sites = list()
        for jsoned in data['Locations']['Location']:
            site = Site()
            site.name = jsoned['name']
            site.id = jsoned['id']
            site.latitude = jsoned['latitude']
            site.longitude = jsoned['longitude']
            for optional in ('region', 'elevation', 'unitaryAuthArea', 'nationalPark'):
                if optional in jsoned:
                    setattr(site, optional, jsoned[optional])
            site.api_key = self.api_key
            sites.append(site)
        self.forecast_sites_last_request = sites
        # Only set the last_update timestamp once last_request has been set
        self.forecast_sites_last_update = time_now
    else:
        sites = self.forecast_sites_last_request
    return sites
This function returns a list of Site object .
315
9
24,304
def get_nearest_site(self, latitude=None, longitude=None):
    """Deprecated. Return the nearest forecast Site to the coordinates.

    Thin alias that emits a DeprecationWarning and delegates to
    get_nearest_forecast_site().
    """
    warning_message = 'This function is deprecated. Use get_nearest_forecast_site() instead'
    warn(warning_message, DeprecationWarning, stacklevel=2)
    return self.get_nearest_forecast_site(latitude, longitude)
Deprecated . This function returns nearest Site object to the specified coordinates .
77
14
24,305
def get_nearest_forecast_site(self, latitude=None, longitude=None):
    """Return the nearest forecast Site object to the given coordinates.

    Returns False when either coordinate is missing.
    Raises APIException when no site lies within 30km.
    """
    if longitude is None:
        # Bug fix: this branch previously printed 'No latitude given.'
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False
    nearest = False
    distance = None
    sites = self.get_forecast_sites()
    # NOTE(review): the original comment says sites has sometimes been None
    # here (TypeError); if that recurs, get_forecast_sites() needs a look.
    for site in sites:
        new_distance = self._distance_between_coords(
            float(site.longitude), float(site.latitude),
            float(longitude), float(latitude))
        if (distance is None) or (new_distance < distance):
            distance = new_distance
            nearest = site
    # If there are no sites at all, or the nearest site is more than 30km
    # away, raise an error. (Bug fix: ``None > 30`` raised TypeError on an
    # empty site list under Python 3.)
    if distance is None or distance > 30:
        raise APIException("There is no site within 30km.")
    return nearest
This function returns the nearest Site object to the specified coordinates .
211
12
24,306
def get_observation_sites(self):
    """Return a list of Site objects for which observations are available,
    refreshing the cached list when it is stale."""
    if (time() - self.observation_sites_last_update) > self.observation_sites_update_time:
        self.observation_sites_last_update = time()
        data = self.__call_api("sitelist/", None, OBSERVATION_URL)
        sites = list()
        for jsoned in data['Locations']['Location']:
            site = Site()
            site.name = jsoned['name']
            site.id = jsoned['id']
            site.latitude = jsoned['latitude']
            site.longitude = jsoned['longitude']
            if 'region' in jsoned:
                site.region = jsoned['region']
            if 'elevation' in jsoned:
                site.elevation = jsoned['elevation']
            if 'unitaryAuthArea' in jsoned:
                site.unitaryAuthArea = jsoned['unitaryAuthArea']
            if 'nationalPark' in jsoned:
                site.nationalPark = jsoned['nationalPark']
            site.api_key = self.api_key
            sites.append(site)
        self.observation_sites_last_request = sites
    else:
        # Bug fix: this read ``observation_self.sites_last_request``, a
        # NameError that crashed every cache hit.
        sites = self.observation_sites_last_request
    return sites
This function returns a list of Site objects for which observations are available .
285
14
24,307
def get_nearest_observation_site(self, latitude=None, longitude=None):
    """Return the nearest Site supporting observations to the coordinates.

    Returns False when either coordinate is missing.
    Raises APIException when no site lies within 20km.
    """
    if longitude is None:
        print('ERROR: No longitude given.')
        return False
    if latitude is None:
        print('ERROR: No latitude given.')
        return False
    nearest = False
    distance = None
    sites = self.get_observation_sites()
    for site in sites:
        new_distance = self._distance_between_coords(
            float(site.longitude), float(site.latitude),
            float(longitude), float(latitude))
        if (distance is None) or (new_distance < distance):
            distance = new_distance
            nearest = site
    # If there are no sites, or the nearest is more than 20km away, raise.
    # (Bug fix: the message previously claimed "within 30km" while the
    # threshold is 20km; also guard the empty-list TypeError.)
    if distance is None or distance > 20:
        raise APIException("There is no site within 20km.")
    return nearest
This function returns the nearest Site to the specified coordinates that supports observations
185
13
24,308
def call_api(self, path, **kwargs):
    """Call the Datapoint API at *path*, injecting the API key if absent.

    Raises requests.HTTPError on non-OK responses; returns decoded JSON.
    """
    kwargs.setdefault('key', self.api_key)
    url = '{0}{1}'.format(self.base_url, path)
    response = requests.get(url, params=kwargs)
    if response.status_code != requests.codes.ok:
        response.raise_for_status()
    return response.json()
Call datapoint api
99
5
24,309
def get_all_regions(self):
    """Request the list of regions from Datapoint as Site objects.

    Regions rarely change, so the response is cached (regions_update_time)
    to minimise API requests.
    """
    if (time() - self.regions_last_update) < self.regions_update_time:
        return self.regions_last_request
    response = self.call_api(self.all_regions_path)
    regions = []
    for location in response['Locations']['Location']:
        region = Site()
        region.id = location['@id']
        region.region = location['@name']
        region.name = REGION_NAMES[location['@name']]
        regions.append(region)
    self.regions_last_update = time()
    self.regions_last_request = regions
    return regions
Request a list of regions from Datapoint . Returns each Region as a Site object . Regions rarely change so we cache the response for one hour to minimise requests to API .
152
36
24,310
def now(self):
    """Return just the current timestep from this forecast.

    Returns None when no timestep of today has started yet, and False when
    the forecast's first day cannot be matched to the current date at all.
    """
    # From the comments in issue 19: forecast.days[0] is dated for the
    # previous day shortly after midnight.
    current = None
    # Use the same time zone as the first timestep in the forecast. This
    # shouldn't cause problems with daylight savings as the change is far
    # enough after midnight.
    d = datetime.datetime.now(tz=self.days[0].date.tzinfo)
    # Seconds elapsed since local midnight...
    since_midnight = d - d.replace(hour=0, minute=0, second=0, microsecond=0)
    # ...converted to minutes since midnight, which is what timestep.name
    # is compared against.
    msm = since_midnight.total_seconds() / 60
    if self.days[0].date.strftime("%Y-%m-%dZ") == d.strftime("%Y-%m-%dZ"):
        # Same date: keep the timestep with the largest name that is still
        # less than the number of minutes since midnight.
        for timestep in self.days[0].timesteps:
            if timestep.name > msm:
                break
            current = timestep
        return current
    # Bodge to get around problems near midnight: the method above does not
    # account for the end of the month. Accept the last timestep of the
    # first day if it is within 4 hours of now (the final timestep of the
    # previous day is for 21:00).
    elif abs(self.days[0].timesteps[-1].date - d).total_seconds() < 14400:
        timestep_to_return = self.days[0].timesteps[-1]
        return timestep_to_return
    else:
        return False
Function to return just the current timestep from this forecast
654
12
24,311
def future(self, in_days=None, in_hours=None, in_minutes=None, in_seconds=None):
    """Return the forecast timestep at/after the requested offset from now.

    Offsets default to 0; returns False (after printing an error) when the
    requested day is outside the range of this forecast.
    """
    future = None
    # Initialize offsets to 0
    dd, hh, mm, ss = 0, 0, 0, 0
    if in_days is not None:
        dd = dd + in_days
    if in_hours is not None:
        hh = hh + in_hours
    if in_minutes is not None:
        mm = mm + in_minutes
    if in_seconds is not None:
        ss = ss + in_seconds
    # Apply the sub-day offsets to the current UTC time
    dnow = datetime.datetime.utcnow()
    d = dnow + datetime.timedelta(hours=hh, minutes=mm, seconds=ss)
    # Time elapsed since midnight of the target moment
    for_total_seconds = d - d.replace(hour=0, minute=0, second=0, microsecond=0)
    # Convert into minutes since midnight
    try:
        msm = for_total_seconds.total_seconds() / 60.
    except AttributeError:
        # For Python versions before 2.7 (no timedelta.total_seconds)
        msm = self.timedelta_total_seconds(for_total_seconds) / 60.
    if dd < len(self.days):
        for timestep in self.days[dd].timesteps:
            if timestep.name >= msm:
                future = timestep
                # Bug fix: without this break the loop kept overwriting
                # ``future`` and always returned the LAST timestep of the
                # day instead of the first one at/after the target time.
                break
        return future
    else:
        print('ERROR: requested date is outside the forecast range selected,' + str(len(self.days)))
        return False
Function to return a future timestep
344
8
24,312
def install_API_key(api_key, profile_name='default'):
    """Persist *api_key* into the file for the given profile name,
    creating the profile directory if needed."""
    fname = API_profile_fname(profile_name)
    parent = os.path.dirname(fname)
    if not os.path.isdir(parent):
        os.makedirs(parent)
    with open(fname, 'w') as fh:
        fh.write(api_key)
Put the given API key into the given profile name .
95
11
24,313
def is_namedtuple(type_: Type[Any]) -> bool:
    """Detect classes generated with typing.NamedTuple (tuple subclass
    carrying both _field_types and _fields)."""
    return (_issubclass(type_, tuple)
            and hasattr(type_, '_field_types')
            and hasattr(type_, '_fields'))
Generated with typing . NamedTuple
55
8
24,314
def uniontypes(type_: Type[Any]) -> Set[Type[Any]]:
    """Return the member types of a Union.

    Raises ValueError for non-Union types and AttributeError when the
    typing API for this Python version is unrecognised.
    """
    if not is_union(type_):
        raise ValueError('Not a Union: ' + str(type_))
    # Different Python versions expose the members under different names.
    for attr in ('__args__', '__union_params__'):
        if hasattr(type_, attr):
            return set(getattr(type_, attr))
    raise AttributeError('The typing API for this Python version is unknown')
Returns the types of a Union .
118
7
24,315
def index(self, value: Any) -> int:
    """Return the index of the first handler whose condition matches *value*.

    Condition errors count as a non-match unless self.raiseconditionerrors
    is set. Raises TypedloadValueError when no handler matches.
    """
    for i, cond in ((j[0], j[1][0]) for j in enumerate(self.handlers)):
        try:
            match = cond(value)
        # Bug fix: a bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt raised inside a condition.
        except Exception:
            if self.raiseconditionerrors:
                raise
            match = False
        if match:
            return i
    raise TypedloadValueError('Unable to dump %s' % value, value=value)
Returns the index in the handlers list that matches the given value .
94
13
24,316
def dump(self, value: Any) -> Any:
    """Dump the typed data structure into its untyped equivalent by
    dispatching to the first matching handler."""
    handler_index = self.index(value)
    handler_func = self.handlers[handler_index][1]
    return handler_func(self, value)
Dump the typed data structure into its untyped equivalent .
38
13
24,317
def _forwardrefload(l: Loader, value: Any, type_: type) -> Any:
    """Resolve a ForwardRef against the loader's known types and load
    *value* with the resolved type."""
    if l.frefs is None:
        raise TypedloadException('ForwardRef resolving is disabled for the loader', value=value, type_=type_)
    tname = type_.__forward_arg__  # type: ignore
    resolved = l.frefs.get(tname)
    if resolved is None:
        raise TypedloadValueError("ForwardRef '%s' unknown" % tname, value=value, type_=type_)
    return l.load(value, resolved, annotation=Annotation(AnnotationType.FORWARDREF, tname))
This resolves a ForwardRef .
146
6
24,318
def _basicload ( l : Loader , value : Any , type_ : type ) -> Any : if type ( value ) != type_ : if l . basiccast : try : return type_ ( value ) except ValueError as e : raise TypedloadValueError ( str ( e ) , value = value , type_ = type_ ) except TypeError as e : raise TypedloadTypeError ( str ( e ) , value = value , type_ = type_ ) except Exception as e : raise TypedloadException ( str ( e ) , value = value , type_ = type_ ) else : raise TypedloadValueError ( 'Not of type %s' % type_ , value = value , type_ = type_ ) return value
This converts a value into a basic type .
159
9
24,319
def _unionload(l: Loader, value, type_) -> Any:
    """Load *value* into a Union by trying each member type in turn."""
    try:
        args = uniontypes(type_)
    except AttributeError:
        raise TypedloadAttributeError('The typing API for this Python version is unknown')
    # Do not convert basic types, if possible
    if type(value) in args.intersection(l.basictypes):
        return value
    exceptions = []
    # Try all types
    for t in args:
        try:
            return l.load(value, t, annotation=Annotation(AnnotationType.UNION, t))
        except Exception as e:
            exceptions.append(e)
    raise TypedloadValueError(
        'Value could not be loaded into %s' % type_,
        value=value,
        type_=type_,
        exceptions=exceptions)
Loads a value into a union .
165
8
24,320
def _enumload ( l : Loader , value , type_ ) -> Enum : try : # Try naïve conversion return type_ ( value ) except : pass # Try with the typing hints for _ , t in get_type_hints ( type_ ) . items ( ) : try : return type_ ( l . load ( value , t ) ) except : pass raise TypedloadValueError ( 'Value could not be loaded into %s' % type_ , value = value , type_ = type_ )
This loads something into an Enum .
109
8
24,321
def _noneload ( l : Loader , value , type_ ) -> None : if value is None : return None raise TypedloadValueError ( 'Not None' , value = value , type_ = type_ )
Loads a value that can only be None, so it fails if it isn't.
48
16
24,322
def index(self, type_: Type[T]) -> int:
    """Return the index of the first handler whose condition matches *type_*.

    Condition errors count as a non-match unless self.raiseconditionerrors
    is set. Raises ValueError when no handler matches.
    """
    for i, cond in ((q[0], q[1][0]) for q in enumerate(self.handlers)):
        try:
            match = cond(type_)
        # Bug fix: a bare ``except:`` also swallowed SystemExit and
        # KeyboardInterrupt raised inside a condition.
        except Exception:
            if self.raiseconditionerrors:
                raise
            match = False
        if match:
            return i
    raise ValueError('No matching condition found')
Returns the index in the handlers list that matches the given type .
88
13
24,323
def load(self, value: Any, type_: Type[T], *, annotation: Optional[Annotation] = None) -> T:
    """Load *value* into the typed data structure *type_*.

    Failures are re-raised with a TraceItem prepended to the exception's
    trace so the full load path is visible.
    """
    try:
        handler_index = self.index(type_)
    except ValueError:
        raise TypedloadTypeError('Cannot deal with value of type %s' % type_, value=value, type_=type_)
    # Record the type so ForwardRefs can be resolved later on
    if self.frefs is not None and hasattr(type_, '__name__'):
        tname = type_.__name__
        if tname not in self.frefs:
            self.frefs[tname] = type_
    func = self.handlers[handler_index][1]
    try:
        return func(self, value, type_)
    except Exception as e:
        assert isinstance(e, TypedloadException)
        e.trace.insert(0, TraceItem(value, type_, annotation))
        raise e
Loads value into the typed data structure .
204
9
24,324
def get_data(city: Optional[str]) -> Dict[str, Any]:
    """Use the Yahoo weather API to get weather information for *city*."""
    req = urllib.request.Request(get_url(city))
    with urllib.request.urlopen(req) as f:
        response = f.read()
    data = json.loads(response.decode('ascii'))
    # Remove some useless nesting from the API response
    return data['query']['results']['channel']
Use the Yahoo weather API to get weather information
105
9
24,325
def load(value: Any, type_: Type[T], **kwargs) -> T:
    """Quick function call to load data into a type.

    Builds a throwaway Loader configured with **kwargs and delegates.
    """
    from . import dataloader
    return dataloader.Loader(**kwargs).load(value, type_)
Quick function call to load data into a type .
57
10
24,326
def dump(value: Any, **kwargs) -> Any:
    """Quick function to dump a data structure into something compatible
    with JSON and other programs/languages."""
    from . import datadumper
    return datadumper.Dumper(**kwargs).dump(value)
Quick function to dump a data structure into something that is compatible with json or other programs and languages .
46
20
24,327
def attrload(value: Any, type_: Type[T], **kwargs) -> T:
    """Quick load that additionally supports the attr module."""
    from . import dataloader
    from .plugins import attrload as loadplugin
    loader = dataloader.Loader(**kwargs)
    loadplugin.add2loader(loader)
    return loader.load(value, type_)
Quick function call to load data supporting the attr module in addition to the default ones .
78
18
24,328
def attrdump(value: Any, **kwargs) -> Any:
    """Quick dump that additionally supports the attr module."""
    from . import datadumper
    from .plugins import attrdump as dumpplugin
    dumper = datadumper.Dumper(**kwargs)
    dumpplugin.add2dumper(dumper)
    return dumper.dump(value)
Quick function to do a dump that supports the attr module .
77
13
24,329
def on_panic(etype, value, tb):
    """Handle an unhandled error in a goroutine: log it critically and
    exit the process (default panic behaviour)."""
    formatted = _traceback.format_exception(etype, value, tb)
    _logging.critical(formatted)
    _be.propagate_exc(SystemExit, 1)
Called when there is an unhandled error in a goroutine . By default logs and exits the process .
50
22
24,330
def stdout_to_results(s):
    """Turn the multi-line output of a benchmark process into a list of
    BenchmarkResult instances (one per whitespace-separated line)."""
    lines = s.strip().split('\n')
    return [BenchmarkResult(*fields.split()) for fields in lines]
Turns the multi - line output of a benchmark process into a sequence of BenchmarkResult instances .
44
20
24,331
def benchmark_process_and_backend(exe, backend):
    """Return BenchmarkResults for *exe* run with GOLESS_BACKEND=*backend*."""
    env = dict(os.environ)
    env['GOLESS_BACKEND'] = backend
    return get_benchproc_results([exe, '-m', 'benchmark'], env=env)
Returns BenchmarkResults for a given executable and backend .
68
11
24,332
def insert_seperator_results(results):
    """Yield *results* with a blank separator BenchmarkResult inserted
    between runs of differing benchmarks, for visual grouping."""
    sepbench = BenchmarkResult(*[' ' * w for w in COLUMN_WIDTHS])
    previous = None
    for r in results:
        if previous is not None and previous != r.benchmark:
            yield sepbench
        previous = r.benchmark
        yield r
Given a sequence of BenchmarkResults, return a new sequence where a separator BenchmarkResult has been placed between differing benchmarks to provide a visual difference.
84
31
24,333
def parse(self) -> typing.Union[list, dict, None]:
    """Parse the BYML and return the root node with all of its children.

    Returns None for an empty document; raises ValueError when the root is
    not a container (array or dict).
    """
    root_node_offset = self._read_u32(12)
    if root_node_offset == 0:
        return None
    node_type = self._data[root_node_offset]
    if not _is_container_type(node_type):
        raise ValueError("Invalid root node: expected array or dict, got type 0x%x" % node_type)
    return self._parse_node(node_type, 12)
Parse the BYML and get the root node with all children .
113
14
24,334
def check_permission(self, request):
    """Return True when every permission class allows *request* to see
    this field."""
    return all(perm.has_permission(request) for perm in self.permission_classes)
Check this field s permissions to determine whether or not it may be shown .
34
15
24,335
def build_github_url(repo, branch=None, path='requirements.txt', token=None):
    """Build a raw.githubusercontent.com URL to a file inside a GitHub repo.

    *repo* may be 'owner/name' or a full github.com URL; *branch* defaults
    to the repo's default branch; *token* is appended as a query parameter.
    """
    # Bug fix: the dots were unescaped, so e.g. 'httpsX//githubYcom/' would
    # also have been stripped. Use a non-capturing group while we're at it.
    repo = re.sub(r"^http(?:s)?://github\.com/", "", repo).strip('/')
    # args come in as 'None' instead of not being provided
    if not path:
        path = 'requirements.txt'
    if not branch:
        branch = get_default_branch(repo)
    url = 'https://raw.githubusercontent.com/{}/{}/{}'.format(repo, branch, path)
    if token:
        url = '{}?token={}'.format(url, token)
    return url
Builds a URL to a file inside a Github repository .
148
12
24,336
def get_default_branch(repo):
    """Return the name of the repo's default branch, falling back to
    'master' when the GitHub API request fails."""
    url = "{}/repos/{}".format(GITHUB_API_BASE, repo)
    response = requests.get(url)
    if response.status_code != 200:
        return 'master'
    api_response = json.loads(response.text)
    return api_response['default_branch']
returns the name of the default branch of the repo
82
11
24,337
def get_requirements_file_from_url(url):
    """Fetch the requirements file from *url* as a StringIO; empty on
    any non-200 response."""
    response = requests.get(url)
    text = response.text if response.status_code == 200 else ""
    return StringIO(text)
Fetches the requirements from the URL.
48
9
24,338
def longest_one_seg_prefix(self, word):
    """Return the longest IPA Unicode prefix of *word* ('' if none)."""
    match = self.seg_regex.match(word)
    return match.group(0) if match else ''
Return longest IPA Unicode prefix of word
43
7
24,339
def filter_segs(self, segs):
    """Given a list of strings, return only those which are valid
    segments (i.e. the segment regex consumes the whole string)."""
    def whole_seg(seg):
        m = self.seg_regex.match(seg)
        return bool(m) and m.group(0) == seg
    return [s for s in segs if whole_seg(s)]
Given list of strings return only those which are valid segments .
70
12
24,340
def validate_line(self, line):
    """Validate a Unicode IPA string relative to panphon, printing a
    message to stderr for every invalid position."""
    original = line
    pos = 0
    while line:
        seg_m = self.ft.seg_regex.match(line)
        wsp_m = self.ws_punc_regex.match(line)
        if seg_m:
            step = len(seg_m.group(0))
        elif wsp_m:
            step = len(wsp_m.group(0))
        else:
            msg = 'IPA not valid at position {} in "{}".'.format(pos, original.strip())
            print(msg, file=sys.stderr)
            step = 1
        line = line[step:]
        pos += step
Validate Unicode IPA string relative to panphon .
183
11
24,341
def segment_text(text, seg_regex=SEG_REGEX):
    """Yield each segment found in *text*, in order."""
    for match in seg_regex.finditer(text):
        yield match.group(0)
Return an iterator of segments in the text .
42
9
24,342
def fts_match(self, features, segment):
    """Answer whether *features* is a subset of the segment's features.

    Returns None when the segment is unknown.
    """
    features = set(features)
    if not self.seg_known(segment):
        return None
    return features <= self.fts(segment)
Answer question are ft_mask s features a subset of ft_seg?
44
16
24,343
def longest_one_seg_prefix(self, word):
    """Return the longest Unicode IPA prefix of *word* found in the
    segment dictionary ('' if none)."""
    for length in range(self.longest_seg, 0, -1):
        prefix = word[:length]
        if prefix in self.seg_dict:
            return prefix
    return ''
Return longest Unicode IPA prefix of a word
55
8
24,344
def validate_word(self, word):
    """Return True if *word* consists exhaustively of valid IPA segments."""
    while word:
        match = self.seg_regex.match(word)
        if not match:
            return False
        word = word[len(match.group(0)):]
    return True
Returns True if word consists exhaustively of valid IPA segments
85
11
24,345
def segs(self, word):
    """Return the list of segments (named group 'all') found in *word*."""
    return [match.group('all') for match in self.seg_regex.finditer(word)]
Returns a list of segments from a word
36
8
24,346
def word_fts(self, word):
    """Return the featural analysis of *word* (one entry per segment)."""
    return [self.fts(seg) for seg in self.segs(word)]
Return featural analysis of word
29
6
24,347
def filter_string(self, word):
    """Return a string like *word* but containing only legal IPA segments."""
    return ''.join(m.group(0) for m in self.seg_regex.finditer(word))
Return a string like the input but containing only legal IPA segments
45
12
24,348
def fts_intersection(self, segs):
    """Return the features shared by all valid segments in *segs*."""
    fts_vecs = [self.fts(s) for s in self.filter_segs(segs)]
    return reduce(lambda acc, nxt: acc & nxt, fts_vecs)
Return the features shared by segs
59
7
24,349
def fts_match_any(self, fts, inv):
    """Return True if any segment in *inv* matches the features in *fts*."""
    return any(self.fts_match(fts, seg) for seg in inv)
Return True if any segment in inv matches the features in fts
38
13
24,350
def fts_match_all(self, fts, inv):
    """Return True if every segment in *inv* matches the features in *fts*."""
    return all(self.fts_match(fts, seg) for seg in inv)
Return True if all segments in inv matches the features in fts
38
13
24,351
def fts_contrast2(self, fs, ft_name, inv):
    """Return True if some pair of segments in *inv* (restricted to those
    whose features include *fs*) contrasts only in feature *ft_name*."""
    matching = [self.fts(x) for x in inv if set(fs) <= self.fts(x)]
    for first in matching:
        for second in matching:
            if first == second:
                continue
            diff = first ^ second
            # A minimal contrast: exactly two differing (value, name)
            # pairs, both on the requested feature.
            if len(diff) == 2 and all(nm == ft_name for (_, nm) in diff):
                return True
    return False
Return True if there is a segment in inv that contrasts in feature ft_name .
105
17
24,352
def fts_count(self, fts, inv):
    """Return the count of segments in inventory *inv* matching the
    feature mask *fts*."""
    return sum(1 for seg in inv if self.fts_match(fts, seg))
Return the count of segments in an inventory matching a given feature mask .
41
14
24,353
def match_pattern(self, pat, word):
    """Fixed-width pattern matching: return the featural analysis of
    *word* when each mask in *pat* is a subset of the corresponding
    segment's features; otherwise None."""
    segs = self.word_fts(word)
    if len(pat) != len(segs):
        return None
    if all(set(p) <= s for (p, s) in zip(pat, segs)):
        return segs
Implements fixed - width pattern matching .
69
9
24,354
def compile_regex_from_str(self, ft_str):
    """Given a string of bracketed feature masks for a sequence of
    segments, return a compiled regex matching the corresponding strings."""
    parts = []
    for m in re.finditer(r'\[([^]]+)\]', ft_str):
        ft_mask = fts(m.group(1))
        segs = self.all_segs_matching_fts(ft_mask)
        parts.append('({})'.format('|'.join(segs)))
    return re.compile(''.join(parts))
Given a string describing features masks for a sequence of segments return a regex matching the corresponding strings .
125
19
24,355
def segment_to_vector(self, seg):
    """Given a Unicode IPA segment, return its feature specifications in
    canonical order (self.names)."""
    ft_dict = {ft: val for (val, ft) in self.fts(seg)}
    return [ft_dict[name] for name in self.names]
Given a Unicode IPA segment, return a list of feature specifications in canonical order.
52
17
24,356
def word_to_vector_list(self, word, numeric=False, xsampa=False):
    """Return a list of feature vectors for a Unicode IPA word.

    With xsampa=True the word is converted from X-SAMPA first; with
    numeric=True the vectors are converted to numeric form.
    """
    if xsampa:
        word = self.xsampa.convert(word)
    vectors = [self.segment_to_vector(seg) for seg in self.segs(word)]
    return self.tensor_to_numeric(vectors) if numeric else vectors
Return a list of feature vectors given a Unicode IPA word .
87
12
24,357
def clown_strike_ioc(self, ioc):
    """Perform a Clown Strike lookup on an IoC and print the response."""
    response = requests.get('http://threatbutt.io/api', data='ioc={0}'.format(ioc))
    self._output(response.text)
Performs Clown Strike lookup on an IoC .
56
10
24,358
def bespoke_md5(self, md5):
    """Perform a Bespoke MD5 lookup on an MD5 and print the response."""
    response = requests.post('http://threatbutt.io/api/md5/{0}'.format(md5))
    self._output(response.text)
Performs Bespoke MD5 lookup on an MD5 .
52
13
24,359
def sonority_from_fts(self, seg):
    """Given a segment as features, return its sonority on a scale of
    1 to 9 by walking a fixed decision tree of feature tests."""
    def match(mask):
        return self.fm.match(fts(mask), seg)
    # Leaves and subtrees of the sonority decision tree.
    minus_hi = BoolTree(match('-hi'), 9, 8)
    minus_nas = BoolTree(match('-nas'), 6, 5)
    plus_voi1 = BoolTree(match('+voi'), 4, 3)
    plus_voi2 = BoolTree(match('+voi'), 2, 1)
    plus_cont = BoolTree(match('+cont'), plus_voi1, plus_voi2)
    plus_son = BoolTree(match('+son'), minus_nas, plus_cont)
    minus_cons = BoolTree(match('-cons'), 7, plus_son)
    plus_syl = BoolTree(match('+syl'), minus_hi, minus_cons)
    return plus_syl.get_value()
Given a segment as features returns the sonority on a scale of 1 to 9 .
220
17
24,360
def from_dict(cls, d):
    """Create a cache hierarchy from a dictionary description.

    Returns (hierarchy, caches, main_memory).
    """
    main_memory = MainMemory()
    caches = {}
    referred_caches = set()
    # First pass: create all named caches and collect references
    for name, conf in d.items():
        cache_kwargs = {k: v for k, v in conf.items()
                        if k not in ['store_to', 'load_from', 'victims_to']}
        caches[name] = Cache(name=name, **cache_kwargs)
        for ref in ('store_to', 'load_from', 'victims_to'):
            if ref in conf:
                referred_caches.add(conf[ref])
    # Second pass: connect caches
    for name, conf in d.items():
        if conf.get('store_to') is not None:
            caches[name].set_store_to(caches[conf['store_to']])
        if conf.get('load_from') is not None:
            caches[name].set_load_from(caches[conf['load_from']])
        if conf.get('victims_to') is not None:
            caches[name].set_victims_to(caches[conf['victims_to']])
    # The first level is the one cache not targeted by any reference
    first_level = set(d.keys()) - referred_caches
    assert len(first_level) == 1, "Unable to find first cache level."
    first_level = caches[list(first_level)[0]]
    # Last level on the load path (no load_from target)
    last_level_load = c = first_level
    while c is not None:
        last_level_load = c
        c = c.load_from
    assert last_level_load is not None, "Unable to find last cache level."
    # Last level on the store path (no store_to target)
    last_level_store = c = first_level
    while c is not None:
        last_level_store = c
        c = c.store_to
    assert last_level_store is not None, "Unable to find last cache level."
    # Connect main memory to both last levels
    main_memory.load_to(last_level_load)
    main_memory.store_from(last_level_store)
    return cls(first_level, main_memory), caches, main_memory
Create cache hierarchy from dictionary .
573
6
24,361
def load(self, addr, length=1):
    """Load one address (scalar) or many (iterable); None is a no-op."""
    if addr is None:
        return
    if isinstance(addr, Iterable):
        self.first_level.iterload(addr, length=length)
    else:
        self.first_level.load(addr, length=length)
Load one or more addresses .
61
6
24,362
def store(self, addr, length=1, non_temporal=False):
    """Store one address (scalar) or many (iterable); None is a no-op.

    Raises ValueError for non_temporal stores (not yet supported).
    """
    if non_temporal:
        raise ValueError("non_temporal stores are not yet supported")
    if addr is None:
        return
    if isinstance(addr, Iterable):
        self.first_level.iterstore(addr, length=length)
    else:
        self.first_level.store(addr, length=length)
Store one or more adresses .
90
7
24,363
def loadstore(self, addrs, length=1):
    """Load and store addresses in the order given; *addrs* must be iterable."""
    if not isinstance(addrs, Iterable):
        raise ValueError("addr must be iteratable")
    self.first_level.loadstore(addrs, length=length)
Load and store address in order given .
54
8
24,364
def print_stats(self, header=True, file=sys.stdout):
    """Pretty-print the per-level stats table to *file*."""
    if header:
        print("CACHE {:*^18} {:*^18} {:*^18} {:*^18} {:*^18}".format(
            "HIT", "MISS", "LOAD", "STORE", "EVICT"),
            file=file)
    for s in self.stats():
        # Bug fix: a stray ``HIT_bytes=2342`` keyword was passed here. It was
        # dead (the format string reads HIT_byte from **s) and would raise
        # TypeError if a stats dict ever contained a 'HIT_bytes' key.
        print(("{name:>5} {HIT_count:>6} ({HIT_byte:>8}B) {MISS_count:>6} ({MISS_byte:>8}B) "
               "{LOAD_count:>6} ({LOAD_byte:>8}B) {STORE_count:>6} "
               "({STORE_byte:>8}B) {EVICT_count:>6} ({EVICT_byte:>8}B)").format(**s),
              file=file)
Pretty print stats table .
222
5
24,365
def levels(self, with_mem=True):
    """Yield the cache levels, optionally followed by main memory."""
    p = self.first_level
    while p is not None:
        yield p
        # FIXME bad hack to include victim caches, need a more general
        # solution, probably involving recursive tree walking
        if p.victims_to is not None and p.victims_to != p.load_from:
            yield p.victims_to
        if (p.store_to is not None and p.store_to != p.load_from
                and p.store_to != p.victims_to):
            yield p.store_to
        p = p.load_from
    if with_mem:
        yield self.main_memory
Return cache levels optionally including main memory .
135
8
24,366
def count_invalid_entries(self):
    """Return the sum of invalid-entry counts over all cache levels
    (main memory excluded)."""
    return sum(c.count_invalid_entries() for c in self.levels(with_mem=False))
Sum of all invalid entry counts from cache levels .
42
10
24,367
def set_load_from(self, load_from):
    """Update load_from in this Cache and in its backend.

    *load_from* may be None (last-level cache) or a Cache object.
    """
    assert load_from is None or isinstance(load_from, Cache), "load_from needs to be None or a Cache object."
    assert load_from is None or load_from.cl_size <= self.cl_size, "cl_size may only increase towards main memory."
    self.load_from = load_from
    # Bug fix: the asserts explicitly allow None, but the original then
    # unconditionally dereferenced load_from.backend (AttributeError).
    self.backend.load_from = load_from.backend if load_from is not None else None
Update load_from in Cache and backend .
98
9
24,368
def set_store_to(self, store_to):
    """Update store_to in this Cache and in its backend.

    *store_to* may be None (last-level cache) or a Cache object.
    """
    assert store_to is None or isinstance(store_to, Cache), "store_to needs to be None or a Cache object."
    assert store_to is None or store_to.cl_size <= self.cl_size, "cl_size may only increase towards main memory."
    self.store_to = store_to
    # Bug fix: the asserts explicitly allow None, but the original then
    # unconditionally dereferenced store_to.backend (AttributeError).
    self.backend.store_to = store_to.backend if store_to is not None else None
Update store_to in Cache and backend .
98
9
24,369
def set_victims_to ( self , victims_to ) : assert victims_to is None or isinstance ( victims_to , Cache ) , "store_to needs to be None or a Cache object." assert victims_to is None or victims_to . cl_size == self . cl_size , "cl_size may only increase towards main memory." self . victims_to = victims_to self . backend . victims_to = victims_to . backend
Update victims_to in Cache and backend .
99
9
24,370
def load_to ( self , last_level_load ) : assert isinstance ( last_level_load , Cache ) , "last_level needs to be a Cache object." assert last_level_load . load_from is None , "last_level_load must be a last level cache (.load_from is None)." self . last_level_load = last_level_load
Set level where to load from .
83
7
24,371
def store_from ( self , last_level_store ) : assert isinstance ( last_level_store , Cache ) , "last_level needs to be a Cache object." assert last_level_store . store_to is None , "last_level_store must be a last level cache (.store_to is None)." self . last_level_store = last_level_store
Set level where to store to .
83
7
24,372
def list ( self ) : if not self . _list : self . _list = map ( Namespace , self . _string . split ( '/' ) ) return self . _list
Returns the list representation of this Key .
39
8
24,373
def instance ( self , other ) : assert '/' not in str ( other ) return Key ( str ( self ) + ':' + str ( other ) )
Returns an instance Key by appending a name to the namespace .
33
13
24,374
def isAncestorOf ( self , other ) : if isinstance ( other , Key ) : return other . _string . startswith ( self . _string + '/' ) raise TypeError ( '%s is not of type %s' % ( other , Key ) )
Returns whether this Key is an ancestor of other .
60
10
24,375
def isDescendantOf ( self , other ) : if isinstance ( other , Key ) : return other . isAncestorOf ( self ) raise TypeError ( '%s is not of type %s' % ( other , Key ) )
Returns whether this Key is a descendant of other .
52
10
24,376
def ensure_directory_exists ( directory ) : if not os . path . exists ( directory ) : os . makedirs ( directory ) elif os . path . isfile ( directory ) : raise RuntimeError ( 'Path %s is a file, not a directory.' % directory )
Ensures directory exists . May make directory and intermediate dirs . Raises RuntimeError if directory is a file .
61
24
24,377
def relative_path ( self , key ) : key = str ( key ) # stringify key = key . replace ( ':' , '/' ) # turn namespace delimiters into slashes key = key [ 1 : ] # remove first slash (absolute) if not self . case_sensitive : key = key . lower ( ) # coerce to lowercase return os . path . normpath ( key )
Returns the relative path for given key
85
7
24,378
def path ( self , key ) : return os . path . join ( self . root_path , self . relative_path ( key ) )
Returns the path for given key
30
6
24,379
def object_path ( self , key ) : return os . path . join ( self . root_path , self . relative_object_path ( key ) )
return the object path for key .
34
7
24,380
def _write_object ( self , path , value ) : ensure_directory_exists ( os . path . dirname ( path ) ) with open ( path , 'w' ) as f : f . write ( value )
write out object to file at path
48
7
24,381
def _read_object ( self , path ) : if not os . path . exists ( path ) : return None if os . path . isdir ( path ) : raise RuntimeError ( '%s is a directory, not a file.' % path ) with open ( path ) as f : file_contents = f . read ( ) return file_contents
read in object from file at path
76
7
24,382
def get ( self , key ) : path = self . object_path ( key ) return self . _read_object ( path )
Return the object named by key or None if it does not exist .
28
14
24,383
def query ( self , query ) : path = self . path ( query . key ) if os . path . exists ( path ) : filenames = os . listdir ( path ) filenames = list ( set ( filenames ) - set ( self . ignore_list ) ) filenames = map ( lambda f : os . path . join ( path , f ) , filenames ) iterable = self . _read_object_gen ( filenames ) else : iterable = list ( ) return query ( iterable )
Returns an iterable of objects matching criteria expressed in query FSDatastore . query queries all the . obj files within the directory specified by the query . key .
115
34
24,384
def contains ( self , key ) : path = self . object_path ( key ) return os . path . exists ( path ) and os . path . isfile ( path )
Returns whether the object named by key exists . Optimized to only check whether the file object exists .
37
20
24,385
def _collection ( self , key ) : collection = str ( key . path ) if not collection in self . _items : self . _items [ collection ] = dict ( ) return self . _items [ collection ]
Returns the namespace collection for key .
45
7
24,386
def query ( self , query ) : # entire dataset already in memory, so ok to apply query naively if str ( query . key ) in self . _items : return query ( self . _items [ str ( query . key ) ] . values ( ) ) else : return query ( [ ] )
Returns an iterable of objects matching criteria expressed in query
63
11
24,387
def get ( self , key ) : key = self . _service_key ( key ) return self . _service_ops [ 'get' ] ( key )
Return the object in service named by key or None .
34
11
24,388
def put ( self , key , value ) : key = self . _service_key ( key ) self . _service_ops [ 'put' ] ( key , value )
Stores the object value named by key in service .
37
11
24,389
def delete ( self , key ) : key = self . _service_key ( key ) self . _service_ops [ 'delete' ] ( key )
Removes the object named by key in service .
33
10
24,390
def get ( self , key ) : value = self . cache_datastore . get ( key ) return value if value is not None else self . child_datastore . get ( key )
Return the object named by key or None if it does not exist . CacheShimDatastore first checks its cache_datastore .
42
29
24,391
def put ( self , key , value ) : self . cache_datastore . put ( key , value ) self . child_datastore . put ( key , value )
Stores the object value named by key self . Writes to both cache_datastore and child_datastore .
38
26
24,392
def delete ( self , key ) : self . cache_datastore . delete ( key ) self . child_datastore . delete ( key )
Removes the object named by key . Writes to both cache_datastore and child_datastore .
32
24
24,393
def contains ( self , key ) : return self . cache_datastore . contains ( key ) or self . child_datastore . contains ( key )
Returns whether the object named by key exists . First checks cache_datastore .
34
17
24,394
def get ( self , key ) : self . logger . info ( '%s: get %s' % ( self , key ) ) value = super ( LoggingDatastore , self ) . get ( key ) self . logger . debug ( '%s: %s' % ( self , value ) ) return value
Return the object named by key or None if it does not exist . LoggingDatastore logs the access .
68
23
24,395
def delete ( self , key ) : self . logger . info ( '%s: delete %s' % ( self , key ) ) super ( LoggingDatastore , self ) . delete ( key )
Removes the object named by key . LoggingDatastore logs the access .
44
17
24,396
def contains ( self , key ) : self . logger . info ( '%s: contains %s' % ( self , key ) ) return super ( LoggingDatastore , self ) . contains ( key )
Returns whether the object named by key exists . LoggingDatastore logs the access .
45
18
24,397
def query ( self , query ) : self . logger . info ( '%s: query %s' % ( self , query ) ) return super ( LoggingDatastore , self ) . query ( query )
Returns an iterable of objects matching criteria expressed in query . LoggingDatastore logs the access .
45
21
24,398
def nestKey ( self , key ) : nest = self . nest_keyfn ( key ) # if depth * length > len(key.name), we need to pad. mult = 1 + int ( self . nest_depth * self . nest_length / len ( nest ) ) nest = nest * mult pref = Key ( self . nestedPath ( nest , self . nest_depth , self . nest_length ) ) return pref . child ( key )
Returns a nested key .
96
5
24,399
def _link_for_value ( self , value ) : try : key = Key ( value ) if key . name == self . sentinel : return key . parent except : pass return None
Returns the linked key if value is a link or None .
40
12