idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
24,000
def load_csv(filename, dialect='excel', encoding='utf-8'):
    """Load and return a formal context parsed from a CSV file."""
    frmat = 'csv'
    return Context.fromfile(filename, frmat, encoding, dialect=dialect)
Load and return formal context from CSV file .
24,001
def ensure_compatible(left, right):
    """Raise an informative ValueError if the two definitions disagree."""
    conflicts = [pair for pair in conflicting_pairs(left, right)]
    if not conflicts:
        return
    message = 'conflicting values for object/property pairs: %r' % conflicts
    raise ValueError(message)
Raise an informative ValueError if the two definitions disagree .
24,002
def rename_object(self, old, new):
    """Replace the name of an object by a new one.

    Every (old, property) incidence pair is rewritten as (new, property).
    """
    self._objects.replace(old, new)
    pairs = self._pairs
    # Collect first, then mutate.  The original removed pairs via a
    # ``not pairs.remove(...)`` side effect inside a set comprehension,
    # which mutates the set it is filtering against and is fragile.
    renamed = {p for p in self._properties if (old, p) in pairs}
    pairs.difference_update((old, p) for p in renamed)
    pairs.update((new, p) for p in renamed)
Replace the name of an object by a new one .
24,003
def rename_property(self, old, new):
    """Replace the name of a property by a new one.

    Every (object, old) incidence pair is rewritten as (object, new).
    """
    self._properties.replace(old, new)
    pairs = self._pairs
    # Collect first, then mutate (the original mutated ``pairs`` from a
    # ``not pairs.remove(...)`` side effect inside the comprehension).
    renamed = {o for o in self._objects if (o, old) in pairs}
    pairs.difference_update((o, old) for o in renamed)
    pairs.update((o, new) for o in renamed)
Replace the name of a property by a new one .
24,004
def add_object(self, obj, properties=()):
    """Add *obj* to the definition and relate it to *properties*."""
    self._objects.add(obj)
    self._properties |= properties
    incidences = ((obj, prop) for prop in properties)
    self._pairs.update(incidences)
Add an object to the definition and add properties as related .
24,005
def add_property(self, prop, objects=()):
    """Add *prop* to the definition and relate it to *objects*."""
    self._properties.add(prop)
    self._objects |= objects
    incidences = ((obj, prop) for obj in objects)
    self._pairs.update(incidences)
Add a property to the definition and add objects as related .
24,006
def remove_object(self, obj):
    """Remove *obj* and all of its incidence pairs from the definition."""
    self._objects.remove(obj)
    stale = [(obj, prop) for prop in self._properties]
    self._pairs.difference_update(stale)
Remove an object from the definition .
24,007
def remove_property(self, prop):
    """Remove *prop* and all of its incidence pairs from the definition."""
    self._properties.remove(prop)
    stale = [(obj, prop) for obj in self._objects]
    self._pairs.difference_update(stale)
Remove a property from the definition .
24,008
def set_object(self, obj, properties):
    """Add *obj* and make *properties* exactly its related properties."""
    self._objects.add(obj)
    wanted = set(properties)
    self._properties |= wanted
    pairs = self._pairs
    for prop in self._properties:
        incidence = (obj, prop)
        if prop in wanted:
            pairs.add(incidence)
        else:
            pairs.discard(incidence)
Add an object to the definition and set its properties .
24,009
def set_property(self, prop, objects):
    """Add *prop* and make *objects* exactly its related objects."""
    self._properties.add(prop)
    wanted = set(objects)
    self._objects |= wanted
    pairs = self._pairs
    for obj in self._objects:
        incidence = (obj, prop)
        if obj in wanted:
            pairs.add(incidence)
        else:
            pairs.discard(incidence)
Add a property to the definition and set its objects .
24,010
def union_update(self, other, ignore_conflicts=False):
    """Merge *other* into this definition in place.

    Unless *ignore_conflicts* is true, raise ValueError when the two
    definitions disagree on any object/property pair.
    """
    if not ignore_conflicts:
        ensure_compatible(self, other)
    self._objects |= other._objects
    self._properties |= other._properties
    self._pairs |= other._pairs
Update the definition with the union of the other .
24,011
def union(self, other, ignore_conflicts=False):
    """Return a new definition that is the union of the definitions."""
    merged = self.copy()
    merged.union_update(other, ignore_conflicts)
    return merged
Return a new definition from the union of the definitions .
24,012
def intersection(self, other, ignore_conflicts=False):
    """Return a new definition that is the intersection of the definitions."""
    narrowed = self.copy()
    narrowed.intersection_update(other, ignore_conflicts)
    return narrowed
Return a new definition from the intersection of the definitions .
24,013
def maximal(iterable, comparison=operator.lt, _groupkey=operator.itemgetter(0)):
    """Yield the unique maximal elements from *iterable* using *comparison*.

    With fewer than two distinct items the unique items are returned
    directly as a set (mirroring the established contract); otherwise a
    generator of items that no other item dominates is returned.
    """
    distinct = set(iterable)
    if len(distinct) < 2:
        return distinct
    # permutations() emits tuples grouped by their first element, so
    # groupby sees consecutive keys without an explicit sort.
    return (candidate
            for candidate, compared in groupby(permutations(distinct, 2),
                                               key=_groupkey)
            if not any(starmap(comparison, compared)))
Yield the unique maximal elements from iterable using comparison .
24,014
def replace(self, item, new_item):
    """Replace *item* with *new_item* in place, preserving order.

    Raises ValueError if *new_item* is already present.
    """
    if new_item in self._seen:
        raise ValueError('%r already in list' % new_item)
    position = self._items.index(item)
    self._seen.remove(item)
    self._seen.add(new_item)
    self._items[position] = new_item
Replace an item preserving order .
24,015
def move(self, item, new_index):
    """Move *item* so that it ends up at position *new_index*."""
    current = self._items.index(item)
    if current == new_index:
        return
    self._items.insert(new_index, self._items.pop(current))
Move an item to the given position .
24,016
def issuperset(self, items):
    """Return whether every element of *items* is in this collection."""
    contains = self._seen.__contains__
    return all(_compat.map(contains, items))
Return whether this collection contains all items .
24,017
def rsub(self, items):
    """Return the order-preserving unique *items* not in this collection."""
    ignore = self._seen
    seen = set()
    unique = []
    for candidate in items:
        if candidate in ignore or candidate in seen:
            continue
        seen.add(candidate)
        unique.append(candidate)
    return self._fromargs(seen, unique)
Return order preserving unique items not in this collection .
24,018
def merge_schema(first, second):
    """Return the result of merging the two given JSON-schema dicts.

    Raises:
        ValueError: if either argument is not a dict.
        NotImplementedError: if either schema's root type is not 'object'.
    """
    # isinstance instead of exact ``type(x) == dict``: accepts dict
    # subclasses (e.g. OrderedDict) while rejecting everything else.
    if not (isinstance(first, dict) and isinstance(second, dict)):
        raise ValueError("Argument is not a schema")
    if not (first.get('type') == second.get('type') == 'object'):
        raise NotImplementedError("Unsupported root type")
    return merge_objects(first, second)
Returns the result of merging the two given schemas .
24,019
def generate_and_merge_schemas(samples):
    """Generate a schema for every sample and merge them into one.

    Raises StopIteration (via ``next``) when *samples* is empty.
    """
    iterator = iter(samples)
    # Consume the first sample exactly once.  The original called
    # ``next(iter(samples))`` and then looped over ``samples`` again,
    # which re-merged the first element of sequence inputs with itself.
    merged = generate_schema_for_sample(next(iterator))
    for sample in iterator:
        merged = merge_schema(merged, generate_schema_for_sample(sample))
    return merged
Iterates through the given samples generating schemas and merging them returning the resulting merged schema .
24,020
def sine_psd(data, delta, number_of_tapers=None, number_of_iterations=2,
             degree_of_smoothing=1.0, statistics=False, verbose=False):
    """Wrapper for the ``sine_psd`` subroutine in G. A. Prieto's library.

    Returns [spectrum, frequency_bins], extended with
    [errors, tapers_per_freq_point] when ``statistics`` is True.
    """
    # NOTE(review): C.c_char('y') requires a bytes value (b'y') on
    # Python 3 -- as written this branch looks Python-2 only; confirm.
    if verbose is True:
        verbose = C.byref(C.c_char('y'))
    else:
        verbose = None
    if number_of_tapers is None:
        number_of_tapers = 0  # presumably tells the library to choose -- verify
    mt = _MtspecType("float32")
    # Enforce the dtype/memory order the Fortran routine expects.
    data = np.require(data, dtype=mt.float, requirements=[mt.order])
    npts = len(data)
    number_of_frequency_bins = int(npts / 2) + 1
    frequency_bins = mt.empty(number_of_frequency_bins)
    spectrum = mt.empty(number_of_frequency_bins)
    if statistics is True:
        # Per-frequency taper count plus a two-column error estimate.
        tapers_per_freq_point = np.empty(number_of_frequency_bins,
                                         dtype='int32', order=mt.order)
        tapers_per_freq_point_p = tapers_per_freq_point.ctypes.data_as(
            C.POINTER(C.c_int))
        errors = mt.empty((number_of_frequency_bins, 2))
    else:
        tapers_per_freq_point_p = errors = None
    mtspeclib.sine_psd_(
        C.byref(C.c_int(npts)), C.byref(C.c_float(delta)), mt.p(data),
        C.byref(C.c_int(number_of_tapers)),
        C.byref(C.c_int(number_of_iterations)),
        C.byref(C.c_float(degree_of_smoothing)),
        C.byref(C.c_int(number_of_frequency_bins)),
        mt.p(frequency_bins), mt.p(spectrum),
        tapers_per_freq_point_p, mt.p(errors), verbose)
    return_values = [spectrum, frequency_bins]
    if statistics is True:
        return_values.extend([errors, tapers_per_freq_point])
    return return_values
Wrapper method for the sine_psd subroutine in the library by German A . Prieto .
24,021
def dpss(npts, fw, number_of_tapers, auto_spline=True, npts_max=None):
    """Calculate DPSS, also known as Slepian sequences or Slepian tapers.

    Returns the tuple (v, lamb, theta) of tapers, eigenvalues and a third
    per-taper quantity (presumably 1-lambda energies -- verify against
    the Fortran library's documentation).
    """
    mt = _MtspecType("float64")
    v = mt.empty((npts, number_of_tapers))
    lamb = mt.empty(number_of_tapers)
    theta = mt.empty(number_of_tapers)
    # An explicit npts_max below npts forces the spline path; otherwise
    # the spline path only triggers for very long series (> 200000).
    if npts_max and npts_max < npts:
        auto_spline = True
    else:
        npts_max = 200000
    if auto_spline is True and npts > npts_max:
        # Compute tapers at npts_max points and spline-interpolate up.
        mtspeclib.dpss_spline_(
            C.byref(C.c_int(npts_max)), C.byref(C.c_int(npts)),
            C.byref(C.c_double(fw)), C.byref(C.c_int(number_of_tapers)),
            mt.p(v), mt.p(lamb), mt.p(theta))
    else:
        mtspeclib.dpss_(
            C.byref(C.c_int(npts)), C.byref(C.c_double(fw)),
            C.byref(C.c_int(number_of_tapers)),
            mt.p(v), mt.p(lamb), mt.p(theta))
    return (v, lamb, theta)
Calculates DPSS also known as Slepian sequences or Slepian tapers .
24,022
def wigner_ville_spectrum(data, delta, time_bandwidth=3.5,
                          number_of_tapers=None, smoothing_filter=None,
                          filter_width=100, frequency_divider=1,
                          verbose=False):
    """Calculate the Wigner-Ville distribution/spectrum of a signal
    using multitaper spectral estimates.

    ``smoothing_filter`` may be None, 'boxcar' or 'gauss'.
    """
    data = np.require(data, 'float32')
    mt = _MtspecType("float32")
    npts = len(data)
    if number_of_tapers is None:
        # Conventional multitaper default: 2*NW - 1 tapers.
        number_of_tapers = int(2 * time_bandwidth) - 1
    # Map the filter name onto the integer code the Fortran side expects.
    if not smoothing_filter:
        smoothing_filter = 0
    elif smoothing_filter == 'boxcar':
        smoothing_filter = 1
    elif smoothing_filter == 'gauss':
        smoothing_filter = 2
    else:
        msg = 'Invalid value for smoothing filter.'
        raise Exception(msg)
    # NOTE(review): C.c_char('y') needs a bytes argument on Python 3.
    if verbose:
        verbose = C.byref(C.c_char('y'))
    else:
        verbose = None
    output = mt.empty((npts // 2 // int(frequency_divider) + 1, npts))
    mtspeclib.wv_spec_to_array_(
        C.byref(C.c_int(npts)), C.byref(C.c_float(delta)),
        mt.p(data), mt.p(output),
        C.byref(C.c_float(time_bandwidth)),
        C.byref(C.c_int(number_of_tapers)),
        C.byref(C.c_int(smoothing_filter)),
        C.byref(C.c_float(filter_width)),
        C.byref(C.c_int(frequency_divider)), verbose)
    return output
Function to calculate the Wigner - Ville Distribution or Wigner - Ville Spectrum of a signal using multitaper spectral estimates .
24,023
def mt_deconvolve(data_a, data_b, delta, nfft=None, time_bandwidth=None,
                  number_of_tapers=None, weights="adaptive", demean=True,
                  fmax=0.0):
    """Deconvolve two time series using multitapers.

    Returns a dict with 'frequencies', 'deconvolved', 'spectral_ratio',
    'spectrum_a' and 'spectrum_b'.
    """
    npts = len(data_a)
    if len(data_b) != npts:
        raise ValueError("Input arrays must have the same length!")
    if nfft is None:
        nfft = npts
    elif nfft < npts:
        raise ValueError("nfft must be larger then the number of samples in "
                         "the array.")
    mt = _MtspecType("float32")
    if number_of_tapers is None:
        # NOTE(review): a TypeError is raised here when time_bandwidth is
        # left at its None default -- presumably callers must supply it;
        # confirm and document upstream.
        number_of_tapers = int(2 * time_bandwidth) - 1
    data_a = np.require(data_a, mt.float, requirements=[mt.order])
    data_b = np.require(data_b, mt.float, requirements=[mt.order])
    nf = nfft // 2 + 1
    if demean:
        demean = 1
    else:
        demean = 0
    # NOTE(review): 'constant' maps to adaptive=1 and 'adaptive' to 0,
    # which looks inverted -- verify against the Fortran flag semantics.
    if weights == "constant":
        adaptive = 1
    elif weights == "adaptive":
        adaptive = 0
    else:
        raise ValueError('Weights must be either "adaptive" or "constant".')
    tfun = mt.empty(nfft)
    freq = mt.empty(nf)
    spec_ratio = mt.empty(nf)
    speci = mt.empty(nf)
    specj = mt.empty(nf)
    mtspeclib.mt_deconv_(
        C.byref(C.c_int(int(npts))), C.byref(C.c_int(int(nfft))),
        C.byref(C.c_float(float(delta))),
        mt.p(data_a), mt.p(data_b),
        C.byref(C.c_float(float(time_bandwidth))),
        C.byref(C.c_int(int(number_of_tapers))),
        C.byref(C.c_int(int(nf))), C.byref(C.c_int(adaptive)),
        mt.p(freq), mt.p(tfun), mt.p(spec_ratio), mt.p(speci), mt.p(specj),
        C.byref(C.c_int(demean)), C.byref(C.c_float(fmax)))
    return {"frequencies": freq,
            "deconvolved": tfun,
            "spectral_ratio": spec_ratio,
            "spectrum_a": speci,
            "spectrum_b": specj}
Deconvolve two time series using multitapers .
24,024
def empty(self, shape, complex=False):
    """Return an uninitialized array of *shape* in this type's dtype/order.

    ``complex=True`` selects the complex dtype, otherwise the float one.
    (The parameter name shadows the builtin but is kept for callers.)
    """
    dtype = self.complex if complex else self.float
    return np.empty(shape, dtype=dtype, order=self.order)
A wrapper around np . empty which automatically sets the correct type and returns an empty array .
24,025
def signal_bursts():
    """Generate a fixed 2560-sample noisy sine containing two decaying
    chirp bursts; useful for testing time-frequency distributions."""
    np.random.seed(815)
    length = 5 * 512
    signal = np.sin(np.linspace(0, 80 * np.pi, length))
    noise = np.random.ranf(length)
    noise /= noise.max()
    noise /= 15
    signal += noise
    signal[-2 * 512:] *= 2.0
    fade = np.linspace(1, 0, 512)
    burst_one = 2.5 * np.sin(np.linspace(0, 400 * np.pi, 512)) * fade
    signal[512:2 * 512] += burst_one
    burst_two = 5.0 * np.sin(np.linspace(0, 200 * np.pi, 512)) * fade
    signal[3 * 512:4 * 512] += burst_two
    return signal
Generates a signal with two bursts inside . Useful for testing time frequency distributions .
24,026
def linear_chirp(npts=2000):
    """Generate a simple linear chirp of *npts* samples over 20 units."""
    t = np.linspace(0, 20, npts)
    phase = 0.2 * np.pi * (0.1 + 24.0 / 2.0 * t) * t
    return np.sin(phase)
Generates a simple linear chirp .
24,027
def exponential_chirp(npts=2000):
    """Generate an exponential chirp of *npts* samples over 20 units."""
    t = np.linspace(0, 20, npts)
    phase = 2 * np.pi * 0.2 * (1.3 ** t - 1) / np.log(1.3)
    return np.sin(phase)
Generates an exponential chirp .
24,028
def get_libgfortran_dir():
    """Return [directory] of libgfortran as reported by gfortran, else [].

    Helper mainly for OSX, where the C compiler often has no knowledge
    of the Fortran compiler's library directories; harmless on Linux.
    """
    for ending in [".3.dylib", ".dylib", ".3.so", ".so"]:
        try:
            p = Popen(['gfortran', "-print-file-name=libgfortran" + ending],
                      stdout=PIPE, stderr=PIPE)
            # communicate() closes both pipes AND waits for the process;
            # the original used a bare ``except:`` and never wait()ed.
            out, _ = p.communicate()
            lines = out.decode().splitlines()
            line = lines[0].strip() if lines else ''
            if os.path.exists(line):
                return [os.path.dirname(line)]
        except Exception:
            continue
    return []
Helper function returning the library directory of libgfortran . Useful on OSX where the C compiler oftentimes has no knowledge of the library directories of the Fortran compiler . I don t think it can do any harm on Linux .
24,029
def create(cls, obj):
    """Build a new prototype object whose source prototype is *obj*.

    ``__init__`` is deliberately bypassed via ``__new__``.
    """
    instance = cls.__new__(cls)
    instance.__proto__ = obj
    return instance
Create a new prototype object with the argument as the source prototype .
24,030
def bind(self, func):
    """Register *func* as a bound method under its own name."""
    if self.__methods__ is None:
        self.__methods__ = {}
    self.__methods__[func.__name__] = BoundFunction(func)
Take a function and create a bound method
24,031
def has_own_property(self, attr):
    """Return True when *attr* can be read directly off this object."""
    try:
        object.__getattribute__(self, attr)
    except AttributeError:
        return False
    return True
Returns whether the attribute exists directly on this object (looked up via object.__getattribute__, so inherited prototype attributes are not considered).
24,032
def add_router(self, path, router):
    """Mount *router* at *path*, matching every HTTP method."""
    if self.strict_router_check and not isinstance(router, Router):
        raise TypeError("Expected object of type Router, found %r" % type(router))
    log.info("{} Adding router {} on path {}", id(self), router, path)
    self.middleware.add(path=path, func=router, method_mask=HTTPMethod.ALL)
Adds a router to the list of routers
24,033
def create_server(self, loop=None, as_coroutine=False, protocol_factory=None,
                  **server_config):
    """Construct a listening server for this app.

    Returns the ``loop.create_server`` coroutine when *as_coroutine* is
    true; otherwise runs it to completion and returns the server.
    """
    if loop is None:
        import asyncio
        loop = asyncio.get_event_loop()
    if protocol_factory is None:
        from growler.aio import GrowlerHTTPProtocol
        protocol_factory = GrowlerHTTPProtocol.get_factory
    coro = loop.create_server(protocol_factory(self, loop=loop), **server_config)
    return coro if as_coroutine else loop.run_until_complete(coro)
Helper function which constructs a listening server using the default growler . http . protocol . Protocol which responds to this app .
24,034
def create_server_and_run_forever(self, loop=None, **server_config):
    """Construct an HTTP server and run the event loop until Ctrl-C."""
    if loop is None:
        import asyncio
        loop = asyncio.get_event_loop()
    self.create_server(loop=loop, **server_config)
    try:
        loop.run_forever()
    except KeyboardInterrupt:
        # A keyboard interrupt is the normal way to stop the server.
        pass
Helper function which constructs an HTTP server and listens the loop forever .
24,035
def find_template_filename(self, template_name):
    """Return the first existing file matching *template_name*, or None.

    Tries the bare name first, then the name with each entry of
    ``default_file_extensions`` appended (when that attribute exists).
    """
    def candidates():
        base = self.path / template_name
        yield base
        try:
            extensions = self.default_file_extensions
        except AttributeError:
            return
        base_str = str(base)
        for extension in extensions:
            yield Path(base_str + extension)

    for candidate in candidates():
        if candidate.is_file():
            return candidate
Searches for a file matching the given template name .
24,036
def set_request_line(self, method, url, version):
    """Record the parsed request line on the responder."""
    self.parsed_request = (method, url, version)
    self.request = dict(method=method, url=url, version=version)
Sets the request line on the responder .
24,037
def init_body_buffer(self, method, headers):
    """Set up ``body_buffer``/``content_length`` from method and headers.

    Raises HTTPErrorBadRequest when a POST/PUT lacks a CONTENT-LENGTH
    header, or when any other method supplies one.
    """
    content_length = headers.get("CONTENT-LENGTH", None)
    if method in (HTTPMethod.POST, HTTPMethod.PUT):
        if content_length is None:
            raise HTTPErrorBadRequest(
                "HTTP Method requires a CONTENT-LENGTH header")
        self.content_length = int(content_length)
        self.body_buffer = bytearray(0)
    elif content_length is not None:
        # Bug fix: the original raised with the literal '%s' placeholder
        # left unformatted; interpolate the offending method.
        raise HTTPErrorBadRequest(
            "HTTP method %s may NOT have a CONTENT-LENGTH header" % method)
Sets up the body_buffer and content_length attributes based on method and headers .
24,038
def build_req_and_res(self):
    """Invoke the request/response factories and return the (req, res) pair."""
    return (self.build_req(self, self.headers),
            self.build_res(self._handler))
Simple method which calls the request and response factories the responder was given and returns the pair .
24,039
def validate_and_store_body_data(self, data):
    """Store incoming body *data* and check it against CONTENT-LENGTH.

    Raises HTTPErrorBadRequest when the accumulated buffer grows beyond
    ``self.content_length``.
    """
    # NOTE(review): the [-1:] slice assignment only appends when the
    # buffer is empty; on later calls it overwrites the buffer's final
    # byte before appending -- confirm whether this is intentional.
    self.body_buffer[-1:] = data
    if len(self.body_buffer) > self.content_length:
        problem = "Content length exceeds expected value (%d > %d)" % (
            len(self.body_buffer), self.content_length)
        raise HTTPErrorBadRequest(phrase=problem)
Attempts simple body data validation by comparining incoming data to the content length header . If passes store the data into self . _buffer .
24,040
def begin_application(self, req, res):
    """Kick off the application's middleware chain as an asyncio task."""
    handler = self.http_application.handle_client_request(req, res)
    self.loop.create_task(handler)
Entry point for the application middleware chain for an asyncio event loop .
24,041
def calculate_etag(file_path):
    """Return an etag '<mtime_ns hex>-<size hex>' for *file_path*."""
    info = file_path.stat()
    return "%x-%x" % (info.st_mtime_ns, info.st_size)
Calculate an etag value
24,042
def _set_default_headers ( self ) : self . headers . setdefault ( 'Date' , self . get_current_time ) self . headers . setdefault ( 'Server' , self . SERVER_INFO ) self . headers . setdefault ( 'Content-Length' , "%d" % len ( self . message ) ) if self . app . enabled ( 'x-powered-by' ) : self . headers . setdefault ( 'X-Powered-By' , 'Growler' )
Create some default headers that should be sent along with every HTTP response
24,043
def send_headers(self):
    """Emit header events and write the status line plus headers."""
    self.events.sync_emit('headers')
    self._set_default_headers()
    serialized = "".join([self.status_line, self.EOL, str(self.headers)])
    self.stream.write(serialized.encode())
    self.events.sync_emit('after_headers')
Sends the headers to the client
24,044
def end(self):
    """Finish the response: flush headers then body, close the output,
    and mark the response as ended.  Useful for ending with no data."""
    self.send_headers()
    self.write()
    self.write_eof()
    self.has_ended = True
Ends the response . Useful for quickly ending connection with no data sent
24,045
def redirect(self, url, status=None):
    """Send an empty redirect response to *url* (status defaults to 302)."""
    self.status_code = status if status is not None else 302
    self.headers = Headers([('location', url)])
    self.message = ''
    self.end()
Redirect to the specified url optional status code defaults to 302 .
24,046
def set(self, header, value=None):
    """Set one header (name, value) or many from a mapping.

    (The name shadows the builtin ``set`` but is kept for the API.)
    """
    if value is not None:
        self.headers[header] = value
        return
    for name, val in header.items():
        self.headers[name] = val
Set header to the value
24,047
def links(self, links):
    """Serialize the *links* mapping (url -> rel) into the Link header."""
    rendered = []
    for target, relation in links.items():
        rendered.append('<{}>; rel="{}"'.format(target, relation))
    self.headers['Link'] = ','.join(rendered)
Sets the Link header from a mapping of link URL to rel value, serialized as comma-separated '<url>; rel="relation"' entries.
24,048
def send_file(self, filename, status=200):
    """Read *filename* into the response body and send it to the client."""
    if isinstance(filename, Path) and sys.version_info >= (3, 5):
        self.message = filename.read_bytes()
    else:
        with io.FileIO(str(filename)) as handle:
            self.message = handle.read()
    self.status_code = status
    self.send_headers()
    self.write()
    self.write_eof()
Reads in the file filename and sends bytes to client
24,049
def update(self, *args, **kwargs):
    """Equivalent of ``dict.update``: merge mappings then keyword pairs."""
    for mapping in args + (kwargs,):
        for key, val in mapping.items():
            self[key] = val
Equivalent to the python dict update method .
24,050
def add_header(self, key, value, **params):
    """Store header *key: value*, appending quoted parameters if given.

    Parameters are sorted by name, escaped, de-quoted and rendered as
    ``name="value"`` pairs; the header is keyed case-insensitively.
    """
    escaped_key = self.escape(key)
    rendered = ['%s="%s"' % (self.escape(name),
                             self.de_quote(self.escape(val)))
                for name, val in sorted(params.items())]
    if rendered:
        value = "%s; %s" % (value, ' '.join(rendered))
    self._header_data[escaped_key.casefold()] = (escaped_key, value)
Add a header to the collection including potential parameters .
24,051
def index(req, res):
    """Serve the site root, counting visits in the session."""
    number = req.session.get('counter', -1)
    req.session['counter'] = int(number) + 1
    print(" -- Session '{id}' returned {counter} times".format(**req.session))
    greeting = "Hello!! You've been here [[%s]] times" % (req.session['counter'])
    res.send_text(greeting)
    req.session.save()
Return root page of website .
24,052
async def body(self):
    """Block until the body is completely read; return its raw bytes."""
    if isinstance(self._body, bytes):
        return self._body
    # Still a pending awaitable: resolve and cache it.
    self._body = await self._body
    return self._body
A helper function which blocks until the body has been read completely . Returns the bytes of the body which the user should decode .
24,053
def event_emitter(cls_=None, *, events=('*',)):
    """Class decorator adding ``on`` and ``emit`` methods for *events*.

    Bug fix: the original ``dict.fromkeys(events, [])`` shared ONE list
    between all event names, so a callback registered on any event fired
    for every event; each event now gets its own list.
    """
    event_dict = {name: [] for name in events}
    # '*' in the event list means arbitrary event names are allowed.
    allow_any_eventname = event_dict.pop('*', False) == []

    def _event_emitter(cls):
        def on(self, name, callback):
            if not (callable(callback) or isawaitable(callback)):
                raise ValueError("Callback not callable: %r" % callback)
            try:
                event_dict[name].append(callback)
            except KeyError:
                if allow_any_eventname:
                    event_dict[name] = [callback]
                else:
                    msg = "Event Emitter has no event {!r}".format(name)
                    raise KeyError(msg)
            return self

        async def emit(self, name):
            for cb in event_dict[name]:
                if isawaitable(cb):
                    await cb
                else:
                    cb()

        cls.on = on
        cls.emit = emit
        return cls

    return _event_emitter if cls_ is None else _event_emitter(cls_)
A class - decorator which will add the specified events and the methods on and emit to the class .
24,054
def on(self, name, _callback=None):
    """Attach *_callback* to event *name*; without it, act as a decorator.

    Returns the callback so decorated functions stay usable.
    """
    if _callback is None:
        return lambda callback: self.on(name, callback)
    if not (callable(_callback) or isawaitable(_callback)):
        raise ValueError("Callback not callable: {0!r}".format(_callback))
    self._event_list[name].append(_callback)
    return _callback
Add a callback to the event named name . Returns callback object for decorationable calls .
24,055
async def emit(self, name):
    """Invoke every callback registered for event *name*, in order,
    awaiting those that are awaitables."""
    for callback in self._event_list[name]:
        if isawaitable(callback):
            await callback
        else:
            callback()
Add a callback to the event named name . Returns this object for chained on calls .
24,056
def routerify(obj):
    """Build a Router from *obj*'s route-like attributes, attach it to
    the object and return it."""
    router = Router()
    for route_info in get_routing_attributes(obj):
        router.add_route(*route_info)
    obj.__growler_router = router
    return router
Scan through attributes of object parameter looking for any which match a route signature . A router will be created and added to the object with parameter .
24,057
def _add_route ( self , method , path , middleware = None ) : if middleware is not None : self . add ( method , path , middleware ) return self else : return lambda func : ( self . add ( method , path , func ) , func ) [ 1 ]
The implementation of adding a route
24,058
def use(self, middleware, path=None):
    """Run *middleware* for requests matching *path* (default: all)."""
    self.log.info(" Using middleware {}", middleware)
    pattern = MiddlewareChain.ROOT_PATTERN if path is None else path
    self.add(HTTPMethod.ALL, pattern, middleware)
    return self
Call the provided middleware upon requests matching the path . If path is not provided or None all requests will match .
24,059
def sinatra_path_to_regex(cls, path):
    """Convert a sinatra-style path ('/user/:id') to a compiled regex
    with named parameters; already-compiled patterns pass through.

    Fix: the named-group template is now a raw string -- the original
    ``"(?P<{}>\\w+)"`` contained an invalid ``\\w`` escape in a plain
    string literal (DeprecationWarning, later a SyntaxError).
    """
    if type(path) is cls.regex_type:
        return path
    segments = [r"(?P<{}>\w+)".format(segment[1:])
                if cls.sinatra_param_regex.match(segment) else segment
                for segment in path.split('/')]
    return re.compile('/'.join(segments))
Converts a sinatra - style path to a regex with named parameters .
24,060
def _parse_and_store_headers(self):
    """Coroutine: feed buffered header lines to the header parser.

    Yields to receive more raw data whenever a complete header line is
    not yet available; finishes by storing the parsed dict on
    ``self.headers``.
    """
    header_storage = self._store_header()
    header_storage.send(None)  # prime the parser coroutine
    for header_line in self._next_header_line():
        if header_line is None:
            # Not enough buffered data for a full line: wait for more.
            self._buffer += yield
            continue
        else:
            header_storage.send(header_line)
    # Sending None signals end-of-headers and returns the parsed dict.
    self.headers = header_storage.send(None)
Coroutine used retrieve header data and parse each header until the body is found .
24,061
def _store_header(self):
    """Coroutine collecting header lines into a dict.

    Send it header lines one at a time; a line starting with whitespace
    continues the previous header (folded values become lists).  Sending
    None finishes and yields the {KEY: value} dict with keys upper-cased.
    """
    key, value = None, None
    headers = []
    header_line = yield
    while header_line is not None:
        if not header_line.startswith((b' ', b'\t')):
            # New header: flush any previous key/value pair first.
            if key:
                headers.append((key, value))
            key, value = self.split_header_key_value(header_line)
            key = key.upper()
        else:
            # Continuation line: fold into a list with the prior value(s).
            next_val = header_line.strip().decode()
            if isinstance(value, list):
                value.append(next_val)
            else:
                value = [value, next_val]
        header_line = yield
    if key is not None:
        headers.append((key, value))
    yield dict(headers)
Logic & state behind storing headers . This is a coroutine that should be sent header lines in the usual fashion . Sending it None will indicate there are no more lines and the dictionary of headers will be returned .
24,062
def _store_request_line(self, req_line):
    """Split and validate the HTTP request line.

    Stores method/url/version attributes, selects per-method header
    processing and parses the request URI.  Raises HTTPErrorBadRequest,
    HTTPErrorVersionNotSupported or HTTPErrorNotImplemented on invalid
    input.  Returns (method, parsed_url, version).
    """
    if not isinstance(req_line, str):
        try:
            req_line = self.raw_request_line = req_line.decode()
        except UnicodeDecodeError:
            raise HTTPErrorBadRequest
    try:
        self.method_str, self.original_url, self.version = req_line.split()
    except ValueError:
        # Wrong number of whitespace-separated fields.
        raise HTTPErrorBadRequest()
    if self.version not in ('HTTP/1.1', 'HTTP/1.0'):
        raise HTTPErrorVersionNotSupported(self.version)
    try:
        self.method = HTTPMethod[self.method_str]
    except KeyError:
        err = "Unknown HTTP Method '{}'".format(self.method_str)
        raise HTTPErrorNotImplemented(err)
    # Method-specific header validation; the default accepts anything.
    self._process_headers = {
        HTTPMethod.GET: self.process_get_headers,
        HTTPMethod.POST: self.process_post_headers
    }.get(self.method, lambda data: True)
    _, num_str = self.version.split('/', 1)
    self.HTTP_VERSION = tuple(num_str.split('.'))
    self.version_number = float(num_str)
    self.parsed_url = urlparse(self.original_url)
    self.path = unquote(self.parsed_url.path)
    self.query = parse_qs(self.parsed_url.query)
    return self.method, self.parsed_url, self.version
Splits the request line given into three components . Ensures that the version and method are valid for this server and uses the urllib . parse function to parse the request URI .
24,063
def determine_newline(data):
    """Return the newline convention found in bytestring *data*.

    Returns b'\\r\\n' or b'\\n', or None when no newline is present.
    A newline at position 0 reports b'\\n'.
    """
    line_end_pos = data.find(b'\n')
    if line_end_pos == -1:
        return None
    elif line_end_pos == 0:
        return b'\n'
    prev_char = data[line_end_pos - 1]
    # Bug fix: compare the byte value with ==.  The original used ``is``
    # on an int, which only "worked" via CPython's small-int caching.
    return b'\r\n' if prev_char == b'\r'[0] else b'\n'
Looks for a newline character in the bytestring parameter data. Recognizes the conventions "\r\n" and "\n", returning the matching bytes, or None when no newline is present; a "\n" found at position 0 of the string is reported as "\n".
24,064
def path_split(self, path):
    """Split *path* into the part matched by this middleware and the rest.

    Returns (None, None) when there is no acceptable match; when the
    match consumes the whole path the remainder is the empty string.
    """
    matched = self.path.match(path)
    if matched is None:
        return None, None
    remainder = path[matched.end():]
    if remainder:
        # The boundary must fall on a '/' on one side or the other.
        boundary_ok = (matched.group().endswith('/')
                       or remainder.startswith('/'))
        if not boundary_ok:
            return None, None
    if self.IGNORE_TRAILING_SLASH and remainder == '/':
        remainder = ''
    return matched, remainder
Splits a path into the part matching this middleware and the part remaining . If path does not exist it returns a pair of None values . If the regex matches the entire pair the second item in returned tuple is None .
24,065
def find_matching_middleware(self, method, path):
    """Yield (middleware, path_match, rest_url) for each chain entry
    matching the given method and path."""
    for middleware in self.mw_list:
        if not middleware.matches_method(method):
            continue
        path_match, rest_url = middleware.path_split(path)
        if not self.should_skip_middleware(middleware, path_match, rest_url):
            yield middleware, path_match, rest_url
Iterator handling the matching of middleware against a method + path pair. Yields, for each match, the middleware together with its path match object and the remaining unmatched portion of the URL.
24,066
def add(self, method_mask, path, func):
    """Append *func* to the middleware chain for *method_mask* and *path*."""
    self.mw_list.append(MiddlewareNode(
        func=func,
        mask=method_mask,
        path=path,
        # Three-parameter callables are treated as error handlers.
        is_errorhandler=len(signature(func).parameters) == 3,
        is_subchain=isinstance(func, MiddlewareChain),
    ))
Add a function to the middleware chain . This function is returned when iterating over the chain with matching method and path .
24,067
def count_all(self):
    """Return the total middleware count, recursing into subchains."""
    total = 0
    for node in self:
        total += node.func.count_all() if node.is_subchain else 1
    return total
Returns the total number of middleware in this chain and subchains .
24,068
def if_relationship(parser, token):
    # Template tag: render one of two branches depending on whether a
    # relationship of the given status exists between two users.
    # (Deliberately commented rather than docstring'd: the error message
    # below embeds ``if_relationship.__doc__``, so adding a docstring
    # would change the runtime error text.)
    bits = list(token.split_contents())
    if len(bits) != 4:
        raise TemplateSyntaxError(
            "%r takes 3 arguments:\n%s" % (bits[0], if_relationship.__doc__))
    end_tag = 'end' + bits[0]
    nodelist_true = parser.parse(('else', end_tag))
    token = parser.next_token()
    if token.contents == 'else':
        nodelist_false = parser.parse((end_tag,))
        parser.delete_first_token()
    else:
        nodelist_false = template.NodeList()
    return IfRelationshipNode(nodelist_true, nodelist_false, *bits[1:])
Determine if a certain type of relationship exists between two users . The status parameter must be a slug matching either the from_slug to_slug or symmetrical_slug of a RelationshipStatus .
24,069
def add_relationship_url(user, status):
    """Build the URL for adding a relationship of *status* to *user*.

    *status* is either a RelationshipStatus instance or a slug string.
    """
    slug = status.from_slug if isinstance(status, RelationshipStatus) else status
    return reverse('relationship_add', args=[user.username, slug])
Generate a url for adding a relationship on a given user . user is a User object and status is either a relationship_status object or a string denoting a RelationshipStatus
24,070
def _rename_glyphs_from_ufo(self):
    """Rename glyphs using ufo.lib's public.postscriptNames mapping.

    Applies the rename map to the font's glyph order and, when present,
    to the format-2 post table and the CFF charset/CharStrings.
    """
    rename_map = self._build_production_names()
    otf = self.otf
    otf.setGlyphOrder([rename_map.get(n, n) for n in otf.getGlyphOrder()])
    if 'post' in otf and otf['post'].formatType == 2.0:
        # Recompile so the post table picks up the renamed glyph order.
        otf['post'].compile(self.otf)
    if 'CFF ' in otf:
        cff = otf['CFF '].cff.topDictIndex[0]
        char_strings = cff.CharStrings.charStrings
        cff.CharStrings.charStrings = {
            rename_map.get(n, n): v for n, v in char_strings.items()}
        cff.charset = [rename_map.get(n, n) for n in cff.charset]
Rename glyphs using ufo . lib . public . postscriptNames in UFO .
24,071
def _unique_name ( name , seen ) : if name in seen : n = seen [ name ] while ( name + ".%d" % n ) in seen : n += 1 seen [ name ] = n + 1 name += ".%d" % n seen [ name ] = 1 return name
Append incremental . N suffix if glyph is a duplicate .
24,072
def _build_production_name(self, glyph):
    """Build a production name for a single glyph.

    Resolution order: explicit postscriptNames override; uniXXXX/uXXXXX
    from the glyph's codepoint; 'base.ext' recursion on a known base
    glyph; ligature names joined from their parts; finally the glyph's
    own name unchanged.
    """
    # Explicit override table wins when present.
    if self._postscriptNames:
        production_name = self._postscriptNames.get(glyph.name)
        return production_name if production_name else glyph.name
    # Encoded glyph: uni + 4 hex digits, or u + full value above BMP.
    unicode_val = glyph.unicode
    if glyph.unicode is not None:
        return '%s%04X' % ('u' if unicode_val > 0xffff else 'uni',
                           unicode_val)
    # 'base.ext' where base is a real glyph: rename base, keep extension.
    parts = glyph.name.rsplit('.', 1)
    if len(parts) == 2 and parts[0] in self.glyphSet:
        return '%s.%s' % (
            self._build_production_name(self.glyphSet[parts[0]]), parts[1])
    # Ligature names a_b_c, optionally with one '.ext' applied per part.
    parts = glyph.name.split('.', 1)
    if len(parts) == 2:
        liga_parts = ['%s.%s' % (n, parts[1]) for n in parts[0].split('_')]
    else:
        liga_parts = glyph.name.split('_')
    if len(liga_parts) > 1 and all(n in self.glyphSet for n in liga_parts):
        unicode_vals = [self.glyphSet[n].unicode for n in liga_parts]
        # All parts encoded within the BMP: single uniXXXXYYYY... name.
        if all(v and v <= 0xffff for v in unicode_vals):
            return 'uni' + ''.join('%04X' % v for v in unicode_vals)
        return '_'.join(self._build_production_name(self.glyphSet[n])
                        for n in liga_parts)
    return glyph.name
Build a production name for a single glyph .
24,073
def makeFeaClassName(name, existingClassNames=None):
    """Sanitize *name* so it is legal in feature-file text.

    Strips illegal characters; when *existingClassNames* is given, a
    ``_N`` suffix is appended until the result is unique within it.
    """
    name = re.sub(r"[^A-Za-z0-9._]", r"", name)
    if existingClassNames is None:
        return name
    candidate = name
    counter = 1
    while candidate in existingClassNames:
        candidate = "%s_%d" % (name, counter)
        counter += 1
    return candidate
Make a glyph class name which is legal to use in feature text .
24,074
def addLookupReference(feature, lookup, script=None, languages=None,
                       exclude_dflt=False):
    """Shortcut: delegate to ``addLookupReferences`` with one lookup."""
    return addLookupReferences(
        feature, (lookup,), script=script, languages=languages,
        exclude_dflt=exclude_dflt)
Shortcut for addLookupReferences but for a single lookup .
24,075
def openTypeHeadCreatedFallback(info):
    """Fall back to the SOURCE_DATE_EPOCH environment variable if set,
    otherwise use the current time (for reproducible builds)."""
    epoch = os.environ.get("SOURCE_DATE_EPOCH")
    if epoch is None:
        return dateStringForNow()
    return datetime.utcfromtimestamp(int(epoch)).strftime(_date_format)
Fall back to the environment variable SOURCE_DATE_EPOCH if it is set; otherwise use the current time.
24,076
def preflightInfo(info):
    """Returns a dict containing two items, ``missingRequired`` and
    ``missingRecommended``.  The value for each item is a set of info
    attribute names that are absent from *info* or set to ``None``.
    """
    def _missing(attrs):
        # an attribute counts as missing when it is absent or None
        return {a for a in attrs if getattr(info, a, None) is None}

    return dict(
        missingRequired=_missing(requiredAttributes),
        missingRecommended=_missing(recommendedAttributes),
    )
Returns a dict containing two items . The value for each item will be a list of info attribute names .
24,077
def add(self, user, status=None, symmetrical=False):
    """Add a relationship from this user to *user* with the given
    status, which defaults to "following".

    Uses get_or_create, so adding an existing relationship is a no-op
    that returns the existing row.

    If *symmetrical* is True, the reciprocal relationship is created as
    well and a tuple of both relationships is returned; otherwise the
    single relationship is returned.
    """
    if not status:
        status = RelationshipStatus.objects.following()
    relationship, created = Relationship.objects.get_or_create(
        from_user=self.instance,
        to_user=user,
        status=status,
        site=Site.objects.get_current())
    if symmetrical:
        # pass symmetrical=False on the mirrored call to avoid recursion
        return (relationship,
                user.relationships.add(self.instance, status, False))
    else:
        return relationship
Add a relationship from one user to another with the given status which defaults to following .
24,078
def remove(self, user, status=None, symmetrical=False):
    """Remove a relationship from this user to *user*, with the same
    caveats and behavior as adding a relationship (status defaults to
    "following"; *symmetrical* also removes the reciprocal row).
    """
    if not status:
        status = RelationshipStatus.objects.following()
    res = Relationship.objects.filter(
        from_user=self.instance,
        to_user=user,
        status=status,
        site__pk=settings.SITE_ID).delete()
    if symmetrical:
        # pass symmetrical=False on the mirrored call to avoid recursion
        return (res, user.relationships.remove(self.instance, status, False))
    else:
        return res
Remove a relationship from one user to another with the same caveats and behavior as adding a relationship .
24,079
def get_relationships(self, status, symmetrical=False):
    """Returns a QuerySet of user objects with which the given user has
    established a relationship of the given status.

    If *symmetrical* is True, only users where the relationship exists
    in both directions are returned.
    """
    query = self._get_from_query(status)
    if symmetrical:
        # merge the reverse-direction filter so both directions must match
        query.update(self._get_to_query(status))
    return User.objects.filter(**query)
Returns a QuerySet of user objects with which the given user has established a relationship .
24,080
def only_to(self, status):
    """Returns a QuerySet of user objects who have created a relationship
    to the given user but whom the given user has not reciprocated.
    """
    from_relationships = self.get_relationships(status)
    to_relationships = self.get_related_to(status)
    # users pointing at us, minus those we also point at
    return to_relationships.exclude(
        pk__in=from_relationships.values_list('pk'))
Returns a QuerySet of user objects who have created a relationship to the given user, but whom the given user has not reciprocated .
24,081
def makeOfficialGlyphOrder(font, glyphOrder=None):
    """Make the final glyph order for *font*.

    ``.notdef`` always comes first; glyphs named in *glyphOrder* (or the
    font's own ``glyphOrder`` attribute when not given) follow in that
    order; any remaining glyphs are appended sorted alphabetically.
    """
    if glyphOrder is None:
        glyphOrder = getattr(font, "glyphOrder", ())
    remaining = set(font.keys())
    order = []
    if ".notdef" in remaining:
        remaining.remove(".notdef")
        order.append(".notdef")
    for glyphName in glyphOrder:
        # skip names not actually present in the font
        if glyphName in remaining:
            remaining.remove(glyphName)
            order.append(glyphName)
    order.extend(sorted(remaining))
    return order
Make the final glyph order for font .
24,082
def from_layer(cls, font, layerName=None, copy=False, skipExportGlyphs=None):
    """Return a mapping of glyph names to glyph objects from *font*.

    If *layerName* is not None the named layer is used, otherwise the
    default layer.  With ``copy=True`` the glyphs and the layer lib are
    deep-copied so the source font is left untouched.

    Glyph names listed in *skipExportGlyphs* are removed from the
    mapping; components referencing them are first decomposed in place
    so the outlines are preserved.
    """
    if layerName is not None:
        layer = font.layers[layerName]
    else:
        layer = font.layers.defaultLayer
    if copy:
        self = _copyLayer(layer, obj_type=cls)
        self.lib = deepcopy(layer.lib)
    else:
        self = cls((g.name, g) for g in layer)
        self.lib = layer.lib
    if skipExportGlyphs:
        for glyph in self.values():
            if any(c.baseGlyph in skipExportGlyphs for c in glyph.components):
                # inline the contours of skipped components into the glyph
                deepCopyContours(self, glyph, glyph, Transform(),
                                 skipExportGlyphs)
                if hasattr(glyph, "removeComponent"):
                    # glyph object exposes a removal API — use it
                    for c in [component for component in glyph.components
                              if component.baseGlyph in skipExportGlyphs]:
                        glyph.removeComponent(c)
                else:
                    # otherwise filter the components list in place
                    glyph.components[:] = [
                        c for c in glyph.components
                        if c.baseGlyph not in skipExportGlyphs]
        # finally drop the skipped glyphs themselves
        for glyph_name in skipExportGlyphs:
            if glyph_name in self:
                del self[glyph_name]
    self.name = layer.name if layerName is not None else None
    return self
Return a mapping of glyph names to glyph objects from font .
24,083
def parseLayoutFeatures(font):
    """Parse OpenType layout features in the UFO and return a
    feaLib.ast.FeatureFile instance.
    """
    featxt = tounicode(font.features.text or "", "utf-8")
    if not featxt:
        return ast.FeatureFile()
    buf = UnicodeIO(featxt)
    # the buffer name is used by the parser to resolve include() paths
    ufoPath = font.path
    if ufoPath is not None:
        buf.name = ufoPath
    glyphNames = set(font.keys())
    try:
        parser = Parser(buf, glyphNames)
        doc = parser.parse()
    except IncludedFeaNotFound as e:
        # hint at a common mistake: include paths relative to features.fea
        # instead of relative to the UFO itself
        if ufoPath and os.path.exists(os.path.join(ufoPath, e.args[0])):
            logger.warning(
                "Please change the file name in the include(...); "
                "statement to be relative to the UFO itself, "
                "instead of relative to the 'features.fea' file "
                "contained in it.")
        raise
    return doc
Parse OpenType layout features in the UFO and return a feaLib . ast . FeatureFile instance .
24,084
def setupFeatures(self):
    """Make the features source.

    When feature writers are configured, each writer is run against a
    parsed copy of the UFO's feature file and the result serialized;
    otherwise the UFO's raw feature text is used unchanged.
    """
    if self.featureWriters:
        featureFile = parseLayoutFeatures(self.ufo)
        for writer in self.featureWriters:
            writer.write(self.ufo, featureFile, compiler=self)
        # serialize the (possibly modified) AST back to feature text
        self.features = featureFile.asFea()
    else:
        # no feature writers: take the feature text as-is
        self.features = tounicode(self.ufo.features.text or "", "utf-8")
Make the features source .
24,085
def buildTables(self):
    """Compile OpenType feature tables from the source.

    Raises a FeatureLibError if the feature compilation was
    unsuccessful.
    """
    if not self.features:
        return
    # pass the UFO path only when the features were not rewritten by
    # feature writers; it lets the lexer resolve include() statements
    path = self.ufo.path if not self.featureWriters else None
    try:
        addOpenTypeFeaturesFromString(self.ttFont, self.features,
                                      filename=path)
    except FeatureLibError:
        if path is None:
            # dump generated features to a temp file for debugging,
            # since they don't exist on disk
            data = tobytes(self.features, encoding="utf-8")
            with NamedTemporaryFile(delete=False) as tmp:
                tmp.write(data)
            logger.error(
                "Compilation failed! Inspect temporary file: %r", tmp.name)
        raise
Compile OpenType feature tables from the source . Raises a FeaLibError if the feature compilation was unsuccessful .
24,086
def maxCtxFont(font):
    """Calculate the usMaxContext value for an entire font."""
    maxCtx = 0
    # scan every lookup subtable in both layout tables
    for tag in ("GSUB", "GPOS"):
        if tag not in font:
            continue
        lookupList = font[tag].table.LookupList
        if lookupList is None:
            continue
        for lookup in lookupList.Lookup:
            for subtable in lookup.SubTable:
                maxCtx = maxCtxSubtable(
                    maxCtx, tag, lookup.LookupType, subtable)
    return maxCtx
Calculate the usMaxContext value for an entire font .
24,087
def maxCtxContextualSubtable(maxCtx, st, ruleType, chain=''):
    """Calculate usMaxContext based on a contextual feature subtable.

    *ruleType* and *chain* are combined to build the attribute names of
    the rule sets on the subtable (e.g. ``ChainSubRuleSet``).
    """
    if st.Format == 1:
        # glyph-based rule sets
        for ruleset in getattr(st, '%s%sRuleSet' % (chain, ruleType)):
            if ruleset is None:
                continue
            for rule in getattr(ruleset, '%s%sRule' % (chain, ruleType)):
                if rule is None:
                    continue
                maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
    elif st.Format == 2:
        # class-based rule sets
        for ruleset in getattr(st, '%s%sClassSet' % (chain, ruleType)):
            if ruleset is None:
                continue
            for rule in getattr(ruleset, '%s%sClassRule' % (chain, ruleType)):
                if rule is None:
                    continue
                maxCtx = maxCtxContextualRule(maxCtx, rule, chain)
    elif st.Format == 3:
        # coverage-based: the subtable itself carries the rule data
        maxCtx = maxCtxContextualRule(maxCtx, st, chain)
    return maxCtx
Calculate usMaxContext based on a contextual feature subtable .
24,088
def maxCtxContextualRule(maxCtx, st, chain):
    """Calculate usMaxContext based on a contextual feature rule."""
    if chain == 'Reverse':
        # reverse chaining: input glyphs plus lookahead
        count = st.GlyphCount + st.LookAheadGlyphCount
    elif chain:
        # forward chaining: input sequence plus lookahead
        count = st.InputGlyphCount + st.LookAheadGlyphCount
    else:
        # plain contextual: just the glyph sequence length
        count = st.GlyphCount
    return max(maxCtx, count)
Calculate usMaxContext based on a contextual feature rule .
24,089
def compileOTF(ufo,
               preProcessorClass=OTFPreProcessor,
               outlineCompilerClass=OutlineOTFCompiler,
               featureCompilerClass=None,
               featureWriters=None,
               glyphOrder=None,
               useProductionNames=None,
               optimizeCFF=CFFOptimization.SUBROUTINIZE,
               roundTolerance=None,
               removeOverlaps=False,
               overlapsBackend=None,
               inplace=False,
               layerName=None,
               skipExportGlyphs=None,
               _tables=None,
               ):
    """Create a FontTools CFF font from a UFO.

    The pipeline is: pre-process glyphs, compile outlines and basic
    tables, compile layout features (skipped for sparse layer masters),
    then post-process (production names, CFF optimization).
    """
    logger.info("Pre-processing glyphs")
    if skipExportGlyphs is None:
        skipExportGlyphs = ufo.lib.get("public.skipExportGlyphs", [])
    preProcessor = preProcessorClass(
        ufo,
        inplace=inplace,
        removeOverlaps=removeOverlaps,
        overlapsBackend=overlapsBackend,
        layerName=layerName,
        skipExportGlyphs=skipExportGlyphs,
    )
    glyphSet = preProcessor.process()
    logger.info("Building OpenType tables")
    optimizeCFF = CFFOptimization(optimizeCFF)
    outlineCompiler = outlineCompilerClass(
        ufo,
        glyphSet=glyphSet,
        glyphOrder=glyphOrder,
        roundTolerance=roundTolerance,
        # SPECIALIZE and above implies charstring specialization
        optimizeCFF=optimizeCFF >= CFFOptimization.SPECIALIZE,
        tables=_tables,
    )
    otf = outlineCompiler.compile()
    # features are only compiled for the default layer; sparse layers
    # (layerName set) carry outlines only
    if layerName is None:
        compileFeatures(
            ufo,
            otf,
            glyphSet=glyphSet,
            featureWriters=featureWriters,
            featureCompilerClass=featureCompilerClass,
        )
    postProcessor = PostProcessor(otf, ufo, glyphSet=glyphSet)
    otf = postProcessor.process(
        useProductionNames,
        # SUBROUTINIZE and above implies subroutinization
        optimizeCFF=optimizeCFF >= CFFOptimization.SUBROUTINIZE,
    )
    return otf
Create FontTools CFF font from a UFO .
24,090
def compileTTF(ufo,
               preProcessorClass=TTFPreProcessor,
               outlineCompilerClass=OutlineTTFCompiler,
               featureCompilerClass=None,
               featureWriters=None,
               glyphOrder=None,
               useProductionNames=None,
               convertCubics=True,
               cubicConversionError=None,
               reverseDirection=True,
               rememberCurveType=True,
               removeOverlaps=False,
               overlapsBackend=None,
               inplace=False,
               layerName=None,
               skipExportGlyphs=None,
               ):
    """Create a FontTools TrueType font from a UFO.

    Same pipeline as compileOTF, but with TrueType pre-processing
    (optional cubic-to-quadratic conversion, contour direction reversal).
    """
    logger.info("Pre-processing glyphs")
    if skipExportGlyphs is None:
        skipExportGlyphs = ufo.lib.get("public.skipExportGlyphs", [])
    preProcessor = preProcessorClass(
        ufo,
        inplace=inplace,
        removeOverlaps=removeOverlaps,
        overlapsBackend=overlapsBackend,
        convertCubics=convertCubics,
        conversionError=cubicConversionError,
        reverseDirection=reverseDirection,
        rememberCurveType=rememberCurveType,
        layerName=layerName,
        skipExportGlyphs=skipExportGlyphs,
    )
    glyphSet = preProcessor.process()
    logger.info("Building OpenType tables")
    outlineCompiler = outlineCompilerClass(
        ufo, glyphSet=glyphSet, glyphOrder=glyphOrder)
    otf = outlineCompiler.compile()
    # features are only compiled for the default layer
    if layerName is None:
        compileFeatures(
            ufo,
            otf,
            glyphSet=glyphSet,
            featureWriters=featureWriters,
            featureCompilerClass=featureCompilerClass,
        )
    postProcessor = PostProcessor(otf, ufo, glyphSet=glyphSet)
    otf = postProcessor.process(useProductionNames)
    return otf
Create FontTools TrueType font from a UFO .
24,091
def compileInterpolatableTTFs(ufos,
                              preProcessorClass=TTFInterpolatablePreProcessor,
                              outlineCompilerClass=OutlineTTFCompiler,
                              featureCompilerClass=None,
                              featureWriters=None,
                              glyphOrder=None,
                              useProductionNames=None,
                              cubicConversionError=None,
                              reverseDirection=True,
                              inplace=False,
                              layerNames=None,
                              skipExportGlyphs=None,
                              ):
    """Create FontTools TrueType fonts from a list of UFOs with
    interpolatable outlines.

    Cubic curves are converted compatibly to quadratic curves using the
    Cu2Qu conversion algorithm.  Yields one TTFont per input UFO.
    """
    from ufo2ft.util import _LazyFontName
    if layerNames is None:
        layerNames = [None] * len(ufos)
    assert len(ufos) == len(layerNames)
    # union of the skip-export sets of all UFOs, so masters stay compatible
    if skipExportGlyphs is None:
        skipExportGlyphs = set()
        for ufo in ufos:
            skipExportGlyphs.update(
                ufo.lib.get("public.skipExportGlyphs", []))
    logger.info("Pre-processing glyphs")
    preProcessor = preProcessorClass(
        ufos,
        inplace=inplace,
        conversionError=cubicConversionError,
        reverseDirection=reverseDirection,
        layerNames=layerNames,
        skipExportGlyphs=skipExportGlyphs,
    )
    glyphSets = preProcessor.process()
    for ufo, glyphSet, layerName in zip(ufos, glyphSets, layerNames):
        fontName = _LazyFontName(ufo)
        if layerName is not None:
            logger.info("Building OpenType tables for %s-%s",
                        fontName, layerName)
        else:
            logger.info("Building OpenType tables for %s", fontName)
        outlineCompiler = outlineCompilerClass(
            ufo,
            glyphSet=glyphSet,
            glyphOrder=glyphOrder,
            # sparse layer masters only get a reduced set of tables
            tables=SPARSE_TTF_MASTER_TABLES if layerName else None,
        )
        ttf = outlineCompiler.compile()
        # features are only compiled for full (default-layer) masters
        if layerName is None:
            compileFeatures(
                ufo,
                ttf,
                glyphSet=glyphSet,
                featureWriters=featureWriters,
                featureCompilerClass=featureCompilerClass,
            )
        postProcessor = PostProcessor(ttf, ufo, glyphSet=glyphSet)
        ttf = postProcessor.process(useProductionNames)
        if layerName is not None:
            # sentinel values mark the font as a sparse master
            ttf["post"].underlinePosition = -0x8000
            ttf["post"].underlineThickness = -0x8000
        yield ttf
Create FontTools TrueType fonts from a list of UFOs with interpolatable outlines . Cubic curves are converted compatibly to quadratic curves using the Cu2Qu conversion algorithm .
24,092
def compileInterpolatableTTFsFromDS(designSpaceDoc,
                                    preProcessorClass=TTFInterpolatablePreProcessor,
                                    outlineCompilerClass=OutlineTTFCompiler,
                                    featureCompilerClass=None,
                                    featureWriters=None,
                                    glyphOrder=None,
                                    useProductionNames=None,
                                    cubicConversionError=None,
                                    reverseDirection=True,
                                    inplace=False,
                                    ):
    """Create FontTools TrueType fonts from the DesignSpaceDocument UFO
    sources, with interpolatable outlines.

    Cubic curves are converted compatibly to quadratic curves using the
    Cu2Qu conversion algorithm.  Returns a (possibly copied) document
    whose sources' ``font`` attributes hold the compiled TTFonts.
    """
    ufos, layerNames = [], []
    for source in designSpaceDoc.sources:
        if source.font is None:
            raise AttributeError(
                "designspace source '%s' is missing required 'font' attribute"
                % getattr(source, "name", "<Unknown>"))
        ufos.append(source.font)
        layerNames.append(source.layerName)
    skipExportGlyphs = designSpaceDoc.lib.get("public.skipExportGlyphs", [])
    ttfs = compileInterpolatableTTFs(
        ufos,
        preProcessorClass=preProcessorClass,
        outlineCompilerClass=outlineCompilerClass,
        featureCompilerClass=featureCompilerClass,
        featureWriters=featureWriters,
        glyphOrder=glyphOrder,
        useProductionNames=useProductionNames,
        cubicConversionError=cubicConversionError,
        reverseDirection=reverseDirection,
        inplace=inplace,
        layerNames=layerNames,
        skipExportGlyphs=skipExportGlyphs,
    )
    if inplace:
        result = designSpaceDoc
    else:
        # deep-copy the document via serialization round-trip
        result = designSpaceDoc.__class__.fromstring(
            designSpaceDoc.tostring())
    for source, ttf in zip(result.sources, ttfs):
        source.font = ttf
    return result
Create FontTools TrueType fonts from the DesignSpaceDocument UFO sources with interpolatable outlines . Cubic curves are converted compatibly to quadratic curves using the Cu2Qu conversion algorithm .
24,093
def compileInterpolatableOTFsFromDS(designSpaceDoc,
                                    preProcessorClass=OTFPreProcessor,
                                    outlineCompilerClass=OutlineOTFCompiler,
                                    featureCompilerClass=None,
                                    featureWriters=None,
                                    glyphOrder=None,
                                    useProductionNames=None,
                                    roundTolerance=None,
                                    inplace=False,
                                    ):
    """Create FontTools CFF fonts from the DesignSpaceDocument UFO
    sources, with interpolatable outlines.

    CFF optimization and overlap removal are disabled so the masters
    stay point-compatible for interpolation.
    """
    for source in designSpaceDoc.sources:
        if source.font is None:
            raise AttributeError(
                "designspace source '%s' is missing required 'font' attribute"
                % getattr(source, "name", "<Unknown>"))
    skipExportGlyphs = designSpaceDoc.lib.get("public.skipExportGlyphs", [])
    otfs = []
    for source in designSpaceDoc.sources:
        otfs.append(
            compileOTF(
                ufo=source.font,
                layerName=source.layerName,
                preProcessorClass=preProcessorClass,
                outlineCompilerClass=outlineCompilerClass,
                featureCompilerClass=featureCompilerClass,
                featureWriters=featureWriters,
                glyphOrder=glyphOrder,
                useProductionNames=useProductionNames,
                optimizeCFF=CFFOptimization.NONE,
                roundTolerance=roundTolerance,
                removeOverlaps=False,
                overlapsBackend=None,
                inplace=inplace,
                skipExportGlyphs=skipExportGlyphs,
                # sparse layer masters only get a reduced set of tables
                _tables=SPARSE_OTF_MASTER_TABLES if source.layerName else None,
            )
        )
    if inplace:
        result = designSpaceDoc
    else:
        # deep-copy the document via serialization round-trip
        result = designSpaceDoc.__class__.fromstring(
            designSpaceDoc.tostring())
    for source, otf in zip(result.sources, otfs):
        source.font = otf
    return result
Create FontTools CFF fonts from the DesignSpaceDocument UFO sources with interpolatable outlines .
24,094
def compileFeatures(ufo,
                    ttFont=None,
                    glyphSet=None,
                    featureWriters=None,
                    featureCompilerClass=None,
                    ):
    """Compile OpenType Layout features from *ufo* into FontTools OTL
    tables.

    If *ttFont* is None, a new TTFont object is created containing the
    new tables, else the provided *ttFont* is updated with the new
    tables.
    """
    if featureCompilerClass is None:
        # use the MTI compiler when the UFO ships MTI feature files
        if any(fn.startswith(MTI_FEATURES_PREFIX) and fn.endswith(".mti")
               for fn in ufo.data.fileNames):
            featureCompilerClass = MtiFeatureCompiler
        else:
            featureCompilerClass = FeatureCompiler
    featureCompiler = featureCompilerClass(
        ufo, ttFont, glyphSet=glyphSet, featureWriters=featureWriters)
    return featureCompiler.compile()
Compile OpenType Layout features from ufo into FontTools OTL tables . If ttFont is None a new TTFont object is created containing the new tables else the provided ttFont is updated with the new tables .
24,095
def _propagate_glyph_anchors(glyphSet, composite, processed):
    """Propagate anchors from base glyphs to a given composite glyph,
    and to all composite glyphs used in between.

    *processed* is the set of glyph names already handled; it guards
    against repeated work and cycles.
    """
    if composite.name in processed:
        return
    processed.add(composite.name)
    if not composite.components:
        return
    base_components = []
    mark_components = []
    anchor_names = set()
    to_add = {}
    for component in composite.components:
        try:
            glyph = glyphSet[component.baseGlyph]
        except KeyError:
            logger.warning(
                'Anchors not propagated for inexistent component {} '
                'in glyph {}'.format(component.baseGlyph, composite.name))
        else:
            # recurse first so the component's own anchors are complete
            _propagate_glyph_anchors(glyphSet, glyph, processed)
            # components whose anchors start with '_' are mark components
            if any(a.name.startswith('_') for a in glyph.anchors):
                mark_components.append(component)
            else:
                base_components.append(component)
                anchor_names |= {a.name for a in glyph.anchors}
    if mark_components and not base_components and _is_ligature_mark(composite):
        # a ligature mark composed only of marks: treat the component
        # closest to the origin as its base
        try:
            component = _component_closest_to_origin(mark_components, glyphSet)
        except Exception as e:
            raise Exception(
                "Error while determining which component of composite "
                "'{}' is the lowest: {}".format(composite.name, str(e)))
        mark_components.remove(component)
        base_components.append(component)
        glyph = glyphSet[component.baseGlyph]
        anchor_names |= {a.name for a in glyph.anchors}
    for anchor_name in anchor_names:
        # don't copy anchors the composite already defines itself
        if not any(a.name.startswith(anchor_name)
                   for a in composite.anchors):
            _get_anchor_data(to_add, glyphSet, base_components, anchor_name)
    for component in mark_components:
        _adjust_anchors(to_add, glyphSet, component)
    # deterministic order for reproducible output
    for name, (x, y) in sorted(to_add.items()):
        anchor_dict = {'name': name, 'x': x, 'y': y}
        try:
            composite.appendAnchor(anchor_dict)
        except TypeError:
            # fall back to the (name, position) calling convention
            composite.appendAnchor(name, (x, y))
Propagate anchors from base glyphs to a given composite glyph and to all composite glyphs used in between .
24,096
def _get_anchor_data(anchor_data, glyphSet, components, anchor_name):
    """Get data for an anchor from a list of components.

    Collects every occurrence of *anchor_name* on the components' base
    glyphs, transforms its position by the component transformation, and
    stores it in *anchor_data*.  When the anchor occurs on several
    components, the stored names are numbered ``name_1``, ``name_2``, ...
    """
    anchors = []
    for component in components:
        for anchor in glyphSet[component.baseGlyph].anchors:
            if anchor.name == anchor_name:
                anchors.append((anchor, component))
                break
    if len(anchors) > 1:
        # duplicate anchors get numbered suffixes
        for i, (anchor, component) in enumerate(anchors):
            t = Transform(*component.transformation)
            name = '%s_%d' % (anchor.name, i + 1)
            anchor_data[name] = t.transformPoint((anchor.x, anchor.y))
    elif anchors:
        anchor, component = anchors[0]
        t = Transform(*component.transformation)
        anchor_data[anchor.name] = t.transformPoint((anchor.x, anchor.y))
Get data for an anchor from a list of components .
24,097
def setContext(self, font, feaFile, compiler=None):
    """Populate a temporary ``self.context`` namespace, which is reset
    after each new call to the ``_write`` method.

    ``context.todo`` holds the feature tags still to be generated; in
    "skip" mode, tags already present in *feaFile* are removed from it.

    Subclasses can override this to provide contextual information
    which depends on other data, or to set any temporary attributes.
    """
    todo = set(self.features)
    if self.mode == "skip":
        # don't regenerate features the author wrote by hand
        existing = ast.findFeatureTags(feaFile)
        todo.difference_update(existing)
    self.context = SimpleNamespace(
        font=font, feaFile=feaFile, compiler=compiler, todo=todo)
    return self.context
Populate a temporary self . context namespace which is reset after each new call to _write method . Subclasses can override this to provide contextual information which depends on other data or set any temporary attributes .
24,098
def write(self, font, feaFile, compiler=None):
    """Write features and class definitions for this font to a feaLib
    FeatureFile object.

    Returns True if the feature file was modified, False if no new
    features were generated.
    """
    self.setContext(font, feaFile, compiler=compiler)
    try:
        # skip generation entirely when the writer has nothing to do
        return self._write() if self.shouldContinue() else False
    finally:
        # the context is per-call; always discard it
        del self.context
Write features and class definitions for this font to a feaLib FeatureFile object . Returns True if feature file was modified False if no new features were generated .
24,099
def makeUnicodeToGlyphNameMapping(self):
    """Return the Unicode to glyph name mapping for the current font."""
    # prefer the compiled font's best cmap subtable when a compiler is
    # available; otherwise build the mapping from the glyph set
    compiler = self.context.compiler
    cmap = None
    if compiler is not None:
        table = compiler.ttFont.get("cmap")
        if table is not None:
            cmap = table.getBestCmap()
    if cmap is None:
        from ufo2ft.util import makeUnicodeToGlyphNameMapping
        if compiler is not None:
            glyphSet = compiler.glyphSet
        else:
            glyphSet = self.context.font
        cmap = makeUnicodeToGlyphNameMapping(glyphSet)
    return cmap
Return the Unicode to glyph name mapping for the current font .