idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
15,800
def draw_on(self, folium_map):
    """Add this feature to a Folium map object."""
    add_feature = getattr(folium_map, self._map_method_name)
    add_feature(**self._folium_kwargs)
Add feature to Folium map object .
15,801
def _set_folium_map(self):
    """Render a map containing only this feature and cache it."""
    solo_map = Map(features=[self], width=self._width, height=self._height)
    self._folium_map = solo_map.draw()
A map containing only the feature .
15,802
def geojson(self, feature_id):
    """GeoJSON representation of the marker as a point.

    Note: GeoJSON orders coordinates (lon, lat).
    """
    lat, lon = self.lat_lon
    point = {'type': 'Point', 'coordinates': (lon, lat)}
    return {'type': 'Feature', 'id': feature_id, 'geometry': point}
GeoJSON representation of the marker as a point .
15,803
def _convert_point ( cls , feature ) : lon , lat = feature [ 'geometry' ] [ 'coordinates' ] popup = feature [ 'properties' ] . get ( 'name' , '' ) return cls ( lat , lon )
Convert a GeoJSON point to a Marker .
15,804
def map(cls, latitudes, longitudes, labels=None, colors=None, areas=None, **kwargs):
    """Return a Map of markers built from columns of coordinates, labels & colors."""
    assert len(latitudes) == len(longitudes)
    assert areas is None or hasattr(cls, '_has_radius'), \
        "A " + cls.__name__ + " has no radius"
    columns = [latitudes, longitudes]
    if labels is None:
        columns.append(("",) * len(latitudes))
    else:
        assert len(labels) == len(latitudes)
        columns.append(labels)
    if colors is not None:
        assert len(colors) == len(latitudes)
        columns.append(colors)
    if areas is not None:
        assert len(areas) == len(latitudes)
        # Convert areas to radii: r = sqrt(area) / pi (matches original formula).
        columns.append(np.array(areas) ** 0.5 / math.pi)
    markers = [cls(*row, **kwargs) for row in zip(*columns)]
    return Map(markers)
Return markers from columns of coordinates labels & colors .
15,805
def polygons(self):
    """Return a list of polygons describing the region.

    Each polygon is a list of rings; each ring a list of lat-lon points.

    Raises:
        ValueError: if the geometry is neither 'Polygon' nor
            'MultiPolygon'. (The original left the local unbound in that
            case and crashed with an UnboundLocalError.)
    """
    if self.type == 'Polygon':
        polygons = [self._geojson['geometry']['coordinates']]
    elif self.type == 'MultiPolygon':
        polygons = self._geojson['geometry']['coordinates']
    else:
        raise ValueError('Unsupported geometry type: {!r}'.format(self.type))
    return [
        [[_lat_lons_from_geojson(s) for s in ring] for ring in polygon]
        for polygon in polygons
    ]
Return a list of polygons describing the region .
15,806
def geojson(self, feature_id):
    """Return the stored GeoJSON with its ID substituted by feature_id.

    Returns the original dict unchanged when the ID already matches;
    otherwise returns a shallow copy with the new ID.
    """
    if self._geojson.get('id', feature_id) != feature_id:
        substituted = self._geojson.copy()
        substituted['id'] = feature_id
        return substituted
    return self._geojson
Return GeoJSON with ID substituted .
15,807
def between(y, z):
    """Greater than or equal to y and less than z (float-tolerant at y)."""
    def _predicate(x):
        return (y <= x < z) or _equal_or_float_equal(x, y)
    return _combinable(_predicate)
Greater than or equal to y and less than z .
15,808
def between_or_equal_to(y, z):
    """Greater than or equal to y and less than or equal to z
    (float-tolerant at both bounds)."""
    def _predicate(x):
        return (
            (y <= x <= z)
            or _equal_or_float_equal(x, y)
            or _equal_or_float_equal(x, z)
        )
    return _combinable(_predicate)
Greater than or equal to y and less than or equal to z .
15,809
def format_column(self, label, column):
    """Return a formatting function that pads & truncates values.

    The column width is the widest formatted value (capped at
    self.max_width), but never narrower than the label, self.min_width,
    or the truncation marker self.etc.
    """
    if len(column) == 0:
        val_width = 0
    else:
        val_width = min(
            max(len(self.format_value(v)) for v in column),
            self.max_width,
        )
    width = max(val_width, len(str(label)), self.min_width, len(self.etc))

    def pad(value, label=False):
        raw = value if label else self.format_value(value)
        if len(raw) > width:
            # Truncate and append the marker so total length == width.
            raw = raw[:width - len(self.etc)] + self.etc
        return raw.ljust(width)
    return pad
Return a formatting function that pads & truncates values .
15,810
def format_value(value):
    """Pretty-print an arbitrary value."""
    # bool is tested before int: bool is an int subclass.
    if isinstance(value, (bool, np.bool_)):
        return str(value)
    if isinstance(value, (int, np.integer)):
        return '{:n}'.format(value)
    if isinstance(value, (float, np.floating)):
        return '{:g}'.format(value)
    return str(value)
Pretty - print an arbitrary value .
15,811
def converts_values(self):
    """Whether this Formatter also converts values (i.e. overrides
    convert_value or convert_column)."""
    overridden = (
        self.convert_value is not Formatter.convert_value,
        self.convert_column is not Formatter.convert_column,
    )
    return any(overridden)
Whether this Formatter also converts values .
15,812
def convert_value(self, value):
    """Convert a string like '93,000.00' to the number 93000.0.

    Non-strings pass through, optionally coerced to float when
    self.int_to_float is set.
    """
    if isinstance(value, str):
        cleaned = value.replace(self.separator, '')
        if self.decimal_point in cleaned:
            return float(cleaned.replace(self.decimal_point, '.'))
        return int(cleaned)
    if self.int_to_float:
        return float(value)
    return value
Convert string 93 000 . 00 to float 93000 . 0 .
15,813
def convert_value(self, value):
    """Convert value to a number.

    If value is a string, assert that it starts with this formatter's
    currency symbol, strip the symbol, then delegate to the parent
    formatter's conversion.
    """
    if isinstance(value, str):
        assert value.startswith(self.symbol), \
            "Currency does not start with " + self.symbol
        value = value.lstrip(self.symbol)
    return super().convert_value(value)
Convert value to float . If value is a string ensure that the first character is the same as symbol ie . the value is in the currency this formatter is representing .
15,814
def convert_column(self, values):
    """Normalize values so they sum to 1 (no-op for an all-zero column)."""
    assert all(values >= 0), 'Cannot normalize a column with negatives'
    total = sum(values)
    return values / total if total > 0 else values
Normalize values .
15,815
def decode(bstr):
    """Decodes an ASCII encoded binary MAC address string into a number."""
    digits = bstr.replace(b':', b'')
    if len(digits) != 12:
        raise ValueError('not a valid MAC address: {!r}'.format(bstr))
    try:
        return int(digits, 16)
    except ValueError:
        raise ValueError('not a valid MAC address: {!r}'.format(bstr))
Decodes an ASCII encoded binary MAC address string into a number .
15,816
def init(lib_name=None, bin_path=None, sdk_path=None):
    """Initialize the Myo SDK by loading the libmyo shared library.

    With no arguments, libmyo must be on your PATH or LD_LIBRARY_PATH.
    At most one of lib_name, bin_path and sdk_path may be given.
    Sets the module-level `libmyo` handle as a side effect.
    """
    if sum(bool(x) for x in [lib_name, bin_path, sdk_path]) > 1:
        raise ValueError('expected zero or one arguments')
    if sdk_path:
        # Derive the binary directory from the SDK layout per platform.
        if sys.platform.startswith('win32'):
            bin_path = os.path.join(sdk_path, 'bin')
        elif sys.platform.startswith('darwin'):
            bin_path = os.path.join(sdk_path, 'myo.framework')
        else:
            raise RuntimeError('unsupported platform: {!r}'.format(sys.platform))
    if bin_path:
        lib_name = os.path.join(bin_path, _getdlname())
    if not lib_name:
        # Fall back to the bare library name, resolved via the loader path.
        lib_name = _getdlname()
    global libmyo
    libmyo = ffi.dlopen(lib_name)
Initialize the Myo SDK by loading the libmyo shared library . With no arguments libmyo must be on your PATH or LD_LIBRARY_PATH .
15,817
def reset(self, value=None):
    """Reset the start time of the interval to now or the specified value.

    :param value: New start time; defaults to the current clock reading.
    """
    if value is None:
        # time.clock() was removed in Python 3.8; perf_counter() is the
        # documented replacement for interval timing.
        value = time.perf_counter()
    self.start = value
    if self.value_on_reset:
        self.value = self.value_on_reset
Resets the start time of the interval to now or the specified value .
15,818
def normalized(self):
    """Returns a normalized (unit-length) copy of this vector."""
    length = self.magnitude()
    return Vector(self.x / length, self.y / length, self.z / length)
Returns a normalized copy of this vector .
15,819
def magnitude(self):
    """Returns the magnitude (Euclidean norm) of the quaternion."""
    return math.sqrt(sum(c ** 2 for c in (self.x, self.y, self.z, self.w)))
Returns the magnitude of the quaternion .
15,820
def normalized(self):
    """Returns the unit quaternion corresponding to the same rotation as
    this one."""
    length = self.magnitude()
    return Quaternion(
        self.x / length, self.y / length, self.z / length, self.w / length)
Returns the unit quaternion corresponding to the same rotation as this one .
15,821
def roll(self):
    """Calculates the Roll of the Quaternion (radians)."""
    qx, qy, qz, qw = self.x, self.y, self.z, self.w
    numerator = 2 * qy * qw - 2 * qx * qz
    denominator = 1 - 2 * qy * qy - 2 * qz * qz
    return math.atan2(numerator, denominator)
Calculates the Roll of the Quaternion .
15,822
def pitch(self):
    """Calculates the Pitch of the Quaternion (radians)."""
    qx, qy, qz, qw = self.x, self.y, self.z, self.w
    numerator = 2 * qx * qw - 2 * qy * qz
    denominator = 1 - 2 * qx * qx - 2 * qz * qz
    return math.atan2(numerator, denominator)
Calculates the Pitch of the Quaternion .
15,823
def yaw(self):
    """Calculates the Yaw of the Quaternion (radians)."""
    qx, qy, qz, qw = self.x, self.y, self.z, self.w
    return math.asin(2 * qx * qy + 2 * qz * qw)
Calculates the Yaw of the Quaternion .
15,824
def rpy(self):
    """Calculates the Roll, Pitch and Yaw of the Quaternion.

    Returns a (roll, pitch, yaw) tuple in radians, computed with the
    same formulas as the individual roll/pitch/yaw accessors.
    """
    qx, qy, qz, qw = self.x, self.y, self.z, self.w
    roll = math.atan2(2 * qy * qw - 2 * qx * qz, 1 - 2 * qy * qy - 2 * qz * qz)
    pitch = math.atan2(2 * qx * qw - 2 * qy * qz, 1 - 2 * qx * qx - 2 * qz * qz)
    yaw = math.asin(2 * qx * qy + 2 * qz * qw)
    return (roll, pitch, yaw)
Calculates the Roll Pitch and Yaw of the Quaternion .
15,825
def get_iso_packet_buffer_list(transfer_p):
    """Python-specific helper extracting a list of iso packet buffers."""
    transfer = transfer_p.contents
    buffers = []
    offset = 0
    for iso_transfer in _get_iso_packet_list(transfer):
        packet_length = iso_transfer.length
        buffers.append(_get_iso_packet_buffer(transfer, offset, packet_length))
        offset += packet_length
    return buffers
Python - specific helper extracting a list of iso packet buffers .
15,826
def get_extra(descriptor):
    """Python-specific helper to access the 'extra' field of descriptors,
    because it's not as straightforward as in C.

    Returns a list where each entry is an individual extra descriptor.
    """
    descriptors = []
    extra_length = descriptor.extra_length
    if extra_length:
        remaining = buffer_at(descriptor.extra.value, extra_length)
        while remaining:
            # First byte of each sub-descriptor is its total length.
            length = _string_item_to_int(remaining[0])
            if not 0 < length <= len(remaining):
                raise ValueError(
                    'Extra descriptor %i is incomplete/invalid' % (
                        len(descriptors), ),
                )
            descriptors.append(remaining[:length])
            remaining = remaining[length:]
    return descriptors
Python - specific helper to access extra field of descriptors because it s not as straight - forward as in C . Returns a list where each entry is an individual extra descriptor .
15,827
def create_binary_buffer(init_or_size):
    """ctypes.create_string_buffer variant which does not add a trailing
    null when init_or_size is not a size."""
    if isinstance(init_or_size, (int, long)):
        # A size was given: allocate a zero-filled buffer of that length.
        init_or_size = bytearray(init_or_size)
    return create_initialised_buffer(init_or_size)
ctypes . create_string_buffer variant which does not add a trailing null when init_or_size is not a size .
15,828
def close(self):
    """Break reference cycles to allow instance to be garbage-collected.

    Raises ValueError if called on a submitted transfer.
    """
    if self.__submitted:
        raise ValueError('Cannot close a submitted transfer')
    self.doom()
    self.__initialized = False
    # Drop callback-related references first so nothing can fire anymore.
    self.__callback = None
    self.__user_data = None
    self.__ctypesCallbackWrapper = None
    if self.__transfer is not None:
        # Free the underlying libusb transfer before dropping our pointer.
        self.__libusb_free_transfer(self.__transfer)
        self.__transfer = None
    self.__transfer_buffer = None
    self.__before_submit = None
    self.__after_completion = None
Break reference cycles to allow instance to be garbage - collected . Raises if called on a submitted transfer .
15,829
def setControl(
        self, request_type, request, value, index, buffer_or_len,
        callback=None, user_data=None, timeout=0):
    """Setup transfer for control use.

    buffer_or_len: either a buffer to send, or a length to receive into.
    timeout: in milliseconds — presumably, as with libusb; confirm.
    """
    if self.__submitted:
        raise ValueError('Cannot alter a submitted transfer')
    if self.__doomed:
        raise DoomedTransferError('Cannot reuse a doomed transfer')
    if isinstance(buffer_or_len, (int, long)):
        length = buffer_or_len
        # Reserve room for the control setup packet ahead of the data.
        string_buffer, transfer_py_buffer = create_binary_buffer(
            length + CONTROL_SETUP_SIZE,
        )
    else:
        length = len(buffer_or_len)
        string_buffer, transfer_py_buffer = create_binary_buffer(
            CONTROL_SETUP + buffer_or_len,
        )
    self.__initialized = False
    self.__transfer_buffer = string_buffer
    # Expose only the data portion (setup packet excluded) to callers.
    self.__transfer_py_buffer = integer_memoryview(
        transfer_py_buffer,
    )[CONTROL_SETUP_SIZE:]
    self.__user_data = user_data
    libusb1.libusb_fill_control_setup(
        string_buffer, request_type, request, value, index, length)
    libusb1.libusb_fill_control_transfer(
        self.__transfer, self.__handle, string_buffer,
        self.__ctypesCallbackWrapper, None, timeout)
    self.__callback = callback
    self.__initialized = True
Setup transfer for control use .
15,830
def setInterrupt(
        self, endpoint, buffer_or_len, callback=None, user_data=None,
        timeout=0):
    """Setup transfer for interrupt use.

    buffer_or_len: either a buffer to send, or a length to receive into.
    """
    if self.__submitted:
        raise ValueError('Cannot alter a submitted transfer')
    if self.__doomed:
        raise DoomedTransferError('Cannot reuse a doomed transfer')
    string_buffer, self.__transfer_py_buffer = create_binary_buffer(
        buffer_or_len)
    self.__initialized = False
    self.__transfer_buffer = string_buffer
    self.__user_data = user_data
    libusb1.libusb_fill_interrupt_transfer(
        self.__transfer, self.__handle, endpoint, string_buffer,
        sizeof(string_buffer), self.__ctypesCallbackWrapper, None, timeout)
    self.__callback = callback
    self.__initialized = True
Setup transfer for interrupt use .
15,831
def setIsochronous(
        self, endpoint, buffer_or_len, callback=None, user_data=None,
        timeout=0, iso_transfer_length_list=None):
    """Setup transfer for isochronous use.

    buffer_or_len: either a buffer to send, or a length to receive into.
    iso_transfer_length_list: per-packet lengths; when None the buffer is
        split evenly among the transfer's ISO packets.

    Raises TypeError if this transfer was created without ISO packets,
    ValueError on inconsistent lengths.
    """
    if self.__submitted:
        raise ValueError('Cannot alter a submitted transfer')
    num_iso_packets = self.__num_iso_packets
    if num_iso_packets == 0:
        # Typo fix vs. original: "canot" -> "cannot" in the error message.
        raise TypeError(
            'This transfer cannot be used for isochronous I/O. '
            'You must get another one with a non-zero iso_packets '
            'parameter.'
        )
    if self.__doomed:
        raise DoomedTransferError('Cannot reuse a doomed transfer')
    string_buffer, transfer_py_buffer = create_binary_buffer(buffer_or_len)
    buffer_length = sizeof(string_buffer)
    if iso_transfer_length_list is None:
        # Split the buffer evenly; refuse a remainder rather than
        # silently truncating.
        iso_length, remainder = divmod(buffer_length, num_iso_packets)
        if remainder:
            raise ValueError(
                'Buffer size %i cannot be evenly distributed among %i '
                'transfers' % (buffer_length, num_iso_packets, )
            )
        iso_transfer_length_list = [iso_length] * num_iso_packets
    configured_iso_packets = len(iso_transfer_length_list)
    if configured_iso_packets > num_iso_packets:
        raise ValueError(
            'Too many ISO transfer lengths (%i), there are '
            'only %i ISO transfers available' % (
                configured_iso_packets, num_iso_packets, )
        )
    if sum(iso_transfer_length_list) > buffer_length:
        raise ValueError(
            'ISO transfers too long (%i), there are only '
            '%i bytes available' % (
                sum(iso_transfer_length_list), buffer_length, )
        )
    transfer_p = self.__transfer
    self.__initialized = False
    self.__transfer_buffer = string_buffer
    self.__transfer_py_buffer = transfer_py_buffer
    self.__user_data = user_data
    libusb1.libusb_fill_iso_transfer(
        transfer_p, self.__handle, endpoint, string_buffer, buffer_length,
        configured_iso_packets, self.__ctypesCallbackWrapper, None, timeout)
    for length, iso_packet_desc in zip(
            iso_transfer_length_list,
            libusb1.get_iso_packet_list(transfer_p)):
        if length <= 0:
            raise ValueError(
                'Negative/null length transfers are not possible.')
        iso_packet_desc.length = length
    self.__callback = callback
    self.__initialized = True
Setup transfer for isochronous use .
15,832
def getISOBufferList(self):
    """Get individual ISO transfer buffers.

    Returns a list with one item per ISO transfer, with their
    individually-configured sizes, consistent with getISOSetupList.
    Should not be called on a submitted transfer.
    """
    transfer_p = self.__transfer
    if transfer_p.contents.type != TRANSFER_TYPE_ISOCHRONOUS:
        raise TypeError('This method cannot be called on non-iso transfers.')
    return libusb1.get_iso_packet_buffer_list(transfer_p)
Get individual ISO transfer s buffer . Returns a list with one item per ISO transfer with their individually - configured sizes . Returned list is consistent with getISOSetupList return value . Should not be called on a submitted transfer .
15,833
def submit(self):
    """Submit transfer for asynchronous handling.

    Raises ValueError on an already-submitted or uninitialized transfer,
    DoomedTransferError on a doomed one, USBError on libusb failure.
    """
    if self.__submitted:
        raise ValueError('Cannot submit a submitted transfer')
    if not self.__initialized:
        raise ValueError(
            'Cannot submit a transfer until it has been initialized')
    if self.__doomed:
        raise DoomedTransferError('Cannot submit doomed transfer')
    self.__before_submit(self)
    self.__submitted = True
    result = libusb1.libusb_submit_transfer(self.__transfer)
    if result:
        # Submission failed: undo bookkeeping before raising.
        self.__after_completion(self)
        self.__submitted = False
        raiseUSBError(result)
Submit transfer for asynchronous handling .
15,834
def register(self, fd, events):
    """Register a USB-unrelated fd with the underlying poller.
    Convenience method."""
    if fd not in self.__fd_set:
        self.__poller.register(fd, events)
        return
    raise ValueError(
        'This fd is a special USB event fd, it cannot be polled.')
Register an USB - unrelated fd to poller . Convenience method .
15,835
def unregister(self, fd):
    """Unregister a USB-unrelated fd from the underlying poller.
    Convenience method."""
    if fd not in self.__fd_set:
        self.__poller.unregister(fd)
        return
    raise ValueError(
        'This fd is a special USB event fd, it must stay registered.')
Unregister an USB - unrelated fd from poller . Convenience method .
15,836
def close(self):
    """Close this handle. If not called explicitly, will be called by
    destructor.
    """
    handle = self.__handle
    if handle is None:
        # Already closed.
        return
    # Move transfers from the weak set into a strong set so they survive
    # while we doom and cancel them.
    weak_transfer_set = self.__transfer_set
    transfer_set = self.__set()
    while True:
        try:
            transfer = weak_transfer_set.pop()
        except self.__KeyError:
            break
        transfer_set.add(transfer)
        transfer.doom()
    inflight = self.__inflight
    for transfer in inflight:
        try:
            transfer.cancel()
        except (self.__USBErrorNotFound, self.__USBErrorNoDevice):
            # Transfer already finished or device vanished: nothing to do.
            pass
    # Pump events until every in-flight transfer completed or cancelled.
    while inflight:
        try:
            self.__context.handleEvents()
        except self.__USBErrorInterrupted:
            pass
    for transfer in transfer_set:
        transfer.close()
    self.__libusb_close(handle)
    self.__handle = None
Close this handle . If not called explicitly will be called by destructor .
15,837
def getConfiguration(self):
    """Get the current configuration number for this device."""
    config = c_int()
    mayRaiseUSBError(
        libusb1.libusb_get_configuration(self.__handle, byref(config)),
    )
    return config.value
Get the current configuration number for this device .
15,838
def kernelDriverActive(self, interface):
    """Tell whether a kernel driver is active on given interface number."""
    status = libusb1.libusb_kernel_driver_active(self.__handle, interface)
    if status in (0, 1):
        return bool(status)
    # Any other value is a libusb error code.
    raiseUSBError(status)
Tell whether a kernel driver is active on given interface number .
15,839
def getStringDescriptor(self, descriptor, lang_id, errors='strict'):
    """Fetch description string for given descriptor and in given language.

    Use getSupportedLanguageList to know which languages are available.
    Return value is a unicode string.
    Return None if there is no such descriptor on device.
    """
    if descriptor == 0:
        return None
    descriptor_string = bytearray(STRING_LENGTH)
    try:
        received = mayRaiseUSBError(libusb1.libusb_get_string_descriptor(
            self.__handle, descriptor, lang_id,
            create_binary_buffer(descriptor_string)[0], STRING_LENGTH,
        ))
    except USBErrorNotFound:
        return None
    # Byte 0 is the descriptor's own declared length, byte 1 its type tag.
    if received < 2 or descriptor_string[1] != DT_STRING:
        raise ValueError('Invalid string descriptor')
    # Decode only the payload, bounded by both the bytes actually
    # received and the descriptor's declared length.
    return descriptor_string[2:min(
        received,
        descriptor_string[0],
    )].decode('UTF-16-LE', errors=errors)
Fetch description string for given descriptor and in given language . Use getSupportedLanguageList to know which languages are available . Return value is a unicode string . Return None if there is no such descriptor on device .
15,840
def getASCIIStringDescriptor(self, descriptor, errors='strict'):
    """Fetch description string for given descriptor, in first available
    language.

    Return value is a unicode string.
    Return None if there is no such descriptor on device.
    """
    if descriptor == 0:
        return None
    raw = bytearray(STRING_LENGTH)
    try:
        received = mayRaiseUSBError(
            libusb1.libusb_get_string_descriptor_ascii(
                self.__handle, descriptor, create_binary_buffer(raw)[0],
                STRING_LENGTH,
            ))
    except USBErrorNotFound:
        return None
    return raw[:received].decode('ASCII', errors=errors)
Fetch description string for given descriptor in first available language . Return value is a unicode string . Return None if there is no such descriptor on device .
15,841
def getPortNumberList(self):
    """Get the port number of each hub toward device."""
    ports = (c_uint8 * PATH_MAX_DEPTH)()
    depth = libusb1.libusb_get_port_numbers(self.device_p, ports, len(ports))
    mayRaiseUSBError(depth)
    return list(ports[:depth])
Get the port number of each hub toward device .
15,842
def getMaxPacketSize(self, endpoint):
    """Get device's max packet size for given endpoint."""
    size = libusb1.libusb_get_max_packet_size(self.device_p, endpoint)
    # Negative values are libusb error codes.
    mayRaiseUSBError(size)
    return size
Get device s max packet size for given endpoint .
15,843
def getMaxISOPacketSize(self, endpoint):
    """Get the maximum size for a single isochronous packet for given
    endpoint."""
    size = libusb1.libusb_get_max_iso_packet_size(self.device_p, endpoint)
    # Negative values are libusb error codes.
    mayRaiseUSBError(size)
    return size
Get the maximum size for a single isochronous packet for given endpoint .
15,844
def open(self):
    """Open device. Returns a USBDeviceHandle instance."""
    handle_p = libusb1.libusb_device_handle_p()
    mayRaiseUSBError(libusb1.libusb_open(self.device_p, byref(handle_p)))
    device_handle = USBDeviceHandle(self.__context, handle_p, self)
    # Track the handle so it gets closed with this device.
    self.__close_set.add(device_handle)
    return device_handle
Open device . Returns an USBDeviceHandle instance .
15,845
def getDeviceIterator(self, skip_on_error=False):
    """Return an iterator over all USB devices currently plugged in, as
    USBDevice instances.

    skip_on_error: when True, devices whose construction raises USBError
    are silently skipped instead of propagating the error.
    """
    device_p_p = libusb1.libusb_device_p_p()
    libusb_device_p = libusb1.libusb_device_p
    device_list_len = libusb1.libusb_get_device_list(
        self.__context_p, byref(device_p_p))
    mayRaiseUSBError(device_list_len)
    try:
        for device_p in device_p_p[:device_list_len]:
            try:
                # Wrap in a fresh libusb_device_p so the libusb-owned
                # list can be freed in the finally clause below.
                device = USBDevice(self, libusb_device_p(device_p.contents))
            except USBError:
                if not skip_on_error:
                    raise
            else:
                self.__close_set.add(device)
                yield device
    finally:
        libusb1.libusb_free_device_list(device_p_p, 1)
Return an iterator over all USB devices currently plugged in as USBDevice instances .
15,846
def getDeviceList(self, skip_on_access_error=False, skip_on_error=False):
    """Return a list of all USB devices currently plugged in, as
    USBDevice instances.

    Either skip flag causes erroring devices to be skipped.
    """
    skip = skip_on_access_error or skip_on_error
    return list(self.getDeviceIterator(skip_on_error=skip))
Return a list of all USB devices currently plugged in as USBDevice instances .
15,847
def getPollFDList(self):
    """Return file descriptors to be used to poll USB events.

    You should not have to call this method, unless you are integrating
    this class with a polling mechanism.
    """
    pollfd_p_p = libusb1.libusb_get_pollfds(self.__context_p)
    if not pollfd_p_p:
        errno = get_errno()
        if errno:
            raise OSError(errno)
        else:
            # A NULL return without errno means the platform's libusb
            # has no pollable-FD support.
            raise NotImplementedError(
                'Your libusb does not seem to implement pollable FDs')
    try:
        result = []
        append = result.append
        fd_index = 0
        # The returned pollfd array is NULL-terminated.
        while pollfd_p_p[fd_index]:
            append((
                pollfd_p_p[fd_index].contents.fd,
                pollfd_p_p[fd_index].contents.events,
            ))
            fd_index += 1
    finally:
        # The array is allocated by libusb and must be freed by us.
        _free(pollfd_p_p)
    return result
Return file descriptors to be used to poll USB events . You should not have to call this method unless you are integrating this class with a polling mechanism .
15,848
def handleEventsTimeout(self, tv=0):
    """Handle any pending event.

    If tv is 0, returns immediately after handling already-pending
    events. Otherwise, tv is the maximum number of seconds to wait for
    events.
    """
    if tv is None:
        tv = 0
    seconds = int(tv)
    microseconds = int((tv - seconds) * 1000000)
    mayRaiseUSBError(
        libusb1.libusb_handle_events_timeout(
            self.__context_p, byref(libusb1.timeval(seconds, microseconds)),
        ),
    )
Handle any pending event . If tv is 0 will return immediately after handling already - pending events . Otherwise defines the maximum amount of time to wait for events in seconds .
15,849
def getNextTimeout(self):
    """Returns the next internal timeout that libusb needs to handle, in
    seconds, or None if no timeout is needed.

    You should not have to call this method, unless you are integrating
    this class with a polling mechanism.
    """
    tv = libusb1.timeval()
    status = libusb1.libusb_get_next_timeout(self.__context_p, byref(tv))
    if status == 0:
        return None
    if status == 1:
        return tv.tv_sec + (tv.tv_usec * 0.000001)
    raiseUSBError(status)
Returns the next internal timeout that libusb needs to handle in seconds or None if no timeout is needed . You should not have to call this method unless you are integrating this class with a polling mechanism .
15,850
def waitForEvent(self, tv=0):
    """See libusb_wait_for_event doc."""
    if tv is None:
        tv = 0
    seconds = int(tv)
    microseconds = int((tv - seconds) * 1000000)
    libusb1.libusb_wait_for_event(
        self.__context_p, byref(libusb1.timeval(seconds, microseconds)))
See libusb_wait_for_event doc .
15,851
def read(address, length):
    """Prepares an i2c read transaction.

    :param address: Slave address.
    :param length: Number of bytes to read.
    :return: An i2c_msg flagged for reading.
    """
    buf = create_string_buffer(length)
    return i2c_msg(addr=address, flags=I2C_M_RD, len=length, buf=buf)
Prepares an i2c read transaction .
15,852
def write(address, buf):
    """Prepares an i2c write transaction.

    :param address: Slave address.
    :param buf: Bytes to write. A str is encoded byte-per-character;
        any other iterable of ints is converted to bytes.
    :return: An i2c_msg flagged for writing (flags=0).
    """
    if sys.version_info.major >= 3:
        if type(buf) is str:
            # Encode each character to its ordinal byte value.
            buf = bytes(map(ord, buf))
        else:
            buf = bytes(buf)
    else:
        # Python 2: build a byte string from the int sequence.
        if type(buf) is not str:
            buf = ''.join([chr(x) for x in buf])
    arr = create_string_buffer(buf, len(buf))
    return i2c_msg(addr=address, flags=0, len=len(arr), buf=arr)
Prepares an i2c write transaction .
15,853
def open(self, bus):
    """Open a given i2c bus and cache its supported functionality."""
    device_path = "/dev/i2c-{}".format(bus)
    self.fd = os.open(device_path, os.O_RDWR)
    self.funcs = self._get_funcs()
Open a given i2c bus .
15,854
def close(self):
    """Close the i2c connection.

    Safe to call multiple times; subsequent calls are no-ops.
    """
    # Compare against None explicitly: file descriptor 0 is valid, and
    # the original truthiness test would silently leak it.
    if self.fd is not None:
        os.close(self.fd)
        self.fd = None
Close the i2c connection .
15,855
def _set_address(self, address, force=None):
    """Set i2c slave address to use for subsequent calls.

    Only issues the ioctl when the address or force mode changed.
    """
    if force is None:
        force = self.force
    if self.address != address or self._force_last != force:
        request = I2C_SLAVE_FORCE if force is True else I2C_SLAVE
        ioctl(self.fd, request, address)
        self.address = address
        self._force_last = force
Set i2c slave address to use for subsequent calls .
15,856
def _get_funcs(self):
    """Returns a 32-bit value stating supported I2C functions."""
    funcs = c_uint32()
    ioctl(self.fd, I2C_FUNCS, funcs)
    return funcs.value
Returns a 32 - bit value stating supported I2C functions .
15,857
def read_byte(self, i2c_addr, force=None):
    """Read a single byte from a device."""
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_READ, command=0, size=I2C_SMBUS_BYTE)
    ioctl(self.fd, I2C_SMBUS, request)
    return request.data.contents.byte
Read a single byte from a device .
15,858
def write_byte(self, i2c_addr, value, force=None):
    """Write a single byte to a device.

    For SMBus byte writes the value travels in the command field.
    """
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_WRITE, command=value, size=I2C_SMBUS_BYTE)
    ioctl(self.fd, I2C_SMBUS, request)
Write a single byte to a device .
15,859
def read_byte_data(self, i2c_addr, register, force=None):
    """Read a single byte from a designated register."""
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_READ, command=register,
        size=I2C_SMBUS_BYTE_DATA)
    ioctl(self.fd, I2C_SMBUS, request)
    return request.data.contents.byte
Read a single byte from a designated register .
15,860
def write_byte_data(self, i2c_addr, register, value, force=None):
    """Write a byte to a given register."""
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_WRITE, command=register,
        size=I2C_SMBUS_BYTE_DATA)
    request.data.contents.byte = value
    ioctl(self.fd, I2C_SMBUS, request)
Write a byte to a given register .
15,861
def process_call(self, i2c_addr, register, value, force=None):
    """Execute an SMBus Process Call: send a 16-bit value and receive a
    16-bit response."""
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_WRITE, command=register,
        size=I2C_SMBUS_PROC_CALL)
    request.data.contents.word = value
    ioctl(self.fd, I2C_SMBUS, request)
    # The kernel replaces the word in place with the device's response.
    return request.data.contents.word
Executes a SMBus Process Call sending a 16 - bit value and receiving a 16 - bit response
15,862
def read_block_data(self, i2c_addr, register, force=None):
    """Read a block of up to 32 bytes from a given register."""
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_READ, command=register,
        size=I2C_SMBUS_BLOCK_DATA)
    ioctl(self.fd, I2C_SMBUS, request)
    block = request.data.contents.block
    # block[0] holds the number of valid data bytes that follow.
    return block[1:block[0] + 1]
Read a block of up to 32 - bytes from a given register .
15,863
def write_block_data(self, i2c_addr, register, data, force=None):
    """Write a block of byte data to a given register."""
    length = len(data)
    if length > I2C_SMBUS_BLOCK_MAX:
        raise ValueError(
            "Data length cannot exceed %d bytes" % I2C_SMBUS_BLOCK_MAX)
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_WRITE, command=register,
        size=I2C_SMBUS_BLOCK_DATA)
    block = request.data.contents.block
    # First byte of the block carries the payload length.
    block[0] = length
    block[1:length + 1] = data
    ioctl(self.fd, I2C_SMBUS, request)
Write a block of byte data to a given register .
15,864
def read_i2c_block_data(self, i2c_addr, register, length, force=None):
    """Read a block of byte data from a given register."""
    if length > I2C_SMBUS_BLOCK_MAX:
        raise ValueError(
            "Desired block length over %d bytes" % I2C_SMBUS_BLOCK_MAX)
    self._set_address(i2c_addr, force=force)
    request = i2c_smbus_ioctl_data.create(
        read_write=I2C_SMBUS_READ, command=register,
        size=I2C_SMBUS_I2C_BLOCK_DATA)
    # The desired read length is passed in through the data byte.
    request.data.contents.byte = length
    ioctl(self.fd, I2C_SMBUS, request)
    return request.data.contents.block[1:length + 1]
Read a block of byte data from a given register .
15,865
def max_dimension(cellmap, sheet=None):
    """Calculate the maximum dimension of the workbook, or optionally of
    a single worksheet.

    Returns a (rows, cols) tuple of integers.
    """
    rows = cols = 0
    for cell in cellmap.values():
        if sheet is not None and cell.sheet != sheet:
            continue
        rows = max(rows, int(cell.row))
        cols = max(cols, int(col2num(cell.col)))
    return (rows, cols)
This function calculates the maximum dimension of the workbook or optionally the worksheet . It returns a tuple of two integers the first being the rows and the second being the columns .
15,866
def _cast_number(value):
    """Convert a number held as a string to an int or float."""
    # A match on the float pattern means the string has a fractional or
    # exponent part; otherwise treat it as an integer.
    if FLOAT_REGEX.search(value) is not None:
        return float(value)
    return int(value)
Convert numbers as string to an int or float
15,867
def read_rels(archive):
    """Read relationships for a workbook, yielding (rId, info) pairs."""
    tree = fromstring(archive.read(ARC_WORKBOOK_RELS))
    for rel in safe_iterator(tree, '{%s}Relationship' % PKG_REL_NS):
        rId = rel.get('Id')
        target = rel.get("Target")
        # Normalise the target path relative to the archive root.
        if target.startswith("/xl"):
            target = target.replace("/xl", "xl")
        elif not target.startswith("xl") and not target.startswith(".."):
            target = "xl/" + target
        yield rId, {'path': target, 'type': rel.get('Type')}
Read relationships for a workbook
15,868
def read_content_types(archive):
    """Read content types, yielding (content_type, part_name) pairs."""
    root = fromstring(archive.read(ARC_CONTENT_TYPES))
    for override in root.findall('{%s}Override' % CONTYPES_NS):
        yield override.get('ContentType'), override.get('PartName')
Read content types .
15,869
def read_sheets(archive):
    """Read worksheet titles and ids for a workbook."""
    tree = fromstring(archive.read(ARC_WORKBOOK))
    for element in safe_iterator(tree, '{%s}sheet' % SHEET_MAIN_NS):
        attrib = element.attrib
        # Rename the namespaced relationship id to a plain 'id' key.
        attrib['id'] = attrib.pop("{%s}id" % REL_NS)
        if attrib['id']:
            yield attrib
Read worksheet titles and ids for a workbook
15,870
def detect_worksheets(archive):
    """Yield the relationship dict of every worksheet in the archive,
    augmented with title, sheet_id and state."""
    content_types = read_content_types(archive)
    valid_sheets = dict(
        (path, ct) for ct, path in content_types if ct == WORKSHEET_TYPE)
    rels = dict(read_rels(archive))
    for sheet in read_sheets(archive):
        rel = rels[sheet['id']]
        rel['title'] = sheet['name']
        rel['sheet_id'] = sheet['sheetId']
        rel['state'] = sheet.get('state', 'visible')
        # Accept sheets declared with the worksheet content type, or
        # whose path lives under a worksheets/ folder.
        if ("/" + rel['path'] in valid_sheets
                or "worksheets" in rel['path']):
            yield rel
Return a list of worksheets
15,871
def read_string_table(xml_source):
    """Read in all shared strings in the table."""
    strings = []
    si_tag = '{%s}si' % SHEET_MAIN_NS
    for _, node in iterparse(_get_xml_iter(xml_source)):
        if node.tag == si_tag:
            content = Text.from_tree(node).content
            # Strip the escape marker left in shared strings.
            strings.append(content.replace('x005F_', ''))
            # Free the element to keep memory flat on big tables.
            node.clear()
    return IndexedList(strings)
Read in all shared strings in the table
15,872
def create_node(t, ref=None, debug=False):
    """Simple factory function: map a token to its AST node type."""
    if t.ttype == "operand":
        if t.tsubtype in ("range", "named_range", "pointer"):
            return RangeNode(t, ref, debug=debug)
        return OperandNode(t)
    if t.ttype == "function":
        return FunctionNode(t, ref, debug=debug)
    if t.ttype.startswith("operator"):
        return OperatorNode(t, ref, debug=debug)
    return ASTNode(t, debug=debug)
Simple factory function
15,873
def build_ast(expression, debug=False):
    """Build an AST from an Excel formula expression in reverse polish
    notation.

    Returns (graph, root_node).
    """
    G = DiGraph()
    stack = []
    for n in expression:
        if isinstance(n, OperatorNode):
            if n.ttype == "operator-infix":
                arg2 = stack.pop()
                arg1 = stack.pop()
                if (n.tvalue == ':'):
                    # Qualify the right-hand side of a range with the
                    # left-hand side's sheet name when it lacks one.
                    if '!' in arg1.tvalue and arg2.ttype == 'operand' and '!' not in arg2.tvalue:
                        arg2.tvalue = arg1.tvalue.split('!')[0] + '!' + arg2.tvalue
                G.add_node(arg1, pos=1)
                G.add_node(arg2, pos=2)
                G.add_edge(arg1, n)
                G.add_edge(arg2, n)
            else:
                # Prefix/postfix operators consume a single argument.
                arg1 = stack.pop()
                G.add_node(arg1, pos=1)
                G.add_edge(arg1, n)
        elif isinstance(n, FunctionNode):
            args = []
            for _ in range(n.num_args):
                try:
                    args.append(stack.pop())
                except:
                    # NOTE(review): bare except re-raised as a plain
                    # Exception hides the underlying error (presumably a
                    # malformed formula with too few arguments) —
                    # consider a clearer message.
                    raise Exception()
            args.reverse()
            for i, a in enumerate(args):
                G.add_node(a, pos=i)
                G.add_edge(a, n)
        else:
            G.add_node(n, pos=0)
        stack.append(n)
    return G, stack.pop()
build an AST from an Excel formula expression in reverse polish notation
15,874
def cell2code(cell, named_ranges):
    """Generate python code for the given cell.

    Returns (code_string, ast); ast is None for non-formula cells.
    """
    if cell.formula:
        debug = False
        # Named ranges carry no concrete address, so no ref is passed.
        ref = parse_cell_address(cell.address()) if not cell.is_named_range else None
        sheet = cell.sheet
        e = shunting_yard(cell.formula, named_ranges, ref=ref, tokenize_range=False)
        ast, root = build_ast(e, debug=debug)
        code = root.emit(ast, context=sheet)
    else:
        ast = None
        # NOTE: 'unicode' implies this module targets Python 2.
        if isinstance(cell.value, unicode):
            # Emit a quoted unicode literal, escaping embedded quotes.
            code = u'u"' + cell.value.replace(u'"', u'\\"') + u'"'
        elif isinstance(cell.value, str):
            raise RuntimeError("Got unexpected non-unicode str")
        else:
            code = str(cell.value)
    return code, ast
Generate python code for the given cell
15,875
def examine(self):
    """Called by LiveReloadHandler's poll_tasks method.

    Returns either a queued change event, or an (action_file, None)
    pair reflecting whether anything changed since the last poll.
    """
    if self._changes:
        # Drain explicitly queued change events first.
        return self._changes.pop()
    if not self._changed:
        return None, None
    self._changed = False
    return (self._action_file or True), None
Called by LiveReloadHandler s poll_tasks method .
15,876
def watch(self, path, action, *args, **kwargs):
    """Called by the Server instance when a new watch task is requested.

    Falls back to the module-level ``_set_changed`` action when none is
    given, then schedules a recursive watchdog handler on ``path``.
    """
    chosen_action = action if action is not None else _set_changed
    handler = _WatchdogHandler(self, chosen_action)
    self._observer.schedule(handler, path=path, recursive=True)
Called by the Server instance when a new watch task is requested .
15,877
def authors():
    """Updates the AUTHORS file with a list of committers from GIT."""
    # Matches "Name <email>" as produced by `git shortlog -s -e`.
    fmt_re = re.compile(r'([^<]+) <([^>]+)>')
    authors = local('git shortlog -s -e -n | cut -f 2-', capture=True)
    with open('AUTHORS', 'w') as fh:
        fh.write('Project contributors\n')
        fh.write('====================\n\n')
        for line in authors.splitlines():
            match = fmt_re.match(line)
            # name is unpacked for clarity; only email is consulted below.
            name, email = match.groups()
            # Skip committers configured as ignored on the fabric env.
            if email in env.ignored_authors:
                continue
            fh.write(' * ')
            fh.write(line)
            fh.write('\n')
Updates the AUTHORS file with a list of committers from GIT .
15,878
def release():
    """Create a new release and upload it to PyPI.

    Aborts early when the working tree is dirty, when rebuilding AUTHORS
    introduces changes, or when MANIFEST.in is stale.  After interactive
    confirmation it tags the release, pushes, and uploads sdist + wheel.
    """
    if not is_working_tree_clean():
        print('Your working tree is not clean. Refusing to create a release.')
        return
    print('Rebuilding the AUTHORS file to check for modifications...')
    authors()
    if not is_working_tree_clean():
        print('Your working tree is not clean after the AUTHORS file was '
              'rebuilt.')
        print('Please commit the changes before continuing.')
        return
    if not is_manifest_up_to_date():
        print('Manifest is not up to date.')
        print('Please update MANIFEST.in or remove spurious files.')
        return
    version = 'v{}'.format(local('python setup.py --version', capture=True))
    name = local('python setup.py --name', capture=True)
    tag_message = '{} release version {}.'.format(name, version)
    print('----------------------')
    print('Proceeding will tag the release, push the repository upstream,')
    print('and release a new version on PyPI.')
    print()
    print('Version: {}'.format(version))
    print('Tag message: {}'.format(tag_message))
    print()
    if not confirm('Continue?', default=True):
        print('Aborting.')
        return
    # Quote so shell metacharacters in version/message cannot break the command.
    local('git tag -a {} -m {}'.format(pipes.quote(version),
                                       pipes.quote(tag_message)))
    local('git push --tags origin develop')
    local('python setup.py sdist bdist_wheel upload')
Create a new release and upload it to PyPI .
15,879
def insert(self, iterable, index=0, data=None, weight=1.0):
    """Insert a new node into the tree.

    Walks/creates one child per element of ``iterable``; the final node is
    marked terminal, remembers the full key and weight, and accumulates
    ``data`` when given.

    Fix: the recursive call previously dropped ``weight``, so any insert
    deeper than the current node silently reverted to the default 1.0.
    """
    if index == len(iterable):
        self.is_terminal = True
        self.key = iterable
        self.weight = weight
        if data:
            self.data.add(data)
    else:
        if iterable[index] not in self.children:
            self.children[iterable[index]] = TrieNode()
        # Forward data AND weight down to the terminal node.
        self.children[iterable[index]].insert(iterable, index + 1, data, weight)
Insert new node into tree
15,880
def remove(self, iterable, data=None, index=0):
    """Remove an element from the trie.

    When ``data`` is given only that payload is removed (the node stays
    terminal while other payloads remain); otherwise the whole entry is
    cleared.  Returns True when something was removed.
    """
    if index == len(iterable):
        if not self.is_terminal:
            return False
        if data:
            self.data.remove(data)
            if not self.data:
                self.is_terminal = False
        else:
            self.data.clear()
            self.is_terminal = False
        return True
    child = self.children.get(iterable[index])
    if child is None:
        return False
    return child.remove(iterable, index=index + 1, data=data)
Remove an element from the trie
15,881
def gather(self, iterable):
    """Call lookup with gather=True, passing ``iterable``, and yield each result."""
    results = self.lookup(iterable, gather=True)
    for item in results:
        yield item
Calls the lookup with gather True Passing iterable and yields the result .
15,882
def lookup(self, iterable, gather=False):
    """Call lookup on the root node, forwarding this trie's configured
    edit-distance limit and match threshold, and yield every match."""
    matches = self.root.lookup(
        iterable,
        gather=gather,
        edit_distance=0,
        max_edit_distance=self.max_edit_distance,
        match_threshold=self.match_threshold,
    )
    for match in matches:
        yield match
Call the lookup on the root node with the given parameters .
15,883
def insert(self, iterable, data=None, weight=1.0):
    """Used to insert into the root node.

    Fix: forward the caller-supplied ``weight`` instead of a hard-coded
    1.0, so weighted inserts are honored.
    """
    self.root.insert(iterable, index=0, data=data, weight=weight)
Used to insert into the root node
15,884
def remove(self, iterable, data=None):
    """Delegate removal of ``iterable`` (optionally just one ``data``
    payload) to the root node and report whether anything was removed."""
    removed = self.root.remove(iterable, data=data)
    return removed
Used to remove from the root node
15,885
def bronk(r, p, x, graph):
    """Bron-Kerbosch recursion used to find cliques in ``graph``.

    ``r`` is the clique built so far, ``p`` the candidate vertices, and
    ``x`` the already-processed vertices; maximal cliques are yielded.
    """
    if not p and not x:
        yield r
        return
    for candidate in p[:]:
        neighbors = graph.get_neighbors_of(candidate)
        extended = r[:]
        extended.append(candidate)
        for clique in bronk(extended,
                            [v for v in p if v in neighbors],
                            [v for v in x if v in neighbors],
                            graph):
            yield clique
        p.remove(candidate)
        x.append(candidate)
This is used to find cliques and remove them from the graph
15,886
def graph_key_from_tag(tag, entity_index):
    """Return the graph key "<start_token>-<entity key>-<confidence>" for
    the entity at ``entity_index`` within ``tag``."""
    entity = tag.get('entities', [])[entity_index]
    parts = (str(tag.get('start_token')),
             entity.get('key'),
             str(entity.get('confidence')))
    return '-'.join(parts)
Returns a key from a tag entity
15,887
def add_edge(self, a, b):
    """Add an undirected edge between vertices ``a`` and ``b``.

    If either vertex does not exist yet, its adjacency set is created.
    Idiom: ``dict.setdefault`` replaces the original get / test-for-None /
    store sequence with identical behavior.
    """
    self.adjacency_lists.setdefault(a, set()).add(b)
    self.adjacency_lists.setdefault(b, set()).add(a)
Used to add edges to the graph . a and b are vertexes , and if a or b doesn't exist then the vertex is created
15,888
def append(self, data):
    """Append items or lists to the Lattice.

    Non-empty lists are appended as-is; anything else (including an empty
    list) is wrapped in a single-element list first.
    """
    is_nonempty_list = isinstance(data, list) and len(data) > 0
    self.nodes.append(data if is_nonempty_list else [data])
Appends items or lists to the Lattice
15,889
def traverse(self, index=0):
    """Yield every combination of one entry per node list, depth-first.

    Produces lists where each yielded list is a different combination of
    items drawn from the node lists at and after ``index``; list-valued
    entries are spliced in flat.
    """
    if index >= len(self.nodes):
        yield []
        return
    for entity in self.nodes[index]:
        prefix = entity if isinstance(entity, list) else [entity]
        for tail in self.traverse(index=index + 1):
            yield prefix + tail
This is used to produce a list of lists where each item in that list is a different combination of items from the lists within , with every combination of such values .
15,890
def _build_graph(self, tags):
    """Builds a graph from the entities included in the tags.

    Note: this is used internally.  An edge is added between every pair of
    entities whose token spans do not overlap, so that cliques in the
    resulting graph correspond to mutually compatible tag combinations.
    """
    graph = SimpleGraph()
    for tag_index in xrange(len(tags)):
        for entity_index in xrange(len(tags[tag_index].get('entities'))):
            a_entity_name = graph_key_from_tag(tags[tag_index], entity_index)
            tokens = self.tokenizer.tokenize(
                tags[tag_index].get('entities', [])[entity_index].get('match'))
            # Only look at later tags; earlier pairs were connected already.
            for tag in tags[tag_index + 1:]:
                start_token = tag.get('start_token')
                # Connect only entities that start after this entity's span ends.
                if start_token >= tags[tag_index].get('start_token') + len(tokens):
                    for b_entity_index in xrange(len(tag.get('entities'))):
                        b_entity_name = graph_key_from_tag(tag, b_entity_index)
                        graph.add_edge(a_entity_name, b_entity_name)
    return graph
Builds a graph from the entities included in the tags . Note this is used internally .
15,891
def _sub_expand(self, tags):
    """This is called by expand to find cliques.

    Builds the compatibility graph for the given overlapping tags,
    enumerates its cliques, and yields each clique as a start-token-sorted
    list of rebuilt tags whose confidence combines the entity's with its
    originating tag's.
    """
    entities = {}
    graph = self._build_graph(tags)
    # Index every entity by its graph key as [entity, confidence, source tag].
    for tag in tags:
        for entity_index in xrange(len(tag.get('entities'))):
            node_name = graph_key_from_tag(tag, entity_index)
            if not node_name in entities:
                entities[node_name] = []
            entities[node_name] += [
                tag.get('entities', [])[entity_index],
                tag.get('entities', [])[entity_index].get('confidence'),
                tag]
    for clique in get_cliques(list(entities), graph):
        result = []
        for entity_name in clique:
            # Graph keys have the form "<start_token>-<key>-<confidence>".
            start_token = int(entity_name.split("-")[0])
            old_tag = entities[entity_name][2]
            tag = {
                'start_token': start_token,
                'entities': [entities.get(entity_name)[0]],
                'confidence': entities.get(entity_name)[1] * old_tag.get('confidence', 1.0),
                'end_token': old_tag.get('end_token'),
                'match': old_tag.get('entities')[0].get('match'),
                'key': old_tag.get('entities')[0].get('key'),
                'from_context': old_tag.get('from_context', False)}
            result.append(tag)
        result = sorted(result, key=lambda e: e.get('start_token'))
        yield result
This is called by expand to find cliques
15,892
def expand(self, tags, clique_scoring_func=None):
    """This is the main function to expand tags into cliques.

    Groups tags whose token spans overlap, expands each overlapping group
    into its candidate cliques (sorted best-first when a scoring function
    is supplied), and returns a traversal over the resulting lattice of
    alternatives.
    """
    lattice = Lattice()
    overlapping_spans = []

    def end_token_index():
        # Furthest end token among the spans collected so far.
        return max([t.get('end_token') for t in overlapping_spans])
    for i in xrange(len(tags)):
        tag = tags[i]
        if len(overlapping_spans) > 0 and end_token_index() >= tag.get('start_token'):
            # Still overlapping the current group; keep accumulating.
            overlapping_spans.append(tag)
        elif len(overlapping_spans) > 1:
            # The group that just ended had real overlap: expand to cliques.
            cliques = list(self._sub_expand(overlapping_spans))
            if clique_scoring_func:
                cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e))
            lattice.append(cliques)
            overlapping_spans = [tag]
        else:
            # Single-tag (or empty first) group: append as-is, start a new one.
            lattice.append(overlapping_spans)
            overlapping_spans = [tag]
    # Flush the final pending group.
    if len(overlapping_spans) > 1:
        cliques = list(self._sub_expand(overlapping_spans))
        if clique_scoring_func:
            cliques = sorted(cliques, key=lambda e: -1 * clique_scoring_func(e))
        lattice.append(cliques)
    else:
        lattice.append(overlapping_spans)
    return lattice.traverse()
This is the main function to expand tags into cliques
15,893
def _iterate_subsequences(self, tokens):
    """Yield (subsequence, start_index) for every contiguous run of tokens.

    Using regex entities invokes this; it is an N! style operation and
    significantly impacts adapt's performance.
    """
    token_count = len(tokens)
    for start_idx in xrange(token_count):
        for end_idx in xrange(start_idx + 1, token_count + 1):
            yield ' '.join(tokens[start_idx:end_idx]), start_idx
Using regex invokes this function , which significantly impacts the performance of adapt . It is an N! operation .
15,894
def __best_intent(self, parse_result, context=[]):
    """Decide the best intent.

    Runs every registered intent parser over the parse result's tags
    (augmented with the supplied context entities) and keeps the intent
    with the highest confidence.

    NOTE(review): the mutable default ``context=[]`` is shared across
    calls; it is only read here, but callers should not rely on it.
    """
    best_intent = None
    best_tags = None
    # Wrap raw context entries so they look like tag entities.
    context_as_entities = [{'entities': [c]} for c in context]
    for intent in self.intent_parsers:
        i, tags = intent.validate_with_tags(
            parse_result.get('tags') + context_as_entities,
            parse_result.get('confidence'))
        if not best_intent or (i and i.get('confidence') > best_intent.get('confidence')):
            best_intent = i
            best_tags = tags
    return best_intent, best_tags
Decide the best intent
15,895
def __get_unused_context(self, parse_result, context):
    """Return the context entries whose keys were not consumed by the
    parse result (i.e. keys absent from its from_context tags)."""
    used_keys = {t['key'] for t in parse_result['tags'] if t['from_context']}
    return [c for c in context if c['key'] not in used_keys]
Used to get unused context from context . Any keys not in parse_result
15,896
def register_entity(self, entity_value, entity_type, alias_of=None):
    """Register an entity to be tagged in potential parse results.

    The value is indexed under its lower-cased form; when ``alias_of`` is
    given the stored payload points at the canonical value instead.  The
    entity type itself is also registered as a Concept.
    """
    canonical = alias_of if alias_of else entity_value
    self.trie.insert(entity_value.lower(), data=(canonical, entity_type))
    self.trie.insert(entity_type.lower(), data=(entity_type, 'Concept'))
Register an entity to be tagged in potential parse results
15,897
def register_intent_parser(self, intent_parser):
    """Enforce the intent parser interface at registration time.

    Raises ValueError when the object lacks a callable ``validate``.
    """
    validator = getattr(intent_parser, 'validate', None)
    if not callable(validator):
        raise ValueError("%s is not an intent parser" % str(intent_parser))
    self.intent_parsers.append(intent_parser)
Enforce the intent parser interface at registration time .
15,898
def tokenizer(self):
    """A property linking into the default domain's tokenizer, registering
    domain 0 on first access."""
    default_domain = 0
    if default_domain not in self.domains:
        self.register_domain(domain=default_domain)
    return self.domains[default_domain].tokenizer
A property to link into IntentEngine s tokenizer .
15,899
def trie(self):
    """A property linking into the default domain's trie, registering
    domain 0 on first access."""
    default_domain = 0
    if default_domain not in self.domains:
        self.register_domain(domain=default_domain)
    return self.domains[default_domain].trie
A property to link into IntentEngine s trie .