idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
45,100
def run_index_cmd(name, cmd):
    """Run a shell command that builds the *name* index.

    Writes progress to stderr; if the command exits non-zero, the
    command's captured stdout/stderr are echoed to our stderr.
    """
    sys.stderr.write("Creating {} index...\n".format(name))
    p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
    stdout, stderr = p.communicate()
    if p.returncode != 0:
        sys.stderr.write("Index for {} failed\n".format(name))
        # Popen without text mode yields bytes; decode so these writes
        # do not raise TypeError on Python 3 text streams.
        sys.stderr.write(stdout.decode(errors="replace"))
        sys.stderr.write(stderr.decode(errors="replace"))
Run command show errors if the returncode is non - zero .
45,101
def scan_cgroups(subsys_name, filters=list()):
    """Build and return the cgroup hierarchy for *subsys_name*.

    The filters are applied to every collected cgroup; see
    CGroup.apply_filters for their semantics.
    """
    status = SubsystemStatus()
    if subsys_name not in status.get_all():
        raise NoSuchSubsystemError("No such subsystem found: " + subsys_name)
    if subsys_name not in status.get_available():
        raise EnvironmentError("Disabled in the kernel: " + subsys_name)
    if subsys_name not in status.get_enabled():
        raise EnvironmentError("Not enabled in the system: " + subsys_name)
    mount_point = status.get_path(subsys_name)
    subsystem = _get_subsystem(subsys_name)
    return _scan_cgroups_recursive(subsystem, mount_point, mount_point, filters)
It returns a control group hierarchy which belong to the subsys_name . When collecting cgroups filters are applied to the cgroups . See pydoc of apply_filters method of CGroup for more information about the filters .
45,102
def walk_cgroups(cgroup, action, opaque):
    """Pre-order traversal: invoke action(node, opaque) on *cgroup*
    and every descendant."""
    stack = [cgroup]
    while stack:
        node = stack.pop()
        action(node, opaque)
        stack.extend(reversed(node.childs))
The function applies the action function with the opaque object to each control group under the cgroup recursively .
45,103
def get_cgroup(fullpath):
    """Return the CGroup object addressed by *fullpath*."""
    fullpath = os.path.realpath(fullpath)
    status = SubsystemStatus()
    name = None
    for name, path in status.paths.items():
        if path in fullpath:  # substring match against the mount path
            break
    else:
        raise Exception('Invalid path: ' + fullpath)
    return CGroup(_get_subsystem(name), fullpath)
It returns a CGroup object which is pointed by the fullpath .
45,104
def parse(content):
    """Parse the contents of rdma.current / rdma.max into a dict
    keyed by device name."""
    ret = {}
    for line in content.split('\n'):
        m = RdmaStat._RE.match(line)
        if m is None:
            continue
        hca_object = m.group('hca_object')
        # "max" is kept verbatim; everything else is numeric
        if hca_object != "max":
            hca_object = long(hca_object)
        ret[m.group('name')] = {
            "hca_handle": long(m.group('hca_handle')),
            "hca_object": hca_object,
        }
    return ret
Parse rdma.current and rdma.max
45,105
def apply_filters(self, filters):
    """Restrict self.configs / self.stats to the given filter names.

    Raises NoSuchControlFileError when a name matches neither group.
    """
    configs, stats = self.configs, self.stats
    self.configs, self.stats = {}, {}
    for name in filters:
        if name in configs:
            self.configs[name] = configs[name]
        elif name in stats:
            self.stats[name] = stats[name]
        else:
            raise NoSuchControlFileError(
                "%s for %s" % (name, self.subsystem.name))
It applies the specified filters. The filters are used to reduce the control groups which are accessed by get_configs, get_stats and get_defaults methods.
45,106
def get_configs(self):
    """Return {name: parsed current value} for config-group control
    files that exist; reads failing with EOPNOTSUPP are skipped."""
    configs = {}
    for name, default in self.configs.items():
        path = self.paths[name]
        if not os.path.exists(path):
            continue
        try:
            parser = self._PARSERS[default.__class__]
            configs[name] = parser(fileops.read(path))
        except IOError as e:
            if e.errno != errno.EOPNOTSUPP:
                raise
    return configs
It returns a name and a current value pairs of control files which are categorised in the configs group .
45,107
def get_stats(self):
    """Return {name: parsed value} for stats-group control files.

    Files that do not exist are skipped, as are reads failing with
    EOPNOTSUPP or EIO (some kernels reject reads on these files).
    """
    stats = {}
    for name, cls in self.stats.items():
        path = self.paths[name]
        if os.path.exists(path):
            try:
                stats[name] = self._PARSERS[cls](fileops.read(path))
            except IOError as e:
                # Bug fix: the original used two separate `if` tests,
                # so an EOPNOTSUPP error fell through to the EIO test's
                # `else` and was re-raised. Treat both as ignorable.
                if e.errno not in (errno.EOPNOTSUPP, errno.EIO):
                    raise
    return stats
It returns a name and a value pairs of control files which are categorised in the stats group .
45,108
def update(self):
    """Refresh self.pids / self.n_procs from this cgroup's procs file.

    Note: n_procs counts raw lines (including blanks), matching the
    original behavior.
    """
    lines = fileops.readlines(self.paths['cgroup.procs'])
    self.pids = [int(entry) for entry in lines if entry != '']
    self.n_procs = len(lines)
It updates process information of the cgroup .
45,109
def wait(self):
    """Block until a configured threshold event fires.

    Returns the unpacked 64-bit event counter as a 1-tuple.
    """
    # Bug fix: 64 / 8 is a float on Python 3, which os.read rejects;
    # use the integer byte count of a u64 instead.
    data = os.read(self.event_fd, 8)
    return struct.unpack('Q', data)
It returns when an event which we have configured by set_threshold happens . Note that it blocks until then .
45,110
def dumpf(obj, path):
    """Serialize an nginx configuration object to *path*.

    Returns the path for caller convenience.
    """
    with open(path, 'w') as handle:
        dump(obj, handle)
    return path
Write an nginx configuration to file .
45,111
def as_strings(self):
    """Render the whole Conf as a flat list of nginx config strings."""
    ret = []
    for child in self.children:
        if isinstance(child, (Key, Comment)):
            ret.append(child.as_strings)
        else:
            ret.extend(child.as_strings)
    if ret:
        # collapse trailing blank lines after the final closing brace
        ret[-1] = re.sub('}\n+$', '}\n', ret[-1])
    return ret
Return the entire Conf as nginx config strings .
45,112
def as_list(self):
    """Return [name, value, children-as-nested-lists]."""
    nested = [child.as_list for child in self.children]
    return [self.name, self.value, nested]
Return all child objects in nested lists of strings .
45,113
def as_dict(self):
    """Return this object as a single-key dict of nested child dicts."""
    key = '{0} {1}'.format(self.name, self.value)
    return {key: [child.as_dict for child in self.children]}
Return all child objects in nested dict .
45,114
def as_strings(self):
    """Return the entire Container as nginx config strings."""
    ret = []
    # opening line, e.g. "server {" or "location / {", indented by depth
    container_title = (INDENT * self._depth)
    container_title += '{0}{1} {{\n'.format(
        self.name, (' {0}'.format(self.value) if self.value else ''))
    ret.append(container_title)
    for x in self.children:
        if isinstance(x, Key):
            ret.append(INDENT + x.as_strings)
        elif isinstance(x, Comment):
            if x.inline and len(ret) >= 1:
                # attach inline comments to the end of the previous line
                ret[-1] = ret[-1].rstrip('\n') + ' ' + x.as_strings
            else:
                ret.append(INDENT + x.as_strings)
        elif isinstance(x, Container):
            # nested containers are separated by a blank line and
            # indented one extra level
            y = x.as_strings
            ret.append('\n' + y[0])
            for z in y[1:]:
                ret.append(INDENT + z)
        else:
            y = x.as_strings
            ret.append(INDENT + y)
    # collapse blank lines that accumulated before the closing brace
    ret[-1] = re.sub('}\n+$', '}\n', ret[-1])
    ret.append('}\n\n')
    return ret
Return the entire Container as nginx config strings .
45,115
def as_strings(self):
    """Render this key as a single nginx config line, quoting values
    that contain ';' or '#' (unless they already contain quotes)."""
    if self.value == '' or self.value is None:
        return '{0};\n'.format(self.name)
    needs_quoting = '"' not in self.value and (';' in self.value or '#' in self.value)
    if needs_quoting:
        return '{0} "{1}";\n'.format(self.name, self.value)
    return '{0} {1};\n'.format(self.name, self.value)
Return key as nginx config string .
45,116
def convert_aws_args(aws_args):
    """Translate legacy "aws" config keys to boto3.session.Session
    keyword arguments; unknown keys pass through unchanged."""
    if not isinstance(aws_args, dict):
        raise errors.InvalidConfiguration(
            'Elastic DocManager config option "aws" must be a dict')
    renames = {
        "region": "region_name",
        "access_id": "aws_access_key_id",
        "secret_key": "aws_secret_access_key",
    }
    return {renames.get(key, key): value for key, value in aws_args.items()}
Convert old style options into arguments to boto3 . session . Session .
45,117
def _index_and_mapping ( self , namespace ) : index , doc_type = namespace . split ( "." , 1 ) return index . lower ( ) , doc_type
Helper method for getting the index and type from a namespace .
45,118
def _stream_search(self, *args, **kwargs):
    """Yield _source dicts (with _id injected) for every hit of a
    scrolling Elasticsearch search."""
    body = kwargs.pop("body", None)
    for hit in scan(self.elastic, query=body, scroll="10m", **kwargs):
        source = hit["_source"]
        source["_id"] = hit["_id"]
        yield source
Helper method for iterating over ES search results .
45,119
def search(self, start_ts, end_ts):
    """Stream documents whose _ts falls within [start_ts, end_ts]."""
    time_range = {"range": {"_ts": {"gte": start_ts, "lte": end_ts}}}
    return self._stream_search(
        index=self.meta_index_name, body={"query": time_range})
Query Elasticsearch for documents in a time range .
45,120
def commit(self):
    """Flush buffered bulk operations, then refresh every index."""
    self.send_buffered_operations()
    # index="" refreshes all indexes; retried until ES responds
    retry_until_ok(self.elastic.indices.refresh, index="")
Send buffered requests and refresh all indexes .
45,121
def add_upsert(self, action, meta_action, doc_source, update_spec):
    """Queue an upsert: plain inserts record their source locally and
    are indexed; updates are indexed then scheduled for a source fetch."""
    if not update_spec:
        if doc_source:
            self.add_to_sources(action, doc_source)
        self.bulk_index(action, meta_action)
        return
    self.bulk_index(action, meta_action)
    # the action and its meta action were just appended, so the action
    # itself sits two slots from the end of the buffer
    self.add_doc_to_update(action, update_spec, len(self.action_buffer) - 2)
Function which stores sources for insert actions and decides, for update actions, whether docs have to be added to the get-source buffer.
45,122
def add_doc_to_update(self, action, update_spec, action_buffer_index):
    """Record a pending update; flags whether the current source must
    be fetched from Elasticsearch via mGET."""
    doc_ref = {
        "_index": action["_index"],
        "_type": action["_type"],
        "_id": action["_id"],
    }
    needs_fetch = self.should_get_id(action)
    self.doc_to_update.append(
        (doc_ref, update_spec, action_buffer_index, needs_fetch))
Prepare document for update based on Elasticsearch response . Set flag if document needs to be retrieved from Elasticsearch
45,123
def get_docs_sources_from_ES(self):
    """mGET the sources of all queued docs flagged for ES retrieval.

    Returns an iterator over the response docs (empty if none flagged).
    """
    wanted = [entry[0] for entry in self.doc_to_update if entry[3]]
    if not wanted:
        return iter([])
    response = self.docman.elastic.mget(body={"docs": wanted}, realtime=True)
    return iter(response["docs"])
Get document sources using MGET elasticsearch API
45,124
def update_sources(self):
    """Update local sources based on responses from Elasticsearch."""
    ES_documents = self.get_docs_sources_from_ES()
    for doc, update_spec, action_buffer_index, get_from_ES in self.doc_to_update:
        if get_from_ES:
            # source comes from the mGET response, consumed in queue order
            ES_doc = next(ES_documents)
            if ES_doc["found"]:
                source = ES_doc["_source"]
            else:
                LOG.error(
                    "mGET: Document id: %s has not been found "
                    "in Elasticsearch. Due to that "
                    "following update failed: %s",
                    doc["_id"],
                    update_spec,
                )
                self.reset_action(action_buffer_index)
                continue
        else:
            source = self.get_from_sources(doc["_index"], doc["_type"], doc["_id"])
            if not source:
                LOG.error(
                    "mGET: Document id: %s has not been found "
                    "in local sources. Due to that following "
                    "update failed: %s",
                    doc["_id"],
                    update_spec,
                )
                self.reset_action(action_buffer_index)
                continue
        updated = self.docman.apply_update(source, update_spec)
        # _id lives in the action metadata, not in the document body
        if "_id" in updated:
            del updated["_id"]
        self.add_to_sources(doc, updated)
        self.action_buffer[action_buffer_index]["_source"] = self.docman._formatter.format_document(updated)
    # drop actions that reset_action() blanked out above
    self.action_buffer = [each_action for each_action in self.action_buffer if each_action]
Update local sources based on response from Elasticsearch
45,125
def add_to_sources(self, action, doc_source):
    """Cache *doc_source* locally under (index, type, id)."""
    by_type = self.sources.setdefault(action["_index"], {})
    by_id = by_type.setdefault(action["_type"], {})
    by_id[action["_id"]] = doc_source
Store sources locally
45,126
def get_from_sources(self, index, doc_type, document_id):
    """Look up a locally cached source; returns {} when absent."""
    try:
        return self.sources[index][doc_type][document_id]
    except KeyError:
        return {}
Get source stored locally
45,127
def clean_up(self):
    """Reset every per-batch buffer to its empty state."""
    self.action_buffer = []
    self.doc_to_update = []
    self.sources = {}
    self.doc_to_get = {}
Do clean - up before returning buffer
45,128
def get_buffer(self):
    """Finalize pending updates and hand back the bulk action buffer,
    clearing internal state afterwards."""
    if self.doc_to_update:
        self.update_sources()
    buffered = self.action_buffer
    self.clean_up()
    return buffered
Get buffer which needs to be bulked to elasticsearch
45,129
def run(self):
    """Continuously scan for BLE advertisements until keep_going is
    cleared (see terminate())."""
    self.socket = self.bluez.hci_open_dev(self.bt_device_id)
    # receive every HCI event packet on this socket
    filtr = self.bluez.hci_filter_new()
    self.bluez.hci_filter_all_events(filtr)
    self.bluez.hci_filter_set_ptype(filtr, self.bluez.HCI_EVENT_PKT)
    self.socket.setsockopt(self.bluez.SOL_HCI, self.bluez.HCI_FILTER, filtr)
    self.set_scan_parameters()
    self.toggle_scan(True)
    while self.keep_going:
        pkt = self.socket.recv(255)
        # byte 1: HCI event code, byte 3: LE meta sub-event code
        event = to_int(pkt[1])
        subevent = to_int(pkt[3])
        if event == LE_META_EVENT and subevent == EVT_LE_ADVERTISING_REPORT:
            self.process_packet(pkt)
    self.socket.close()
Continuously scan for BLE advertisements.
45,130
def set_scan_parameters(self, scan_type=ScanType.ACTIVE, interval_ms=10,
                        window_ms=10, address_type=BluetoothAddressType.RANDOM,
                        filter_type=ScanFilter.ALL):
    """Set the BLE (LE) scan parameters via an HCI command.

    interval_ms / window_ms must lie in 2.5ms .. 10240ms; they are
    converted to controller fractions before being packed.
    """
    interval_fractions = interval_ms / MS_FRACTION_DIVIDER
    if interval_fractions < 0x0004 or interval_fractions > 0x4000:
        # Bug fix: report the caller's ms value — the original message
        # printed the converted fraction count while talking about ms.
        raise ValueError(
            "Invalid interval given {}, must be in range of 2.5ms to 10240ms!".format(
                interval_ms))
    window_fractions = window_ms / MS_FRACTION_DIVIDER
    if window_fractions < 0x0004 or window_fractions > 0x4000:
        raise ValueError(
            "Invalid window given {}, must be in range of 2.5ms to 10240ms!".format(
                window_ms))
    interval_fractions, window_fractions = int(interval_fractions), int(window_fractions)
    scan_parameter_pkg = struct.pack(
        ">BHHBB", scan_type, interval_fractions, window_fractions,
        address_type, filter_type)
    self.bluez.hci_send_cmd(
        self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_PARAMETERS, scan_parameter_pkg)
Sets the LE scan parameters.
45,131
def toggle_scan(self, enable, filter_duplicates=False):
    """Enable or disable BLE scanning via HCI LE Set Scan Enable."""
    payload = struct.pack(">BB", enable, filter_duplicates)
    self.bluez.hci_send_cmd(
        self.socket, OGF_LE_CTL, OCF_LE_SET_SCAN_ENABLE, payload)
Enables or disables BLE scanning
45,132
def process_packet(self, pkt):
    """Parse the packet and call callback if one of the filters matches."""
    # cheap pre-filter on the raw bytes: only look at frames matching
    # the scanner's enabled modes (iBeacon / Eddystone / Estimote)
    if not (((self.mode & ScannerMode.MODE_IBEACON) and (pkt[19:23] == b"\x4c\x00\x02\x15")) or ((self.mode & ScannerMode.MODE_EDDYSTONE) and (pkt[19:21] == b"\xaa\xfe")) or ((self.mode & ScannerMode.MODE_ESTIMOTE) and (pkt[19:21] == b"\x9a\xfe"))):
        return
    bt_addr = bt_addr_to_string(pkt[7:13])
    rssi = bin_to_int(pkt[-1])  # final byte is the signed RSSI
    packet = parse_packet(pkt[14:-1])
    if not packet:
        return
    self.save_bt_addr(packet, bt_addr)
    properties = self.get_properties(packet, bt_addr)
    if self.device_filter is None and self.packet_filter is None:
        # no filters configured: deliver everything
        self.callback(bt_addr, rssi, packet, properties)
    elif self.device_filter is None:
        # packet-type filter only
        if is_one_of(packet, self.packet_filter):
            self.callback(bt_addr, rssi, packet, properties)
    else:
        # device filters present; packet filter (if any) applies first
        if self.packet_filter and not is_one_of(packet, self.packet_filter):
            return
        for filtr in self.device_filter:
            if isinstance(filtr, BtAddrFilter):
                if filtr.matches({'bt_addr': bt_addr}):
                    self.callback(bt_addr, rssi, packet, properties)
                    return
            elif filtr.matches(properties):
                self.callback(bt_addr, rssi, packet, properties)
                return
Parse the packet and call callback if one of the filters matches .
45,133
def save_bt_addr(self, packet, bt_addr):
    """Remember the UID-frame properties seen for *bt_addr*,
    replacing any previous mapping for that address."""
    if not isinstance(packet, EddystoneUIDFrame):
        return
    kept = [m for m in self.eddystone_mappings if m[0] != bt_addr]
    kept.append((bt_addr, packet.properties))
    self.eddystone_mappings = kept
Add to the list of mappings .
45,134
def get_properties(self, packet, bt_addr):
    """Resolve beacon properties; frames without their own identity
    (TLM / URL / encrypted TLM / EID) fall back to the saved mapping."""
    anonymous_frames = [EddystoneTLMFrame, EddystoneURLFrame,
                        EddystoneEncryptedTLMFrame, EddystoneEIDFrame]
    if is_one_of(packet, anonymous_frames):
        return self.properties_from_mapping(bt_addr)
    return packet.properties
Get properties of beacon depending on type .
45,135
def terminate(self):
    """Stop scanning, signal the scan loop to exit, and wait for it."""
    self.toggle_scan(False)
    self.keep_going = False
    self.join()
Signal runner to stop and join thread .
45,136
def data_to_uuid(data):
    """Format 16 bytes of beacon data as an iBeacon UUID string."""
    s = data_to_hexstring(data)
    groups = (s[0:8], s[8:12], s[12:16], s[16:20], s[20:32])
    return '-'.join(groups)
Convert an array of binary data to the iBeacon uuid format .
45,137
def bt_addr_to_string(addr):
    """Convert a 6-byte little-endian BT address to "aa:bb:..." form."""
    addr_bytes = array.array('B', addr)
    addr_bytes.reverse()
    # Bug fix: array.tostring() was removed in Python 3.9; tobytes()
    # is the supported equivalent.
    hex_str = hexlify(addr_bytes.tobytes()).decode('ascii')
    return ':'.join(a + b for a, b in zip(hex_str[::2], hex_str[1::2]))
Convert a binary string to the hex representation .
45,138
def is_one_of(obj, types):
    """Return True iff *obj* is an instance of at least one of *types*."""
    return any(isinstance(obj, t) for t in types)
Return true iff obj is an instance of one of the types .
45,139
def is_packet_type(cls):
    """Check whether *cls* is one of the known beacon packet classes."""
    from .packet_types import (
        EddystoneUIDFrame, EddystoneURLFrame, EddystoneEncryptedTLMFrame,
        EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement,
        EstimoteTelemetryFrameA, EstimoteTelemetryFrameB)
    known = (EddystoneURLFrame, EddystoneUIDFrame, EddystoneEncryptedTLMFrame,
             EddystoneTLMFrame, EddystoneEIDFrame, IBeaconAdvertisement,
             EstimoteTelemetryFrameA, EstimoteTelemetryFrameB)
    return cls in known
Check if class is one of the packet types.
45,140
def bin_to_int(string):
    """Interpret a single byte (str on Py2, int on Py3) as a signed int."""
    if isinstance(string, str):
        return struct.unpack("b", string)[0]
    return struct.unpack("b", bytes([string]))[0]
Convert a one element byte string to signed int for python 2 support .
45,141
def get_mode(device_filter):
    """Derive the scanner mode bitmask from the device filter list."""
    from .device_filters import IBeaconFilter, EddystoneFilter, BtAddrFilter, EstimoteFilter
    if not device_filter:
        return ScannerMode.MODE_ALL
    mode = ScannerMode.MODE_NONE
    for filtr in device_filter:
        if isinstance(filtr, IBeaconFilter):
            mode |= ScannerMode.MODE_IBEACON
        elif isinstance(filtr, EddystoneFilter):
            mode |= ScannerMode.MODE_EDDYSTONE
        elif isinstance(filtr, EstimoteFilter):
            mode |= ScannerMode.MODE_ESTIMOTE
        elif isinstance(filtr, BtAddrFilter):
            # a raw-address filter can match any beacon type
            mode |= ScannerMode.MODE_ALL
            break
    return mode
Determine which beacons the scanner should look for .
45,142
def matches(self, filter_props):
    """True iff every supplied property present on this filter agrees,
    and at least one supplied property is actually present."""
    if filter_props is None:
        return False
    matched_any = False
    for key, expected in filter_props.items():
        if key not in self.properties:
            continue
        if self.properties[key] != expected:
            return False
        matched_any = True
    return matched_any
Check if the filter matches the supplied properties .
45,143
def parse_packet(packet):
    """Try the LTV (Eddystone/Estimote) parser first, then iBeacon."""
    frame = parse_ltv_packet(packet)
    if frame is None:
        frame = parse_ibeacon_packet(packet)
    return frame
Parse a beacon advertisement packet .
45,144
def parse_ltv_packet(packet):
    """Parse a type-length-value advertisement; None if unrecognized
    or malformed (ConstructError)."""
    try:
        for ltv in LTVFrame.parse(packet):
            if ltv['type'] != SERVICE_DATA_TYPE:
                continue
            data = ltv['value']
            service = data["service_identifier"]
            if service == EDDYSTONE_UUID:
                return parse_eddystone_service_data(data)
            if service == ESTIMOTE_UUID:
                return parse_estimote_service_data(data)
    except ConstructError:
        return None
    return None
Parse a tag - length - value style beacon packet .
45,145
def parse_eddystone_service_data(data):
    """Dispatch an Eddystone service-data frame to its frame class;
    None for unknown frame types or TLM versions."""
    frame_type = data['frame_type']
    if frame_type == EDDYSTONE_UID_FRAME:
        return EddystoneUIDFrame(data['frame'])
    if frame_type == EDDYSTONE_TLM_FRAME:
        version = data['frame']['tlm_version']
        if version == EDDYSTONE_TLM_ENCRYPTED:
            return EddystoneEncryptedTLMFrame(data['frame']['data'])
        if version == EDDYSTONE_TLM_UNENCRYPTED:
            return EddystoneTLMFrame(data['frame']['data'])
        return None
    if frame_type == EDDYSTONE_URL_FRAME:
        return EddystoneURLFrame(data['frame'])
    if frame_type == EDDYSTONE_EID_FRAME:
        return EddystoneEIDFrame(data['frame'])
    return None
Parse Eddystone service data .
45,146
def parse_estimote_service_data(data):
    """Parse Estimote telemetry service data; None for other frames."""
    if data['frame_type'] & 0xF != ESTIMOTE_TELEMETRY_FRAME:
        return None
    # upper nibble of frame_type carries the protocol version
    protocol_version = (data['frame_type'] & 0xF0) >> 4
    subframe = data['frame']['subframe_type']
    if subframe == ESTIMOTE_TELEMETRY_SUBFRAME_A:
        return EstimoteTelemetryFrameA(data['frame'], protocol_version)
    if subframe == ESTIMOTE_TELEMETRY_SUBFRAME_B:
        return EstimoteTelemetryFrameB(data['frame'], protocol_version)
    return None
Parse Estimote service data .
45,147
def parse_motion_state(val):
    """Decode a motion-state byte (6-bit count + 2-bit unit) to seconds."""
    count = val & 0b00111111
    unit = (val & 0b11000000) >> 6
    if unit == 0:
        return count                 # seconds
    if unit == 1:
        return count * 60            # minutes
    if unit == 2:
        return count * 3600          # hours
    if count < 32:
        return count * 86400         # days (unit == 3)
    return (count - 32) * 604800     # weeks (unit == 3, count >= 32)
Convert motion state byte to seconds .
45,148
def monkey_patch(cls):
    """On Read The Docs, register *cls* in place of the zbarlight C
    extension so the docs can build without compiling it."""
    if os.environ.get('READTHEDOCS', False):
        sys.modules['zbarlight._zbarlight'] = cls
Monkey patch the zbarlight C extension on Read The Docs.
45,149
def set_pixel(framebuf, x, y, color):
    """Set pixel (x, y) in a vertically-banded (8-rows-per-byte) buffer."""
    index = (y >> 3) * framebuf.stride + x
    bit = 0x01 << (y & 0x07)
    if color:
        framebuf.buf[index] |= bit
    else:
        framebuf.buf[index] &= ~bit & 0xFF
Set a given pixel to a color .
45,150
def get_pixel(framebuf, x, y):
    """Return the 0/1 color of pixel (x, y)."""
    byte = framebuf.buf[(y >> 3) * framebuf.stride + x]
    return (byte >> (y & 0x07)) & 0x01
Get the color of a given pixel
45,151
def pixel(self, x, y, color=None):
    """Get (color omitted) or set the pixel at (x, y), honoring the
    display rotation; out-of-bounds coordinates are ignored."""
    if self.rotation == 1:
        x, y = y, x
        x = self.width - x - 1
    elif self.rotation == 2:
        x = self.width - x - 1
        y = self.height - y - 1
    elif self.rotation == 3:
        x, y = y, x
        y = self.height - y - 1
    if not (0 <= x < self.width and 0 <= y < self.height):
        return None
    if color is None:
        return self.format.get_pixel(self, x, y)
    self.format.set_pixel(self, x, y, color)
    return None
If color is not given get the color value of the specified pixel . If color is given set the specified pixel to the given color .
45,152
def hline(self, x, y, width, color):
    """Draw a horizontal line of *width* pixels starting at (x, y)."""
    self.rect(x, y, width, 1, color, fill=True)
Draw a horizontal line up to a given length .
45,153
def vline(self, x, y, height, color):
    """Draw a vertical line of *height* pixels starting at (x, y)."""
    self.rect(x, y, 1, height, color, fill=True)
Draw a vertical line up to a given length .
45,154
def rect(self, x, y, width, height, color, *, fill=False):
    """Draw a rectangle at the given location, size and color.

    With fill=False only a 1-pixel outline is drawn.
    """
    # translate the rectangle into native (unrotated) coordinates
    if self.rotation == 1:
        x, y = y, x
        width, height = height, width
        x = self.width - x - width
    if self.rotation == 2:
        x = self.width - x - width
        y = self.height - y - height
    if self.rotation == 3:
        x, y = y, x
        width, height = height, width
        y = self.height - y - height
    # reject degenerate or fully off-screen rectangles
    if width < 1 or height < 1 or (x + width) <= 0 or (y + height) <= 0 or y >= self.height or x >= self.width:
        return
    # clip to the screen bounds
    x_end = min(self.width - 1, x + width - 1)
    y_end = min(self.height - 1, y + height - 1)
    x = max(x, 0)
    y = max(y, 0)
    if fill:
        self.format.fill_rect(self, x, y, x_end - x + 1, y_end - y + 1, color)
    else:
        # four 1-pixel-thick edges: top, left, bottom, right
        self.format.fill_rect(self, x, y, x_end - x + 1, 1, color)
        self.format.fill_rect(self, x, y, 1, y_end - y + 1, color)
        self.format.fill_rect(self, x, y_end, x_end - x + 1, 1, color)
        self.format.fill_rect(self, x_end, y, 1, y_end - y + 1, color)
Draw a rectangle at the given location size and color . The rect method draws only a 1 pixel outline .
45,155
def line(self, x_0, y_0, x_1, y_1, color):
    """Draw a line between two points using Bresenham's algorithm."""
    dx = abs(x_1 - x_0)
    dy = abs(y_1 - y_0)
    x, y = x_0, y_0
    step_x = 1 if x_0 <= x_1 else -1
    step_y = 1 if y_0 <= y_1 else -1
    if dx > dy:
        # x is the driving axis
        err = dx / 2.0
        while x != x_1:
            self.pixel(x, y, color)
            err -= dy
            if err < 0:
                y += step_y
                err += dx
            x += step_x
    else:
        # y is the driving axis
        err = dy / 2.0
        while y != y_1:
            self.pixel(x, y, color)
            err -= dx
            if err < 0:
                x += step_x
                err += dy
            y += step_y
    self.pixel(x, y, color)
Bresenham's line algorithm.
45,156
def scroll(self, delta_x, delta_y):
    """Shift the framebuffer contents by (delta_x, delta_y) pixels.

    Iterates from the far edge back toward the source region so each
    pixel is read before it is overwritten.
    """
    if delta_x < 0:
        shift_x = 0
        xend = self.width + delta_x
        dt_x = 1
    else:
        shift_x = self.width - 1
        xend = delta_x - 1
        dt_x = -1
    if delta_y < 0:
        y = 0
        yend = self.height + delta_y
        dt_y = 1
    else:
        y = self.height - 1
        yend = delta_y - 1
        dt_y = -1
    while y != yend:
        x = shift_x
        while x != xend:
            # destination (x, y) takes the value of the source pixel
            self.format.set_pixel(self, x, y, self.format.get_pixel(self, x - delta_x, y - delta_y))
            x += dt_x
        y += dt_y
shifts framebuf in x and y direction
45,157
def text(self, string, x, y, color, *, font_name="font5x8.bin"):
    """Draw *string* at (x, y) using the bitmap font.

    NOTE(review): font_name is compared against the loaded font but is
    not passed to BitmapFont(), so a non-default font is never actually
    loaded here — confirm against the BitmapFont constructor.
    """
    if not self._font or self._font.font_name != font_name:
        self._font = BitmapFont()
    w = self._font.font_width
    for i, char in enumerate(string):
        # advance one glyph width plus a 1-pixel gap per character
        self._font.draw_char(char, x + (i * (w + 1)), y, self, color)
text is not yet implemented
45,158
def parse_auth_token_from_request(self, auth_header):
    """Extract the token from a "<prefix> <token>" Authorization
    header; raises falcon.HTTPUnauthorized on any malformation."""
    if not auth_header:
        raise falcon.HTTPUnauthorized(
            description='Missing Authorization Header')
    parts = auth_header.split()
    if parts[0].lower() != self.auth_header_prefix.lower():
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: '
                        'Must start with {0}'.format(self.auth_header_prefix))
    if len(parts) == 1:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Token Missing')
    if len(parts) > 2:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Contains extra content')
    return parts[1]
Parses and returns the Auth token from the request header. Raises a falcon.HTTPUnauthorized exception with a proper error message.
45,159
def authenticate(self, req, resp, resource):
    """Decode the request's JWT, load the matching user, and return it.

    Raises falcon.HTTPUnauthorized when no user matches the payload.
    """
    payload = self._decode_jwt_token(req)
    user = self.user_loader(payload)
    if user:
        return user
    raise falcon.HTTPUnauthorized(description='Invalid JWT Credentials')
Extract the auth token from the request authorization header, decode the JWT token, verify the configured claims and return either a user object if successful, else raise a falcon.HTTPUnauthorized exception.
45,160
def get_auth_token(self, user_payload):
    """Build and sign a JWT carrying *user_payload*, populating only
    the claims this backend is configured to verify."""
    now = datetime.utcnow()
    payload = {'user': user_payload}
    if 'iat' in self.verify_claims:
        payload['iat'] = now
    if 'nbf' in self.verify_claims:
        payload['nbf'] = now + self.leeway
    if 'exp' in self.verify_claims:
        payload['exp'] = now + self.expiration_delta
    if self.audience is not None:
        payload['aud'] = self.audience
    if self.issuer is not None:
        payload['iss'] = self.issuer
    token = jwt.encode(payload, self.secret_key, algorithm=self.algorithm,
                       json_encoder=ExtendedJSONEncoder)
    return token.decode('utf-8')
Create a JWT authentication token from user_payload
45,161
def get_auth_token(self, user_payload):
    """Return "<prefix> <token>" built from user_payload['token'].

    Raises ValueError when the payload carries no token.
    """
    token = user_payload.get('token')
    if not token:
        raise ValueError('`user_payload` must provide api token')
    return '{0} {1}'.format(self.auth_header_prefix, token)
Extracts token from the user_payload
45,162
def parse_auth_token_from_request(self, auth_header):
    """Validate the scheme of a Hawk Authorization header and return
    the full header; raises falcon.HTTPUnauthorized otherwise."""
    if not auth_header:
        raise falcon.HTTPUnauthorized(
            description='Missing Authorization Header')
    try:
        scheme, _ = auth_header.split(' ', 1)
    except ValueError:
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: Missing Scheme or Parameters')
    if scheme.lower() != self.auth_header_prefix.lower():
        raise falcon.HTTPUnauthorized(
            description='Invalid Authorization Header: '
                        'Must start with {0}'.format(self.auth_header_prefix))
    return auth_header
Parses and returns the Hawk Authorization header if it is present and well-formed. Raises a falcon.HTTPUnauthorized exception with a proper error message.
45,163
def _apply_base_theme(app):
    """Apply the base widget style and shared stylesheet to *app*."""
    style = 'plastique' if QT_VERSION < (5,) else 'Fusion'
    app.setStyle(style)
    with open(_STYLESHEET) as handle:
        app.setStyleSheet(handle.read())
Apply base theme to the application .
45,164
def dark(app):
    """Apply the Dark Theme to the Qt application instance."""
    _apply_base_theme(app)
    palette = QPalette()
    # normal-state color roles
    base_colors = [
        (QPalette.WindowText, (180, 180, 180)),
        (QPalette.Button, (53, 53, 53)),
        (QPalette.Light, (180, 180, 180)),
        (QPalette.Midlight, (90, 90, 90)),
        (QPalette.Dark, (35, 35, 35)),
        (QPalette.Text, (180, 180, 180)),
        (QPalette.BrightText, (180, 180, 180)),
        (QPalette.ButtonText, (180, 180, 180)),
        (QPalette.Base, (42, 42, 42)),
        (QPalette.Window, (53, 53, 53)),
        (QPalette.Shadow, (20, 20, 20)),
        (QPalette.Highlight, (42, 130, 218)),
        (QPalette.HighlightedText, (180, 180, 180)),
        (QPalette.Link, (56, 252, 196)),
        (QPalette.AlternateBase, (66, 66, 66)),
        (QPalette.ToolTipBase, (53, 53, 53)),
        (QPalette.ToolTipText, (180, 180, 180)),
    ]
    for role, rgb in base_colors:
        palette.setColor(role, QColor(*rgb))
    # disabled-state color roles
    disabled_colors = [
        (QPalette.WindowText, (127, 127, 127)),
        (QPalette.Text, (127, 127, 127)),
        (QPalette.ButtonText, (127, 127, 127)),
        (QPalette.Highlight, (80, 80, 80)),
        (QPalette.HighlightedText, (127, 127, 127)),
    ]
    for role, rgb in disabled_colors:
        palette.setColor(QPalette.Disabled, role, QColor(*rgb))
    app.setPalette(palette)
Apply Dark Theme to the Qt application instance .
45,165
def inheritance_diagram_directive(name, arguments, options, content, lineno, content_offset, block_text, state, state_machine):
    """Run when the inheritance_diagram directive is first encountered."""
    node = inheritance_diagram()
    class_names = arguments
    graph = InheritanceGraph(class_names)
    # create cross-reference nodes so each class links to its docs
    # (note: this loop variable shadows the `name` directive argument)
    for name in graph.get_all_class_names():
        refnodes, x = xfileref_role('class', ':class:`%s`' % name, name, 0, state)
        node.extend(refnodes)
    node['graph'] = graph
    node['parts'] = options.get('parts', 0)
    node['content'] = " ".join(class_names)
    return [node]
Run when the inheritance_diagram directive is first encountered .
45,166
def run_dot(self, args, name, parts=0, urls={}, graph_options={}, node_options={}, edge_options={}):
    """Run graphviz 'dot' over this graph and return its stdout.

    Raises DotException when dot is missing, mis-invoked, fails
    unexpectedly, or exits non-zero.
    """
    try:
        dot = subprocess.Popen(['dot'] + list(args), stdin=subprocess.PIPE,
                               stdout=subprocess.PIPE, close_fds=True)
    except OSError:
        raise DotException("Could not execute 'dot'. Are you sure you have 'graphviz' installed?")
    except ValueError:
        raise DotException("'dot' called with invalid arguments")
    except Exception:
        # Bug fix: narrowed from a bare `except:` so KeyboardInterrupt
        # and SystemExit propagate instead of being masked.
        raise DotException("Unexpected error calling 'dot'")
    self.generate_dot(dot.stdin, name, parts, urls, graph_options,
                      node_options, edge_options)
    dot.stdin.close()
    result = dot.stdout.read()
    returncode = dot.wait()
    if returncode != 0:
        raise DotException("'dot' returned the errorcode %d" % returncode)
    return result
Run graphviz dot over this graph returning whatever dot writes to stdout .
45,167
def parse_md_to_rst(file):
    """Convert a Markdown file to reStructuredText via m2r, falling
    back to the raw contents when m2r is unavailable."""
    try:
        from m2r import parse_from_file
        converted = parse_from_file(file)
        return converted.replace("artwork/", "http://198.27.119.65/")
    except ImportError:
        return read(file)
Read Markdown file and convert to ReStructured Text .
45,168
def deleted_count(self):
    """The number of documents deleted: lists (from delete_many) are
    counted, scalar raw results pass through unchanged."""
    raw = self.raw_result
    return len(raw) if isinstance(raw, list) else raw
The number of documents deleted .
45,169
def generate_id():
    """Return a fresh UUID1 as a dash-free hex string; works on both
    Python 2 (unicode) and Python 3 (str)."""
    try:
        text = unicode(uuid1())
    except NameError:  # Python 3: unicode() does not exist
        text = str(uuid1())
    return text.replace(u"-", u"")
Generate new UUID
45,170
def insert(self, docs, *args, **kwargs):
    """Backwards-compatible insert: lists go to insert_many, a single
    document to insert_one."""
    handler = self.insert_many if isinstance(docs, list) else self.insert_one
    return handler(docs, *args, **kwargs)
Backwards compatibility with insert
45,171
def update(self, query, doc, *args, **kwargs):
    """Backwards-compatible update: a list updates item-by-item, a
    single doc maps to one update_one call."""
    if not isinstance(doc, list):
        return self.update_one(query, doc, *args, **kwargs)
    return [self.update_one(query, item, *args, **kwargs) for item in doc]
Backwards compatibility with update
45,172
def update_one(self, query, doc):
    """Update the first document matching *query*.

    Accepts either a plain replacement doc or a mongo-style
    ``{"$set": {...}}`` wrapper.

    :returns: UpdateResult wrapping TinyDB's raw result (None on failure)
    """
    if self.table is None:
        self.build_table()
    if u"$set" in doc:
        # Unwrap $set syntax; only the inner fields are applied.
        doc = doc[u"$set"]
    allcond = self.parse_query(query)
    try:
        result = self.table.update(doc, allcond)
    except Exception:
        # Fix: narrowed from a bare 'except:' so KeyboardInterrupt and
        # SystemExit propagate; failed updates still report raw_result=None.
        result = None
    return UpdateResult(raw_result=result)
Updates one element of the collection
45,173
def find(self, filter=None, sort=None, skip=None, limit=None, *args, **kwargs):
    """Find all documents matching *filter*.

    :returns: a TinyMongoCursor over the matches (all docs when filter
        is None; empty on unsearchable conditions)
    """
    if self.table is None:
        self.build_table()
    if filter is None:
        docs = self.table.all()
    else:
        cond = self.parse_query(filter)
        try:
            docs = self.table.search(cond)
        except (AttributeError, TypeError):
            docs = []
    return TinyMongoCursor(docs, sort=sort, skip=skip, limit=limit)
Finds all matching results
45,174
def find_one(self, filter=None):
    """Return the first document matching *filter* (or None)."""
    if self.table is None:
        self.build_table()
    cond = self.parse_query(filter)
    return self.table.get(cond)
Finds one matching query element
45,175
def remove(self, spec_or_id, multi=True, *args, **kwargs):
    """Backwards-compatible remove.

    Dispatches to ``delete_many`` when *multi*, else ``delete_one``.
    """
    target = self.delete_many if multi else self.delete_one
    return target(spec_or_id)
Backwards compatibility with remove
45,176
def delete_one(self, query):
    """Delete a single document matching *query*.

    :returns: DeleteResult; when nothing matches, a result with an empty
        raw list (deleted_count 0) instead of raising.
    """
    item = self.find_one(query)
    if item is None:
        # Fix: previously this crashed with TypeError on item[u'_id']
        # when no document matched the query.
        return DeleteResult(raw_result=[])
    result = self.table.remove(where(u'_id') == item[u'_id'])
    return DeleteResult(raw_result=result)
Deletes one document from the collection
45,177
def delete_many(self, query):
    """Remove every document matching the mongo-style *query*.

    :returns: DeleteResult whose raw result lists each removal's result
    """
    removed = []
    for item in self.find(query):
        removed.append(self.table.remove(where(u'_id') == item[u'_id']))
    if query == {}:
        # Emptying the collection resets TinyDB's internal id counter.
        self.table._last_id = 0
    return DeleteResult(raw_result=removed)
Removes all items matching the mongo query
45,178
def paginate(self, skip, limit):
    """Paginate the cursor's records in place.

    :param skip: number of records to skip (falsy -> 0)
    :param limit: page size; falsy disables pagination entirely
    """
    # Nothing to do for an empty cursor or an unset limit.
    if not self.count() or not limit:
        return
    skip = skip or 0
    # Total number of pages, rounding the last partial page up.
    pages = int(ceil(self.count() / float(limit)))
    # Build a map from each page's start offset to its end offset,
    # i.e. {start: start + limit} for every full page.
    # NOTE(review): the last page's start never gets an entry, so the
    # .get() fallback below slices "to the end" (count()) for it.
    limits = {}
    last = 0
    for i in range(pages):
        current = limit * i
        limits[last] = current
        last = current
    # Convert the page size into an absolute end index for the slice.
    if limit and limit < self.count():
        limit = limits.get(skip, self.count())
    self.cursordat = self.cursordat[skip:limit]
Paginate list of records
45,179
def should_stream(proxy_response):
    """Decide whether *proxy_response* must be streamed to the client.

    HTML is never streamed. Other content streams when Content-Length is
    missing, unparsable, or larger than MIN_STREAMING_LENGTH.
    """
    if is_html_content_type(proxy_response.headers.get('Content-Type')):
        return False
    try:
        length = int(proxy_response.headers.get('Content-Length', 0))
    except ValueError:
        length = 0
    return not length or length > MIN_STREAMING_LENGTH
Function to verify if the proxy_response must be converted into a stream . This will be done by checking the proxy_response content - length and verify if its length is bigger than one stipulated by MIN_STREAMING_LENGTH .
45,180
def get_charset(content_type):
    """Extract the charset from a Content-Type header value.

    Falls back to DEFAULT_CHARSET when the header is empty or carries no
    charset parameter.
    """
    if not content_type:
        return DEFAULT_CHARSET
    match = _get_charset_re.search(content_type)
    if not match:
        return DEFAULT_CHARSET
    return match.group('charset').replace('"', '')
Function used to retrieve the charset from a content - type . If there is no charset in the content type then the charset defined on DEFAULT_CHARSET will be returned
45,181
def required_header(header):
    """Return True when *header* (a request.META key) must be forwarded.

    Everything in IGNORE_HEADERS is dropped; otherwise only HTTP_*
    entries and CONTENT_TYPE qualify.
    """
    if header in IGNORE_HEADERS:
        return False
    return header == 'CONTENT_TYPE' or header.startswith('HTTP_')
Function that verify if the header parameter is a essential header
45,182
def normalize_request_headers(request):
    r"""Build upstream headers from ``request.META``.

    Strips the leading ``HTTP_`` prefix, title-cases the name, and turns
    ``_`` into ``-`` (e.g. HTTP_X_FORWARDED_FOR -> X-Forwarded-For).
    """
    norm_headers = {}
    for header, value in request.META.items():
        if not required_header(header):
            continue
        # Fix: .replace('HTTP_', '') removed ALL occurrences, mangling
        # names like HTTP_X_HTTP_METHOD_OVERRIDE -> X-Method-Override.
        # Only the leading prefix must be stripped.
        name = header[len('HTTP_'):] if header.startswith('HTTP_') else header
        norm_headers[name.title().replace('_', '-')] = value
    return norm_headers
Function used to normalize request header names: strips the HTTP_ prefix and replaces _ with -
45,183
def encode_items(items):
    """UTF-8 encode every (key, value) pair expanded from *items*.

    *items* is an iterable of (key, values) where values is itself an
    iterable; each value yields one encoded (key, value) tuple.
    """
    return [(key.encode('utf-8'), value.encode('utf-8'))
            for key, values in items
            for value in values]
Function that encode all elements in the list of items passed as a parameter
45,184
def cookie_from_string(cookie_string, strict_cookies=False):
    """Parse one Set-Cookie header value into kwargs for Django's
    ``response.set_cookie``.

    Returns None for unparsable cookies. The 'comment' attribute is
    dropped because ``set_cookie`` has no such parameter.

    :param strict_cookies: use stdlib SimpleCookie (strict RFC parsing)
        instead of the lenient hand-rolled parser
    """
    if strict_cookies:
        cookies = SimpleCookie(COOKIE_PREFIX + cookie_string)
        if not cookies.keys():
            return None
        cookie_name, = cookies.keys()
        cookie_dict = {k: v for k, v in cookies[cookie_name].items()
                       if v and k != 'comment'}
        cookie_dict['key'] = cookie_name
        cookie_dict['value'] = cookies[cookie_name].value
        return cookie_dict
    else:
        valid_attrs = ('path', 'domain', 'comment', 'expires',
                       'max_age', 'httponly', 'secure')
        cookie_dict = {}
        cookie_parts = cookie_string.split(';')
        try:
            key, value = cookie_parts[0].split('=', 1)
            cookie_dict['key'], cookie_dict['value'] = key, unquote(value)
        except ValueError:
            logger.warning('Invalid cookie: `%s`', cookie_string)
            return None
        if cookie_dict['value'].startswith('='):
            logger.warning('Invalid cookie: `%s`', cookie_string)
            return None
        for part in cookie_parts[1:]:
            if '=' in part:
                attr, value = part.split('=', 1)
                value = value.strip()
            else:
                attr = part
                value = ''
            attr = attr.strip().lower()
            if not attr:
                continue
            if attr in valid_attrs:
                if attr in ('httponly', 'secure'):
                    # Flag attributes carry no value.
                    cookie_dict[attr] = True
                elif attr == 'comment':
                    # Fix: was `attr in 'comment'` — a substring test, not
                    # equality. Among valid_attrs only 'comment' matched,
                    # so behavior is unchanged, but the intent is ==.
                    continue
                else:
                    cookie_dict[attr] = unquote(value)
            else:
                logger.warning('Unknown cookie attribute %s', attr)
        return cookie_dict
Parser for the HTTP Set-Cookie header. The return from this function will be used as parameters for django's response.set_cookie method. Because set_cookie doesn't have a comment parameter, this cookie attribute will be ignored.
45,185
def unquote(value):
    """Remove one pair of wrapping double quotes from *value*.

    Escaped quotes (``\\"``) inside the wrapped text are unescaped.
    Strings of length <= 1 or without both quotes are returned as-is.
    """
    wrapped = len(value) > 1 and value.startswith('"') and value.endswith('"')
    if not wrapped:
        return value
    return value[1:-1].replace(r'\"', '"')
Remove wrapping quotes from a string .
45,186
def asbool(value):
    """Coerce *value* to bool.

    Strings are matched (case-insensitively, stripped) against known
    true/false tokens; an unrecognized string raises ValueError. Any
    non-string goes through the built-in ``bool()``.
    """
    if not isinstance(value, string_types):
        return bool(value)
    token = value.strip().lower()
    if token in ('true', 'yes', 'on', 'y', 't', '1'):
        return True
    if token in ('false', 'no', 'off', 'n', 'f', '0'):
        return False
    raise ValueError("String is not true/false: %r" % token)
Function used to convert certain string values into an appropriated boolean value . If value is not a string the built - in python bool function will be used to convert the passed parameter
45,187
def should_transform(self):
    """Determine if the response should be transformed by Diazo.

    Runs a series of disqualifying checks, logging the reason for the
    first that fails; returns True only when every check passes.
    """
    # Diazo/lxml libraries not importable at module load time.
    if not HAS_DIAZO:
        self.log.info("HAS_DIAZO: false")
        return False
    # Per-request opt-out header.
    if asbool(self.request.META.get(DIAZO_OFF_REQUEST_HEADER)):
        self.log.info("DIAZO_OFF_REQUEST_HEADER in request.META: off")
        return False
    # Per-response opt-out header from upstream.
    if asbool(self.response.get(DIAZO_OFF_RESPONSE_HEADER)):
        self.log.info("DIAZO_OFF_RESPONSE_HEADER in response.get: off")
        return False
    # AJAX responses are passed through untouched.
    if self.request.is_ajax():
        self.log.info("Request is AJAX")
        return False
    # A streaming response has no complete body to transform.
    if self.response.streaming:
        self.log.info("Response has streaming")
        return False
    # Only HTML bodies can be themed.
    content_type = self.response.get('Content-Type')
    if not is_html_content_type(content_type):
        self.log.info("Content-type: false")
        return False
    # Compressed payloads cannot be parsed as-is.
    content_encoding = self.response.get('Content-Encoding')
    if content_encoding in ('zip', 'compress'):
        self.log.info("Content encode is %s", content_encoding)
        return False
    # Skip redirects (3xx), 204 No Content and 401 Unauthorized.
    status_code = str(self.response.status_code)
    if status_code.startswith('3') or status_code == '204' or status_code == '401':
        self.log.info("Status code: %s", status_code)
        return False
    # An empty body has nothing to transform.
    if len(self.response.content) == 0:
        self.log.info("Response Content is EMPTY")
        return False
    self.log.info("Transform")
    return True
Determine if we should transform the response
45,188
def transform(self, rules, theme_template, is_html5, context_data=None):
    """Transform the response content with Diazo.

    Renders *theme_template*, compiles it together with *rules* into an
    XSLT stylesheet, and applies it to the decoded response body in place.

    :param rules: path to the Diazo rules file
    :param theme_template: Django template name for the theme HTML
    :param is_html5: when True, force an HTML5 doctype on the output
    :param context_data: optional template context dict
    :returns: the (possibly transformed) HttpResponse
    """
    if not self.should_transform():
        self.log.info("Don't need to be transformed")
        return self.response
    theme = loader.render_to_string(theme_template, context=context_data, request=self.request)
    # Compile rules + rendered theme into a single XSLT stylesheet.
    output_xslt = compile_theme(rules=rules, theme=StringIO(theme), )
    transform = etree.XSLT(output_xslt)
    self.log.debug("Transform: %s", transform)
    charset = get_charset(self.response.get('Content-Type'))
    try:
        decoded_response = self.response.content.decode(charset)
    except UnicodeDecodeError:
        # Declared charset does not match the bytes: decode leniently,
        # dropping undecodable characters, and warn.
        decoded_response = self.response.content.decode(charset, 'ignore')
        self.log.warning("Charset is {} and type of encode used in file is\
 different. Some unknown characteres might be\
 ignored.".format(charset))
    content_doc = etree.fromstring(decoded_response, parser=etree.HTMLParser())
    self.response.content = transform(content_doc)
    if is_html5:
        self.set_html5_doctype()
    self.reset_headers()
    self.log.debug("Response transformer: %s", self.response)
    return self.response
Method used to make a transformation on the content of the http response based on the rules and theme_templates passed as paremters
45,189
def set_html5_doctype(self):
    """Rewrite the response body's doctype to the HTML5 one (first match only)."""
    new_content, _ = doctype_re.subn(b'<!DOCTYPE html>\n', self.response.content, 1)
    self.response.content = new_content
Method used to transform a doctype in to a properly html5 doctype
45,190
def _output ( self , s ) : if s . lower ( ) . startswith ( b'host: ' ) : self . _buffer . insert ( 1 , s ) else : self . _buffer . append ( s )
Host header should always be first
45,191
def get_django_response(proxy_response, strict_cookies=False):
    """Convert an upstream (urllib3-style) response into a Django response.

    Uses StreamingHttpResponse when ``should_stream`` decides the payload
    is large or unsized; otherwise buffers the body in an HttpResponse.
    Headers and valid Set-Cookie values are copied across.

    :param proxy_response: upstream response object (urllib3 HTTPResponse-like)
    :param strict_cookies: parse cookies strictly via SimpleCookie
    :returns: a django.http response ready to return to the client
    """
    status = proxy_response.status
    headers = proxy_response.headers
    logger.debug('Proxy response headers: %s', headers)
    content_type = headers.get('Content-Type')
    logger.debug('Content-Type: %s', content_type)
    if should_stream(proxy_response):
        logger.info('Content-Length is bigger than %s', DEFAULT_AMT)
        # Stream the upstream body in DEFAULT_AMT-sized chunks.
        response = StreamingHttpResponse(proxy_response.stream(DEFAULT_AMT), status=status, content_type=content_type)
    else:
        content = proxy_response.data or b''
        response = HttpResponse(content, status=status, content_type=content_type)
    logger.info('Normalizing response headers')
    set_response_headers(response, headers)
    # NOTE(review): reads Django's private '_headers' attribute; this may
    # break on Django versions that renamed it — confirm supported versions.
    logger.debug('Response headers: %s', getattr(response, '_headers'))
    cookies = proxy_response.headers.getlist('set-cookie')
    logger.info('Checking for invalid cookies')
    for cookie_string in cookies:
        cookie_dict = cookie_from_string(cookie_string, strict_cookies=strict_cookies)
        # cookie_from_string returns None for unparsable cookies; skip those.
        if cookie_dict:
            response.set_cookie(**cookie_dict)
    logger.debug('Response cookies: %s', response.cookies)
    return response
This method is used to create an appropriate response based on the Content - Length of the proxy_response . If the content is bigger than MIN_STREAMING_LENGTH which is found on utils . py than django . http . StreamingHttpResponse will be created else a django . http . HTTPResponse will be created instead
45,192
def get_request_headers(self):
    """Return the headers that will be sent upstream.

    When ``add_remote_user`` is set and the request carries an active
    user, a REMOTE_USER header is added with that user's username.
    """
    request_headers = self.get_proxy_request_headers(self.request)
    if not self.add_remote_user or not hasattr(self.request, 'user'):
        return request_headers
    if self.request.user.is_active:
        request_headers['REMOTE_USER'] = self.request.user.get_username()
        self.log.info("REMOTE_USER set")
    return request_headers
Return request headers that will be sent to upstream .
45,193
def get_encoded_query_params(self):
    """Return the request's GET parameters, UTF-8 encoded and urlencoded,
    for use in the proxied request."""
    encoded_pairs = encode_items(self.request.GET.lists())
    return urlencode(encoded_pairs)
Return encoded query params to be used in proxied request
45,194
def stream_download(url, target_path, verbose=False):
    """Download a large file to *target_path* without loading it into memory.

    The body is streamed in 512-byte chunks.

    :param url: URL to fetch
    :param target_path: local file path to write
    :param verbose: print progress/size/timing information
    """
    response = requests.get(url, stream=True)
    start = datetime.now()
    if verbose:
        print("Beginning streaming download of %s" % url)
        # Fix: the size report was printed even when verbose=False;
        # gate it with the rest of the progress output. Also tolerate a
        # non-numeric Content-Length (ValueError), not just a missing one.
        try:
            content_length = int(response.headers['Content-Length'])
            content_MB = content_length / 1048576.0
            print("Total file size: %.2f MB" % content_MB)
        except (KeyError, ValueError):
            pass
    # Fix: the file handle was never closed; 'with' guarantees it is,
    # even when a chunk read raises mid-download.
    with open(target_path, "wb") as handle:
        for chunk in response.iter_content(chunk_size=512):
            if chunk:
                handle.write(chunk)
    if verbose:
        print("Download completed to %s in %s" % (target_path, datetime.now() - start))
Download a large file without loading it into memory .
45,195
def validate_object_id(object_id):
    """Validate the format of a 990 object_id and return it unchanged.

    Prints a hint and raises RuntimeError when the id does not match
    OBJECT_ID_RE.
    """
    if re.match(OBJECT_ID_RE, str(object_id)):
        return object_id
    print("'%s' appears not to be a valid 990 object_id" % object_id)
    raise RuntimeError(OBJECT_ID_MSG)
It's easy to make a mistake entering these; validate the format
45,196
def _get_table_start ( self ) : if self . documentation : standardized_table_start = { 'object_id' : { 'value' : self . object_id , 'ordering' : - 1 , 'line_number' : 'NA' , 'description' : 'IRS-assigned object id' , 'db_type' : 'String(18)' } , 'ein' : { 'value' : self . ein , 'ordering' : - 2 , 'line_number' : 'NA' , 'description' : 'IRS employer id number' , 'db_type' : 'String(9)' } } if self . documentId : standardized_table_start [ 'documentId' ] = { 'value' : self . documentId , 'description' : 'Document ID' , 'ordering' : 0 } else : standardized_table_start = { 'object_id' : self . object_id , 'ein' : self . ein } if self . documentId : standardized_table_start [ 'documentId' ] = self . documentId return standardized_table_start
prefill the columns we need for all tables
45,197
def debracket(string):
    """Eliminate bracketed variable names in doc-line strings.

    Bracketed spans become ';' separators; leading separators/spaces and
    doubled-up separators are then cleaned away.
    """
    text = re.sub(BRACKET_RE, ';', str(string))
    text = text.lstrip(';')
    text = text.lstrip(' ')
    return text.replace('; ;', ';')
Eliminate the bracketed var names in doc line strings
45,198
def _set_schedules(self):
    """Split ReturnData keys into known schedules and other forms.

    Attribute keys (starting with '@') are ignored; the schedules list
    always begins with 'ReturnHeader990x'.
    """
    self.schedules = ['ReturnHeader990x', ]
    self.otherforms = []
    for sked in self.raw_irs_dict['Return']['ReturnData'].keys():
        if sked.startswith("@"):
            continue
        bucket = self.schedules if sked in KNOWN_SCHEDULES else self.otherforms
        bucket.append(sked)
Attach the known and unknown schedules
45,199
def get_parsed_sked(self, skedname):
    """Return all parsed schedules named *skedname*.

    Returns a list because some schedules (e.g. Schedule K) may appear
    multiple times; unknown schedule names yield an empty list.

    :raises Exception: when the filing has not been processed yet
    """
    if not self.processed:
        raise Exception("Filing must be processed to return parsed sked")
    if skedname not in self.schedules:
        return []
    return [sked for sked in self.result if sked['schedule_name'] == skedname]
Returns an array because multiple Schedule Ks are allowed