idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
35,000
def remove ( self , session_id ) : session = self . _items . get ( session_id , None ) if session is not None : session . promoted = - 1 session . on_delete ( True ) del self . _items [ session_id ] return True return False
Remove session object from the container
35,001
def send_complete ( self ) : self . active = True if self . should_finish ( ) : self . _detach ( ) if not self . _finished : self . safe_finish ( ) else : if self . session : self . session . flush ( )
Verify if connection should be closed based on amount of data that was sent .
35,002
def send ( self , message , binary = False ) : if not self . is_closed : self . session . send_message ( message , binary = binary )
Send message to the client .
35,003
def dump ( self ) : data = dict ( sessions_active = self . sess_active , connections_active = self . conn_active , connections_ps = self . conn_ps . last_average , packets_sent_ps = self . pack_sent_ps . last_average , packets_recv_ps = self . pack_recv_ps . last_average ) for k , v in self . sess_transports . items ( ...
Return dictionary with current statistical information
35,004
def finish ( self , chunk = None ) : self . _log_disconnect ( ) super ( BaseHandler , self ) . finish ( chunk )
Tornado finish handler
35,005
def handle_session_cookie ( self ) : if not self . server . settings [ 'jsessionid' ] : return cookie = self . cookies . get ( 'JSESSIONID' ) if not cookie : cv = 'dummy' else : cv = cookie . value self . set_cookie ( 'JSESSIONID' , cv )
Handle JSESSIONID cookie logic
35,006
def options ( self , * args , ** kwargs ) : self . enable_cache ( ) self . handle_session_cookie ( ) self . preflight ( ) if self . verify_origin ( ) : allowed_methods = getattr ( self , 'access_methods' , 'OPTIONS, POST' ) self . set_header ( 'Access-Control-Allow-Methods' , allowed_methods ) self . set_header ( 'Allo...
XHR cross - domain OPTIONS handler
35,007
def preflight ( self ) : origin = self . request . headers . get ( 'Origin' , '*' ) self . set_header ( 'Access-Control-Allow-Origin' , origin ) headers = self . request . headers . get ( 'Access-Control-Request-Headers' ) if headers : self . set_header ( 'Access-Control-Allow-Headers' , headers ) self . set_header ( '...
Handles request authentication
35,008
def get_argument ( self , name ) : val = self . arguments . get ( name ) if val : return val [ 0 ] return None
Return single argument by name
35,009
def verify_state ( self ) : if self . state == CONNECTING : self . state = OPEN self . conn . on_open ( self . conn_info )
Verify if session was not yet opened . If it is open it and call connections on_open
35,010
def delayed_close ( self ) : self . state = CLOSING self . server . io_loop . add_callback ( self . close )
Delayed close - won t close immediately but on next ioloop tick .
35,011
def on_delete ( self , forced ) : if not forced and self . handler is not None and not self . is_closed : self . promote ( ) else : self . close ( )
Session expiration callback
35,012
def remove_handler ( self , handler ) : super ( Session , self ) . remove_handler ( handler ) self . promote ( ) self . stop_heartbeat ( )
Detach active handler from the session
35,013
def on_open ( self , info ) : self . ip = info . ip self . request = info self . open ( )
sockjs - tornado on_open handler
35,014
def _notebook_model_from_path ( self , path , content = False , format = None ) : model = base_model ( path ) model [ "type" ] = "notebook" if self . fs . isfile ( path ) : model [ "last_modified" ] = model [ "created" ] = self . fs . lstat ( path ) [ "ST_MTIME" ] else : model [ "last_modified" ] = model [ "created" ] ...
Build a notebook model from database record .
35,015
def _file_model_from_path ( self , path , content = False , format = None ) : model = base_model ( path ) model [ "type" ] = "file" if self . fs . isfile ( path ) : model [ "last_modified" ] = model [ "created" ] = self . fs . lstat ( path ) [ "ST_MTIME" ] else : model [ "last_modified" ] = model [ "created" ] = DUMMY_...
Build a file model from database record .
35,016
def _convert_file_records ( self , paths ) : ret = [ ] for path in paths : if os . path . basename ( path ) == self . fs . dir_keep_file : continue type_ = self . guess_type ( path , allow_directory = True ) if type_ == "notebook" : ret . append ( self . _notebook_model_from_path ( path , False ) ) elif type_ == "file"...
Applies _notebook_model_from_s3_path or _file_model_from_s3_path to each entry of paths depending on the result of guess_type .
35,017
def save ( self , model , path ) : self . log . debug ( "S3contents.GenericManager: save %s: '%s'" , model , path ) if "type" not in model : self . do_error ( "No model type provided" , 400 ) if "content" not in model and model [ "type" ] != "directory" : self . do_error ( "No file content provided" , 400 ) if model [ ...
Save a file or directory model to path .
35,018
def rename_file ( self , old_path , new_path ) : self . log . debug ( "S3contents.GenericManager: Init rename of '%s' to '%s'" , old_path , new_path ) if self . file_exists ( new_path ) or self . dir_exists ( new_path ) : self . already_exists ( new_path ) elif self . file_exists ( old_path ) or self . dir_exists ( old...
Rename a file or directory .
35,019
def delete_file ( self , path ) : self . log . debug ( "S3contents.GenericManager: delete_file '%s'" , path ) if self . file_exists ( path ) or self . dir_exists ( path ) : self . fs . rm ( path ) else : self . no_such_entity ( path )
Delete the file or directory at path .
35,020
def path ( self , * path ) : path = list ( filter ( None , path ) ) path = self . unprefix ( path ) items = [ self . prefix_ ] + path return self . join ( * items )
Utility to join paths including the bucket and prefix
35,021
def get ( url , params = { } ) : request_url = url if len ( params ) : request_url = "{}?{}" . format ( url , urlencode ( params ) ) try : req = Request ( request_url , headers = { 'User-Agent' : 'Mozilla/5.0' } ) response = json . loads ( urlopen ( req ) . read ( ) . decode ( "utf-8" ) ) return response except HTTPErr...
Invoke an HTTP GET request on a url
35,022
def find ( self , id ) : url = "{}/{}/{}" . format ( __endpoint__ , self . type . RESOURCE , id ) response = RestClient . get ( url ) [ self . type . RESOURCE [ : - 1 ] ] return self . type ( response )
Get a resource by its id
35,023
def find_many ( self , url , type , resource ) : return [ type ( item ) for item in RestClient . get ( url ) [ resource ] ]
Get a list of resources
35,024
def iter ( self ) : page = 1 fetch_all = True url = "{}/{}" . format ( __endpoint__ , self . type . RESOURCE ) if 'page' in self . params : page = self . params [ 'page' ] fetch_all = False response = RestClient . get ( url , self . params ) [ self . type . RESOURCE ] while len ( response ) : for item in response : yie...
Gets all resources automating paging through data
35,025
def array ( self ) : url = "{}/{}" . format ( __endpoint__ , self . type . RESOURCE ) return RestClient . get ( url , self . params ) [ self . type . RESOURCE ]
Get all resources and return the result as an array
35,026
def make2d ( array , cols = None , dtype = None ) : if not len ( array ) : if cols is None or dtype is None : raise RuntimeError ( "cols and dtype must be specified for empty array" ) return _np . empty ( ( 0 , cols ) , dtype = dtype ) return _np . vstack ( array )
Make a 2D array from an array of arrays . The cols and dtype arguments can be omitted if the array is not empty .
35,027
def _parse_header ( stream ) : parser = _PlyHeaderParser ( ) while parser . consume ( stream . readline ( ) ) : pass return PlyData ( [ PlyElement ( * e ) for e in parser . elements ] , parser . format == 'ascii' , _byte_order_map [ parser . format ] , parser . comments , parser . obj_info )
Parse a PLY header from a readable file - like stream .
35,028
def read ( stream ) : ( must_close , stream ) = _open_stream ( stream , 'read' ) try : data = PlyData . _parse_header ( stream ) for elt in data : elt . _read ( stream , data . text , data . byte_order ) finally : if must_close : stream . close ( ) return data
Read PLY data from a readable file - like object or filename .
35,029
def write ( self , stream ) : ( must_close , stream ) = _open_stream ( stream , 'write' ) try : stream . write ( self . header . encode ( 'ascii' ) ) stream . write ( b'\n' ) for elt in self : elt . _write ( stream , self . text , self . byte_order ) finally : if must_close : stream . close ( )
Write PLY data to a writeable file - like object or filename .
35,030
def header ( self ) : lines = [ 'ply' ] if self . text : lines . append ( 'format ascii 1.0' ) else : lines . append ( 'format ' + _byte_order_reverse [ self . byte_order ] + ' 1.0' ) for c in self . comments : lines . append ( 'comment ' + c ) for c in self . obj_info : lines . append ( 'obj_info ' + c ) lines . exten...
Provide PLY - formatted metadata for the instance .
35,031
def describe ( data , name , len_types = { } , val_types = { } , comments = [ ] ) : if not isinstance ( data , _np . ndarray ) : raise TypeError ( "only numpy arrays are supported" ) if len ( data . shape ) != 1 : raise ValueError ( "only one-dimensional arrays are " "supported" ) count = len ( data ) properties = [ ] ...
Construct a PlyElement from an array s metadata .
35,032
def _read ( self , stream , text , byte_order ) : dtype = self . dtype ( byte_order ) if text : self . _read_txt ( stream ) elif _can_mmap ( stream ) and not self . _have_list : num_bytes = self . count * dtype . itemsize offset = stream . tell ( ) stream . seek ( 0 , 2 ) max_bytes = stream . tell ( ) - offset if max_b...
Read the actual data from a PLY file .
35,033
def _write ( self , stream , text , byte_order ) : if text : self . _write_txt ( stream ) else : if self . _have_list : self . _write_bin ( stream , byte_order ) else : stream . write ( self . data . astype ( self . dtype ( byte_order ) , copy = False ) . data )
Write the data to a PLY file .
35,034
def _read_txt ( self , stream ) : self . _data = _np . empty ( self . count , dtype = self . dtype ( ) ) k = 0 for line in _islice ( iter ( stream . readline , b'' ) , self . count ) : fields = iter ( line . strip ( ) . split ( ) ) for prop in self . properties : try : self . _data [ prop . name ] [ k ] = prop . _from_...
Load a PLY element from an ASCII - format PLY file . The element may contain list properties .
35,035
def _write_txt ( self , stream ) : for rec in self . data : fields = [ ] for prop in self . properties : fields . extend ( prop . _to_fields ( rec [ prop . name ] ) ) _np . savetxt ( stream , [ fields ] , '%.18g' , newline = '\n' )
Save a PLY element to an ASCII - format PLY file . The element may contain list properties .
35,036
def _read_bin ( self , stream , byte_order ) : self . _data = _np . empty ( self . count , dtype = self . dtype ( byte_order ) ) for k in _range ( self . count ) : for prop in self . properties : try : self . _data [ prop . name ] [ k ] = prop . _read_bin ( stream , byte_order ) except StopIteration : raise PlyElementP...
Load a PLY element from a binary PLY file . The element may contain list properties .
35,037
def _write_bin ( self , stream , byte_order ) : for rec in self . data : for prop in self . properties : prop . _write_bin ( rec [ prop . name ] , stream , byte_order )
Save a PLY element to a binary PLY file . The element may contain list properties .
35,038
def header ( self ) : lines = [ 'element %s %d' % ( self . name , self . count ) ] for c in self . comments : lines . append ( 'comment ' + c ) lines . extend ( list ( map ( str , self . properties ) ) ) return '\n' . join ( lines )
Format this element s metadata as it would appear in a PLY header .
35,039
def _from_fields ( self , fields ) : return _np . dtype ( self . dtype ( ) ) . type ( next ( fields ) )
Parse from generator . Raise StopIteration if the property could not be read .
35,040
def _read_bin ( self , stream , byte_order ) : try : return _read_array ( stream , self . dtype ( byte_order ) , 1 ) [ 0 ] except IndexError : raise StopIteration
Read data from a binary stream . Raise StopIteration if the property could not be read .
35,041
def color_stream_st ( istream = sys . stdin , save_palette = False , ** kwargs ) : for line in istream : filename = line . strip ( ) try : palette = extract_colors ( filename , ** kwargs ) except Exception as e : print ( filename , e , file = sys . stderr ) continue print_colors ( filename , palette ) if save_palette :...
Read filenames from the input stream and detect their palette .
35,042
def color_stream_mt ( istream = sys . stdin , n = config . N_PROCESSES , ** kwargs ) : queue = multiprocessing . Queue ( 1000 ) lock = multiprocessing . Lock ( ) pool = [ multiprocessing . Process ( target = color_process , args = ( queue , lock ) , kwargs = kwargs ) for i in range ( n ) ] for p in pool : p . start ( )...
Read filenames from the input stream and detect their palette using multiple processes .
35,043
def color_process ( queue , lock ) : "Receive filenames and get the colors from their images." while True : block = queue . get ( ) if block == config . SENTINEL : break for filename in block : try : palette = extract_colors ( filename ) except : continue lock . acquire ( ) try : print_colors ( filename , palette ) fin...
Receive filenames and get the colors from their images .
35,044
def extract_colors ( filename_or_img , min_saturation = config . MIN_SATURATION , min_distance = config . MIN_DISTANCE , max_colors = config . MAX_COLORS , min_prominence = config . MIN_PROMINENCE , n_quantized = config . N_QUANTIZED ) : if Image . isImageType ( filename_or_img ) : im = filename_or_img else : im = Imag...
Determine what the major colors are in the given image .
35,045
def save_palette_as_image ( filename , palette ) : "Save palette as a PNG with labeled, colored blocks" output_filename = '%s_palette.png' % filename [ : filename . rfind ( '.' ) ] size = ( 80 * len ( palette . colors ) , 80 ) im = Image . new ( 'RGB' , size ) draw = ImageDraw . Draw ( im ) for i , c in enumerate ( pal...
Save palette as a PNG with labeled colored blocks
35,046
def autocrop ( im , bgcolor ) : "Crop away a border of the given background color." if im . mode != "RGB" : im = im . convert ( "RGB" ) bg = Image . new ( "RGB" , im . size , bgcolor ) diff = ImageChops . difference ( im , bg ) bbox = diff . getbbox ( ) if bbox : return im . crop ( bbox ) return im
Crop away a border of the given background color .
35,047
def convert_to_influx ( mac , payload ) : dataFormat = payload [ "data_format" ] if ( 'data_format' in payload ) else None fields = { } fields [ "temperature" ] = payload [ "temperature" ] if ( 'temperature' in payload ) else None fields [ "humidity" ] = payload [ "humidity" ] if ( 'humidity' in payload ) else None fie...
Convert data into RuuviCollector naming scheme and scale
35,048
def _run_get_data_background ( macs , queue , shared_data , bt_device ) : run_flag = RunFlag ( ) def add_data ( data ) : if not shared_data [ 'run_flag' ] : run_flag . running = False data [ 1 ] [ 'time' ] = datetime . utcnow ( ) . isoformat ( ) queue . put ( data ) RuuviTagSensor . get_datas ( add_data , macs , run_fl...
Background process function for RuuviTag Sensors
35,049
def _data_update ( subjects , queue , run_flag ) : while run_flag . running : while not queue . empty ( ) : data = queue . get ( ) for subject in [ s for s in subjects if not s . is_disposed ] : subject . on_next ( data ) time . sleep ( 0.1 )
Get data from background process and notify all subscribed observers with the new data
35,050
async def data_update ( queue ) : global allData while True : while not queue . empty ( ) : data = queue . get ( ) allData [ data [ 0 ] ] = data [ 1 ] for key , value in tags . items ( ) : if key in allData : allData [ key ] [ 'name' ] = value await asyncio . sleep ( 0.5 )
Update data sent by the background process to global allData variable
35,051
def convert_data ( raw ) : data = RuuviTagSensor . _get_data_format_2and4 ( raw ) if data is not None : return ( 2 , data ) data = RuuviTagSensor . _get_data_format_3 ( raw ) if data is not None : return ( 3 , data ) data = RuuviTagSensor . _get_data_format_5 ( raw ) if data is not None : return ( 5 , data ) return ( N...
Validate that data is from RuuviTag and get correct data part .
35,052
def find_ruuvitags ( bt_device = '' ) : log . info ( 'Finding RuuviTags. Stop with Ctrl+C.' ) datas = dict ( ) for new_data in RuuviTagSensor . _get_ruuvitag_datas ( bt_device = bt_device ) : if new_data [ 0 ] in datas : continue datas [ new_data [ 0 ] ] = new_data [ 1 ] log . info ( new_data [ 0 ] ) log . info ( new_d...
Find all RuuviTags . Function will print the mac and the state of the sensors when found . Function will execute until it is stopped . Stop execution with Ctrl + C .
35,053
def get_data_for_sensors ( macs = [ ] , search_duratio_sec = 5 , bt_device = '' ) : log . info ( 'Get latest data for sensors. Stop with Ctrl+C.' ) log . info ( 'Stops automatically in %ss' , search_duratio_sec ) log . info ( 'MACs: %s' , macs ) datas = dict ( ) for new_data in RuuviTagSensor . _get_ruuvitag_datas ( ma...
Get latest data for sensors in the MAC s list .
35,054
def get_datas ( callback , macs = [ ] , run_flag = RunFlag ( ) , bt_device = '' ) : log . info ( 'Get latest data for sensors. Stop with Ctrl+C.' ) log . info ( 'MACs: %s' , macs ) for new_data in RuuviTagSensor . _get_ruuvitag_datas ( macs , None , run_flag , bt_device ) : callback ( new_data )
Get data for all ruuvitag sensors or sensors in the MAC s list .
35,055
def _get_ruuvitag_datas ( macs = [ ] , search_duratio_sec = None , run_flag = RunFlag ( ) , bt_device = '' ) : mac_blacklist = [ ] start_time = time . time ( ) data_iter = ble . get_datas ( mac_blacklist , bt_device ) for ble_data in data_iter : if search_duratio_sec and time . time ( ) - start_time > search_duratio_se...
Get data from BluetoothCommunication and handle data encoding .
35,056
def update ( self ) : ( data_format , data ) = RuuviTagSensor . get_data ( self . _mac , self . _bt_device ) if data == self . _data : return self . _state self . _data = data if self . _data is None : self . _state = { } else : self . _state = get_decoder ( data_format ) . decode_data ( self . _data ) return self . _s...
Get latest data from the sensor and update own state .
35,057
def write_to_influxdb ( received_data ) : dataFormat = received_data [ 1 ] [ "data_format" ] if ( 'data_format' in received_data [ 1 ] ) else None fields = { } fields [ "temperature" ] = received_data [ 1 ] [ "temperature" ] if ( 'temperature' in received_data [ 1 ] ) else None fields [ "humidity" ] = received_data [ 1...
Convert data into RuuviCollector naming scheme and scale
35,058
def update_data ( ) : global allData while not q . empty ( ) : allData = q . get ( ) for key , value in tags . items ( ) : if key in allData : allData [ key ] [ 'name' ] = value
Update data sent by background process to global allData
35,059
def _get_acceleration ( self , data ) : if ( data [ 7 : 8 ] == 0x7FFF or data [ 9 : 10 ] == 0x7FFF or data [ 11 : 12 ] == 0x7FFF ) : return ( None , None , None ) acc_x = twos_complement ( ( data [ 7 ] << 8 ) + data [ 8 ] , 16 ) acc_y = twos_complement ( ( data [ 9 ] << 8 ) + data [ 10 ] , 16 ) acc_z = twos_complement ...
Return acceleration mG
35,060
def _get_powerinfo ( self , data ) : power_info = ( data [ 13 ] & 0xFF ) << 8 | ( data [ 14 ] & 0xFF ) battery_voltage = rshift ( power_info , 5 ) + 1600 tx_power = ( power_info & 0b11111 ) * 2 - 40 if rshift ( power_info , 5 ) == 0b11111111111 : battery_voltage = None if ( power_info & 0b11111 ) == 0b11111 : tx_power ...
Return battery voltage and tx power
35,061
def split_key ( key , max_keys = 0 ) : parts = [ x for x in re . split ( SPLIT_REGEX , key ) if x != "." ] result = [ ] while len ( parts ) > 0 : if max_keys > 0 and len ( result ) == max_keys : break result . append ( parts . pop ( 0 ) ) if len ( parts ) > 0 : result . append ( "." . join ( parts ) ) return result
Splits a key but allows dots in the key name if they re escaped properly .
35,062
def to_python ( self ) : result = list ( self ) for index , value in enumerate ( result ) : if isinstance ( value , DottedCollection ) : result [ index ] = value . to_python ( ) return result
Returns a plain python list and converts to plain python objects all this object s descendants .
35,063
def to_python ( self ) : result = dict ( self ) for key , value in iteritems ( result ) : if isinstance ( value , DottedCollection ) : result [ key ] = value . to_python ( ) return result
Returns a plain python dict and converts to plain python objects all this object s descendants .
35,064
def handle_nick ( self , params ) : nick = params if re . search ( r'[^a-zA-Z0-9\-\[\]\'`^{}_]' , nick ) : raise IRCError . from_name ( 'erroneusnickname' , ':%s' % nick ) if self . server . clients . get ( nick , None ) == self : return if nick in self . server . clients : raise IRCError . from_name ( 'nicknameinuse' ...
Handle the initial setting of the user s nickname and nick changes .
35,065
def handle_user ( self , params ) : params = params . split ( ' ' , 3 ) if len ( params ) != 4 : raise IRCError . from_name ( 'needmoreparams' , 'USER :Not enough parameters' ) user , mode , unused , realname = params self . user = user self . mode = mode self . realname = realname return ''
Handle the USER command which identifies the user to the server .
35,066
def handle_privmsg ( self , params ) : target , sep , msg = params . partition ( ' ' ) if not msg : raise IRCError . from_name ( 'needmoreparams' , 'PRIVMSG :Not enough parameters' ) message = ':%s PRIVMSG %s %s' % ( self . client_ident ( ) , target , msg ) if target . startswith ( '#' ) or target . startswith ( '$' ) ...
Handle sending a private message to a user or channel .
35,067
def _send_to_others ( self , message , channel ) : other_clients = [ client for client in channel . clients if not client == self ] for client in other_clients : client . send_queue . append ( message )
Send the message to all clients in the specified channel except for self .
35,068
def handle_topic ( self , params ) : channel_name , sep , topic = params . partition ( ' ' ) channel = self . server . channels . get ( channel_name ) if not channel : raise IRCError . from_name ( 'nosuchnick' , 'PRIVMSG :%s' % channel_name ) if channel . name not in self . channels : raise IRCError . from_name ( 'cann...
Handle a topic command .
35,069
def handle_quit ( self , params ) : response = ':%s QUIT :%s' % ( self . client_ident ( ) , params . lstrip ( ':' ) ) for channel in self . channels . values ( ) : for client in channel . clients : client . send_queue . append ( response ) channel . clients . remove ( self )
Handle the client breaking off the connection with a QUIT command .
35,070
def handle_dump ( self , params ) : print ( "Clients:" , self . server . clients ) for client in self . server . clients . values ( ) : print ( " " , client ) for channel in client . channels . values ( ) : print ( " " , channel . name ) print ( "Channels:" , self . server . channels ) for channel in self . server ...
Dump internal server information for debugging purposes .
35,071
def client_ident ( self ) : return irc . client . NickMask . from_params ( self . nick , self . user , self . server . servername )
Return the client identifier as included in many command replies .
35,072
def finish ( self ) : log . info ( 'Client disconnected: %s' , self . client_ident ( ) ) response = ':%s QUIT :EOF from client' % self . client_ident ( ) for channel in self . channels . values ( ) : if self in channel . clients : for client in channel . clients : client . send_queue . append ( response ) channel . cli...
The client connection is finished . Do some cleanup to ensure that the client doesn t linger around in any channel or the client list in case the client didn t properly close the connection with PART and QUIT .
35,073
def send_raw ( self , string ) : log . debug ( 'RAW: {}' . format ( string ) ) if self . transport is None : raise ServerNotConnectedError ( "Not connected." ) self . transport . write ( self . _prep_message ( string ) )
Send raw string to the server via the asyncio transport .
35,074
def _parse_modes ( mode_string , unary_modes = "" ) : if not mode_string or not mode_string [ 0 ] in '+-' : return [ ] modes = [ ] parts = mode_string . split ( ) mode_part , args = parts [ 0 ] , parts [ 1 : ] for ch in mode_part : if ch in "+-" : sign = ch continue arg = args . pop ( 0 ) if ch in unary_modes and args ...
Parse the mode_string and return a list of triples .
35,075
def from_group ( cls , group ) : if not group : return tag_items = group . split ( ";" ) return list ( map ( cls . parse , tag_items ) )
Construct tags from the regex group
35,076
def from_group ( group ) : if not group : return [ ] main , sep , ext = group . partition ( " :" ) arguments = main . split ( ) if sep : arguments . append ( ext ) return arguments
Construct arguments from the regex group
35,077
def set ( self , name , value = True ) : "set a feature value" setattr ( self , name . lower ( ) , value )
set a feature value
35,078
def load ( self , arguments ) : "Load the values from the a ServerConnection arguments" features = arguments [ 1 : - 1 ] list ( map ( self . load_feature , features ) )
Load the values from the a ServerConnection arguments
35,079
def _parse_PREFIX ( value ) : "channel user prefixes" channel_modes , channel_chars = value . split ( ')' ) channel_modes = channel_modes [ 1 : ] return collections . OrderedDict ( zip ( channel_chars , channel_modes ) )
channel user prefixes
35,080
def _connect ( self ) : server = self . servers . peek ( ) try : self . connect ( server . host , server . port , self . _nickname , server . password , ircname = self . _realname , ** self . __connect_params ) except irc . client . ServerConnectionError : pass
Establish a connection to the server at the front of the server_list .
35,081
def jump_server ( self , msg = "Changing servers" ) : if self . connection . is_connected ( ) : self . connection . disconnect ( msg ) next ( self . servers ) self . _connect ( )
Connect to a new server possibly disconnecting from the current .
35,082
def on_ctcp ( self , connection , event ) : nick = event . source . nick if event . arguments [ 0 ] == "VERSION" : connection . ctcp_reply ( nick , "VERSION " + self . get_version ( ) ) elif event . arguments [ 0 ] == "PING" : if len ( event . arguments ) > 1 : connection . ctcp_reply ( nick , "PING " + event . argumen...
Default handler for ctcp events .
35,083
def set_mode ( self , mode , value = None ) : if mode in self . user_modes : self . mode_users [ mode ] [ value ] = 1 else : self . modes [ mode ] = value
Set mode on the channel .
35,084
def clear_mode ( self , mode , value = None ) : try : if mode in self . user_modes : del self . mode_users [ mode ] [ value ] else : del self . modes [ mode ] except KeyError : pass
Clear mode on the channel .
35,085
def ip_numstr_to_quad ( num ) : packed = struct . pack ( '>L' , int ( num ) ) bytes = struct . unpack ( 'BBBB' , packed ) return "." . join ( map ( str , bytes ) )
Convert an IP number as an integer given in ASCII representation to an IP address string .
35,086
def as_nick ( self , name ) : orig = self . get_nickname ( ) self . nick ( name ) try : yield orig finally : self . nick ( orig )
Set the nick for the duration of the context .
35,087
def process_data ( self ) : "read and process input from self.socket" try : reader = getattr ( self . socket , 'read' , self . socket . recv ) new_data = reader ( 2 ** 14 ) except socket . error : self . disconnect ( "Connection reset by peer" ) return if not new_data : self . disconnect ( "Connection reset by peer" ) ...
read and process input from self . socket
35,088
def ctcp ( self , ctcptype , target , parameter = "" ) : ctcptype = ctcptype . upper ( ) tmpl = ( "\001{ctcptype} {parameter}\001" if parameter else "\001{ctcptype}\001" ) self . privmsg ( target , tmpl . format ( ** vars ( ) ) )
Send a CTCP command .
35,089
def kick ( self , channel , nick , comment = "" ) : self . send_items ( 'KICK' , channel , nick , comment and ':' + comment )
Send a KICK command .
35,090
def list ( self , channels = None , server = "" ) : self . send_items ( 'LIST' , ',' . join ( always_iterable ( channels ) ) , server )
Send a LIST command .
35,091
def part ( self , channels , message = "" ) : self . send_items ( 'PART' , ',' . join ( always_iterable ( channels ) ) , message )
Send a PART command .
35,092
def privmsg_many ( self , targets , text ) : target = ',' . join ( targets ) return self . privmsg ( target , text )
Send a PRIVMSG command to multiple targets .
35,093
def user ( self , username , realname ) : cmd = 'USER {username} 0 * :{realname}' . format ( ** locals ( ) ) self . send_raw ( cmd )
Send a USER command .
35,094
def whowas ( self , nick , max = "" , server = "" ) : self . send_items ( 'WHOWAS' , nick , max , server )
Send a WHOWAS command .
35,095
def set_keepalive ( self , interval ) : pinger = functools . partial ( self . ping , 'keep-alive' ) self . reactor . scheduler . execute_every ( period = interval , func = pinger )
Set a keepalive to occur every interval on this connection .
35,096
def server ( self ) : conn = self . connection_class ( self ) with self . mutex : self . connections . append ( conn ) return conn
Creates and returns a ServerConnection object .
35,097
def process_data ( self , sockets ) : with self . mutex : log . log ( logging . DEBUG - 2 , "process_data()" ) for sock , conn in itertools . product ( sockets , self . connections ) : if sock == conn . socket : conn . process_data ( )
Called when there is more data to read on connection sockets .
35,098
def process_once ( self , timeout = 0 ) : log . log ( logging . DEBUG - 2 , "process_once()" ) sockets = self . sockets if sockets : in_ , out , err = select . select ( sockets , [ ] , [ ] , timeout ) self . process_data ( in_ ) else : time . sleep ( timeout ) self . process_timeout ( )
Process data from connections once .
35,099
def disconnect_all ( self , message = "" ) : with self . mutex : for conn in self . connections : conn . disconnect ( message )
Disconnects all connections .