idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
41,800
def _normalize_group_dns(self, group_dns):
    """Converts one or more group DNs to an LDAPGroupQuery.

    Accepts an existing LDAPGroupQuery (returned as-is), a single DN
    string, or a non-empty list/tuple of DN strings (or-ed together).
    Anything else raises ValueError.
    """
    if isinstance(group_dns, LDAPGroupQuery):
        return group_dns
    if isinstance(group_dns, str):
        return LDAPGroupQuery(group_dns)
    if isinstance(group_dns, (list, tuple)) and len(group_dns) > 0:
        # Combine the individual queries with logical OR.
        return reduce(operator.or_, map(LDAPGroupQuery, group_dns))
    raise ValueError(group_dns)
Converts one or more group DNs to an LDAPGroupQuery .
41,801
def _normalize_mirror_settings(self):
    """Validates the group mirroring settings and converts them as necessary.

    MIRROR_GROUPS_EXCEPT must be a collection of group names;
    MIRROR_GROUPS must be True or a collection of group names.
    Lists/tuples are coerced to frozensets in place on the settings object.
    If both are given, MIRROR_GROUPS_EXCEPT wins and MIRROR_GROUPS is
    cleared with a warning.
    """
    def malformed_mirror_groups_except():
        return ImproperlyConfigured(
            "{} must be a collection of group names".format(
                self.settings._name("MIRROR_GROUPS_EXCEPT")
            )
        )

    def malformed_mirror_groups():
        return ImproperlyConfigured(
            "{} must be True or a collection of group names".format(
                self.settings._name("MIRROR_GROUPS")
            )
        )

    mge = self.settings.MIRROR_GROUPS_EXCEPT
    mg = self.settings.MIRROR_GROUPS

    if mge is not None:
        if isinstance(mge, (set, frozenset)):
            pass
        elif isinstance(mge, (list, tuple)):
            # Normalize mutable collections to an immutable frozenset.
            mge = self.settings.MIRROR_GROUPS_EXCEPT = frozenset(mge)
        else:
            raise malformed_mirror_groups_except()

        if not all(isinstance(value, str) for value in mge):
            raise malformed_mirror_groups_except()
        elif mg:
            # Both settings were given; MIRROR_GROUPS_EXCEPT takes precedence.
            warnings.warn(
                ConfigurationWarning(
                    "Ignoring {} in favor of {}".format(
                        self.settings._name("MIRROR_GROUPS"),
                        self.settings._name("MIRROR_GROUPS_EXCEPT"),
                    )
                )
            )
            mg = self.settings.MIRROR_GROUPS = None

    if mg is not None:
        if isinstance(mg, (bool, set, frozenset)):
            pass
        elif isinstance(mg, (list, tuple)):
            mg = self.settings.MIRROR_GROUPS = frozenset(mg)
        else:
            raise malformed_mirror_groups()

        # True is allowed as-is; collections must contain only strings.
        if isinstance(mg, (set, frozenset)) and (
            not all(isinstance(value, str) for value in mg)
        ):
            raise malformed_mirror_groups()
Validates the group mirroring settings and converts them as necessary .
41,802
def _get_groups(self):
    """Returns an _LDAPUserGroups object which can determine group membership.

    The object is created lazily on first access and cached on the instance.
    """
    groups = self._groups
    if groups is None:
        groups = _LDAPUserGroups(self)
        self._groups = groups
    return groups
Returns an _LDAPUserGroups object which can determine group membership .
41,803
def _bind(self):
    """Binds to the LDAP server with AUTH_LDAP_BIND_DN and AUTH_LDAP_BIND_PASSWORD.

    sticky=True marks the connection as bound so subsequent operations
    can reuse it without rebinding.
    """
    self._bind_as(self.settings.BIND_DN, self.settings.BIND_PASSWORD, sticky=True)
Binds to the LDAP server with AUTH_LDAP_BIND_DN and AUTH_LDAP_BIND_PASSWORD .
41,804
def _bind_as(self, bind_dn, bind_password, sticky=False):
    """Binds to the LDAP server with the given credentials.

    This does not trap exceptions. If sticky is True the bound state is
    remembered so the connection can be reused; otherwise the bind is
    treated as transient (e.g. a user-credential check).
    """
    self._get_connection().simple_bind_s(bind_dn, bind_password)
    self._connection_bound = sticky
Binds to the LDAP server with the given credentials . This does not trap exceptions .
41,805
def _init_group_settings(self):
    """Loads the settings we need to deal with groups.

    Raises ImproperlyConfigured unless both AUTH_LDAP_GROUP_TYPE and
    AUTH_LDAP_GROUP_SEARCH are configured.
    """
    self._group_type = group_type = self.settings.GROUP_TYPE
    if group_type is None:
        raise ImproperlyConfigured(
            "AUTH_LDAP_GROUP_TYPE must be an LDAPGroupType instance."
        )
    self._group_search = group_search = self.settings.GROUP_SEARCH
    if group_search is None:
        raise ImproperlyConfigured(
            "AUTH_LDAP_GROUP_SEARCH must be an LDAPSearch instance."
        )
Loads the settings we need to deal with groups .
41,806
def get_group_names(self):
    """Returns the set of Django group names that this user belongs to
    by virtue of LDAP group memberships.

    The result is memoized on the instance and, when available, in the
    external cache via _load_cached_attr/_cache_attr.
    """
    if self._group_names is None:
        # Try the external cache first.
        self._load_cached_attr("_group_names")
    if self._group_names is None:
        # Cache miss: derive names from the raw LDAP group info records.
        group_infos = self._get_group_infos()
        self._group_names = {
            self._group_type.group_name_from_info(group_info)
            for group_info in group_infos
        }
        self._cache_attr("_group_names")
    return self._group_names
Returns the set of Django group names that this user belongs to by virtue of LDAP group memberships .
41,807
def is_member_of(self, group_dn):
    """Returns true if our user is a member of the given group.

    If the full group DN set has not been loaded yet, the group type is
    given a chance to answer directly (which may be cheaper); otherwise
    membership is checked against the cached DN set.
    """
    is_member = None
    # DNs are case-insensitive; normalize for comparison.
    group_dn = group_dn.lower()
    if self._group_dns is None:
        is_member = self._group_type.is_member(self._ldap_user, group_dn)
    if is_member is None:
        is_member = group_dn in self.get_group_dns()
    logger.debug(
        "{} is{}a member of {}".format(
            self._ldap_user.dn, is_member and " " or " not ", group_dn
        )
    )
    return is_member
Returns true if our user is a member of the given group .
41,808
def get_ldap(cls, global_options=None):
    """Returns the configured ldap module.

    Global LDAP options are applied at most once per process; the
    _ldap_configured class flag guards against re-applying them.
    """
    if not cls._ldap_configured and global_options is not None:
        for opt, value in global_options.items():
            ldap.set_option(opt, value)
        cls._ldap_configured = True
    return ldap
Returns the configured ldap module .
41,809
def search_with_additional_terms(self, term_dict, escape=True):
    """Returns a new search object with additional search terms and-ed to
    the filter string.

    term_dict maps attribute names to assertion values. If you don't want
    the values escaped, pass escape=False.
    """
    extra_terms = []
    for attr, assertion in term_dict.items():
        if escape:
            assertion = self.ldap.filter.escape_filter_chars(assertion)
        extra_terms.append("({}={})".format(attr, assertion))
    combined = "(&{})".format("".join([self.filterstr] + extra_terms))
    return self.__class__(
        self.base_dn, self.scope, combined, attrlist=self.attrlist
    )
Returns a new search object with additional search terms and-ed to the filter string. term_dict maps attribute names to assertion values. If you don't want the values escaped, pass escape=False.
41,810
def user_groups(self, ldap_user, group_search):
    """Searches for any group that is either the user's primary group or
    contains the user as a member.

    Returns the raw search results; returns [] if the user record lacks
    the expected uid attribute.
    """
    groups = []
    try:
        user_uid = ldap_user.attrs["uid"][0]
        if "gidNumber" in ldap_user.attrs:
            user_gid = ldap_user.attrs["gidNumber"][0]
            # Match either the primary group (gidNumber) or memberUid lists.
            filterstr = "(|(gidNumber={})(memberUid={}))".format(
                self.ldap.filter.escape_filter_chars(user_gid),
                self.ldap.filter.escape_filter_chars(user_uid),
            )
        else:
            filterstr = "(memberUid={})".format(
                self.ldap.filter.escape_filter_chars(user_uid)
            )
        search = group_search.search_with_additional_term_string(filterstr)
        groups = search.execute(ldap_user.connection)
    except (KeyError, IndexError):
        # Missing/empty uid attribute: treat as no group memberships.
        pass
    return groups
Searches for any group that is either the user's primary group or contains the user as a member.
41,811
def is_member(self, ldap_user, group_dn):
    """Returns True if the group is the user's primary group or if the
    user is listed in the group's memberUid attribute.

    Uses server-side compare operations (compare_s) rather than fetching
    the group entry.
    """
    try:
        user_uid = ldap_user.attrs["uid"][0]
        try:
            is_member = ldap_user.connection.compare_s(
                group_dn, "memberUid", user_uid.encode()
            )
        except (ldap.UNDEFINED_TYPE, ldap.NO_SUCH_ATTRIBUTE):
            is_member = False
        if not is_member:
            try:
                user_gid = ldap_user.attrs["gidNumber"][0]
                is_member = ldap_user.connection.compare_s(
                    group_dn, "gidNumber", user_gid.encode()
                )
            except (ldap.UNDEFINED_TYPE, ldap.NO_SUCH_ATTRIBUTE):
                is_member = False
    except (KeyError, IndexError):
        # No uid attribute on the user: cannot be a posix group member.
        is_member = False
    return is_member
Returns True if the group is the user's primary group or if the user is listed in the group's memberUid attribute.
41,812
def user_groups(self, ldap_user, group_search):
    """This searches for all of a user's groups from the bottom up.

    In other words, it returns the groups that the user belongs to, the
    groups that those groups belong to, etc. Circular references will be
    detected and pruned (via handled_dn_set).
    """
    group_info_map = {}
    # Start the expansion from the user's own DN.
    member_dn_set = {ldap_user.dn}
    handled_dn_set = set()
    while len(member_dn_set) > 0:
        group_infos = self.find_groups_with_any_member(
            member_dn_set, group_search, ldap_user.connection
        )
        new_group_info_map = {info[0]: info for info in group_infos}
        group_info_map.update(new_group_info_map)
        handled_dn_set.update(member_dn_set)
        # Only expand DNs we have not already processed (cycle pruning).
        member_dn_set = set(new_group_info_map.keys()) - handled_dn_set
    return group_info_map.values()
This searches for all of a user's groups from the bottom up. In other words, it returns the groups that the user belongs to, the groups that those groups belong to, etc. Circular references will be detected and pruned.
41,813
def aggregator(self):
    """Returns a function for aggregating a sequence of sub-results.

    AND maps to all(), OR maps to any(); any other connector raises
    ValueError.
    """
    dispatch = {self.AND: all, self.OR: any}
    if self.connector not in dispatch:
        raise ValueError(self.connector)
    return dispatch[self.connector]
Returns a function for aggregating a sequence of sub - results .
41,814
def _resolve_children(self, ldap_user, groups):
    """Generates the query result for each child.

    Nested LDAPGroupQuery children are resolved recursively; plain DNs
    are tested for direct membership.
    """
    for subquery in self.children:
        yield (
            subquery.resolve(ldap_user, groups)
            if isinstance(subquery, LDAPGroupQuery)
            else groups.is_member_of(subquery)
        )
Generates the query result for each child .
41,815
def gather_positions(tree):
    """Makes a list of positions and position commands from the tree.

    Yields one position dict per <step> element. Values starting with 'r'
    are relative movements; 'r0' means "stay". The generator carries state
    between steps so explicit coordinates persist until overridden.
    """
    pos = {
        'data-x': 'r0',
        'data-y': 'r0',
        'data-z': 'r0',
        'data-rotate-x': 'r0',
        'data-rotate-y': 'r0',
        'data-rotate-z': 'r0',
        'data-scale': 'r0',
        'is_path': False,
    }
    steps = 0
    default_movement = True
    for step in tree.findall('step'):
        steps += 1
        for key in POSITION_ATTRIBS:
            value = step.get(key)
            if value is not None:
                # Any explicit position disables the default movement.
                default_movement = False
                pos[key] = value
            elif pos[key] and not pos[key].startswith('r'):
                # Absolute value from a previous step becomes "no move".
                pos[key] = 'r0'
        if steps == 1 and pos['data-scale'] == 'r0':
            pos['data-scale'] = '1'
        if default_movement and steps != 1:
            # No positioning given anywhere so far: slide to the right.
            pos['data-x'] = 'r%s' % DEFAULT_MOVEMENT
        if 'data-rotate' in step.attrib:
            # data-rotate is an alias for data-rotate-z.
            pos['data-rotate-z'] = step.get('data-rotate')
            del step.attrib['data-rotate']
        if 'hovercraft-path' in step.attrib:
            default_movement = False
            pos['is_path'] = True
            pos['path'] = step.attrib['hovercraft-path']
            yield pos.copy()
            # 'path' only applies to the step that declared it.
            del pos['path']
        else:
            if 'data-x' in step.attrib or 'data-y' in step.attrib:
                # Explicit coordinates break out of a running path.
                pos['is_path'] = False
            yield pos.copy()
Makes a list of positions and position commands from the tree
41,816
def calculate_positions(positions):
    """Calculates position information.

    Consumes the dicts produced by gather_positions and yields one fully
    resolved absolute-position dict per slide. Slides that follow an SVG
    path are laid out by sampling points along the path; everything else
    is accumulated via _update_position.
    """
    current_position = {
        'data-x': 0,
        'data-y': 0,
        'data-z': 0,
        'data-rotate-x': 0,
        'data-rotate-y': 0,
        'data-rotate-z': 0,
        'data-scale': 1,
    }
    positer = iter(positions)
    position = next(positer)
    _update_position(current_position, position)
    while True:
        if 'path' in position:
            # This slide starts an SVG path section.
            path = position['path']
            first_point = _pos_to_cord(current_position)
            # A path ending in Z/z is closed (loops back to its start).
            closed_path = path.strip()[-1].upper() == 'Z'
            path = parse_path(path)
            count = 1
            last = False
            deferred_positions = []
            # Count how many consecutive slides ride this path; their
            # position dicts are deferred and replayed during sampling.
            while True:
                try:
                    position = next(positer)
                    deferred_positions.append(position)
                except StopIteration:
                    last = True
                    break
                if not position.get('is_path') or 'path' in position:
                    break
                count += 1
            if count < 2:
                raise AssertionError(
                    "The path specification is only used for "
                    "one slide, which makes it pointless."
                )
            # For a closed path, the start point is reused, so sample one
            # extra segment.
            if closed_path:
                endcount = count + 1
            else:
                endcount = count
            multiplier = (endcount * DEFAULT_MOVEMENT) / path.length()
            offset = path.point(0)
            path_iter = iter(deferred_positions)
            for x in range(count):
                # Sample the path at evenly spaced parameter values and
                # translate/scale into slide coordinates.
                point = path.point(x / (endcount - 1))
                point = ((point - offset) * multiplier) + first_point
                current_position.update(_coord_to_pos(point))
                rotation = _path_angle(path, x / (endcount - 1))
                current_position['data-rotate-z'] = rotation
                yield current_position.copy()
                try:
                    position = next(path_iter)
                except StopIteration:
                    last = True
                    break
                _update_position(current_position, position)
            if last:
                break
            # The position that ended the path section still needs normal
            # handling; loop without consuming a new one.
            continue
        yield current_position.copy()
        try:
            position = next(positer)
        except StopIteration:
            break
        _update_position(current_position, position)
Calculates position information
41,817
def update_positions(tree, positions):
    """Updates the tree with new positions.

    Writes each resolved position dict back onto its <step> element.
    Keys ending in "-rel" are resolved against the element with the given
    id and written under the absolute key instead.
    """
    for step, pos in zip(tree.findall('step'), positions):
        for key in sorted(pos):
            value = pos.get(key)
            if key.endswith("-rel"):
                abs_key = key[:key.index("-rel")]
                if value is not None:
                    # NOTE(review): flattened source is ambiguous about
                    # whether the attrib write is inside this loop —
                    # confirm against upstream.
                    els = tree.findall(".//*[@id='" + value + "']")
                    for el in els:
                        pos[abs_key] = num(el.get(abs_key)) + pos.get(abs_key)
                        step.attrib[abs_key] = str(pos.get(abs_key))
            else:
                step.attrib[key] = str(pos[key])
        if 'hovercraft-path' in step.attrib:
            # The path directive is consumed here; drop it from the output.
            del step.attrib['hovercraft-path']
Updates the tree with new positions
41,818
def position_slides(tree):
    """Position the slides in the tree.

    Pipeline: gather raw position directives, resolve them into absolute
    coordinates, then write them back onto the step elements.
    """
    update_positions(tree, calculate_positions(gather_positions(tree)))
Position the slides in the tree
41,819
def copy_node(node):
    """Makes a copy of a node with the same attributes and text, but no
    children."""
    clone = node.makeelement(node.tag)
    clone.text = node.text
    clone.tail = node.tail
    for name, val in node.items():
        clone.set(name, val)
    return clone
Makes a copy of a node with the same attributes and text but no children .
41,820
def copy_resource(self, resource, targetdir):
    """Copies a resource file and returns the source path for monitoring.

    This is a generator: absolute paths and URLs (scheme/query present)
    are skipped entirely. NOTE(review): in the directory branch files are
    copied but nothing is yielded — only single-file resources yield a
    path for monitoring; confirm this is intentional.
    """
    final_path = resource.final_path()
    # Skip absolute paths and anything that looks like a URL.
    if final_path[0] == '/' or (':' in final_path) or ('?' in final_path):
        return
    source_path = self.get_source_path(resource)
    if resource.resource_type == DIRECTORY_RESOURCE:
        # Recursively copy every file under the directory resource.
        for file_path in glob.iglob(
            os.path.join(source_path, '**'), recursive=True
        ):
            if os.path.isdir(file_path):
                continue
            rest_target_path = file_path[len(source_path) + 1:]
            target_path = os.path.join(targetdir, final_path, rest_target_path)
            self._copy_file(file_path, target_path)
    else:
        target_path = os.path.join(targetdir, final_path)
        yield self._copy_file(source_path, target_path)
Copies a resource file and returns the source path for monitoring
41,821
def generate(args):
    """Generates the presentation and returns a list of files used.

    Renders the reST presentation to HTML with the chosen template, copies
    template and CSS-referenced resources into the target directory, and
    returns the set of absolute source paths (for file monitoring).
    """
    source_files = {args.presentation}
    template_info = Template(args.template)
    if args.css:
        presentation_dir = os.path.split(args.presentation)[0]
        target_path = os.path.relpath(args.css, presentation_dir)
        template_info.add_resource(
            args.css, CSS_RESOURCE, target=target_path, extra_info='all'
        )
        source_files.add(args.css)
    if args.js:
        presentation_dir = os.path.split(args.presentation)[0]
        target_path = os.path.relpath(args.js, presentation_dir)
        template_info.add_resource(
            args.js, JS_RESOURCE, target=target_path, extra_info=JS_POSITION_BODY
        )
        source_files.add(args.js)
    htmldata, dependencies = rst2html(
        args.presentation, template_info, args.auto_console, args.skip_help,
        args.skip_notes, args.mathjax, args.slide_numbers
    )
    source_files.update(dependencies)
    if not os.path.exists(args.targetdir):
        os.makedirs(args.targetdir)
    with open(os.path.join(args.targetdir, 'index.html'), 'wb') as outfile:
        outfile.write(htmldata)
    source_files.update(template_info.copy_resources(args.targetdir))
    sourcedir = os.path.split(os.path.abspath(args.presentation))[0]
    tree = html.fromstring(htmldata)
    for image in tree.iterdescendants('img'):
        filename = image.attrib['src']
        source_files.add(copy_resource(filename, sourcedir, args.targetdir))
    # NOTE(review): the regex literal appears to have been lost in
    # extraction — `br` alone is a NameError at runtime. The original was
    # presumably a bytes pattern matching url(...) references in CSS;
    # restore from upstream before running.
    RE_CSS_URL = re.compile(br)
    for resource in template_info.resources:
        if resource.resource_type != CSS_RESOURCE:
            continue
        # CSS in the template resolves relative to the template root;
        # user CSS resolves relative to the presentation.
        css_base = template_info.template_root if resource.is_in_template else sourcedir
        css_sourcedir = os.path.dirname(os.path.join(css_base, resource.filepath))
        css_targetdir = os.path.dirname(
            os.path.join(args.targetdir, resource.final_path())
        )
        uris = RE_CSS_URL.findall(template_info.read_data(resource))
        uris = [uri.decode() for uri in uris]
        if resource.is_in_template and template_info.builtin_template:
            for filename in uris:
                template_info.add_resource(
                    filename, OTHER_RESOURCE, target=css_targetdir,
                    is_in_template=True
                )
        else:
            for filename in uris:
                source_files.add(
                    copy_resource(filename, css_sourcedir, css_targetdir)
                )
    # Drop falsy entries (e.g. None returned by helpers) and normalize.
    return {os.path.abspath(f) for f in source_files if f}
Generates the presentation and returns a list of files used
41,822
def port(self, port=None):
    """Get or set TCP port.

    With no argument (or the current port) returns the current port.
    A new valid port (1-65535) closes the current connection and is
    stored; an invalid value returns None (connection already closed).
    """
    if (port is None) or (port == self.__port):
        return self.__port
    # Changing the port invalidates any open connection.
    self.close()
    if 0 < int(port) < 65536:
        self.__port = int(port)
        return self.__port
    else:
        return None
Get or set TCP port
41,823
def unit_id(self, unit_id=None):
    """Get or set unit ID field.

    Valid range is 0-255; returns the stored value on success, None on
    an out-of-range argument.
    """
    if unit_id is None:
        return self.__unit_id
    if 0 <= int(unit_id) < 256:
        self.__unit_id = int(unit_id)
        return self.__unit_id
    else:
        return None
Get or set unit ID field
41,824
def timeout(self, timeout=None):
    """Get or set timeout field.

    Accepts a timeout in seconds, exclusive range (0, 3600); returns the
    stored value on success, None on an out-of-range argument.
    """
    if timeout is None:
        return self.__timeout
    if 0 < float(timeout) < 3600:
        self.__timeout = float(timeout)
        return self.__timeout
    else:
        return None
Get or set timeout field
41,825
def debug(self, state=None):
    """Get or set debug mode.

    With no argument returns the current flag; otherwise coerces the
    argument to bool, stores it, and returns it.
    """
    if state is None:
        return self.__debug
    self.__debug = bool(state)
    return self.__debug
Get or set debug mode
41,826
def auto_open(self, state=None):
    """Get or set automatic TCP connect mode.

    With no argument returns the current flag; otherwise coerces the
    argument to bool, stores it, and returns it.
    """
    if state is None:
        return self.__auto_open
    self.__auto_open = bool(state)
    return self.__auto_open
Get or set automatic TCP connect mode
41,827
def _can_read(self):
    """Wait data available for socket read.

    Returns True when the socket is readable within the configured
    timeout; on timeout sets the last-error code, closes the socket and
    returns None. Also returns None if there is no socket.
    """
    if self.__sock is None:
        return None
    if select.select([self.__sock], [], [], self.__timeout)[0]:
        return True
    else:
        self.__last_error = const.MB_TIMEOUT_ERR
        self.__debug_msg('timeout error')
        self.close()
        return None
Wait data available for socket read
41,828
def _send(self, data):
    """Send data over current socket.

    Returns the number of bytes sent, or None on error (no socket, send
    failure, or short send), in which case the socket is closed and the
    last-error code is set.
    """
    if self.__sock is None:
        self.__debug_msg('call _send on close socket')
        return None
    data_l = len(data)
    try:
        send_l = self.__sock.send(data)
    except socket.error:
        send_l = None
    # A partial send is treated as a hard error here.
    if (send_l is None) or (send_l != data_l):
        self.__last_error = const.MB_SEND_ERR
        self.__debug_msg('_send error')
        self.close()
        return None
    else:
        return send_l
Send data over current socket
41,829
def _recv(self, max_size):
    """Receive data over current socket.

    Waits for readability first; returns up to max_size bytes, or None on
    timeout, socket error, or remote close (empty read). Error paths set
    the last-error code and close the socket.
    """
    if not self._can_read():
        self.close()
        return None
    try:
        r_buffer = self.__sock.recv(max_size)
    except socket.error:
        r_buffer = None
    # Empty bytes means the peer closed the connection.
    if not r_buffer:
        self.__last_error = const.MB_RECV_ERR
        self.__debug_msg('_recv error')
        self.close()
        return None
    return r_buffer
Receive data over current socket
41,830
def _send_mbus(self, frame):
    """Send modbus frame.

    Opens the connection first if auto-open mode is on. Returns the
    number of bytes sent, or None on failure. Dumps the frame when debug
    mode is enabled.
    """
    if self.__auto_open and not self.is_open():
        self.open()
    bytes_send = self._send(frame)
    if bytes_send:
        if self.__debug:
            self._pretty_dump('Tx', frame)
        return bytes_send
    else:
        return None
Send modbus frame
41,831
def _recv_mbus(self):
    """Receive a modbus frame.

    In TCP mode: reads and validates the 7-byte MBAP header, then the
    body. In RTU mode: reads a raw frame and validates length, CRC and
    unit ID. Returns the function-code+data body on success; None on any
    error or modbus exception response (last-error/last-except are set).
    """
    if self.__mode == const.MODBUS_TCP:
        # MBAP header: transaction id, protocol id, length, unit id.
        rx_buffer = self._recv_all(7)
        if not (rx_buffer and len(rx_buffer) == 7):
            self.__last_error = const.MB_RECV_ERR
            self.__debug_msg('_recv MBAP error')
            self.close()
            return None
        rx_frame = rx_buffer
        (rx_hd_tr_id, rx_hd_pr_id, rx_hd_length, rx_hd_unit_id) = struct.unpack(
            '>HHHB', rx_frame
        )
        if not (
            (rx_hd_tr_id == self.__hd_tr_id)
            and (rx_hd_pr_id == 0)
            and (rx_hd_length < 256)
            and (rx_hd_unit_id == self.__unit_id)
        ):
            self.__last_error = const.MB_RECV_ERR
            self.__debug_msg('MBAP format error')
            if self.__debug:
                # Drain and dump the body for diagnostics before closing.
                rx_frame += self._recv_all(rx_hd_length - 1)
                self._pretty_dump('Rx', rx_frame)
            self.close()
            return None
        # Body: length field counts the unit id byte, hence the -1.
        rx_buffer = self._recv_all(rx_hd_length - 1)
        if not (
            rx_buffer
            and (len(rx_buffer) == rx_hd_length - 1)
            and (len(rx_buffer) >= 2)
        ):
            self.__last_error = const.MB_RECV_ERR
            self.__debug_msg('_recv frame body error')
            self.close()
            return None
        rx_frame += rx_buffer
        if self.__debug:
            self._pretty_dump('Rx', rx_frame)
        rx_bd_fc = struct.unpack('B', rx_buffer[0:1])[0]
        f_body = rx_buffer[1:]
    elif self.__mode == const.MODBUS_RTU:
        rx_buffer = self._recv(256)
        if not rx_buffer:
            return None
        rx_frame = rx_buffer
        if self.__debug:
            self._pretty_dump('Rx', rx_frame)
        # Minimum RTU frame: unit id + fc + 1 data byte + 2 CRC bytes.
        if len(rx_buffer) < 5:
            self.__last_error = const.MB_RECV_ERR
            self.__debug_msg('short frame error')
            self.close()
            return None
        if not self._crc_is_ok(rx_frame):
            self.__last_error = const.MB_CRC_ERR
            self.__debug_msg('CRC error')
            self.close()
            return None
        (rx_unit_id, rx_bd_fc) = struct.unpack("BB", rx_frame[:2])
        if not (rx_unit_id == self.__unit_id):
            self.__last_error = const.MB_RECV_ERR
            self.__debug_msg('unit ID mismatch error')
            self.close()
            return None
        # Strip address/function-code header and trailing CRC.
        f_body = rx_frame[2:-2]
    if self.__auto_close:
        self.close()
    # Function codes above 0x80 signal a modbus exception response.
    if rx_bd_fc > 0x80:
        exp_code = struct.unpack('B', f_body[0:1])[0]
        self.__last_error = const.MB_EXCEPT_ERR
        self.__last_except = exp_code
        self.__debug_msg('except (code ' + str(exp_code) + ')')
        return None
    else:
        return f_body
Receive a modbus frame
41,832
def _wc_hard_wrap ( line , length ) : chars = [ ] chars_len = 0 for char in line : char_len = wcwidth ( char ) if chars_len + char_len > length : yield "" . join ( chars ) chars = [ ] chars_len = 0 chars . append ( char ) chars_len += char_len if chars : yield "" . join ( chars )
Wrap text to length characters breaking when target length is reached taking into account character width .
41,833
def wc_wrap(text, length):
    """Wrap text to given length, breaking on whitespace and taking into
    account character display width.

    Lines that still exceed `length` (a single over-long word) are
    hard-wrapped mid-word via _wc_hard_wrap.
    """
    line_words = []
    line_len = 0
    words = re.split(r"\s+", text.strip())
    for word in words:
        word_len = wcswidth(word)
        if line_words and line_len + word_len > length:
            line = " ".join(line_words)
            if line_len <= length:
                yield line
            else:
                yield from _wc_hard_wrap(line, length)
            line_words = []
            line_len = 0
        line_words.append(word)
        # +1 accounts for the joining space that follows this word.
        line_len += word_len + 1
    if line_words:
        line = " ".join(line_words)
        if line_len <= length:
            yield line
        else:
            yield from _wc_hard_wrap(line, length)
Wrap text to given length breaking on whitespace and taking into account character width .
41,834
def trunc(text, length):
    """Truncates text to given length, taking into account wide characters.

    Returns the stripped text unchanged if it already fits; otherwise
    removes characters from the end (plus one extra to make room for the
    ellipsis) and appends '…'. Raises ValueError for length < 1.
    """
    if length < 1:
        raise ValueError("length should be 1 or larger")
    text = text.strip()
    text_length = wcswidth(text)
    if text_length <= length:
        return text
    # Walk backwards until enough display columns have been removed.
    chars_to_truncate = 0
    trunc_length = 0
    for char in reversed(text):
        chars_to_truncate += 1
        trunc_length += wcwidth(char)
        if text_length - trunc_length <= length:
            break
    # +1 frees one extra column for the appended ellipsis.
    n = chars_to_truncate + 1
    return text[:-n].strip() + '…'
Truncates text to given length taking into account wide characters .
41,835
def pad(text, length):
    """Pads text to given length, taking into account wide characters."""
    deficit = length - wcswidth(text)
    if deficit > 0:
        return text + ' ' * deficit
    return text
Pads text to given length taking into account wide characters .
41,836
def fit_text(text, length):
    """Makes text fit the given length by padding or truncating it."""
    width = wcswidth(text)
    if width > length:
        return trunc(text, length)
    return pad(text, length) if width < length else text
Makes text fit the given length by padding or truncating it .
41,837
def _get_error_message ( response ) : try : data = response . json ( ) if "error_description" in data : return data [ 'error_description' ] if "error" in data : return data [ 'error' ] except Exception : pass return "Unknown error"
Attempt to extract an error message from response body
41,838
def _get_next_path ( headers ) : links = headers . get ( 'Link' , '' ) matches = re . match ( '<([^>]+)>; rel="next"' , links ) if matches : parsed = urlparse ( matches . group ( 1 ) ) return "?" . join ( [ parsed . path , parsed . query ] )
Given timeline response headers returns the path to the next batch
41,839
def select_previous(self):
    """Move to the previous status in the timeline.

    At the top of the list, shows a footer message instead of moving.
    """
    self.footer.clear_message()
    if self.selected == 0:
        self.footer.draw_message("Cannot move beyond first toot.", Color.GREEN)
        return
    old_index = self.selected
    new_index = self.selected - 1
    self.selected = new_index
    self.redraw_after_selection_change(old_index, new_index)
Move to the previous status in the timeline .
41,840
def select_next(self):
    """Move to the next status in the timeline.

    If the cursor is at the end of the loaded statuses, fetches the next
    batch first and redraws the list before moving.
    """
    self.footer.clear_message()
    old_index = self.selected
    new_index = self.selected + 1
    if self.selected + 1 >= len(self.statuses):
        self.fetch_next()
        self.left.draw_statuses(self.statuses, self.selected, new_index - 1)
        self.draw_footer_status()
    self.selected = new_index
    self.redraw_after_selection_change(old_index, new_index)
Move to the next status in the timeline .
41,841
def full_redraw(self):
    """Perform a full redraw of the UI.

    Repaints the status list, the selected-status pane, the header and
    the footer in that order.
    """
    self.left.draw_statuses(self.statuses, self.selected)
    self.right.draw(self.get_selected_status())
    self.header.draw(self.user)
    self.draw_footer_status()
Perform a full redraw of the UI .
41,842
def size_as_drawn(lines, screen_width):
    """Get the bottom-right corner of some text as would be drawn by
    draw_lines.

    Returns (row, col) as zero-based offsets: number of rendered rows
    minus one, and the length of the last rendered line minus one (0 for
    an empty last line).
    """
    y = 0
    x = 0
    for line in lines:
        wrapped = list(wc_wrap(line, screen_width))
        if len(wrapped) > 0:
            for wrapped_line in wrapped:
                x = len(wrapped_line)
                y += 1
        else:
            # An empty source line still occupies one rendered row.
            x = 0
            y += 1
    return y - 1, x - 1 if x != 0 else 0
Get the bottom - right corner of some text as would be drawn by draw_lines
41,843
def _find_account(app, user, account_name):
    """For a given account name, returns the Account object.

    Raises ConsoleError if the name is empty or no search result matches.
    """
    if not account_name:
        raise ConsoleError("Empty account name given")
    # Search with the name as given; strip a single leading @ only for
    # the exact-match comparison below.
    matches = api.search_accounts(app, user, account_name)
    wanted = account_name[1:] if account_name.startswith("@") else account_name
    for candidate in matches:
        if candidate['acct'] == wanted:
            return candidate
    raise ConsoleError("Account not found")
For a given account name returns the Account object .
41,844
def add_username(user, apps):
    """When using browser login the username was not stored, so look it up.

    Returns a new User with the username filled in, or None if there is
    no user or no app registered for the user's instance.
    """
    if not user:
        return None
    matching = [a for a in apps if a.instance == user.instance]
    if not matching:
        return None
    from toot.api import verify_credentials
    creds = verify_credentials(matching[-1], user)
    return User(user.instance, creds['username'], user.access_token)
When using browser login, the username was not stored, so look it up.
41,845
def get_text(html):
    """Converts html to text, strips all tags.

    BeautifulSoup warnings (e.g. about markup that looks like a URL) are
    suppressed; the result is NFKC-normalized.
    """
    with warnings.catch_warnings():
        warnings.simplefilter("ignore")
        # &apos; is not in the default HTML entity set handled here.
        text = BeautifulSoup(html.replace('&apos;', "'"), "html.parser").get_text()
    return unicodedata.normalize('NFKC', text)
Converts html to text strips all tags .
41,846
def parse_html(html):
    """Attempt to convert html to plain text while keeping line breaks.

    Returns a list of paragraphs, each being a list of lines.
    """
    # Split on <p> boundaries, drop empty fragments, then split each
    # paragraph on <br> tags and strip remaining markup per line.
    chunks = [c for c in re.split("</?p[^>]*>", html) if c]
    return [[get_text(line) for line in re.split("<br */?>", chunk)]
            for chunk in chunks]
Attempt to convert html to plain text while keeping line breaks . Returns a list of paragraphs each being a list of lines .
41,847
def format_content(content):
    """Given a status's content in HTML, converts it into lines of plain
    text.

    Yields the lines of each paragraph, with an empty line between
    paragraphs.
    """
    for index, paragraph in enumerate(parse_html(content)):
        if index > 0:
            yield ""
        yield from paragraph
Given a status's content in HTML, converts it into lines of plain text.
41,848
def multiline_input():
    """Lets user input multiple lines of text, terminated by EOF.

    Reads stdin line by line until EOF (Ctrl-D / Ctrl-Z) and returns the
    joined, stripped text.
    """
    lines = []
    while True:
        try:
            lines.append(input())
        except EOFError:
            break
    return "\n".join(lines).strip()
Lets user input multiple lines of text terminated by EOF .
41,849
def to_dict(self):
    """Converts the table to a dict.

    Rows are converted via their own to_dict; the table itself is
    iterable over its rows.
    """
    return {
        "name": self.table_name,
        "kind": self.table_kind,
        "data": [r.to_dict() for r in self],
    }
Converts the table to a dict .
41,850
def to_datetime(value):
    """Converts a string to a datetime.

    None passes through. Integers are handled with the permissive
    dateutil parser; strings are expected to be ISO 8601.
    """
    if value is None:
        return None
    if isinstance(value, six.integer_types):
        return parser.parse(value)
    return parser.isoparse(value)
Converts a string to a datetime .
41,851
def to_timedelta(value):
    """Converts a string to a timedelta.

    None passes through. Numbers are interpreted as ticks (100ns units,
    hence the /10 into microseconds). Strings must match the module's
    _TIMESPAN_PATTERN ([-][d.]hh:mm:ss[.fraction]); otherwise ValueError.
    """
    if value is None:
        return None
    if isinstance(value, (six.integer_types, float)):
        # Ticks are 100-nanosecond units: ticks / 10 == microseconds.
        return timedelta(microseconds=(float(value) / 10))
    match = _TIMESPAN_PATTERN.match(value)
    if match:
        if match.group(1) == "-":
            factor = -1
        else:
            factor = 1
        return factor * timedelta(
            days=int(match.group("d") or 0),
            hours=int(match.group("h")),
            minutes=int(match.group("m")),
            seconds=float(match.group("s")),
        )
    else:
        raise ValueError("Timespan value '{}' cannot be decoded".format(value))
Converts a string to a timedelta .
41,852
def acquire_authorization_header(self):
    """Acquire tokens from AAD.

    On ADAL failure, wraps the error in a KustoAuthenticationError
    annotated with the parameters relevant to the authentication method
    in use; unknown methods re-raise the original error.
    """
    try:
        return self._acquire_authorization_header()
    except AdalError as error:
        if self._authentication_method is AuthenticationMethod.aad_username_password:
            kwargs = {"username": self._username, "client_id": self._client_id}
        elif self._authentication_method is AuthenticationMethod.aad_application_key:
            kwargs = {"client_id": self._client_id}
        elif self._authentication_method is AuthenticationMethod.aad_device_login:
            kwargs = {"client_id": self._client_id}
        elif self._authentication_method is AuthenticationMethod.aad_application_certificate:
            kwargs = {"client_id": self._client_id, "thumbprint": self._thumbprint}
        else:
            raise error
        kwargs["resource"] = self._kusto_cluster
        kwargs["authority"] = self._adal_context.authority.url
        raise KustoAuthenticationError(
            self._authentication_method.value, error, **kwargs
        )
Acquire tokens from AAD .
41,853
def _execute(self, endpoint, database, query, default_timeout, properties=None):
    """Executes given query against this client.

    Posts the query as JSON to the endpoint with the standard Kusto
    client headers. Returns a V2 response for v2/rest/query endpoints,
    V1 otherwise; raises KustoServiceError on any non-200 status.
    """
    request_payload = {"db": database, "csl": query}
    if properties:
        request_payload["properties"] = properties.to_json()
    request_headers = {
        "Accept": "application/json",
        "Accept-Encoding": "gzip,deflate",
        "Content-Type": "application/json; charset=utf-8",
        "x-ms-client-version": "Kusto.Python.Client:" + VERSION,
        # Unique request id for server-side tracing.
        "x-ms-client-request-id": "KPC.execute;" + str(uuid.uuid4()),
    }
    if self._auth_provider:
        request_headers["Authorization"] = self._auth_provider.acquire_authorization_header()
    timeout = self._get_timeout(properties, default_timeout)
    response = self._session.post(
        endpoint, headers=request_headers, json=request_payload,
        timeout=timeout.seconds
    )
    if response.status_code == 200:
        if endpoint.endswith("v2/rest/query"):
            return KustoResponseDataSetV2(response.json())
        return KustoResponseDataSetV1(response.json())
    raise KustoServiceError([response.json()], response)
Executes given query against this client
41,854
def set_option(self, name, value):
    """Sets an option's value.

    The name is validated first by the module-level helper, which raises
    on unknown options.
    """
    _assert_value_is_valid(name)
    self._options[name] = value
Sets an option s value
41,855
def parse(cls, uri):
    """Parses uri into a ResourceUri object.

    NOTE(review): assumes the module-level _URI_FORMAT matches — a
    non-matching uri raises AttributeError on the None result.
    """
    match = _URI_FORMAT.search(uri)
    return cls(match.group(1), match.group(2), match.group(3), match.group(4))
Parses uri into a ResourceUri object
41,856
def get_mapping_format(self):
    """Dictating the corresponding mapping to the format.

    JSON and Avro map to themselves; every other format uses the CSV
    mapping.
    """
    if self.format in (DataFormat.json, DataFormat.avro):
        return self.format.name
    return DataFormat.csv.name
Dictating the corresponding mapping to the format .
41,857
def getAtomChars(t):
    """If t is an atom, return it as a string, otherwise raise
    InvalidTypeError."""
    s = c_char_p()
    if PL_get_atom_chars(t, byref(s)):
        return s.value
    else:
        raise InvalidTypeError("atom")
If t is an atom return it as a string otherwise raise InvalidTypeError .
41,858
def getBool(t):
    """If t is of type bool, return it, otherwise raise InvalidTypeError.

    NOTE(review): reads the value via PL_get_long into a c_int buffer —
    confirm the FFI signature tolerates this width mismatch.
    """
    b = c_int()
    if PL_get_long(t, byref(b)):
        return bool(b.value)
    else:
        raise InvalidTypeError("bool")
If t is of type bool return it otherwise raise InvalidTypeError .
41,859
def getLong(t):
    """If t is of type long, return it, otherwise raise InvalidTypeError."""
    i = c_long()
    if PL_get_long(t, byref(i)):
        return i.value
    else:
        raise InvalidTypeError("long")
If t is of type long return it otherwise raise InvalidTypeError .
41,860
def getFloat(t):
    """If t is of type float, return it, otherwise raise InvalidTypeError."""
    d = c_double()
    if PL_get_float(t, byref(d)):
        return d.value
    else:
        raise InvalidTypeError("float")
If t is of type float return it otherwise raise InvalidTypeError .
41,861
def getString(t):
    """If t is of type string, return it, otherwise raise InvalidTypeError.

    NOTE(review): the returned length (slen) is discarded; the value is
    read as a NUL-terminated char pointer.
    """
    slen = c_int()
    s = c_char_p()
    if PL_get_string_chars(t, byref(s), byref(slen)):
        return s.value
    else:
        raise InvalidTypeError("string")
If t is of type string return it otherwise raise InvalidTypeError .
41,862
def getList(x):
    """Return t as a list.

    Walks the Prolog list cells, converting each head term via getTerm.
    A fresh head ref is allocated per element so converted terms stay
    valid.
    """
    # Work on a copy so the caller's term ref is not advanced.
    t = PL_copy_term_ref(x)
    head = PL_new_term_ref()
    result = []
    while PL_get_list(t, head, t):
        result.append(getTerm(head))
        head = PL_new_term_ref()
    return result
Return t as a list .
41,863
def fromTerm(cls, term):
    """Create an atom from a Term or term handle.

    Accepts a Term (unwrapped to its handle) or a raw handle
    (c_void_p/int); anything else raises ArgumentTypeError. Returns None
    if the term is not an atom.
    """
    if isinstance(term, Term):
        term = term.handle
    elif not isinstance(term, (c_void_p, int)):
        raise ArgumentTypeError((str(Term), str(c_void_p)), str(type(term)))
    a = atom_t()
    if PL_get_atom(term, byref(a)):
        return cls(a.value)
Create an atom from a Term or term handle .
41,864
def fromTerm(cls, term):
    """Create a functor from a Term or term handle.

    Accepts a Term (unwrapped to its handle) or a raw handle; anything
    else raises ArgumentTypeError. Extracts all arguments into fresh term
    refs. Returns None if the term is not a compound/functor.
    """
    if isinstance(term, Term):
        term = term.handle
    elif not isinstance(term, (c_void_p, int)):
        raise ArgumentTypeError((str(Term), str(int)), str(type(term)))
    f = functor_t()
    if PL_get_functor(term, byref(f)):
        args = []
        arity = PL_functor_arity(f.value)
        # Allocate a contiguous run of term refs for the arguments.
        a0 = PL_new_term_refs(arity)
        for i, a in enumerate(range(1, arity + 1)):
            if PL_get_arg(a, term, a0 + i):
                args.append(getTerm(a0 + i))
        return cls(f.value, args=args, a0=a0)
Create a functor from a Term or term handle .
41,865
def _findSwiplWin ( ) : import re dllNames = ( 'swipl.dll' , 'libswipl.dll' ) programFiles = os . getenv ( 'ProgramFiles' ) paths = [ os . path . join ( programFiles , r'pl\bin' , dllName ) for dllName in dllNames ] for path in paths : if os . path . exists ( path ) : return ( path , None ) path = _findSwiplPathFromFindLib ( ) if path is not None and os . path . exists ( path ) : return ( path , None ) try : cmd = Popen ( [ 'reg' , 'query' , r'HKEY_LOCAL_MACHINE\Software\SWI\Prolog' , '/v' , 'home' ] , stdout = PIPE ) ret = cmd . communicate ( ) ret = ret [ 0 ] . splitlines ( ) ret = [ line . decode ( "utf-8" ) for line in ret if len ( line ) > 0 ] pattern = re . compile ( '[^h]*home[^R]*REG_SZ( |\t)*(.*)$' ) match = pattern . match ( ret [ - 1 ] ) if match is not None : path = match . group ( 2 ) paths = [ os . path . join ( path , 'bin' , dllName ) for dllName in dllNames ] for path in paths : if os . path . exists ( path ) : return ( path , None ) except OSError : pass ( path , swiHome ) = _findSwiplFromExec ( ) if path is not None : return ( path , swiHome ) for dllName in dllNames : if os . path . exists ( dllName ) : return ( dllName , None ) return ( None , None )
This function uses several heuristics to guess where SWI - Prolog is installed in Windows . It always returns None as the path of the resource file because in Windows the way to find it is more robust , so the SWI - Prolog DLL is always able to find it .
41,866
def _findSwiplLin ( ) : ( path , swiHome ) = _findSwiplFromExec ( ) if path is not None : return ( path , swiHome ) path = _findSwiplPathFromFindLib ( ) if path is not None : return ( path , swiHome ) paths = [ '/lib' , '/usr/lib' , '/usr/local/lib' , '.' , './lib' ] names = [ 'libswipl.so' , 'libpl.so' ] path = None for name in names : for try_ in paths : try_ = os . path . join ( try_ , name ) if os . path . exists ( try_ ) : path = try_ break if path is not None : return ( path , swiHome ) return ( None , None )
This function uses several heuristics to guess where SWI - Prolog is installed on Linux .
41,867
def _findSwiplDar ( ) : ( path , swiHome ) = _findSwiplFromExec ( ) if path is not None : return ( path , swiHome ) path = _findSwiplPathFromFindLib ( ) if path is not None : return ( path , swiHome ) paths = [ '.' , './lib' , '/usr/lib/' , '/usr/local/lib' , '/opt/local/lib' ] names = [ 'libswipl.dylib' , 'libpl.dylib' ] for name in names : for path in paths : path = os . path . join ( path , name ) if os . path . exists ( path ) : return ( path , None ) return ( None , None )
This function uses several heuristics to guess where SWI - Prolog is installed in MacOS .
41,868
def _fixWindowsPath ( dll ) : if sys . platform [ : 3 ] != 'win' : return pathToDll = os . path . dirname ( dll ) currentWindowsPath = os . getenv ( 'PATH' ) if pathToDll not in currentWindowsPath : newPath = pathToDll + ';' + currentWindowsPath os . putenv ( 'PATH' , newPath )
When the path to the DLL is not in the Windows search path , Windows will not be able to find other DLLs in the same directory , so we have to add it to the path . This function takes care of it .
41,869
def list_to_bytes_list ( strList ) : pList = c_char_p * len ( strList ) if isinstance ( strList , ( pList , type ( None ) ) ) : return strList if not isinstance ( strList , ( list , set , tuple ) ) : raise TypeError ( "strList must be list, set or tuple, not " + str ( type ( strList ) ) ) pList = pList ( ) for i , elem in enumerate ( strList ) : pList [ i ] = str_to_bytes ( elem ) return pList
This function turns an array of strings into a pointer array with pointers pointing to the encodings of those strings . Any bytes objects that are already contained are kept as they are .
41,870
def check_strings ( strings , arrays ) : if isinstance ( strings , int ) : strings = [ strings ] elif strings is None : strings = [ ] for i , k in enumerate ( strings ) : if not isinstance ( k , int ) : raise TypeError ( ( 'Wrong type for index at {0} ' + 'in strings. Must be int, not {1}!' ) . format ( i , k ) ) if isinstance ( arrays , int ) : arrays = [ arrays ] elif arrays is None : arrays = [ ] for i , k in enumerate ( arrays ) : if not isinstance ( k , int ) : raise TypeError ( ( 'Wrong type for index at {0} ' + 'in arrays. Must be int, not {1}!' ) . format ( i , k ) ) if set ( strings ) . intersection ( arrays ) : raise ValueError ( 'One or more elements occur in both arrays and ' + ' strings. One parameter cannot be both list and string!' ) def checker ( func ) : def check_and_call ( * args ) : args = list ( args ) for i in strings : arg = args [ i ] args [ i ] = str_to_bytes ( arg ) for i in arrays : arg = args [ i ] args [ i ] = list_to_bytes_list ( arg ) return func ( * args ) return check_and_call return checker
Decorator function which can be used to automatically turn an incoming string into a bytes object and an incoming list to a pointer array if necessary .
41,871
def add ( self , item ) : def result_fnc ( f ) : if f . result ( ) : return True raise Full ( "Queue is full!" ) return self . offer ( item ) . continue_with ( result_fnc )
Adds the specified item to this queue if there is available space .
41,872
def add_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( queue_add_all_codec , data_list = data_items )
Adds the elements in the specified collection to this queue .
41,873
def contains_all ( self , items ) : check_not_none ( items , "Items can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "item can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( queue_contains_all_codec , data_list = data_items )
Determines whether this queue contains all of the items in the specified collection or not .
41,874
def drain_to ( self , list , max_size = - 1 ) : def drain_result ( f ) : resp = f . result ( ) list . extend ( resp ) return len ( resp ) return self . _encode_invoke ( queue_drain_to_max_size_codec , max_size = max_size ) . continue_with ( drain_result )
Transfers all available items to the given list and removes these items from this queue . If a max_size is specified , it transfers at most the given number of items . In case of a failure , an item can exist in both collections or in neither of them .
41,875
def put ( self , item ) : check_not_none ( item , "Value can't be None" ) element_data = self . _to_data ( item ) return self . _encode_invoke ( queue_put_codec , value = element_data )
Adds the specified element into this queue . If there is no space it waits until necessary space becomes available .
41,876
def remove_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( queue_compare_and_remove_all_codec , data_list = data_items )
Removes all of the elements of the specified collection from this queue .
41,877
def retain_all ( self , items ) : check_not_none ( items , "Value can't be None" ) data_items = [ ] for item in items : check_not_none ( item , "Value can't be None" ) data_items . append ( self . _to_data ( item ) ) return self . _encode_invoke ( queue_compare_and_retain_all_codec , data_list = data_items )
Removes the items which are not contained in the specified collection . In other words only the items that are contained in the specified collection will be retained .
41,878
def get_and_add ( self , delta ) : return self . _invoke_internal ( pn_counter_add_codec , delta = delta , get_before_update = True )
Adds the given value to the current value and returns the previous value .
41,879
def add_and_get ( self , delta ) : return self . _invoke_internal ( pn_counter_add_codec , delta = delta , get_before_update = False )
Adds the given value to the current value and returns the updated value .
41,880
def get_and_subtract ( self , delta ) : return self . _invoke_internal ( pn_counter_add_codec , delta = - 1 * delta , get_before_update = True )
Subtracts the given value from the current value and returns the previous value .
41,881
def subtract_and_get ( self , delta ) : return self . _invoke_internal ( pn_counter_add_codec , delta = - 1 * delta , get_before_update = False )
Subtracts the given value from the current value and returns the updated value .
41,882
def shutdown ( self ) : if self . lifecycle . is_live : self . lifecycle . fire_lifecycle_event ( LIFECYCLE_STATE_SHUTTING_DOWN ) self . near_cache_manager . destroy_all_near_caches ( ) self . statistics . shutdown ( ) self . partition_service . shutdown ( ) self . heartbeat . shutdown ( ) self . cluster . shutdown ( ) self . reactor . shutdown ( ) self . lifecycle . fire_lifecycle_event ( LIFECYCLE_STATE_SHUTDOWN ) self . logger . info ( "Client shutdown." , extra = self . _logger_extras )
Shuts down this HazelcastClient .
41,883
def publish ( self , message ) : message_data = self . _to_data ( message ) self . _encode_invoke ( topic_publish_codec , message = message_data )
Publishes the message to all subscribers of this topic .
41,884
def remove_listener ( self , registration_id ) : return self . _stop_listening ( registration_id , lambda i : topic_remove_message_listener_codec . encode_request ( self . name , i ) )
Stops receiving messages for the given message listener . If the given listener was already removed , this method does nothing .
41,885
def validate_serializer ( serializer , _type ) : if not issubclass ( serializer , _type ) : raise ValueError ( "Serializer should be an instance of {}" . format ( _type . __name__ ) )
Validates the serializer for given type .
41,886
def create_exception ( error_codec ) : if error_codec . error_code in ERROR_CODE_TO_ERROR : return ERROR_CODE_TO_ERROR [ error_codec . error_code ] ( error_codec . message ) stack_trace = "\n" . join ( [ "\tat %s.%s(%s:%s)" % ( x . declaring_class , x . method_name , x . file_name , x . line_number ) for x in error_codec . stack_trace ] ) message = "Got exception from server:\n %s: %s\n %s" % ( error_codec . class_name , error_codec . message , stack_trace ) return HazelcastError ( message )
Creates an exception with given error codec .
41,887
def capacity ( self ) : if not self . _capacity : def cache_capacity ( f ) : self . _capacity = f . result ( ) return f . result ( ) return self . _encode_invoke ( ringbuffer_capacity_codec ) . continue_with ( cache_capacity ) return ImmediateFuture ( self . _capacity )
Returns the capacity of this Ringbuffer .
41,888
def read_one ( self , sequence ) : check_not_negative ( sequence , "sequence can't be smaller than 0" ) return self . _encode_invoke ( ringbuffer_read_one_codec , sequence = sequence )
Reads one item from the Ringbuffer . If the sequence is one beyond the current tail , this call blocks until an item is added . Currently it isn't possible to control how long this call is going to block .
41,889
def read_many ( self , start_sequence , min_count , max_count ) : check_not_negative ( start_sequence , "sequence can't be smaller than 0" ) check_true ( max_count >= min_count , "max count should be greater or equal to min count" ) check_true ( min_count <= self . capacity ( ) . result ( ) , "min count should be smaller or equal to capacity" ) check_true ( max_count < MAX_BATCH_SIZE , "max count can't be greater than %d" % MAX_BATCH_SIZE ) return self . _encode_invoke ( ringbuffer_read_many_codec , response_handler = self . _read_many_response_handler , start_sequence = start_sequence , min_count = min_count , max_count = max_count , filter = None )
Reads a batch of items from the Ringbuffer . If the number of available items after the first read item is smaller than the max_count , only these items are returned . So it is possible that the number of items read is smaller than the max_count . If there are fewer items available than min_count , then this call blocks . Reading a batch of items is likely to perform better because less overhead is involved .
41,890
def init ( self , initial ) : if initial <= 0 : return False step = initial // BLOCK_SIZE with self . _lock : init = self . _atomic_long . compare_and_set ( 0 , step + 1 ) . result ( ) if init : self . _local = step self . _residue = ( initial % BLOCK_SIZE ) + 1 return init
Try to initialize this IdGenerator instance with the given id . The first generated id will be 1 greater than id .
41,891
def new_id ( self ) : with self . _lock : curr = self . _residue self . _residue += 1 if self . _residue >= BLOCK_SIZE : increment = self . _atomic_long . get_and_increment ( ) . result ( ) self . _local = increment self . _residue = 0 return self . new_id ( ) return self . _local * BLOCK_SIZE + curr
Generates and returns a cluster - wide unique id . Generated ids are guaranteed to be unique for the entire cluster as long as the cluster is live . If the cluster restarts then id generation will start from 0 .
41,892
def execute_on_key_owner ( self , key , task ) : check_not_none ( key , "key can't be None" ) key_data = self . _to_data ( key ) partition_id = self . _client . partition_service . get_partition_id ( key_data ) uuid = self . _get_uuid ( ) return self . _encode_invoke_on_partition ( executor_service_submit_to_partition_codec , partition_id , uuid = uuid , callable = self . _to_data ( task ) , partition_id = partition_id )
Executes a task on the owner of the specified key .
41,893
def execute_on_member ( self , member , task ) : uuid = self . _get_uuid ( ) address = member . address return self . _execute_on_member ( address , uuid , self . _to_data ( task ) )
Executes a task on the specified member .
41,894
def execute_on_members ( self , members , task ) : task_data = self . _to_data ( task ) futures = [ ] uuid = self . _get_uuid ( ) for member in members : f = self . _execute_on_member ( member . address , uuid , task_data ) futures . append ( f ) return future . combine_futures ( * futures )
Executes a task on each of the specified members .
41,895
def execute_on_all_members ( self , task ) : return self . execute_on_members ( self . _client . cluster . get_member_list ( ) , task )
Executes a task on all of the known cluster members .
41,896
def add_listener ( self , on_lifecycle_change ) : id = str ( uuid . uuid4 ( ) ) self . _listeners [ id ] = on_lifecycle_change return id
Add a listener object to listen for lifecycle events .
41,897
def remove_listener ( self , registration_id ) : try : self . _listeners . pop ( registration_id ) return True except KeyError : return False
Removes a lifecycle listener .
41,898
def fire_lifecycle_event ( self , new_state ) : if new_state == LIFECYCLE_STATE_SHUTTING_DOWN : self . is_live = False self . state = new_state self . logger . info ( self . _git_info + "HazelcastClient is %s" , new_state , extra = self . _logger_extras ) for listener in list ( self . _listeners . values ( ) ) : try : listener ( new_state ) except : self . logger . exception ( "Exception in lifecycle listener" , extra = self . _logger_extras )
Called when the instance's state changes .
41,899
def lock ( self , lease_time = - 1 ) : return self . _encode_invoke ( lock_lock_codec , invocation_timeout = MAX_SIZE , lease_time = to_millis ( lease_time ) , thread_id = thread_id ( ) , reference_id = self . reference_id_generator . get_and_increment ( ) )
Acquires the lock . If a lease time is specified lock will be released after this lease time .