idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
46,300
def parse_address(address: str) -> str:
    """Parse an email address, falling back to the raw string given."""
    _, extracted = email.utils.parseaddr(address)
    if extracted:
        return extracted
    return address
Parse an email address falling back to the raw string given .
46,301
def quote_address(address: str) -> str:
    """Quote a subset of the email addresses defined by RFC 821."""
    _, extracted = email.utils.parseaddr(address)
    target = extracted if extracted else address.strip()
    return "<{}>".format(target)
Quote a subset of the email addresses defined by RFC 821 .
46,302
def _extract_sender(
        message: Message,
        resent_dates: List[Union[str, Header]] = None) -> str:
    """Extract the sender from the message object given.

    Prefers the (Resent-)Sender header, falling back to (Resent-)From.
    Returns "" when neither header is present.
    """
    prefix = "Resent-" if resent_dates else ""
    if (prefix + "Sender") in message:
        sender = message[prefix + "Sender"]
    else:
        sender = message[prefix + "From"]
    return str(sender) if sender else ""
Extract the sender from the message object given .
46,303
def _extract_recipients(
        message: Message,
        resent_dates: List[Union[str, Header]] = None) -> List[str]:
    """Extract the recipients from the message object given.

    Collects (Resent-)To/Cc/Bcc headers and normalizes them through
    getaddresses/formataddr.
    """
    if resent_dates:
        recipient_headers = ("Resent-To", "Resent-Cc", "Resent-Bcc")
    else:
        recipient_headers = ("To", "Cc", "Bcc")
    raw = []
    for name in recipient_headers:
        raw += message.get_all(name, [])
    return [
        str(email.utils.formataddr(pair))
        for pair in email.utils.getaddresses(raw)
    ]
Extract the recipients from the message object given .
46,304
async def execute_command(
        self, *args: bytes, timeout: DefaultNumType = _default) -> SMTPResponse:
    """Check that we're connected, and if we got a timeout value, pass the
    command on to the protocol.

    Closes the connection when the server disconnects mid-command or when it
    answers with a "domain unavailable" status (the server is about to close
    its side), then re-raises/returns accordingly.
    """
    if timeout is _default:
        timeout = self.timeout
    self._raise_error_if_disconnected()
    try:
        response = await self.protocol.execute_command(*args, timeout=timeout)
    except SMTPServerDisconnected:
        # Server hung up mid-command; drop our side too so connection
        # state stays consistent, then propagate.
        self.close()
        raise
    if response.code == SMTPStatus.domain_unavailable:
        # Server announced it is closing the connection; mirror that.
        self.close()
    return response
Check that we re connected if we got a timeout value and then pass the command to the protocol .
46,305
def _get_tls_context(self) -> ssl.SSLContext:
    """Build an SSLContext object from the options we've been given.

    A caller-supplied ``tls_context`` is returned untouched; otherwise a
    default context is configured from validate_certs/cert_bundle/client_cert.
    """
    if self.tls_context is not None:
        return self.tls_context
    context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH)
    validate = bool(self.validate_certs)
    # check_hostname must be set before verify_mode when disabling checks.
    context.check_hostname = validate
    context.verify_mode = ssl.CERT_REQUIRED if validate else ssl.CERT_NONE
    if self.cert_bundle is not None:
        context.load_verify_locations(cafile=self.cert_bundle)
    if self.client_cert is not None:
        context.load_cert_chain(self.client_cert, keyfile=self.client_key)
    return context
Build an SSLContext object from the options we ve been given .
46,306
def _raise_error_if_disconnected(self) -> None:
    """See if we're still connected, and if not, raise
    ``SMTPServerDisconnected`` (closing our side first)."""
    connected = (
        self.transport is not None
        and self.protocol is not None
        and not self.transport.is_closing()
    )
    if not connected:
        self.close()
        raise SMTPServerDisconnected("Disconnected from SMTP server")
See if we re still connected and if not raise SMTPServerDisconnected .
46,307
async def sendmail(
    self,
    sender: str,
    recipients: RecipientsType,
    message: Union[str, bytes],
    mail_options: Iterable[str] = None,
    rcpt_options: Iterable[str] = None,
    timeout: DefaultNumType = _default,
) -> SendmailResponseType:
    """This command performs an entire mail transaction.

    Issues MAIL FROM, one RCPT TO per recipient, then DATA, and returns a
    ``(recipient_errors, response_message)`` tuple.  On an SMTP error the
    transaction is reset with RSET (best-effort) before the original
    exception is re-raised.
    """
    # Normalize to fresh lists so the appends below never mutate
    # caller-supplied arguments.
    if isinstance(recipients, str):
        recipients = [recipients]
    else:
        recipients = list(recipients)
    if mail_options is None:
        mail_options = []
    else:
        mail_options = list(mail_options)
    if rcpt_options is None:
        rcpt_options = []
    else:
        rcpt_options = list(rcpt_options)
    # Only one MAIL/RCPT/DATA sequence may be in flight per connection.
    async with self._sendmail_lock:
        if self.supports_extension("size"):
            # Advertise the message size via the ESMTP SIZE extension.
            size_option = "size={}".format(len(message))
            mail_options.append(size_option)
        try:
            await self.mail(sender, options=mail_options, timeout=timeout)
            recipient_errors = await self._send_recipients(
                recipients, options=rcpt_options, timeout=timeout)
            response = await self.data(message, timeout=timeout)
        except (SMTPResponseException, SMTPRecipientsRefused) as exc:
            # Best-effort reset so the connection can be reused; ignore
            # failures while resetting since we are already erroring out.
            try:
                await self.rset(timeout=timeout)
            except (ConnectionError, SMTPResponseException):
                pass
            raise exc
    return recipient_errors, response.message
This command performs an entire mail transaction .
46,308
def _run_sync(self, method: Callable, *args, **kwargs) -> Any:
    """Utility method to run commands synchronously for testing.

    Connects if needed, runs ``method(*args, **kwargs)`` to completion on
    the instance's event loop, then issues QUIT and returns the result.

    :raises RuntimeError: if the event loop is already running.
    """
    if self.loop.is_running():
        raise RuntimeError("Event loop is already running.")
    if not self.is_connected:
        self.loop.run_until_complete(self.connect())
    # BUG FIX: the original constructed asyncio.Task(...) directly, which
    # the asyncio docs discourage (and whose explicit `loop` wiring is
    # version-fragile).  run_until_complete wraps a coroutine in a task
    # on the right loop itself.
    result = self.loop.run_until_complete(method(*args, **kwargs))
    self.loop.run_until_complete(self.quit())
    return result
Utility method to run commands synchronously for testing .
46,309
def check_announcements():
    """Check for any announcements from cs50.me; raise Error if any exist."""
    res = requests.get("https://cs50.me/status/submit50")
    announcement = res.text.strip()
    if res.status_code == 200 and announcement:
        raise Error(announcement)
Check for any announcements from cs50.me; raise Error if any exist.
46,310
def check_version():
    """Check that submit50 is the latest version according to submit50.io."""
    res = requests.get("https://cs50.me/versions/submit50")
    if res.status_code != 200:
        raise Error(_("You have an unknown version of submit50. "
                      "Email sysadmins@cs50.harvard.edu!"))
    # NOTE(review): the parsed required version is assigned but never
    # compared against the installed version here -- this looks truncated
    # or unfinished; confirm against the full source before relying on it.
    required_required = pkg_resources.parse_version(res.text.strip())
Check that submit50 is the latest version according to submit50 . io .
46,311
def excepthook(type, value, tb):
    """Report an exception to the user with colored, friendly output."""
    recognized = (issubclass(type, Error) or issubclass(type, lib50.Error)) and str(value)
    if recognized:
        for line in str(value).split("\n"):
            cprint(str(line), "yellow")
    else:
        cprint(_("Sorry, something's wrong! Let sysadmins@cs50.harvard.edu know!"), "yellow")
    if excepthook.verbose:
        traceback.print_exception(type, value, tb)
    cprint(_("Submission cancelled."), "red")
Report an exception .
46,312
def generate_transaction_id(stmt_line):
    """Generate pseudo-unique id for given statement line.

    Derived from the line's (date, memo, amount).  BUG FIX: the original
    used the builtin hash(), whose str hashing is randomized per process
    (PYTHONHASHSEED), so the "id" changed between runs.  A sha256 of the
    tuple's repr() is deterministic across runs.  NOTE: ids produced by
    this version differ from those of the old hash()-based version.
    """
    fingerprint = repr((stmt_line.date, stmt_line.memo, stmt_line.amount))
    digest = hashlib.sha256(fingerprint.encode("utf-8")).hexdigest()
    # Keep the historical "string of decimal digits" shape of the id.
    return str(int(digest[:16], 16))
Generate pseudo - unique id for given statement line .
46,313
def recalculate_balance(stmt):
    """Recalculate statement starting and ending dates and balances."""
    amounts = [line.amount for line in stmt.lines]
    dates = [line.date for line in stmt.lines]
    if not stmt.start_balance:
        stmt.start_balance = D(0)
    stmt.end_balance = stmt.start_balance + sum(amounts)
    stmt.start_date = min(dates)
    stmt.end_date = max(dates)
Recalculate statement starting and ending dates and balances .
46,314
def assert_valid(self):
    """Ensure that fields have valid values."""
    known_type = self.trntype in TRANSACTION_TYPES
    assert known_type, "trntype must be one of %s" % TRANSACTION_TYPES
    if self.bank_account_to:
        self.bank_account_to.assert_valid()
Ensure that fields have valid values
46,315
def parse(self):
    """Read and parse the statement, returning the populated statement."""
    for record in self.split_records():
        self.cur_record += 1
        if not record:
            continue
        parsed = self.parse_record(record)
        if not parsed:
            continue
        parsed.assert_valid()
        self.statement.lines.append(parsed)
    return self.statement
Read and parse statement
46,316
def acquire_auth_token_ticket(self, headers=None):
    """Acquire an auth token ticket from the CAS server."""
    logging.debug('[CAS] Acquiring Auth token ticket')
    response_text = self._perform_post(
        self._get_auth_token_tickets_url(), headers=headers)
    ticket = json.loads(response_text)['ticket']
    logging.debug('[CAS] Acquire Auth token ticket: {}'.format(ticket))
    return ticket
Acquire an auth token from the CAS server .
46,317
def create_session(self, ticket, payload=None, expires=None):
    """Create a session record from a service ticket."""
    assert isinstance(self.session_storage_adapter, CASSessionAdapter)
    logging.debug('[CAS] Creating session for ticket {}'.format(ticket))
    self.session_storage_adapter.create(
        ticket, payload=payload, expires=expires)
Create a session record from a service ticket .
46,318
def delete_session(self, ticket):
    """Delete the session record associated with a service ticket."""
    assert isinstance(self.session_storage_adapter, CASSessionAdapter)
    logging.debug('[CAS] Deleting session for ticket {}'.format(ticket))
    self.session_storage_adapter.delete(ticket)
Delete a session record associated with a service ticket .
46,319
def get_api_url(self, api_resource, auth_token_ticket, authenticator,
                private_key, service_url=None, **kwargs):
    """Build an auth-token-protected CAS API url."""
    token, signature = self._build_auth_token_data(
        auth_token_ticket, authenticator, private_key, **kwargs)
    params = {'at': token, 'ats': signature}
    if service_url is not None:
        params['service'] = service_url
    return '{}?{}'.format(self._get_api_url(api_resource), urlencode(params))
Build an auth - token - protected CAS API url .
46,320
def get_auth_token_login_url(self, auth_token_ticket, authenticator,
                             private_key, service_url, username, ):
    """Build an auth token login URL."""
    token, signature = self._build_auth_token_data(
        auth_token_ticket, authenticator, private_key, username=username, )
    logging.debug('[CAS] AuthToken: {}'.format(token))
    login_url = self._get_auth_token_login_url(
        auth_token=token,
        auth_token_signature=signature,
        service_url=service_url,
    )
    logging.debug('[CAS] AuthToken Login URL: {}'.format(login_url))
    return login_url
Build an auth token login URL .
46,321
def parse_logout_request ( self , message_text ) : result = { } xml_document = parseString ( message_text ) for node in xml_document . getElementsByTagName ( 'saml:NameId' ) : for child in node . childNodes : if child . nodeType == child . TEXT_NODE : result [ 'name_id' ] = child . nodeValue . strip ( ) for node in xml_document . getElementsByTagName ( 'samlp:SessionIndex' ) : for child in node . childNodes : if child . nodeType == child . TEXT_NODE : result [ 'session_index' ] = str ( child . nodeValue . strip ( ) ) for key in xml_document . documentElement . attributes . keys ( ) : result [ str ( key ) ] = str ( xml_document . documentElement . getAttribute ( key ) ) logging . debug ( '[CAS] LogoutRequest:\n{}' . format ( json . dumps ( result , sort_keys = True , indent = 4 , separators = [ ',' , ': ' ] ) , ) ) return result
Parse the contents of a CAS LogoutRequest XML message .
46,322
def perform_api_request(self, url, method='POST', headers=None, body=None, **kwargs):
    """Perform an auth-token-protected request against a CAS API endpoint."""
    assert method in ('GET', 'POST')
    if method == 'GET':
        return self._perform_get(url, headers=headers, **kwargs)
    return self._perform_post(url, headers=headers, data=body, **kwargs)
Perform an auth - token - protected request against a CAS API endpoint .
46,323
def perform_proxy(self, proxy_ticket, headers=None):
    """Fetch a response from the remote CAS proxy endpoint."""
    proxy_url = self._get_proxy_url(ticket=proxy_ticket)
    logging.debug('[CAS] Proxy URL: {}'.format(proxy_url))
    return self._perform_cas_call(proxy_url, ticket=proxy_ticket, headers=headers, )
Fetch a response from the remote CAS proxy endpoint .
46,324
def perform_proxy_validate(self, proxied_service_ticket, headers=None):
    """Fetch a response from the remote CAS proxyValidate endpoint."""
    validate_url = self._get_proxy_validate_url(ticket=proxied_service_ticket)
    logging.debug('[CAS] ProxyValidate URL: {}'.format(validate_url))
    return self._perform_cas_call(
        validate_url, ticket=proxied_service_ticket, headers=headers, )
Fetch a response from the remote CAS proxyValidate endpoint .
46,325
def perform_service_validate(self, ticket=None, service_url=None, headers=None, ):
    """Fetch a response from the remote CAS serviceValidate endpoint."""
    validate_url = self._get_service_validate_url(ticket, service_url=service_url)
    logging.debug('[CAS] ServiceValidate URL: {}'.format(validate_url))
    return self._perform_cas_call(validate_url, ticket=ticket, headers=headers)
Fetch a response from the remote CAS serviceValidate endpoint .
46,326
def session_exists(self, ticket):
    """Test whether a session record exists for a service ticket."""
    assert isinstance(self.session_storage_adapter, CASSessionAdapter)
    found = self.session_storage_adapter.exists(ticket)
    logging.debug('[CAS] Session [{}] exists: {}'.format(ticket, found))
    return found
Test if a session records exists for a service ticket .
46,327
def create(self, ticket, payload=None, expires=None):
    """Create a session identifier in memcache associated with ticket.

    An empty payload is stored as ``True`` so key-presence checks work.
    """
    value = payload if payload else True
    self._client.set(str(ticket), value, expires)
Create a session identifier in memcache associated with ticket .
46,328
def __check_response(self, msg):
    """Search general errors in server response and raise exceptions when found.

    :raises NotAllowed / AuthorizationError / APISyntaxError / BadRequest:
        depending on which error pattern matches the response.
    """
    if not isinstance(msg, list):
        msg = msg.split("\n")
    if (len(msg) > 2) and self.RE_PATTERNS['not_allowed_pattern'].match(msg[2]):
        raise NotAllowed(msg[2][2:])
    if self.RE_PATTERNS['credentials_required_pattern'].match(msg[0]):
        raise AuthorizationError('Credentials required.')
    if self.RE_PATTERNS['syntax_error_pattern'].match(msg[0]):
        raise APISyntaxError(msg[2][2:] if len(msg) > 2 else 'Syntax error.')
    if self.RE_PATTERNS['bad_request_pattern'].match(msg[0]):
        # BUG FIX: the guard must match the index used -- msg[3] needs
        # len(msg) > 3; the original `len(msg) > 2` allowed an IndexError
        # on exactly-three-line responses.
        raise BadRequest(msg[3] if len(msg) > 3 else 'Bad request.')
Search general errors in server response and raise exceptions when found .
46,329
def __normalize_list(self, msg):
    """Split a comma-separated string (or list of chunks) into trimmed items."""
    text = "".join(msg) if isinstance(msg, list) else msg
    return [item.strip() for item in text.split(",")]
Split message to list by commas and trim whitespace .
46,330
def login(self, login=None, password=None):
    """Login with default or supplied credentials.

    Returns True on success, False on failure; raises AuthorizationError
    when no credentials are available at all.
    """
    if (login is not None) and (password is not None):
        login_data = {'user': login, 'pass': password}
    elif (self.default_login is not None) and (self.default_password is not None):
        login_data = {'user': self.default_login, 'pass': self.default_password}
    elif self.session.auth:
        # HTTP-level auth is configured on the session; no form login needed.
        login_data = None
    else:
        raise AuthorizationError('Credentials required, fill login and password.')
    try:
        response = self.__request('', post_data=login_data, without_login=True)
        self.login_result = self.__get_status_code(response) == 200
    except AuthorizationError:
        return False
    return self.login_result
Login with default or supplied credentials.
46,331
def logout(self):
    """Logout the currently logged-in user; returns True on success."""
    success = False
    if self.login_result is True:
        success = self.__get_status_code(self.__request('logout')) == 200
        self.login_result = None
    return success
Logout of user .
46,332
def new_correspondence(self, queue=None):
    """Obtain tickets changed by users other than the system one."""
    return self.search(
        Queue=queue,
        order='-LastUpdated',
        LastUpdatedBy__notexact=self.default_login,
    )
Obtains tickets changed by other users than the system one .
46,333
def last_updated(self, since, queue=None):
    """Obtain tickets changed after the given date."""
    return self.search(
        Queue=queue,
        order='-LastUpdated',
        LastUpdatedBy__notexact=self.default_login,
        LastUpdated__gt=since,
    )
Obtains tickets changed after given date .
46,334
def get_ticket(self, ticket_id):
    """Fetch ticket by its ID.

    Returns a dict of ticket fields (Requestors/Cc/AdminCc parsed into
    lists, ``numerical_id`` added), or None when the ticket does not exist.

    :raises UnexpectedMessageFormat: on non-200 status or malformed body.
    """
    msg = self.__request('ticket/{}/show'.format(str(ticket_id), ))
    status_code = self.__get_status_code(msg)
    if status_code != 200:
        raise UnexpectedMessageFormat(
            'Received status code is {:d} instead of 200.'.format(status_code))
    pairs = {}
    msg = msg.split('\n')
    if (len(msg) > 2) and self.RE_PATTERNS['does_not_exist_pattern'].match(msg[2]):
        return None
    req_matching = [i for i, m in enumerate(msg)
                    if self.RE_PATTERNS['requestors_pattern'].match(m)]
    req_id = req_matching[0] if req_matching else None
    if req_id is None:
        raise UnexpectedMessageFormat('Missing line starting with `Requestors:`.')
    # Headers before the Requestors block.
    for i in range(req_id):
        if ': ' in msg[i]:
            header, content = self.split_header(msg[i])
            pairs[header.strip()] = content.strip()
    # Requestors may continue over lines indented by len('Requestors: ') == 12.
    requestors = [msg[req_id][12:]]
    req_id += 1
    while (req_id < len(msg)) and (msg[req_id][:12] == ' ' * 12):
        requestors.append(msg[req_id][12:])
        req_id += 1
    pairs['Requestors'] = self.__normalize_list(requestors)
    # Headers after the Requestors block.
    for i in range(req_id, len(msg)):
        if ': ' in msg[i]:
            header, content = self.split_header(msg[i])
            pairs[header.strip()] = content.strip()
    if 'Cc' in pairs:
        pairs['Cc'] = self.__normalize_list(pairs['Cc'])
    if 'AdminCc' in pairs:
        pairs['AdminCc'] = self.__normalize_list(pairs['AdminCc'])
    # BUG FIX: the original guard used `and` plus a misspelled
    # `startswitch`, so a missing id slipped through and a malformed id
    # crashed with AttributeError instead of raising the intended error.
    if 'id' not in pairs or not pairs['id'].startswith('ticket/'):
        raise UnexpectedMessageFormat(
            'Response from RT didn\'t contain a valid ticket_id')
    pairs['numerical_id'] = pairs['id'].split('ticket/')[1]
    return pairs
Fetch ticket by its ID .
46,335
def create_ticket(self, Queue=None, files=None, **kwargs):
    """Create new ticket and set given parameters.

    Text* fields are indented for RT's multiline format and CF_* keys are
    mapped to RT custom fields.  Returns the new ticket id, or -1 when
    creation failed (server messages are emitted as warnings).
    """
    # BUG FIX: mutable default argument ([]) replaced with a None sentinel.
    if files is None:
        files = []
    post_data = 'id: ticket/new\nQueue: {}\n'.format(Queue or self.default_queue, )
    for key in kwargs:
        if key[:4] == 'Text':
            # RT multiline values continue on lines indented by one space.
            post_data += "{}: {}\n".format(key, re.sub(r'\n', r'\n ', kwargs[key]))
        elif key[:3] == 'CF_':
            post_data += "CF.{{{}}}: {}\n".format(key[3:], kwargs[key])
        else:
            post_data += "{}: {}\n".format(key, kwargs[key])
    for file_info in files:
        post_data += "\nAttachment: {}".format(file_info[0], )
    msg = self.__request('ticket/new', post_data={'content': post_data}, files=files)
    for line in msg.split('\n')[2:-1]:
        res = self.RE_PATTERNS['ticket_created_pattern'].match(line)
        if res is not None:
            return int(res.group(1))
        warnings.warn(line[2:])
    return -1
Create new ticket and set given parameters .
46,336
def edit_ticket(self, ticket_id, **kwargs):
    """Edit ticket values; returns True when RT confirms the update.

    List/tuple values are joined with ", "; CF_* keys map to custom fields.
    """
    post_data = ''
    # Use dict.items() directly -- equivalent to six.iteritems() on
    # Python 3 (which this file targets) and drops the six dependency.
    for key, value in kwargs.items():
        if isinstance(value, (list, tuple)):
            value = ", ".join(value)
        if key[:3] != 'CF_':
            post_data += "{}: {}\n".format(key, value)
        else:
            post_data += "CF.{{{}}}: {}\n".format(key[3:], value)
    msg = self.__request('ticket/{}/edit'.format(str(ticket_id)),
                         post_data={'content': post_data})
    state = msg.split('\n')[2]
    return self.RE_PATTERNS['update_pattern'].match(state) is not None
Edit ticket values .
46,337
def get_history(self, ticket_id, transaction_id=None):
    """Get set of history items for a ticket.

    Returns a list of dicts (one per history entry, with parsed 'Content'
    and 'Attachments'), or None when the ticket/transaction does not exist.

    :raises UnexpectedMessageFormat: on malformed history entries.
    """
    if transaction_id is None:
        msgs = self.__request('ticket/{}/history?format=l'.format(str(ticket_id), ))
    else:
        msgs = self.__request('ticket/{}/history/id/{}'.format(
            str(ticket_id), str(transaction_id)))
    lines = msgs.split('\n')
    if (len(lines) > 2) and (
            self.RE_PATTERNS['does_not_exist_pattern'].match(lines[2]) or
            self.RE_PATTERNS['not_related_pattern'].match(lines[2])):
        return None
    items = []
    # Entries are separated by "--" lines.
    for msg in msgs.split('\n--\n'):
        pairs = {}
        msg = msg.split('\n')
        cont_matching = [i for i, m in enumerate(msg)
                         if self.RE_PATTERNS['content_pattern'].match(m)]
        # BUG FIX: use `is None` -- a matching line at index 0 is falsy and
        # the original `if not cont_id` treated it as missing.
        cont_id = cont_matching[0] if cont_matching else None
        if cont_id is None:
            raise UnexpectedMessageFormat(
                'Unexpected history entry. Missing line starting with `Content:`.')
        atta_matching = [i for i, m in enumerate(msg)
                         if self.RE_PATTERNS['attachments_pattern'].match(m)]
        atta_id = atta_matching[0] if atta_matching else None
        if atta_id is None:
            # (also fixes the `Attachements` typo in the original message)
            raise UnexpectedMessageFormat(
                'Unexpected attachment part of history entry. '
                'Missing line starting with `Attachments:`.')
        # Headers before the Content block.
        for i in range(cont_id):
            if ': ' in msg[i]:
                header, content = self.split_header(msg[i])
                pairs[header.strip()] = content.strip()
        # Content continues over lines indented by len('Content: ') == 9.
        content = msg[cont_id][9:]
        cont_id += 1
        while (cont_id < len(msg)) and (msg[cont_id][:9] == ' ' * 9):
            content += '\n' + msg[cont_id][9:]
            cont_id += 1
        pairs['Content'] = content
        # Headers between Content and Attachments.
        for i in range(cont_id, atta_id):
            if ': ' in msg[i]:
                header, content = self.split_header(msg[i])
                pairs[header.strip()] = content.strip()
        attachments = []
        for i in range(atta_id + 1, len(msg)):
            if ': ' in msg[i]:
                header, content = self.split_header(msg[i])
                attachments.append((int(header), content.strip()))
        pairs['Attachments'] = attachments
        items.append(pairs)
    return items
Get set of history items .
46,338
def get_short_history(self, ticket_id):
    """Get set of short history items.

    Returns a list of (history_id, description) tuples, None when the
    ticket does not exist, or [] on a non-200 response.
    """
    msg = self.__request('ticket/{}/history'.format(str(ticket_id), ))
    items = []
    lines = msg.split('\n')
    # Buffer for entries whose description spans multiple lines; an empty
    # line toggles multiline mode on, a second empty line flushes the
    # buffer as one logical line.
    multiline_buffer = ""
    in_multiline = False
    if self.__get_status_code(lines[0]) == 200:
        if (len(lines) > 2) and self.RE_PATTERNS['does_not_exist_pattern'].match(lines[2]):
            return None
        if len(lines) >= 4:
            for line in lines[4:]:
                if line == "":
                    if not in_multiline:
                        in_multiline = True
                    else:
                        # Second blank line: flush the buffered entry.
                        line = multiline_buffer
                        multiline_buffer = ""
                        in_multiline = False
                else:
                    if in_multiline:
                        # NOTE(review): continuation lines are concatenated
                        # without any separator -- presumably intentional for
                        # RT's short-history format; confirm against server
                        # output before changing.
                        multiline_buffer += line
                        line = ""
                if ': ' in line:
                    hist_id, desc = line.split(': ', 1)
                    items.append((int(hist_id), desc))
    return items
Get set of short history items
46,339
def reply(self, ticket_id, text='', cc='', bcc='', content_type='text/plain', files=None):
    """Send an email message to the contacts in the Requestors field of the
    given ticket, with the subject as set in its Subject field."""
    # BUG FIX: mutable default argument ([]) replaced with a None sentinel.
    if files is None:
        files = []
    return self.__correspond(ticket_id, text, 'correspond', cc, bcc, content_type, files)
Sends email message to the contacts in Requestors field of given ticket with subject as is set in Subject field .
46,340
def get_attachments(self, ticket_id):
    """Get attachment list for a given ticket.

    Returns a list of regex-group tuples (one per attachment line), or
    None when the ticket does not exist.
    """
    msg = self.__request('ticket/{}/attachments'.format(str(ticket_id), ))
    lines = msg.split('\n')
    if (len(lines) > 2) and self.RE_PATTERNS['does_not_exist_pattern'].match(lines[2]):
        return None
    attachment_infos = []
    if (self.__get_status_code(lines[0]) == 200) and (len(lines) >= 4):
        for line in lines[4:]:
            match = self.RE_PATTERNS['attachments_list_pattern'].match(line)
            if match:
                attachment_infos.append(match.groups())
    return attachment_infos
Get attachment list for a given ticket
46,341
def get_attachments_ids(self, ticket_id):
    """Get IDs of attachments for given ticket.

    Propagates a falsy result (None / []) from get_attachments unchanged.
    """
    attachments = self.get_attachments(ticket_id)
    if not attachments:
        return attachments
    return [int(info[0]) for info in attachments]
Get IDs of attachments for given ticket .
46,342
def get_attachment(self, ticket_id, attachment_id):
    """Get attachment as a dict with its header fields, a 'Headers' dict
    and the raw bytes 'Content'.

    Returns None when the ticket/attachment does not exist.

    :raises UnexpectedMessageFormat: on malformed responses.
    """
    msg = self.__request(
        'ticket/{}/attachments/{}'.format(str(ticket_id), str(attachment_id)),
        text_response=False)
    msg = msg.split(b'\n')
    if (len(msg) > 2) and (
            self.RE_PATTERNS['invalid_attachment_pattern_bytes'].match(msg[2]) or
            self.RE_PATTERNS['does_not_exist_pattern_bytes'].match(msg[2])):
        return None
    msg = msg[2:]
    head_matching = [i for i, m in enumerate(msg)
                     if self.RE_PATTERNS['headers_pattern_bytes'].match(m)]
    head_id = head_matching[0] if head_matching else None
    # BUG FIX: `if not head_id` also rejected a valid match at index 0.
    if head_id is None:
        raise UnexpectedMessageFormat(
            'Unexpected headers part of attachment entry. '
            'Missing line starting with `Headers:`.')
    # BUG FIX: the replacement must be bytes (rb'\1') to match the bytes
    # pattern; the original str replacement raised TypeError on Python 3.
    msg[head_id] = re.sub(b'^Headers: (.*)$', rb'\1', msg[head_id])
    cont_matching = [i for i, m in enumerate(msg)
                     if self.RE_PATTERNS['content_pattern_bytes'].match(m)]
    cont_id = cont_matching[0] if cont_matching else None
    if cont_id is None:
        raise UnexpectedMessageFormat(
            'Unexpected content part of attachment entry. '
            'Missing line starting with `Content:`.')
    pairs = {}
    for i in range(head_id):
        if b': ' in msg[i]:
            header, content = msg[i].split(b': ', 1)
            pairs[header.strip().decode('utf-8')] = content.strip().decode('utf-8')
    headers = {}
    for i in range(head_id, cont_id):
        if b': ' in msg[i]:
            header, content = msg[i].split(b': ', 1)
            headers[header.strip().decode('utf-8')] = content.strip().decode('utf-8')
    pairs['Headers'] = headers
    # Content continues over lines indented by len('Content: ') == 9.
    content = msg[cont_id][9:]
    for i in range(cont_id + 1, len(msg)):
        if msg[i][:9] == (b' ' * 9):
            content += b'\n' + msg[i][9:]
    pairs['Content'] = content
    return pairs
Get attachment .
46,343
def get_attachment_content(self, ticket_id, attachment_id):
    """Get raw content of an attachment, without headers.

    Returns the bytes payload (status line and trailing newlines stripped),
    or None when the ticket/attachment does not exist.
    """
    msg = self.__request(
        'ticket/{}/attachments/{}/content'.format(str(ticket_id), str(attachment_id)),
        text_response=False)
    lines = msg.split(b'\n', 3)
    if (len(lines) == 4) and (
            self.RE_PATTERNS['invalid_attachment_pattern_bytes'].match(lines[2]) or
            self.RE_PATTERNS['does_not_exist_pattern_bytes'].match(lines[2])):
        return None
    # Skip the status line and blank separator; drop the 3 trailing newlines.
    payload_start = msg.find(b'\n') + 2
    return msg[payload_start:-3]
Get content of attachment without headers .
46,344
def get_user(self, user_id):
    """Get user details as a dict, or None when the user does not exist.

    :raises UnexpectedMessageFormat: on a non-200 response status.
    """
    msg = self.__request('user/{}'.format(str(user_id), ))
    status_code = self.__get_status_code(msg)
    if status_code != 200:
        raise UnexpectedMessageFormat(
            'Received status code is {:d} instead of 200.'.format(status_code))
    lines = msg.split('\n')
    if (len(lines) > 2) and self.RE_PATTERNS['does_not_exist_pattern'].match(lines[2]):
        return None
    pairs = {}
    for line in lines[2:]:
        if ': ' in line:
            key, value = line.split(': ', 1)
            pairs[key.strip()] = value.strip()
    return pairs
Get user details .
46,345
def get_links(self, ticket_id):
    """Gets the ticket links for a single ticket.

    Returns a dict mapping link type to a list of link target strings, or
    None when the ticket does not exist.

    :raises UnexpectedMessageFormat: on a non-200 response status.
    """
    msg = self.__request('ticket/{}/links/show'.format(str(ticket_id), ))
    status_code = self.__get_status_code(msg)
    if (status_code == 200):
        pairs = {}
        msg = msg.split('\n')
        if (len(msg) > 2) and self.RE_PATTERNS['does_not_exist_pattern'].match(msg[2]):
            return None
        i = 2
        while i < len(msg):
            if ': ' in msg[i]:
                key, link = self.split_header(msg[i])
                links = [link.strip()]
                j = i + 1
                # Continuation lines are indented by len(key) + 2 (": ").
                pad = len(key) + 2
                while (j < len(msg)) and msg[j].startswith(' ' * pad):
                    # NOTE(review): the previous value's trailing character
                    # is dropped -- presumably a separator comma at line
                    # ends; confirm against real RT output.
                    links[-1] = links[-1][:-1]
                    links.append(msg[j][pad:].strip())
                    j += 1
                pairs[key] = links
                # Resume scanning after the consumed continuation lines.
                i = j - 1
            i += 1
        return pairs
    else:
        raise UnexpectedMessageFormat(
            'Received status code is {:d} instead of 200.'.format(status_code))
Gets the ticket links for a single ticket .
46,346
def edit_ticket_links(self, ticket_id, **kwargs):
    """Edit ticket links; returns True when RT confirms the update."""
    payload = ''
    for key, value in kwargs.items():
        payload += "{}: {}\n".format(key, str(value))
    msg = self.__request('ticket/{}/links'.format(str(ticket_id), ),
                         post_data={'content': payload})
    state = msg.split('\n')[2]
    return self.RE_PATTERNS['links_updated_pattern'].match(state) is not None
Edit ticket links .
46,347
def split_header(line):
    """Split a header line into field name and field value.

    Custom-field headers (``CF.{Name}: value``) may themselves contain
    ': ', so they are matched explicitly.  Note the CF branch returns a
    tuple while the fallback returns a two-element list, mirroring the
    original behaviour.
    """
    cf_match = re.match(r'^(CF\.\{.*?}): (.*)$', line)
    if cf_match:
        return (cf_match.group(1), cf_match.group(2))
    return line.split(': ', 1)
Split a header line into field name and field value .
46,348
def PrivateKeyFromWIF(wif):
    """Get the 32-byte private key from a WIF-encoded key.

    :raises ValueError: when the WIF is missing, has the wrong length,
        bad prefix/suffix bytes, or a bad checksum.
    """
    # BUG FIX: the original formatted len(wif) into the error message even
    # when wif was None, which crashed with TypeError inside the error
    # path; handle None explicitly first.
    if wif is None:
        raise ValueError('Please provide a wif with a length of 52 bytes')
    # BUG FIX: identity comparisons (`is not 52`, `is not 0x80`, ...)
    # replaced with `!=` -- `is` on int literals relies on CPython's
    # small-int cache and is a SyntaxWarning on modern Python.
    if len(wif) != 52:
        raise ValueError(
            'Please provide a wif with a length of 52 bytes (LEN: {0:d})'.format(len(wif)))
    data = base58.b58decode(wif)
    if len(data) != 38 or data[0] != 0x80 or data[33] != 0x01:
        raise ValueError("Invalid format!")
    checksum = Crypto.Hash256(data[0:34])[0:4]
    if checksum != data[34:]:
        raise ValueError("Invalid WIF Checksum!")
    return data[1:33]
Get the private key from a WIF key
46,349
def PrivateKeyFromNEP2(nep2_key, passphrase):
    """Gets the private key from a NEP-2 encrypted private key.

    :param nep2_key: the 58-character NEP-2 encrypted key string
    :param passphrase: the passphrase the key was encrypted with
    :raises ValueError: on a malformed key or a wrong passphrase
    :return: the decrypted 32-byte private key
    """
    # NOTE(review): if nep2_key is None, formatting len(nep2_key) into the
    # message below raises TypeError instead of ValueError -- confirm and fix.
    if not nep2_key or len(nep2_key) != 58:
        raise ValueError('Please provide a nep2_key with a length of 58 bytes (LEN: {0:d})'.format(len(nep2_key)))
    ADDRESS_HASH_SIZE = 4
    ADDRESS_HASH_OFFSET = len(NEP_FLAG) + len(NEP_HEADER)
    try:
        decoded_key = base58.b58decode_check(nep2_key)
    except Exception as e:
        raise ValueError("Invalid nep2_key")
    # The 4-byte address hash doubles as the scrypt salt and the
    # wrong-passphrase check value.
    address_hash = decoded_key[ADDRESS_HASH_OFFSET:ADDRESS_HASH_OFFSET + ADDRESS_HASH_SIZE]
    encrypted = decoded_key[-32:]
    # NFC-normalize the passphrase before key derivation.
    pwd_normalized = bytes(unicodedata.normalize('NFC', passphrase), 'utf-8')
    derived = scrypt.hash(pwd_normalized, address_hash,
                          N=SCRYPT_ITERATIONS,
                          r=SCRYPT_BLOCKSIZE,
                          p=SCRYPT_PARALLEL_FACTOR,
                          buflen=SCRYPT_KEY_LEN_BYTES)
    # First half XORs the plaintext; second half is the AES key.
    derived1 = derived[:32]
    derived2 = derived[32:]
    cipher = AES.new(derived2, AES.MODE_ECB)
    decrypted = cipher.decrypt(encrypted)
    private_key = xor_bytes(decrypted, derived1)
    # Verify the passphrase by re-deriving the address and comparing the
    # first 4 bytes of its double-SHA256 to the embedded address hash.
    kp_new = KeyPair(priv_key=private_key)
    kp_new_address = kp_new.GetAddress()
    kp_new_address_hash_tmp = hashlib.sha256(kp_new_address.encode("utf-8")).digest()
    kp_new_address_hash_tmp2 = hashlib.sha256(kp_new_address_hash_tmp).digest()
    kp_new_address_hash = kp_new_address_hash_tmp2[:4]
    if (kp_new_address_hash != address_hash):
        raise ValueError("Wrong passphrase")
    return private_key
Gets the private key from a NEP - 2 encrypted private key
46,350
def GetAddress(self):
    """Returns the public NEO address for this KeyPair."""
    # Hex-encoded verification script: PUSH(33-byte compressed pubkey)
    # followed by CHECKSIG.
    verification_script = b'21' + self.PublicKey.encode_point(True) + b'ac'
    return Crypto.ToAddress(Crypto.ToScriptHash(verification_script))
Returns the public NEO address for this KeyPair
46,351
def Export(self):
    """Export this KeyPair's private key in WIF format."""
    payload = bytearray(38)
    payload[0] = 0x80                       # WIF prefix byte
    payload[1:33] = self.PrivateKey[0:32]   # raw 32-byte private key
    payload[33] = 0x01                      # compressed-pubkey flag
    payload[34:38] = Crypto.Default().Hash256(payload[0:34])[0:4]  # checksum
    return base58.b58encode(bytes(payload)).decode("utf-8")
Export this KeyPair s private key in WIF format .
46,352
def ExportNEP2(self, passphrase):
    """Export the encrypted private key in NEP-2 format.

    :raises ValueError: when the passphrase is shorter than 2 characters.
    """
    if len(passphrase) < 2:
        raise ValueError("Passphrase must have a minimum of 2 characters")
    # First 4 bytes of the double-SHA256 of the address: NEP-2 salt/check.
    address_hash = hashlib.sha256(
        hashlib.sha256(self.GetAddress().encode("utf-8")).digest()).digest()[:4]
    normalized = bytes(unicodedata.normalize('NFC', passphrase), 'utf-8')
    derived = scrypt.hash(normalized, address_hash,
                          N=SCRYPT_ITERATIONS, r=SCRYPT_BLOCKSIZE,
                          p=SCRYPT_PARALLEL_FACTOR, buflen=SCRYPT_KEY_LEN_BYTES)
    half_1 = derived[:32]
    half_2 = derived[32:]
    encrypted = AES.new(half_2, AES.MODE_ECB).encrypt(
        xor_bytes(bytes(self.PrivateKey), half_1))
    assembled = bytearray()
    assembled.extend(NEP_HEADER)
    assembled.extend(NEP_FLAG)
    assembled.extend(address_hash)
    assembled.extend(encrypted)
    return base58.b58encode_check(bytes(assembled)).decode("utf-8")
Export the encrypted private key in NEP - 2 format .
46,353
def ReadByte(self, do_ord=True):
    """Read a single byte from the stream.

    Returns the byte as an int when do_ord is True, otherwise as the raw
    1-byte value; returns 0 when the read (or ord) fails.
    """
    try:
        raw = self.stream.read(1)
        return ord(raw) if do_ord else raw
    except Exception:
        logger.error("ord expected character but got none")
        return 0
Read a single byte .
46,354
def SafeReadBytes(self, length):
    """Read exactly `length` bytes from the stream.

    :raises ValueError: when fewer than `length` bytes are available.
    """
    data = self.ReadBytes(length)
    if len(data) < length:
        raise ValueError("Not enough data available")
    return data
Read exactly length number of bytes from the stream .
46,355
def ToScriptHash(data, unhex=True):
    """Get a script hash of the data (UInt160 over Crypto.Hash160)."""
    if unhex and len(data) > 1:
        data = binascii.unhexlify(data)
    hashed = Crypto.Hash160(data)
    return UInt160(data=binascii.unhexlify(bytes(hashed, encoding='utf-8')))
Get a script hash of the data .
46,356
def Sign(message, private_key):
    """Sign the hex-encoded message with the given private key.

    Returns the raw ECDSA signature as r||s (two 32-byte big-endian halves).
    """
    digest = hashlib.sha256(binascii.unhexlify(message)).hexdigest()
    v, r, s = bitcoin.ecdsa_raw_sign(digest, private_key)
    return bytearray(r.to_bytes(32, 'big')) + bytearray(s.to_bytes(32, 'big'))
Sign the message with the given private key .
46,357
def sqrt(self, val, flag):
    """Calculate the square root of `val` modulo p.

    Supports p % 8 in {3, 5, 7}; `flag` selects which of the two roots
    (by parity of the result's value) is returned.

    :raises Exception: for p % 8 == 1, which is not supported.
    """
    if val.iszero():
        return val
    sw = self.p % 8
    # BUG FIX: exponents must use floor division (//) -- Python 3's true
    # division produced float exponents, breaking the modular
    # exponentiation.  All the (p +/- k) values below are exactly
    # divisible in their respective branches, so // is lossless.
    if sw == 3 or sw == 7:
        res = val ** ((self.p + 1) // 4)
    elif sw == 5:
        x = val ** ((self.p + 1) // 4)
        if x == 1:
            res = val ** ((self.p + 3) // 8)
        else:
            res = (4 * val) ** ((self.p - 5) // 8) * 2 * val
    else:
        raise Exception("modsqrt non supported for (p%8)==1")
    if res.value % 2 == flag:
        return res
    else:
        return -res
calculate the square root modulus p
46,358
def value(self, x):
    """Coerce an integer (or a Value from another field) into a Value
    belonging to this FiniteField; pass through values already in it."""
    if isinstance(x, FiniteField.Value) and x.field == self:
        return x
    return FiniteField.Value(self, x)
converts an integer or FiniteField . Value to a value of this FiniteField .
46,359
def integer(self, x):
    """Return a plain int for `x`: hex strings are decoded big-endian,
    Value instances are unwrapped, ints pass through."""
    if type(x) is str:
        raw = binascii.unhexlify(x)
        return int.from_bytes(raw, 'big')
    if isinstance(x, FiniteField.Value):
        return x.value
    return x
returns a plain integer
46,360
def add(self, p, q):
    """Perform elliptic-curve point addition of p and q.

    Handles the identity (zero point), doubling, and the inverse case
    (same x, different y) which yields the zero point.
    """
    # Identity element: P + 0 = P.
    if p.iszero():
        return q
    if q.iszero():
        return p
    lft = 0
    if p == q:
        # Doubling: a vertical tangent (y == 0) maps to the identity.
        if p.y == 0:
            return self.zero()
        # Tangent slope at p.
        lft = (3 * p.x ** 2 + self.a) / (2 * p.y)
    elif p.x == q.x:
        # p and q are inverses: result is the identity.
        return self.zero()
    else:
        # Chord slope through two distinct points.
        lft = (p.y - q.y) / (p.x - q.x)
    x = lft ** 2 - (p.x + q.x)
    y = lft * (p.x - x) - p.y
    return self.point(x, y)
perform elliptic curve addition
46,361
def point(self, x, y):
    """Construct an ECPoint on this curve from two coordinate values,
    coercing both into the curve's field."""
    fx = self.field.value(x)
    fy = self.field.value(y)
    return EllipticCurve.ECPoint(self, fx, fy)
construct a point from 2 values
46,362
def isoncurve(self, p):
    """Verify that `p` is the zero point or satisfies y^2 = x^3 + ax + b."""
    if p.iszero():
        return True
    return p.y ** 2 == p.x ** 3 + self.a * p.x + self.b
verifies if a point is on the curve
46,363
def secp256r1():
    """Create the NIST P-256 (secp256r1) curve with its standard generator.

    NOTE: the third ECDSA argument here is the field itself, mirroring the
    original construction.
    """
    prime = int("FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF", 16)
    field = FiniteField(prime)
    curve = EllipticCurve(
        field,
        115792089210356248762697446949407573530086143415290314195533631308867097853948,
        41058363725152142129326129780047268409114441015993725554835256314039467401291)
    generator = curve.point(
        0x6B17D1F2E12C4247F8BCE6E563A440F277037D812DEB33A0F4A13945D898C296,
        0x4FE342E2FE1A7F9B8EE7EB4A7C0F9E162BCE33576B315ECECBB6406837BF51F5)
    return ECDSA(curve, generator, field)
create the secp256r1 curve
46,364
def decode_secp256r1(str, unhex=True, check_on_curve=True):
    """Decode a public key point on the secp256r1 curve.

    Args:
        str: hex string (or raw data when unhex=False) encoding the point.
        unhex (bool): hex-decode the input first.
        check_on_curve (bool): validate the decoded point lies on the curve.

    Raises:
        Exception: if validation is requested and the point is off-curve.
    """
    prime = int("FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFF", 16)
    tail = int("FFFFFFFF00000001000000000000000000000000FFFFFFFFFFFFFFFFFFFFFFFC", 16)
    GFp = FiniteField(prime)
    ec = EllipticCurve(
        GFp,
        115792089210356248762697446949407573530086143415290314195533631308867097853948,
        41058363725152142129326129780047268409114441015993725554835256314039467401291)
    point = ec.decode_from_hex(str, unhex=unhex)
    if check_on_curve and not point.isoncurve():
        raise Exception("Could not decode string")
    return ECDSA(GFp, point, tail)
decode a public key on the secp256r1 curve
46,365
def secp256k1():
    """Create the secp256k1 curve (as used by Bitcoin) with its generator."""
    field = FiniteField(2 ** 256 - 2 ** 32 - 977)
    curve = EllipticCurve(field, 0, 7)
    generator = curve.point(
        0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798,
        0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8)
    return ECDSA(curve, generator,
                 2 ** 256 - 432420386565659656852420866394968145599)
create the secp256k1 curve
46,366
def isValidPublicAddress(address: str) -> bool:
    """Check whether `address` is a valid NEO address: 34 chars, starts
    with 'A', and passes the base58 checksum."""
    if len(address) != 34 or address[0] != 'A':
        return False
    try:
        base58.b58decode_check(address.encode())
    except ValueError:
        return False
    return True
Check if address is a valid NEO address
46,367
def __Build(leaves):
    """Recursively build the merkle tree from a list of nodes.

    Pairs up sibling leaves, hashing left||right into each parent; an odd
    trailing leaf is paired with itself. Recurses until one root remains.
    """
    if len(leaves) < 1:
        raise Exception('Leaves must have length')
    if len(leaves) == 1:
        return leaves[0]
    # Ceiling division: number of parents for this level.
    num_parents = int((len(leaves) + 1) / 2)
    parents = [MerkleTreeNode() for i in range(0, num_parents)]
    for i in range(0, num_parents):
        node = parents[i]
        node.LeftChild = leaves[i * 2]
        leaves[i * 2].Parent = node
        if (i * 2 + 1 == len(leaves)):
            # Odd number of leaves: duplicate the left child.
            node.RightChild = node.LeftChild
        else:
            node.RightChild = leaves[i * 2 + 1]
            leaves[i * 2 + 1].Parent = node
        # Parent hash = Hash256(left-hash || right-hash).
        hasharray = bytearray(node.LeftChild.Hash.ToArray() + node.RightChild.Hash.ToArray())
        node.Hash = UInt256(data=Crypto.Hash256(hasharray))
    return MerkleTree.__Build(parents)
Build the merkle tree .
46,368
def ComputeRoot(hashes):
    """Compute the merkle root for a list of hashes.

    Raises:
        Exception: for an empty input list.
    """
    if len(hashes) == 0:
        raise Exception('Hashes must have length')
    if len(hashes) == 1:
        return hashes[0]
    return MerkleTree(hashes).Root.Hash
Compute the root hash .
46,369
def ToHashArray(self):
    """Collect every hash in the tree (depth-first) into a list."""
    collected = set()
    MerkleTree.__DepthFirstSearch(self.Root, collected)
    return list(collected)
Turn the tree into a list of hashes .
46,370
def Trim(self, flags):
    """Trim unflagged subtrees from the tree, keeping the root hash.

    Args:
        flags: per-leaf keep-flags; padded with zeros to the leaf count.
    """
    logger.info("Trimming!")
    flags = bytearray(flags)
    # Number of leaves is 2 ** (Depth - 1); pad flags up to it.
    length = 1 << (self.Depth - 1)
    if len(flags) < length:
        flags.extend(bytes(length - len(flags)))
    MerkleTree._TrimNode(self.Root, 0, self.Depth, flags)
Trim the nodes from the tree keeping only the root hash .
46,371
def _TrimNode ( node , index , depth , flags ) : if depth == 1 or node . LeftChild is None : return if depth == 2 : if not flags [ index * 2 ] and not flags [ index * 2 + 1 ] : node . LeftChild = None node . RightChild = None else : MerkleTree . _TrimNode ( node . LeftChild , index * 2 , depth - 1 , flags ) MerkleTree . _TrimNode ( node . RightChild , index * 2 , depth - 1 , flags ) if node . LeftChild . LeftChild is None and node . RightChild . RightChild is None : node . LeftChild = None node . RightChild = None
Internal helper method to trim a node .
46,372
def double_sha256(ba):
    """Return the hex digest of SHA-256 applied twice to `ba`.

    Args:
        ba (bytes): input data.

    Returns:
        str: hex-encoded sha256(sha256(ba)).
    """
    # The original computed an intermediate hexdigest and discarded it;
    # the double hash only needs the raw digest of the first pass.
    return hashlib.sha256(hashlib.sha256(ba).digest()).hexdigest()
Perform two SHA256 operations on the input .
46,373
def scripthash_to_address(scripthash):
    """Base58-encode a script hash (with version byte and 4-byte checksum)
    into a public address string."""
    payload = bytearray([ADDRESS_VERSION]) + scripthash
    checksum = bin_dbl_sha256(payload)[0:4]
    encoded = base58.b58encode(bytes(payload + bytearray(checksum)))
    return encoded.decode("utf-8")
Convert a script hash to a public address .
46,374
def base256_encode(n, minwidth=0):
    """Encode the non-negative integer `n` as base-256 bytes.

    Args:
        n (int): value to encode; must be >= 0.
        minwidth (int): zero-pad the result up to this many bytes.

    Returns:
        bytearray: the encoded value.

    Raises:
        ValueError: for negative input.
    """
    if n > 0:
        arr = []
        while n:
            n, rem = divmod(n, 256)
            arr.append(rem)
        # reversed() yields the digits most-significant first.
        b = bytearray(reversed(arr))
    elif n == 0:
        b = bytearray(b'\x00')
    else:
        raise ValueError("Negative numbers not supported")
    if minwidth > 0 and len(b) < minwidth:
        padding = (minwidth - len(b)) * b'\x00'
        b = bytearray(padding) + b
        # NOTE(review): the reversal runs only on the padded path, so the
        # result's endianness depends on `minwidth` -- confirm this is the
        # intended behavior against callers.
        b.reverse()
    return b
Encode the input with base256 .
46,375
def xor_bytes(a, b):
    """Return the byte-wise XOR of two equal-length bytes objects.

    Args:
        a (bytes): first operand.
        b (bytes): second operand, same length as `a`.

    Returns:
        bytes: a XOR b.

    Raises:
        TypeError: if either operand is not bytes.
        ValueError: if the operands differ in length.
    """
    # Explicit raises instead of asserts: asserts are stripped under -O.
    if not isinstance(a, bytes) or not isinstance(b, bytes):
        raise TypeError("xor_bytes expects two bytes objects")
    if len(a) != len(b):
        raise ValueError("xor_bytes operands must have equal length")
    return bytes(x ^ y for x, y in zip(a, b))
XOR on two bytes objects
46,376
def WriteBytes(self, value, unhex=True):
    """Write bytes to the stream, hex-decoding first when requested.

    Input that fails hex decoding is written as-is: the decode error is
    deliberately swallowed to support raw byte values.
    """
    data = value
    if unhex:
        try:
            data = binascii.unhexlify(value)
        except binascii.Error:
            data = value
    return self.stream.write(data)
Write a bytes type to the stream .
46,377
def WriteUInt160(self, value):
    """Serialize a UInt160 into the stream; reject any other type."""
    if type(value) is not UInt160:
        raise Exception("value must be UInt160 instance ")
    value.Serialize(self)
Write a UInt160 type to the stream .
46,378
def WriteUInt256(self, value):
    """Serialize a UInt256 into the stream; reject any other type."""
    if type(value) is not UInt256:
        raise Exception("Cannot write value that is not UInt256")
    value.Serialize(self)
Write a UInt256 type to the stream .
46,379
def Write2000256List(self, arr):
    """Write each hex-string item to the stream with its byte order
    reversed (little-endian)."""
    for entry in arr:
        raw = bytearray(binascii.unhexlify(entry))
        raw.reverse()
        self.WriteBytes(raw)
Write an array of 64 byte items to the stream .
46,380
def get_user_id(self, user):
    """Resolve the identifier for `user`.

    Prefers settings.SAML_IDP_DJANGO_USERNAME_FIELD; falls back to the
    model's USERNAME_FIELD, then to 'username'.
    """
    field = getattr(settings, 'SAML_IDP_DJANGO_USERNAME_FIELD', None)
    if not field:
        field = getattr(user, 'USERNAME_FIELD', 'username')
    return str(getattr(user, field))
Get identifier for a user . Take the one defined in settings . SAML_IDP_DJANGO_USERNAME_FIELD first if not set use the USERNAME_FIELD property which is set on the user Model . This defaults to the user . username field .
46,381
def create_identity(self, user, sp_mapping, **extra_config):
    """Build the identity dict for `user` from `sp_mapping`
    (user attribute name -> SP attribute name); attributes the user
    lacks are silently skipped."""
    identity = {}
    for user_attr, out_attr in sp_mapping.items():
        if hasattr(user, user_attr):
            identity[out_attr] = getattr(user, user_attr)
    return identity
Generate an identity dictionary of the user based on the given mapping of desired user attributes by the SP
46,382
def sso_entry(request):
    """SSO entrypoint: stash the SAML parameters in the session and
    redirect to the login_process view.

    Returns HTTP 400 when SAMLRequest is missing.
    """
    if request.method == 'POST':
        passed_data, binding = request.POST, BINDING_HTTP_POST
    else:
        passed_data, binding = request.GET, BINDING_HTTP_REDIRECT
    request.session['Binding'] = binding
    try:
        request.session['SAMLRequest'] = passed_data['SAMLRequest']
    except (KeyError, MultiValueDictKeyError) as e:
        return HttpResponseBadRequest(e)
    request.session['RelayState'] = passed_data.get('RelayState', '')
    # Signed redirect binding carries signature material alongside.
    if "SigAlg" in passed_data and "Signature" in passed_data:
        request.session['SigAlg'] = passed_data['SigAlg']
        request.session['Signature'] = passed_data['Signature']
    return HttpResponseRedirect(reverse('djangosaml2idp:saml_login_process'))
Entrypoint view for SSO . Gathers the parameters from the HTTP request stores them in the session and redirects the requester to the login_process view .
46,383
def metadata(request):
    """Serve this IdP's SAML 2.0 metadata XML, constructed on-the-fly
    from settings.SAML_IDP_CONFIG."""
    conf = IdPConfig()
    conf.load(copy.deepcopy(settings.SAML_IDP_CONFIG))
    descriptor = entity_descriptor(conf)
    return HttpResponse(content=text_type(descriptor).encode('utf-8'),
                        content_type="text/xml; charset=utf8")
Returns an XML with the SAML 2 . 0 metadata for this Idp . The metadata is constructed on - the - fly based on the config dict in the django settings .
46,384
def dispatch(self, request, *args, **kwargs):
    """Construct the IdP Server from settings.SAML_IDP_CONFIG before
    delegating to the normal view dispatch."""
    cfg = IdPConfig()
    try:
        cfg.load(copy.deepcopy(settings.SAML_IDP_CONFIG))
        self.IDP = Server(config=cfg)
    except Exception as exc:
        # Broad on purpose: any config/setup problem goes to handle_error.
        return self.handle_error(request, exception=exc)
    return super(IdPHandlerViewMixin, self).dispatch(request, *args, **kwargs)
Construct IDP server with config from settings dict
46,385
def get_processor(self, entity_id, sp_config):
    """Instantiate the SP-configured processor class, or fall back to an
    all-access BaseProcessor; re-raises if instantiation fails."""
    processor_string = sp_config.get('processor', None)
    if not processor_string:
        return BaseProcessor(entity_id)
    try:
        return import_string(processor_string)(entity_id)
    except Exception as e:
        logger.error("Failed to instantiate processor: {} - {}".format(processor_string, e), exc_info=True)
        raise
Instantiate user - specified processor or default to an all - access base processor . Raises an exception if the configured processor class can not be found or initialized .
46,386
def get(self, include_backups=False):
    """Parse the lease file and return a list of Lease/Lease6 instances.

    Args:
        include_backups (bool): also include IPv4 blocks without a
            'hardware' property (backup/abandoned entries).

    Returns:
        list: Lease (IPv4) and Lease6 (IPv6) objects.
    """
    leases = []
    # The file may be gzip-compressed; gzip content is decoded as UTF-8.
    with open(self.filename) if not self.gzip else gzip.open(self.filename) as lease_file:
        lease_data = lease_file.read()
        if self.gzip:
            lease_data = lease_data.decode('utf-8')
        # IPv4 lease blocks.
        for match in self.regex_leaseblock.finditer(lease_data):
            block = match.groupdict()
            properties, options, sets = _extract_properties(block['config'])
            if 'hardware' not in properties and not include_backups:
                # Skip entries without a hardware address unless backups requested.
                continue
            lease = Lease(block['ip'], properties=properties, options=options, sets=sets)
            leases.append(lease)
        # IPv6 lease blocks: one block may contain multiple iaaddr entries.
        for match in self.regex_leaseblock6.finditer(lease_data):
            block = match.groupdict()
            properties, options, sets = _extract_properties(block['config'])
            host_identifier = block['id']
            block_type = block['type']
            last_client_communication = parse_time(properties['cltt'])
            for address_block in self.regex_iaaddr.finditer(block['config']):
                block = address_block.groupdict()
                properties, options, sets = _extract_properties(block['config'])
                lease = Lease6(block['ip'], properties, last_client_communication,
                               host_identifier, block_type, options=options, sets=sets)
                leases.append(lease)
    return leases
Parse the lease file and return a list of Lease instances .
46,387
def get_current(self):
    """Return a dict of the active, valid leases from the lease file.

    IPv4 leases are keyed by ethernet address; IPv6 leases by
    '<type>-<host identifier>'.
    """
    current = {}
    for lease in self.get():
        if not (lease.valid and lease.active):
            continue
        if type(lease) is Lease:
            current[lease.ethernet] = lease
        elif type(lease) is Lease6:
            current['%s-%s' % (lease.type, lease.host_identifier_string)] = lease
    return current
Parse the lease file and return a dict of active and valid Lease instances . The key for this dict is the ethernet address of the lease .
46,388
def create_layout(lexer=None, reserve_space_for_menu=8,
                  get_prompt_tokens=None, get_bottom_toolbar_tokens=None,
                  extra_input_processors=None, multiline=False,
                  wrap_lines=True):
    """Create a custom prompt_toolkit Layout for the Crash input REPL.

    Args:
        lexer: Pygments lexer class for input highlighting.
        reserve_space_for_menu (int): rows reserved for the completion menu.
        get_prompt_tokens: callable producing the prompt tokens.
        get_bottom_toolbar_tokens: callable producing toolbar tokens.
        extra_input_processors: additional input processors to append.
        multiline: filter/bool enabling multiline editing.
        wrap_lines (bool): wrap long input lines.
    """
    # Highlight search matches while the search buffer has focus.
    input_processors = [
        ConditionalProcessor(
            HighlightSearchProcessor(preview_search=True),
            HasFocus(SEARCH_BUFFER)),
    ]
    if extra_input_processors:
        input_processors.extend(extra_input_processors)
    lexer = PygmentsLexer(lexer, sync_from_start=True)
    multiline = to_cli_filter(multiline)
    # Static "[ctrl+d] Exit" hint shown in the status bar.
    sidebar_token = [
        (Token.Toolbar.Status.Key, "[ctrl+d]"),
        (Token.Toolbar.Status, " Exit")
    ]
    sidebar_width = token_list_width(sidebar_token)
    get_sidebar_tokens = lambda _: sidebar_token

    def get_height(cli):
        # Reserve room for the completion menu while completions are open.
        if reserve_space_for_menu and not cli.is_done:
            buff = cli.current_buffer
            if buff.complete_while_typing() or buff.complete_state is not None:
                return LayoutDimension(min=reserve_space_for_menu)
        return LayoutDimension()

    return HSplit([
        VSplit([
            HSplit([
                FloatContainer(
                    HSplit([
                        Window(
                            BufferControl(
                                input_processors=input_processors,
                                lexer=lexer,
                                preview_search=True),
                            get_height=get_height,
                            wrap_lines=wrap_lines,
                            left_margins=[
                                ConditionalMargin(
                                    PromptMargin(get_prompt_tokens),
                                    filter=multiline)
                            ],
                        ),
                    ]),
                    [
                        # Completion menu floats over the input window.
                        Float(xcursor=True,
                              ycursor=True,
                              content=CompletionsMenu(
                                  max_height=16,
                                  scroll_offset=1,
                                  extra_filter=HasFocus(DEFAULT_BUFFER))),
                    ]),
                ConditionalContainer(
                    SearchToolbar(),
                    multiline),
            ])
        ]),
    ] + [
        VSplit([
            # Bottom toolbar plus the exit hint, hidden once input is done.
            ConditionalContainer(
                Window(
                    TokenListControl(get_bottom_toolbar_tokens),
                    height=LayoutDimension.exact(1)),
                filter=~IsDone() & RendererHeightIsKnown()),
            ConditionalContainer(
                Window(
                    TokenListControl(get_sidebar_tokens),
                    height=LayoutDimension.exact(1),
                    width=LayoutDimension.exact(sidebar_width)),
                filter=~IsDone() & RendererHeightIsKnown())
        ])
    ])
Creates a custom Layout for the Crash input REPL
46,389
def _parse_statements ( lines ) : lines = ( l . strip ( ) for l in lines if l ) lines = ( l for l in lines if l and not l . startswith ( '--' ) ) parts = [ ] for line in lines : parts . append ( line . rstrip ( ';' ) ) if line . endswith ( ';' ) : yield '\n' . join ( parts ) parts [ : ] = [ ] if parts : yield '\n' . join ( parts )
Return a generator of statements
46,390
def _show_tables(self, *args):
    """Print the existing tables within the doc schema, adapting the
    information_schema query to the connected server version."""
    v = self.connection.lowest_server_version
    if v >= TABLE_SCHEMA_MIN_VERSION:
        schema_name = "table_schema"
    else:
        schema_name = "schema_name"
    if v >= TABLE_TYPE_MIN_VERSION:
        table_filter = " AND table_type = 'BASE TABLE'"
    else:
        table_filter = ""
    self._exec(
        "SELECT format('%s.%s', {schema}, table_name) AS name "
        "FROM information_schema.tables "
        "WHERE {schema} NOT IN ('sys','information_schema', 'pg_catalog')"
        "{table_filter}".format(schema=schema_name, table_filter=table_filter))
print the existing tables within the doc schema
46,391
def execute(self):
    """Print system and cluster info via the "sysinfo" queries.

    Does nothing when no connection is available; warns when the server
    is too old to support sysinfo. A query failure sets a non-zero exit
    code on the command.
    """
    if not self.cmd.is_conn_available():
        return
    if self.cmd.connection.lowest_server_version >= SYSINFO_MIN_VERSION:
        success, rows = self._sys_info()
        # Keep a previously-set non-zero exit code; otherwise reflect success.
        self.cmd.exit_code = self.cmd.exit_code or int(not success)
        if success:
            for result in rows:
                self.cmd.pprint(result.rows, result.cols)
            self.cmd.logger.info("For debugging purposes you can send above listed information to support@crate.io")
    else:
        tmpl = 'Crate {version} does not support the cluster "sysinfo" command'
        self.cmd.logger.warn(tmpl.format(version=self.cmd.connection.lowest_server_version))
print system and cluster info
46,392
def bwc_bool_transform_from(cls, x):
    """Interpret legacy config booleans: 'true'/'false' in any case, or
    numeric strings ('0'/'1') as a fallback."""
    lowered = x.lower()
    if lowered == 'true':
        return True
    if lowered == 'false':
        return False
    return bool(int(x))
Read boolean values from old config files correctly and interpret True and False as correct booleans .
46,393
def _transform_field ( field ) : if isinstance ( field , bool ) : return TRUE if field else FALSE elif isinstance ( field , ( list , dict ) ) : return json . dumps ( field , sort_keys = True , ensure_ascii = False ) else : return field
transform field for displaying
46,394
def script():
    """Run the command-line script: print all textual tags of the given
    audio files.

    With --batch, the interactive prompt offering to remove unsupported
    tag properties is skipped.
    """
    parser = argparse.ArgumentParser(description="Print all textual tags of one or more audio files.")
    parser.add_argument("-b", "--batch", help="disable user interaction", action="store_true")
    parser.add_argument("file", nargs="+", help="file(s) to print tags of")
    args = parser.parse_args()
    for filename in args.file:
        # argv entries may arrive as bytes (e.g. on Python 2).
        if isinstance(filename, bytes):
            filename = filename.decode(sys.getfilesystemencoding())
        line = "TAGS OF '{0}'".format(os.path.basename(filename))
        print("*" * len(line))
        print(line)
        print("*" * len(line))
        audioFile = taglib.File(filename)
        tags = audioFile.tags
        if len(tags) > 0:
            # Align values on the longest tag key.
            maxKeyLen = max(len(key) for key in tags.keys())
            for key, values in tags.items():
                for value in values:
                    print(('{0:' + str(maxKeyLen) + '} = {1}').format(key, value))
        if len(audioFile.unsupported) > 0:
            print('Unsupported tag elements: ' + "; ".join(audioFile.unsupported))
            # Python 2/3 compatible interactive prompt.
            if sys.version_info[0] == 2:
                inputFunction = raw_input
            else:
                inputFunction = input
            if not args.batch and inputFunction("remove unsupported properties? [yN] ").lower() in ["y", "yes"]:
                audioFile.removeUnsupportedProperties(audioFile.unsupported)
                audioFile.save()
Run the command - line script .
46,395
def sw_reset(self):
    """Trigger a software reset of the SeeSaw chip and select the pin
    mapping matching the detected product id.

    Raises:
        RuntimeError: if the hardware ID read back is not the expected
            code (likely a wiring problem).
    """
    self.write8(_STATUS_BASE, _STATUS_SWRST, 0xFF)
    # Give the chip time to come back up after the reset.
    time.sleep(.500)
    chip_id = self.read8(_STATUS_BASE, _STATUS_HW_ID)
    if chip_id != _HW_ID_CODE:
        raise RuntimeError("Seesaw hardware ID returned (0x{:x}) is not "
                           "correct! Expected 0x{:x}. Please check your wiring.".format(chip_id, _HW_ID_CODE))
    # Product id lives in the upper 16 bits of the version word.
    pid = self.get_version() >> 16
    if pid == _CRICKIT_PID:
        from adafruit_seesaw.crickit import Crickit_Pinmap
        self.pin_mapping = Crickit_Pinmap
    elif pid == _ROBOHATMM1_PID:
        from adafruit_seesaw.robohat import MM1_Pinmap
        self.pin_mapping = MM1_Pinmap
    else:
        from adafruit_seesaw.samd09 import SAMD09_Pinmap
        self.pin_mapping = SAMD09_Pinmap
Trigger a software reset of the SeeSaw chip
46,396
def _get_default_cache_file_path ( self ) : default_list_path = os . path . join ( self . _get_default_cache_dir ( ) , self . _CACHE_FILE_NAME ) if not os . access ( default_list_path , os . F_OK ) : raise CacheFileError ( "Default cache file does not exist " "'{}'!" . format ( default_list_path ) ) return default_list_path
Returns default cache file path
46,397
def _get_writable_cache_dir(self):
    """Pick a writable cache directory: the package default first, then
    the user's cache directory, then the global temp directory.

    Raises:
        CacheFileError: when none of the candidates is writable.
    """
    default_dir = self._get_default_cache_dir()
    if os.access(default_dir, os.W_OK):
        self._default_cache_file = True
        return default_dir
    user_dir = user_cache_dir(self._URLEXTRACT_NAME)
    if not os.path.exists(user_dir):
        os.makedirs(user_dir, exist_ok=True)
    if os.access(user_dir, os.W_OK):
        return user_dir
    tmp_dir = tempfile.gettempdir()
    if os.access(tmp_dir, os.W_OK):
        return tmp_dir
    raise CacheFileError("Cache directories are not writable.")
Get a writable cache directory with fallback to the user's cache directory and the global temp directory
46,398
def _get_cache_file_path ( self , cache_dir = None ) : if cache_dir is None : cache_dir = self . _get_writable_cache_dir ( ) else : if not os . access ( cache_dir , os . W_OK ) : raise CacheFileError ( "None of cache directories is writable." ) return os . path . join ( cache_dir , self . _CACHE_FILE_NAME )
Get path for cache file
46,399
def _load_cached_tlds(self):
    """Load TLDs from the cached file into a set.

    Each TLD is stored twice, prefixed with '.': as read from the file
    and in its IDNA-decoded (unicode) form. Blank lines and '#' comment
    lines are skipped.

    Raises:
        CacheFileError: if the cache file is not readable.
    """
    if not os.access(self._tld_list_path, os.R_OK):
        self._logger.error("Cached file is not readable for current "
                           "user. ({})".format(self._tld_list_path))
        raise CacheFileError("Cached file is not readable for current user.")
    set_of_tlds = set()
    with open(self._tld_list_path, 'r') as f_cache_tld:
        for line in f_cache_tld:
            tld = line.strip().lower()
            if not tld:
                continue
            if tld[0] == '#':
                continue
            set_of_tlds.add("." + tld)
            set_of_tlds.add("." + idna.decode(tld))
    return set_of_tlds
Loads TLDs from cached file to set .