idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
19,000
def parse_received(received):
    """Parse a single Received header into a dict of values keyed by clause.

    Raises MailParserReceivedParsingError when a pattern matches more than
    once, or when no clause matches at all.
    """
    values_by_clause = {}
    for pattern in RECEIVED_COMPILED_LIST:
        matches = list(pattern.finditer(received))
        if not matches:
            log.debug("No matches found for %s in %s" % (pattern.pattern, received))
            continue
        if len(matches) > 1:
            msg = "More than one match found for %s in %s" % (pattern.pattern, received)
            log.error(msg)
            raise MailParserReceivedParsingError(msg)
        log.debug("Found one match for %s in %s" % (pattern.pattern, received))
        match = matches[0].groupdict()
        if six.PY2:
            values_by_clause[match.keys()[0]] = match.values()[0]
        elif six.PY3:
            key = list(match.keys())[0]
            value = list(match.values())[0]
            values_by_clause[key] = value
    if not values_by_clause:
        msg = "Unable to match any clauses in %s" % (received)
        log.error(msg)
        raise MailParserReceivedParsingError(msg)
    return values_by_clause
Parse a single received header . Return a dictionary of values by clause .
19,001
def receiveds_parsing(receiveds):
    """Parse the Received headers of an email.

    Headers are cleaned of junk characters, then parsed one by one;
    a header that fails to parse is kept as a raw entry.  If the counts
    do not line up, the whole list falls back to the unparsed format.
    """
    parsed = []
    receiveds = [re.sub(JUNK_PATTERN, " ", i).strip() for i in receiveds]
    n = len(receiveds)
    log.debug("Nr. of receiveds. {}".format(n))
    for idx, received in enumerate(receiveds):
        log.debug("Parsing received {}/{}".format(idx + 1, n))
        log.debug("Try to parse {!r}".format(received))
        try:
            values_by_clause = parse_received(received)
        except MailParserReceivedParsingError:
            # keep the raw header so no hop is lost
            parsed.append({'raw': received})
        else:
            parsed.append(values_by_clause)
    log.debug("len(receiveds) %s, len(parsed) %s" % (len(receiveds), len(parsed)))
    if len(receiveds) != len(parsed):
        log.error("len(receiveds): %s, len(parsed): %s, receiveds: %s, \
parsed: %s" % (len(receiveds), len(parsed), receiveds, parsed))
        return receiveds_not_parsed(receiveds)
    else:
        return receiveds_format(parsed)
This function parses the receiveds headers .
19,002
def receiveds_not_parsed(receiveds):
    """Build the hop structure from receiveds that could not be parsed.

    Each hop carries only its raw value, so the result has the same shape
    as the output of receiveds_format.
    """
    log.debug("Receiveds for this email are not parsed")
    output = []
    for hop, raw in enumerate(reversed(receiveds), start=1):
        output.append({"raw": raw.strip(), "hop": hop})
    return output
If receiveds are not parsed, makes a new structure with a raw field. It's useful to have the same structure as parsed receiveds.
19,003
def receiveds_format(receiveds):
    """Given a list of parsed receiveds hops, add metadata and reformat values.

    Hops are numbered from the last Received header (origin) onward, a
    UTC date is added when parsable, and "delay" holds the seconds
    elapsed since the previous hop (0 when unknown).
    """
    log.debug("Receiveds for this email are parsed")
    output = []
    counter = Counter()
    for i in receiveds[::-1]:
        # strip whitespace from every non-empty clause value
        j = {k: v.strip() for k, v in i.items() if v}
        j["hop"] = counter["hop"] + 1
        if i.get("date"):
            # keep only the date part after the last ';'
            i["date"] = i["date"].split(";")[-1]
            try:
                j["date_utc"], _ = convert_mail_date(i["date"])
            except TypeError:
                j["date_utc"] = None
        size = len(output)
        now = j.get("date_utc")
        if size and now:
            before = output[counter["hop"] - 1].get("date_utc")
            if before:
                j["delay"] = (now - before).total_seconds()
            else:
                j["delay"] = 0
        else:
            j["delay"] = 0
        output.append(j)
        counter["hop"] += 1
    else:
        # serialize datetimes once all hops are collected
        for i in output:
            if i.get("date_utc"):
                i["date_utc"] = i["date_utc"].isoformat()
        else:
            return output
Given a list of received hops, adds metadata and reformats field values.
19,004
def get_header(message, name):
    """Return the named header of an email.message.Message decoded with
    the correct charset, or empty text when the header is absent."""
    raw = message.get(name)
    log.debug("Getting header {!r}: {!r}".format(name, raw))
    return decode_header_part(raw) if raw else six.text_type()
Gets an email . message . Message and a header name and returns the mail header decoded with the correct charset .
19,005
def get_mail_keys(message, complete=True):
    """Return the set of email parts/headers to extract from a message.

    With complete=True every header present in the message is included;
    otherwise only the address headers and the other standard parts.
    """
    if complete:
        log.debug("Get all headers")
        lowered = {header.lower() for header in message.keys()}
        all_parts = ADDRESSES_HEADERS | OTHERS_PARTS | lowered
    else:
        log.debug("Get only mains headers")
        all_parts = ADDRESSES_HEADERS | OTHERS_PARTS
    log.debug("All parts to get: {}".format(", ".join(all_parts)))
    return all_parts
Given an email . message . Message return a set with all email parts to get
19,006
def write_sample(binary, payload, path, filename):
    """Write a sample to the file system.

    Binary payloads are expected base64-encoded and are decoded before
    being written in binary mode; text payloads are written as-is.
    """
    if not os.path.exists(path):
        os.makedirs(path)
    sample = os.path.join(path, filename)
    mode = "wb" if binary else "w"
    content = base64.b64decode(payload) if binary else payload
    with open(sample, mode) as f:
        f.write(content)
This function writes a sample on file system .
19,007
def from_file_obj(cls, fp):
    """Init a new object from a file-like object. Not for Outlook msg.

    The stream is rewound when seekable; non-seekable streams are read
    from their current position.
    """
    log.debug("Parsing email from file object")
    try:
        fp.seek(0)
    except IOError:
        # non-seekable stream: read from the current position
        pass
    # NOTE: read/return used to sit in a `finally` block, whose `return`
    # silently swallowed any non-IOError exception raised by seek().
    s = fp.read()
    return cls.from_string(s)
Init a new object from a file - like object . Not for Outlook msg .
19,008
def from_file(cls, fp, is_outlook=False):
    """Init a new object from a file path.

    When is_outlook is True, fp is a temporary file produced by the
    Outlook conversion and is removed after parsing.
    """
    log.debug("Parsing email from file {!r}".format(fp))
    with ported_open(fp) as f:
        message = email.message_from_file(f)
    if is_outlook:
        log.debug("Removing temp converted Outlook email {!r}".format(fp))
        os.remove(fp)
    return cls(message)
Init a new object from a file path .
19,009
def from_string(cls, s):
    """Init a new object from a string."""
    log.debug("Parsing email from string")
    return cls(email.message_from_string(s))
Init a new object from a string .
19,010
def from_bytes(cls, bt):
    """Init a new object from bytes; valid only on Python 3."""
    log.debug("Parsing email from bytes")
    if six.PY2:
        raise MailParserEnvironmentError(
            "Parsing from bytes is valid only for Python 3.x version")
    return cls(email.message_from_bytes(bt))
Init a new object from bytes .
19,011
def _reset(self):
    """Reset the parsing state of the mail object."""
    log.debug("Reset all variables")
    self._attachments = []
    self._text_plain = []
    self._text_html = []
    self._defects = []
    self._defects_categories = set()
    self._has_defects = False
Reset the state of mail object .
19,012
def _append_defects(self, part, part_content_type):
    """Add new defects and defect categories found in a message part
    to the object attributes."""
    part_defects = {}
    for e in part.defects:
        # record the defect class name plus its docstring description
        defects = "{}: {}".format(e.__class__.__name__, e.__doc__)
        self._defects_categories.add(e.__class__.__name__)
        part_defects.setdefault(part_content_type, []).append(defects)
        log.debug("Added defect {!r}".format(defects))
    if part_defects:
        self._has_defects = True
        self._defects.append(part_defects)
Add new defects and defects categories to object attributes .
19,013
def _make_mail(self, complete=True):
    """Assign the right values to all tokens of the email and return
    the parsed mail as a dict."""
    mail = {}
    keys = get_mail_keys(self.message, complete)
    for i in keys:
        log.debug("Getting header or part {!r}".format(i))
        # each key maps to a property/attribute of the same name
        value = getattr(self, i)
        if value:
            mail[i] = value
    # attach defect information
    mail["has_defects"] = self.has_defects
    if self.has_defects:
        mail["defects"] = self.defects
        mail["defects_categories"] = list(self.defects_categories)
    return mail
This method assigns the right values to all tokens of email . Returns a parsed object
19,014
def get_server_ipaddress(self, trust):
    """Return the public IP address of the sender, if determinable.

    Scans the Received headers for the given trusted string and extracts
    the last IP appearing before the "by" clause; private addresses and
    unparsable candidates yield None.
    """
    log.debug("Trust string is {!r}".format(trust))
    if not trust.strip():
        return
    received = self.message.get_all("received", [])
    for i in received:
        i = ported_string(i)
        if trust in i:
            log.debug("Trust string {!r} is in {!r}".format(trust, i))
            # only look at the part before the "by" clause
            check = REGXIP.findall(i[0:i.find("by")])
            if check:
                try:
                    ip_str = six.text_type(check[-1])
                    log.debug("Found sender IP {!r} in {!r}".format(ip_str, i))
                    ip = ipaddress.ip_address(ip_str)
                except ValueError:
                    return
                else:
                    if not ip.is_private:
                        log.debug("IP {!r} not private".format(ip_str))
                        return ip_str
Return the ip address of sender
19,015
def received_raw(self):
    """Return a list of all Received headers in raw (decoded) format."""
    return [decode_header_part(received)
            for received in self.message.get_all("received", [])]
Return a list of all received headers in raw format
19,016
def headers(self):
    """Return only the headers as a Python dict with decoded values."""
    return {name: decode_header_part(value)
            for name, value in self.message.items()}
Return only the headers as Python object
19,017
def date(self):
    """Return the mail date as a UTC datetime, or None when it cannot
    be parsed."""
    raw = self.message.get('date')
    parsed = None
    try:
        parsed, _ = convert_mail_date(raw)
    finally:
        # best-effort: any failure in convert_mail_date yields None
        return parsed
Return the mail date in datetime . datetime format and UTC .
19,018
def timezone(self):
    """Return the timezone as an offset from UTC; 0 when unknown."""
    raw = self.message.get('date')
    offset = 0
    try:
        _, offset = convert_mail_date(raw)
    finally:
        # best-effort: any failure keeps the default offset
        return offset
Return timezone . Offset from UTC .
19,019
def date_json(self):
    """Return the mail date as JSON, or None when there is no date."""
    if self.date:
        return json.dumps(self.date.isoformat(), ensure_ascii=False)
Return the JSON of date
19,020
def mail_json(self):
    """Return the JSON of the parsed mail.

    The date field is replaced in place (on self._mail) with its
    ISO-8601 representation before serialization.
    """
    if self.mail.get("date"):
        self._mail["date"] = self.date.isoformat()
    return json.dumps(self.mail, ensure_ascii=False, indent=2)
Return the JSON of mail parsed
19,021
def mail_partial_json(self):
    """Return the JSON of the partially parsed mail.

    The date field is replaced in place (on self._mail_partial) with its
    ISO-8601 representation before serialization.
    """
    if self.mail_partial.get("date"):
        self._mail_partial["date"] = self.date.isoformat()
    return json.dumps(self.mail_partial, ensure_ascii=False, indent=2)
Return the JSON of mail parsed partial
19,022
def download_worker_fn(scraper, img_url, pbar, status_flags, status_lock):
    """Standalone worker that downloads one image and updates the shared
    status flags and progress bar under status_lock.

    Returns True so the executor future always resolves to a value.
    """
    failed = False
    size_failed = False
    try:
        scraper.download_image(img_url)
    except ImageDownloadError:
        failed = True
    except ImageSizeError:
        size_failed = True
    # `with` guarantees the lock is released even if pbar.update raises;
    # the previous acquire()/release() pair could leak the lock.
    with status_lock:
        if failed:
            status_flags['failed'] += 1
        elif size_failed:
            status_flags['under_min_or_over_max_filesize'] += 1
        # old_div(100.0, x) is plain true division for a float numerator
        status_flags['percent'] = status_flags['percent'] + 100.0 / scraper.no_to_download
        pbar.update(status_flags['percent'] % 100)
    return True
Standalone function that downloads images.
19,023
def get_html(self):
    """Download the HTML content of the page at self.url.

    Uses a headless PhantomJS browser when self.use_ghost is set, plain
    requests (optionally through self.proxies) otherwise.  Stores and
    returns (page_html, page_url).  Raises PageLoadError on non-200
    responses or connection failures.
    """
    if self.use_ghost:
        self.url = urljoin("http://", self.url)
        import selenium
        import selenium.webdriver
        driver = selenium.webdriver.PhantomJS(service_log_path=os.path.devnull)
        driver.get(self.url)
        page_html = driver.page_source
        page_url = driver.current_url
        driver.quit()
    else:
        if self.proxy_url:
            print("Using proxy: " + self.proxy_url + "\n")
        try:
            page = requests.get(self.url, proxies=self.proxies)
            if page.status_code != 200:
                raise PageLoadError(page.status_code)
        except requests.exceptions.MissingSchema:
            # retry with an explicit http:// scheme
            self.url = "http://" + self.url
            page = requests.get(self.url, proxies=self.proxies)
            if page.status_code != 200:
                raise PageLoadError(page.status_code)
        except requests.exceptions.ConnectionError:
            raise PageLoadError(None)
        try:
            page_html = page.text
            page_url = page.url
        except UnboundLocalError:
            raise PageLoadError(None)
    self.page_html = page_html
    self.page_url = page_url
    return (self.page_html, self.page_url)
Downloads HTML content of page given the page_url
19,024
def get_img_list(self):
    """Get the list of image URLs from self.page_html.

    Collects <img src> and <a href> links, filters them by allowed
    formats (and by self.filename_pattern when set), absolutizes and
    deduplicates them; reverses the order when self.scrape_reverse.
    """
    tree = html.fromstring(self.page_html)
    img = tree.xpath('//img/@src')
    links = tree.xpath('//a/@href')
    img_list = self.process_links(img)
    img_links = self.process_links(links)
    img_list.extend(img_links)
    if self.filename_pattern:
        pattern = re.compile(self.filename_pattern)

        def matches_pattern(img_url):
            # match against the filename component only
            img_filename = urlparse(img_url).path.split('/')[-1]
            return pattern.search(img_filename)

        images = [urljoin(self.url, img_url)
                  for img_url in img_list if matches_pattern(img_url)]
    else:
        images = [urljoin(self.url, img_url) for img_url in img_list]
    # deduplicate (order is not preserved)
    images = list(set(images))
    self.images = images
    if self.scrape_reverse:
        self.images.reverse()
    return self.images
Gets list of images from the page_html .
19,025
def process_download_path(self):
    """Ensure the download path exists and is writable.

    Raises DirectoryAccessError when the directory exists but is not
    writable, DirectoryCreateError when it cannot be created.
    """
    path = self.download_path
    if os.path.exists(path):
        if not os.access(path, os.W_OK):
            raise DirectoryAccessError
    elif os.access(os.path.dirname(path), os.W_OK):
        os.makedirs(path)
    else:
        raise DirectoryCreateError
    return True
Processes the download path .
19,026
def download_image(self, img_url):
    """Download a single image into self.download_path.

    Raises ImageDownloadError on any network failure or non-200 status,
    ImageSizeError when content-length is outside the min/max bounds
    (SVG files are exempt from the size check).
    """
    try:
        img_request = requests.request(
            'get', img_url, stream=True, proxies=self.proxies)
    except Exception:
        # network-level failure; no status code available
        raise ImageDownloadError()
    # checked outside the try: the old bare `except:` used to swallow
    # this very exception and re-raise it without the status code
    if img_request.status_code != 200:
        raise ImageDownloadError(img_request.status_code)
    if img_url[-3:] == "svg" or (
            int(img_request.headers['content-length']) > self.min_filesize and
            int(img_request.headers['content-length']) < self.max_filesize):
        img_content = img_request.content
        with open(os.path.join(self.download_path, img_url.split('/')[-1]), 'wb') as f:
            byte_image = bytes(img_content)
            f.write(byte_image)
    else:
        raise ImageSizeError(img_request.headers['content-length'])
    return True
Downloads a single image .
19,027
def process_links(self, links):
    """Filter links, keeping only those whose file extension appears in
    self.format_list (case-insensitive)."""
    wanted = self.format_list
    return [link for link in links
            if os.path.splitext(link)[1][1:].strip().lower() in wanted]
Function to process the list of links and filter required links .
19,028
def update(self, value):
    "Updates the progress bar to a new value."
    # values at or below 0.1 are treated as "not started yet"
    if value <= 0.1:
        value = 0
    assert 0 <= value <= self.maxval
    self.currval = value
    # skip redraw when nothing visible changed or the bar is done
    if not self._need_update() or self.finished:
        return
    if not self.start_time:
        self.start_time = time.time()
    self.seconds_elapsed = time.time() - self.start_time
    self.prev_percentage = self.percentage()
    if value != self.maxval:
        # carriage return keeps the bar on the same terminal line
        self.fd.write(self._format_line() + '\r')
    else:
        self.finished = True
        self.fd.write(self._format_line() + '\n')
Updates the progress bar to a new value .
19,029
def finish(self):
    """Used to tell the progress is finished.

    Pushes the bar to maxval and restores the default SIGWINCH handler
    if one was installed.
    """
    self.update(self.maxval)
    if self.signal_set:
        signal.signal(signal.SIGWINCH, signal.SIG_DFL)
Used to tell the progress is finished .
19,030
def console_main():
    """Handle the whole console action: parse arguments, fetch the page,
    collect image URLs, and download them concurrently with a progress bar."""
    setproctitle('image-scraper')
    scraper = ImageScraper()
    scraper.get_arguments()
    print("\nImageScraper\n============\nRequesting page....\n")
    try:
        scraper.get_html()
    except PageLoadError as err:
        if err.status_code is None:
            # typo fixed: "acces" -> "access"
            print("ImageScraper is unable to access the internet.")
        else:
            print("Page failed to load. Status code: {0}".format(err.status_code))
        sys.exit()
    scraper.get_img_list()
    if len(scraper.images) == 0:
        sys.exit("Sorry, no images found.")
    if scraper.no_to_download is None:
        scraper.no_to_download = len(scraper.images)
    print("Found {0} images: ".format(len(scraper.images)))
    try:
        scraper.process_download_path()
    except DirectoryAccessError:
        print("Sorry, the directory can't be accessed.")
        sys.exit()
    except DirectoryCreateError:
        print("Sorry, the directory can't be created.")
        sys.exit()
    if scraper.dump_urls:
        for img_url in scraper.images:
            print(img_url)
    status_flags = {'count': 0, 'percent': 0.0, 'failed': 0,
                    'under_min_or_over_max_filesize': 0}
    widgets = ['Progress: ', Percentage(), ' ', Bar(marker=RotatingMarker()),
               ' ', ETA(), ' ', FileTransferSpeed()]
    pbar = ProgressBar(widgets=widgets, maxval=100).start()
    pool = ThreadPoolExecutor(max_workers=scraper.nthreads)
    status_lock = threading.Lock()
    for img_url in scraper.images:
        if status_flags['count'] == scraper.no_to_download:
            break
        pool.submit(download_worker_fn, scraper, img_url, pbar,
                    status_flags, status_lock)
        status_flags['count'] += 1
    pool.shutdown(wait=True)
    pbar.finish()
    print("\nDone!\nDownloaded {0} images\nFailed: {1}\n".format(
        status_flags['count'] - status_flags['failed'] -
        status_flags['under_min_or_over_max_filesize'],
        status_flags['failed']))
    return
This function handles all the console action .
19,031
def parse(cls, raw):
    """Given a possibly-multiline string representing an SSE message,
    parse it and return an Event object."""
    msg = cls()
    for line in raw.splitlines():
        m = cls.sse_line_pattern.match(line)
        if m is None:
            # malformed line: warn and skip
            warnings.warn('Invalid SSE line: "%s"' % line, SyntaxWarning)
            continue
        name = m.group('name')
        if name == '':
            # comment line in the SSE stream: ignore
            continue
        value = m.group('value')
        if name == 'data':
            # multiple data lines are joined with newlines
            if msg.data:
                msg.data = '%s\n%s' % (msg.data, value)
            else:
                msg.data = value
        elif name == 'event':
            msg.event = value
        elif name == 'id':
            msg.id = value
        elif name == 'retry':
            msg.retry = int(value)
    return msg
Given a possibly - multiline string representing an SSE message parse it and return a Event object .
19,032
def get_upstream(repo: Repo) -> Remote:
    """Find the upstream remote of the pluggy repository on the remotes."""
    for remote in repo.remotes:
        for url in remote.urls:
            if url.endswith("pytest-dev/pluggy.git"):
                return remote
    # message fixed: it used to mention tox-dev/tox.git, a different
    # project than the one actually searched for above
    raise RuntimeError("could not find pytest-dev/pluggy.git remote")
Find upstream repository for pluggy on the remotes
19,033
def register(self, plugin, name=None):
    """Register a plugin and return its canonical name, or None if the
    name is blocked from registering.

    Raises ValueError if the plugin is already registered.
    """
    plugin_name = name or self.get_canonical_name(plugin)
    if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
        if self._name2plugin.get(plugin_name, -1) is None:
            # a None entry marks a blocked name
            return
        raise ValueError(
            "Plugin already registered: %s=%s\n%s"
            % (plugin_name, plugin, self._name2plugin))
    self._name2plugin[plugin_name] = plugin
    # collect hook implementations exposed by the plugin
    self._plugin2hookcallers[plugin] = hookcallers = []
    for name in dir(plugin):
        hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
        if hookimpl_opts is not None:
            normalize_hookimpl_opts(hookimpl_opts)
            method = getattr(plugin, name)
            hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
            hook = getattr(self.hook, name, None)
            if hook is None:
                # first implementation for this hook name: create caller
                hook = _HookCaller(name, self._hookexec)
                setattr(self.hook, name, hook)
            elif hook.has_spec():
                self._verify_hook(hook, hookimpl)
                hook._maybe_apply_history(hookimpl)
            hook._add_hookimpl(hookimpl)
            hookcallers.append(hook)
    return plugin_name
Register a plugin and return its canonical name or None if the name is blocked from registering . Raise a ValueError if the plugin is already registered .
19,034
def unregister(self, plugin=None, name=None):
    """Unregister a plugin object and all its contained hook
    implementations from internal data structures.

    Either plugin or name must be specified; returns the plugin.
    """
    if name is None:
        assert plugin is not None, "one of name or plugin needs to be specified"
        name = self.get_name(plugin)
    if plugin is None:
        plugin = self.get_plugin(name)
    # None signals a blocked name: keep the blocking entry in that case
    if self._name2plugin.get(name):
        del self._name2plugin[name]
    for hookcaller in self._plugin2hookcallers.pop(plugin, []):
        hookcaller._remove_plugin(plugin)
    return plugin
unregister a plugin object and all its contained hook implementations from internal data structures .
19,035
def set_blocked(self, name):
    """Block registrations of the given name; unregister if already
    registered."""
    self.unregister(name=name)
    # a None entry marks the name as blocked for future register() calls
    self._name2plugin[name] = None
block registrations of the given name unregister if already registered .
19,036
def add_hookspecs(self, module_or_class):
    """Add new hook specifications defined in the given module_or_class.

    Functions are recognized if they have been decorated accordingly.
    Raises ValueError when no hook specification is found.
    """
    names = []
    for name in dir(module_or_class):
        spec_opts = self.parse_hookspec_opts(module_or_class, name)
        if spec_opts is not None:
            hc = getattr(self.hook, name, None)
            if hc is None:
                hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
                setattr(self.hook, name, hc)
            else:
                # plugins registered this hook before the spec was known:
                # attach the spec and re-verify existing implementations
                hc.set_specification(module_or_class, spec_opts)
                for hookfunction in hc.get_hookimpls():
                    self._verify_hook(hc, hookfunction)
            names.append(name)
    if not names:
        raise ValueError(
            "did not find any %r hooks in %r" % (self.project_name, module_or_class))
add new hook specifications defined in the given module_or_class . Functions are recognized if they have been decorated accordingly .
19,037
def get_name(self, plugin):
    """Return name for registered plugin or None if not registered."""
    for registered_name, registered in self._name2plugin.items():
        if plugin == registered:
            return registered_name
Return name for registered plugin or None if not registered .
19,038
def check_pending(self):
    """Verify that all hooks which have not been verified against a hook
    specification are optional, otherwise raise PluginValidationError."""
    for name in self.hook.__dict__:
        # skip private attributes of the hook relay
        if name[0] != "_":
            hook = getattr(self.hook, name)
            if not hook.has_spec():
                for hookimpl in hook.get_hookimpls():
                    if not hookimpl.optionalhook:
                        raise PluginValidationError(
                            hookimpl.plugin,
                            "unknown hook %r in plugin %r" % (name, hookimpl.plugin),
                        )
Verify that all hooks which have not been verified against a hook specification are optional otherwise raise PluginValidationError
19,039
def load_setuptools_entrypoints(self, group, name=None):
    """Load modules from querying the specified setuptools group.

    Returns the number of plugins loaded.  Already-registered or blocked
    entry points are skipped and missing distributions are ignored;
    a version conflict raises PluginValidationError.
    """
    from pkg_resources import (
        iter_entry_points,
        DistributionNotFound,
        VersionConflict,
    )
    count = 0
    for ep in iter_entry_points(group, name=name):
        if self.get_plugin(ep.name) or self.is_blocked(ep.name):
            continue
        try:
            plugin = ep.load()
        except DistributionNotFound:
            continue
        except VersionConflict as e:
            raise PluginValidationError(
                plugin=None,
                message="Plugin %r could not be loaded: %s!" % (ep.name, e),
            )
        self.register(plugin, name=ep.name)
        self._plugin_distinfo.append((plugin, ep.dist))
        count += 1
    return count
Load modules from querying the specified setuptools group .
19,040
def enable_tracing(self):
    """Enable tracing of hook calls and return an undo function."""
    hooktrace = self.hook._trace

    def before(hook_name, methods, kwargs):
        hooktrace.root.indent += 1
        hooktrace(hook_name, kwargs)

    def after(outcome, hook_name, methods, kwargs):
        if outcome.excinfo is None:
            # "-->" restored: this string literal was corrupted in the
            # source (it read `" ,`), breaking the syntax of the call
            hooktrace("finish", hook_name, "-->", outcome.get_result())
        hooktrace.root.indent -= 1

    return self.add_hookcall_monitoring(before, after)
enable tracing of hook calls and return an undo function .
19,041
def subset_hook_caller(self, name, remove_plugins):
    """Return a new _HookCaller instance for the named method which
    manages calls to all registered plugins except the ones from
    remove_plugins."""
    orig = getattr(self.hook, name)
    plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
    if plugins_to_remove:
        hc = _HookCaller(orig.name, orig._hookexec, orig.spec.namespace, orig.spec.opts)
        for hookimpl in orig.get_hookimpls():
            plugin = hookimpl.plugin
            if plugin not in plugins_to_remove:
                hc._add_hookimpl(hookimpl)
                # track the subset caller so future (un)registration of
                # this plugin also updates it
                self._plugin2hookcallers.setdefault(plugin, []).append(hc)
        return hc
    return orig
Return a new _HookCaller instance for the named method which manages calls to all registered plugins except the ones from remove_plugins .
19,042
def varnames(func):
    """Return tuple of positional and keyword argument names for a
    function, method, class or callable.

    For classes the ``__init__`` method is inspected; the implicit
    ``self`` argument is stripped.  Returns ``(args, defaults)``.
    """
    cache = getattr(func, "__dict__", {})
    try:
        return cache["_varnames"]
    except KeyError:
        pass
    if inspect.isclass(func):
        try:
            func = func.__init__
        except AttributeError:
            return (), ()
    elif not inspect.isroutine(func):
        try:
            func = getattr(func, "__call__", func)
        except Exception:
            # fixed: this branch used to `return ()`, breaking every
            # caller that unpacks the (args, defaults) pair
            return (), ()
    try:
        spec = _getargspec(func)
    except TypeError:
        return (), ()
    args, defaults = tuple(spec.args), spec.defaults
    if defaults:
        index = -len(defaults)
        args, defaults = args[:index], tuple(args[index:])
    else:
        defaults = ()
    # strip the implicit instance argument from methods
    implicit_names = ("self",) if not _PYPY3 else ("self", "obj")
    if args:
        if inspect.ismethod(func) or (
            "." in getattr(func, "__qualname__", ()) and args[0] in implicit_names
        ):
            args = args[1:]
    try:
        cache["_varnames"] = args, defaults
    except TypeError:
        # object has a read-only or absent __dict__; skip caching
        pass
    return args, defaults
Return tuple of positional and keyword argument names for a function, method, class or callable.
19,043
def _add_hookimpl(self, hookimpl):
    """Add an implementation to the callback chain.

    Wrappers and non-wrappers are kept in separate lists; within a list
    trylast impls go first and tryfirst impls last, so the (reversed)
    execution order honors the markers.
    """
    if hookimpl.hookwrapper:
        methods = self._wrappers
    else:
        methods = self._nonwrappers
    if hookimpl.trylast:
        methods.insert(0, hookimpl)
    elif hookimpl.tryfirst:
        methods.append(hookimpl)
    else:
        # insert after the last non-tryfirst method
        i = len(methods) - 1
        while i >= 0 and methods[i].tryfirst:
            i -= 1
        methods.insert(i + 1, hookimpl)
    if "__multicall__" in hookimpl.argnames:
        # fixed: the implicit string concatenation was missing a space,
        # producing "will beremoved" in the warning text
        warnings.warn(
            "Support for __multicall__ is now deprecated and will be "
            "removed in an upcoming release.",
            DeprecationWarning,
        )
        self.multicall = _legacymulticall
Add an implementation to the callback chain .
19,044
def call_historic(self, result_callback=None, kwargs=None, proc=None):
    """Call the hook with given kwargs for all registered plugins and
    for all plugins which will be registered afterwards.

    result_callback, when given, is invoked with each result produced
    by a hook implementation.
    """
    if proc is not None:
        # fixed: the implicit string concatenation was missing a space,
        # producing "will beremoved" in the warning text
        warnings.warn(
            "Support for `proc` argument is now deprecated and will be "
            "removed in an upcoming release.",
            DeprecationWarning,
        )
        result_callback = proc
    # record the call so late-registered plugins replay it
    self._call_history.append((kwargs or {}, result_callback))
    res = self._hookexec(self, self.get_hookimpls(), kwargs)
    if result_callback is None:
        return
    for x in res or []:
        result_callback(x)
Call the hook with given kwargs for all registered plugins and for all plugins which will be registered afterwards .
19,045
def call_extra(self, methods, kwargs):
    """Call the hook with some additional temporarily participating
    methods, using the specified kwargs as call parameters."""
    saved = list(self._nonwrappers), list(self._wrappers)
    for method in methods:
        opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
        self._add_hookimpl(HookImpl(None, "<temp>", method, opts))
    try:
        return self(**kwargs)
    finally:
        # restore the original implementation lists
        self._nonwrappers, self._wrappers = saved
Call the hook with some additional temporarily participating methods using the specified kwargs as call parameters .
19,046
def _maybe_apply_history(self, method):
    """Apply call history to a new hookimpl if the hook is historic."""
    if not self.is_historic():
        return
    for kwargs, result_callback in self._call_history:
        res = self._hookexec(self, [method], kwargs)
        if res and result_callback is not None:
            result_callback(res[0])
Apply call history to a new hookimpl if it is marked as historic .
19,047
def install_mp_handler(logger=None):
    """Wrap each handler of the given Logger (root logger by default)
    in a MultiProcessingHandler."""
    if logger is None:
        logger = logging.getLogger()
    for index, original in enumerate(list(logger.handlers)):
        wrapped = MultiProcessingHandler(
            'mp-handler-{0}'.format(index), sub_handler=original)
        logger.removeHandler(original)
        logger.addHandler(wrapped)
Wraps the handlers in the given Logger with an MultiProcessingHandler .
19,048
def get_offset(target):
    """Return a location's time zone offset from UTC in minutes.

    target must be a mapping with 'lat' and 'lng' keys; `tf` is assumed
    to be a module-level timezone-finder instance — TODO confirm.
    """
    from pytz import timezone
    import pytz
    from datetime import datetime
    utc = pytz.utc
    today = datetime.now()
    # localize the same naive timestamp in both zones and diff them
    tz_target = timezone(tf.certain_timezone_at(lat=target['lat'], lng=target['lng']))
    today_target = tz_target.localize(today)
    today_utc = utc.localize(today)
    return (today_utc - today_target).total_seconds() / 60
Returns a location's time zone offset from UTC in minutes.
19,049
def connect(self):
    """Establish a connection to APNs.

    Retries up to MAX_CONNECTION_RETRIES times, closing the connection
    between attempts, then raises ConnectionFailed.
    """
    retries = 0
    while retries < MAX_CONNECTION_RETRIES:
        try:
            self._connection.connect()
            logger.info('Connected to APNs')
            return
        except Exception:
            self._connection.close()
            retries += 1
            logger.exception(
                'Failed connecting to APNs (attempt %s of %s)',
                retries, MAX_CONNECTION_RETRIES)
    # all attempts exhausted
    raise ConnectionFailed()
Establish a connection to APNs . If already connected the function does nothing . If the connection fails the function retries up to MAX_CONNECTION_RETRIES times .
19,050
def _get_raw ( source , bitarray ) : offset = int ( source [ 'offset' ] ) size = int ( source [ 'size' ] ) return int ( '' . join ( [ '1' if digit else '0' for digit in bitarray [ offset : offset + size ] ] ) , 2 )
Get raw data as integer based on offset and size
19,051
def _set_raw ( target , raw_value , bitarray ) : offset = int ( target [ 'offset' ] ) size = int ( target [ 'size' ] ) for digit in range ( size ) : bitarray [ offset + digit ] = ( raw_value >> ( size - digit - 1 ) ) & 0x01 != 0 return bitarray
put value into bit array
19,052
def _get_value(self, source, bitarray):
    """Get a scaled value entry based on the data in XML.

    The raw integer is mapped linearly from the XML <range> onto the
    XML <scale>.
    """
    raw_value = self._get_raw(source, bitarray)
    rng = source.find('range')
    rng_min = float(rng.find('min').text)
    rng_max = float(rng.find('max').text)
    scl = source.find('scale')
    scl_min = float(scl.find('min').text)
    scl_max = float(scl.find('max').text)
    return {
        source['shortcut']: {
            'description': source.get('description'),
            'unit': source['unit'],
            # linear interpolation range -> scale
            'value': (scl_max - scl_min) / (rng_max - rng_min) * (raw_value - rng_min) + scl_min,
            'raw_value': raw_value,
        }
    }
Get value based on the data in XML
19,053
def _get_enum(self, source, bitarray):
    """Get an enum value entry based on the data in XML.

    Looks up the <item> matching the raw value, falling back to a range
    item when no exact item exists.
    """
    raw_value = self._get_raw(source, bitarray)
    # either a fixed item or a rangeitem describes this raw value
    value_desc = source.find('item', {'value': str(raw_value)}) or self._get_rangeitem(source, raw_value)
    return {
        source['shortcut']: {
            'description': source.get('description'),
            'unit': source.get('unit', ''),
            # range items may interpolate the raw value into their text
            'value': value_desc['description'].format(value=raw_value),
            'raw_value': raw_value,
        }
    }
Get enum value based on the data in XML
19,054
def _get_boolean(self, source, bitarray):
    """Get a boolean value entry based on the data in XML."""
    raw_value = self._get_raw(source, bitarray)
    return {
        source['shortcut']: {
            'description': source.get('description'),
            'unit': source.get('unit', ''),
            'value': bool(raw_value),
            'raw_value': raw_value,
        }
    }
Get boolean value based on the data in XML
19,055
def _set_value(self, target, value, bitarray):
    """Scale the given numeric value from the XML <scale> back into the
    raw <range> and store it in the target field of bitarray."""
    rng = target.find('range')
    rng_min = float(rng.find('min').text)
    rng_max = float(rng.find('max').text)
    scl = target.find('scale')
    scl_min = float(scl.find('min').text)
    scl_max = float(scl.find('max').text)
    # inverse of the linear mapping used by _get_value
    raw_value = (value - scl_min) * (rng_max - rng_min) / (scl_max - scl_min) + rng_min
    return self._set_raw(target, int(raw_value), bitarray)
set given numeric value to target field in bitarray
19,056
def find_profile(self, bitarray, eep_rorg, rorg_func, rorg_type, direction=None, command=None):
    """Find the profile and data description matching RORG, FUNC and TYPE.

    Returns the matching <data> element, or None (with a warning) when
    the EEP file is not loaded or no match exists.
    """
    if not self.init_ok:
        self.logger.warn('EEP.xml not loaded!')
        return None
    if eep_rorg not in self.telegrams.keys():
        self.logger.warn('Cannot find rorg in EEP!')
        return None
    if rorg_func not in self.telegrams[eep_rorg].keys():
        self.logger.warn('Cannot find func in EEP!')
        return None
    if rorg_type not in self.telegrams[eep_rorg][rorg_func].keys():
        self.logger.warn('Cannot find type in EEP!')
        return None
    profile = self.telegrams[eep_rorg][rorg_func][rorg_type]
    if command:
        # profiles carrying a <command> element select data by command id
        eep_command = profile.find('command', recursive=False)
        if not eep_command:
            return profile.find('data', recursive=False)
        return profile.find('data', {'command': str(command)}, recursive=False)
    if direction is None:
        return profile.find('data', recursive=False)
    return profile.find('data', {'direction': direction}, recursive=False)
Find profile and data description matching RORG FUNC and TYPE
19,057
def get_values(self, profile, bitarray, status):
    """Get keys and values from bitarray according to profile.

    Returns (keys, OrderedDict of parsed entries); empty when the EEP
    file is not loaded or profile is None.
    """
    if not self.init_ok or profile is None:
        return [], {}
    output = OrderedDict({})
    for source in profile.contents:
        if not source.name:
            continue
        if source.name == 'value':
            output.update(self._get_value(source, bitarray))
        if source.name == 'enum':
            output.update(self._get_enum(source, bitarray))
        if source.name == 'status':
            # status fields read from the status bits, not the data bits
            output.update(self._get_boolean(source, status))
    return output.keys(), output
Get keys and values from bitarray
19,058
def set_values(self, profile, data, status, properties):
    """Update data based on the values contained in properties.

    Returns the (data, status) bitarrays; unknown shortcuts are logged
    and skipped.
    """
    if not self.init_ok or profile is None:
        return data, status
    for shortcut, value in properties.items():
        target = profile.find(shortcut=shortcut)
        if not target:
            self.logger.warning('Cannot find data description for shortcut %s', shortcut)
            continue
        if target.name == 'value':
            data = self._set_value(target, value, data)
        if target.name == 'enum':
            data = self._set_enum(target, value, data)
        if target.name == 'status':
            status = self._set_boolean(target, value, status)
    return data, status
Update data based on data contained in properties
19,059
def combine_hex(data):
    """Combine a list of integer byte values into one big integer,
    first element most significant."""
    result = 0x00
    for value in data:
        result = (result << 8) | value
    return result
Combine list of integer values to one big integer
19,060
def parse(self):
    """Parse common fields (status byte, repeater count) from the packet."""
    if self.rorg in (RORG.RPS, RORG.BS1, RORG.BS4):
        # For these telegram types the last data byte carries the status field.
        self.status = self.data[-1]
    if self.rorg == RORG.VLD:
        self.status = self.optional[-1]
    if self.rorg in (RORG.RPS, RORG.BS1, RORG.BS4):
        # Lower four status bits hold the repeater count.
        self.repeater_count = enocean.utils.from_bitarray(self._bit_status[4:])
    return self.parsed
Parse data from Packet
19,061
def select_eep(self, rorg_func, rorg_type, direction=None, command=None):
    """Select the EEP profile for FUNC and TYPE; return True when one was found."""
    self.rorg_func = rorg_func
    self.rorg_type = rorg_type
    profile = self.eep.find_profile(
        self._bit_data, self.rorg, rorg_func, rorg_type, direction, command)
    self._profile = profile
    return profile is not None
Set EEP based on FUNC and TYPE
19,062
def parse_eep(self, rorg_func=None, rorg_type=None, direction=None, command=None):
    """Parse EEP values, optionally (re)selecting the profile when FUNC and TYPE are given."""
    if rorg_func is not None and rorg_type is not None:
        # Caller supplied a new profile selection; apply it first.
        self.select_eep(rorg_func, rorg_type, direction, command)
    provides, values = self.eep.get_values(
        self._profile, self._bit_data, self._bit_status)
    self.parsed.update(values)
    return list(provides)
Parse EEP based on FUNC and TYPE
19,063
def set_eep(self, data):
    """Update packet bits from *data*, a dict keyed by EEP shortcuts."""
    bits, status = self.eep.set_values(
        self._profile, self._bit_data, self._bit_status, data)
    self._bit_data, self._bit_status = bits, status
Update packet data based on EEP . Input data is a dictionary with keys corresponding to the EEP .
19,064
def build(self):
    """Serialize this Packet into the byte list sent to the EnOcean controller."""
    data_len = len(self.data)
    frame = [
        0x55,                       # sync byte
        (data_len >> 8) & 0xFF,     # data length, high byte
        data_len & 0xFF,            # data length, low byte
        len(self.optional),
        int(self.packet_type),
    ]
    frame.append(crc8.calc(frame[1:5]))   # header CRC
    frame.extend(self.data)
    frame.extend(self.optional)
    frame.append(crc8.calc(frame[6:]))    # data CRC over data + optional
    return frame
Build Packet for sending to EnOcean controller
19,065
def _get_from_send_queue(self):
    """Pop the next outgoing packet from the transmit queue, or None if empty."""
    try:
        outgoing = self.transmit.get(block=False)
    except queue.Empty:
        return None
    self.logger.info('Sending packet')
    self.logger.debug(outgoing)
    return outgoing
Get message from send queue if one exists
19,066
def parse(self):
    """Parse buffered bytes into packets and hand them to the receive queue.

    Loops until the buffer no longer contains a complete message.  Complete
    packets are timestamped; UTE teach-in requests get an automatic response
    when teach-in is enabled; packets go to the callback if one is set,
    otherwise to the receive queue.
    """
    while True:
        status, self._buffer, packet = Packet.parse_msg(self._buffer)
        if status == PARSE_RESULT.INCOMPLETE:
            # Not enough bytes buffered yet; wait for more serial data.
            return status
        if status == PARSE_RESULT.OK and packet:
            packet.received = datetime.datetime.now()
            if isinstance(packet, UTETeachInPacket) and self.teach_in:
                # Automatically acknowledge UTE teach-in requests.
                response_packet = packet.create_response_packet(self.base_id)
                self.logger.info('Sending response to UTE teach-in.')
                self.send(response_packet)
            if self.__callback is None:
                self.receive.put(packet)
            else:
                # A user-supplied callback replaces the receive queue.
                self.__callback(packet)
            self.logger.debug(packet)
Parses messages and puts them to receive queue
19,067
def base_id(self):
    """Fetch the Base ID from the transmitter if not cached, then return it.

    Sends the CO_RD_IDBASE (0x08) common command and polls the receive queue
    for up to ~1 second.  Non-matching packets are put back so other
    consumers still see them.
    """
    if self._base_id is not None:
        return self._base_id
    # CO_RD_IDBASE common command asks the module for its base ID.
    self.send(Packet(PACKET.COMMON_COMMAND, data=[0x08]))
    # Poll up to 10 times with a 0.1 s timeout each for the matching response.
    for _ in range(10):
        try:
            packet = self.receive.get(block=True, timeout=0.1)
            if packet.packet_type == PACKET.RESPONSE and packet.response == RETURN_CODE.OK and len(packet.response_data) == 4:
                self._base_id = packet.response_data
                # Put the response back so other listeners can also see it.
                self.receive.put(packet)
                break
            # Not the packet we wanted; return it to the queue untouched.
            self.receive.put(packet)
        except queue.Empty:
            continue
    return self._base_id
Fetches Base ID from the transmitter if required . Otherwise returns the currently set Base ID .
19,068
def cli(*, worker_settings, burst, check, watch, verbose):
    """CLI entry point: run an arq worker, health-check it, or watch-reload it."""
    sys.path.append(os.getcwd())
    settings_cls = import_string(worker_settings)
    logging.config.dictConfig(default_log_config(verbose))
    if check:
        exit(check_health(settings_cls))
    else:
        worker_kwargs = {} if burst is None else {'burst': burst}
        if watch:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(watch_reload(watch, settings_cls, loop))
        else:
            run_worker(settings_cls, **worker_kwargs)
Job queues in python with asyncio and redis .
19,069
def next_cron(
    previous_dt: datetime,
    *,
    month: Union[None, set, int] = None,
    day: Union[None, set, int] = None,
    weekday: Union[None, set, int, str] = None,
    hour: Union[None, set, int] = None,
    minute: Union[None, set, int] = None,
    second: Union[None, set, int] = 0,
    microsecond: int = 123_456,
):
    """Return the next datetime after *previous_dt* matching the given constraints."""
    candidate = previous_dt + timedelta(seconds=1)
    if isinstance(weekday, str):
        # Accept weekday names; translate to the numeric index used internally.
        weekday = weekdays.index(weekday.lower())
    constraints = dict(
        month=month, day=day, weekday=weekday,
        hour=hour, minute=minute, second=second, microsecond=microsecond,
    )
    # _get_next_dt returns None once every field matches; otherwise it bumps
    # the candidate forward and we try again.
    while True:
        bumped = _get_next_dt(candidate, constraints)
        if bumped is None:
            return candidate
        candidate = bumped
Find the next datetime matching the given parameters .
19,070
def cron(
    coroutine: Union[str, Callable],
    *,
    name: Optional[str] = None,
    month: Union[None, set, int] = None,
    day: Union[None, set, int] = None,
    weekday: Union[None, set, int, str] = None,
    hour: Union[None, set, int] = None,
    minute: Union[None, set, int] = None,
    second: Union[None, set, int] = 0,
    microsecond: int = 123_456,
    run_at_startup: bool = False,
    unique: bool = True,
    timeout: Optional[SecondsTimedelta] = None,
    keep_result: Optional[float] = 0,
    max_tries: Optional[int] = 1,
) -> CronJob:
    """Create a cron job, i.e. a coroutine executed at specific times."""
    if isinstance(coroutine, str):
        # Dotted-path reference: derive a default name, then import the function.
        name = name or 'cron:' + coroutine
        coroutine = import_string(coroutine)
    assert asyncio.iscoroutinefunction(coroutine), f'{coroutine} is not a coroutine function'
    return CronJob(
        name or 'cron:' + coroutine.__qualname__,
        coroutine,
        month, day, weekday, hour, minute, second, microsecond,
        run_at_startup, unique,
        to_seconds(timeout), to_seconds(keep_result), max_tries,
    )
Create a cron job eg . it should be executed at specific times .
19,071
def to_unix_ms(dt: datetime) -> int:
    """Convert *dt* to milliseconds since 1970, honouring its timezone if set."""
    # Naive datetimes are measured against the naive epoch, aware ones
    # against the tz-aware epoch, so subtraction is always valid.
    reference = epoch if dt.utcoffset() is None else epoch_tz
    return as_int((dt - reference).total_seconds() * 1000)
convert a datetime to number of milliseconds since 1970 and calculate timezone offset
19,072
async def create_pool(settings: RedisSettings = None, *, _retry: int = 0) -> ArqRedis:
    """Create a new redis pool, retrying up to ``conn_retries`` times if the connection fails.

    ``_retry`` is internal recursion state — callers should not pass it.
    """
    settings = settings or RedisSettings()
    addr = settings.host, settings.port
    try:
        pool = await aioredis.create_redis_pool(
            addr, db=settings.database, password=settings.password,
            timeout=settings.conn_timeout, encoding='utf8',
            commands_factory=ArqRedis,
        )
    except (ConnectionError, OSError, aioredis.RedisError, asyncio.TimeoutError) as e:
        if _retry < settings.conn_retries:
            logger.warning(
                'redis connection error %s:%s %s %s, %d retries remaining...',
                settings.host, settings.port, e.__class__.__name__, e,
                settings.conn_retries - _retry,
            )
            await asyncio.sleep(settings.conn_retry_delay)
        else:
            # Retries exhausted: surface the original connection error.
            raise
    else:
        if _retry > 0:
            logger.info('redis connection successful')
        return pool
    # Only reached after a failed attempt with retries remaining.
    return await create_pool(settings, _retry=_retry + 1)
Create a new redis pool retrying up to conn_retries times if the connection fails .
19,073
async def enqueue_job(
    self,
    function: str,
    *args: Any,
    _job_id: Optional[str] = None,
    _defer_until: Optional[datetime] = None,
    _defer_by: Union[None, int, float, timedelta] = None,
    _expires: Union[None, int, float, timedelta] = None,
    _job_try: Optional[int] = None,
    **kwargs: Any,
) -> Optional[Job]:
    """Enqueue a job.

    Returns a Job handle, or None when a job with the same id already exists
    or another client enqueued it concurrently.
    """
    job_id = _job_id or uuid4().hex
    job_key = job_key_prefix + job_id
    assert not (_defer_until and _defer_by), "use either 'defer_until' or 'defer_by' or neither, not both"
    defer_by_ms = to_ms(_defer_by)
    expires_ms = to_ms(_expires)
    with await self as conn:
        # WATCH the job key so the MULTI/EXEC below aborts if another client
        # creates the same job between the existence check and EXEC.
        pipe = conn.pipeline()
        pipe.unwatch()
        pipe.watch(job_key)
        job_exists = pipe.exists(job_key)
        await pipe.execute()
        if await job_exists:
            # Duplicate job id: nothing enqueued.
            return
        enqueue_time_ms = timestamp_ms()
        # The sorted-set score encodes when the job becomes runnable.
        if _defer_until is not None:
            score = to_unix_ms(_defer_until)
        elif defer_by_ms:
            score = enqueue_time_ms + defer_by_ms
        else:
            score = enqueue_time_ms
        # Job payload expires after its deferral window plus a safety margin.
        expires_ms = expires_ms or score - enqueue_time_ms + expires_extra_ms
        job = pickle_job(function, args, kwargs, _job_try, enqueue_time_ms)
        tr = conn.multi_exec()
        tr.psetex(job_key, expires_ms, job)
        tr.zadd(queue_name, score, job_id)
        try:
            await tr.execute()
        except MultiExecError:
            # Watched key changed: another client won the race.
            return
    return Job(job_id, self)
Enqueue a job .
19,074
async def all_job_results(self) -> List[JobResult]:
    """Fetch results for every job stored in redis, ordered by enqueue time."""
    result_keys = await self.keys(result_key_prefix + '*')
    fetched = await asyncio.gather(*(self._get_job_result(k) for k in result_keys))
    return sorted(fetched, key=attrgetter('enqueue_time'))
Get results for all jobs in redis .
19,075
def func(
    coroutine: Union[str, Function, Callable],
    *,
    name: Optional[str] = None,
    keep_result: Optional[SecondsTimedelta] = None,
    timeout: Optional[SecondsTimedelta] = None,
    max_tries: Optional[int] = None,
) -> Function:
    """Wrap a job coroutine so extra settings can be configured on it."""
    if isinstance(coroutine, Function):
        # Already wrapped; return unchanged.
        return coroutine
    if isinstance(coroutine, str):
        # Dotted-path reference: default the name, then import the function.
        name = name or coroutine
        coroutine = import_string(coroutine)
    assert asyncio.iscoroutinefunction(coroutine), f'{coroutine} is not a coroutine function'
    return Function(
        name or coroutine.__qualname__, coroutine,
        to_seconds(timeout), to_seconds(keep_result), max_tries,
    )
Wrapper for a job function which lets you configure more settings .
19,076
def run(self) -> None:
    """Run the worker synchronously; always close worker connections on exit."""
    task = self.loop.create_task(self.main())
    self.main_task = task
    try:
        self.loop.run_until_complete(task)
    except asyncio.CancelledError:
        # Graceful shutdown: the main task was cancelled (e.g. by a signal).
        pass
    finally:
        self.loop.run_until_complete(self.close())
Sync function to run the worker finally closes worker connections .
19,077
async def async_run(self) -> None:
    """Run the worker asynchronously; connections stay open (useful in tests)."""
    task = self.loop.create_task(self.main())
    self.main_task = task
    await task
Asynchronously run the worker does not close connections . Useful when testing .
19,078
async def result(self, timeout: Optional[float] = None, *, pole_delay: float = 0.5) -> Any:
    """Wait for and return the job's result; re-raise the job's exception if it failed.

    Raises asyncio.TimeoutError once *timeout* seconds have elapsed.
    """
    async for elapsed in poll(pole_delay):
        info = await self.result_info()
        if info:
            if info.success:
                return info.result
            # The job raised: the stored result IS the exception — propagate it.
            raise info.result
        if timeout is not None and elapsed > timeout:
            raise asyncio.TimeoutError()
Get the result of the job, waiting if it's not yet available. If the job raised an exception, it will be raised here.
19,079
async def info(self) -> Optional[JobDef]:
    """Return all available job information without waiting for the result."""
    job_info = await self.result_info()
    if not job_info:
        # No result yet: fall back to the queued job definition.
        raw = await self._redis.get(job_key_prefix + self.job_id, encoding=None)
        if raw:
            job_info = unpickle_job(raw)
    if job_info:
        # Attach the queue score (the job's scheduled run time).
        job_info.score = await self._redis.zscore(queue_name, self.job_id)
    return job_info
All information on a job, including its result if it's available; does not wait for the result.
19,080
async def result_info(self) -> Optional[JobResult]:
    """Return the stored result info, or None if not yet available.

    Never raises the job's own exception — that is :meth:`result`'s job.
    """
    raw = await self._redis.get(result_key_prefix + self.job_id, encoding=None)
    return unpickle_result(raw) if raw else None
Information about the job result if available does not wait for the result . Does not raise an exception even if the job raised one .
19,081
async def status(self) -> JobStatus:
    """Return the current status of the job."""
    if await self._redis.exists(result_key_prefix + self.job_id):
        return JobStatus.complete
    if await self._redis.exists(in_progress_key_prefix + self.job_id):
        return JobStatus.in_progress
    score = await self._redis.zscore(queue_name, self.job_id)
    if not score:
        return JobStatus.not_found
    # A score in the future means the job is deferred; otherwise it's queued.
    return JobStatus.deferred if score > timestamp_ms() else JobStatus.queued
Status of the job .
19,082
def email(self):
    """Return the user's e-mail address, a bot's URL, or None if neither exists."""
    raw = self._raw
    if "profile" in raw:
        address = raw["profile"].get("email")
    elif "bot_url" in raw:
        address = raw["bot_url"]
    else:
        address = None
    if not address:
        logging.debug("No email found for %s", raw.get("name"))
    return address
Shortcut property for finding the e - mail address or bot URL .
19,083
def image_url(self, pixel_size=None):
    """Return the user icon URL at *pixel_size* if present, else the full-size URL."""
    if "profile" not in self._raw:
        return None
    profile = self._raw["profile"]
    if pixel_size:
        sized_key = "image_%s" % pixel_size
        if sized_key in profile:
            return profile[sized_key]
    # Requested size missing (or none requested): fall back to full size.
    return profile[self._DEFAULT_IMAGE_KEY]
Get the URL for the user icon in the desired pixel size if it exists . If no size is supplied give the URL for the full - size image .
19,084
def fields(self):
    """Return the processed fields list, rendering each field's text.

    Markdown rendering is enabled when the message's ``mrkdwn_in`` list
    contains ``"fields"``.
    """
    markdown_enabled = "fields" in self._raw.get("mrkdwn_in", [])
    raw_fields = self._raw.get("fields", [])
    if raw_fields:
        # Fixed: log message previously read "markdown markdown".
        logging.debug("Rendering with markdown %s for %s", markdown_enabled, raw_fields)
    return [
        {
            "title": entry["title"],
            "short": entry["short"],
            "value": self._formatter.render_text(entry["value"], markdown_enabled),
        }
        for entry in raw_fields
    ]
Fetch the fields list and process the text within each field including markdown processing if the message indicates that the fields contain markdown .
19,085
def compile_dm_users(self):
    """Return member info for each non-empty direct-message channel.

    Reads ``dms.json`` and resolves each member id against the loaded user
    data.  DMs listed in ``self._EMPTY_DMS`` are skipped, as are DMs that
    reference an unknown user id (same as original behavior; the dead
    ``dm_members = None`` assignment in the except branch was removed).
    """
    dm_data = self._read_from_json("dms.json")
    all_dms_users = []
    for dm in dm_data.values():
        if dm["id"] in self._EMPTY_DMS:
            continue
        try:
            members = [self.__USER_DATA[m] for m in dm["members"]]
        except KeyError:
            # Unknown member id: skip this DM entirely.
            continue
        all_dms_users.append({"id": dm["id"], "users": members})
    return all_dms_users
Gets the info for the members within the dm
19,086
def compile_mpim_users(self):
    """Return member info for each multi-person instant message (mpim) channel.

    Fixed: removed the pointless ``[c for c in mpim_data.values()]``
    materialization — the dict values are iterated directly.
    """
    mpim_data = self._read_from_json("mpims.json")
    all_mpim_users = []
    for mpim in mpim_data.values():
        all_mpim_users.append({
            "name": mpim["name"],
            "users": [self.__USER_DATA[m] for m in mpim["members"]],
        })
    return all_mpim_users
Gets the info for the members within the multiple person instant message
19,087
def _create_messages(self, names, data, isDms=False):
    """Build per-channel Message lists from each channel's daily json files.

    :param names: channel/dm names (directory names under the archive path)
    :param data: raw channel data handed to the SlackFormatter
    :param isDms: when True, channels with no day files are recorded in
        ``self._EMPTY_DMS`` instead of being included
    :returns: dict mapping name -> list of Message, in sorted file order
    """
    chats = {}
    empty_dms = []
    formatter = SlackFormatter(self.__USER_DATA, data)
    for name in names:
        dir_path = os.path.join(self._PATH, name)
        messages = []
        day_files = glob.glob(os.path.join(dir_path, "*.json"))
        if not day_files:
            if isDms:
                # DM with no message files: remember it so callers can skip it.
                empty_dms.append(name)
            continue
        for day in sorted(day_files):
            # NOTE(review): `day` already includes _PATH (it came from the glob
            # above), so re-joining with _PATH relies on `day` being absolute —
            # confirm _PATH is always an absolute path.
            with io.open(os.path.join(self._PATH, day), encoding="utf8") as f:
                day_messages = json.load(f)
                messages.extend([Message(formatter, d) for d in day_messages])
        chats[name] = messages
    if isDms:
        self._EMPTY_DMS = empty_dms
    return chats
Creates object of arrays of messages from each json file specified by the names or ids
19,088
def _read_from_json ( self , file ) : try : with io . open ( os . path . join ( self . _PATH , file ) , encoding = "utf8" ) as f : return { u [ "id" ] : u for u in json . load ( f ) } except IOError : return { }
Reads the file specified from json and creates an object based on the id of each element
19,089
def to_bytes(s, encoding="utf8"):
    """Convert str *s* to bytes using *encoding* (Python 2/3 compatible)."""
    if PY_VERSION == 2:
        return bytes(s)
    if PY_VERSION == 3:
        return bytes(s, encoding)
    raise ValueError("Is Python 4 out already?")
Converts str s to bytes
19,090
def SHA1_file(filepath, extra=b''):
    """Return the hex SHA1 digest of the file at *filepath*, mixed with *extra* bytes."""
    digest = hashlib.sha1()
    with io.open(filepath, 'rb') as fh:
        # Read in hash-block-sized chunks so large files stay memory-friendly.
        for block in iter(lambda: fh.read(digest.block_size), b''):
            digest.update(block)
    digest.update(extra)
    return digest.hexdigest()
Returns hex digest of SHA1 hash of file at filepath
19,091
def extract_archive(filepath):
    """Extract the export zip (if needed) and return the extraction path.

    Already-extracted directories are returned as-is.  Otherwise the zip is
    extracted into a temp dir keyed by the archive's SHA1, salted with the
    slackviewer version so upgrades trigger re-extraction.

    :raises TypeError: when *filepath* is neither a directory nor a zipfile
    """
    if os.path.isdir(filepath):
        # Already a directory: assume a previously extracted archive.
        path = os.path.abspath(filepath)
        print("Archive already extracted. Viewing from {}...".format(path))
        return path
    elif not zipfile.is_zipfile(filepath):
        raise TypeError("{} is not a zipfile".format(filepath))
    # Salt the content hash with the viewer version: new versions re-extract.
    archive_sha = SHA1_file(
        filepath=filepath,
        extra=to_bytes(slackviewer.__version__)
    )
    extracted_path = os.path.join(SLACKVIEWER_TEMP_PATH, archive_sha)
    if os.path.exists(extracted_path):
        # Same content + version already extracted; reuse it.
        print("{} already exists".format(extracted_path))
    else:
        with zipfile.ZipFile(filepath) as zip:
            print("{} extracting to {}...".format(filepath, extracted_path))
            zip.extractall(path=extracted_path)
            print("{} extracted to {}".format(filepath, extracted_path))
            create_archive_info(filepath, extracted_path, archive_sha)
    return extracted_path
Returns the path of the archive
19,092
def create_archive_info(filepath, extracted_path, archive_sha=None):
    """Write a .slackviewer_archive_info.json file describing the extracted archive."""
    info = {
        "sha1": archive_sha,
        "filename": os.path.split(filepath)[1],
    }
    info_path = os.path.join(extracted_path, ".slackviewer_archive_info.json")
    with io.open(info_path, 'w+', encoding="utf-8") as fh:
        fh.write(to_unicode(json.dumps(info, ensure_ascii=False)))
Saves archive info to a json file
19,093
def get_all(self, key, fallback=None):
    """Return all header values for *key*; *fallback* (or an empty list) when absent."""
    if key in self.headers:
        return self.headers[key]
    return fallback or []
returns all header values for given key
19,094
def add(self, key, value):
    """Append *value* to the header list for *key*, creating the list if needed."""
    self.headers.setdefault(key, []).append(value)
    if self.sent_time:
        # Mark that the envelope changed after it was already sent.
        self.modified_since_sent = True
add header value
19,095
def attach(self, attachment, filename=None, ctype=None):
    """Attach a file: accepts an Attachment instance or a filesystem path string."""
    if isinstance(attachment, Attachment):
        self.attachments.append(attachment)
    elif isinstance(attachment, str):
        # Treat the string as a path; wrap it into a MIME part first.
        expanded = os.path.expanduser(attachment)
        part = helper.mimewrap(expanded, filename, ctype)
        self.attachments.append(Attachment(part))
    else:
        raise TypeError('attach accepts an Attachment or str')
    if self.sent_time:
        # Mark that the envelope changed after it was already sent.
        self.modified_since_sent = True
attach a file
19,096
def parse_template(self, tmp, reset=False, only_body=False):
    """Parse a template or user-edited string and fill this envelope.

    :param tmp: full text (header lines, blank line, body) or just a body
    :param reset: drop all existing headers before adding the parsed ones
    :param only_body: treat *tmp* as the body only, skipping header parsing
    """
    # Fixed: was logging.debug('GoT: ', tmp) — a format string with no
    # placeholder for the argument, which breaks log record formatting.
    logging.debug('GoT: %s', tmp)
    if self.sent_time:
        self.modified_since_sent = True
    if only_body:
        self.body = tmp
    else:
        m = re.match(r'(?P<h>([a-zA-Z0-9_-]+:.+\n)*)\n?(?P<b>(\s*.*)*)', tmp)
        assert m
        d = m.groupdict()
        headertext = d['h']
        self.body = d['b']
        if reset:
            self.headers = {}
        # Fold continuation lines: a line not starting with "Name:" extends
        # the previous header's value.
        key = value = None
        for line in headertext.splitlines():
            if re.match('[a-zA-Z0-9_-]+:', line):
                if key and value:
                    self.add(key, value)
                key, value = line.strip().split(':', 1)
                value = value.strip()
            elif key and value:
                value += line
        if key and value:
            self.add(key, value)
        # Pseudo-header "Attach: <glob>" attaches matching files, then is removed.
        if 'Attach' in self:
            to_attach = []
            for line in self.get_all('Attach'):
                gpath = os.path.expanduser(line.strip())
                to_attach += [g for g in glob.glob(gpath) if os.path.isfile(g)]
            logging.debug('Attaching: %s', to_attach)
            for path in to_attach:
                self.attach(path)
            del self['Attach']
parses a template or user edited string to fills this envelope .
19,097
def split_commandline(s, comments=False, posix=True):
    """Split a semicolon-separated commandline string into individual commands."""
    # Escape backslashes and quotes so they survive the posix lexer verbatim.
    escaped = s.replace('\\', '\\\\').replace("'", "\\'").replace('"', '\\"')
    lex = shlex.shlex(escaped, posix=posix)
    lex.whitespace_split = True
    lex.whitespace = ';'  # split only on semicolons; keep spaces inside commands
    if not comments:
        lex.commenters = ''
    return list(lex)
splits semi - colon separated commandlines
19,098
def string_sanitize(string, tab_width=8):
    """Remove carriage returns and expand tabs to spaces at *tab_width* stops."""
    text = string.replace('\r', '')
    sanitized_lines = []
    for raw_line in text.split('\n'):
        if '\t' not in raw_line:
            sanitized_lines.append(raw_line)
            continue
        column = 0
        pieces = []
        chunks = raw_line.split('\t')
        last = len(chunks) - 1
        for idx, chunk in enumerate(chunks):
            column += len(chunk)
            pieces.append(chunk)
            if idx < last:
                # Pad out to the next multiple of tab_width.
                pad = tab_width - (column % tab_width)
                pieces.append(' ' * pad)
                column += pad
        sanitized_lines.append(''.join(pieces))
    return '\n'.join(sanitized_lines)
Strips carriage returns and expands tab characters to spaces.
19,099
def string_decode(string, enc='ascii'):
    """Safely decode *string* to unicode, using *enc* as a hint."""
    if enc is None:
        enc = 'ascii'
    try:
        return str(string, enc, errors='replace')
    except LookupError:
        # Unknown codec name: fall back to ascii with replacement characters.
        return string.decode('ascii', errors='replace')
    except TypeError:
        # Already a str (not decodable input): return unchanged.
        return string
Safely decodes a bytestring to a unicode string, respecting enc as a hint.