idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
240,300
def check_conflicts(self):
	"""Check for any conflicts between modules configured to be built.

	Returns a list of single-element error tuples, one per detected conflict.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	cfg = self.cfg
	# Now consider conflicts
	self.log('PHASE: conflicts', level=logging.DEBUG)
	errs = []
	self.pause_point('\nNow checking for conflicts between modules', print_input=False, level=3)
	for module_id in self.module_ids():
		if not cfg[module_id]['shutit.core.module.build']:
			continue
		conflicter = self.shutit_map[module_id]
		for conflictee in conflicter.conflicts_with:
			# If the module id isn't there, there's no problem.
			conflictee_obj = self.shutit_map.get(conflictee)
			if conflictee_obj is None:
				continue
			conflicter_active = (cfg[conflicter.module_id]['shutit.core.module.build'] or
			                     self.is_to_be_built_or_is_installed(conflicter))
			conflictee_active = (cfg[conflictee_obj.module_id]['shutit.core.module.build'] or
			                     self.is_to_be_built_or_is_installed(conflictee_obj))
			if conflicter_active and conflictee_active:
				errs.append(('conflicter module id: ' + conflicter.module_id + ' is configured to be built or is already built but conflicts with module_id: ' + conflictee_obj.module_id,))
	return errs
Checks for any conflicts between modules configured to be built .
348
12
240,301
def do_remove(self, loglevel=logging.DEBUG):
	"""Remove modules by calling the remove method on those configured for removal."""
	shutit_global.shutit_global_object.yield_to_draw()
	cfg = self.cfg
	# Now get the run_order keys in order and go.
	self.log('PHASE: remove', level=loglevel)
	self.pause_point('\nNow removing any modules that need removing', print_input=False, level=3)
	# Login at least once to get the exports.
	for module_id in self.module_ids():
		module = self.shutit_map[module_id]
		self.log('considering whether to remove: ' + module_id, level=logging.DEBUG)
		if not cfg[module_id]['shutit.core.module.remove']:
			continue
		self.log('removing: ' + module_id, level=logging.DEBUG)
		self.login(prompt_prefix=module_id, command=shutit_global.shutit_global_object.bash_startup_command, echo=False)
		if not module.remove(self):
			self.log(self.print_modules(), level=logging.DEBUG)
			self.fail(module_id + ' failed on remove', shutit_pexpect_child=self.get_shutit_pexpect_session_from_id('target_child').pexpect_child)  # pragma: no cover
		else:
			if self.build['delivery'] in ('docker', 'dockerfile'):
				# Create a directory and files to indicate this has been removed.
				record_dir = shutit_global.shutit_global_object.shutit_state_dir_build_db_dir + '/module_record/' + module.module_id
				self.send(' command mkdir -p ' + record_dir + ' && command rm -f ' + record_dir + '/built && command touch ' + record_dir + '/removed', loglevel=loglevel, echo=False)
			# Remove from "installed" cache
			env = self.get_current_shutit_pexpect_session_environment()
			if module.module_id in env.modules_installed:
				env.modules_installed.remove(module.module_id)
			# Add to "not installed" cache
			env.modules_not_installed.append(module.module_id)
		self.logout(echo=False)
Remove modules by calling remove method on those configured for removal .
631
12
240,302
def do_build(self):
	"""Run the build phase, building any modules that we've determined need building."""
	shutit_global.shutit_global_object.yield_to_draw()
	cfg = self.cfg
	self.log('PHASE: build, repository work', level=logging.DEBUG)
	module_id_list = self.module_ids()
	if self.build['deps_only']:
		# BUGFIX: filter() returns a lazy iterator on Python 3, which cannot be
		# indexed with [-1] below; materialize the result as a list.
		module_id_list_build_only = [mid for mid in module_id_list if cfg[mid]['shutit.core.module.build']]
	for module_id in module_id_list:
		module = self.shutit_map[module_id]
		self.log('Considering whether to build: ' + module.module_id, level=logging.INFO)
		if not cfg[module.module_id]['shutit.core.module.build']:
			continue
		if self.build['delivery'] not in module.ok_delivery_methods:
			self.fail('Module: ' + module.module_id + ' can only be built with one of these --delivery methods: ' + str(module.ok_delivery_methods) + '\nSee shutit build -h for more info, or try adding: --delivery <method> to your shutit invocation')  # pragma: no cover
		if self.is_installed(module):
			self.build['report'] = (self.build['report'] + '\nBuilt already: ' + module.module_id + ' with run order: ' + str(module.run_order))
		else:
			# We move to the module directory to perform the build, returning immediately afterwards.
			if self.build['deps_only'] and module_id == module_id_list_build_only[-1]:
				# If this is the last module, and we are only building deps, stop here.
				self.build['report'] = (self.build['report'] + '\nSkipping: ' + module.module_id + ' with run order: ' + str(module.run_order) + '\n\tas this is the final module and we are building dependencies only')
			else:
				revert_dir = os.getcwd()
				self.get_current_shutit_pexpect_session_environment().module_root_dir = os.path.dirname(self.shutit_file_map[module_id])
				self.chdir(self.get_current_shutit_pexpect_session_environment().module_root_dir)
				self.login(prompt_prefix=module_id, command=shutit_global.shutit_global_object.bash_startup_command, echo=False)
				self.build_module(module)
				self.logout(echo=False)
				self.chdir(revert_dir)
		if self.is_installed(module):
			self.log('Starting module', level=logging.DEBUG)
			if not module.start(self):
				self.fail(module.module_id + ' failed on start', shutit_pexpect_child=self.get_shutit_pexpect_session_from_id('target_child').pexpect_child)
Runs build phase, building any modules that we've determined need building.
725
14
240,303
def stop_all(self, run_order=-1):
	"""Run the stop method on all installed modules with run_order less than or
	equal to the passed-in run_order (all of them when run_order is -1).

	Used when target is exporting itself mid-build, so we clean up state
	before committing run files etc.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	# sort them so they're stopped in reverse order
	for module_id in self.module_ids(rev=True):
		shutit_module_obj = self.shutit_map[module_id]
		if run_order != -1 and shutit_module_obj.run_order > run_order:
			continue
		if self.is_installed(shutit_module_obj) and not shutit_module_obj.stop(self):
			self.fail('failed to stop: ' + module_id, shutit_pexpect_child=self.get_shutit_pexpect_session_from_id('target_child').shutit_pexpect_child)
Runs stop method on all modules less than the passed - in run_order . Used when target is exporting itself mid - build so we clean up state before committing run files etc .
184
37
240,304
def start_all(self, run_order=-1):
	"""Run the start method on all installed modules with run_order less than or
	equal to the passed-in run_order (all of them when run_order is -1).

	Used when target is exporting itself mid-build, so we can export a clean
	target and still have depended-on modules running if necessary.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	# sort them so they're started in order
	for module_id in self.module_ids():
		shutit_module_obj = self.shutit_map[module_id]
		if run_order != -1 and shutit_module_obj.run_order > run_order:
			continue
		if self.is_installed(shutit_module_obj) and not shutit_module_obj.start(self):
			self.fail('failed to start: ' + module_id, shutit_pexpect_child=self.get_shutit_pexpect_session_from_id('target_child').shutit_pexpect_child)
Runs start method on all modules less than the passed - in run_order . Used when target is exporting itself mid - build so we can export a clean target and still depended - on modules running if necessary .
180
43
240,305
def init_shutit_map(self):
	"""Initialize the module map of shutit based on the modules we have gathered."""
	shutit_global.shutit_global_object.yield_to_draw()
	modules = self.shutit_modules
	# Have we got anything to process outside of special modules?
	if len([mod for mod in modules if mod.run_order > 0]) < 1:
		self.log(modules, level=logging.DEBUG)
		path = ':'.join(self.host['shutit_module_path'])
		self.log('\nIf you are new to ShutIt, see:\n\n\thttp://ianmiell.github.io/shutit/\n\nor try running\n\n\tshutit skeleton\n\n', level=logging.INFO)
		if path == '':
			self.fail('No ShutIt modules aside from core ones found and no ShutIt module path given.\nDid you set --shutit_module_path/-m wrongly?\n')  # pragma: no cover
		elif path == '.':
			self.fail('No modules aside from core ones found and no ShutIt module path given apart from default (.).\n\n- Did you set --shutit_module_path/-m?\n- Is there a STOP* file in your . dir?')  # pragma: no cover
		else:
			self.fail('No modules aside from core ones found and no ShutIt modules in path:\n\n' + path + '\n\nor their subfolders. Check your --shutit_module_path/-m setting and check that there are ShutIt modules below without STOP* files in any relevant directories.')  # pragma: no cover
	self.log('PHASE: base setup', level=logging.DEBUG)
	run_orders = {}
	has_core_module = False
	for module in modules:
		assert isinstance(module, ShutItModule), shutit_util.print_debug()
		if module.module_id in self.shutit_map:
			self.fail('Duplicated module id: ' + module.module_id + '\n\nYou may want to check your --shutit_module_path setting')  # pragma: no cover
		if module.run_order in run_orders:
			self.fail('Duplicate run order: ' + str(module.run_order) + ' for ' + module.module_id + ' and ' + run_orders[module.run_order].module_id + '\n\nYou may want to check your --shutit_module_path setting')  # pragma: no cover
		if module.run_order == 0:
			has_core_module = True
		self.shutit_map[module.module_id] = run_orders[module.run_order] = module
		self.shutit_file_map[module.module_id] = get_module_file(self, module)
	if not has_core_module:
		self.fail('No module with run_order=0 specified! This is required.')
Initializes the module map of shutit based on the modules we have gathered .
669
16
240,306
def conn_target(self):
	"""Connect to the target."""
	shutit_global.shutit_global_object.yield_to_draw()
	# Find the configured connection module.
	conn_module = next((mod for mod in self.conn_modules if mod.module_id == self.build['conn_module']), None)
	if conn_module is None:
		self.fail('Couldn\'t find conn_module ' + self.build['conn_module'])  # pragma: no cover
	# Set up the target in pexpect.
	conn_module.get_config(self)
	conn_module.build(self)
Connect to the target .
132
5
240,307
def finalize_target(self):
	"""Finalize the target using the core finalize method."""
	shutit_global.shutit_global_object.yield_to_draw()
	self.pause_point('\nFinalizing the target module (' + self.shutit_main_dir + '/shutit_setup.py)', print_input=False, level=3)
	# Can assume conn_module exists at this point, but guard anyway so a
	# missing module fails with a clear message rather than a NameError
	# (consistent with conn_target).
	conn_module = None
	for mod in self.conn_modules:
		if mod.module_id == self.build['conn_module']:
			conn_module = mod
			break
	if conn_module is None:
		self.fail('Couldn\'t find conn_module ' + self.build['conn_module'])  # pragma: no cover
	conn_module.finalize(self)
Finalize the target using the core finalize method .
124
11
240,308
def resolve_dependencies(self, to_build, depender):
	"""Add any required dependencies of depender to the to_build list.

	Marks each added dependee as configured to build. Always returns True.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	self.log('In resolve_dependencies', level=logging.DEBUG)
	cfg = self.cfg
	for dependee_id in depender.depends_on:
		dependee = self.shutit_map.get(dependee_id)
		# Don't care if module doesn't exist, we check this later
		if (dependee and
				dependee not in to_build and
				cfg[dependee_id]['shutit.core.module.build_ifneeded']):
			to_build.append(dependee)
			cfg[dependee_id]['shutit.core.module.build'] = True
	return True
Add any required dependencies .
175
5
240,309
def check_dependee_exists(self, depender, dependee, dependee_id):
	"""Check whether a depended-on module is available.

	Returns an explanatory error string when the dependee is missing, '' otherwise.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	# If the module id isn't there, there's a problem.
	if dependee is not None:
		return ''
	return ('module: \n\n' + dependee_id + '\n\nnot found in paths: ' + str(self.host['shutit_module_path']) + ' but needed for ' + depender.module_id + '\nCheck your --shutit_module_path setting and ensure that all modules configured to be built are in that path setting, eg "--shutit_module_path /path/to/other/module/:."\n\nAlso check that the module is configured to be built with the correct module id in that module\'s configs/build.cnf file.\n\nSee also help.')
Checks whether a depended - on module is available .
210
11
240,310
def check_dependee_build(self, depender, dependee, dependee_id):
	"""Check whether a depended-on module is configured to be built.

	Returns an explanatory error string when it is not, '' otherwise.
	"""
	shutit_global.shutit_global_object.yield_to_draw()
	cfg = self.cfg
	# If depender is installed or will be installed, so must the dependee
	if (cfg[dependee.module_id]['shutit.core.module.build'] or
			self.is_to_be_built_or_is_installed(dependee)):
		return ''
	return ('depender module id:\n\n[' + depender.module_id + ']\n\nis configured: "build:yes" or is already built but dependee module_id:\n\n[' + dependee_id + ']\n\n is not configured: "build:yes"')
Checks whether a depended on module is configured to be built .
181
13
240,311
def destroy(self):
	"""Finish up a session."""
	# Both session types get the same treatment.
	# TODO: does this work/handle already being logged out/logged in deep OK?
	if self.session_type in ('bash', 'vagrant'):
		self.logout()
Finish up a session .
82
5
240,312
def shutit_method_scope(func):
	"""Notify the ShutIt object whenever we call a shutit module method.

	This allows setting values for the scope of a function.
	"""
	def wrapper(self, shutit):
		"""Wrapper to call a shutit module method, notifying the ShutIt object.
		"""
		return func(self, shutit)
	return wrapper
Notifies the ShutIt object whenever we call a shutit module method . This allows setting values for the scope of a function .
52
26
240,313
def managing_thread_main_simple():
	"""Simpler thread to track whether the main thread has been quiet for long
	enough that a thread dump should be printed.

	Loops forever; every 5 seconds, if idle-tracing is on and no log has been
	seen for 10 seconds, prints the stack of every thread except its own
	(only when the dump differs from the last one printed).
	"""
	import shutit_global
	last_msg = ''
	while True:
		printed_anything = False
		if shutit_global.shutit_global_object.log_trace_when_idle and time.time() - shutit_global.shutit_global_object.last_log_time > 10:
			this_msg = ''
			this_header = ''
			for thread_id, stack in sys._current_frames().items():
				# ignore own thread:
				if thread_id == threading.current_thread().ident:
					continue
				printed_thread_started = False
				for filename, lineno, name, line in traceback.extract_stack(stack):
					if not printed_anything:
						printed_anything = True
						# BUGFIX: was "'\n=' * 80" which emits 80 newline/equals
						# pairs; the matching rule lines below use "'=' * 80".
						this_header += '\n' + '=' * 80 + '\n'
						this_header += 'STACK TRACES PRINTED ON IDLE: THREAD_ID: ' + str(thread_id) + ' at ' + time.strftime('%c') + '\n'
						this_header += '=' * 80 + '\n'
					if not printed_thread_started:
						printed_thread_started = True
					this_msg += '%s:%d:%s' % (filename, lineno, name) + '\n'
					if line:
						this_msg += ' %s' % (line,) + '\n'
			if printed_anything:
				this_msg += '=' * 80 + '\n'
				this_msg += 'STACK TRACES DONE\n'
				this_msg += '=' * 80 + '\n'
			if this_msg != last_msg:
				print(this_header + this_msg)
				last_msg = this_msg
		time.sleep(5)
Simpler thread to track whether main thread has been quiet for long enough that a thread dump should be printed .
387
22
240,314
def map_package(shutit_pexpect_session, package, install_type):
	"""If a package mapping exists for this install_type then return it,
	else return the package unchanged.

	A callable mapping is invoked with the session and '' is returned.
	"""
	mapping = PACKAGE_MAP.get(package)
	if mapping is not None and install_type in mapping:
		ret = mapping[install_type]
		if isinstance(ret, str):
			return ret
		if callable(ret):
			ret(shutit_pexpect_session)
			return ''
	# Otherwise, simply return package
	return package
If package mapping exists then return it else return package .
108
11
240,315
def is_file_secure(file_name):
	"""Return False if the file is considered insecure, True if secure.

	A file is insecure when any group/other permission bit is set.
	If the file doesn't exist, it's considered secure!
	"""
	if not os.path.isfile(file_name):
		return True
	file_mode = os.stat(file_name).st_mode
	# S_IRWXG/S_IRWXO are the unions of the group/other rwx bits.
	group_other_bits = stat.S_IRWXG | stat.S_IRWXO
	return not (file_mode & group_other_bits)
Returns false if the file is considered insecure, true if secure. If the file doesn't exist, it's considered secure!
102
21
240,316
def random_id(size=8, chars=string.ascii_letters + string.digits):
	"""Generate a random string of the given size from the given chars."""
	picks = [random.choice(chars) for _ in range(size)]
	return ''.join(picks)
Generates a random string of given size from the given chars .
43
13
240,317
def random_word(size=6):
	"""Return a random word of the given size in lower case.

	Words containing apostrophes are rejected.
	"""
	words = shutit_assets.get_words().splitlines()
	while True:
		candidate = words[int(random.random() * (len(words) - 1))]
		if len(candidate) == size and "'" not in candidate:
			return candidate.lower()
Returns a random word in lower case .
71
8
240,318
def ctrl_c_signal_handler(_, frame):
	"""CTRL-c signal handler - enters a pause point if it can.

	More than 10 CTRL-c presses, or a second press while already handling
	one, exits the program.
	"""
	global ctrl_c_calls
	ctrl_c_calls += 1
	if ctrl_c_calls > 10:
		shutit_global.shutit_global_object.handle_exit(exit_code=1)
	shutit_frame = get_shutit_frame(frame)
	if in_ctrlc:
		msg = 'CTRL-C hit twice, quitting'
		if shutit_frame:
			shutit_global.shutit_global_object.shutit_print('\n')
			shutit = shutit_frame.f_locals['shutit']
			shutit.log(msg, level=logging.CRITICAL)
		else:
			shutit_global.shutit_global_object.shutit_print(msg)
		shutit_global.shutit_global_object.handle_exit(exit_code=1)
	if shutit_frame:
		shutit = shutit_frame.f_locals['shutit']
		if shutit.build['ctrlc_passthrough']:
			# BUGFIX: was 'shutit.self.get_current_shutit_pexpect_session()',
			# which raises AttributeError - 'self' is not an attribute of shutit.
			shutit.get_current_shutit_pexpect_session().pexpect_child.sendline(r'')
			return
		shutit_global.shutit_global_object.shutit_print(colorise(31, "\r" + r"You may need to wait for a command to complete before a pause point is available. Alternatively, CTRL-\ to quit."))
		shutit.build['ctrlc_stop'] = True
		t = threading.Thread(target=ctrlc_background)
		t.daemon = True
		t.start()
		# Reset the ctrl-c calls
		ctrl_c_calls = 0
		return
	shutit_global.shutit_global_object.shutit_print(colorise(31, '\n' + '*' * 80))
	shutit_global.shutit_global_object.shutit_print(colorise(31, "CTRL-c caught, CTRL-c twice to quit."))
	shutit_global.shutit_global_object.shutit_print(colorise(31, '*' * 80))
	t = threading.Thread(target=ctrlc_background)
	t.daemon = True
	t.start()
	# Reset the ctrl-c calls
	ctrl_c_calls = 0
CTRL - c signal handler - enters a pause point if it can .
531
15
240,319
def get_input(msg, default='', valid=None, boolean=False, ispass=False, color=None):
	"""Get input from the user and return the answer.

	msg     -- prompt to show
	default -- returned when the user gives no answer
	valid   -- iterable of acceptable answers; re-prompts until one is given
	boolean -- interpret the answer as a yes/no and return True/False
	ispass  -- hide the typed input (password entry)
	color   -- optional colorise() code for the prompt
	"""
	glob = shutit_global.shutit_global_object
	# Switch off log tracing while blocked waiting for user input.
	log_trace_when_idle_original_value = glob.log_trace_when_idle
	glob.log_trace_when_idle = False
	try:
		if boolean and valid is None:
			valid = ('yes', 'y', 'Y', '1', 'true',
			         'no', 'n', 'N', '0', 'false')
		def prompt_user():
			# Prompt once, optionally colorised.
			if color:
				return util_raw_input(prompt=colorise(color, msg), ispass=ispass)
			return util_raw_input(msg, ispass=ispass)
		answer = prompt_user()
		if boolean and answer in ('', None) and default != '':
			return default
		if valid is not None:
			while answer not in valid:
				glob.shutit_print('Answer must be one of: ' + str(valid), transient=True)
				answer = prompt_user()
		if boolean:
			if answer.lower() in ('yes', 'y', '1', 'true', 't'):
				return True
			if answer.lower() in ('no', 'n', '0', 'false', 'f'):
				return False
		return answer or default
	finally:
		# Revert log trace value to original on every exit path.
		glob.log_trace_when_idle = log_trace_when_idle_original_value
Gets input from the user and returns the answer .
532
11
240,320
def build(self, shutit):
	"""Set up the target ready for building."""
	target_child = self.start_container(shutit, 'target_child')
	self.setup_host_child(shutit)
	# TODO: on the host child, check that the image running has bash as its cmd/entrypoint.
	self.setup_target_child(shutit, target_child)
	glob = shutit_global.shutit_global_object
	# Open up the state dir and create this build's db directory.
	shutit.send('chmod -R 777 ' + glob.shutit_state_dir + ' && mkdir -p ' + glob.shutit_state_dir_build_db_dir + '/' + glob.build_id, shutit_pexpect_child=target_child, echo=False)
	return True
Sets up the target ready for building .
179
9
240,321
def build(self, shutit):
	"""Set up the machine ready for building."""
	shutit_pexpect_session = ShutItPexpectSession(shutit, 'target_child', '/bin/bash')
	target_child = shutit_pexpect_session.pexpect_child
	# Wait for the base prompt before handing the session over.
	shutit_pexpect_session.expect(shutit_global.shutit_global_object.base_prompt.strip(), timeout=10)
	self.setup_host_child(shutit)
	self.setup_target_child(shutit, target_child)
	return True
Sets up the machine ready for building .
119
9
240,322
def build(self, shutit):
	"""Initialize target ready for build, updating package management if in a container."""
	if shutit.build['delivery'] in ('docker', 'dockerfile'):
		install_type = shutit.get_current_shutit_pexpect_session_environment().install_type
		if install_type == 'apt':
			shutit.add_to_bashrc('export DEBIAN_FRONTEND=noninteractive')
			if not shutit.command_available('lsb_release'):
				shutit.install('lsb-release')
			shutit.lsb_release()
		elif install_type == 'yum':
			# yum updates are so often "bad" that we let exit codes of 1 through.
			# TODO: make this more sophisticated
			shutit.send('yum update -y', timeout=9999, exit_values=['0', '1'])
	shutit.pause_point('Anything you want to do to the target host ' + 'before the build starts?', level=2)
	return True
Initializes target ready for build and updating package management if in container .
244
14
240,323
def __set_config(self, cfg_section):
	"""Set the local config dict up according to defaults and main shutit configuration.

	Raises Exception when a default of None (i.e. mandatory) is not configured.
	"""
	# Flat (name, default) pairs; a None default means the setting is mandatory.
	defaults = [
		'shutit.core.alerting.emailer.mailto', None,
		'shutit.core.alerting.emailer.mailfrom', 'angry@shutit.tk',
		'shutit.core.alerting.emailer.smtp_server', 'localhost',
		'shutit.core.alerting.emailer.smtp_port', 25,
		'shutit.core.alerting.emailer.use_tls', True,
		'shutit.core.alerting.emailer.send_mail', True,
		'shutit.core.alerting.emailer.subject', 'Shutit Report',
		'shutit.core.alerting.emailer.signature', '--Angry Shutit',
		'shutit.core.alerting.emailer.compress', True,
		'shutit.core.alerting.emailer.username', '',
		'shutit.core.alerting.emailer.password', '',
		'shutit.core.alerting.emailer.safe_mode', True,
		'shutit.core.alerting.emailer.maintainer', '',
		'shutit.core.alerting.emailer.mailto_maintainer', True,
	]
	for cfg_name, cfg_default in zip(defaults[0::2], defaults[1::2]):
		try:
			self.config[cfg_name] = self.shutit.cfg[cfg_section][cfg_name]
		except KeyError:
			if cfg_default is None:
				raise Exception(cfg_section + ' ' + cfg_name + ' must be set')
			self.config[cfg_name] = cfg_default
	# only send a mail to the module's maintainer if configured correctly
	maintainer = self.config['shutit.core.alerting.emailer.maintainer']
	if (self.config['shutit.core.alerting.emailer.mailto_maintainer'] and
			(maintainer == "" or maintainer == self.config['shutit.core.alerting.emailer.mailto'])):
		self.config['shutit.core.alerting.emailer.mailto_maintainer'] = False
		self.config['shutit.core.alerting.emailer.maintainer'] = ""
Set a local config array up according to defaults and main shutit configuration
568
14
240,324
def __get_smtp(self):
	"""Return the appropriate smtplib connection depending on whether we're using TLS.

	NOTE(review): the use_tls branch opens a plain connection and upgrades via
	STARTTLS, while the non-TLS branch uses implicit SSL (SMTP_SSL). This
	mirrors the original logic - confirm the branch naming is intended.
	"""
	server = self.config['shutit.core.alerting.emailer.smtp_server']
	port = self.config['shutit.core.alerting.emailer.smtp_port']
	if self.config['shutit.core.alerting.emailer.use_tls']:
		smtp = SMTP(server, port)
		smtp.starttls()
	else:
		smtp = SMTP_SSL(server, port)
	return smtp
Return the appropriate smtplib connection depending on whether we're using TLS
162
19
240,325
def __compose(self):
	"""Compose the message, pulling together body, attachments etc."""
	msg = MIMEMultipart()
	msg['Subject'] = self.config['shutit.core.alerting.emailer.subject']
	msg['To'] = self.config['shutit.core.alerting.emailer.mailto']
	msg['From'] = self.config['shutit.core.alerting.emailer.mailfrom']
	# add the module's maintainer as a CC if configured
	if self.config['shutit.core.alerting.emailer.mailto_maintainer']:
		msg['Cc'] = self.config['shutit.core.alerting.emailer.maintainer']
	signature = self.config['shutit.core.alerting.emailer.signature']
	if signature != '':
		signature = '\n\n' + signature
	body = MIMEText('\n'.join(self.lines) + signature)
	msg.attach(body)
	for attach in self.attaches:
		msg.attach(attach)
	return msg
Compose the message pulling together body attachments etc
284
9
240,326
def send(self, attachment_failure=False):
	"""Send the email according to the configured setup.

	attachment_failure -- internal flag: True when retrying after the server
	                      rejected the message for oversized attachments.
	"""
	if not self.config['shutit.core.alerting.emailer.send_mail']:
		self.shutit.log('emailer.send: Not configured to send mail!', level=logging.INFO)
		return True
	msg = self.__compose()
	mailto = [self.config['shutit.core.alerting.emailer.mailto']]
	smtp = self.__get_smtp()
	if self.config['shutit.core.alerting.emailer.username'] != '':
		smtp.login(self.config['shutit.core.alerting.emailer.username'],
		           self.config['shutit.core.alerting.emailer.password'])
	if self.config['shutit.core.alerting.emailer.mailto_maintainer']:
		mailto.append(self.config['shutit.core.alerting.emailer.maintainer'])
	try:
		self.shutit.log('Attempting to send email', level=logging.INFO)
		smtp.sendmail(self.config['shutit.core.alerting.emailer.mailfrom'], mailto, msg.as_string())
	except SMTPSenderRefused as refused:
		code = refused.args[0]
		if code == 552 and not attachment_failure:
			# Retry once without the attachments.
			self.shutit.log("Mailserver rejected message due to " + "oversize attachments, attempting to resend without", level=logging.INFO)
			self.attaches = []
			self.lines.append("Oversized attachments not sent")
			self.send(attachment_failure=True)
		else:
			self.shutit.log("Unhandled SMTP error:" + str(refused), level=logging.INFO)
			if not self.config['shutit.core.alerting.emailer.safe_mode']:
				raise refused
	except Exception as error:
		self.shutit.log('Unhandled exception: ' + str(error), level=logging.INFO)
		if not self.config['shutit.core.alerting.emailer.safe_mode']:
			raise error
	finally:
		smtp.quit()
Send the email according to the configured setup
507
8
240,327
def setup_signals():
	"""Set up the signal handlers for SIGINT and SIGQUIT."""
	handlers = ((signal.SIGINT, shutit_util.ctrl_c_signal_handler),
	            (signal.SIGQUIT, shutit_util.ctrl_quit_signal_handler))
	for signum, handler in handlers:
		signal.signal(signum, handler)
Set up the signal handlers .
57
6
240,328
def get_shutit_pexpect_sessions():
	"""Return all the shutit_pexpect sessions in existence."""
	return [session
	        for shutit_object in shutit_global_object.shutit_objects
	        for session in shutit_object.shutit_pexpect_sessions.values()]
Returns all the shutit_pexpect sessions in existence .
76
12
240,329
def main():
	"""Main ShutIt function."""
	# Create base shutit object.
	shutit = shutit_global.shutit_global_object.shutit_objects[0]
	if sys.version_info[0] == 2 and sys.version_info[1] < 7:
		shutit.fail('Python version must be 2.7+')  # pragma: no cover
	try:
		shutit.setup_shutit_obj()
	except KeyboardInterrupt:
		shutit_util.print_debug(sys.exc_info())
		shutit_global.shutit_global_object.shutit_print('Keyboard interrupt caught, exiting with status 1')
		sys.exit(1)
Main ShutIt function .
150
5
240,330
def has_blocking_background_send(self):
	"""Check whether any blocking background commands are waiting to run.

	Returns True if any are, False if none are.
	"""
	for bg in self.background_objects:
		# If it's running ('S'), or not started yet ('N'), it should block other tasks.
		if bg.block_other_commands and bg.run_state in ('S', 'N'):
			self.shutit_obj.log('All objects are: ' + str(self), level=logging.DEBUG)
			self.shutit_obj.log('The current blocking send object is: ' + str(bg), level=logging.DEBUG)
			return True
		elif bg.block_other_commands and bg.run_state in ('F', 'C', 'T'):
			# Finished blockers should already have been cleaned up.
			assert False, shutit_util.print_debug(msg='Blocking command should have been removed, in run_state: ' + bg.run_state)
		else:
			assert bg.block_other_commands is False, shutit_util.print_debug()
	return False
Check whether any blocking background commands are waiting to run . If any are return True . If none are return False .
232
23
240,331
def check_background_commands_complete(self):
	"""Check whether any background commands are running or waiting to be run.

	Returns a (done, state, background_object) tuple. done is True only when
	nothing is running or pending (state 'OK', object None); otherwise state
	is 'S' (running), 'F' (failed) or 'N' (just started an unstarted one).
	"""
	unstarted_command_exists = False
	self.shutit_obj.log('In check_background_commands_complete: all background objects: ' + str(self.background_objects), level=logging.DEBUG)
	self.shutit_obj.log('Login id: ' + str(self.login_id), level=logging.DEBUG)
	for background_object in self.background_objects:
		self.shutit_obj.log('Background object send: ' + str(background_object.sendspec.send), level=logging.DEBUG)
	background_objects_to_remove = []
	def remove_background_objects(a_background_objects_to_remove):
		# Drop completed/failed objects from the live list.
		for background_object in a_background_objects_to_remove:
			self.background_objects.remove(background_object)
	for background_object in self.background_objects:
		self.shutit_obj.log('Checking background object: ' + str(background_object), level=logging.DEBUG)
		state = background_object.check_background_command_state()
		self.shutit_obj.log('State is: ' + state, level=logging.DEBUG)
		if state in ('C', 'F', 'T'):
			background_objects_to_remove.append(background_object)
			self.background_objects_completed.append(background_object)
		elif state == 'S':
			# Running command exists
			self.shutit_obj.log('check_background_command_state returning False (S) for ' + str(background_object), level=logging.DEBUG)
			remove_background_objects(background_objects_to_remove)
			return False, 'S', background_object
		elif state == 'N':
			self.shutit_obj.log('UNSTARTED COMMAND! ' + str(background_object.sendspec.send), level=logging.DEBUG)
			unstarted_command_exists = True
		else:
			remove_background_objects(background_objects_to_remove)
			assert False, shutit_util.print_debug(msg='Un-handled: ' + state)
		if state == 'F':
			self.shutit_obj.log('check_background_command_state returning False (F) for ' + str(background_object), level=logging.DEBUG)
			remove_background_objects(background_objects_to_remove)
			return False, 'F', background_object
	remove_background_objects(background_objects_to_remove)
	self.shutit_obj.log('Checking background objects done.', level=logging.DEBUG)
	if unstarted_command_exists:
		# Start up an unstarted one (in order), and return False
		for background_object in self.background_objects:
			state = background_object.check_background_command_state()
			if state == 'N':
				background_object.run_background_command()
				self.shutit_obj.log('check_background_command_state returning False (N) for ' + str(background_object), level=logging.DEBUG)
				return False, 'N', background_object
	# Nothing left to do - return True.
	self.shutit_obj.log('check_background_command_state returning True (OK)', level=logging.DEBUG)
	return True, 'OK', None
Check whether any background commands are running or to be run . If none are return True . If any are return False .
752
24
240,332
def open_remote(cls, username, password, multifactor_password=None, client_id=None):
	"""Fetch a blob from the server and create a vault from it."""
	blob = cls.fetch_blob(username, password, multifactor_password, client_id)
	return cls.open(blob, username, password)
Fetches a blob from the server and creates a vault
61
12
240,333
def open(cls, blob, username, password):
	"""Create a vault from a blob object."""
	encryption_key = blob.encryption_key(username, password)
	return cls(blob, encryption_key)
Creates a vault from a blob object
30
8
240,334
def fetch_blob(cls, username, password, multifactor_password=None, client_id=None):
	"""Just fetch the blob; could be used to store it locally."""
	session = fetcher.login(username, password, multifactor_password, client_id)
	blob = fetcher.fetch(session)
	fetcher.logout(session)
	return blob
Just fetches the blob; could be used to store it locally
66
12
240,335
def extract_chunks ( blob ) : chunks = [ ] stream = BytesIO ( blob . bytes ) current_pos = stream . tell ( ) stream . seek ( 0 , 2 ) length = stream . tell ( ) stream . seek ( current_pos , 0 ) while stream . tell ( ) < length : chunks . append ( read_chunk ( stream ) ) return chunks
Splits the blob into chunks grouped by kind .
80
11
240,336
def parse_ACCT ( chunk , encryption_key ) : # TODO: Make a test case that covers secure note account io = BytesIO ( chunk . payload ) id = read_item ( io ) name = decode_aes256_plain_auto ( read_item ( io ) , encryption_key ) group = decode_aes256_plain_auto ( read_item ( io ) , encryption_key ) url = decode_hex ( read_item ( io ) ) notes = decode_aes256_plain_auto ( read_item ( io ) , encryption_key ) skip_item ( io , 2 ) username = decode_aes256_plain_auto ( read_item ( io ) , encryption_key ) password = decode_aes256_plain_auto ( read_item ( io ) , encryption_key ) skip_item ( io , 2 ) secure_note = read_item ( io ) # Parse secure note if secure_note == b'1' : parsed = parse_secure_note_server ( notes ) if parsed . get ( 'type' ) in ALLOWED_SECURE_NOTE_TYPES : url = parsed . get ( 'url' , url ) username = parsed . get ( 'username' , username ) password = parsed . get ( 'password' , password ) return Account ( id , name , username , password , url , group , notes )
Parses an account chunk decrypts and creates an Account object . May return nil when the chunk does not represent an account . All secure notes are ACCTs but not all of them store account information .
301
42
240,337
def parse_PRIK ( chunk , encryption_key ) : decrypted = decode_aes256 ( 'cbc' , encryption_key [ : 16 ] , decode_hex ( chunk . payload ) , encryption_key ) hex_key = re . match ( br'^LastPassPrivateKey<(?P<hex_key>.*)>LastPassPrivateKey$' , decrypted ) . group ( 'hex_key' ) rsa_key = RSA . importKey ( decode_hex ( hex_key ) ) rsa_key . dmp1 = rsa_key . d % ( rsa_key . p - 1 ) rsa_key . dmq1 = rsa_key . d % ( rsa_key . q - 1 ) rsa_key . iqmp = number . inverse ( rsa_key . q , rsa_key . p ) return rsa_key
Parse PRIK chunk which contains private RSA key
198
10
240,338
def decode_aes256_cbc_base64 ( data , encryption_key ) : if not data : return b'' else : # LastPass AES-256/CBC/base64 encryted string starts with an "!". # Next 24 bytes are the base64 encoded IV for the cipher. # Then comes the "|". # And the rest is the base64 encoded encrypted payload. return decode_aes256 ( 'cbc' , decode_base64 ( data [ 1 : 25 ] ) , decode_base64 ( data [ 26 : ] ) , encryption_key )
Decrypts base64 encoded AES - 256 CBC bytes .
123
12
240,339
def api_delete ( service , file_id , owner_token ) : service += 'api/delete/%s' % file_id r = requests . post ( service , json = { 'owner_token' : owner_token , 'delete_token' : owner_token } ) r . raise_for_status ( ) if r . text == 'OK' : return True return False
Delete a file already uploaded to Send
84
7
240,340
def api_params ( service , file_id , owner_token , download_limit ) : service += 'api/params/%s' % file_id r = requests . post ( service , json = { 'owner_token' : owner_token , 'dlimit' : download_limit } ) r . raise_for_status ( ) if r . text == 'OK' : return True return False
Change the download limit for a file hosted on a Send Server
87
12
240,341
def api_download ( service , fileId , authorisation ) : data = tempfile . SpooledTemporaryFile ( max_size = SPOOL_SIZE , mode = 'w+b' ) headers = { 'Authorization' : 'send-v1 ' + unpadded_urlsafe_b64encode ( authorisation ) } url = service + 'api/download/' + fileId r = requests . get ( url , headers = headers , stream = True ) r . raise_for_status ( ) content_length = int ( r . headers [ 'Content-length' ] ) pbar = progbar ( content_length ) for chunk in r . iter_content ( chunk_size = CHUNK_SIZE ) : data . write ( chunk ) pbar . update ( len ( chunk ) ) pbar . close ( ) data . seek ( 0 ) return data
Given a Send url download and return the encrypted data and metadata
188
12
240,342
def decrypt_filedata ( data , keys ) : # The last 16 bytes / 128 bits of data is the GCM tag # https://www.w3.org/TR/WebCryptoAPI/#aes-gcm-operations :- # 7. Let ciphertext be equal to C | T, where '|' denotes concatenation. data . seek ( - 16 , 2 ) tag = data . read ( ) # now truncate the file to only contain encrypted data data . seek ( - 16 , 2 ) data . truncate ( ) data . seek ( 0 ) plain = tempfile . NamedTemporaryFile ( mode = 'w+b' , delete = False ) pbar = progbar ( fileSize ( data ) ) obj = Cryptodome . Cipher . AES . new ( keys . encryptKey , Cryptodome . Cipher . AES . MODE_GCM , keys . encryptIV ) prev_chunk = b'' for chunk in iter ( lambda : data . read ( CHUNK_SIZE ) , b'' ) : plain . write ( obj . decrypt ( prev_chunk ) ) pbar . update ( len ( chunk ) ) prev_chunk = chunk plain . write ( obj . decrypt_and_verify ( prev_chunk , tag ) ) data . close ( ) pbar . close ( ) plain . seek ( 0 ) return plain
Decrypts a file from Send
293
7
240,343
def sign_nonce ( key , nonce ) : # HMAC.new(key, msg='', digestmod=None) return Cryptodome . Hash . HMAC . new ( key , nonce , digestmod = Cryptodome . Hash . SHA256 ) . digest ( )
sign the server nonce from the WWW - Authenticate header with an authKey
61
17
240,344
def api_password ( service , fileId , ownerToken , newAuthKey ) : service += 'api/password/%s' % fileId auth = sendclient . common . unpadded_urlsafe_b64encode ( newAuthKey ) r = requests . post ( service , json = { 'owner_token' : ownerToken , 'auth' : auth } ) r . raise_for_status ( ) if r . text == 'OK' : return True return False
changes the authKey required to download a file hosted on a send server
102
14
240,345
def set_password ( url , ownerToken , password ) : service , fileId , key = sendclient . common . splitkeyurl ( url ) rawKey = sendclient . common . unpadded_urlsafe_b64decode ( key ) keys = sendclient . common . secretKeys ( rawKey , password , url ) return api_password ( service , fileId , ownerToken , keys . newAuthKey )
set or change the password required to download a file hosted on a send server .
88
16
240,346
def splitkeyurl ( url ) : key = url [ - 22 : ] urlid = url [ - 34 : - 24 ] service = url [ : - 43 ] return service , urlid , key
Splits a Send url into key urlid and prefix for the Send server Should handle any hostname but will break on key & id length changes
42
29
240,347
def api_upload ( service , encData , encMeta , keys ) : service += 'api/upload' files = requests_toolbelt . MultipartEncoder ( fields = { 'file' : ( 'blob' , encData , 'application/octet-stream' ) } ) pbar = progbar ( files . len ) monitor = requests_toolbelt . MultipartEncoderMonitor ( files , lambda files : pbar . update ( monitor . bytes_read - pbar . n ) ) headers = { 'X-File-Metadata' : unpadded_urlsafe_b64encode ( encMeta ) , 'Authorization' : 'send-v1 ' + unpadded_urlsafe_b64encode ( keys . authKey ) , 'Content-type' : monitor . content_type } r = requests . post ( service , data = monitor , headers = headers , stream = True ) r . raise_for_status ( ) pbar . close ( ) body_json = r . json ( ) secretUrl = body_json [ 'url' ] + '#' + unpadded_urlsafe_b64encode ( keys . secretKey ) fileId = body_json [ 'id' ] fileNonce = unpadded_urlsafe_b64decode ( r . headers [ 'WWW-Authenticate' ] . replace ( 'send-v1 ' , '' ) ) try : owner_token = body_json [ 'owner' ] except : owner_token = body_json [ 'delete' ] return secretUrl , fileId , fileNonce , owner_token
Uploads data to Send . Caution! Data is uploaded as given this function will not encrypt it for you
346
22
240,348
def send_file ( service , file , fileName = None , password = None , ignoreVersion = False ) : if checkServerVersion ( service , ignoreVersion = ignoreVersion ) == False : print ( '\033[1;41m!!! Potentially incompatible server version !!!\033[0m' ) fileName = fileName if fileName != None else os . path . basename ( file . name ) print ( 'Encrypting data from "' + fileName + '"' ) keys = secretKeys ( ) encData = encrypt_file ( file , keys ) encMeta = encrypt_metadata ( keys , fileName ) print ( 'Uploading "' + fileName + '"' ) secretUrl , fileId , fileNonce , owner_token = api_upload ( service , encData , encMeta , keys ) if password != None : print ( 'Setting password' ) sendclient . password . set_password ( secretUrl , owner_token , password ) return secretUrl , fileId , owner_token
Encrypt & Upload a file to send and return the download URL
213
13
240,349
def fetch ( self , endpoint , data = None ) : payload = { "lastServerChangeId" : "-1" , "csrf" : self . __csrf , "apiClient" : "WEB" } if data is not None : payload . update ( data ) return self . post ( endpoint , payload )
for getting data after logged in
68
6
240,350
def __identify_user ( self , username , csrf ) : data = { "username" : username , "csrf" : csrf , "apiClient" : "WEB" , "bindDevice" : "false" , "skipLinkAccount" : "false" , "redirectTo" : "" , "skipFirstUse" : "" , "referrerId" : "" , } r = self . post ( "/login/identifyUser" , data ) if r . status_code == requests . codes . ok : result = r . json ( ) new_csrf = getSpHeaderValue ( result , CSRF_KEY ) auth_level = getSpHeaderValue ( result , AUTH_LEVEL_KEY ) return ( new_csrf , auth_level ) return ( None , None )
Returns reusable CSRF code and the auth level as a 2 - tuple
177
14
240,351
def get_args ( cls , dist , header = None ) : if header is None : header = cls . get_header ( ) spec = str ( dist . as_requirement ( ) ) for type_ in 'console' , 'gui' : group = type_ + '_scripts' for name , ep in dist . get_entry_map ( group ) . items ( ) : # ensure_safe_name if re . search ( r'[\\/]' , name ) : raise ValueError ( "Path separators not allowed in script names" ) script_text = TEMPLATE . format ( ep . module_name , ep . attrs [ 0 ] , '.' . join ( ep . attrs ) , spec , group , name , ) args = cls . _get_script_args ( type_ , name , header , script_text ) for res in args : yield res
Overrides easy_install . ScriptWriter . get_args
193
13
240,352
def clean_pip_env ( ) -> Generator [ None , None , None ] : require_venv = os . environ . pop ( PIP_REQUIRE_VIRTUALENV , None ) try : yield finally : if require_venv is not None : os . environ [ PIP_REQUIRE_VIRTUALENV ] = require_venv
A context manager for temporarily removing PIP_REQUIRE_VIRTUALENV from the environment .
81
21
240,353
def install ( args : List [ str ] ) -> None : with clean_pip_env ( ) : # if being invoked as a pyz, we must ensure we have access to our own # site-packages when subprocessing since there is no guarantee that pip # will be available subprocess_env = os . environ . copy ( ) sitedir_index = _first_sitedir_index ( ) _extend_python_path ( subprocess_env , sys . path [ sitedir_index : ] ) process = subprocess . Popen ( [ sys . executable , "-m" , "pip" , "--disable-pip-version-check" , "install" ] + args , stdout = subprocess . PIPE , stderr = subprocess . STDOUT , env = subprocess_env , ) for output in process . stdout : if output : click . echo ( output . decode ( ) . rstrip ( ) ) if process . wait ( ) > 0 : sys . exit ( PIP_INSTALL_ERROR )
pip install as a function .
229
7
240,354
def current_zipfile ( ) : if zipfile . is_zipfile ( sys . argv [ 0 ] ) : fd = open ( sys . argv [ 0 ] , "rb" ) return zipfile . ZipFile ( fd )
A function to vend the current zipfile if any
53
10
240,355
def extract_site_packages ( archive , target_path , compile_pyc , compile_workers = 0 , force = False ) : parent = target_path . parent target_path_tmp = Path ( parent , target_path . stem + ".tmp" ) lock = Path ( parent , target_path . stem + ".lock" ) # If this is the first time that a pyz is being extracted, we'll need to create the ~/.shiv dir if not parent . exists ( ) : parent . mkdir ( parents = True , exist_ok = True ) with FileLock ( lock ) : # we acquired a lock, it's possible that prior invocation was holding the lock and has # completed bootstrapping, so let's check (again) if we need to do any work if not target_path . exists ( ) or force : # extract our site-packages for filename in archive . namelist ( ) : if filename . startswith ( "site-packages" ) : archive . extract ( filename , target_path_tmp ) if compile_pyc : compileall . compile_dir ( target_path_tmp , quiet = 2 , workers = compile_workers ) # if using `force` we will need to delete our target path if target_path . exists ( ) : shutil . rmtree ( str ( target_path ) ) # atomic move shutil . move ( str ( target_path_tmp ) , str ( target_path ) )
Extract everything in site - packages to a specified path .
310
12
240,356
def bootstrap ( ) : # pragma: no cover # get a handle of the currently executing zip file archive = current_zipfile ( ) # create an environment object (a combination of env vars and json metadata) env = Environment . from_json ( archive . read ( "environment.json" ) . decode ( ) ) # get a site-packages directory (from env var or via build id) site_packages = cache_path ( archive , env . root , env . build_id ) / "site-packages" # determine if first run or forcing extract if not site_packages . exists ( ) or env . force_extract : extract_site_packages ( archive , site_packages . parent , env . compile_pyc , env . compile_workers , env . force_extract ) # get sys.path's length length = len ( sys . path ) # Find the first instance of an existing site-packages on sys.path index = _first_sitedir_index ( ) or length # append site-packages using the stdlib blessed way of extending path # so as to handle .pth files correctly site . addsitedir ( site_packages ) # add our site-packages to the environment, if requested if env . extend_pythonpath : _extend_python_path ( os . environ , sys . path [ index : ] ) # reorder to place our site-packages before any others found sys . path = sys . path [ : index ] + sys . path [ length : ] + sys . path [ index : length ] # first check if we should drop into interactive mode if not env . interpreter : # do entry point import and call if env . entry_point is not None : mod = import_string ( env . entry_point ) try : sys . exit ( mod ( ) ) except TypeError : # catch "<module> is not callable", which is thrown when the entry point's # callable shares a name with it's parent module # e.g. "from foo.bar import bar; bar()" sys . exit ( getattr ( mod , env . entry_point . replace ( ":" , "." ) . split ( "." ) [ 1 ] ) ( ) ) elif env . script is not None : sys . exit ( runpy . run_path ( site_packages / "bin" / env . script , run_name = "__main__" ) ) # all other options exhausted, drop into interactive mode execute_interpreter ( )
Actually bootstrap our shiv environment .
530
8
240,357
def write_file_prefix ( f : IO [ Any ] , interpreter : str ) -> None : # if the provided path is too long for a shebang we should error out if len ( interpreter ) > BINPRM_BUF_SIZE : sys . exit ( BINPRM_ERROR ) f . write ( b"#!" + interpreter . encode ( sys . getfilesystemencoding ( ) ) + b"\n" )
Write a shebang line .
94
6
240,358
def create_archive ( source : Path , target : Path , interpreter : str , main : str , compressed : bool = True ) -> None : # Check that main has the right format. mod , sep , fn = main . partition ( ":" ) mod_ok = all ( part . isidentifier ( ) for part in mod . split ( "." ) ) fn_ok = all ( part . isidentifier ( ) for part in fn . split ( "." ) ) if not ( sep == ":" and mod_ok and fn_ok ) : raise zipapp . ZipAppError ( "Invalid entry point: " + main ) main_py = MAIN_TEMPLATE . format ( module = mod , fn = fn ) with maybe_open ( target , "wb" ) as fd : # write shebang write_file_prefix ( fd , interpreter ) # determine compression compression = zipfile . ZIP_DEFLATED if compressed else zipfile . ZIP_STORED # create zipapp with zipfile . ZipFile ( fd , "w" , compression = compression ) as z : for child in source . rglob ( "*" ) : # skip compiled files if child . suffix == '.pyc' : continue arcname = child . relative_to ( source ) z . write ( str ( child ) , str ( arcname ) ) # write main z . writestr ( "__main__.py" , main_py . encode ( "utf-8" ) ) # make executable # NOTE on windows this is no-op target . chmod ( target . stat ( ) . st_mode | stat . S_IXUSR | stat . S_IXGRP | stat . S_IXOTH )
Create an application archive from SOURCE .
369
8
240,359
def acquire_win ( lock_file ) : # pragma: no cover try : fd = os . open ( lock_file , OPEN_MODE ) except OSError : pass else : try : msvcrt . locking ( fd , msvcrt . LK_NBLCK , 1 ) except ( IOError , OSError ) : os . close ( fd ) else : return fd
Acquire a lock file on windows .
91
8
240,360
def acquire_nix ( lock_file ) : # pragma: no cover fd = os . open ( lock_file , OPEN_MODE ) try : fcntl . flock ( fd , fcntl . LOCK_EX | fcntl . LOCK_NB ) except ( IOError , OSError ) : os . close ( fd ) else : return fd
Acquire a lock file on linux or osx .
87
11
240,361
def find_entry_point ( site_packages : Path , console_script : str ) -> str : config_parser = ConfigParser ( ) config_parser . read ( site_packages . rglob ( "entry_points.txt" ) ) return config_parser [ "console_scripts" ] [ console_script ]
Find a console_script in a site - packages directory .
69
12
240,362
def copy_bootstrap ( bootstrap_target : Path ) -> None : for bootstrap_file in importlib_resources . contents ( bootstrap ) : if importlib_resources . is_resource ( bootstrap , bootstrap_file ) : with importlib_resources . path ( bootstrap , bootstrap_file ) as f : shutil . copyfile ( f . absolute ( ) , bootstrap_target / f . name )
Copy bootstrap code from shiv into the pyz .
92
12
240,363
def _interpreter_path ( append_version : bool = False ) -> str : base_dir = Path ( getattr ( sys , "real_prefix" , sys . base_prefix ) ) . resolve ( ) sys_exec = Path ( sys . executable ) name = sys_exec . stem suffix = sys_exec . suffix if append_version : name += str ( sys . version_info . major ) name += suffix try : return str ( next ( iter ( base_dir . rglob ( name ) ) ) ) except StopIteration : if not append_version : # If we couldn't find an interpreter, it's likely that we looked for # "python" when we should've been looking for "python3" # so we try again with append_version=True return _interpreter_path ( append_version = True ) # If we were still unable to find a real interpreter for some reason # we fallback to the current runtime's interpreter return sys . executable
A function to return the path to the current Python interpreter .
208
12
240,364
def main ( output_file : str , entry_point : Optional [ str ] , console_script : Optional [ str ] , python : Optional [ str ] , site_packages : Optional [ str ] , compressed : bool , compile_pyc : bool , extend_pythonpath : bool , pip_args : List [ str ] , ) -> None : if not pip_args and not site_packages : sys . exit ( NO_PIP_ARGS_OR_SITE_PACKAGES ) if output_file is None : sys . exit ( NO_OUTFILE ) # check for disallowed pip arguments for disallowed in DISALLOWED_ARGS : for supplied_arg in pip_args : if supplied_arg in disallowed : sys . exit ( DISALLOWED_PIP_ARGS . format ( arg = supplied_arg , reason = DISALLOWED_ARGS [ disallowed ] ) ) with TemporaryDirectory ( ) as working_path : tmp_site_packages = Path ( working_path , "site-packages" ) if site_packages : shutil . copytree ( site_packages , tmp_site_packages ) if pip_args : # install deps into staged site-packages pip . install ( [ "--target" , str ( tmp_site_packages ) ] + list ( pip_args ) ) # if entry_point is a console script, get the callable if entry_point is None and console_script is not None : try : entry_point = find_entry_point ( tmp_site_packages , console_script ) except KeyError : if not Path ( tmp_site_packages , "bin" , console_script ) . exists ( ) : sys . exit ( NO_ENTRY_POINT . format ( entry_point = console_script ) ) # create runtime environment metadata env = Environment ( build_id = str ( uuid . uuid4 ( ) ) , entry_point = entry_point , script = console_script , compile_pyc = compile_pyc , extend_pythonpath = extend_pythonpath , ) Path ( working_path , "environment.json" ) . write_text ( env . to_json ( ) ) # create bootstrapping directory in working path bootstrap_target = Path ( working_path , "_bootstrap" ) bootstrap_target . mkdir ( parents = True , exist_ok = True ) # copy bootstrap code copy_bootstrap ( bootstrap_target ) # create the zip builder . create_archive ( Path ( working_path ) , target = Path ( output_file ) . 
expanduser ( ) , interpreter = python or _interpreter_path ( ) , main = "_bootstrap:bootstrap" , compressed = compressed , )
Shiv is a command line utility for building fully self - contained Python zipapps as outlined in PEP 441 but with all their dependencies included!
589
30
240,365
def get_session ( * , env_vars = None , loop = None ) : loop = loop or asyncio . get_event_loop ( ) return AioSession ( session_vars = env_vars , loop = loop )
Return a new session object .
52
6
240,366
async def read ( self , amt = None ) : # botocore to aiohttp mapping chunk = await self . __wrapped__ . read ( amt if amt is not None else - 1 ) self . _self_amount_read += len ( chunk ) if amt is None or ( not chunk and amt > 0 ) : # If the server sends empty contents or # we ask to read all of the contents, then we know # we need to verify the content length. self . _verify_content_length ( ) return chunk
Read at most amt bytes from the stream .
118
10
240,367
async def iter_lines ( self , chunk_size = 1024 ) : pending = b'' async for chunk in self . iter_chunks ( chunk_size ) : lines = ( pending + chunk ) . splitlines ( True ) for line in lines [ : - 1 ] : await yield_ ( line . splitlines ( ) [ 0 ] ) pending = lines [ - 1 ] if pending : await yield_ ( pending . splitlines ( ) [ 0 ] )
Return an iterator to yield lines from the raw stream .
97
11
240,368
async def iter_chunks ( self , chunk_size = _DEFAULT_CHUNK_SIZE ) : while True : current_chunk = await self . read ( chunk_size ) if current_chunk == b"" : break await yield_ ( current_chunk )
Return an iterator to yield chunks of chunk_size bytes from the raw stream .
61
16
240,369
def get_items ( start_num , num_items ) : result = [ ] for i in range ( start_num , start_num + num_items ) : result . append ( { 'pk' : { 'S' : 'item{0}' . format ( i ) } } ) return result
Generate a sequence of dynamo items
67
8
240,370
def create_batch_write_structure ( table_name , start_num , num_items ) : return { table_name : [ { 'PutRequest' : { 'Item' : item } } for item in get_items ( start_num , num_items ) ] }
Create item structure for passing to batch_write_item
61
11
240,371
def get_paginator ( self , operation_name ) : if not self . can_paginate ( operation_name ) : raise OperationNotPageableError ( operation_name = operation_name ) else : # substitute iterator with async one Paginator . PAGE_ITERATOR_CLS = AioPageIterator actual_operation_name = self . _PY_TO_OP_NAME [ operation_name ] # Create a new paginate method that will serve as a proxy to # the underlying Paginator.paginate method. This is needed to # attach a docstring to the method. def paginate ( self , * * kwargs ) : return Paginator . paginate ( self , * * kwargs ) paginator_config = self . _cache [ 'page_config' ] [ actual_operation_name ] # Rename the paginator class based on the type of paginator. paginator_class_name = str ( '%s.Paginator.%s' % ( get_service_module_name ( self . meta . service_model ) , actual_operation_name ) ) # Create the new paginator class documented_paginator_cls = type ( paginator_class_name , ( Paginator , ) , { 'paginate' : paginate } ) operation_model = self . _service_model . operation_model ( actual_operation_name ) paginator = documented_paginator_cls ( getattr ( self , operation_name ) , paginator_config , operation_model ) return paginator
Create a paginator for an operation .
337
8
240,372
def get_waiter ( self , waiter_name ) : config = self . _get_waiter_config ( ) if not config : raise ValueError ( "Waiter does not exist: %s" % waiter_name ) model = waiter . WaiterModel ( config ) mapping = { } for name in model . waiter_names : mapping [ xform_name ( name ) ] = name if waiter_name not in mapping : raise ValueError ( "Waiter does not exist: %s" % waiter_name ) return waiter . create_waiter_with_client ( mapping [ waiter_name ] , model , self , loop = self . _loop )
Returns an object that can wait for some condition .
141
10
240,373
def url_fix ( s , charset = None ) : if charset : warnings . warn ( "{}.url_fix() charset argument is deprecated" . format ( __name__ ) , DeprecationWarning ) scheme , netloc , path , querystring , fragment = urlsplit ( s ) path = quote ( path , b'/%' ) querystring = quote_plus ( querystring , b':&=' ) return urlunsplit ( ( scheme , netloc , path , querystring , fragment ) )
escapes special characters
111
4
240,374
def httprettified ( test = None , allow_net_connect = True ) : def decorate_unittest_TestCase_setUp ( klass ) : # Prefer addCleanup (added in python 2.7), but fall back # to using tearDown if it isn't available use_addCleanup = hasattr ( klass , 'addCleanup' ) original_setUp = ( klass . setUp if hasattr ( klass , 'setUp' ) else None ) def new_setUp ( self ) : httpretty . reset ( ) httpretty . enable ( allow_net_connect ) if use_addCleanup : self . addCleanup ( httpretty . disable ) if original_setUp : original_setUp ( self ) klass . setUp = new_setUp if not use_addCleanup : original_tearDown = ( klass . setUp if hasattr ( klass , 'tearDown' ) else None ) def new_tearDown ( self ) : httpretty . disable ( ) httpretty . reset ( ) if original_tearDown : original_tearDown ( self ) klass . tearDown = new_tearDown return klass def decorate_test_methods ( klass ) : for attr in dir ( klass ) : if not attr . startswith ( 'test_' ) : continue attr_value = getattr ( klass , attr ) if not hasattr ( attr_value , "__call__" ) : continue setattr ( klass , attr , decorate_callable ( attr_value ) ) return klass def is_unittest_TestCase ( klass ) : try : import unittest return issubclass ( klass , unittest . TestCase ) except ImportError : return False "A decorator for tests that use HTTPretty" def decorate_class ( klass ) : if is_unittest_TestCase ( klass ) : return decorate_unittest_TestCase_setUp ( klass ) return decorate_test_methods ( klass ) def decorate_callable ( test ) : @ functools . wraps ( test ) def wrapper ( * args , * * kw ) : with httprettized ( allow_net_connect ) : return test ( * args , * * kw ) return wrapper if isinstance ( test , ClassTypes ) : return decorate_class ( test ) elif callable ( test ) : return decorate_callable ( test ) return decorate_callable
decorator for test functions
558
6
240,375
def parse_request_body ( self , body ) : PARSING_FUNCTIONS = { 'application/json' : json . loads , 'text/json' : json . loads , 'application/x-www-form-urlencoded' : self . parse_querystring , } content_type = self . headers . get ( 'content-type' , '' ) do_parse = PARSING_FUNCTIONS . get ( content_type , FALLBACK_FUNCTION ) try : body = decode_utf8 ( body ) return do_parse ( body ) except ( Exception , BaseException ) : return body
Attempt to parse the post based on the content - type passed . Return the regular body if not
135
19
240,376
def fill_filekind ( self , fk ) : now = datetime . utcnow ( ) headers = { 'status' : self . status , 'date' : now . strftime ( '%a, %d %b %Y %H:%M:%S GMT' ) , 'server' : 'Python/HTTPretty' , 'connection' : 'close' , } if self . forcing_headers : headers = self . forcing_headers if self . adding_headers : headers . update ( self . normalize_headers ( self . adding_headers ) ) headers = self . normalize_headers ( headers ) status = headers . get ( 'status' , self . status ) if self . body_is_callable : status , headers , self . body = self . callable_body ( self . request , self . info . full_url ( ) , headers ) headers = self . normalize_headers ( headers ) # TODO: document this behavior: if 'content-length' not in headers : headers . update ( { 'content-length' : len ( self . body ) } ) string_list = [ 'HTTP/1.1 %d %s' % ( status , STATUSES [ status ] ) , ] if 'date' in headers : string_list . append ( 'date: %s' % headers . pop ( 'date' ) ) if not self . forcing_headers : content_type = headers . pop ( 'content-type' , 'text/plain; charset=utf-8' ) content_length = headers . pop ( 'content-length' , self . body_length ) string_list . append ( 'content-type: %s' % content_type ) if not self . streaming : string_list . append ( 'content-length: %s' % content_length ) server = headers . pop ( 'server' , None ) if server : string_list . append ( 'server: %s' % server ) for k , v in headers . items ( ) : string_list . append ( '{}: {}' . format ( k , v ) , ) for item in string_list : fk . write ( utf8 ( item ) + b'\n' ) fk . write ( b'\r\n' ) if self . streaming : self . body , body = itertools . tee ( self . body ) for chunk in body : fk . write ( utf8 ( chunk ) ) else : fk . write ( utf8 ( self . body ) ) fk . seek ( 0 )
writes HTTP Response data to a file descriptor
554
9
240,377
def draw_text ( stdscr , text , color = 0 , fallback = None , title = None ) : if fallback is None : fallback = text y , x = stdscr . getmaxyx ( ) if title : title = pad_to_size ( title , x , 1 ) if "\n" in title . rstrip ( "\n" ) : # hack to get more spacing between title and body for figlet title += "\n" * 5 text = title + "\n" + pad_to_size ( text , x , len ( text . split ( "\n" ) ) ) lines = pad_to_size ( text , x , y ) . rstrip ( "\n" ) . split ( "\n" ) try : for i , line in enumerate ( lines ) : stdscr . insstr ( i , 0 , line , curses . color_pair ( color ) ) except : lines = pad_to_size ( fallback , x , y ) . rstrip ( "\n" ) . split ( "\n" ) try : for i , line in enumerate ( lines [ : ] ) : stdscr . insstr ( i , 0 , line , curses . color_pair ( color ) ) except : pass stdscr . refresh ( )
Draws text in the given color . Duh .
275
11
240,378
def format_seconds ( seconds , hide_seconds = False ) : if seconds <= 60 : return str ( seconds ) output = "" for period , period_seconds in ( ( 'y' , 31557600 ) , ( 'd' , 86400 ) , ( 'h' , 3600 ) , ( 'm' , 60 ) , ( 's' , 1 ) , ) : if seconds >= period_seconds and not ( hide_seconds and period == 's' ) : output += str ( int ( seconds / period_seconds ) ) output += period output += " " seconds = seconds % period_seconds return output . strip ( )
Returns a human - readable string representation of the given amount of seconds .
135
14
240,379
def graceful_ctrlc ( func ) : @ wraps ( func ) def wrapper ( * args , * * kwargs ) : try : return func ( * args , * * kwargs ) except KeyboardInterrupt : exit ( 1 ) return wrapper
Makes the decorated function exit with code 1 on CTRL + C .
52
14
240,380
def pad_to_size ( text , x , y ) : input_lines = text . rstrip ( ) . split ( "\n" ) longest_input_line = max ( map ( len , input_lines ) ) number_of_input_lines = len ( input_lines ) x = max ( x , longest_input_line ) y = max ( y , number_of_input_lines ) output = "" padding_top = int ( ( y - number_of_input_lines ) / 2 ) padding_bottom = y - number_of_input_lines - padding_top padding_left = int ( ( x - longest_input_line ) / 2 ) output += padding_top * ( " " * x + "\n" ) for line in input_lines : output += padding_left * " " + line + " " * ( x - padding_left - len ( line ) ) + "\n" output += padding_bottom * ( " " * x + "\n" ) return output
Adds whitespace to text to center it within a frame of the given dimensions .
217
16
240,381
def parse_timestr(timestr):
    """Parse a string describing a point in time.

    Returns a ``(sync_start, target)`` tuple: ``sync_start`` is the
    moment parsing began and ``target`` the datetime to count toward.
    Accepts a timedelta spec handled by parse_timedelta, a plain number
    of seconds, or anything dateutil's ``parse`` accepts (e.g. "23:52").
    Raises ValueError when the string cannot be interpreted.
    """
    timedelta_secs = parse_timedelta(timestr)
    sync_start = datetime.now()
    if timedelta_secs:
        target = datetime.now() + timedelta(seconds=timedelta_secs)
    elif timestr.isdigit():
        target = datetime.now() + timedelta(seconds=int(timestr))
    else:
        try:
            target = parse(timestr)
        except:
            # unfortunately, dateutil doesn't raise the best exceptions
            raise ValueError("Unable to parse '{}'".format(timestr))
        # When I do "termdown 10" (the two cases above), I want a
        # countdown for the next 10 seconds. Okay. But when I do
        # "termdown 23:52", I want a countdown that ends at that exact
        # moment -- the countdown is related to real time. Thus, I want
        # my frames to be drawn at full seconds, so I enforce
        # microsecond=0.
        sync_start = sync_start.replace(microsecond=0)
        try:
            # try to convert target to naive local timezone
            target = target.astimezone(tz=tz.tzlocal()).replace(tzinfo=None)
        except ValueError:
            # parse() already returned a naive datetime, all is well
            pass
    return (sync_start, target)
Parse a string describing a point in time .
308
10
240,382
def parse_timedelta(deltastr):
    """Parse a string describing a period of time into whole seconds.

    Returns None when the string does not match TIMEDELTA_REGEX.
    Years and days are folded into hours before building the timedelta.
    """
    match = TIMEDELTA_REGEX.match(deltastr)
    if not match:
        return None
    components = {
        name: int(value)
        for name, value in match.groupdict().items()
        if value
    }
    # timedelta() has no 'years' argument, so fold years (and days,
    # for uniformity) into hours.
    for period, hours_each in (('days', 24), ('years', 8766)):
        if period in components:
            components['hours'] = (components.get('hours', 0) +
                                   components.pop(period) * hours_each)
    return int(timedelta(**components).total_seconds())
Parse a string describing a period of time .
143
10
240,383
def _verify_signature(self, message, signature):
    """Verify that the signature received from the server matches the
    signature computed locally over *message*.

    Raises Exception when either the checksum or the sequence number
    differs; on success the incoming sequence number is advanced by one.
    """
    if self.negotiate_flags & NegotiateFlags.NTLMSSP_NEGOTIATE_EXTENDED_SESSIONSECURITY:
        # Extended session security layout: checksum in bytes 4-11,
        # sequence number in bytes 12-15.
        actual_checksum = signature[4:12]
        actual_seq_num = struct.unpack("<I", signature[12:16])[0]
    else:
        # NOTE(review): only 4 checksum bytes (8-11) are read here while
        # the ESS branch reads 8 -- presumably the non-ESS signature
        # layout; confirm against the MS-NLMP specification.
        actual_checksum = signature[8:12]
        actual_seq_num = struct.unpack("<I", signature[12:16])[0]
    # Recompute the expected signature with our incoming key/handle.
    expected_signature = calc_signature(message, self.negotiate_flags,
                                        self.incoming_signing_key,
                                        self.incoming_seq_num,
                                        self.incoming_handle)
    expected_checksum = expected_signature.checksum
    expected_seq_num = struct.unpack("<I", expected_signature.seq_num)[0]
    if actual_checksum != expected_checksum:
        raise Exception("The signature checksum does not match, message has been altered")
    if actual_seq_num != expected_seq_num:
        raise Exception("The signature sequence number does not match up, message not received in the correct sequence")
    self.incoming_seq_num += 1
Will verify that the signature received from the server matches up with the expected signature computed locally . Will throw an exception if they do not match
275
27
240,384
def regex_opt_inner ( strings , open_paren ) : close_paren = open_paren and ')' or '' # print strings, repr(open_paren) if not strings : # print '-> nothing left' return '' first = strings [ 0 ] if len ( strings ) == 1 : # print '-> only 1 string' return open_paren + escape ( first ) + close_paren if not first : # print '-> first string empty' return open_paren + regex_opt_inner ( strings [ 1 : ] , '(?:' ) + '?' + close_paren if len ( first ) == 1 : # multiple one-char strings? make a charset oneletter = [ ] rest = [ ] for s in strings : if len ( s ) == 1 : oneletter . append ( s ) else : rest . append ( s ) if len ( oneletter ) > 1 : # do we have more than one oneletter string? if rest : # print '-> 1-character + rest' return open_paren + regex_opt_inner ( rest , '' ) + '|' + make_charset ( oneletter ) + close_paren # print '-> only 1-character' return open_paren + make_charset ( oneletter ) + close_paren prefix = commonprefix ( strings ) if prefix : plen = len ( prefix ) # we have a prefix for all strings # print '-> prefix:', prefix return open_paren + escape ( prefix ) + regex_opt_inner ( [ s [ plen : ] for s in strings ] , '(?:' ) + close_paren # is there a suffix? strings_rev = [ s [ : : - 1 ] for s in strings ] suffix = commonprefix ( strings_rev ) if suffix : slen = len ( suffix ) # print '-> suffix:', suffix[::-1] return open_paren + regex_opt_inner ( sorted ( s [ : - slen ] for s in strings ) , '(?:' ) + escape ( suffix [ : : - 1 ] ) + close_paren # recurse on common 1-string prefixes # print '-> last resort' return open_paren + '|' . join ( regex_opt_inner ( list ( group [ 1 ] ) , '' ) for group in groupby ( strings , lambda s : s [ 0 ] == first [ 0 ] ) ) + close_paren
Return a regex that matches any string in the sorted list of strings .
513
14
240,385
def regex_opt(strings, prefix='', suffix=''):
    """Return a regex source string that matches any string in *strings*.

    The strings are sorted first; the optimization is delegated to
    regex_opt_inner, and the result is wrapped in a capturing group
    between *prefix* and *suffix*.
    """
    ordered = sorted(strings)
    inner = regex_opt_inner(ordered, '(')
    return prefix + inner + suffix
Return a compiled regex that matches any string in the given list .
38
13
240,386
def open_resource(name):
    """Open a resource from the zoneinfo subdir for reading.

    Path segments that would escape the zoneinfo directory are rejected.
    When the file does not exist on disk, pkg_resources is tried as a
    fallback before attempting the plain open (which will then raise).
    """
    parts = name.lstrip('/').split('/')
    bad = [segment for segment in parts
           if segment == os.path.pardir or os.path.sep in segment]
    if bad:
        raise ValueError('Bad path segment: %r' % bad[0])
    filename = os.path.join(os.path.dirname(__file__), 'zoneinfo', *parts)
    if not os.path.exists(filename):
        # http://bugs.launchpad.net/bugs/383171 - we avoid using this
        # unless absolutely necessary to help when a broken version of
        # pkg_resources is installed.
        try:
            from pkg_resources import resource_stream
        except ImportError:
            resource_stream = None
        if resource_stream is not None:
            return resource_stream(__name__, 'zoneinfo/' + name)
    return open(filename, 'rb')
Open a resource from the zoneinfo subdir for reading .
199
12
240,387
def FixedOffset(offset, _tzinfos={}):
    """Return a fixed-offset timezone for the given number of minutes.

    An offset of 0 yields the shared UTC singleton.  The mutable
    default ``_tzinfos`` is intentional: it memoizes one _FixedOffset
    instance per distinct offset across all calls.
    """
    if offset == 0:
        return UTC
    cached = _tzinfos.get(offset)
    if cached is not None:
        return cached
    # We haven't seen this one before; store it.  setdefault avoids a
    # race: even if two callers build an instance concurrently, both
    # end up with the same stored object.
    return _tzinfos.setdefault(offset, _FixedOffset(offset))
Return a fixed - offset timezone based on a number of minutes .
91
14
240,388
def boolean_or_list(config_name, args, configs, alternative_names=None):
    """Populate ``args.<config_name>`` with a list of regex patterns.

    If any of the argument flags (*config_name* or one of
    *alternative_names*) is truthy on *args*, the attribute becomes the
    wildcard list ``['.*']``.  Otherwise the value is read from the
    ``[settings]`` section of *configs*: ``true`` means wildcard,
    ``false`` means empty, and any other value contributes one regex
    per non-blank line.
    """
    # Fix: the original used a mutable default (``alternative_names=[]``)
    # and mutated it with ``insert``, so state leaked between calls and
    # the caller's list was modified.  Copy defensively instead.
    alternative_names = list(alternative_names or [])
    # When an argument flag is present, set to the wildcard regex.
    for key in alternative_names + [config_name]:
        if hasattr(args, key) and getattr(args, key):
            setattr(args, config_name, ['.*'])
            return
    setattr(args, config_name, [])
    option = None
    # Config lookup prefers config_name itself, then the alternatives.
    for key in [config_name] + alternative_names:
        if configs.has_option('settings', key):
            option = configs.get('settings', key)
            break
    if option is None:
        return
    normalized = option.strip().lower()
    if normalized == 'true':
        setattr(args, config_name, ['.*'])
    elif normalized != 'false':
        for pattern in option.split("\n"):
            if pattern.strip() != '':
                getattr(args, config_name).append(pattern)
Get a boolean or list of regexes from args and configs .
231
14
240,389
def get_style_defs(self, arg=''):
    """Return the command sequences needed to define the commands used
    to format text in the verbatim environment.  *arg* is ignored.
    """
    cp = self.commandprefix
    style_lines = [
        r'\expandafter\def\csname %s@tok@%s\endcsname{%s}' % (cp, name, definition)
        for name, definition in iteritems(self.cmd2def)
    ]
    return STYLE_TEMPLATE % {
        'cp': self.commandprefix,
        'styles': '\n'.join(style_lines),
    }
Return the command sequences needed to define the commands used to format text in the verbatim environment . arg is ignored .
115
24
240,390
def _fn_matches(fn, glob):
    """Return whether the file name *fn* matches the glob pattern *glob*.

    Compiled patterns are memoized in the module-level _pattern_cache.
    """
    try:
        pattern = _pattern_cache[glob]
    except KeyError:
        pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob))
    return pattern.match(fn)
Return whether the supplied file name fn matches the glob pattern .
63
11
240,391
def get_all_formatters():
    """Yield all known formatter classes: builtin ones first, then those
    registered via setuptools plugin entrypoints.
    """
    # NB: this yields formatter classes, not info tuples like get_all_lexers().
    for info in itervalues(FORMATTERS):
        cls_name = info[1]
        if cls_name not in _formatter_cache:
            _load_formatters(info[0])
        yield _formatter_cache[cls_name]
    for _name, plugin_formatter in find_plugin_formatters():
        yield plugin_formatter
Return a generator for all formatter classes .
96
9
240,392
def find_formatter_class(alias):
    """Look up a formatter class by alias.

    Builtin formatters are checked first (loading their module on
    demand), then plugin formatters.  Returns None when no formatter
    carries the alias.
    """
    for module_name, name, aliases, _, _ in itervalues(FORMATTERS):
        if alias not in aliases:
            continue
        if name not in _formatter_cache:
            _load_formatters(module_name)
        return _formatter_cache[name]
    for _entry, plugin_cls in find_plugin_formatters():
        if alias in plugin_cls.aliases:
            return plugin_cls
Lookup a formatter by alias .
94
8
240,393
def get_formatter_by_name(_alias, **options):
    """Look up a formatter class by alias and instantiate it with
    *options*; raise ClassNotFound for an unknown alias.
    """
    formatter_cls = find_formatter_class(_alias)
    if formatter_cls is None:
        raise ClassNotFound("no formatter found for name %r" % _alias)
    return formatter_cls(**options)
Lookup and instantiate a formatter by alias .
64
11
240,394
def load_formatter_from_file(filename, formattername="CustomFormatter", **options):
    """Load a formatter class named *formattername* from a Python source
    file and instantiate it with *options*.

    Raises ClassNotFound when the file cannot be read, does not define
    the class, or raises while being executed.
    """
    try:
        # This empty dict will contain the namespace for the exec'd file
        custom_namespace = {}
        # Fix: use a context manager so the file handle is closed even
        # if exec() raises (the original leaked the handle).
        with open(filename, 'rb') as f:
            exec(f.read(), custom_namespace)
        # Retrieve the class `formattername` from that namespace
        if formattername not in custom_namespace:
            raise ClassNotFound('no valid %s class found in %s' %
                                (formattername, filename))
        formatter_class = custom_namespace[formattername]
        # And finally instantiate it with the options
        return formatter_class(**options)
    except IOError:
        raise ClassNotFound('cannot read %s' % filename)
    except ClassNotFound:
        raise
    except Exception as err:
        raise ClassNotFound('error when loading custom formatter: %s' % err)
Load a formatter from a file .
204
8
240,395
def get_formatter_for_filename(fn, **options):
    """Look up a formatter by filename pattern and instantiate it with
    *options*; raise ClassNotFound when no pattern matches.
    """
    fn = basename(fn)
    for modname, name, _, filenames, _ in itervalues(FORMATTERS):
        if any(_fn_matches(fn, pat) for pat in filenames):
            if name not in _formatter_cache:
                _load_formatters(modname)
            return _formatter_cache[name](**options)
    # NOTE(review): sibling functions unpack (name, cls) pairs from
    # find_plugin_formatters(); here the yielded item is used directly.
    # Confirm which shape the plugin API returns.
    for cls in find_plugin_formatters():
        if any(_fn_matches(fn, pat) for pat in cls.filenames):
            return cls(**options)
    raise ClassNotFound("no formatter found for file name %r" % fn)
Lookup and instantiate a formatter by filename pattern .
162
12
240,396
def get_tokens_unprocessed(self, text, stack=('root',)):
    """Reset the content-type state, then delegate to
    RegexLexer.get_tokens_unprocessed.

    ``content_type`` is cleared up front so any value set during a
    previous call does not carry over into this run.
    """
    self.content_type = None
    return RegexLexer.get_tokens_unprocessed(self, text, stack)
Reset the content - type state .
54
8
240,397
def find_lexer_class(name):
    """Look up a lexer class by its full name.

    The cache is consulted first, then builtin lexers (loading their
    module on demand), then plugin lexers.  Returns None when unknown.
    """
    if name in _lexer_cache:
        return _lexer_cache[name]
    # lookup builtin lexers
    for module_name, lexer_name, _aliases, _, _ in itervalues(LEXERS):
        if lexer_name == name:
            _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for plugin_cls in find_plugin_lexers():
        if plugin_cls.name == name:
            return plugin_cls
Lookup a lexer class by name .
119
9
240,398
def find_lexer_class_by_name(_alias):
    """Look up a lexer class by alias (compared case-insensitively).

    Raises ClassNotFound when the alias is empty or unknown.
    """
    if not _alias:
        raise ClassNotFound('no lexer for alias %r found' % _alias)
    wanted = _alias.lower()
    # lookup builtin lexers
    for module_name, name, aliases, _, _ in itervalues(LEXERS):
        if wanted in aliases:
            if name not in _lexer_cache:
                _load_lexers(module_name)
            return _lexer_cache[name]
    # continue with lexers from setuptools entrypoints
    for plugin_cls in find_plugin_lexers():
        if wanted in plugin_cls.aliases:
            return plugin_cls
    raise ClassNotFound('no lexer for alias %r found' % _alias)
Lookup a lexer class by alias .
167
9
240,399
def load_lexer_from_file(filename, lexername="CustomLexer", **options):
    """Load a lexer class named *lexername* from a Python source file
    and instantiate it with *options*.

    Raises ClassNotFound when the file cannot be read, does not define
    the class, or raises while being executed.
    """
    try:
        # This empty dict will contain the namespace for the exec'd file
        custom_namespace = {}
        # Fix: use a context manager so the file handle is closed even
        # if exec() raises (the original leaked the handle).
        with open(filename, 'rb') as f:
            exec(f.read(), custom_namespace)
        # Retrieve the class `lexername` from that namespace
        if lexername not in custom_namespace:
            raise ClassNotFound('no valid %s class found in %s' %
                                (lexername, filename))
        lexer_class = custom_namespace[lexername]
        # And finally instantiate it with the options
        return lexer_class(**options)
    except IOError:
        raise ClassNotFound('cannot read %s' % filename)
    except ClassNotFound:
        raise
    except Exception as err:
        raise ClassNotFound('error when loading custom lexer: %s' % err)
Load a lexer from a file .
199
8