idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
245,400
def process_config(ctx, configfile):
    """Process a YAML config with instructions.

    Loads and validates the config file, installs the listed services and
    integrations, configures the integrations, and finally runs the services.

    :param ctx: click context (used to invoke sub-commands)
    :param configfile: path to the YAML configuration file
    :raises exceptions.ConfigFieldMissing: when a required top-level key is absent
    :raises exceptions.ConfigFieldTypeMismatch: when the config version is unsupported
    """
    from honeycomb.commands.service.run import run as service_run
    # from honeycomb.commands.service.logs import logs as service_logs
    from honeycomb.commands.service.install import install as service_install
    from honeycomb.commands.integration.install import install as integration_install
    from honeycomb.commands.integration.configure import configure as integration_configure

    VERSION = "version"
    SERVICES = defs.SERVICES
    INTEGRATIONS = defs.INTEGRATIONS
    required_top_keys = [VERSION, SERVICES]
    supported_versions = [1]

    def validate_yml(config):
        # Config must carry every required top-level key and a known version.
        for key in required_top_keys:
            if key not in config:
                raise exceptions.ConfigFieldMissing(key)

        version = config.get(VERSION)
        if version not in supported_versions:
            raise exceptions.ConfigFieldTypeMismatch(VERSION, version,
                                                     "one of: {}".format(repr(supported_versions)))

    def install_plugins(services, integrations):
        for cmd, kwargs in [(service_install, {SERVICES: services}),
                            (integration_install, {INTEGRATIONS: integrations})]:
            try:
                ctx.invoke(cmd, **kwargs)
            except SystemExit:
                # If a plugin is already installed honeycomb will exit abnormally
                pass

    def parameters_to_string(parameters_dict):
        return ["{}={}".format(k, v) for k, v in parameters_dict.items()]

    def configure_integrations(integrations):
        for integration in integrations:
            args_list = parameters_to_string(config[INTEGRATIONS][integration].get(defs.PARAMETERS, dict()))
            ctx.invoke(integration_configure, integration=integration, args=args_list)

    def run_services(services, integrations):
        # TODO: Enable support with multiple services as daemon, and run service.logs afterwards
        # tricky part is that services launched as daemon are exited with os._exit(0) so you
        # can't catch it.
        for service in services:
            args_list = parameters_to_string(config[SERVICES][service].get(defs.PARAMETERS, dict()))
            ctx.invoke(service_run, service=service, integration=integrations, args=args_list)

    # TODO: Silence normal stdout and follow honeycomb.debug.json instead
    # This would make monitoring containers and collecting logs easier
    with open(configfile, "rb") as fh:
        # safe_load: the config is user-supplied input; yaml.load without a
        # Loader allows arbitrary object construction and is deprecated.
        config = yaml.safe_load(fh.read())

    validate_yml(config)
    services = config.get(SERVICES).keys()
    integrations = config.get(INTEGRATIONS).keys() if config.get(INTEGRATIONS) else []
    install_plugins(services, integrations)
    configure_integrations(integrations)
    run_services(services, integrations)
Process a YAML config with instructions.
655
8
245,401
def get_plugin_path ( home , plugin_type , plugin_name , editable = False ) : if editable : plugin_path = plugin_name else : plugin_path = os . path . join ( home , plugin_type , plugin_name ) return os . path . realpath ( plugin_path )
Return path to plugin .
68
5
245,402
def install_plugin ( pkgpath , plugin_type , install_path , register_func ) : service_name = os . path . basename ( pkgpath ) if os . path . exists ( os . path . join ( install_path , service_name ) ) : raise exceptions . PluginAlreadyInstalled ( pkgpath ) if os . path . exists ( pkgpath ) : logger . debug ( "%s exists in filesystem" , pkgpath ) if os . path . isdir ( pkgpath ) : pip_status = install_dir ( pkgpath , install_path , register_func ) else : # pkgpath is file pip_status = install_from_zip ( pkgpath , install_path , register_func ) else : logger . debug ( "cannot find %s locally, checking github repo" , pkgpath ) click . secho ( "Collecting {}.." . format ( pkgpath ) ) pip_status = install_from_repo ( pkgpath , plugin_type , install_path , register_func ) if pip_status == 0 : click . secho ( "[+] Great success!" ) else : # TODO: rephrase click . secho ( "[-] Service installed but something was odd with dependency install, please review debug logs" )
Install specified plugin .
282
4
245,403
def install_deps ( pkgpath ) : if os . path . exists ( os . path . join ( pkgpath , "requirements.txt" ) ) : logger . debug ( "installing dependencies" ) click . secho ( "[*] Installing dependencies" ) pipargs = [ "install" , "--target" , os . path . join ( pkgpath , defs . DEPS_DIR ) , "--ignore-installed" , "-r" , os . path . join ( pkgpath , "requirements.txt" ) ] logger . debug ( "running pip %s" , pipargs ) return subprocess . check_call ( [ sys . executable , "-m" , "pip" ] + pipargs ) return 0
Install plugin dependencies using pip .
163
6
245,404
def copy_file ( src , dst ) : try : fin = os . open ( src , READ_FLAGS ) stat = os . fstat ( fin ) fout = os . open ( dst , WRITE_FLAGS , stat . st_mode ) for x in iter ( lambda : os . read ( fin , BUFFER_SIZE ) , b"" ) : os . write ( fout , x ) finally : try : os . close ( fin ) except Exception as exc : logger . debug ( "Failed to close file handle when copying: {}" . format ( exc ) ) try : os . close ( fout ) except Exception as exc : logger . debug ( "Failed to close file handle when copying: {}" . format ( exc ) )
Copy a single file .
159
5
245,405
def copy_tree ( src , dst , symlinks = False , ignore = [ ] ) : names = os . listdir ( src ) if not os . path . exists ( dst ) : os . makedirs ( dst ) errors = [ ] for name in names : if name in ignore : continue srcname = os . path . join ( src , name ) dstname = os . path . join ( dst , name ) try : if symlinks and os . path . islink ( srcname ) : linkto = os . readlink ( srcname ) os . symlink ( linkto , dstname ) elif os . path . isdir ( srcname ) : copy_tree ( srcname , dstname , symlinks , ignore ) else : copy_file ( srcname , dstname ) except ( IOError , os . error ) as exc : errors . append ( ( srcname , dstname , str ( exc ) ) ) except CTError as exc : errors . extend ( exc . errors ) if errors : raise CTError ( errors )
Copy a full directory structure .
221
6
245,406
def install_dir ( pkgpath , install_path , register_func , delete_after_install = False ) : logger . debug ( "%s is a directory, attempting to validate" , pkgpath ) plugin = register_func ( pkgpath ) logger . debug ( "%s looks good, copying to %s" , pkgpath , install_path ) try : copy_tree ( pkgpath , os . path . join ( install_path , plugin . name ) ) if delete_after_install : logger . debug ( "deleting %s" , pkgpath ) shutil . rmtree ( pkgpath ) pkgpath = os . path . join ( install_path , plugin . name ) except ( OSError , CTError ) as exc : # TODO: handle package name exists (upgrade? overwrite?) logger . debug ( str ( exc ) , exc_info = True ) raise exceptions . PluginAlreadyInstalled ( plugin . name ) return install_deps ( pkgpath )
Install plugin from specified directory .
219
6
245,407
def install_from_zip ( pkgpath , install_path , register_func , delete_after_install = False ) : logger . debug ( "%s is a file, attempting to load zip" , pkgpath ) pkgtempdir = tempfile . mkdtemp ( prefix = "honeycomb_" ) try : with zipfile . ZipFile ( pkgpath ) as pkgzip : pkgzip . extractall ( pkgtempdir ) except zipfile . BadZipfile as exc : logger . debug ( str ( exc ) ) raise click . ClickException ( str ( exc ) ) if delete_after_install : logger . debug ( "deleting %s" , pkgpath ) os . remove ( pkgpath ) logger . debug ( "installing from unzipped folder %s" , pkgtempdir ) return install_dir ( pkgtempdir , install_path , register_func , delete_after_install = True )
Install plugin from zipfile .
207
6
245,408
def install_from_repo ( pkgname , plugin_type , install_path , register_func ) : rsession = requests . Session ( ) rsession . mount ( "https://" , HTTPAdapter ( max_retries = 3 ) ) logger . debug ( "trying to install %s from online repo" , pkgname ) pkgurl = "{}/{}s/{}.zip" . format ( defs . GITHUB_RAW , plugin_type , pkgname ) try : logger . debug ( "Requesting HTTP HEAD: %s" , pkgurl ) r = rsession . head ( pkgurl ) r . raise_for_status ( ) total_size = int ( r . headers . get ( "content-length" , 0 ) ) pkgsize = _sizeof_fmt ( total_size ) with click . progressbar ( length = total_size , label = "Downloading {} {} ({}).." . format ( plugin_type , pkgname , pkgsize ) ) as bar : r = rsession . get ( pkgurl , stream = True ) with tempfile . NamedTemporaryFile ( delete = False ) as f : downloaded_bytes = 0 for chunk in r . iter_content ( chunk_size = 1 ) : # TODO: Consider increasing to reduce cycles if chunk : f . write ( chunk ) downloaded_bytes += len ( chunk ) bar . update ( downloaded_bytes ) return install_from_zip ( f . name , install_path , register_func , delete_after_install = True ) except requests . exceptions . HTTPError as exc : logger . debug ( str ( exc ) ) raise exceptions . PluginNotFoundInOnlineRepo ( pkgname ) except requests . exceptions . ConnectionError as exc : logger . debug ( str ( exc ) ) raise exceptions . PluginRepoConnectionError ( )
Install plugin from online repo .
403
6
245,409
def uninstall_plugin ( pkgpath , force ) : pkgname = os . path . basename ( pkgpath ) if os . path . exists ( pkgpath ) : if not force : click . confirm ( "[?] Are you sure you want to delete `{}` from honeycomb?" . format ( pkgname ) , abort = True ) try : shutil . rmtree ( pkgpath ) logger . debug ( "successfully uninstalled {}" . format ( pkgname ) ) click . secho ( "[*] Uninstalled {}" . format ( pkgname ) ) except OSError as exc : logger . exception ( str ( exc ) ) else : click . secho ( "[-] doh! I cannot seem to find `{}`, are you sure it's installed?" . format ( pkgname ) )
Uninstall a plugin .
181
5
245,410
def list_remote_plugins ( installed_plugins , plugin_type ) : click . secho ( "\n[*] Additional plugins from online repository:" ) try : rsession = requests . Session ( ) rsession . mount ( "https://" , HTTPAdapter ( max_retries = 3 ) ) r = rsession . get ( "{0}/{1}s/{1}s.txt" . format ( defs . GITHUB_RAW , plugin_type ) ) logger . debug ( "fetching %ss from remote repo" , plugin_type ) plugins = [ _ for _ in r . text . splitlines ( ) if _ not in installed_plugins ] click . secho ( " " . join ( plugins ) ) except requests . exceptions . ConnectionError as exc : logger . debug ( str ( exc ) , exc_info = True ) raise click . ClickException ( "Unable to fetch {} information from online repository" . format ( plugin_type ) )
List remote plugins from online repo .
209
7
245,411
def list_local_plugins ( plugin_type , plugins_path , plugin_details ) : installed_plugins = list ( ) for plugin in next ( os . walk ( plugins_path ) ) [ 1 ] : s = plugin_details ( plugin ) installed_plugins . append ( plugin ) click . secho ( s ) if not installed_plugins : click . secho ( "[*] You do not have any {0}s installed, " "try installing one with `honeycomb {0} install`" . format ( plugin_type ) ) return installed_plugins
List local plugins with details .
121
6
245,412
def parse_plugin_args ( command_args , config_args ) : parsed_args = dict ( ) for arg in command_args : kv = arg . split ( "=" ) if len ( kv ) != 2 : raise click . UsageError ( "Invalid parameter '{}', must be in key=value format" . format ( arg ) ) parsed_args [ kv [ 0 ] ] = config_utils . get_truetype ( kv [ 1 ] ) for arg in config_args : value = arg [ defs . VALUE ] value_type = arg [ defs . TYPE ] if value in parsed_args : # will raise if invalid config_utils . validate_field_matches_type ( value , parsed_args [ value ] , value_type , arg . get ( defs . ITEMS ) , arg . get ( defs . MIN ) , arg . get ( defs . MAX ) ) elif defs . DEFAULT in arg : # Has a default field # return default values for unset parameters parsed_args [ value ] = arg [ defs . DEFAULT ] elif arg [ defs . REQUIRED ] : # requires field is true """parameter was not supplied by user, but it's required and has no default value""" raise exceptions . RequiredFieldMissing ( value ) return parsed_args
Parse command-line arguments based on the plugin's parameters config.
288
13
245,413
def get_select_items ( items ) : option_items = list ( ) for item in items : if isinstance ( item , dict ) and defs . VALUE in item and defs . LABEL in item : option_items . append ( item [ defs . VALUE ] ) else : raise exceptions . ParametersFieldError ( item , "a dictionary with {} and {}" . format ( defs . LABEL , defs . VALUE ) ) return option_items
Return list of possible select items .
103
7
245,414
def print_plugin_args ( plugin_path ) : args = config_utils . get_config_parameters ( plugin_path ) args_format = "{:20} {:10} {:^15} {:^10} {:25}" title = args_format . format ( defs . NAME . upper ( ) , defs . TYPE . upper ( ) , defs . DEFAULT . upper ( ) , defs . REQUIRED . upper ( ) , defs . DESCRIPTION . upper ( ) ) click . secho ( title ) click . secho ( "-" * len ( title ) ) for arg in args : help_text = " ({})" . format ( arg [ defs . HELP_TEXT ] ) if defs . HELP_TEXT in arg else "" options = _parse_select_options ( arg ) description = arg [ defs . LABEL ] + options + help_text click . secho ( args_format . format ( arg [ defs . VALUE ] , arg [ defs . TYPE ] , str ( arg . get ( defs . DEFAULT , None ) ) , str ( arg . get ( defs . REQUIRED , False ) ) , description ) )
Print plugin parameters table .
257
5
245,415
def configure_integration ( path ) : integration = register_integration ( path ) integration_args = { } try : with open ( os . path . join ( path , ARGS_JSON ) ) as f : integration_args = json . loads ( f . read ( ) ) except Exception as exc : logger . debug ( str ( exc ) , exc_info = True ) raise click . ClickException ( "Cannot load {} integration args, please configure it first." . format ( os . path . basename ( path ) ) ) click . secho ( "[*] Adding integration {}" . format ( integration . name ) ) logger . debug ( "Adding integration %s" , integration . name , extra = { "integration" : integration . name , "args" : integration_args } ) configured_integration = ConfiguredIntegration ( name = integration . name , integration = integration , path = path ) configured_integration . data = integration_args configured_integration . integration . module = get_integration_module ( path ) . IntegrationActionsClass ( integration_args ) configured_integrations . append ( configured_integration )
Configure and enable an integration .
243
7
245,416
def send_alert_to_subscribed_integrations ( alert ) : valid_configured_integrations = get_valid_configured_integrations ( alert ) for configured_integration in valid_configured_integrations : threading . Thread ( target = create_integration_alert_and_call_send , args = ( alert , configured_integration ) ) . start ( )
Send Alert to relevant integrations .
85
7
245,417
def get_valid_configured_integrations ( alert ) : if not configured_integrations : return [ ] # Collect all integrations that are configured for specific alert_type # or have no specific supported_event_types (i.e., all alert types) valid_configured_integrations = [ _ for _ in configured_integrations if _ . integration . integration_type == IntegrationTypes . EVENT_OUTPUT . name and ( not _ . integration . supported_event_types or alert . alert_type in _ . integration . supported_event_types ) ] return valid_configured_integrations
Return a list of integrations for alert filtered by alert_type .
130
14
245,418
def create_integration_alert_and_call_send ( alert , configured_integration ) : integration_alert = IntegrationAlert ( alert = alert , configured_integration = configured_integration , status = IntegrationAlertStatuses . PENDING . name , retries = configured_integration . integration . max_send_retries ) send_alert_to_configured_integration ( integration_alert )
Create an IntegrationAlert object and send it to Integration .
89
11
245,419
def send_alert_to_configured_integration(integration_alert):
    """Send IntegrationAlert to configured integration.

    Collects the alert's populated slot fields (minus alert_type and
    service_type), calls the integration's send_event, then either marks the
    alert DONE or queues it for polling. On send failure, retries with a
    sleep between attempts until the retry budget is exhausted.
    """
    try:
        alert = integration_alert.alert
        configured_integration = integration_alert.configured_integration
        integration = configured_integration.integration
        integration_actions_instance = configured_integration.integration.module

        alert_fields = dict()
        if integration.required_fields:
            # skip silently when the alert lacks a field this integration requires
            if not all([hasattr(alert, _) for _ in integration.required_fields]):
                logger.debug("Alert does not have all required_fields (%s) for integration %s, skipping",
                             integration.required_fields,
                             integration.name)
                return

        exclude_fields = ["alert_type", "service_type"]
        alert_fields = {}
        for field in alert.__slots__:
            if hasattr(alert, field) and field not in exclude_fields:
                alert_fields[field] = getattr(alert, field)

        logger.debug("Sending alert %s to %s", alert_fields, integration.name)

        output_data, output_file_content = integration_actions_instance.send_event(alert_fields)

        if integration.polling_enabled:
            # polled alerts are finalized later by poll_integration_alert_data
            integration_alert.status = IntegrationAlertStatuses.POLLING.name
            polling_integration_alerts.append(integration_alert)
        else:
            integration_alert.status = IntegrationAlertStatuses.DONE.name

        integration_alert.send_time = get_current_datetime_utc()
        integration_alert.output_data = json.dumps(output_data)
        # TODO: do something with successfully handled alerts? They are all written to debug log file

    except exceptions.IntegrationMissingRequiredFieldError as exc:
        logger.exception("Send response formatting for integration alert %s failed. Missing required fields",
                         integration_alert,
                         exc.message)
        integration_alert.status = IntegrationAlertStatuses.ERROR_MISSING_SEND_FIELDS.name

    except exceptions.IntegrationOutputFormatError:
        logger.exception("Send response formatting for integration alert %s failed", integration_alert)
        integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING_FORMATTING.name

    except exceptions.IntegrationSendEventError as exc:
        # cap the retry counter first -- making sure we do not exceed celery max retries
        integration_send_retries = integration_alert.retries if integration_alert.retries <= MAX_SEND_RETRIES \
            else MAX_SEND_RETRIES
        send_retries_left = integration_send_retries - 1
        integration_alert.retries = send_retries_left

        logger.error("Sending integration alert %s failed. Message: %s. Retries left: %s",
                     integration_alert,
                     exc.message,
                     send_retries_left)

        if send_retries_left == 0:
            integration_alert.status = IntegrationAlertStatuses.ERROR_SENDING.name

        if send_retries_left > 0:
            # recursive retry after a fixed back-off interval
            sleep(SEND_ALERT_DATA_INTERVAL)
            send_alert_to_configured_integration(integration_alert)
Send IntegrationAlert to configured integration .
662
7
245,420
def poll_integration_alert_data ( integration_alert ) : logger . info ( "Polling information for integration alert %s" , integration_alert ) try : configured_integration = integration_alert . configured_integration integration_actions_instance = configured_integration . integration . module output_data , output_file_content = integration_actions_instance . poll_for_updates ( json . loads ( integration_alert . output_data ) ) integration_alert . status = IntegrationAlertStatuses . DONE . name integration_alert . output_data = json . dumps ( output_data ) polling_integration_alerts . remove ( integration_alert ) except exceptions . IntegrationNoMethodImplementationError : logger . error ( "No poll_for_updates function found for integration alert %s" , integration_alert ) integration_alert . status = IntegrationAlertStatuses . ERROR_POLLING . name except exceptions . IntegrationPollEventError : # This does not always indicate an error, this is also raised when need to try again later logger . debug ( "Polling for integration alert %s failed" , integration_alert ) except exceptions . IntegrationOutputFormatError : logger . error ( "Integration alert %s formatting error" , integration_alert ) integration_alert . status = IntegrationAlertStatuses . ERROR_POLLING_FORMATTING . name except Exception : logger . exception ( "Error polling integration alert %s" , integration_alert ) integration_alert . status = IntegrationAlertStatuses . ERROR_POLLING . name
Poll for updates on waiting IntegrationAlerts .
329
9
245,421
def wait_until ( func , check_return_value = True , total_timeout = 60 , interval = 0.5 , exc_list = None , error_message = "" , * args , * * kwargs ) : start_function = time . time ( ) while time . time ( ) - start_function < total_timeout : try : logger . debug ( "executing {} with args {} {}" . format ( func , args , kwargs ) ) return_value = func ( * args , * * kwargs ) if not check_return_value or ( check_return_value and return_value ) : return return_value except Exception as exc : if exc_list and any ( [ isinstance ( exc , x ) for x in exc_list ] ) : pass else : raise time . sleep ( interval ) raise TimeoutException ( error_message )
Run a command in a loop until desired result or timeout occurs .
187
13
245,422
def search_json_log ( filepath , key , value ) : try : with open ( filepath , "r" ) as fh : for line in fh . readlines ( ) : log = json . loads ( line ) if key in log and log [ key ] == value : return log except IOError : pass return False
Search a JSON log file for a key=value pair.
71
11
245,423
def list_commands ( self , ctx ) : rv = [ ] files = [ _ for _ in next ( os . walk ( self . folder ) ) [ 2 ] if not _ . startswith ( "_" ) and _ . endswith ( ".py" ) ] for filename in files : rv . append ( filename [ : - 3 ] ) rv . sort ( ) return rv
List commands from folder .
87
5
245,424
def get_command ( self , ctx , name ) : plugin = os . path . basename ( self . folder ) try : command = importlib . import_module ( "honeycomb.commands.{}.{}" . format ( plugin , name ) ) except ImportError : raise click . UsageError ( "No such command {} {}\n\n{}" . format ( plugin , name , self . get_help ( ctx ) ) ) return getattr ( command , name )
Fetch command from folder .
105
6
245,425
def cli ( ctx , home , iamroot , config , verbose ) : _mkhome ( home ) setup_logging ( home , verbose ) logger . debug ( "Honeycomb v%s" , __version__ , extra = { "version" : __version__ } ) logger . debug ( "running command %s (%s)" , ctx . command . name , ctx . params , extra = { "command" : ctx . command . name , "params" : ctx . params } ) try : is_admin = os . getuid ( ) == 0 except AttributeError : is_admin = ctypes . windll . shell32 . IsUserAnAdmin ( ) if is_admin : if not iamroot : raise click . ClickException ( "Honeycomb should not run as a privileged user, if you are just " "trying to bind to a low port try running `setcap 'cap_net_bind_service=+ep' " "$(which honeycomb)` instead. If you insist, use --iamroot" ) logger . warn ( "running as root!" ) ctx . obj [ "HOME" ] = home logger . debug ( "ctx: {}" . format ( ctx . obj ) ) if config : return process_config ( ctx , config )
Honeycomb is a honeypot framework .
283
9
245,426
def setup_logging ( home , verbose ) : logging . setLoggerClass ( MyLogger ) logging . config . dictConfig ( { "version" : 1 , "disable_existing_loggers" : False , "formatters" : { "console" : { "format" : "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s" , } , "json" : { "()" : jsonlogger . JsonFormatter , "format" : "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s" , } , } , "handlers" : { "default" : { "level" : "DEBUG" if verbose else "INFO" , "class" : "logging.StreamHandler" , "formatter" : "console" , } , "file" : { "level" : "DEBUG" , "class" : "logging.handlers.WatchedFileHandler" , "filename" : os . path . join ( home , DEBUG_LOG_FILE ) , "formatter" : "json" , } , } , "loggers" : { "" : { "handlers" : [ "default" , "file" ] , "level" : "DEBUG" , "propagate" : True , } , } } )
Configure logging for honeycomb .
335
7
245,427
def makeRecord ( self , name , level , fn , lno , msg , args , exc_info , func = None , extra = None , sinfo = None ) : # See below commented section for a simple example of what the docstring refers to if six . PY2 : rv = logging . LogRecord ( name , level , fn , lno , msg , args , exc_info , func ) else : rv = logging . LogRecord ( name , level , fn , lno , msg , args , exc_info , func , sinfo ) if extra is None : extra = dict ( ) extra . update ( { "pid" : os . getpid ( ) , "uid" : os . getuid ( ) , "gid" : os . getgid ( ) , "ppid" : os . getppid ( ) } ) for key in extra : # if (key in ["message", "asctime"]) or (key in rv.__dict__): # raise KeyError("Attempt to overwrite %r in LogRecord" % key) rv . __dict__ [ key ] = extra [ key ] return rv
Override default logger to allow overriding of internal attributes .
247
10
245,428
def stop ( ctx , service , editable ) : logger . debug ( "running command %s (%s)" , ctx . command . name , ctx . params , extra = { "command" : ctx . command . name , "params" : ctx . params } ) home = ctx . obj [ "HOME" ] service_path = plugin_utils . get_plugin_path ( home , SERVICES , service , editable ) logger . debug ( "loading {}" . format ( service ) ) service = register_service ( service_path ) try : with open ( os . path . join ( service_path , ARGS_JSON ) ) as f : service_args = json . loads ( f . read ( ) ) except IOError as exc : logger . debug ( str ( exc ) , exc_info = True ) raise click . ClickException ( "Cannot load service args, are you sure server is running?" ) # get our service class instance service_module = get_service_module ( service_path ) service_obj = service_module . service_class ( alert_types = service . alert_types , service_args = service_args ) # prepare runner runner = myRunner ( service_obj , pidfile = service_path + ".pid" , stdout = open ( os . path . join ( service_path , "stdout.log" ) , "ab" ) , stderr = open ( os . path . join ( service_path , "stderr.log" ) , "ab" ) ) click . secho ( "[*] Stopping {}" . format ( service . name ) ) try : runner . _stop ( ) except daemon . runner . DaemonRunnerStopFailureError as exc : logger . debug ( str ( exc ) , exc_info = True ) raise click . ClickException ( "Unable to stop service, are you sure it is running?" )
Stop a running service daemon .
408
6
245,429
def logs ( ctx , services , num , follow ) : logger . debug ( "running command %s (%s)" , ctx . command . name , ctx . params , extra = { "command" : ctx . command . name , "params" : ctx . params } ) home = ctx . obj [ "HOME" ] services_path = os . path . join ( home , SERVICES ) tail_threads = [ ] for service in services : logpath = os . path . join ( services_path , service , LOGS_DIR , STDOUTLOG ) if os . path . exists ( logpath ) : logger . debug ( "tailing %s" , logpath ) # TODO: Print log lines from multiple services sorted by timestamp t = threading . Thread ( target = Tailer , kwargs = { "name" : service , "nlines" : num , "filepath" : logpath , "follow" : follow } ) t . daemon = True t . start ( ) tail_threads . append ( t ) if tail_threads : while tail_threads [ 0 ] . isAlive ( ) : tail_threads [ 0 ] . join ( 0.1 )
Show logs of daemonized service .
260
7
245,430
def get_integration_module ( integration_path ) : # add custom paths so imports would work paths = [ os . path . join ( __file__ , ".." , ".." ) , # to import integrationmanager os . path . join ( integration_path , ".." ) , # to import integration itself os . path . join ( integration_path , DEPS_DIR ) , # to import integration deps ] for path in paths : path = os . path . realpath ( path ) logger . debug ( "adding %s to path" , path ) sys . path . insert ( 0 , path ) # get our integration class instance integration_name = os . path . basename ( integration_path ) logger . debug ( "importing %s" , "." . join ( [ integration_name , INTEGRATION ] ) ) return importlib . import_module ( "." . join ( [ integration_name , INTEGRATION ] ) )
Add custom paths to sys and import integration module .
205
10
245,431
def register_integration ( package_folder ) : logger . debug ( "registering integration %s" , package_folder ) package_folder = os . path . realpath ( package_folder ) if not os . path . exists ( package_folder ) : raise IntegrationNotFound ( os . path . basename ( package_folder ) ) json_config_path = os . path . join ( package_folder , CONFIG_FILE_NAME ) if not os . path . exists ( json_config_path ) : raise ConfigFileNotFound ( json_config_path ) with open ( json_config_path , "r" ) as f : config_json = json . load ( f ) # Validate integration and alert config validate_config ( config_json , defs . INTEGRATION_VALIDATE_CONFIG_FIELDS ) validate_config_parameters ( config_json , defs . INTEGRATION_PARAMETERS_ALLOWED_KEYS , defs . INTEGRATION_PARAMETERS_ALLOWED_TYPES ) integration_type = _create_integration_object ( config_json ) return integration_type
Register a honeycomb integration .
253
6
245,432
def list ( ctx , remote ) : logger . debug ( "running command %s (%s)" , ctx . command . name , ctx . params , extra = { "command" : ctx . command . name , "params" : ctx . params } ) click . secho ( "[*] Installed integrations:" ) home = ctx . obj [ "HOME" ] integrations_path = os . path . join ( home , INTEGRATIONS ) plugin_type = "integration" def get_integration_details ( integration_name ) : logger . debug ( "loading {}" . format ( integration_name ) ) integration = register_integration ( os . path . join ( integrations_path , integration_name ) ) supported_event_types = integration . supported_event_types if not supported_event_types : supported_event_types = "All" return "{:s} ({:s}) [Supported event types: {}]" . format ( integration . name , integration . description , supported_event_types ) installed_integrations = list_local_plugins ( plugin_type , integrations_path , get_integration_details ) if remote : list_remote_plugins ( installed_integrations , plugin_type ) else : click . secho ( "\n[*] Try running `honeycomb integrations list -r` " "to see integrations available from our repository" )
List integrations .
305
4
245,433
def run(ctx, service, args, show_args, daemon, editable, integration):
    """Load and run a specific service, optionally daemonized with integrations."""
    home = ctx.obj["HOME"]
    service_path = plugin_utils.get_plugin_path(home, SERVICES, service, editable)
    service_log_path = os.path.join(service_path, LOGS_DIR)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(service, service_path))
    service = register_service(service_path)

    if show_args:
        return plugin_utils.print_plugin_args(service_path)

    # Instantiate the service class with its parsed CLI arguments.
    service_module = get_service_module(service_path)
    service_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(service_path))
    service_obj = service_module.service_class(alert_types=service.alert_types, service_args=service_args)

    if not os.path.exists(service_log_path):
        os.mkdir(service_log_path)

    if daemon:
        # Build a daemon runner; keep logging stream/socket FDs open across the fork.
        runner = myRunner(service_obj,
                          pidfile=service_path + ".pid",
                          stdout=open(os.path.join(service_log_path, STDOUTLOG), "ab"),
                          stderr=open(os.path.join(service_log_path, STDERRLOG), "ab"))
        files_preserve = []
        for handler in logging.getLogger().handlers:
            if hasattr(handler, "stream"):
                if hasattr(handler.stream, "fileno"):
                    files_preserve.append(handler.stream.fileno())
            if hasattr(handler, "socket"):
                files_preserve.append(handler.socket.fileno())
        runner.daemon_context.files_preserve = files_preserve
        runner.daemon_context.signal_map.update({
            signal.SIGTERM: service_obj._on_server_shutdown,
            signal.SIGINT: service_obj._on_server_shutdown,
        })
        logger.debug("daemon_context", extra={"daemon_context": vars(runner.daemon_context)})

    for integration_name in integration:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration_name, editable)
        configure_integration(integration_path)

    click.secho("[+] Launching {} {}".format(service.name, "in daemon mode" if daemon else ""))
    try:
        # save service_args for external reference (see test)
        with open(os.path.join(service_path, ARGS_JSON), "w") as f:
            f.write(json.dumps(service_args))
        runner._start() if daemon else service_obj.run()
    except KeyboardInterrupt:
        service_obj._on_server_shutdown()
    click.secho("[*] {} has stopped".format(service.name))
Load and run a specific service .
733
7
245,434
def read_lines(self, file_path, empty_lines=False, signal_ready=True):
    """Tail *file_path*, yielding new lines and following file rotation."""
    file_handler, file_id = self._get_file(file_path)
    file_handler.seek(0, os.SEEK_END)  # only yield content written from now on
    if signal_ready:
        self.signal_ready()
    while self.thread_server.is_alive():
        line = six.text_type(file_handler.readline(), "utf-8")
        if line:
            yield line
            continue
        if empty_lines:
            yield line
        time.sleep(0.1)
        # Reopen if the file was rotated/replaced underneath us.
        if file_id != self._get_file_id(os.stat(file_path)) and os.path.isfile(file_path):
            file_handler, file_id = self._get_file(file_path)
Fetch lines from file .
184
6
245,435
def on_server_start(self):
    """Service run loop: start the docker container and stream alerts from its logs."""
    self._container = self._docker_client.containers.run(
        self.docker_image_name, detach=True, **self.docker_params)
    self.signal_ready()
    for log_line in self.get_lines():
        try:
            alert_dict = self.parse_line(log_line)
            if alert_dict:
                self.add_alert_to_queue(alert_dict)
        except Exception:
            # A single malformed log line must not kill the run loop.
            self.logger.exception(None)
Service run loop function .
112
5
245,436
def on_server_shutdown(self):
    """Stop and remove the container (with its volumes) before shutting down."""
    if not self._container:
        return
    self._container.stop()
    self._container.remove(v=True, force=True)
Stop the container before shutting down .
42
7
245,437
def uninstall(ctx, yes, integrations):
    """Uninstall one or more honeycomb integrations."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for integration in integrations:
        integration_path = plugin_utils.get_plugin_path(home, INTEGRATIONS, integration)
        plugin_utils.uninstall_plugin(integration_path, yes)
Uninstall an integration .
115
5
245,438
def install(ctx, services, delete_after_install=False):
    """Install honeypot services from the online library, a local path or a zipfile."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    services_path = os.path.join(home, SERVICES)
    installed_all_plugins = True
    for service in services:
        try:
            plugin_utils.install_plugin(service, SERVICE, services_path, register_service)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            installed_all_plugins = False
    # Exit non-zero if at least one service was already installed.
    if not installed_all_plugins:
        raise ctx.exit(errno.EEXIST)
Install a honeypot service from the online library local path or zipfile .
169
15
245,439
def uninstall(ctx, yes, services):
    """Uninstall one or more honeypot services."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    for service in services:
        service_path = plugin_utils.get_plugin_path(home, SERVICES, service)
        plugin_utils.uninstall_plugin(service_path, yes)
Uninstall a service .
110
5
245,440
def get_service_module(service_path):
    """Make a service package importable and import its ``<name>_service`` module.

    Adds the paths required for the service, its parent package and its
    bundled dependencies to ``sys.path``, then imports the module.
    """
    paths = [
        os.path.dirname(__file__),  # this folder, to catch base_service
        os.path.realpath(os.path.join(service_path, "..")),  # service's parent folder for import
        os.path.realpath(os.path.join(service_path)),  # service's folder for local imports
        os.path.realpath(os.path.join(service_path, DEPS_DIR)),  # deps dir
    ]
    for path in paths:
        path = os.path.realpath(path)
        # FIX: avoid unbounded sys.path growth when called repeatedly.
        if path not in sys.path:
            logger.debug("adding %s to path", path)
            sys.path.insert(0, path)
    # get our service class instance
    service_name = os.path.basename(service_path)
    module = ".".join([service_name, service_name + "_service"])
    logger.debug("importing %s", module)
    return importlib.import_module(module)
Add custom paths to sys and import service module .
234
10
245,441
def register_service(package_folder):
    """Register a honeycomb service: load, validate and instantiate its config."""
    logger.debug("registering service %s", package_folder)
    folder = os.path.realpath(package_folder)
    if not os.path.exists(folder):
        raise ServiceNotFound(os.path.basename(folder))
    json_config_path = os.path.join(folder, CONFIG_FILE_NAME)
    if not os.path.exists(json_config_path):
        raise ConfigFileNotFound(json_config_path)
    with open(json_config_path, "r") as config_file:
        config_json = json.load(config_file)
    # Validate service and alert config before building the service object.
    config_utils.validate_config(config_json, defs.SERVICE_ALERT_VALIDATE_FIELDS)
    config_utils.validate_config(config_json.get(defs.SERVICE_CONFIG_SECTION_KEY, {}),
                                 defs.SERVICE_CONFIG_VALIDATE_FIELDS)
    _validate_supported_platform(config_json)
    _validate_alert_configs(config_json)
    config_utils.validate_config_parameters(config_json,
                                            defs.SERVICE_ALLOWED_PARAMTER_KEYS,
                                            defs.SERVICE_ALLOWED_PARAMTER_TYPES)
    service_type = _create_service_object(config_json)
    service_type.alert_types = _create_alert_types(config_json, service_type)
    return service_type
Register a honeycomb service .
343
6
245,442
def install(ctx, integrations, delete_after_install=False):
    """Install honeycomb integrations from the online library, a local path or a zipfile."""
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    home = ctx.obj["HOME"]
    integrations_path = os.path.join(home, INTEGRATIONS)
    installed_all_plugins = True
    for integration in integrations:
        try:
            plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration)
        except exceptions.PluginAlreadyInstalled as exc:
            click.echo(exc)
            installed_all_plugins = False
    # Exit non-zero if at least one integration was already installed.
    if not installed_all_plugins:
        raise ctx.exit(errno.EEXIST)
Install a honeycomb integration from the online library local path or zipfile .
180
15
245,443
def configure(ctx, integration, args, show_args, editable):
    """Configure an integration, persisting its parsed arguments to args.json."""
    home = ctx.obj["HOME"]
    integration_path = plugin_utils.get_plugin_path(home, defs.INTEGRATIONS, integration, editable)
    logger.debug("running command %s (%s)", ctx.command.name, ctx.params,
                 extra={"command": ctx.command.name, "params": ctx.params})
    logger.debug("loading {} ({})".format(integration, integration_path))
    integration = register_integration(integration_path)

    if show_args:
        return plugin_utils.print_plugin_args(integration_path)

    # Parse the provided args against the integration's declared parameters.
    integration_args = plugin_utils.parse_plugin_args(args, config_utils.get_config_parameters(integration_path))
    args_file = os.path.join(integration_path, defs.ARGS_JSON)
    with open(args_file, "w") as f:
        data = json.dumps(integration_args)
        logger.debug("writing %s to %s", data, args_file)
        f.write(json.dumps(integration_args))
    click.secho("[*] {0} has been configured, make sure to test it with `honeycomb integration test {0}`"
                .format(integration.name))
Configure an integration with default parameters .
314
8
245,444
def get_match_history(self, account_id=None, **kwargs):
    """Return a dictionary containing a list of the most recent Dota matches."""
    kwargs.setdefault('account_id', account_id)
    url = self.__build_url(urls.GET_MATCH_HISTORY, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing a list of the most recent Dota matches
140
12
245,445
def get_match_history_by_seq_num(self, start_at_match_seq_num=None, **kwargs):
    """Return a dictionary containing Dota matches in the order they were recorded."""
    kwargs.setdefault('start_at_match_seq_num', start_at_match_seq_num)
    url = self.__build_url(urls.GET_MATCH_HISTORY_BY_SEQ_NUM, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing a list of Dota matches in the order they were recorded
177
15
245,446
def get_match_details(self, match_id=None, **kwargs):
    """Return a dictionary containing the details for a Dota 2 match."""
    kwargs.setdefault('match_id', match_id)
    url = self.__build_url(urls.GET_MATCH_DETAILS, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing the details for a Dota 2 match
141
11
245,447
def get_league_listing(self):
    """Return a dictionary containing a list of all ticketed leagues."""
    url = self.__build_url(urls.GET_LEAGUE_LISTING)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing a list of all ticketed leagues
98
11
245,448
def get_live_league_games(self):
    """Return a dictionary containing a list of ticketed games in progress."""
    url = self.__build_url(urls.GET_LIVE_LEAGUE_GAMES)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing a list of ticked games in progress
102
12
245,449
def get_team_info_by_team_id(self, start_at_team_id=None, **kwargs):
    """Return a dictionary containing in-game team information."""
    kwargs.setdefault('start_at_team_id', start_at_team_id)
    url = self.__build_url(urls.GET_TEAM_INFO_BY_TEAM_ID, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary containing in - game teams
168
9
245,450
def get_player_summaries(self, steamids=None, **kwargs):
    """Return a dictionary containing player summaries.

    ``steamids`` may be a single id or an iterable of ids; ``None`` entries
    are dropped and every id is converted to its 64-bit form.
    """
    # FIX: collections.Iterable was removed in Python 3.10; use collections.abc.
    try:
        from collections.abc import Iterable
    except ImportError:  # pragma: no cover - Python 2 fallback
        from collections import Iterable
    if not isinstance(steamids, Iterable):
        steamids = [steamids]
    base64_ids = list(map(convert_to_64_bit, filter(lambda x: x is not None, steamids)))
    if 'steamids' not in kwargs:
        kwargs['steamids'] = base64_ids
    url = self.__build_url(urls.GET_PLAYER_SUMMARIES, **kwargs)
    req = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(req.status_code):
        return response.build(req, url, self.raw_mode)
Returns a dictionary containing player summaries
195
8
245,451
def get_heroes(self, **kwargs):
    """Return a dictionary of in-game heroes, used to map ids to localised names."""
    url = self.__build_url(urls.GET_HEROES, language=self.language, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary of in - game heroes used to parse ids into localised names
111
17
245,452
def get_tournament_prize_pool(self, leagueid=None, **kwargs):
    """Return a dictionary with community-funded tournament prize pools."""
    kwargs.setdefault('leagueid', leagueid)
    url = self.__build_url(urls.GET_TOURNAMENT_PRIZE_POOL, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary that includes community funded tournament prize pools
146
10
245,453
def get_top_live_games(self, partner='', **kwargs):
    """Return a dictionary with the top-MMR live games."""
    kwargs.setdefault('partner', partner)
    url = self.__build_url(urls.GET_TOP_LIVE_GAME, **kwargs)
    request = self.executor(url)
    if self.logger:
        self.logger.info('URL: {0}'.format(url))
    if not self.__check_http_err(request.status_code):
        return response.build(request, url, self.raw_mode)
Returns a dictionary that includes top MMR live games
137
9
245,454
def __build_url(self, api_call, **kwargs):
    """Build the full query URL for *api_call*, injecting key/language/format."""
    kwargs['key'] = self.api_key
    kwargs.setdefault('language', self.language)
    kwargs.setdefault('format', self.__format)
    api_query = urlencode(kwargs)
    return "{0}{1}?{2}".format(urls.BASE_URL, api_call, api_query)
Builds the api query
118
5
245,455
def __check_http_err(self, status_code):
    """Raise for authentication (403) or timeout (503) errors; otherwise return False."""
    if status_code == 403:
        raise exceptions.APIAuthenticationError(self.api_key)
    if status_code == 503:
        raise exceptions.APITimeoutError()
    return False
Raises an exception if we get a http error
60
10
245,456
def item_id(response):
    """Annotate each player's item_N slots with item_N_name localised names."""
    slot_keys = ['item_{}'.format(i) for i in range(6)]
    for player in response['players']:
        for slot in slot_keys:
            # Linear scan of the item table for the matching id.
            for item in items['items']:
                if item['id'] == player[slot]:
                    player[slot + '_name'] = item['localized_name']
    return response
Parse the item ids will be available as item_0_name item_1_name item_2_name and so on
171
28
245,457
def get_reviews(obj):
    """Return the Review queryset attached to *obj* via its content type."""
    content_type = ContentType.objects.get_for_model(obj)
    return models.Review.objects.filter(content_type=content_type, object_id=obj.id)
Simply returns the reviews for an object .
49
8
245,458
def get_review_average(obj):
    """Return the average rating over all reviews of *obj*, or False if none."""
    reviews = get_reviews(obj)
    if not reviews:
        return False
    total = 0
    for review in reviews:
        average = review.get_average_rating()
        if average:
            # FIX: reuse the already-computed value instead of calling
            # get_average_rating() a second time per review.
            total += average
    if total > 0:
        return total / reviews.count()
    return False
Returns the review average for an object .
73
8
245,459
def render_category_averages(obj, normalize_to=100):
    """Render all per-category rating averages for *obj* (normalized to *normalize_to*)."""
    context = {'reviewed_item': obj}
    ctype = ContentType.objects.get_for_model(obj)
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)
    category_averages = {}
    # Sum each category's average over every review of the object.
    for review in reviews:
        review_category_averages = review.get_category_averages(normalize_to)
        if not review_category_averages:
            continue
        for category, average in review_category_averages.items():
            if category in category_averages:
                category_averages[category] += average
            else:
                category_averages[category] = average
    if reviews and category_averages:
        # Divide each sum by the number of non-empty ratings for that category.
        for category in category_averages:
            rating_count = models.Rating.objects.filter(
                category=category, value__isnull=False,
                review__content_type=ctype,
                review__object_id=obj.id).exclude(value='').count()
            category_averages[category] = category_averages[category] / rating_count
    else:
        # No reviews (or no rated categories): zero out every counted category.
        category_averages = {}
        for category in models.RatingCategory.objects.filter(counts_for_average=True):
            category_averages[category] = 0.0
    context.update({'category_averages': category_averages})
    return context
Renders all the sub - averages for each category .
320
11
245,460
def total_review_average(obj, normalize_to=100):
    """Return the average over all reviews of the given object (0 when it has none)."""
    ctype = ContentType.objects.get_for_model(obj)
    reviews = models.Review.objects.filter(content_type=ctype, object_id=obj.id)
    total_average = 0
    for review in reviews:
        total_average += review.get_average_rating(normalize_to)
    if reviews:
        total_average /= reviews.count()
    return total_average
Returns the average for all reviews of the given object .
102
11
245,461
def user_has_reviewed(obj, user):
    """Return True if *user* has already reviewed *obj*."""
    ctype = ContentType.objects.get_for_model(obj)
    try:
        models.Review.objects.get(user=user, content_type=ctype, object_id=obj.id)
    except models.Review.DoesNotExist:
        return False
    else:
        return True
Returns True if the user has already reviewed the object .
71
11
245,462
def str_to_bytes(value: str, expected_length: int) -> bytes:
    """Encode *value* to ASCII bytes and validate its length and Base32 alphabet."""
    length = len(value)
    if length != expected_length:
        raise ValueError('Expects {} characters for decoding; got {}'.format(expected_length, length))
    try:
        encoded = value.encode('ascii')
    except UnicodeEncodeError as ex:
        raise ValueError('Expects value that can be encoded in ASCII charset: {}'.format(ex))
    decoding = DECODING
    # ASCII encoding already guarantees every byte is < 128, so the
    # decode-table lookup below is in range.
    for byte in encoded:
        if decoding[byte] > 31:
            raise ValueError('Non-base32 character found: "{}"'.format(chr(byte)))
    return encoded
Convert the given string to bytes and validate it is within the Base32 character set .
176
18
245,463
def package_version():
    """Resolve the package version from Git tags and persist it to version.py."""
    module_dir = os.path.dirname(__file__)
    version_file = os.path.join(module_dir, 'version.py')
    version = read_version(version_file)
    write_version(version_file, version)
    return version
Get the package version via Git Tag .
57
8
245,464
def synchronized(*args):
    """Decorator preventing interleaved execution (race conditions).

    Usable bare (``@synchronized``, module-wide lock) or with an explicit
    lock argument (``@synchronized(lock)``).
    """
    if callable(args[0]):
        # Bare usage: args[0] is the decorated function itself.
        return decorate_synchronized(args[0], _synchronized_lock)

    def wrap(function):
        # Parameterized usage: args[0] is the caller-supplied lock.
        return decorate_synchronized(function, args[0])
    return wrap
A synchronized function prevents two or more callers to interleave its execution preventing race conditions .
62
18
245,465
def worker_thread(context):
    """Thread-pool worker loop: run the initializer (if any), then consume tasks."""
    queue = context.task_queue
    parameters = context.worker_parameters
    if parameters.initializer is not None:
        if not run_initializer(parameters.initializer, parameters.initargs):
            # A failing initializer poisons the whole pool.
            context.state = ERROR
            return
    for task in get_next_task(context, parameters.max_tasks):
        execute_next_task(task)
        queue.task_done()
The worker thread routines .
90
5
245,466
def stop_process(process):
    """Do its best to stop *process*, escalating to SIGKILL when needed.

    Raises RuntimeError when the process survives even SIGKILL.
    """
    process.terminate()
    process.join(3)
    if process.is_alive() and os.name != 'nt':
        try:
            os.kill(process.pid, signal.SIGKILL)
            process.join()
        except OSError:
            return
    if process.is_alive():
        # FIX: report the unstoppable worker's PID, not our own (os.getpid()
        # is the *current* process, which is obviously alive).
        raise RuntimeError("Unable to terminate PID %d" % process.pid)
Does its best to stop the process .
96
8
245,467
def send_result(pipe, data):
    """Send *data* over *pipe*, wrapping un-picklable payloads in a RemoteException."""
    try:
        pipe.send(data)
    except (pickle.PicklingError, TypeError) as error:
        # The parent must still receive *something* it can unpickle.
        error.traceback = format_exc()
        wrapped = RemoteException(error, error.traceback)
        pipe.send(wrapped)
Send result handling pickling and communication errors .
58
9
245,468
def process(*args, **kwargs):
    """Decorator running the wrapped function in a concurrent process.

    Supports bare ``@process`` usage and ``@process(timeout=...)`` usage.
    """
    timeout = kwargs.get('timeout')
    # Bare decorator: the single positional argument is the function.
    if len(args) == 1 and not kwargs and callable(args[0]):
        return _process_wrapper(args[0], timeout)
    # Parameterized decorator: validate the timeout, then build the decorator.
    if timeout is not None and not isinstance(timeout, (int, float)):
        raise TypeError('Timeout expected to be None or integer or float')

    def decorating_function(function):
        return _process_wrapper(function, timeout)
    return decorating_function
Runs the decorated function in a concurrent process taking care of the result and error management .
135
18
245,469
def _worker_handler(future, worker, pipe, timeout):
    """Wait for the worker's result, resolve *future*, then reap the process."""
    outcome = _get_result(future, pipe, timeout)
    if not isinstance(outcome, BaseException):
        future.set_result(outcome)
    else:
        if isinstance(outcome, ProcessExpired):
            # Attach the worker's exit code for diagnostics.
            outcome.exitcode = worker.exitcode
        future.set_exception(outcome)
    if worker.is_alive():
        stop_process(worker)
Worker lifecycle manager .
93
6
245,470
def _function_handler(function, args, kwargs, pipe):
    """Run *function* in the child process and ship its result back over *pipe*."""
    # The child ignores SIGINT; cancellation is coordinated by the parent.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    outcome = process_execute(function, *args, **kwargs)
    send_result(pipe, outcome)
Runs the actual function in separate process and returns its result .
57
13
245,471
def _get_result(future, pipe, timeout):
    """Poll *pipe* for a result, translating timeout/cancellation/IPC failures.

    Returns either the received value or an exception instance describing
    why no value could be obtained.
    """
    elapsed = count(step=SLEEP_UNIT)
    try:
        while not pipe.poll(SLEEP_UNIT):
            if timeout is not None and next(elapsed) >= timeout:
                return TimeoutError('Task Timeout', timeout)
            if future.cancelled():
                return CancelledError()
        return pipe.recv()
    except (EOFError, OSError):
        # The child died without sending anything.
        return ProcessExpired('Abnormal termination')
    except Exception as error:
        return error
Waits for result and handles communication errors .
119
9
245,472
def _trampoline(name, module, *args, **kwargs):
    """Resolve the registered function *name* from *module* and invoke it."""
    target = _function_lookup(name, module)
    return target(*args, **kwargs)
Trampoline function for decorators .
44
8
245,473
def _function_lookup(name, module):
    """Fetch a registered function, importing *module* to force registration if missing."""
    try:
        return _registered_functions[name]
    except KeyError:
        # Importing the module runs its decorators, which register the function.
        __import__(module)
        mod = sys.modules[module]
        getattr(mod, name)
        return _registered_functions[name]
Searches the function between the registered ones . If not found it imports the module forcing its registration .
61
21
245,474
def worker_process(params, channel):
    """Process-pool worker loop: initialize, then fetch and execute tasks."""
    signal(SIGINT, SIG_IGN)
    if params.initializer is not None:
        if not run_initializer(params.initializer, params.initargs):
            os._exit(1)
    try:
        for task in worker_get_next_task(channel, params.max_tasks):
            payload = task.payload
            result = process_execute(payload.function, *payload.args, **payload.kwargs)
            send_result(channel, Result(task.id, result))
    except (EnvironmentError, OSError, RuntimeError) as error:
        os._exit(error.errno if error.errno else 1)
    except EOFError:
        # The channel was closed: clean shutdown.
        os._exit(0)
The worker process routines .
164
5
245,475
def task_transaction(channel):
    """Atomically fetch a task from *channel* and acknowledge it."""
    with channel.lock:
        if not channel.poll(0):
            # Another worker consumed the task between wakeup and lock.
            raise RuntimeError("Race condition between workers")
        task = channel.recv()
        channel.send(Acknowledgement(os.getpid(), task.id))
    return task
Ensures a task is fetched and acknowledged atomically .
65
13
245,476
def schedule(self, task):
    """Schedule a new Task in the PoolManager: register it, then dispatch it."""
    self.task_manager.register(task)
    self.worker_manager.dispatch(task)
Schedules a new Task in the PoolManager .
28
11
245,477
def process_next_message(self, timeout):
    """Receive one message from the workers and route it to the task manager."""
    message = self.worker_manager.receive(timeout)
    if isinstance(message, Acknowledgement):
        # A worker picked up the task.
        self.task_manager.task_start(message.task, message.worker)
    elif isinstance(message, Result):
        # A worker finished the task.
        self.task_manager.task_done(message.task, message.result)
Processes the next message coming from the workers .
81
10
245,478
def update_tasks(self):
    """Fail timed-out and cancelled tasks, stopping the workers that ran them."""
    manager = self.task_manager
    workers = self.worker_manager
    for task in manager.timeout_tasks():
        manager.task_done(task.id, TimeoutError("Task timeout", task.timeout))
        workers.stop_worker(task.worker_id)
    for task in manager.cancelled_tasks():
        manager.task_done(task.id, CancelledError())
        workers.stop_worker(task.worker_id)
Handles timing out Tasks .
121
7
245,479
def update_workers(self):
    """Reap unexpectedly terminated workers and spawn replacements."""
    for expiration in self.worker_manager.inspect_workers():
        self.handle_worker_expiration(expiration)
    self.worker_manager.create_workers()
Handles unexpected processes termination .
45
6
245,480
def task_done(self, task_id, result):
    """Resolve the task's future with *result* and run the done callback."""
    try:
        task = self.tasks.pop(task_id)
    except KeyError:
        # Result of a previously timed-out task: nothing left to resolve.
        return
    if task.future.cancelled():
        # NOTE(review): original calls task.set_running_or_notify_cancel();
        # presumably the Task delegates to its future -- confirm upstream.
        task.set_running_or_notify_cancel()
    elif isinstance(result, BaseException):
        task.future.set_exception(result)
    else:
        task.future.set_result(result)
    self.task_done_callback()
Set the tasks result and run the callback .
108
9
245,481
def inspect_workers(self):
    """Drop dead workers from the registry; report abnormal exits as (pid, exitcode)."""
    snapshot = tuple(self.workers.values())
    expired = tuple(worker for worker in snapshot if not worker.is_alive())
    for worker in expired:
        self.workers.pop(worker.pid)
    # Only non-zero exit codes are "unexpected" terminations.
    return ((worker.pid, worker.exitcode) for worker in expired if worker.exitcode != 0)
Updates the workers status .
79
6
245,482
def iter_chunks(chunksize, *iterables):
    """Yield tuples of up to *chunksize* zipped elements from *iterables*."""
    zipped = iter(zip(*iterables))
    while True:
        chunk = tuple(islice(zipped, chunksize))
        if not chunk:
            return
        yield chunk
Iterates over zipped iterables in chunks .
51
10
245,483
def run_initializer(initializer, initargs):
    """Run the pool initializer; return True on success, log and return False on error."""
    try:
        initializer(*initargs)
    except Exception as error:
        # Deliberately broad: any initializer failure must not crash the pool.
        logging.exception(error)
        return False
    return True
Runs the Pool initializer dealing with errors .
37
10
245,484
def join(self, timeout=None):
    """Join the pool, waiting until all workers have exited."""
    if self._context.state == RUNNING:
        raise RuntimeError('The Pool is still running')
    if self._context.state == CLOSED:
        # A closed pool drains its queue before stopping, then re-joins.
        self._wait_queue_depletion(timeout)
        self.stop()
        self.join()
    else:
        # Already stopping: wake the workers with a sentinel and tear down.
        self._context.task_queue.put(None)
        self._stop_pool()
Joins the pool waiting until all workers exited .
90
10
245,485
def thread(function):
    """Decorator running *function* in a concurrent thread, returning a Future."""
    @wraps(function)
    def wrapper(*args, **kwargs):
        result_future = Future()
        launch_thread(_function_handler, function, args, kwargs, result_future)
        return result_future
    return wrapper
Runs the decorated function within a concurrent thread taking care of the result and error management .
52
18
245,486
def _function_handler(function, args, kwargs, future):
    """Execute *function* in the current thread, storing its outcome in *future*."""
    future.set_running_or_notify_cancel()
    try:
        outcome = function(*args, **kwargs)
    except BaseException as error:
        # Preserve the traceback text for remote consumers.
        error.traceback = format_exc()
        future.set_exception(error)
    else:
        future.set_result(outcome)
Runs the actual function in separate thread and returns its result .
82
13
245,487
def create_cities_csv(filename="places2k.txt", output="cities.csv"):
    """Extract plain city names (50 states only) from a USPS places2k file.

    Writes one name per line to *output*.
    """
    with open(filename, 'r') as city_file, open(output, 'w') as out:
        for line in city_file:
            # Skip Puerto Rico entries; we only want the 50 states.
            if line[0:2] == "PR":
                continue
            # Per census.gov, characters 9-72 hold the place name; drop the
            # trailing designation word ("city", "town", etc.).
            out.write(" ".join(line[9:72].split()[:-1]) + '\n')
Takes the places2k . txt from USPS and creates a simple file of all cities .
169
20
245,488
def parse_address(self, address, line_number=-1):
    """Build an Address from *address*, sharing this parser's suffix/city/street data."""
    return Address(address, self, line_number, self.logger)
Return an Address object from the given address . Passes itself to the Address constructor to use all the custom loaded suffixes cities etc .
32
27
245,489
def load_cities(self, filename):
    """Load known cities (one per line) into ``self.cities``, lowercased for matching."""
    with open(filename, 'r') as city_file:
        for line in city_file:
            self.cities.append(line.strip().lower())
Load up all cities in lowercase for easier matching . The file should have one city per line with no extra characters . This isn t strictly required but will vastly increase the accuracy .
44
36
245,490
def load_streets(self, filename):
    """Load known streets (one per line) into ``self.streets``, lowercased for matching."""
    with open(filename, 'r') as street_file:
        for line in street_file:
            self.streets.append(line.strip().lower())
Load up all streets in lowercase for easier matching . The file should have one street per line with no extra characters . This isn t strictly required but will vastly increase the accuracy .
44
36
245,491
def preprocess_address(self, address):
    """Clean *address* and strip apartment/unit designators before tokenizing.

    Matched designators are stored on ``self.apartment``; the cleaned
    address string is returned.
    """
    # Run some basic cleaning.
    address = address.replace("# ", "#")
    address = address.replace(" & ", "&")
    # Clear the address of things like 'X units', which shouldn't be in an address anyway.
    if re.search(r"-?-?\w+ units", address, re.IGNORECASE):
        address = re.sub(r"-?-?\w+ units", "", address, flags=re.IGNORECASE)
    # Strip apartment parts with sure-match regexes so words like "Unit"
    # cannot later be mistaken for the street name.
    apartment_regexes = [r'#\w+ & \w+', '#\w+ rm \w+', "#\w+-\w", r'apt #{0,1}\w+',
                         r'apartment #{0,1}\w+', r'#\w+', r'# \w+', r'rm \w+',
                         r'unit #?\w+', r'units #?\w+', r'- #{0,1}\w+',
                         r'no\s?\d+\w*', r'style\s\w{1,2}', r'townhouse style\s\w{1,2}']
    for regex in apartment_regexes:
        apartment_match = re.search(regex, address, re.IGNORECASE)
        if apartment_match:
            self.apartment = self._clean(apartment_match.group())
            address = re.sub(regex, "", address, flags=re.IGNORECASE)
    # Collapse ", ," sequences which throw off dstk.
    address = re.sub(r"\,\s*\,", ",", address)
    return address
Takes a basic address and attempts to clean it up extract reasonably assured bits that may throw off the rest of the parsing and return the cleaned address .
551
30
245,492
def check_state(self, token):
    """Try to match *token* as a US state; must run before the suffix check."""
    def matches(candidate):
        # Accept either a full state name (keys) or an abbreviation (values).
        states = self.parser.states
        if candidate.capitalize() in states.keys():
            self.state = self._clean(states[candidate.capitalize()])
            return True
        if candidate.upper() in states.values():
            self.state = self._clean(candidate.upper())
            return True
        return False

    if len(token) == 2 and self.state is None and matches(token):
        return True
    if (self.state is None and self.street_suffix is None
            and len(self.comma_separated_address) > 1 and matches(token)):
        return True
    return False
Check if state is in either the keys or values of our states list . Must come before the suffix .
216
21
245,493
def check_city(self, token):
    """Check whether the token is a known city from our city list.

    Must come before the suffix check. Handles three situations: a city
    token right after a found state, a city in a comma-separated address
    before any apartment/suffix is found, and extending an already-found
    city into a multi-word city name (including the 'saint' -> 'st.'
    shortening).

    Returns:
        True when the token was consumed as (part of) a city, else False.
    """
    shortened_cities = {'saint': 'st.'}
    # City directly following a found state.
    if self.city is None and self.state is not None and self.street_suffix is None:
        if token.lower() in self.parser.cities:
            self.city = self._clean(token.capitalize())
            return True
        return False
    # Check that we're in the correct location, and that we have at least
    # one comma in the address.
    if self.city is None and self.apartment is None and self.street_suffix is None and len(self.comma_separated_address) > 1:
        if token.lower() in self.parser.cities:
            self.city = self._clean(token.capitalize())
            return True
        return False
    # Multi word cities (tokens are prepended to the city found so far).
    # Python 2 debug print statements removed here.
    if self.city is not None and self.street_suffix is None and self.street is None:
        if token.lower() + ' ' + self.city in self.parser.cities:
            self.city = self._clean((token.lower() + ' ' + self.city).capitalize())
            return True
        if token.lower() in shortened_cities.keys():
            token = shortened_cities[token.lower()]
            if token.lower() + ' ' + self.city.lower() in self.parser.cities:
                self.city = self._clean(token.capitalize() + ' ' + self.city.capitalize())
                return True
    # Explicit False instead of the original implicit None fall-through.
    return False
Check if there is a known city from our city list . Must come before the suffix .
381
18
245,494
def check_street_suffix(self, token):
    """Attempt to match a street suffix. On success, stores the
    abbreviation with the first letter capitalized and a trailing period
    (e.g. 'St.' or 'Ave.') and returns True."""
    # The suffix must be found before the street name is.
    if self.street_suffix is not None or self.street is not None:
        return False
    lookup = token.upper()
    suffix_map = self.parser.suffixes
    if lookup in suffix_map.keys():
        # Long form (e.g. 'STREET') maps to its abbreviation.
        self.street_suffix = self._clean(suffix_map[lookup].capitalize() + '.')
        return True
    if lookup in suffix_map.values():
        # Token is already an abbreviation.
        self.street_suffix = self._clean(token.capitalize() + '.')
        return True
    return False
Attempts to match a street suffix . If found it will return the abbreviation with the first letter capitalized and a period after it . E . g . St . or Ave .
172
36
245,495
def check_street(self, token):
    """Record a street-name token. A street is assumed to sit between a
    prefix and a suffix; multi-word names are built by prepending, which
    implies tokens arrive after the suffix has been seen. Suffix-less
    streets are recognized from the parser's known street list."""
    no_prefix_or_number = self.street_prefix is None and self.house_number is None
    if self.street_suffix is not None and no_prefix_or_number:
        if self.street is None:
            # First word of the street name.
            self.street = self._clean(token.capitalize())
        else:
            # Additional word: prepend to what we already have.
            self.street = self._clean(token.capitalize() + ' ' + self.street)
        return True
    # Suffix-less street from the known street list; stored verbatim.
    if not self.street_suffix and not self.street and token.lower() in self.parser.streets:
        self.street = self._clean(token)
        return True
    return False
Let's assume a street comes before a prefix and after a suffix. This isn't always the case, but we'll deal with that in our guessing game. Also handles two-word street names.
200
39
245,496
def check_street_prefix(self, token):
    """Find street prefixes such as 'N.' or 'Northwest' before a street
    name, standardizing them via the parser's prefix map to one or two
    letters followed by a period."""
    # Only meaningful once a street exists and no prefix is set yet.
    if not self.street or self.street_prefix:
        return False
    key = token.lower().replace('.', '')
    if key in self.parser.prefixes.keys():
        self.street_prefix = self._clean(self.parser.prefixes[key])
        return True
    return False
Finds street prefixes such as N . or Northwest before a street name . Standardizes to 1 or two letters followed by a period .
86
28
245,497
def check_house_number(self, token):
    """Attempt to find a house number, generally the first thing in an
    address. Fractional ('1/2') and ranged ('12-14') numbers keep only
    the part before the separator. ``street_num_regex`` is a module-level
    pattern defined elsewhere in this file."""
    if self.street and self.house_number is None and re.match(street_num_regex, token.lower()):
        # Trim at the first fraction or range separator, in that order.
        for separator in ('/', '-'):
            if separator in token:
                token = token.split(separator)[0]
        self.house_number = self._clean(str(token))
        return True
    return False
Attempts to find a house number, generally the first thing in an address. If anything is in front of it, we assume it is a building name.
99
29
245,498
def check_building(self, token):
    """Building name check: once the street and house number are both set,
    leftover tokens are treated as building-name words, accumulated by
    prepending (allows multi-word building names)."""
    if not (self.street and self.house_number):
        return False
    combined = token if not self.building else token + ' ' + self.building
    self.building = self._clean(combined)
    return True
Building name check. If we have leftover tokens and everything else is set, they are probably building names. Allows for multi-word building names.
62
24
245,499
def guess_unmatched(self, token):
    """When a token matches nothing else, make an educated guess.

    Apartment keywords and very short tokens are rejected; a stray dash is
    silently consumed; otherwise, when no street parts have been found yet,
    the token is assumed to be a suffix-less street name.

    Returns:
        True when the token was consumed, else False.
    """
    # Check if this is probably an apartment keyword whose number was
    # already stripped off.
    if token.lower() in ['apt', 'apartment']:
        return False
    # Stray dashes are likely useless.
    if token.strip() == '-':
        return True
    # Almost definitely not a street if it is one or two characters long.
    if len(token) <= 2:
        return False
    # Let's check for a suffix-less street.
    if (self.street_suffix is None and self.street is None
            and self.street_prefix is None and self.house_number is None):
        # Streets will just be letters.
        if re.match(r"[A-Za-z]", token):
            # Dead debug branch on self.line_number removed: both arms
            # were `pass`, so it had no effect.
            self.street = self._clean(token.capitalize())
            return True
    return False
When we find something that doesn't match, we can make an educated guess and log it as such.
218
20