repository_name
stringclasses
316 values
func_path_in_repository
stringlengths
6
223
func_name
stringlengths
1
134
language
stringclasses
1 value
func_code_string
stringlengths
57
65.5k
func_documentation_string
stringlengths
1
46.3k
split_name
stringclasses
1 value
func_code_url
stringlengths
91
315
called_functions
listlengths
1
156
enclosing_scope
stringlengths
2
1.48M
aacanakin/glim
glim/core.py
Facade.register
python
def register(cls, config={}): if cls.accessor is not None: if cls.instance is None: cls.instance = cls.accessor(config)
This function is basically a shortcut of boot for accessors that have only the config dict argument. Args ---- config (dict): the configuration dictionary
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/core.py#L167-L178
null
class Facade(MetaMixin): """ This magical class is basically a singleton implementation without using any kind of singleton :) It's used to register glim framework instances for only once and reach the class without disturbing readability. """ instance = None # accessor is the object which will be registered during runtime accessor = None @classmethod def boot(cls, *args, **kwargs): """ Function creates the instance of accessor with dynamic positional & keyword arguments. Args ---- args (positional arguments): the positional arguments that are passed to the class of accessor. kwargs (keyword arguments): the keyword arguments that are passed to the class of accessor. """ if cls.accessor is not None: if cls.instance is None: cls.instance = cls.accessor(*args, **kwargs) @classmethod @classmethod def _get(cls): """Function returns the instance""" return cls.instance
aacanakin/glim
glim/cli.py
main
python
def main(): # register the global parser preparser = argparse.ArgumentParser(description=description, add_help=False) preparser.add_argument('--env', '-e', dest='env', default='development', help='choose application environment') # parse existing options namespace, extra = preparser.parse_known_args() env = namespace.env # register the subparsers parser = argparse.ArgumentParser(parents=[preparser], description=description, add_help=True) subparsers = parser.add_subparsers(title='commands', help='commands') # initialize a command adapter with subparsers commandadapter = CommandAdapter(subparsers) # register glim commands commandadapter.register(glim.commands) # register app commands appcommands = import_module('app.commands', pass_errors=True) commandadapter.register(appcommands) app = None if paths.app_exists() is False: # check if a new app is being created new = True if 'new' in extra else False if ('help' in extra) or ('--help' in extra) or ('-h' in extra): help = True else: help = False if help: parser.print_help() exit() else: app = make_app(env, commandadapter) args = parser.parse_args() command = commandadapter.match(args) commandadapter.dispatch(command, app)
The single entry point to glim command line interface.Main method is called from pypi console_scripts key or by glim.py on root.This function initializes a new app given the glim commands and app commands if app exists. Usage ----- $ python glim/cli.py start $ python glim.py start (on root folder)
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/cli.py#L35-L96
[ "def import_module(module, pass_errors=False):\n \"\"\"\n Function imports a module given module name\n\n Args\n ----\n module (string): the module name\n pass_errors(boolean): the switch for function\n to skip errors or not.\n\n Returns\n -------\n module (module): the module object.\n\n Raises\n ------\n exception (Exception): any kind of exceptions during importing.\n import_error(ImportError): import errors during importing.\n\n Note:\n pass_errors switch will not pass any errors other than ImportError\n \"\"\"\n frm = module.split('.')\n try:\n m = __import__(module, fromlist=[frm[1]])\n return m\n except ImportError as e:\n if pass_errors:\n return None\n else:\n print(traceback.format_exc())\n return None\n except Exception as e:\n print(traceback.format_exc())\n return None\n", "def app_exists():\n return os.path.exists(APP_PATH)\n", "def make_app(env, commandadapter=None):\n \"\"\"\n Function creates an app given environment\n \"\"\"\n mconfig = import_module('app.config.%s' % env, pass_errors=True)\n if mconfig is None and paths.app_exists():\n print(colored('Configuration for \"%s\" environment is not found' % env, 'red'))\n return None\n mstart = import_module('app.start')\n mroutes = import_module('app.routes')\n mcontrollers = import_module('app.controllers')\n before = mstart.before\n\n return Glim(commandadapter, mconfig, mroutes, mcontrollers, env, before)\n", "def register(self, module):\n \"\"\"\n Function registers into self.commands from module.\n\n Args\n ----\n module (module): The module name.\n \"\"\"\n if module is not None:\n cmds = self.retrieve_commands(module)\n\n for c in cmds:\n if self.valid_name(c.name):\n cmd = c(self.subparsers)\n self.commands.append(cmd)\n else:\n print(colored(\"Warning: Command %s has empty name. 
It won't be registered\"\n % c, 'yellow'))\n", "def match(self, args):\n \"\"\"\n Function dispatches the active command line utility.\n\n Args\n ----\n args (argparse.parse_args()):\n The parsed arguments using parser.parse_args() function.\n\n Returns\n -------\n command (glim.command.Command): the active command object.\n \"\"\"\n command = None\n for c in self.commands:\n if c.name == args.which:\n c.args = args\n command = c\n break\n return command\n", "def dispatch(self, command, app):\n \"\"\"\n Function runs the active command.\n\n Args\n ----\n command (glim.command.Command): the command object.\n app (glim.app.App): the glim app object.\n\n Note:\n Exception handling should be done in Command class\n itself. If not, an unhandled exception may result\n in app crash!\n \"\"\"\n if self.is_glimcommand(command):\n command.run(app)\n else:\n command.run()\n" ]
#!/usr/bin/env python # _ # | (_) # __ _| |_ _ __ ___ # / _` | | | '_ ` _ \ # | (_| | | | | | | | | # \__, |_|_|_| |_| |_| # __/ | # |___/ # # # A modern python framework for the web __author__ = "Aras Can Akin" from . import paths paths.configure() from termcolor import colored from glim.app import Glim from glim.utils import import_module from glim.command import CommandAdapter import glim.commands import traceback import argparse import os import sys description = "glim ~ a modern python framework for the web" def make_app(env, commandadapter=None): """ Function creates an app given environment """ mconfig = import_module('app.config.%s' % env, pass_errors=True) if mconfig is None and paths.app_exists(): print(colored('Configuration for "%s" environment is not found' % env, 'red')) return None mstart = import_module('app.start') mroutes = import_module('app.routes') mcontrollers = import_module('app.controllers') before = mstart.before return Glim(commandadapter, mconfig, mroutes, mcontrollers, env, before) if __name__ == '__main__': main()
aacanakin/glim
glim/cli.py
make_app
python
def make_app(env, commandadapter=None): mconfig = import_module('app.config.%s' % env, pass_errors=True) if mconfig is None and paths.app_exists(): print(colored('Configuration for "%s" environment is not found' % env, 'red')) return None mstart = import_module('app.start') mroutes = import_module('app.routes') mcontrollers = import_module('app.controllers') before = mstart.before return Glim(commandadapter, mconfig, mroutes, mcontrollers, env, before)
Function creates an app given environment
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/cli.py#L98-L111
[ "def import_module(module, pass_errors=False):\n \"\"\"\n Function imports a module given module name\n\n Args\n ----\n module (string): the module name\n pass_errors(boolean): the switch for function\n to skip errors or not.\n\n Returns\n -------\n module (module): the module object.\n\n Raises\n ------\n exception (Exception): any kind of exceptions during importing.\n import_error(ImportError): import errors during importing.\n\n Note:\n pass_errors switch will not pass any errors other than ImportError\n \"\"\"\n frm = module.split('.')\n try:\n m = __import__(module, fromlist=[frm[1]])\n return m\n except ImportError as e:\n if pass_errors:\n return None\n else:\n print(traceback.format_exc())\n return None\n except Exception as e:\n print(traceback.format_exc())\n return None\n", "def app_exists():\n return os.path.exists(APP_PATH)\n" ]
#!/usr/bin/env python # _ # | (_) # __ _| |_ _ __ ___ # / _` | | | '_ ` _ \ # | (_| | | | | | | | | # \__, |_|_|_| |_| |_| # __/ | # |___/ # # # A modern python framework for the web __author__ = "Aras Can Akin" from . import paths paths.configure() from termcolor import colored from glim.app import Glim from glim.utils import import_module from glim.command import CommandAdapter import glim.commands import traceback import argparse import os import sys description = "glim ~ a modern python framework for the web" def main(): """ The single entry point to glim command line interface.Main method is called from pypi console_scripts key or by glim.py on root.This function initializes a new app given the glim commands and app commands if app exists. Usage ----- $ python glim/cli.py start $ python glim.py start (on root folder) """ # register the global parser preparser = argparse.ArgumentParser(description=description, add_help=False) preparser.add_argument('--env', '-e', dest='env', default='development', help='choose application environment') # parse existing options namespace, extra = preparser.parse_known_args() env = namespace.env # register the subparsers parser = argparse.ArgumentParser(parents=[preparser], description=description, add_help=True) subparsers = parser.add_subparsers(title='commands', help='commands') # initialize a command adapter with subparsers commandadapter = CommandAdapter(subparsers) # register glim commands commandadapter.register(glim.commands) # register app commands appcommands = import_module('app.commands', pass_errors=True) commandadapter.register(appcommands) app = None if paths.app_exists() is False: # check if a new app is being created new = True if 'new' in extra else False if ('help' in extra) or ('--help' in extra) or ('-h' in extra): help = True else: help = False if help: parser.print_help() exit() else: app = make_app(env, commandadapter) args = parser.parse_args() command = commandadapter.match(args) 
commandadapter.dispatch(command, app) if __name__ == '__main__': main()
aacanakin/glim
glim/app.py
Glim.register_routes
python
def register_routes(self): routes = self.flatten_urls(self.urls) self.controllers = {} controller_names = set() for route in routes: cname = route['endpoint'].split('.')[0] controller_names.add(cname) for cname in controller_names: attr = getattr(self.mcontrollers, cname) instance = attr(request, response) self.controllers[cname] = instance for route in routes: cname, aname = route['endpoint'].split('.') action = getattr(self.controllers[cname], aname) self.wsgi.route(route['url'], route['methods'], action)
Function creates instances of controllers, adds into bottle routes
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/app.py#L62-L82
[ "def flatten_urls(self, urls):\n \"\"\"\n Function flatten urls for route grouping feature of glim.\n\n Args\n ----\n urls (dict): a dict of url definitions.\n current_key (unknown type): a dict or a string marking the\n current key that is used for recursive calls.\n ruleset (dict): the ruleset that is eventually returned to\n dispatcher.\n\n Returns\n -------\n ruleset (list): a list of ruleset dict with endpoint, url and method functions\n \"\"\"\n available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY']\n ruleset = []\n for route, endpoint in urls.items():\n route_pieces = route.split(' ')\n try:\n methods = url = None\n if len(route_pieces) > 1:\n methods = [route_pieces[0]]\n url = route_pieces[1]\n else:\n methods = available_methods\n url = route_pieces[0]\n\n endpoint_pieces = endpoint.split('.')\n\n if len(endpoint_pieces) > 1:\n rule = {'url': url, 'endpoint': endpoint, 'methods': methods}\n ruleset.append(rule)\n else:\n for method in available_methods:\n rule = {\n 'url': url,\n 'endpoint': '%s.%s' % (endpoint, method.lower()),\n 'methods': [method]\n }\n ruleset.append(rule)\n except Exception as e:\n raise InvalidRouteDefinitionError()\n return ruleset\n" ]
class Glim(object): """ This class is responsible for registering the components of a typical glim framework app Attributes ---------- commandadapter (glim.command.CommandAdapter): The commandadapter object which is responsible for dispatching commands env (string): application environment variable passed from command line mconfig (module): The configuration module imported from app.config.<env> config (dict): The configuration dictionary by environment which resides in app.config.<env> before (method): The before hook function for registering a function before app starts """ def __init__(self, commandadapter, mconfig=None, mroutes=None, mcontrollers=None, env='default', before=None): # register app self.commandadapter = commandadapter self.config = mconfig.config self.urls = mroutes.urls self.mcontrollers = mcontrollers; self.wsgi = Bottle() self.register_config() self.register_log() self.register_extensions() self.register_ssl_context() self.register_routes() self.before = before self.before() def register_config(self): """ Function registers the Config facade using Config(Registry). """ Config.register(self.config) def register_extensions(self): """ Function registers extensions given extensions list Args ---- extensions (list) : the extensions dict on app.config.<env> Raises ------ Exception: Raises exception when extension can't be loaded properly. 
""" try: for extension, config in self.config['extensions'].items(): extension_bstr = '' # gather package name if exists extension_pieces = extension.split('.') # if the extensions is not in glim_extensions package if len(extension_pieces) > 1: extension_bstr = '.'.join(extension_pieces) else: # if the extension is in glim_extensions package extension_bstr = 'glim_extensions.%s' % extension_pieces[0] extension_module = import_module(extension_bstr) if extension_module: extension_startstr = '%s.%s' % (extension_bstr, 'start') extension_start = import_module(extension_startstr, pass_errors=True) extension_cmdsstr = '%s.%s' % (extension_bstr, 'commands') extension_cmds = import_module(extension_cmdsstr, pass_errors=True) if extension_start is not None: before = extension_start.before before(config) if extension_cmds is not None: if self.commandadapter is not None: self.commandadapter.register_extension(extension_cmds, extension_pieces[0]) else: GlimLog.error('Extension %s could not be loaded' % extension) except Exception as e: GlimLog.error(traceback.format_exc()) def register_log(self): """ Function registers Log facade using configuration in app.config.<env>. Note: The Log facade will be registered using default configuration if there isn't any 'log' key in app.config.<env>. """ if not empty('log', self.config): if not empty('glim', self.config['log']): GlimLog.boot(name='glim', config=self.config['log']['glim']) else: GlimLog.boot(name='glim') if not empty('app', self.config['log']): Log.boot(name='app', config=self.config['log']['app']) else: Log.boot(name='app') else: Log.boot(name='app') GlimLog.boot(name='glim') def register_ssl_context(self): """ Function detects ssl context """ if not empty('ssl', self.config['app']): self.ssl_context = self.config['app']['ssl'] else: self.ssl_context = None def flatten_urls(self, urls): """ Function flatten urls for route grouping feature of glim. Args ---- urls (dict): a dict of url definitions. 
current_key (unknown type): a dict or a string marking the current key that is used for recursive calls. ruleset (dict): the ruleset that is eventually returned to dispatcher. Returns ------- ruleset (list): a list of ruleset dict with endpoint, url and method functions """ available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY'] ruleset = [] for route, endpoint in urls.items(): route_pieces = route.split(' ') try: methods = url = None if len(route_pieces) > 1: methods = [route_pieces[0]] url = route_pieces[1] else: methods = available_methods url = route_pieces[0] endpoint_pieces = endpoint.split('.') if len(endpoint_pieces) > 1: rule = {'url': url, 'endpoint': endpoint, 'methods': methods} ruleset.append(rule) else: for method in available_methods: rule = { 'url': url, 'endpoint': '%s.%s' % (endpoint, method.lower()), 'methods': [method] } ruleset.append(rule) except Exception as e: raise InvalidRouteDefinitionError() return ruleset
aacanakin/glim
glim/app.py
Glim.register_extensions
python
def register_extensions(self): try: for extension, config in self.config['extensions'].items(): extension_bstr = '' # gather package name if exists extension_pieces = extension.split('.') # if the extensions is not in glim_extensions package if len(extension_pieces) > 1: extension_bstr = '.'.join(extension_pieces) else: # if the extension is in glim_extensions package extension_bstr = 'glim_extensions.%s' % extension_pieces[0] extension_module = import_module(extension_bstr) if extension_module: extension_startstr = '%s.%s' % (extension_bstr, 'start') extension_start = import_module(extension_startstr, pass_errors=True) extension_cmdsstr = '%s.%s' % (extension_bstr, 'commands') extension_cmds = import_module(extension_cmdsstr, pass_errors=True) if extension_start is not None: before = extension_start.before before(config) if extension_cmds is not None: if self.commandadapter is not None: self.commandadapter.register_extension(extension_cmds, extension_pieces[0]) else: GlimLog.error('Extension %s could not be loaded' % extension) except Exception as e: GlimLog.error(traceback.format_exc())
Function registers extensions given extensions list Args ---- extensions (list) : the extensions dict on app.config.<env> Raises ------ Exception: Raises exception when extension can't be loaded properly.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/app.py#L84-L131
[ "def import_module(module, pass_errors=False):\n \"\"\"\n Function imports a module given module name\n\n Args\n ----\n module (string): the module name\n pass_errors(boolean): the switch for function\n to skip errors or not.\n\n Returns\n -------\n module (module): the module object.\n\n Raises\n ------\n exception (Exception): any kind of exceptions during importing.\n import_error(ImportError): import errors during importing.\n\n Note:\n pass_errors switch will not pass any errors other than ImportError\n \"\"\"\n frm = module.split('.')\n try:\n m = __import__(module, fromlist=[frm[1]])\n return m\n except ImportError as e:\n if pass_errors:\n return None\n else:\n print(traceback.format_exc())\n return None\n except Exception as e:\n print(traceback.format_exc())\n return None\n" ]
class Glim(object): """ This class is responsible for registering the components of a typical glim framework app Attributes ---------- commandadapter (glim.command.CommandAdapter): The commandadapter object which is responsible for dispatching commands env (string): application environment variable passed from command line mconfig (module): The configuration module imported from app.config.<env> config (dict): The configuration dictionary by environment which resides in app.config.<env> before (method): The before hook function for registering a function before app starts """ def __init__(self, commandadapter, mconfig=None, mroutes=None, mcontrollers=None, env='default', before=None): # register app self.commandadapter = commandadapter self.config = mconfig.config self.urls = mroutes.urls self.mcontrollers = mcontrollers; self.wsgi = Bottle() self.register_config() self.register_log() self.register_extensions() self.register_ssl_context() self.register_routes() self.before = before self.before() def register_config(self): """ Function registers the Config facade using Config(Registry). """ Config.register(self.config) def register_routes(self): """ Function creates instances of controllers, adds into bottle routes """ routes = self.flatten_urls(self.urls) self.controllers = {} controller_names = set() for route in routes: cname = route['endpoint'].split('.')[0] controller_names.add(cname) for cname in controller_names: attr = getattr(self.mcontrollers, cname) instance = attr(request, response) self.controllers[cname] = instance for route in routes: cname, aname = route['endpoint'].split('.') action = getattr(self.controllers[cname], aname) self.wsgi.route(route['url'], route['methods'], action) def register_log(self): """ Function registers Log facade using configuration in app.config.<env>. Note: The Log facade will be registered using default configuration if there isn't any 'log' key in app.config.<env>. 
""" if not empty('log', self.config): if not empty('glim', self.config['log']): GlimLog.boot(name='glim', config=self.config['log']['glim']) else: GlimLog.boot(name='glim') if not empty('app', self.config['log']): Log.boot(name='app', config=self.config['log']['app']) else: Log.boot(name='app') else: Log.boot(name='app') GlimLog.boot(name='glim') def register_ssl_context(self): """ Function detects ssl context """ if not empty('ssl', self.config['app']): self.ssl_context = self.config['app']['ssl'] else: self.ssl_context = None def flatten_urls(self, urls): """ Function flatten urls for route grouping feature of glim. Args ---- urls (dict): a dict of url definitions. current_key (unknown type): a dict or a string marking the current key that is used for recursive calls. ruleset (dict): the ruleset that is eventually returned to dispatcher. Returns ------- ruleset (list): a list of ruleset dict with endpoint, url and method functions """ available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY'] ruleset = [] for route, endpoint in urls.items(): route_pieces = route.split(' ') try: methods = url = None if len(route_pieces) > 1: methods = [route_pieces[0]] url = route_pieces[1] else: methods = available_methods url = route_pieces[0] endpoint_pieces = endpoint.split('.') if len(endpoint_pieces) > 1: rule = {'url': url, 'endpoint': endpoint, 'methods': methods} ruleset.append(rule) else: for method in available_methods: rule = { 'url': url, 'endpoint': '%s.%s' % (endpoint, method.lower()), 'methods': [method] } ruleset.append(rule) except Exception as e: raise InvalidRouteDefinitionError() return ruleset
aacanakin/glim
glim/app.py
Glim.register_log
python
def register_log(self): if not empty('log', self.config): if not empty('glim', self.config['log']): GlimLog.boot(name='glim', config=self.config['log']['glim']) else: GlimLog.boot(name='glim') if not empty('app', self.config['log']): Log.boot(name='app', config=self.config['log']['app']) else: Log.boot(name='app') else: Log.boot(name='app') GlimLog.boot(name='glim')
Function registers Log facade using configuration in app.config.<env>. Note: The Log facade will be registered using default configuration if there isn't any 'log' key in app.config.<env>.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/app.py#L133-L154
[ "def empty(key, dict):\n \"\"\"\n Function determines if the dict key exists or it is empty\n\n Args\n ----\n key (string): the dict key\n dict (dict): the dict to be searched\n \"\"\"\n if key in dict.keys():\n if dict[key]:\n return False\n return True\n" ]
class Glim(object): """ This class is responsible for registering the components of a typical glim framework app Attributes ---------- commandadapter (glim.command.CommandAdapter): The commandadapter object which is responsible for dispatching commands env (string): application environment variable passed from command line mconfig (module): The configuration module imported from app.config.<env> config (dict): The configuration dictionary by environment which resides in app.config.<env> before (method): The before hook function for registering a function before app starts """ def __init__(self, commandadapter, mconfig=None, mroutes=None, mcontrollers=None, env='default', before=None): # register app self.commandadapter = commandadapter self.config = mconfig.config self.urls = mroutes.urls self.mcontrollers = mcontrollers; self.wsgi = Bottle() self.register_config() self.register_log() self.register_extensions() self.register_ssl_context() self.register_routes() self.before = before self.before() def register_config(self): """ Function registers the Config facade using Config(Registry). """ Config.register(self.config) def register_routes(self): """ Function creates instances of controllers, adds into bottle routes """ routes = self.flatten_urls(self.urls) self.controllers = {} controller_names = set() for route in routes: cname = route['endpoint'].split('.')[0] controller_names.add(cname) for cname in controller_names: attr = getattr(self.mcontrollers, cname) instance = attr(request, response) self.controllers[cname] = instance for route in routes: cname, aname = route['endpoint'].split('.') action = getattr(self.controllers[cname], aname) self.wsgi.route(route['url'], route['methods'], action) def register_extensions(self): """ Function registers extensions given extensions list Args ---- extensions (list) : the extensions dict on app.config.<env> Raises ------ Exception: Raises exception when extension can't be loaded properly. 
""" try: for extension, config in self.config['extensions'].items(): extension_bstr = '' # gather package name if exists extension_pieces = extension.split('.') # if the extensions is not in glim_extensions package if len(extension_pieces) > 1: extension_bstr = '.'.join(extension_pieces) else: # if the extension is in glim_extensions package extension_bstr = 'glim_extensions.%s' % extension_pieces[0] extension_module = import_module(extension_bstr) if extension_module: extension_startstr = '%s.%s' % (extension_bstr, 'start') extension_start = import_module(extension_startstr, pass_errors=True) extension_cmdsstr = '%s.%s' % (extension_bstr, 'commands') extension_cmds = import_module(extension_cmdsstr, pass_errors=True) if extension_start is not None: before = extension_start.before before(config) if extension_cmds is not None: if self.commandadapter is not None: self.commandadapter.register_extension(extension_cmds, extension_pieces[0]) else: GlimLog.error('Extension %s could not be loaded' % extension) except Exception as e: GlimLog.error(traceback.format_exc()) def register_ssl_context(self): """ Function detects ssl context """ if not empty('ssl', self.config['app']): self.ssl_context = self.config['app']['ssl'] else: self.ssl_context = None def flatten_urls(self, urls): """ Function flatten urls for route grouping feature of glim. Args ---- urls (dict): a dict of url definitions. current_key (unknown type): a dict or a string marking the current key that is used for recursive calls. ruleset (dict): the ruleset that is eventually returned to dispatcher. 
Returns ------- ruleset (list): a list of ruleset dict with endpoint, url and method functions """ available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY'] ruleset = [] for route, endpoint in urls.items(): route_pieces = route.split(' ') try: methods = url = None if len(route_pieces) > 1: methods = [route_pieces[0]] url = route_pieces[1] else: methods = available_methods url = route_pieces[0] endpoint_pieces = endpoint.split('.') if len(endpoint_pieces) > 1: rule = {'url': url, 'endpoint': endpoint, 'methods': methods} ruleset.append(rule) else: for method in available_methods: rule = { 'url': url, 'endpoint': '%s.%s' % (endpoint, method.lower()), 'methods': [method] } ruleset.append(rule) except Exception as e: raise InvalidRouteDefinitionError() return ruleset
aacanakin/glim
glim/app.py
Glim.register_ssl_context
python
def register_ssl_context(self): if not empty('ssl', self.config['app']): self.ssl_context = self.config['app']['ssl'] else: self.ssl_context = None
Function detects ssl context
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/app.py#L156-L163
[ "def empty(key, dict):\n \"\"\"\n Function determines if the dict key exists or it is empty\n\n Args\n ----\n key (string): the dict key\n dict (dict): the dict to be searched\n \"\"\"\n if key in dict.keys():\n if dict[key]:\n return False\n return True\n" ]
class Glim(object): """ This class is responsible for registering the components of a typical glim framework app Attributes ---------- commandadapter (glim.command.CommandAdapter): The commandadapter object which is responsible for dispatching commands env (string): application environment variable passed from command line mconfig (module): The configuration module imported from app.config.<env> config (dict): The configuration dictionary by environment which resides in app.config.<env> before (method): The before hook function for registering a function before app starts """ def __init__(self, commandadapter, mconfig=None, mroutes=None, mcontrollers=None, env='default', before=None): # register app self.commandadapter = commandadapter self.config = mconfig.config self.urls = mroutes.urls self.mcontrollers = mcontrollers; self.wsgi = Bottle() self.register_config() self.register_log() self.register_extensions() self.register_ssl_context() self.register_routes() self.before = before self.before() def register_config(self): """ Function registers the Config facade using Config(Registry). """ Config.register(self.config) def register_routes(self): """ Function creates instances of controllers, adds into bottle routes """ routes = self.flatten_urls(self.urls) self.controllers = {} controller_names = set() for route in routes: cname = route['endpoint'].split('.')[0] controller_names.add(cname) for cname in controller_names: attr = getattr(self.mcontrollers, cname) instance = attr(request, response) self.controllers[cname] = instance for route in routes: cname, aname = route['endpoint'].split('.') action = getattr(self.controllers[cname], aname) self.wsgi.route(route['url'], route['methods'], action) def register_extensions(self): """ Function registers extensions given extensions list Args ---- extensions (list) : the extensions dict on app.config.<env> Raises ------ Exception: Raises exception when extension can't be loaded properly. 
""" try: for extension, config in self.config['extensions'].items(): extension_bstr = '' # gather package name if exists extension_pieces = extension.split('.') # if the extensions is not in glim_extensions package if len(extension_pieces) > 1: extension_bstr = '.'.join(extension_pieces) else: # if the extension is in glim_extensions package extension_bstr = 'glim_extensions.%s' % extension_pieces[0] extension_module = import_module(extension_bstr) if extension_module: extension_startstr = '%s.%s' % (extension_bstr, 'start') extension_start = import_module(extension_startstr, pass_errors=True) extension_cmdsstr = '%s.%s' % (extension_bstr, 'commands') extension_cmds = import_module(extension_cmdsstr, pass_errors=True) if extension_start is not None: before = extension_start.before before(config) if extension_cmds is not None: if self.commandadapter is not None: self.commandadapter.register_extension(extension_cmds, extension_pieces[0]) else: GlimLog.error('Extension %s could not be loaded' % extension) except Exception as e: GlimLog.error(traceback.format_exc()) def register_log(self): """ Function registers Log facade using configuration in app.config.<env>. Note: The Log facade will be registered using default configuration if there isn't any 'log' key in app.config.<env>. """ if not empty('log', self.config): if not empty('glim', self.config['log']): GlimLog.boot(name='glim', config=self.config['log']['glim']) else: GlimLog.boot(name='glim') if not empty('app', self.config['log']): Log.boot(name='app', config=self.config['log']['app']) else: Log.boot(name='app') else: Log.boot(name='app') GlimLog.boot(name='glim') def flatten_urls(self, urls): """ Function flatten urls for route grouping feature of glim. Args ---- urls (dict): a dict of url definitions. current_key (unknown type): a dict or a string marking the current key that is used for recursive calls. ruleset (dict): the ruleset that is eventually returned to dispatcher. 
Returns ------- ruleset (list): a list of ruleset dict with endpoint, url and method functions """ available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY'] ruleset = [] for route, endpoint in urls.items(): route_pieces = route.split(' ') try: methods = url = None if len(route_pieces) > 1: methods = [route_pieces[0]] url = route_pieces[1] else: methods = available_methods url = route_pieces[0] endpoint_pieces = endpoint.split('.') if len(endpoint_pieces) > 1: rule = {'url': url, 'endpoint': endpoint, 'methods': methods} ruleset.append(rule) else: for method in available_methods: rule = { 'url': url, 'endpoint': '%s.%s' % (endpoint, method.lower()), 'methods': [method] } ruleset.append(rule) except Exception as e: raise InvalidRouteDefinitionError() return ruleset
aacanakin/glim
glim/app.py
Glim.flatten_urls
python
def flatten_urls(self, urls): available_methods = ['POST', 'PUT', 'OPTIONS', 'GET', 'DELETE', 'TRACE', 'COPY'] ruleset = [] for route, endpoint in urls.items(): route_pieces = route.split(' ') try: methods = url = None if len(route_pieces) > 1: methods = [route_pieces[0]] url = route_pieces[1] else: methods = available_methods url = route_pieces[0] endpoint_pieces = endpoint.split('.') if len(endpoint_pieces) > 1: rule = {'url': url, 'endpoint': endpoint, 'methods': methods} ruleset.append(rule) else: for method in available_methods: rule = { 'url': url, 'endpoint': '%s.%s' % (endpoint, method.lower()), 'methods': [method] } ruleset.append(rule) except Exception as e: raise InvalidRouteDefinitionError() return ruleset
Function flatten urls for route grouping feature of glim. Args ---- urls (dict): a dict of url definitions. current_key (unknown type): a dict or a string marking the current key that is used for recursive calls. ruleset (dict): the ruleset that is eventually returned to dispatcher. Returns ------- ruleset (list): a list of ruleset dict with endpoint, url and method functions
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/app.py#L165-L209
null
class Glim(object): """ This class is responsible for registering the components of a typical glim framework app Attributes ---------- commandadapter (glim.command.CommandAdapter): The commandadapter object which is responsible for dispatching commands env (string): application environment variable passed from command line mconfig (module): The configuration module imported from app.config.<env> config (dict): The configuration dictionary by environment which resides in app.config.<env> before (method): The before hook function for registering a function before app starts """ def __init__(self, commandadapter, mconfig=None, mroutes=None, mcontrollers=None, env='default', before=None): # register app self.commandadapter = commandadapter self.config = mconfig.config self.urls = mroutes.urls self.mcontrollers = mcontrollers; self.wsgi = Bottle() self.register_config() self.register_log() self.register_extensions() self.register_ssl_context() self.register_routes() self.before = before self.before() def register_config(self): """ Function registers the Config facade using Config(Registry). """ Config.register(self.config) def register_routes(self): """ Function creates instances of controllers, adds into bottle routes """ routes = self.flatten_urls(self.urls) self.controllers = {} controller_names = set() for route in routes: cname = route['endpoint'].split('.')[0] controller_names.add(cname) for cname in controller_names: attr = getattr(self.mcontrollers, cname) instance = attr(request, response) self.controllers[cname] = instance for route in routes: cname, aname = route['endpoint'].split('.') action = getattr(self.controllers[cname], aname) self.wsgi.route(route['url'], route['methods'], action) def register_extensions(self): """ Function registers extensions given extensions list Args ---- extensions (list) : the extensions dict on app.config.<env> Raises ------ Exception: Raises exception when extension can't be loaded properly. 
""" try: for extension, config in self.config['extensions'].items(): extension_bstr = '' # gather package name if exists extension_pieces = extension.split('.') # if the extensions is not in glim_extensions package if len(extension_pieces) > 1: extension_bstr = '.'.join(extension_pieces) else: # if the extension is in glim_extensions package extension_bstr = 'glim_extensions.%s' % extension_pieces[0] extension_module = import_module(extension_bstr) if extension_module: extension_startstr = '%s.%s' % (extension_bstr, 'start') extension_start = import_module(extension_startstr, pass_errors=True) extension_cmdsstr = '%s.%s' % (extension_bstr, 'commands') extension_cmds = import_module(extension_cmdsstr, pass_errors=True) if extension_start is not None: before = extension_start.before before(config) if extension_cmds is not None: if self.commandadapter is not None: self.commandadapter.register_extension(extension_cmds, extension_pieces[0]) else: GlimLog.error('Extension %s could not be loaded' % extension) except Exception as e: GlimLog.error(traceback.format_exc()) def register_log(self): """ Function registers Log facade using configuration in app.config.<env>. Note: The Log facade will be registered using default configuration if there isn't any 'log' key in app.config.<env>. """ if not empty('log', self.config): if not empty('glim', self.config['log']): GlimLog.boot(name='glim', config=self.config['log']['glim']) else: GlimLog.boot(name='glim') if not empty('app', self.config['log']): Log.boot(name='app', config=self.config['log']['app']) else: Log.boot(name='app') else: Log.boot(name='app') GlimLog.boot(name='glim') def register_ssl_context(self): """ Function detects ssl context """ if not empty('ssl', self.config['app']): self.ssl_context = self.config['app']['ssl'] else: self.ssl_context = None
aacanakin/glim
glim/commands.py
NewCommand.run
python
def run(self, app): project_path = os.getcwd() if self.args.name is not None: project_path = os.path.join(project_path, self.args.name) proto_path = paths.PROTO_PATH try: copytree(proto_path, project_path) print(colored('A new glim app created successfully! Happy coding :)', 'green')) except FolderExistsError as e: print(e) print(colored('App already exists', 'red'))
Function copies the prototype folder into os.getcwd() path.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/commands.py#L39-L52
[ "def copytree(src, dst, symlinks=False, ignore=None):\n \"\"\"\n Function recursively copies from directory to directory.\n\n Args\n ----\n src (string): the full path of source directory\n dst (string): the full path of destination directory\n symlinks (boolean): the switch for tracking symlinks\n ignore (list): the ignore list\n \"\"\"\n if not os.path.exists(dst):\n os.mkdir(dst)\n try:\n for item in os.listdir(src):\n s = os.path.join(src, item)\n d = os.path.join(dst, item)\n if os.path.isdir(s):\n shutil.copytree(s, d, symlinks, ignore)\n else:\n shutil.copy2(s, d)\n except Exception as e:\n raise FolderExistsError(\"Folder already exists in %s\" % dst)\n" ]
class NewCommand(GlimCommand): """ This class is responsible for generating a new glim app. Attributes ---------- glim.command.GlimCommand Attributes """ name = 'new' description = 'generates a new glim app' def configure(self): """ Function adds the optional name argument for creating an app with project name. """ self.add_argument("name", nargs='?', help="enter project name", default=None)
aacanakin/glim
glim/commands.py
StartCommand.run
python
def run(self, app): GlimLog.info('Glim server started on %s environment' % self.args.env) try: kwargs = Config.get('app.server.options') run(app.wsgi, host=Config.get('app.server.host'), port=Config.get('app.server.port'), debug=Config.get('app.server.debugger'), reloader=Config.get('app.server.reloader'), server=Config.get('app.server.wsgi'), **kwargs) except Exception as e: print(traceback.format_exc()) exit()
Function starts the web server given configuration.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/commands.py#L66-L80
null
class StartCommand(GlimCommand): """ This class is responsible for starting wsgi of glim framework app. Attributes ---------- glim.command.GlimCommand Attributes """ name = 'start' description = 'start the glim app web server'
aacanakin/glim
glim/command.py
CommandAdapter.retrieve_commands
python
def retrieve_commands(self, module): commands = [] for name, obj in inspect.getmembers(module): if name != 'Command' and 'Command' in name: if name != 'GlimCommand': cobject = getattr(module, name) commands.append(cobject) return commands
Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function will not register any command class named "Command" or "GlimCommand". When extending Command class, be sure to have "Command" string on your custom commands.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/command.py#L33-L62
null
class CommandAdapter(object): """ This class is responsible for detecting, registering and dispatching command line utilities of glim framework. In glim, there are two types of commands namely Command and GlimCommand. The only difference of them is GlimCommand can access to app object in the runtime. The class is used for appending app.commands and glim.commands together. It is also used for extension command registering. Attributes ---------- subparsers (argparse.Subparsers): The subparsers object that provides sub commands. commands (list): A list of Command objects """ def __init__(self, subparsers): self.subparsers = subparsers self.commands = [] def valid_name(self, name): """ Function returns if command name is valid or not. Args ---- name (string): The command line utility name. Returns ------- valid (boolean): Returns true when valid, else false. """ invalid = name is None or name == '' return not invalid def register(self, module): """ Function registers into self.commands from module. Args ---- module (module): The module name. """ if module is not None: cmds = self.retrieve_commands(module) for c in cmds: if self.valid_name(c.name): cmd = c(self.subparsers) self.commands.append(cmd) else: print(colored("Warning: Command %s has empty name. It won't be registered" % c, 'yellow')) def register_extension(self, module, extension): """ Function registers into self.commands from module extension. All extension subcommands are registered using the name convention 'extension:command' Example: If you have a redis extension namely 'gredis', the extension commands can be accessed by the following; $ python glim.py gredis:ping Args ---- module (module): The module name. extension (string): The extension name. 
""" if module is not None: cmds = self.retrieve_commands(module) commands = [] for c in cmds: if self.valid_name(c.name): name = '%s:%s' % (extension, c.name) cmd = c(self.subparsers, name) self.commands.append(cmd) def match(self, args): """ Function dispatches the active command line utility. Args ---- args (argparse.parse_args()): The parsed arguments using parser.parse_args() function. Returns ------- command (glim.command.Command): the active command object. """ command = None for c in self.commands: if c.name == args.which: c.args = args command = c break return command def is_glimcommand(self, command): """ Function detects if a command is GlimCommand. Args ---- command (glim.command.Command): the command object. Returns ------- True or False """ return isinstance(command, GlimCommand) def dispatch(self, command, app): """ Function runs the active command. Args ---- command (glim.command.Command): the command object. app (glim.app.App): the glim app object. Note: Exception handling should be done in Command class itself. If not, an unhandled exception may result in app crash! """ if self.is_glimcommand(command): command.run(app) else: command.run()
aacanakin/glim
glim/command.py
CommandAdapter.register
python
def register(self, module): if module is not None: cmds = self.retrieve_commands(module) for c in cmds: if self.valid_name(c.name): cmd = c(self.subparsers) self.commands.append(cmd) else: print(colored("Warning: Command %s has empty name. It won't be registered" % c, 'yellow'))
Function registers into self.commands from module. Args ---- module (module): The module name.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/command.py#L79-L96
[ "def retrieve_commands(self, module):\n \"\"\"\n Function smartly imports Command type classes given module\n\n Args\n ----\n module (module):\n The module which Command classes will be extracted from\n\n Returns\n -------\n commands (list):\n A list of Command instances\n\n Note:\n This function will not register any command class\n named \"Command\" or \"GlimCommand\".\n\n When extending Command class, be sure to have \"Command\"\n string on your custom commands.\n \"\"\"\n commands = []\n\n for name, obj in inspect.getmembers(module):\n if name != 'Command' and 'Command' in name:\n if name != 'GlimCommand':\n cobject = getattr(module, name)\n commands.append(cobject)\n\n return commands\n", "def valid_name(self, name):\n \"\"\"\n Function returns if command name is valid or not.\n\n Args\n ----\n name (string): The command line utility name.\n\n Returns\n -------\n valid (boolean): Returns true when valid, else false.\n \"\"\"\n invalid = name is None or name == ''\n return not invalid\n" ]
class CommandAdapter(object): """ This class is responsible for detecting, registering and dispatching command line utilities of glim framework. In glim, there are two types of commands namely Command and GlimCommand. The only difference of them is GlimCommand can access to app object in the runtime. The class is used for appending app.commands and glim.commands together. It is also used for extension command registering. Attributes ---------- subparsers (argparse.Subparsers): The subparsers object that provides sub commands. commands (list): A list of Command objects """ def __init__(self, subparsers): self.subparsers = subparsers self.commands = [] def retrieve_commands(self, module): """ Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function will not register any command class named "Command" or "GlimCommand". When extending Command class, be sure to have "Command" string on your custom commands. """ commands = [] for name, obj in inspect.getmembers(module): if name != 'Command' and 'Command' in name: if name != 'GlimCommand': cobject = getattr(module, name) commands.append(cobject) return commands def valid_name(self, name): """ Function returns if command name is valid or not. Args ---- name (string): The command line utility name. Returns ------- valid (boolean): Returns true when valid, else false. """ invalid = name is None or name == '' return not invalid def register_extension(self, module, extension): """ Function registers into self.commands from module extension. All extension subcommands are registered using the name convention 'extension:command' Example: If you have a redis extension namely 'gredis', the extension commands can be accessed by the following; $ python glim.py gredis:ping Args ---- module (module): The module name. extension (string): The extension name. 
""" if module is not None: cmds = self.retrieve_commands(module) commands = [] for c in cmds: if self.valid_name(c.name): name = '%s:%s' % (extension, c.name) cmd = c(self.subparsers, name) self.commands.append(cmd) def match(self, args): """ Function dispatches the active command line utility. Args ---- args (argparse.parse_args()): The parsed arguments using parser.parse_args() function. Returns ------- command (glim.command.Command): the active command object. """ command = None for c in self.commands: if c.name == args.which: c.args = args command = c break return command def is_glimcommand(self, command): """ Function detects if a command is GlimCommand. Args ---- command (glim.command.Command): the command object. Returns ------- True or False """ return isinstance(command, GlimCommand) def dispatch(self, command, app): """ Function runs the active command. Args ---- command (glim.command.Command): the command object. app (glim.app.App): the glim app object. Note: Exception handling should be done in Command class itself. If not, an unhandled exception may result in app crash! """ if self.is_glimcommand(command): command.run(app) else: command.run()
aacanakin/glim
glim/command.py
CommandAdapter.register_extension
python
def register_extension(self, module, extension): if module is not None: cmds = self.retrieve_commands(module) commands = [] for c in cmds: if self.valid_name(c.name): name = '%s:%s' % (extension, c.name) cmd = c(self.subparsers, name) self.commands.append(cmd)
Function registers into self.commands from module extension. All extension subcommands are registered using the name convention 'extension:command' Example: If you have a redis extension namely 'gredis', the extension commands can be accessed by the following; $ python glim.py gredis:ping Args ---- module (module): The module name. extension (string): The extension name.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/command.py#L98-L123
[ "def retrieve_commands(self, module):\n \"\"\"\n Function smartly imports Command type classes given module\n\n Args\n ----\n module (module):\n The module which Command classes will be extracted from\n\n Returns\n -------\n commands (list):\n A list of Command instances\n\n Note:\n This function will not register any command class\n named \"Command\" or \"GlimCommand\".\n\n When extending Command class, be sure to have \"Command\"\n string on your custom commands.\n \"\"\"\n commands = []\n\n for name, obj in inspect.getmembers(module):\n if name != 'Command' and 'Command' in name:\n if name != 'GlimCommand':\n cobject = getattr(module, name)\n commands.append(cobject)\n\n return commands\n", "def valid_name(self, name):\n \"\"\"\n Function returns if command name is valid or not.\n\n Args\n ----\n name (string): The command line utility name.\n\n Returns\n -------\n valid (boolean): Returns true when valid, else false.\n \"\"\"\n invalid = name is None or name == ''\n return not invalid\n" ]
class CommandAdapter(object): """ This class is responsible for detecting, registering and dispatching command line utilities of glim framework. In glim, there are two types of commands namely Command and GlimCommand. The only difference of them is GlimCommand can access to app object in the runtime. The class is used for appending app.commands and glim.commands together. It is also used for extension command registering. Attributes ---------- subparsers (argparse.Subparsers): The subparsers object that provides sub commands. commands (list): A list of Command objects """ def __init__(self, subparsers): self.subparsers = subparsers self.commands = [] def retrieve_commands(self, module): """ Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function will not register any command class named "Command" or "GlimCommand". When extending Command class, be sure to have "Command" string on your custom commands. """ commands = [] for name, obj in inspect.getmembers(module): if name != 'Command' and 'Command' in name: if name != 'GlimCommand': cobject = getattr(module, name) commands.append(cobject) return commands def valid_name(self, name): """ Function returns if command name is valid or not. Args ---- name (string): The command line utility name. Returns ------- valid (boolean): Returns true when valid, else false. """ invalid = name is None or name == '' return not invalid def register(self, module): """ Function registers into self.commands from module. Args ---- module (module): The module name. """ if module is not None: cmds = self.retrieve_commands(module) for c in cmds: if self.valid_name(c.name): cmd = c(self.subparsers) self.commands.append(cmd) else: print(colored("Warning: Command %s has empty name. 
It won't be registered" % c, 'yellow')) def match(self, args): """ Function dispatches the active command line utility. Args ---- args (argparse.parse_args()): The parsed arguments using parser.parse_args() function. Returns ------- command (glim.command.Command): the active command object. """ command = None for c in self.commands: if c.name == args.which: c.args = args command = c break return command def is_glimcommand(self, command): """ Function detects if a command is GlimCommand. Args ---- command (glim.command.Command): the command object. Returns ------- True or False """ return isinstance(command, GlimCommand) def dispatch(self, command, app): """ Function runs the active command. Args ---- command (glim.command.Command): the command object. app (glim.app.App): the glim app object. Note: Exception handling should be done in Command class itself. If not, an unhandled exception may result in app crash! """ if self.is_glimcommand(command): command.run(app) else: command.run()
aacanakin/glim
glim/command.py
CommandAdapter.match
python
def match(self, args): command = None for c in self.commands: if c.name == args.which: c.args = args command = c break return command
Function dispatches the active command line utility. Args ---- args (argparse.parse_args()): The parsed arguments using parser.parse_args() function. Returns ------- command (glim.command.Command): the active command object.
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/command.py#L125-L144
null
class CommandAdapter(object): """ This class is responsible for detecting, registering and dispatching command line utilities of glim framework. In glim, there are two types of commands namely Command and GlimCommand. The only difference of them is GlimCommand can access to app object in the runtime. The class is used for appending app.commands and glim.commands together. It is also used for extension command registering. Attributes ---------- subparsers (argparse.Subparsers): The subparsers object that provides sub commands. commands (list): A list of Command objects """ def __init__(self, subparsers): self.subparsers = subparsers self.commands = [] def retrieve_commands(self, module): """ Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function will not register any command class named "Command" or "GlimCommand". When extending Command class, be sure to have "Command" string on your custom commands. """ commands = [] for name, obj in inspect.getmembers(module): if name != 'Command' and 'Command' in name: if name != 'GlimCommand': cobject = getattr(module, name) commands.append(cobject) return commands def valid_name(self, name): """ Function returns if command name is valid or not. Args ---- name (string): The command line utility name. Returns ------- valid (boolean): Returns true when valid, else false. """ invalid = name is None or name == '' return not invalid def register(self, module): """ Function registers into self.commands from module. Args ---- module (module): The module name. """ if module is not None: cmds = self.retrieve_commands(module) for c in cmds: if self.valid_name(c.name): cmd = c(self.subparsers) self.commands.append(cmd) else: print(colored("Warning: Command %s has empty name. 
It won't be registered" % c, 'yellow')) def register_extension(self, module, extension): """ Function registers into self.commands from module extension. All extension subcommands are registered using the name convention 'extension:command' Example: If you have a redis extension namely 'gredis', the extension commands can be accessed by the following; $ python glim.py gredis:ping Args ---- module (module): The module name. extension (string): The extension name. """ if module is not None: cmds = self.retrieve_commands(module) commands = [] for c in cmds: if self.valid_name(c.name): name = '%s:%s' % (extension, c.name) cmd = c(self.subparsers, name) self.commands.append(cmd) def is_glimcommand(self, command): """ Function detects if a command is GlimCommand. Args ---- command (glim.command.Command): the command object. Returns ------- True or False """ return isinstance(command, GlimCommand) def dispatch(self, command, app): """ Function runs the active command. Args ---- command (glim.command.Command): the command object. app (glim.app.App): the glim app object. Note: Exception handling should be done in Command class itself. If not, an unhandled exception may result in app crash! """ if self.is_glimcommand(command): command.run(app) else: command.run()
aacanakin/glim
glim/command.py
CommandAdapter.dispatch
python
def dispatch(self, command, app): if self.is_glimcommand(command): command.run(app) else: command.run()
Function runs the active command. Args ---- command (glim.command.Command): the command object. app (glim.app.App): the glim app object. Note: Exception handling should be done in Command class itself. If not, an unhandled exception may result in app crash!
train
https://github.com/aacanakin/glim/blob/71a20ac149a1292c0d6c1dc7414985ea51854f7a/glim/command.py#L160-L177
[ "def is_glimcommand(self, command):\n \"\"\"\n Function detects if a command is GlimCommand.\n\n Args\n ----\n command (glim.command.Command): the command object.\n\n Returns\n -------\n True or False\n \"\"\"\n return isinstance(command, GlimCommand)\n" ]
class CommandAdapter(object): """ This class is responsible for detecting, registering and dispatching command line utilities of glim framework. In glim, there are two types of commands namely Command and GlimCommand. The only difference of them is GlimCommand can access to app object in the runtime. The class is used for appending app.commands and glim.commands together. It is also used for extension command registering. Attributes ---------- subparsers (argparse.Subparsers): The subparsers object that provides sub commands. commands (list): A list of Command objects """ def __init__(self, subparsers): self.subparsers = subparsers self.commands = [] def retrieve_commands(self, module): """ Function smartly imports Command type classes given module Args ---- module (module): The module which Command classes will be extracted from Returns ------- commands (list): A list of Command instances Note: This function will not register any command class named "Command" or "GlimCommand". When extending Command class, be sure to have "Command" string on your custom commands. """ commands = [] for name, obj in inspect.getmembers(module): if name != 'Command' and 'Command' in name: if name != 'GlimCommand': cobject = getattr(module, name) commands.append(cobject) return commands def valid_name(self, name): """ Function returns if command name is valid or not. Args ---- name (string): The command line utility name. Returns ------- valid (boolean): Returns true when valid, else false. """ invalid = name is None or name == '' return not invalid def register(self, module): """ Function registers into self.commands from module. Args ---- module (module): The module name. """ if module is not None: cmds = self.retrieve_commands(module) for c in cmds: if self.valid_name(c.name): cmd = c(self.subparsers) self.commands.append(cmd) else: print(colored("Warning: Command %s has empty name. 
It won't be registered" % c, 'yellow')) def register_extension(self, module, extension): """ Function registers into self.commands from module extension. All extension subcommands are registered using the name convention 'extension:command' Example: If you have a redis extension namely 'gredis', the extension commands can be accessed by the following; $ python glim.py gredis:ping Args ---- module (module): The module name. extension (string): The extension name. """ if module is not None: cmds = self.retrieve_commands(module) commands = [] for c in cmds: if self.valid_name(c.name): name = '%s:%s' % (extension, c.name) cmd = c(self.subparsers, name) self.commands.append(cmd) def match(self, args): """ Function dispatches the active command line utility. Args ---- args (argparse.parse_args()): The parsed arguments using parser.parse_args() function. Returns ------- command (glim.command.Command): the active command object. """ command = None for c in self.commands: if c.name == args.which: c.args = args command = c break return command def is_glimcommand(self, command): """ Function detects if a command is GlimCommand. Args ---- command (glim.command.Command): the command object. Returns ------- True or False """ return isinstance(command, GlimCommand)
The-Politico/politico-civic-election
election/models/race.py
Race.save
python
def save(self, *args, **kwargs): self.uid = '{}_{}_race'.format( self.office.uid, self.cycle.uid ) name_label = '{0} {1}'.format( self.cycle.name, self.office.label ) if self.special: self.uid = '{}:special'.format( self.uid ) name_label = '{} Special'.format( name_label ) self.label = name_label self.name = name_label if not self.slug: self.slug = uuslug( name_label, instance=self, max_length=100, separator='-', start_no=2 ) super(Race, self).save(*args, **kwargs)
**uid**: :code:`{office.uid}_{cycle.uid}_race`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/race.py#L33-L66
null
class Race(models.Model): """ A race for an office comprised of one or many elections. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) slug = models.SlugField( blank=True, max_length=255, unique=True, editable=False) label = models.CharField(max_length=255, blank=True) short_label = models.CharField(max_length=50, null=True, blank=True) description = models.TextField(blank=True, null=True) office = models.ForeignKey( Office, related_name='races', on_delete=models.PROTECT) cycle = models.ForeignKey( 'ElectionCycle', related_name='races', on_delete=models.PROTECT) special = models.BooleanField(default=False) def __str__(self): return self.label
The-Politico/politico-civic-election
election/models/election_type.py
ElectionType.save
python
def save(self, *args, **kwargs): self.uid = 'electiontype:{}'.format(self.slug) super(ElectionType, self).save(*args, **kwargs)
**uid**: :code:`electiontype:{name}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election_type.py#L42-L47
null
class ElectionType(models.Model): """ e.g., "General", "Primary" """ GENERAL = 'general' PARTY_PRIMARY = 'party-primary' JUNGLE_PRIMARY = 'jungle-primary' PRIMARY_RUNOFF = 'primary-runoff' GENERAL_RUNOFF = 'general-runoff' TYPES = ( (GENERAL, 'General'), (PARTY_PRIMARY, 'Party Primary'), (JUNGLE_PRIMARY, 'Jungle Primary'), (PRIMARY_RUNOFF, 'Primary Runoff'), (GENERAL_RUNOFF, 'General Runoff') ) uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True) slug = models.SlugField( blank=True, max_length=255, unique=True, choices=TYPES ) label = models.CharField(max_length=255, blank=True) short_label = models.CharField(max_length=50, null=True, blank=True) ap_code = models.CharField(max_length=1, null=True, blank=True) number_of_winners = models.PositiveSmallIntegerField(default=1) winning_threshold = models.DecimalField( decimal_places=3, max_digits=5, null=True, blank=True) def __str__(self): return self.uid def is_primary(self): if self.slug in [self.PARTY_PRIMARY, self.JUNGLE_PRIMARY]: return True else: return False def is_runoff(self): if self.slug in [self.PRIMARY_RUNOFF, self.GENERAL_RUNOFF]: return True else: return False
The-Politico/politico-civic-election
election/models/ballot_measure.py
BallotMeasure.save
python
def save(self, *args, **kwargs): self.uid = '{}_{}_ballotmeasure:{}'.format( self.division.uid, self.election_day.uid, self.number ) super(BallotMeasure, self).save(*args, **kwargs)
**uid**: :code:`division_cycle_ballotmeasure:{number}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/ballot_measure.py#L28-L37
null
class BallotMeasure(models.Model): """A ballot measure.""" uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) label = models.CharField(max_length=255, blank=True) short_label = models.CharField(max_length=50, null=True, blank=True) question = models.TextField() division = models.ForeignKey( Division, related_name='ballot_measures', on_delete=models.PROTECT) number = models.CharField(max_length=3) election_day = models.ForeignKey( 'ElectionDay', related_name='ballot_measures', on_delete=models.PROTECT) def __str__(self): return self.uid
The-Politico/politico-civic-election
election/models/election_cycle.py
ElectionCycle.save
python
def save(self, *args, **kwargs): self.slug = slugify(self.name) self.uid = 'cycle:{}'.format(self.slug) super(ElectionCycle, self).save(*args, **kwargs)
**uid**: :code:`cycle:{year}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election_cycle.py#L20-L26
null
class ElectionCycle(models.Model): uid = models.CharField( max_length=10, primary_key=True, editable=False, blank=True ) slug = models.SlugField( blank=True, max_length=4, unique=True, editable=False ) name = models.CharField(max_length=4) def __str__(self): return self.uid
The-Politico/politico-civic-election
election/models/election.py
Election.save
python
def save(self, *args, **kwargs): if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs)
**uid**: :code:`{race.uid}_election:{election_day}-{party}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L38-L52
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.update_or_create_candidate
python
def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election
Create a CandidateElection.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L54-L64
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.delete_candidate
python
def delete_candidate(self, candidate): CandidateElection.objects.filter( candidate=candidate, election=self ).delete()
Delete a CandidateElection.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L66-L70
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidates
python
def get_candidates(self): candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections]
Get all CandidateElections for this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L72-L76
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidates_by_party
python
def get_candidates_by_party(self): candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections }
Get CandidateElections serialized into an object with party-slug keys.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L78-L87
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def 
get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidate_election
python
def get_candidate_election(self, candidate): return CandidateElection.objects.get( candidate=candidate, election=self )
Get CandidateElection for a Candidate in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L89-L93
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidate_votes
python
def get_candidate_votes(self, candidate): candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all()
Get all votes attached to a CandidateElection for a Candidate in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L95-L104
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_votes
python
def get_votes(self): candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes
Get all votes for this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L106-L116
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidate_electoral_votes
python
def get_candidate_electoral_votes(self, candidate): candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all()
Get all electoral votes for a candidate in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L118-L126
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_electoral_votes
python
def get_electoral_votes(self): candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes
Get all electoral votes for all candidates in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L128-L138
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all() def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_candidate_delegates
python
def get_candidate_delegates(self, candidate): candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all()
Get all pledged delegates for a candidate in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L140-L148
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_delegates(self): """ Get all pledged delegates for any candidate in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/election.py
Election.get_delegates
python
def get_delegates(self): candidate_elections = CandidateElection.objects.filter(election=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
Get all pledged delegates for any candidate in this election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election.py#L150-L160
null
class Election(models.Model): """ A specific contest in a race held on a specific day. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) election_type = models.ForeignKey( "ElectionType", related_name="elections", on_delete=models.PROTECT ) candidates = models.ManyToManyField( "Candidate", through="CandidateElection" ) race = models.ForeignKey( "Race", related_name="elections", on_delete=models.PROTECT ) party = models.ForeignKey( Party, null=True, blank=True, on_delete=models.PROTECT ) election_day = models.ForeignKey( "ElectionDay", related_name="elections", on_delete=models.PROTECT ) division = models.ForeignKey( Division, related_name="elections", on_delete=models.PROTECT ) def __str__(self): return self.race.office.label def save(self, *args, **kwargs): """ **uid**: :code:`{race.uid}_election:{election_day}-{party}` """ if self.party: self.uid = "{}_election:{}-{}".format( self.race.uid, self.election_day.date, slugify(self.party.ap_code), ) else: self.uid = "{}_election:{}".format( self.race.uid, self.election_day.date ) super(Election, self).save(*args, **kwargs) def update_or_create_candidate( self, candidate, aggregable=True, uncontested=False ): """Create a CandidateElection.""" candidate_election, c = CandidateElection.objects.update_or_create( candidate=candidate, election=self, defaults={"aggregable": aggregable, "uncontested": uncontested}, ) return candidate_election def delete_candidate(self, candidate): """Delete a CandidateElection.""" CandidateElection.objects.filter( candidate=candidate, election=self ).delete() def get_candidates(self): """Get all CandidateElections for this election.""" candidate_elections = CandidateElection.objects.filter(election=self) return [ce.candidate for ce in candidate_elections] def get_candidates_by_party(self): """ Get CandidateElections serialized into an object with party-slug keys. 
""" candidate_elections = CandidateElection.objects.filter(election=self) return { ce.candidate.party.slug: ce.candidate for ce in candidate_elections } def get_candidate_election(self, candidate): """Get CandidateElection for a Candidate in this election.""" return CandidateElection.objects.get( candidate=candidate, election=self ) def get_candidate_votes(self, candidate): """ Get all votes attached to a CandidateElection for a Candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.votes.all() def get_votes(self): """ Get all votes for this election. """ candidate_elections = CandidateElection.objects.filter(election=self) votes = None for ce in candidate_elections: votes = votes | ce.votes.all() return votes def get_candidate_electoral_votes(self, candidate): """ Get all electoral votes for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.electoral_votes.all() def get_electoral_votes(self): """ Get all electoral votes for all candidates in this election. """ candidate_elections = CandidateElection.objects.filter(election=self) electoral_votes = None for ce in candidate_elections: electoral_votes = electoral_votes | ce.electoral_votes.all() return electoral_votes def get_candidate_delegates(self, candidate): """ Get all pledged delegates for a candidate in this election. """ candidate_election = CandidateElection.objects.get( candidate=candidate, election=self ) return candidate_election.delegates.all()
The-Politico/politico-civic-election
election/models/election_day.py
ElectionDay.save
python
def save(self, *args, **kwargs): self.uid = '{}_date:{}'.format( self.cycle.uid, self.date ) self.slug = '{}'.format(self.date) super(ElectionDay, self).save(*args, **kwargs)
**uid**: :code:`{cycle.uid}_date:{date}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/election_day.py#L27-L36
null
class ElectionDay(models.Model): """ A day on which one or many elections can be held. """ uid = models.CharField( max_length=500, primary_key=True, editable=False, blank=True ) slug = models.SlugField( blank=True, max_length=255, unique=True, editable=False ) date = models.DateField(unique=True) cycle = models.ForeignKey( 'ElectionCycle', related_name='elections_days', on_delete=models.PROTECT) def __str__(self): return self.uid def special_election_datestring(self): """ Formatted date string used in URL for special elections. """ return self.date.strftime('%b-%d').lower()
The-Politico/politico-civic-election
election/models/candidate.py
Candidate.save
python
def save(self, *args, **kwargs): self.uid = "{}_candidate:{}-{}".format( self.person.uid, self.party.uid, self.race.cycle.uid ) super(Candidate, self).save(*args, **kwargs)
**uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}`
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/candidate.py#L41-L48
null
class Candidate(models.Model): """ A person who runs in a race for an office. """ id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) uid = models.CharField(max_length=500, editable=False, blank=True) race = models.ForeignKey( "Race", related_name="candidates", on_delete=models.PROTECT ) person = models.ForeignKey( Person, related_name="candidacies", on_delete=models.PROTECT ) party = models.ForeignKey( Party, related_name="candidates", on_delete=models.PROTECT ) ap_candidate_id = models.CharField(max_length=255, null=True, blank=True) incumbent = models.BooleanField(default=False) top_of_ticket = models.ForeignKey( "self", null=True, blank=True, related_name="ticket", on_delete=models.SET_NULL, ) prospective = models.BooleanField( default=False, help_text="The candidate has not yet declared her candidacy.", ) def __str__(self): return self.uid def get_candidate_election(self, election): """Get a CandidateElection.""" return CandidateElection.objects.get(candidate=self, election=election) def get_elections(self): """Get all elections a candidate is in.""" candidate_elections = CandidateElection.objects.filter(candidate=self) return [ce.election for ce in candidate_elections] def get_election_votes(self, election): """Get all votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.votes.all() def get_election_electoral_votes(self, election): """Get all electoral votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.electoral_votes.all() def get_election_delegates(self, election): """Get all pledged delegates for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.delegates.all() def get_delegates(self): """Get all pledged delegates for this 
candidate.""" candidate_elections = CandidateElection.objects.filter(candidate=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/candidate.py
Candidate.get_candidate_election
python
def get_candidate_election(self, election): return CandidateElection.objects.get(candidate=self, election=election)
Get a CandidateElection.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/candidate.py#L53-L55
null
class Candidate(models.Model): """ A person who runs in a race for an office. """ id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) uid = models.CharField(max_length=500, editable=False, blank=True) race = models.ForeignKey( "Race", related_name="candidates", on_delete=models.PROTECT ) person = models.ForeignKey( Person, related_name="candidacies", on_delete=models.PROTECT ) party = models.ForeignKey( Party, related_name="candidates", on_delete=models.PROTECT ) ap_candidate_id = models.CharField(max_length=255, null=True, blank=True) incumbent = models.BooleanField(default=False) top_of_ticket = models.ForeignKey( "self", null=True, blank=True, related_name="ticket", on_delete=models.SET_NULL, ) prospective = models.BooleanField( default=False, help_text="The candidate has not yet declared her candidacy.", ) def save(self, *args, **kwargs): """ **uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}` """ self.uid = "{}_candidate:{}-{}".format( self.person.uid, self.party.uid, self.race.cycle.uid ) super(Candidate, self).save(*args, **kwargs) def __str__(self): return self.uid def get_elections(self): """Get all elections a candidate is in.""" candidate_elections = CandidateElection.objects.filter(candidate=self) return [ce.election for ce in candidate_elections] def get_election_votes(self, election): """Get all votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.votes.all() def get_election_electoral_votes(self, election): """Get all electoral votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.electoral_votes.all() def get_election_delegates(self, election): """Get all pledged delegates for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return 
candidate_election.delegates.all() def get_delegates(self): """Get all pledged delegates for this candidate.""" candidate_elections = CandidateElection.objects.filter(candidate=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/candidate.py
Candidate.get_election_votes
python
def get_election_votes(self, election): candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.votes.all()
Get all votes for this candidate in an election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/candidate.py#L63-L69
null
class Candidate(models.Model): """ A person who runs in a race for an office. """ id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) uid = models.CharField(max_length=500, editable=False, blank=True) race = models.ForeignKey( "Race", related_name="candidates", on_delete=models.PROTECT ) person = models.ForeignKey( Person, related_name="candidacies", on_delete=models.PROTECT ) party = models.ForeignKey( Party, related_name="candidates", on_delete=models.PROTECT ) ap_candidate_id = models.CharField(max_length=255, null=True, blank=True) incumbent = models.BooleanField(default=False) top_of_ticket = models.ForeignKey( "self", null=True, blank=True, related_name="ticket", on_delete=models.SET_NULL, ) prospective = models.BooleanField( default=False, help_text="The candidate has not yet declared her candidacy.", ) def save(self, *args, **kwargs): """ **uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}` """ self.uid = "{}_candidate:{}-{}".format( self.person.uid, self.party.uid, self.race.cycle.uid ) super(Candidate, self).save(*args, **kwargs) def __str__(self): return self.uid def get_candidate_election(self, election): """Get a CandidateElection.""" return CandidateElection.objects.get(candidate=self, election=election) def get_elections(self): """Get all elections a candidate is in.""" candidate_elections = CandidateElection.objects.filter(candidate=self) return [ce.election for ce in candidate_elections] def get_election_electoral_votes(self, election): """Get all electoral votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.electoral_votes.all() def get_election_delegates(self, election): """Get all pledged delegates for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.delegates.all() def get_delegates(self): """Get all 
pledged delegates for this candidate.""" candidate_elections = CandidateElection.objects.filter(candidate=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/candidate.py
Candidate.get_election_electoral_votes
python
def get_election_electoral_votes(self, election): candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.electoral_votes.all()
Get all electoral votes for this candidate in an election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/candidate.py#L71-L77
null
class Candidate(models.Model): """ A person who runs in a race for an office. """ id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) uid = models.CharField(max_length=500, editable=False, blank=True) race = models.ForeignKey( "Race", related_name="candidates", on_delete=models.PROTECT ) person = models.ForeignKey( Person, related_name="candidacies", on_delete=models.PROTECT ) party = models.ForeignKey( Party, related_name="candidates", on_delete=models.PROTECT ) ap_candidate_id = models.CharField(max_length=255, null=True, blank=True) incumbent = models.BooleanField(default=False) top_of_ticket = models.ForeignKey( "self", null=True, blank=True, related_name="ticket", on_delete=models.SET_NULL, ) prospective = models.BooleanField( default=False, help_text="The candidate has not yet declared her candidacy.", ) def save(self, *args, **kwargs): """ **uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}` """ self.uid = "{}_candidate:{}-{}".format( self.person.uid, self.party.uid, self.race.cycle.uid ) super(Candidate, self).save(*args, **kwargs) def __str__(self): return self.uid def get_candidate_election(self, election): """Get a CandidateElection.""" return CandidateElection.objects.get(candidate=self, election=election) def get_elections(self): """Get all elections a candidate is in.""" candidate_elections = CandidateElection.objects.filter(candidate=self) return [ce.election for ce in candidate_elections] def get_election_votes(self, election): """Get all votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.votes.all() def get_election_delegates(self, election): """Get all pledged delegates for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.delegates.all() def get_delegates(self): """Get all pledged delegates for this 
candidate.""" candidate_elections = CandidateElection.objects.filter(candidate=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
The-Politico/politico-civic-election
election/models/candidate.py
Candidate.get_election_delegates
python
def get_election_delegates(self, election): candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.delegates.all()
Get all pledged delegates for this candidate in an election.
train
https://github.com/The-Politico/politico-civic-election/blob/44c6872c419909df616e997e1990c4d295b25eda/election/models/candidate.py#L79-L85
null
class Candidate(models.Model): """ A person who runs in a race for an office. """ id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) uid = models.CharField(max_length=500, editable=False, blank=True) race = models.ForeignKey( "Race", related_name="candidates", on_delete=models.PROTECT ) person = models.ForeignKey( Person, related_name="candidacies", on_delete=models.PROTECT ) party = models.ForeignKey( Party, related_name="candidates", on_delete=models.PROTECT ) ap_candidate_id = models.CharField(max_length=255, null=True, blank=True) incumbent = models.BooleanField(default=False) top_of_ticket = models.ForeignKey( "self", null=True, blank=True, related_name="ticket", on_delete=models.SET_NULL, ) prospective = models.BooleanField( default=False, help_text="The candidate has not yet declared her candidacy.", ) def save(self, *args, **kwargs): """ **uid**: :code:`{person.uid}_candidate:{party.uid}-{cycle.ap_code}` """ self.uid = "{}_candidate:{}-{}".format( self.person.uid, self.party.uid, self.race.cycle.uid ) super(Candidate, self).save(*args, **kwargs) def __str__(self): return self.uid def get_candidate_election(self, election): """Get a CandidateElection.""" return CandidateElection.objects.get(candidate=self, election=election) def get_elections(self): """Get all elections a candidate is in.""" candidate_elections = CandidateElection.objects.filter(candidate=self) return [ce.election for ce in candidate_elections] def get_election_votes(self, election): """Get all votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.votes.all() def get_election_electoral_votes(self, election): """Get all electoral votes for this candidate in an election.""" candidate_election = CandidateElection.objects.get( candidate=self, election=election ) return candidate_election.electoral_votes.all() def get_delegates(self): """Get all pledged delegates for 
this candidate.""" candidate_elections = CandidateElection.objects.filter(candidate=self) delegates = None for ce in candidate_elections: delegates = delegates | ce.delegates.all() return delegates
mapmyfitness/jtime
jtime/utils.py
get_input
python
def get_input(input_func, input_str): val = input_func("Please enter your {0}: ".format(input_str)) while not val or not len(val.strip()): val = input_func("You didn't enter a valid {0}, please try again: ".format(input_str)) return val
Get input from the user given an input function and an input string
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/utils.py#L6-L13
null
from __future__ import division # This allows division to stay as a float import datetime import sys def working_cycletime(start, end, workday_start=datetime.timedelta(hours=0), workday_end=datetime.timedelta(hours=24)): """ Get the working time between a beginning and an end point subtracting out non-office time """ def clamp(t, start, end): "Return 't' clamped to the range ['start', 'end']" return max(start, min(end, t)) def day_part(t): "Return timedelta between midnight and 't'." return t - t.replace(hour=0, minute=0, second=0) if not start: return None if not end: end = datetime.datetime.now() zero = datetime.timedelta(0) # Make sure that the work day is valid assert(zero <= workday_start <= workday_end <= datetime.timedelta(1)) # Get the workday delta workday = workday_end - workday_start # Get the number of days it took days = (end - start).days + 1 # Number of weeks weeks = days // 7 # Get the number of days in addition to weeks extra = (max(0, 5 - start.weekday()) + min(5, 1 + end.weekday())) % 5 # Get the number of working days weekdays = weeks * 5 + extra # Get the total time spent accounting for the workday total = workday * weekdays if start.weekday() < 5: # Figuring out how much time it wasn't being worked on and subtracting total -= clamp(day_part(start) - workday_start, zero, workday) if end.weekday() < 5: # Figuring out how much time it wasn't being worked on and subtracting total -= clamp(workday_end - day_part(end), zero, workday) cycle_time = timedelta_total_seconds(total) / timedelta_total_seconds(workday) return cycle_time # Could we override the class to add this on import instead? def timedelta_total_seconds(td): if sys.version_info >= (2, 7): return td.total_seconds() return td.days * 24 * 60 * 60 + td.seconds
mapmyfitness/jtime
jtime/utils.py
working_cycletime
python
def working_cycletime(start, end, workday_start=datetime.timedelta(hours=0), workday_end=datetime.timedelta(hours=24)): def clamp(t, start, end): "Return 't' clamped to the range ['start', 'end']" return max(start, min(end, t)) def day_part(t): "Return timedelta between midnight and 't'." return t - t.replace(hour=0, minute=0, second=0) if not start: return None if not end: end = datetime.datetime.now() zero = datetime.timedelta(0) # Make sure that the work day is valid assert(zero <= workday_start <= workday_end <= datetime.timedelta(1)) # Get the workday delta workday = workday_end - workday_start # Get the number of days it took days = (end - start).days + 1 # Number of weeks weeks = days // 7 # Get the number of days in addition to weeks extra = (max(0, 5 - start.weekday()) + min(5, 1 + end.weekday())) % 5 # Get the number of working days weekdays = weeks * 5 + extra # Get the total time spent accounting for the workday total = workday * weekdays if start.weekday() < 5: # Figuring out how much time it wasn't being worked on and subtracting total -= clamp(day_part(start) - workday_start, zero, workday) if end.weekday() < 5: # Figuring out how much time it wasn't being worked on and subtracting total -= clamp(workday_end - day_part(end), zero, workday) cycle_time = timedelta_total_seconds(total) / timedelta_total_seconds(workday) return cycle_time
Get the working time between a beginning and an end point subtracting out non-office time
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/utils.py#L16-L56
[ "def timedelta_total_seconds(td):\n if sys.version_info >= (2, 7):\n return td.total_seconds()\n return td.days * 24 * 60 * 60 + td.seconds\n", "def clamp(t, start, end):\n \"Return 't' clamped to the range ['start', 'end']\"\n return max(start, min(end, t))\n", "def day_part(t):\n \"Return timedelta between midnight and 't'.\"\n return t - t.replace(hour=0, minute=0, second=0)\n" ]
from __future__ import division # This allows division to stay as a float import datetime import sys def get_input(input_func, input_str): """ Get input from the user given an input function and an input string """ val = input_func("Please enter your {0}: ".format(input_str)) while not val or not len(val.strip()): val = input_func("You didn't enter a valid {0}, please try again: ".format(input_str)) return val # Could we override the class to add this on import instead? def timedelta_total_seconds(td): if sys.version_info >= (2, 7): return td.total_seconds() return td.days * 24 * 60 * 60 + td.seconds
mapmyfitness/jtime
jtime/jtime.py
configure
python
def configure(): jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e))
Update config
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L40-L56
[ "def load_config():\n \"\"\"\n Validate the config\n \"\"\"\n configuration = MyParser()\n configuration.read(_config)\n\n d = configuration.as_dict()\n\n if 'jira' not in d:\n raise custom_exceptions.NotConfigured\n\n # Special handling of the boolean for error reporting\n d['jira']['error_reporting'] = configuration.getboolean('jira', 'error_reporting')\n\n return d\n", "def _delete_config():\n os.path.exists(_config) and os.remove(_config)\n", "def _save_config(jira_url, username, password, error_reporting):\n \"\"\"\n Saves the username and password to the config\n \"\"\"\n # Delete what is there before we re-write. New user means new everything\n os.path.exists(_config) and os.remove(_config)\n\n config = ConfigParser.SafeConfigParser()\n config.read(_config)\n if not config.has_section('jira'):\n config.add_section('jira')\n\n if 'http' not in jira_url:\n jira_url = 'http://' + jira_url\n\n try:\n resp = urllib.urlopen(jira_url)\n url = urlparse.urlparse(resp.url)\n jira_url = url.scheme + \"://\" + url.netloc\n except IOError, e:\n print \"It doesn't appear that {0} is responding to a request.\\\n Please make sure that you typed the hostname, \\\n i.e. jira.atlassian.com.\\n{1}\".format(jira_url, e)\n sys.exit(1)\n\n config.set('jira', 'url', jira_url)\n config.set('jira', 'username', username)\n config.set('jira', 'password', base64.b64encode(password))\n config.set('jira', 'error_reporting', str(error_reporting))\n\n with open(_config, 'w') as ini:\n os.chmod(_config, 0600)\n config.write(ini)\n", "def jira_connection(config):\n \"\"\"\n Gets a JIRA API connection. 
If a connection has already been created the existing connection\n will be returned.\n \"\"\"\n global _jira_connection\n if _jira_connection:\n return _jira_connection\n else:\n jira_options = {'server': config.get('jira').get('url')}\n\n cookies = configuration._get_cookies_as_dict()\n jira_connection = jira_ext.JIRA(options=jira_options)\n session = jira_connection._session\n\n reused_session = False\n\n if cookies:\n requests.utils.add_dict_to_cookiejar(session.cookies, cookies)\n try:\n jira_connection.session()\n reused_session = True\n except Exception:\n pass\n\n if not reused_session:\n session.auth = (config['jira']['username'], base64.b64decode(config['jira']['password']))\n jira_connection.session()\n session.auth = None\n\n cookie_jar_hash = requests.utils.dict_from_cookiejar(session.cookies)\n for key, value in cookie_jar_hash.iteritems():\n configuration._save_cookie(key, value)\n\n _jira_connection = jira_connection\n return _jira_connection\n", "def get_input(input_func, input_str):\n \"\"\"\n Get input from the user given an input function and an input string\n \"\"\"\n val = input_func(\"Please enter your {0}: \".format(input_str))\n while not val or not len(val.strip()):\n val = input_func(\"You didn't enter a valid {0}, please try again: \".format(input_str))\n return val\n" ]
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def status(): """ Gets the worklog status for the current branch """ import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print 
" %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m' def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def log(duration, message=None, use_last_commit_message=False): """ Log time against the current active issue """ branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed." 
def mark(): """ Mark the start time for active work on an issue """ branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." % \ (branch, issue.fields.status.name) @argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): """ Prints a list of the users tickets and provides filtering options """ default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status 
was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not quiet: title = result.fields.summary title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime() def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open') def main(): """ Set up the context and connectors """ try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. 
# TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. 
rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
mapmyfitness/jtime
jtime/jtime.py
status
python
def status(): import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print " %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m'
Gets the worklog status for the current branch
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L58-L125
null
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def configure(): """ Update config """ jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e)) def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 
30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def log(duration, message=None, use_last_commit_message=False): """ Log time against the current active issue """ branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed." def mark(): """ Mark the start time for active work on an issue """ branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." 
% \ (branch, issue.fields.status.name) @argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): """ Prints a list of the users tickets and provides filtering options """ default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not 
quiet: title = result.fields.summary title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime() def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open') def main(): """ Set up the context and connectors """ try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. # TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . 
import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
mapmyfitness/jtime
jtime/jtime.py
log
python
def log(duration, message=None, use_last_commit_message=False): branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed."
Log time against the current active issue
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L135-L159
null
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def configure(): """ Update config """ jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. 
Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e)) def status(): """ Gets the worklog status for the current branch """ import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print " %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 
30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m' def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def mark(): """ Mark the start time for active work on an issue """ branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." 
% \ (branch, issue.fields.status.name) @argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): """ Prints a list of the users tickets and provides filtering options """ default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not 
quiet: title = result.fields.summary title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime() def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open') def main(): """ Set up the context and connectors """ try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. # TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . 
import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
mapmyfitness/jtime
jtime/jtime.py
mark
python
def mark(): branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." % \ (branch, issue.fields.status.name)
Mark the start time for active work on an issue
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L162-L187
null
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def configure(): """ Update config """ jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. 
Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e)) def status(): """ Gets the worklog status for the current branch """ import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print " %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 
30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m' def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def log(duration, message=None, use_last_commit_message=False): """ Log time against the current active issue """ branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed." 
@argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): """ Prints a list of the users tickets and provides filtering options """ default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not quiet: title = result.fields.summary 
title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime() def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open') def main(): """ Set up the context and connectors """ try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. # TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . 
import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
mapmyfitness/jtime
jtime/jtime.py
me
python
def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not quiet: title = result.fields.summary title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime()
Prints a list of the users tickets and provides filtering options
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L194-L253
null
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def configure(): """ Update config """ jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. 
Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e)) def status(): """ Gets the worklog status for the current branch """ import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print " %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 
30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m' def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def log(duration, message=None, use_last_commit_message=False): """ Log time against the current active issue """ branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed." 
def mark(): """ Mark the start time for active work on an issue """ branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." % \ (branch, issue.fields.status.name) @argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open') def main(): """ Set up the context and connectors """ try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. 
# TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. 
rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
mapmyfitness/jtime
jtime/jtime.py
main
python
def main(): try: init() except custom_exceptions.NotConfigured: configure() init() # Adding this in case users are trying to run without adding a jira url. # I would like to take this out in a release or two. # TODO: REMOVE except (AttributeError, ConfigParser.NoOptionError): logging.error('It appears that your configuration is invalid, please reconfigure the app and try again.') configure() init() parser = argparse.ArgumentParser() # Now simply auto-discovering the methods listed in this module current_module = sys.modules[__name__] module_methods = [getattr(current_module, a, None) for a in dir(current_module) if isinstance(getattr(current_module, a, None), types.FunctionType) and a != 'main'] argh.add_commands(parser, module_methods) # Putting the error logging after the app is initialized because # we want to adhere to the user's preferences try: argh.dispatch(parser) # We don't want to report keyboard interrupts to rollbar except (KeyboardInterrupt, SystemExit): raise except Exception as e: if isinstance(e, jira.exceptions.JIRAError) and "HTTP 400" in e: logging.warning('It appears that your authentication with {0} is invalid. Please re-configure jtime: `jtime configure` with the correct credentials'.format(configuration.load_config['jira'].get('url'))) elif configured.get('jira').get('error_reporting', True): # Configure rollbar so that we report errors import rollbar from . import __version__ as version root_path = os.path.dirname(os.path.realpath(__file__)) rollbar.init('7541b8e188044831b6728fa8475eab9f', 'v%s' % version, root=root_path) logging.error('Sorry. It appears that there was an error when handling your command. ' 'This error has been reported to our error tracking system. To disable ' 'this reporting, please re-configure the app: `jtime config`.') extra_data = { # grab the command that we're running 'cmd': sys.argv[1], # we really don't want to see jtime in the args 'args': sys.argv[2:], # lets grab anything useful, python version? 
'python': str(sys.version), } # We really shouldn't thit this line of code when running tests, so let's not cover it. rollbar.report_exc_info(extra_data=extra_data) # pragma: no cover else: logging.error('It appears that there was an error when handling your command.') raise
Set up the context and connectors
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jtime.py#L264-L321
[ "def init():\n global configured, jira, git\n # Initialize the connectors\n configured = configuration.load_config()\n jira = connection.jira_connection(configured)\n git = git_ext.GIT()\n", "def configure():\n \"\"\"\n Update config\n \"\"\"\n jira_url = utils.get_input(raw_input, \"Jira url\")\n username = utils.get_input(raw_input, \"username\")\n password = utils.get_input(getpass.getpass, \"password\")\n error_reporting = True \\\n if 'n' not in raw_input(\"Would you like to automatically report errors to help improve the software? [y]/N: \").lower() \\\n else False\n configuration._save_config(jira_url, username, password, error_reporting)\n\n try:\n connection.jira_connection(configuration.load_config())\n except jira_exceptions.JIRAError as e:\n configuration._delete_config()\n logging.error(\"You have an error in your jira connection/configuration: {error}. Please fix the configuration before attempting to use jtime.\\n We suggest trying your username without using the email address.\".format(error=e))\n" ]
#!/usr/bin/env python import argh import argparse import ConfigParser import dateutil.parser from dateutil.tz import tzlocal import datetime import logging import getpass from jira import exceptions as jira_exceptions import os import sys import types import configuration import connection import custom_exceptions import git_ext import utils logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.ERROR) configured = None jira = None git = None def init(): global configured, jira, git # Initialize the connectors configured = configuration.load_config() jira = connection.jira_connection(configured) git = git_ext.GIT() def configure(): """ Update config """ jira_url = utils.get_input(raw_input, "Jira url") username = utils.get_input(raw_input, "username") password = utils.get_input(getpass.getpass, "password") error_reporting = True \ if 'n' not in raw_input("Would you like to automatically report errors to help improve the software? [y]/N: ").lower() \ else False configuration._save_config(jira_url, username, password, error_reporting) try: connection.jira_connection(configuration.load_config()) except jira_exceptions.JIRAError as e: configuration._delete_config() logging.error("You have an error in your jira connection/configuration: {error}. 
Please fix the configuration before attempting to use jtime.\n We suggest trying your username without using the email address.".format(error=e)) def status(): """ Gets the worklog status for the current branch """ import pdb; pdb.set_trace() branch = git.branch issue = jira.get_issue(branch) if not issue: return # Print the title title = issue.fields.summary print "(%s) %s" % (branch, title) # Print the status status = issue.fields.status.name assignee = issue.fields.assignee.name in_progress = jira.get_datetime_issue_in_progress(issue) if in_progress: in_progress_string = in_progress.strftime("%a %x %I:%M %p") print ' Status: %s as of %s' % (status, in_progress_string) else: print ' Status: %s' % status print ' Assignee: %s' % assignee # Print the worklogs # Get the timespent and return 0m if it does not exist time_spent = '0m' try: time_spent = issue.fields.timetracking.timeSpent except: pass worklogs = jira.get_worklog(issue) print "\nTime logged (%s):" % time_spent if worklogs: for worklog in worklogs: worklog_hash = worklog.raw author = worklog_hash['author']['name'] time_spent = worklog_hash.get('timeSpent', '0m') created = dateutil.parser.parse(worklog_hash['started']) created_pattern = '%a %x ' # Adding extra space for formatting if not created.hour == created.minute == created.second == 0: created = created.astimezone(tzlocal()) created_pattern = '%a %x %I:%M %p' comment = worklog_hash.get('comment', '<no comment>') updated_string = created.strftime(created_pattern) print " %s - %s (%s): %s" % (updated_string, author, time_spent, comment) else: print " No worklogs" cycle_time = jira.get_cycle_time(issue) if cycle_time: print '\nCycle Time: %.1f days' % cycle_time # Print the time elapsed since the last mark elapsed_time = jira.get_elapsed_time(issue) if elapsed_time: print '\n\033[0;32m%s elapsed\033[00m (use "jtime log ." to log elapsed time or "jtime log <duration> (ex. 
30m, 1h etc.)" to log a specific amount of time)' % (elapsed_time) else: print '\n\033[0;32m0m elapsed\033[00m' def lmts(): print git.get_last_modified_timestamp() @argh.arg('duration', help='Use . to log all time elapsed since the last mark or provide a specific amount of time to log (ex. 30m, 1h)') @argh.arg('-m', '--message', help='A message to add to this work log') @argh.arg('-c', '--commit', dest='use_last_commit_message', help='Use last commit message for the work log message') def log(duration, message=None, use_last_commit_message=False): """ Log time against the current active issue """ branch = git.branch issue = jira.get_issue(branch) # Create the comment comment = "Working on issue %s" % branch if message: comment = message elif use_last_commit_message: comment = git.get_last_commit_message() if issue: # If the duration is provided use it, otherwise use the elapsed time since the last mark duration = jira.get_elapsed_time(issue) if duration == '.' else duration if duration: # Add the worklog jira.add_worklog(issue, timeSpent=duration, adjustEstimate=None, newEstimate=None, reduceBy=None, comment=comment) print "Logged %s against issue %s (%s)" % (duration, branch, comment) else: print "No time logged, less than 0m elapsed." 
def mark(): """ Mark the start time for active work on an issue """ branch = git.branch issue = jira.get_issue(branch) if not issue: return worklogs = jira.get_worklog(issue) marked = False if worklogs: # If we have worklogs, change the updated time of the last log to the mark marked = jira.touch_last_worklog(issue) mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print "Set mark at %s on %s by touching last work log" % (mark_time, branch) else: # If we don't have worklogs, mark the issue as in progress if that is an available transition jira.workflow_transition(issue, 'Open') marked = jira.workflow_transition(issue, 'In Progress') mark_time = datetime.datetime.now(dateutil.tz.tzlocal()).strftime("%I:%M %p") print 'Set mark at %s on %s by changing status to "In Progress"' % (mark_time, branch) if not marked: print "ERROR: Issue %s is has a status of %s and has no worklogs. You must log some time or re-open the issue to proceed." % \ (branch, issue.fields.status.name) @argh.arg('-a', '--show-all', help='Include all issues that are not Closed') @argh.arg('-i', '--show-inprogress', help='Show only issues that are In Progress') @argh.arg('-o', '--show-open', help='Show only issues that are Open') @argh.arg('-q', '--quiet', help='Quiet, does not includes issue title') def me(show_all=False, show_inprogress=False, show_open=False, quiet=False): """ Prints a list of the users tickets and provides filtering options """ default = not [arg for arg in sys.argv[2:] if arg not in ('-q', '--quiet')] status_exclusions = ['Backlog', 'Open', 'Closed'] status_inclusions = [] if show_inprogress or default: status_inclusions.append('In Progress') if show_open: status_exclusions = [] status_inclusions.append('Open') elif show_all: status_exclusions = ['Closed'] jql = \ """ assignee=currentUser() AND resolved is EMPTY """ # We are switching between showing everything and only showing in progress items if not show_all and not show_open: jql += ' AND status 
was "In Progress" ' inclusion_str = None if len(status_inclusions): inclusion_str = "status in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_inclusions))) exclusion_str = None if len(status_exclusions): exclusion_str = "status not in ({0})".format((','.join('"' + issue_status + '"' for issue_status in status_exclusions))) jql += " AND ( {0} {1} {2} ) ".format(inclusion_str if inclusion_str else "", " OR " if inclusion_str and exclusion_str else "", exclusion_str if exclusion_str else "") jql += " ORDER BY updated DESC " results = jira.search_issues(jql) for result in results: issue = result.key updated = dateutil.parser.parse(result.fields.updated).strftime("%a %x %I:%M %p") status = result.fields.status.name cycletime = jira.get_cycle_time(result.key) cycletime_str = " -- %.1f days" % cycletime if cycletime else "" print "%s (%s) %s%s" % (issue, updated, status, cycletime_str) # If verbose, add a line for the issue title if not quiet: title = result.fields.summary title = (title[:75] + '..') if len(title) > 75 else title print " %s\n" % title # Print result count and usage hint for help print "\033[0;32m%s issue(s) found\033[00m (use 'jtime me -h' for filter options)" % len(results) print "One week avg cycle time: %.1f days" % jira.get_week_avg_cycletime() def reopen(): """ Reopen an issue """ issue = jira.get_issue(git.branch) jira.workflow_transition(issue, 'Open')
mapmyfitness/jtime
jtime/git_ext.py
GIT.get_last_commit_message
python
def get_last_commit_message(self): # Check if we are currently in a repo try: branch = self.active_branch return self.commit(branch).message except InvalidGitRepositoryError: print "Not in a git repo" return None
Gets the last commit message on the active branch Returns None if not in a git repo
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/git_ext.py#L30-L42
null
class GIT(git.Repo): def __init__(self, **kwargs): try: super(GIT, self).__init__(**kwargs) except InvalidGitRepositoryError: print "You must be in a git repository to use jtime" sys.exit(1) @property def branch(self): """ Gets the active branch in the current repo. Returns None if not in a git repo or no current branch """ branch = None # Check if we are currently in a repo try: branch = self.active_branch except InvalidGitRepositoryError: print "Not in a git repo" return branch def get_last_modified_timestamp(self): """ Looks at the files in a git root directory and grabs the last modified timestamp """ cmd = "find . -print0 | xargs -0 stat -f '%T@ %p' | sort -n | tail -1 | cut -f2- -d' '" ps = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT) output = ps.communicate()[0] print output
mapmyfitness/jtime
jtime/git_ext.py
GIT.get_last_modified_timestamp
python
def get_last_modified_timestamp(self): cmd = "find . -print0 | xargs -0 stat -f '%T@ %p' | sort -n | tail -1 | cut -f2- -d' '" ps = subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.STDOUT) output = ps.communicate()[0] print output
Looks at the files in a git root directory and grabs the last modified timestamp
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/git_ext.py#L44-L51
null
class GIT(git.Repo): def __init__(self, **kwargs): try: super(GIT, self).__init__(**kwargs) except InvalidGitRepositoryError: print "You must be in a git repository to use jtime" sys.exit(1) @property def branch(self): """ Gets the active branch in the current repo. Returns None if not in a git repo or no current branch """ branch = None # Check if we are currently in a repo try: branch = self.active_branch except InvalidGitRepositoryError: print "Not in a git repo" return branch def get_last_commit_message(self): """ Gets the last commit message on the active branch Returns None if not in a git repo """ # Check if we are currently in a repo try: branch = self.active_branch return self.commit(branch).message except InvalidGitRepositoryError: print "Not in a git repo" return None
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.get_issue
python
def get_issue(self, branch): if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex)
Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name.
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L16-L30
null
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. 
""" transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. """ worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time) def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. 
""" def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.get_elapsed_time
python
def get_elapsed_time(self, issue): last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None
Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress)
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L38-L62
[ "def timedelta_total_seconds(td):\n if sys.version_info >= (2, 7):\n return td.total_seconds()\n return td.days * 24 * 60 * 60 + td.seconds\n", "def get_worklog(self, issue):\n \"\"\"\n Gets the worklogs for a JIRA issue\n \"\"\"\n return issue.fields.worklog.worklogs\n", "def get_datetime_issue_in_progress(self, issue):\n \"\"\"\n If the issue is in progress, gets that most recent time that the issue became 'In Progress'\n \"\"\"\n histories = issue.changelog.histories\n for history in reversed(histories):\n history_items = history.items\n for item in history_items:\n if item.field == 'status' and item.toString == \"In Progress\":\n return dateutil.parser.parse(history.created)\n" ]
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. """ transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. 
""" worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time) def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. """ def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.workflow_transition
python
def workflow_transition(self, issue, status_name): transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name)
Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status.
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L64-L77
null
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. 
""" worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time) def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. """ def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.get_datetime_issue_in_progress
python
def get_datetime_issue_in_progress(self, issue): histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created)
If the issue is in progress, gets that most recent time that the issue became 'In Progress'
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L79-L88
null
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. 
""" transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. """ worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time) def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. 
""" def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.touch_last_worklog
python
def touch_last_worklog(self, issue): worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True
Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry.
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L90-L99
[ "def get_worklog(self, issue):\n \"\"\"\n Gets the worklogs for a JIRA issue\n \"\"\"\n return issue.fields.worklog.worklogs\n" ]
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. 
""" transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time) def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. 
""" def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.get_cycle_time
python
def get_cycle_time(self, issue_or_start_or_key): if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time)
Provided an issue or a start datetime, will return the cycle time since the start or progress
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L101-L114
[ "def working_cycletime(start, end, workday_start=datetime.timedelta(hours=0), workday_end=datetime.timedelta(hours=24)):\n \"\"\"\n Get the working time between a beginning and an end point subtracting out non-office time\n \"\"\"\n def clamp(t, start, end):\n \"Return 't' clamped to the range ['start', 'end']\"\n return max(start, min(end, t))\n\n def day_part(t):\n \"Return timedelta between midnight and 't'.\"\n return t - t.replace(hour=0, minute=0, second=0)\n\n if not start:\n return None\n if not end:\n end = datetime.datetime.now()\n\n zero = datetime.timedelta(0)\n # Make sure that the work day is valid\n assert(zero <= workday_start <= workday_end <= datetime.timedelta(1))\n # Get the workday delta\n workday = workday_end - workday_start\n # Get the number of days it took\n days = (end - start).days + 1\n # Number of weeks\n weeks = days // 7\n # Get the number of days in addition to weeks\n extra = (max(0, 5 - start.weekday()) + min(5, 1 + end.weekday())) % 5\n # Get the number of working days\n weekdays = weeks * 5 + extra\n # Get the total time spent accounting for the workday\n total = workday * weekdays\n if start.weekday() < 5:\n # Figuring out how much time it wasn't being worked on and subtracting\n total -= clamp(day_part(start) - workday_start, zero, workday)\n if end.weekday() < 5:\n # Figuring out how much time it wasn't being worked on and subtracting\n total -= clamp(workday_end - day_part(end), zero, workday)\n\n cycle_time = timedelta_total_seconds(total) / timedelta_total_seconds(workday)\n return cycle_time\n", "def get_issue(self, branch):\n \"\"\"\n Gets the JIRA issue associated with the branch name.\n\n Returns None if no issue with this branch name.\n \"\"\"\n if branch:\n try:\n issue = self.issue(branch, expand='changelog')\n return issue\n except jira.exceptions.JIRAError as ex:\n if ex.status_code == 404:\n print \"No JIRA issue found for branch %s\" % branch\n else:\n print str(ex)\n", "def 
get_datetime_issue_in_progress(self, issue):\n \"\"\"\n If the issue is in progress, gets that most recent time that the issue became 'In Progress'\n \"\"\"\n histories = issue.changelog.histories\n for history in reversed(histories):\n history_items = history.items\n for item in history_items:\n if item.field == 'status' and item.toString == \"In Progress\":\n return dateutil.parser.parse(history.created)\n" ]
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. 
""" transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. """ worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_week_avg_cycletime(self): """ Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened. """ def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
mapmyfitness/jtime
jtime/jira_ext.py
JIRA.get_week_avg_cycletime
python
def get_week_avg_cycletime(self): def moving_average(new_val, old_avg, prev_n): return (new_val + old_avg) / (prev_n + 1) active_tickets_jql = 'assignee=currentUser() and status was "In Progress" DURING (startOfWeek(), endofweek()) and status not in (Backlog, Open) ORDER BY updated DESC' week_active_tickets = self.search_issues(active_tickets_jql) avg_cycletime = 0 n_issues = 0 for issue in week_active_tickets: cycle_time = self.get_cycle_time(self.get_issue(issue.key)) avg_cycletime = moving_average(cycle_time, avg_cycletime, n_issues) n_issues = n_issues + 1 return avg_cycletime
Gets the average cycletime of the current user for the past week. This includes any ticket that was marked "In Progress" but not reopened.
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/jira_ext.py#L116-L135
[ "def get_issue(self, branch):\n \"\"\"\n Gets the JIRA issue associated with the branch name.\n\n Returns None if no issue with this branch name.\n \"\"\"\n if branch:\n try:\n issue = self.issue(branch, expand='changelog')\n return issue\n except jira.exceptions.JIRAError as ex:\n if ex.status_code == 404:\n print \"No JIRA issue found for branch %s\" % branch\n else:\n print str(ex)\n", "def get_cycle_time(self, issue_or_start_or_key):\n \"\"\"\n Provided an issue or a start datetime, will return the cycle time since the start or progress\n \"\"\"\n if isinstance(issue_or_start_or_key, basestring):\n issue_or_start_or_key = self.get_issue(issue_or_start_or_key)\n\n if isinstance(issue_or_start_or_key, jira.resources.Issue):\n progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key)\n elif isinstance(issue_or_start_or_key, datetime.datetime):\n progress_started = issue_or_start_or_key\n\n curr_time = datetime.datetime.now(dateutil.tz.tzlocal())\n return utils.working_cycletime(progress_started, curr_time)\n", "def moving_average(new_val, old_avg, prev_n):\n return (new_val + old_avg) / (prev_n + 1)\n" ]
class JIRA(jira.client.JIRA): """ Overloading the jira-python JIRA class with custom methods that we are using for our application """ def __init__(self, **kwargs): super(JIRA, self).__init__(**kwargs) def get_issue(self, branch): """ Gets the JIRA issue associated with the branch name. Returns None if no issue with this branch name. """ if branch: try: issue = self.issue(branch, expand='changelog') return issue except jira.exceptions.JIRAError as ex: if ex.status_code == 404: print "No JIRA issue found for branch %s" % branch else: print str(ex) def get_worklog(self, issue): """ Gets the worklogs for a JIRA issue """ return issue.fields.worklog.worklogs def get_elapsed_time(self, issue): """ Gets the elapsed time since the last mark (either the updated time of the last log or the time that the issue was marked in progress) """ last_mark = None # Get the last mark from the work logs worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_mark = dateutil.parser.parse(last_worklog.raw['updated']) # If no worklogs, get the time since the issue was marked In Progress if not last_mark: last_mark = self.get_datetime_issue_in_progress(issue) if last_mark: now = datetime.datetime.now(dateutil.tz.tzlocal()) delta = now - last_mark minutes = int(utils.timedelta_total_seconds(delta) / 60) if minutes > 0: return str(minutes) + 'm' else: return None def workflow_transition(self, issue, status_name): """ Change the status of a JIRA issue to a named status. Will only be updated if this transition is available from the current status. 
""" transitions = self.transitions(issue) for transition in transitions: if transition['to']['name'] == status_name: transition_id = transition['id'] self.transition_issue(issue, transition_id) print "Changed status of issue %s to %s" % (issue.key, status_name) return True print "Unable to change status of issue %s to %s" % (issue.key, status_name) def get_datetime_issue_in_progress(self, issue): """ If the issue is in progress, gets that most recent time that the issue became 'In Progress' """ histories = issue.changelog.histories for history in reversed(histories): history_items = history.items for item in history_items: if item.field == 'status' and item.toString == "In Progress": return dateutil.parser.parse(history.created) def touch_last_worklog(self, issue): """ Touch the last worklog for an issue (changes the updated date on the worklog). We use this date as the 'mark' for determining the time elapsed for the next log entry. """ worklogs = self.get_worklog(issue) if worklogs: last_worklog = worklogs[-1] last_worklog.update() return True def get_cycle_time(self, issue_or_start_or_key): """ Provided an issue or a start datetime, will return the cycle time since the start or progress """ if isinstance(issue_or_start_or_key, basestring): issue_or_start_or_key = self.get_issue(issue_or_start_or_key) if isinstance(issue_or_start_or_key, jira.resources.Issue): progress_started = self.get_datetime_issue_in_progress(issue_or_start_or_key) elif isinstance(issue_or_start_or_key, datetime.datetime): progress_started = issue_or_start_or_key curr_time = datetime.datetime.now(dateutil.tz.tzlocal()) return utils.working_cycletime(progress_started, curr_time)
mapmyfitness/jtime
jtime/configuration.py
load_config
python
def load_config(): configuration = MyParser() configuration.read(_config) d = configuration.as_dict() if 'jira' not in d: raise custom_exceptions.NotConfigured # Special handling of the boolean for error reporting d['jira']['error_reporting'] = configuration.getboolean('jira', 'error_reporting') return d
Validate the config
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/configuration.py#L14-L29
null
import base64 import ConfigParser import os import sys import urllib import urlparse import custom_exceptions _config = os.path.expanduser('~/.jtime.ini') def _delete_config(): os.path.exists(_config) and os.remove(_config) def _save_config(jira_url, username, password, error_reporting): """ Saves the username and password to the config """ # Delete what is there before we re-write. New user means new everything os.path.exists(_config) and os.remove(_config) config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('jira'): config.add_section('jira') if 'http' not in jira_url: jira_url = 'http://' + jira_url try: resp = urllib.urlopen(jira_url) url = urlparse.urlparse(resp.url) jira_url = url.scheme + "://" + url.netloc except IOError, e: print "It doesn't appear that {0} is responding to a request.\ Please make sure that you typed the hostname, \ i.e. jira.atlassian.com.\n{1}".format(jira_url, e) sys.exit(1) config.set('jira', 'url', jira_url) config.set('jira', 'username', username) config.set('jira', 'password', base64.b64encode(password)) config.set('jira', 'error_reporting', str(error_reporting)) with open(_config, 'w') as ini: os.chmod(_config, 0600) config.write(ini) def _get_cookies_as_dict(): """ Get cookies as a dict """ config = ConfigParser.SafeConfigParser() config.read(_config) if config.has_section('cookies'): cookie_dict = {} for option in config.options('cookies'): option_key = option.upper() if option == 'jsessionid' else option cookie_dict[option_key] = config.get('cookies', option) return cookie_dict def _save_cookie(cookie_name, cookie_value): """ Save cookie """ config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('cookies'): config.add_section('cookies') config.set('cookies', cookie_name, cookie_value) with open(_config, 'w') as ini: config.write(ini) class MyParser(ConfigParser.SafeConfigParser): def as_dict(self): d = dict(self._sections) for k in d: d[k] = 
dict(self._defaults, **d[k]) d[k].pop('__name__', None) return d
mapmyfitness/jtime
jtime/configuration.py
_save_config
python
def _save_config(jira_url, username, password, error_reporting): # Delete what is there before we re-write. New user means new everything os.path.exists(_config) and os.remove(_config) config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('jira'): config.add_section('jira') if 'http' not in jira_url: jira_url = 'http://' + jira_url try: resp = urllib.urlopen(jira_url) url = urlparse.urlparse(resp.url) jira_url = url.scheme + "://" + url.netloc except IOError, e: print "It doesn't appear that {0} is responding to a request.\ Please make sure that you typed the hostname, \ i.e. jira.atlassian.com.\n{1}".format(jira_url, e) sys.exit(1) config.set('jira', 'url', jira_url) config.set('jira', 'username', username) config.set('jira', 'password', base64.b64encode(password)) config.set('jira', 'error_reporting', str(error_reporting)) with open(_config, 'w') as ini: os.chmod(_config, 0600) config.write(ini)
Saves the username and password to the config
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/configuration.py#L36-L68
null
import base64 import ConfigParser import os import sys import urllib import urlparse import custom_exceptions _config = os.path.expanduser('~/.jtime.ini') def load_config(): """ Validate the config """ configuration = MyParser() configuration.read(_config) d = configuration.as_dict() if 'jira' not in d: raise custom_exceptions.NotConfigured # Special handling of the boolean for error reporting d['jira']['error_reporting'] = configuration.getboolean('jira', 'error_reporting') return d def _delete_config(): os.path.exists(_config) and os.remove(_config) def _get_cookies_as_dict(): """ Get cookies as a dict """ config = ConfigParser.SafeConfigParser() config.read(_config) if config.has_section('cookies'): cookie_dict = {} for option in config.options('cookies'): option_key = option.upper() if option == 'jsessionid' else option cookie_dict[option_key] = config.get('cookies', option) return cookie_dict def _save_cookie(cookie_name, cookie_value): """ Save cookie """ config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('cookies'): config.add_section('cookies') config.set('cookies', cookie_name, cookie_value) with open(_config, 'w') as ini: config.write(ini) class MyParser(ConfigParser.SafeConfigParser): def as_dict(self): d = dict(self._sections) for k in d: d[k] = dict(self._defaults, **d[k]) d[k].pop('__name__', None) return d
mapmyfitness/jtime
jtime/configuration.py
_get_cookies_as_dict
python
def _get_cookies_as_dict(): config = ConfigParser.SafeConfigParser() config.read(_config) if config.has_section('cookies'): cookie_dict = {} for option in config.options('cookies'): option_key = option.upper() if option == 'jsessionid' else option cookie_dict[option_key] = config.get('cookies', option) return cookie_dict
Get cookies as a dict
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/configuration.py#L71-L84
null
import base64 import ConfigParser import os import sys import urllib import urlparse import custom_exceptions _config = os.path.expanduser('~/.jtime.ini') def load_config(): """ Validate the config """ configuration = MyParser() configuration.read(_config) d = configuration.as_dict() if 'jira' not in d: raise custom_exceptions.NotConfigured # Special handling of the boolean for error reporting d['jira']['error_reporting'] = configuration.getboolean('jira', 'error_reporting') return d def _delete_config(): os.path.exists(_config) and os.remove(_config) def _save_config(jira_url, username, password, error_reporting): """ Saves the username and password to the config """ # Delete what is there before we re-write. New user means new everything os.path.exists(_config) and os.remove(_config) config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('jira'): config.add_section('jira') if 'http' not in jira_url: jira_url = 'http://' + jira_url try: resp = urllib.urlopen(jira_url) url = urlparse.urlparse(resp.url) jira_url = url.scheme + "://" + url.netloc except IOError, e: print "It doesn't appear that {0} is responding to a request.\ Please make sure that you typed the hostname, \ i.e. 
jira.atlassian.com.\n{1}".format(jira_url, e) sys.exit(1) config.set('jira', 'url', jira_url) config.set('jira', 'username', username) config.set('jira', 'password', base64.b64encode(password)) config.set('jira', 'error_reporting', str(error_reporting)) with open(_config, 'w') as ini: os.chmod(_config, 0600) config.write(ini) def _save_cookie(cookie_name, cookie_value): """ Save cookie """ config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('cookies'): config.add_section('cookies') config.set('cookies', cookie_name, cookie_value) with open(_config, 'w') as ini: config.write(ini) class MyParser(ConfigParser.SafeConfigParser): def as_dict(self): d = dict(self._sections) for k in d: d[k] = dict(self._defaults, **d[k]) d[k].pop('__name__', None) return d
mapmyfitness/jtime
jtime/configuration.py
_save_cookie
python
def _save_cookie(cookie_name, cookie_value): config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('cookies'): config.add_section('cookies') config.set('cookies', cookie_name, cookie_value) with open(_config, 'w') as ini: config.write(ini)
Save cookie
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/configuration.py#L87-L99
null
import base64 import ConfigParser import os import sys import urllib import urlparse import custom_exceptions _config = os.path.expanduser('~/.jtime.ini') def load_config(): """ Validate the config """ configuration = MyParser() configuration.read(_config) d = configuration.as_dict() if 'jira' not in d: raise custom_exceptions.NotConfigured # Special handling of the boolean for error reporting d['jira']['error_reporting'] = configuration.getboolean('jira', 'error_reporting') return d def _delete_config(): os.path.exists(_config) and os.remove(_config) def _save_config(jira_url, username, password, error_reporting): """ Saves the username and password to the config """ # Delete what is there before we re-write. New user means new everything os.path.exists(_config) and os.remove(_config) config = ConfigParser.SafeConfigParser() config.read(_config) if not config.has_section('jira'): config.add_section('jira') if 'http' not in jira_url: jira_url = 'http://' + jira_url try: resp = urllib.urlopen(jira_url) url = urlparse.urlparse(resp.url) jira_url = url.scheme + "://" + url.netloc except IOError, e: print "It doesn't appear that {0} is responding to a request.\ Please make sure that you typed the hostname, \ i.e. 
jira.atlassian.com.\n{1}".format(jira_url, e) sys.exit(1) config.set('jira', 'url', jira_url) config.set('jira', 'username', username) config.set('jira', 'password', base64.b64encode(password)) config.set('jira', 'error_reporting', str(error_reporting)) with open(_config, 'w') as ini: os.chmod(_config, 0600) config.write(ini) def _get_cookies_as_dict(): """ Get cookies as a dict """ config = ConfigParser.SafeConfigParser() config.read(_config) if config.has_section('cookies'): cookie_dict = {} for option in config.options('cookies'): option_key = option.upper() if option == 'jsessionid' else option cookie_dict[option_key] = config.get('cookies', option) return cookie_dict class MyParser(ConfigParser.SafeConfigParser): def as_dict(self): d = dict(self._sections) for k in d: d[k] = dict(self._defaults, **d[k]) d[k].pop('__name__', None) return d
mapmyfitness/jtime
jtime/connection.py
jira_connection
python
def jira_connection(config): global _jira_connection if _jira_connection: return _jira_connection else: jira_options = {'server': config.get('jira').get('url')} cookies = configuration._get_cookies_as_dict() jira_connection = jira_ext.JIRA(options=jira_options) session = jira_connection._session reused_session = False if cookies: requests.utils.add_dict_to_cookiejar(session.cookies, cookies) try: jira_connection.session() reused_session = True except Exception: pass if not reused_session: session.auth = (config['jira']['username'], base64.b64decode(config['jira']['password'])) jira_connection.session() session.auth = None cookie_jar_hash = requests.utils.dict_from_cookiejar(session.cookies) for key, value in cookie_jar_hash.iteritems(): configuration._save_cookie(key, value) _jira_connection = jira_connection return _jira_connection
Gets a JIRA API connection. If a connection has already been created the existing connection will be returned.
train
https://github.com/mapmyfitness/jtime/blob/402fb6b40ac7a78c23fd02fac50c6dbe49e5ebfd/jtime/connection.py#L12-L47
[ "def _get_cookies_as_dict():\n \"\"\"\n Get cookies as a dict\n \"\"\"\n config = ConfigParser.SafeConfigParser()\n config.read(_config)\n\n if config.has_section('cookies'):\n cookie_dict = {}\n for option in config.options('cookies'):\n option_key = option.upper() if option == 'jsessionid' else option\n cookie_dict[option_key] = config.get('cookies', option)\n\n return cookie_dict\n", "def _save_cookie(cookie_name, cookie_value):\n \"\"\"\n Save cookie\n \"\"\"\n config = ConfigParser.SafeConfigParser()\n config.read(_config)\n if not config.has_section('cookies'):\n config.add_section('cookies')\n\n config.set('cookies', cookie_name, cookie_value)\n\n with open(_config, 'w') as ini:\n config.write(ini)\n" ]
import base64 import requests import configuration import jira_ext # Global to re-use opened JIRA connection _jira_connection = None
wickman/compactor
compactor/request.py
encode_request
python
def encode_request(from_pid, to_pid, method, body=None, content_type=None, legacy=False):
    """Serialize a libprocess message as a raw HTTP/1.0 POST request.

    Builds the request line and the libprocess identification headers,
    followed by a blank line and the optional body. With ``legacy=True``
    the sender is identified via the old ``User-Agent: libprocess/<pid>``
    convention instead of the ``Libprocess-From`` header.

    :param from_pid: Sending process pid (formatted into the headers).
    :param to_pid: Destination pid; its ``id`` names the target process.
    :param method: Remote method (mailbox) name.
    :keyword body: Optional payload; must be ``bytes``/``bytearray``.
    :keyword content_type: Optional Content-Type header value.
    :keyword legacy: Use User-Agent based identification when True.
    :return: The encoded request as ``bytes``.
    :raises TypeError: If ``body`` is not a byte sequence.
    """
    payload = b'' if body is None else body
    if not isinstance(payload, (bytes, bytearray)):
        raise TypeError('Body must be a sequence of bytes.')

    header_lines = [
        'POST /{process}/{method} HTTP/1.0'.format(process=to_pid.id, method=method),
        'Connection: Keep-Alive',
        'Content-Length: %d' % len(payload),
    ]
    if legacy:
        header_lines.append('User-Agent: libprocess/{pid}'.format(pid=from_pid))
    else:
        header_lines.append('Libprocess-From: {pid}'.format(pid=from_pid))
    if content_type is not None:
        header_lines.append('Content-Type: {content_type}'.format(content_type=content_type))

    # Every header is terminated by CRLF; one extra CRLF separates headers
    # from the (possibly empty) body.
    head = b'\r\n'.join(line.encode('utf8') for line in header_lines)
    return head + b'\r\n\r\n' + payload
Encode a request into a raw HTTP request. This function returns a string of bytes that represent a valid HTTP/1.0 request, including any libprocess headers required for communication. Use the `legacy` option (set to True) to use the legacy User-Agent based libprocess identification.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/request.py#L5-L45
[ "def iter_fragments():\n for fragment in headers:\n yield fragment\n yield CRLF\n yield CRLF\n if body:\n yield body\n" ]
CRLF = b'\r\n'
wickman/compactor
compactor/bin/http_example.py
listen
python
def listen(identifier):
    """Spawn a WebProcess under a fresh compactor context.

    :param identifier: Name for the new process, e.g. ``"web(1)"``.
    :return: The ``(process, context)`` pair; the context is not yet started.
    """
    ctx = Context()
    proc = WebProcess(identifier)
    ctx.spawn(proc)
    log.info("Launching PID %s", proc.pid)
    return proc, ctx
Launch a listener and return the compactor context.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/bin/http_example.py#L34-L46
[ "def spawn(self, process):\n \"\"\"Spawn a process.\n\n Spawning a process binds it to this context and assigns the process a\n pid which is returned. The process' ``initialize`` method is called.\n\n Note: A process cannot send messages until it is bound to a context.\n\n :param process: The process to bind to this context.\n :type process: :class:`Process`\n :return: The pid of the process.\n :rtype: :class:`PID`\n \"\"\"\n self._assert_started()\n process.bind(self)\n self.http.mount_process(process)\n self._processes[process.pid] = process\n process.initialize()\n return process.pid\n" ]
import time from compactor.process import Process from compactor.context import Context import logging logging.basicConfig() log = logging.getLogger(__name__) log.setLevel(logging.INFO) class WebProcess(Process): @Process.install('ping') def ping(self, from_pid, body): log.info("Received ping") def respond(): time.sleep(0.5) self.send(from_pid, "pong") self.context.loop.add_callback(respond) @Process.install('pong') def pong(self, from_pid, body): log.info("Received pong") def respond(): time.sleep(0.5) self.send(from_pid, "ping") self.context.loop.add_callback(respond) if __name__ == '__main__': a, a_context = listen("web(1)") b, b_context = listen("web(2)") a_context.start() b_context.start() # Kick off the game of ping/pong by sending a message to B from A a.send(b.pid, "ping") while a_context.isAlive() or b_context.isAlive(): time.sleep(0.5)
wickman/compactor
compactor/pid.py
PID.from_string
python
def from_string(cls, pid):
    """Parse a ``name@ip:port`` string into a :class:`PID`.

    .. code-block:: python

        pid = PID.from_string('master(1)@192.168.33.2:5051')

    :param pid: String representation of a pid.
    :type pid: ``str``
    :return: The parsed pid.
    :rtype: :class:`PID`
    :raises: ``ValueError`` when the string is not of the form name@ip:port.
    """
    try:
        name, address = pid.split('@')
        host, port_str = address.split(':')
        port_num = int(port_str)
    except ValueError:
        raise ValueError('Invalid PID: %s' % pid)
    return cls(host, port_num, name)
Parse a PID from its string representation. PIDs may be represented as name@ip:port, e.g. .. code-block:: python pid = PID.from_string('master(1)@192.168.33.2:5051') :param pid: A string representation of a pid. :type pid: ``str`` :return: The parsed pid. :rtype: :class:`PID` :raises: ``ValueError`` should the string not be of the correct syntax.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/pid.py#L5-L26
null
class PID(object): # noqa __slots__ = ('ip', 'port', 'id') @classmethod def __init__(self, ip, port, id_): """Construct a pid. :param ip: An IP address in string form. :type ip: ``str`` :param port: The port of this pid. :type port: ``int`` :param id_: The name of the process. :type id_: ``str`` """ self.ip = ip self.port = port self.id = id_ def __hash__(self): return hash((self.ip, self.port, self.id)) def __eq__(self, other): return isinstance(other, PID) and ( self.ip == other.ip and self.port == other.port and self.id == other.id ) def __ne__(self, other): return not (self == other) def as_url(self, endpoint=None): url = 'http://%s:%s/%s' % (self.ip, self.port, self.id) if endpoint: url += '/%s' % endpoint return url def __str__(self): return '%s@%s:%d' % (self.id, self.ip, self.port) def __repr__(self): return 'PID(%s, %d, %s)' % (self.ip, self.port, self.id)
wickman/compactor
compactor/httpd.py
WireProtocolMessageHandler.detect_process
python
def detect_process(cls, headers):
    """Identify the libprocess sender from HTTP request headers.

    Checks the modern ``Libprocess-From`` header first, then the legacy
    ``User-Agent: libprocess/<pid>`` convention.

    :return: ``(pid, legacy)`` where ``legacy`` marks User-Agent based
        identification, or ``(None, None)`` when the request is not
        process-originated (or the pid fails to parse).
    """
    try:
        if 'Libprocess-From' in headers:
            return PID.from_string(headers['Libprocess-From']), False
        if 'User-Agent' in headers:
            agent = headers['User-Agent']
            if agent.startswith('libprocess/'):
                return PID.from_string(agent[len('libprocess/'):]), True
    except ValueError as e:
        # Malformed pid string: log and fall through to the "not a process" result.
        log.error('Failed to detect process: %r' % e)
    return None, None
Returns tuple of process, legacy or None, None if not process originating.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/httpd.py#L27-L39
[ "def from_string(cls, pid):\n \"\"\"Parse a PID from its string representation.\n\n PIDs may be represented as name@ip:port, e.g.\n\n .. code-block:: python\n\n pid = PID.from_string('master(1)@192.168.33.2:5051')\n\n :param pid: A string representation of a pid.\n :type pid: ``str``\n :return: The parsed pid.\n :rtype: :class:`PID`\n :raises: ``ValueError`` should the string not be of the correct syntax.\n \"\"\"\n try:\n id_, ip_port = pid.split('@')\n ip, port = ip_port.split(':')\n port = int(port)\n except ValueError:\n raise ValueError('Invalid PID: %s' % pid)\n return cls(ip, port, id_)\n" ]
class WireProtocolMessageHandler(ProcessBaseHandler): """Tornado request handler for libprocess internal messages.""" @classmethod def initialize(self, **kw): self.__name = kw.pop('name') super(WireProtocolMessageHandler, self).initialize(**kw) def set_default_headers(self): self._headers = httputil.HTTPHeaders({ "Date": httputil.format_timestamp(time.time()) }) def post(self, *args, **kw): log.info('Handling %s for %s' % (self.__name, self.process.pid)) process, legacy = self.detect_process(self.request.headers) if process is None: self.set_status(404) return log.debug('Delivering %s to %s from %s' % (self.__name, self.process.pid, process)) log.debug('Request body length: %s' % len(self.request.body)) # Handle the message self.process.handle_message(self.__name, process, self.request.body) self.set_status(202) self.finish()
wickman/compactor
compactor/httpd.py
HTTPD.mount_process
python
def mount_process(self, process):
    """Mount a Process onto the HTTP server so it receives callbacks.

    Registers one route per declared route path (served by
    ``RoutedRequestHandler``) and one per installed message name (served
    by ``WireProtocolMessageHandler``), all namespaced under the
    process's pid id.
    """
    pid_id = process.pid.id

    for path in process.route_paths:
        endpoint = '/%s%s' % (pid_id, path)
        log.info('Mounting route %s' % endpoint)
        spec = (re.escape(endpoint),
                RoutedRequestHandler,
                dict(process=process, path=path))
        self.app.add_handlers('.*$', [spec])

    for name in process.message_names:
        endpoint = '/%s/%s' % (pid_id, name)
        log.info('Mounting message handler %s' % endpoint)
        spec = (re.escape(endpoint),
                WireProtocolMessageHandler,
                dict(process=process, name=name))
        self.app.add_handlers('.*$', [spec])
Mount a Process onto the http server to receive message callbacks.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/httpd.py#L118-L139
null
class HTTPD(object): # noqa """ HTTP Server implementation that attaches to an event loop and socket, and is capable of handling mesos wire protocol messages. """ def __init__(self, sock, loop): """ Construct an HTTP server on a socket given an ioloop. """ self.loop = loop self.sock = sock self.app = Application(handlers=[(r'/.*$', Blackhole)]) self.server = HTTPServer(self.app, io_loop=self.loop) self.server.add_sockets([sock]) self.sock.listen(1024) def terminate(self): log.info('Terminating HTTP server and all connections') self.server.close_all_connections() self.sock.close() def unmount_process(self, process): """ Unmount a process from the http server to stop receiving message callbacks. """ # There is no remove_handlers, but .handlers is public so why not. server.handlers is a list of # 2-tuples of the form (host_pattern, [list of RequestHandler]) objects. We filter out all # handlers matching our process from the RequestHandler list for each host pattern. def nonmatching(handler): return 'process' not in handler.kwargs or handler.kwargs['process'] != process def filter_handlers(handlers): host_pattern, handlers = handlers return (host_pattern, list(filter(nonmatching, handlers))) self.app.handlers = [filter_handlers(handlers) for handlers in self.app.handlers]
wickman/compactor
compactor/httpd.py
HTTPD.unmount_process
python
def unmount_process(self, process):
    """Stop routing message callbacks to ``process``.

    Tornado offers no remove_handlers API, but ``app.handlers`` is public:
    a list of ``(host_pattern, [handler spec, ...])`` tuples. We rebuild it
    with every spec bound to this process filtered out.
    """
    def unrelated(spec):
        # Keep specs that carry no process, or a different process.
        return 'process' not in spec.kwargs or spec.kwargs['process'] != process

    rebuilt = []
    for host_pattern, specs in self.app.handlers:
        rebuilt.append((host_pattern, [s for s in specs if unrelated(s)]))
    self.app.handlers = rebuilt
Unmount a process from the http server to stop receiving message callbacks.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/httpd.py#L141-L157
null
class HTTPD(object): # noqa """ HTTP Server implementation that attaches to an event loop and socket, and is capable of handling mesos wire protocol messages. """ def __init__(self, sock, loop): """ Construct an HTTP server on a socket given an ioloop. """ self.loop = loop self.sock = sock self.app = Application(handlers=[(r'/.*$', Blackhole)]) self.server = HTTPServer(self.app, io_loop=self.loop) self.server.add_sockets([sock]) self.sock.listen(1024) def terminate(self): log.info('Terminating HTTP server and all connections') self.server.close_all_connections() self.sock.close() def mount_process(self, process): """ Mount a Process onto the http server to receive message callbacks. """ for route_path in process.route_paths: route = '/%s%s' % (process.pid.id, route_path) log.info('Mounting route %s' % route) self.app.add_handlers('.*$', [( re.escape(route), RoutedRequestHandler, dict(process=process, path=route_path) )]) for message_name in process.message_names: route = '/%s/%s' % (process.pid.id, message_name) log.info('Mounting message handler %s' % route) self.app.add_handlers('.*$', [( re.escape(route), WireProtocolMessageHandler, dict(process=process, name=message_name) )])
wickman/compactor
compactor/context.py
Context._make_socket
python
def _make_socket(cls, ip, port):
    """Bind a listening socket and return ``(socket, ip, port)``.

    The effective address is read back from the bound socket (the
    requested port may be 0 for an ephemeral bind). When the bind address
    is the wildcard ``0.0.0.0``, the local hostname is resolved so
    callers advertise a routable IP.
    """
    sock = bind_sockets(port, address=ip)[0]
    bound_ip, bound_port = sock.getsockname()
    if not bound_ip or bound_ip == '0.0.0.0':
        bound_ip = socket.gethostbyname(socket.gethostname())
    return sock, bound_ip, bound_port
Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L45-L57
null
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. 
:type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. 
Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. 
:type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) 
stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. 
pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. 
This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.stop
python
def stop(self):
    """Stop the context: terminate every pid, close all connections, halt the loop."""
    log.info('Stopping %s' % self)

    # Iterate over a snapshot, since terminate() mutates _processes.
    for pid in list(self._processes):
        self.terminate(pid)

    # Drain and close any remaining connection streams.
    while self._connections:
        remote = next(iter(self._connections))
        stream = self._connections.pop(remote, None)
        if stream:
            stream.close()

    self.__loop.stop()
Stops the context. This terminates all PIDs and closes all connections.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L151-L168
[ "def terminate(self, pid):\n \"\"\"Terminate a process bound to this context.\n\n When a process is terminated, all the processes to which it is linked\n will be have their ``exited`` methods called. Messages to this process\n will no longer be delivered.\n\n This method returns immediately.\n\n :param pid: The pid of the process to terminate.\n :type pid: :class:`PID`\n :returns: Nothing\n \"\"\"\n self._assert_started()\n\n log.info('Terminating %s' % pid)\n process = self._processes.pop(pid, None)\n if process:\n log.info('Unmounting %s' % process)\n self.http.unmount_process(process)\n self.__erase_link(pid)\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. 
:param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. 
:type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) 
stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. 
pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. 
This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.spawn
python
def spawn(self, process): self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid
Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID`
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L170-L188
[ "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def bind(self, context):\n if not isinstance(context, Context):\n raise TypeError('Can only bind to a Context, got %s' % type(context))\n self._context = context\n", "def initialize(self):\n \"\"\"Called when this process is spawned.\n\n Once this is called, it means a process is now routable. Subclasses\n should implement this to initialize state or possibly initiate\n connections to remote processes.\n \"\"\"\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. 
:type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) 
stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. 
pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. 
This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.dispatch
python
def dispatch(self, pid, method, *args): self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args)
Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L198-L216
[ "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def _assert_local_pid(self, pid):\n if not self._is_local(pid):\n raise self.InvalidProcess('Operation only valid for local processes!')\n", "def _get_dispatch_method(self, pid, method):\n try:\n return getattr(self._processes[pid], method)\n except KeyError:\n raise self.InvalidProcess('Unknown process %s' % pid)\n except AttributeError:\n raise self.InvalidMethod('Unknown method %s on %s' % (method, pid))\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. 
:type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) 
stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. 
pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. 
This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.delay
python
def delay(self, amount, pid, method, *args): self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args)
Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L218-L237
[ "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def _assert_local_pid(self, pid):\n if not self._is_local(pid):\n raise self.InvalidProcess('Operation only valid for local processes!')\n", "def _get_dispatch_method(self, pid, method):\n try:\n return getattr(self._processes[pid], method)\n except KeyError:\n raise self.InvalidProcess('Unknown process %s' % pid)\n except AttributeError:\n raise self.InvalidMethod('Unknown method %s on %s' % (method, pid))\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. 
:type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) stream.connect((to_pid.ip, to_pid.port), 
callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. 
pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. 
This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context._maybe_connect
python
def _maybe_connect(self, to_pid, callback=None): callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' % (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid)
Asynchronously establish a connection to the remote pid.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L247-L302
null
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. 
This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. 
:type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. 
:type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.send
python
def send(self, from_pid, to_pid, method, body=None):
    """Send a message ``method`` from one pid to another with an optional body.

    ``from_pid`` must be bound to this context.  When ``to_pid`` lives in the
    same context and exposes a matching mailbox, the wire is skipped and the
    handler is dispatched directly on the event loop.  Otherwise the message
    is encoded as an HTTP POST and written once a connection is available.
    This method returns immediately.

    :param from_pid: The pid of the sending process.
    :type from_pid: :class:`PID`
    :param to_pid: The pid of the destination process.
    :type to_pid: :class:`PID`
    :param method: The method name of the destination process.
    :type method: ``str``
    :keyword body: Optional content to send along with the message.
    :type body: ``bytes`` or None
    :return: Nothing
    """
    self._assert_started()
    self._assert_local_pid(from_pid)

    if self._is_local(to_pid):
        local_method = self._get_local_mailbox(to_pid, method)
        if local_method:
            log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method))
            self.__loop.add_callback(local_method, from_pid, body or b'')
            return
        # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're
        # just going to do a POST and have it dropped on the floor.

    request_data = encode_request(from_pid, to_pid, method, body=body)

    log.info('Sending POST %s => %s (payload: %d bytes)' % (
        from_pid, to_pid.as_url(method), len(request_data)))

    def write_request(stream):
        # Fired by _maybe_connect once a stream to to_pid exists.
        log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid))
        stream.write(request_data)
        log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid))

    self.__loop.add_callback(self._maybe_connect, to_pid, write_request)
Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. :type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L309-L356
[ "def encode_request(from_pid, to_pid, method, body=None, content_type=None, legacy=False):\n \"\"\"\n Encode a request into a raw HTTP request. This function returns a string\n of bytes that represent a valid HTTP/1.0 request, including any libprocess\n headers required for communication.\n\n Use the `legacy` option (set to True) to use the legacy User-Agent based\n libprocess identification.\n \"\"\"\n\n if body is None:\n body = b''\n\n if not isinstance(body, (bytes, bytearray)):\n raise TypeError('Body must be a sequence of bytes.')\n\n headers = [\n 'POST /{process}/{method} HTTP/1.0'.format(process=to_pid.id, method=method),\n 'Connection: Keep-Alive',\n 'Content-Length: %d' % len(body)\n ]\n\n if legacy:\n headers.append('User-Agent: libprocess/{pid}'.format(pid=from_pid))\n else:\n headers.append('Libprocess-From: {pid}'.format(pid=from_pid))\n\n if content_type is not None:\n headers.append('Content-Type: {content_type}'.format(content_type=content_type))\n\n headers = [header.encode('utf8') for header in headers]\n\n def iter_fragments():\n for fragment in headers:\n yield fragment\n yield CRLF\n yield CRLF\n if body:\n yield body\n\n return b''.join(iter_fragments())\n", "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def _is_local(self, pid):\n return pid in self._processes\n", "def _assert_local_pid(self, pid):\n if not self._is_local(pid):\n raise self.InvalidProcess('Operation only valid for local processes!')\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. 
This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' 
% (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. 
Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.link
python
def link(self, pid, to):
    """Link a local process to a possibly remote process.

    Once ``pid`` is linked to ``to``, termination of the ``to`` process (or
    loss of its connection) causes the local process' ``exited`` method to be
    invoked with ``to``.  For a remote ``to``, the link is recorded only
    after a connection has been established.  Returns immediately.

    :param pid: The pid of the linking process.
    :type pid: :class:`PID`
    :param to: The pid of the linked process.
    :type to: :class:`PID`
    :returns: Nothing
    """
    self._assert_started()

    def record_link():
        # Register the link so __erase_link can notify pid on disconnect.
        self._links[pid].add(to)
        log.info('Added link from %s to %s' % (pid, to))

    if self._is_local(pid):
        record_link()
        return

    # Remote target: connect first, then record the link once the stream
    # is up (the connected stream itself is not needed here).
    self.__loop.add_callback(self._maybe_connect, to, lambda stream: record_link())
Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L374-L405
[ "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def _is_local(self, pid):\n return pid in self._processes\n", "def really_link():\n self._links[pid].add(to)\n log.info('Added link from %s to %s' % (pid, to))\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. 
This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' 
% (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. 
:type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def terminate(self, pid): """Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will be have their ``exited`` methods called. Messages to this process will no longer be delivered. This method returns immediately. :param pid: The pid of the process to terminate. 
:type pid: :class:`PID` :returns: Nothing """ self._assert_started() log.info('Terminating %s' % pid) process = self._processes.pop(pid, None) if process: log.info('Unmounting %s' % process) self.http.unmount_process(process) self.__erase_link(pid) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/context.py
Context.terminate
python
def terminate(self, pid):
    """Terminate the process bound to this context under ``pid``.

    Termination unmounts the process from the HTTP layer, stops message
    delivery to it, and fires ``exited`` on every process linked to it.

    Returns immediately.

    :param pid: The pid of the process to terminate.
    :type pid: :class:`PID`
    :returns: Nothing
    """
    self._assert_started()
    log.info('Terminating %s' % pid)
    victim = self._processes.pop(pid, None)
    if victim:
        log.info('Unmounting %s' % victim)
        self.http.unmount_process(victim)
    # Sever links pointing at the terminated pid (notifying linked
    # processes) even if the pid was not mounted here.
    self.__erase_link(pid)
Terminate a process bound to this context. When a process is terminated, all the processes to which it is linked will have their ``exited`` methods called. Messages to this process will no longer be delivered. This method returns immediately. :param pid: The pid of the process to terminate. :type pid: :class:`PID` :returns: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/context.py#L407-L427
[ "def _assert_started(self):\n assert self.__loop_started.is_set()\n", "def __erase_link(self, to_pid):\n for pid, links in self._links.items():\n try:\n links.remove(to_pid)\n log.debug('PID link from %s <- %s exited.' % (pid, to_pid))\n self._processes[pid].exited(to_pid)\n except KeyError:\n continue\n" ]
class Context(threading.Thread): """A compactor context. Compactor contexts control the routing and handling of messages between processes. At its most basic level, a context is a listening (ip, port) pair and an event loop. """ class Error(Exception): pass class SocketError(Error): pass class InvalidProcess(Error): pass class InvalidMethod(Error): pass _SINGLETON = None _LOCK = threading.Lock() CONNECT_TIMEOUT_SECS = 5 @classmethod def _make_socket(cls, ip, port): """Bind to a new socket. If LIBPROCESS_PORT or LIBPROCESS_IP are configured in the environment, these will be used for socket connectivity. """ bound_socket = bind_sockets(port, address=ip)[0] ip, port = bound_socket.getsockname() if not ip or ip == '0.0.0.0': ip = socket.gethostbyname(socket.gethostname()) return bound_socket, ip, port @classmethod def get_ip_port(cls, ip=None, port=None): ip = ip or os.environ.get('LIBPROCESS_IP', '0.0.0.0') try: port = int(port or os.environ.get('LIBPROCESS_PORT', 0)) except ValueError: raise cls.Error('Invalid ip/port provided') return ip, port @classmethod def singleton(cls, delegate='', **kw): with cls._LOCK: if cls._SINGLETON: if cls._SINGLETON.delegate != delegate: raise RuntimeError('Attempting to construct different singleton context.') else: cls._SINGLETON = cls(delegate=delegate, **kw) cls._SINGLETON.start() return cls._SINGLETON def __init__(self, delegate='', loop=None, ip=None, port=None): """Construct a compactor context. Before any useful work can be done with a context, you must call ``start`` on the context. :keyword ip: The ip port of the interface on which the Context should listen. If none is specified, the context will attempt to bind to the ip specified by the ``LIBPROCESS_IP`` environment variable. If this variable is not set, it will bind on all interfaces. :type ip: ``str`` or None :keyword port: The port on which the Context should listen. 
If none is specified, the context will attempt to bind to the port specified by the ``LIBPROCESS_PORT`` environment variable. If this variable is not set, it will bind to an ephemeral port. :type port: ``int`` or None """ self._processes = {} self._links = defaultdict(set) self.delegate = delegate self.__loop = self.http = None self.__event_loop = loop self._ip = None ip, port = self.get_ip_port(ip, port) self.__sock, self.ip, self.port = self._make_socket(ip, port) self._connections = {} self._connection_callbacks = defaultdict(list) self._connection_callbacks_lock = threading.Lock() self.__context_name = 'CompactorContext(%s:%d)' % (self.ip, self.port) super(Context, self).__init__(name=self.__context_name) self.daemon = True self.lock = threading.Lock() self.__id = 1 self.__loop_started = threading.Event() def _assert_started(self): assert self.__loop_started.is_set() def start(self): """Start the context. This method must be called before calls to ``send`` and ``spawn``. This method is non-blocking. """ super(Context, self).start() self.__loop_started.wait() def __debug(self, msg): log.debug('%s: %s' % (self.__context_name, msg)) def run(self): # The entry point of the Context thread. This should not be called directly. loop = self.__event_loop or asyncio.new_event_loop() class CustomIOLoop(BaseAsyncIOLoop): def initialize(self): super(CustomIOLoop, self).initialize(loop, close_loop=False) self.__loop = CustomIOLoop() self.http = HTTPD(self.__sock, self.__loop) self.__loop_started.set() self.__loop.start() self.__loop.close() def _is_local(self, pid): return pid in self._processes def _assert_local_pid(self, pid): if not self._is_local(pid): raise self.InvalidProcess('Operation only valid for local processes!') def stop(self): """Stops the context. 
This terminates all PIDs and closes all connections.""" log.info('Stopping %s' % self) pids = list(self._processes) # Clean up the context for pid in pids: self.terminate(pid) while self._connections: pid = next(iter(self._connections)) conn = self._connections.pop(pid, None) if conn: conn.close() self.__loop.stop() def spawn(self, process): """Spawn a process. Spawning a process binds it to this context and assigns the process a pid which is returned. The process' ``initialize`` method is called. Note: A process cannot send messages until it is bound to a context. :param process: The process to bind to this context. :type process: :class:`Process` :return: The pid of the process. :rtype: :class:`PID` """ self._assert_started() process.bind(self) self.http.mount_process(process) self._processes[process.pid] = process process.initialize() return process.pid def _get_dispatch_method(self, pid, method): try: return getattr(self._processes[pid], method) except KeyError: raise self.InvalidProcess('Unknown process %s' % pid) except AttributeError: raise self.InvalidMethod('Unknown method %s on %s' % (method, pid)) def dispatch(self, pid, method, *args): """Call a method on another process by its pid. The method on the other process does not need to be installed with ``Process.install``. The call is serialized with all other calls on the context's event loop. The pid must be bound to this context. This function returns immediately. :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_callback(function, *args) def delay(self, amount, pid, method, *args): """Call a method on another process after a specified delay. This is equivalent to ``dispatch`` except with an additional amount of time to wait prior to invoking the call. 
This function returns immediately. :param amount: The amount of time to wait in seconds before making the call. :type amount: ``float`` or ``int`` :param pid: The pid of the process to be called. :type pid: :class:`PID` :param method: The name of the method to be called. :type method: ``str`` :return: Nothing """ self._assert_started() self._assert_local_pid(pid) function = self._get_dispatch_method(pid, method) self.__loop.add_timeout(self.__loop.time() + amount, function, *args) def __dispatch_on_connect_callbacks(self, to_pid, stream): with self._connection_callbacks_lock: callbacks = self._connection_callbacks.pop(to_pid, []) for callback in callbacks: log.debug('Dispatching connection callback %s for %s:%s -> %s' % ( callback, self.ip, self.port, to_pid)) self.__loop.add_callback(callback, stream) def _maybe_connect(self, to_pid, callback=None): """Asynchronously establish a connection to the remote pid.""" callback = stack_context.wrap(callback or (lambda stream: None)) def streaming_callback(data): # we are not guaranteed to get an acknowledgment, but log and discard bytes if we do. log.info('Received %d bytes from %s, discarding.' 
% (len(data), to_pid)) log.debug(' data: %r' % (data,)) def on_connect(exit_cb, stream): log.info('Connection to %s established' % to_pid) with self._connection_callbacks_lock: self._connections[to_pid] = stream self.__dispatch_on_connect_callbacks(to_pid, stream) self.__loop.add_callback( stream.read_until_close, exit_cb, streaming_callback=streaming_callback) create = False with self._connection_callbacks_lock: stream = self._connections.get(to_pid) callbacks = self._connection_callbacks.get(to_pid) if not stream: self._connection_callbacks[to_pid].append(callback) if not callbacks: create = True if stream: self.__loop.add_callback(callback, stream) return if not create: return sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) if not sock: raise self.SocketError('Failed opening socket') stream = IOStream(sock, io_loop=self.__loop) stream.set_nodelay(True) stream.set_close_callback(partial(self.__on_exit, to_pid, b'reached end of stream')) connect_callback = partial(on_connect, partial(self.__on_exit, to_pid), stream) log.info('Establishing connection to %s' % to_pid) stream.connect((to_pid.ip, to_pid.port), callback=connect_callback) if stream.closed(): raise self.SocketError('Failed to initiate stream connection') log.info('Maybe connected to %s' % to_pid) def _get_local_mailbox(self, pid, method): for mailbox, callable in self._processes[pid].iter_handlers(): if method == mailbox: return callable def send(self, from_pid, to_pid, method, body=None): """Send a message method from one pid to another with an optional body. Note: It is more idiomatic to send directly from a bound process rather than calling send on the context. If the destination pid is on the same context, the Context may skip the wire and route directly to process itself. ``from_pid`` must be bound to this context. This method returns immediately. :param from_pid: The pid of the sending process. :type from_pid: :class:`PID` :param to_pid: The pid of the destination process. 
:type to_pid: :class:`PID` :param method: The method name of the destination process. :type method: ``str`` :keyword body: Optional content to send along with the message. :type body: ``bytes`` or None :return: Nothing """ self._assert_started() self._assert_local_pid(from_pid) if self._is_local(to_pid): local_method = self._get_local_mailbox(to_pid, method) if local_method: log.info('Doing local dispatch of %s => %s (method: %s)' % (from_pid, to_pid, local_method)) self.__loop.add_callback(local_method, from_pid, body or b'') return else: # TODO(wickman) Consider failing hard if no local method is detected, otherwise we're # just going to do a POST and have it dropped on the floor. pass request_data = encode_request(from_pid, to_pid, method, body=body) log.info('Sending POST %s => %s (payload: %d bytes)' % ( from_pid, to_pid.as_url(method), len(request_data))) def on_connect(stream): log.info('Writing %s from %s to %s' % (len(request_data), from_pid, to_pid)) stream.write(request_data) log.info('Wrote %s from %s to %s' % (len(request_data), from_pid, to_pid)) self.__loop.add_callback(self._maybe_connect, to_pid, on_connect) def __erase_link(self, to_pid): for pid, links in self._links.items(): try: links.remove(to_pid) log.debug('PID link from %s <- %s exited.' % (pid, to_pid)) self._processes[pid].exited(to_pid) except KeyError: continue def __on_exit(self, to_pid, body): log.info('Disconnected from %s (%s)', to_pid, body) stream = self._connections.pop(to_pid, None) if stream is None: log.error('Received disconnection from %s but no stream found.' % to_pid) self.__erase_link(to_pid) def link(self, pid, to): """Link a local process to a possibly remote process. Note: It is more idiomatic to call ``link`` directly on the bound Process object instead. When ``pid`` is linked to ``to``, the termination of the ``to`` process (or the severing of its connection from the Process ``pid``) will result in the local process' ``exited`` method to be called with ``to``. 
This method returns immediately. :param pid: The pid of the linking process. :type pid: :class:`PID` :param to: The pid of the linked process. :type to: :class:`PID` :returns: Nothing """ self._assert_started() def really_link(): self._links[pid].add(to) log.info('Added link from %s to %s' % (pid, to)) def on_connect(stream): really_link() if self._is_local(pid): really_link() else: self.__loop.add_callback(self._maybe_connect, to, on_connect) def __str__(self): return 'Context(%s:%s)' % (self.ip, self.port)
wickman/compactor
compactor/process.py
Process.route
python
def route(cls, path):
    """Decorator marking a method as a routable HTTP endpoint.

    .. code-block:: python

        from compactor.process import Process

        class WebProcess(Process):
          @Process.route('/hello/world')
          def hello_world(self, handler):
            return handler.write('<html><title>hello world</title></html>')

    The decorated method receives a tornado RequestHandler.

    WARNING: This interface is alpha and may change in the future if or
    when tornado is removed as a compactor dependency.

    :param path: The endpoint to route to this method.
    :type path: ``str``
    """
    # Validate eagerly so a bad path fails at decoration time, not at
    # request time.
    if not path.startswith('/'):
        raise ValueError('Routes must start with "/"')

    def decorator(fn):
        # Tag the function; Process.iter_routes collects tagged methods.
        setattr(fn, cls.ROUTE_ATTRIBUTE, path)
        return fn

    return decorator
A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str``
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L15-L43
null
class Process(object): class Error(Exception): pass class UnboundProcess(Error): pass ROUTE_ATTRIBUTE = '__route__' INSTALL_ATTRIBUTE = '__mailbox__' @classmethod # TODO(wickman) Make mbox optional, defaulting to function.__name__. # TODO(wickman) Make INSTALL_ATTRIBUTE a defaultdict(list) so that we can # route multiple endpoints to a single method. @classmethod def install(cls, mbox): """A decorator to indicate a remotely callable method on a process. .. code-block:: python from compactor.process import Process class PingProcess(Process): @Process.install('ping') def ping(self, from_pid, body): # do something The installed method should take ``from_pid`` and ``body`` parameters. ``from_pid`` is the process calling the method. ``body`` is a ``bytes`` stream that was delivered with the message, possibly empty. :param mbox: Incoming messages to this "mailbox" will be dispatched to this method. :type mbox: ``str`` """ def wrap(fn): setattr(fn, cls.INSTALL_ATTRIBUTE, mbox) return fn return wrap def __init__(self, name): """Create a process with a given name. The process must still be bound to a context before it can send messages or link to other processes. :param name: The name of this process. :type name: ``str`` """ self.name = name self._delegates = {} self._http_handlers = dict(self.iter_routes()) self._message_handlers = dict(self.iter_handlers()) self._context = None def __iter_callables(self): # iterate over the methods in a way where we can differentiate methods from descriptors for method in type(self).__dict__.values(): if callable(method): # 'method' is the unbound method on the class -- we want to return the bound instancemethod try: yield getattr(self, method.__name__) except AttributeError: # This is possible for __name_mangled_attributes. 
continue def iter_routes(self): for function in self.__iter_callables(): if hasattr(function, self.ROUTE_ATTRIBUTE): yield getattr(function, self.ROUTE_ATTRIBUTE), function def iter_handlers(self): for function in self.__iter_callables(): if hasattr(function, self.INSTALL_ATTRIBUTE): yield getattr(function, self.INSTALL_ATTRIBUTE), function def _assert_bound(self): if not self._context: raise self.UnboundProcess('Cannot get pid of unbound process.') def bind(self, context): if not isinstance(context, Context): raise TypeError('Can only bind to a Context, got %s' % type(context)) self._context = context @property def pid(self): """The pid of this process. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return PID(self._context.ip, self._context.port, self.name) @property def context(self): """The context that this process is bound to. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return self._context @property def route_paths(self): return self._http_handlers.keys() @property def message_names(self): return self._message_handlers.keys() def delegate(self, name, pid): self._delegates[name] = pid def handle_message(self, name, from_pid, body): if name in self._message_handlers: self._message_handlers[name](from_pid, body) elif name in self._delegates: to = self._delegates[name] self._context.transport(to, name, body, from_pid) def handle_http(self, route, handler, *args, **kw): return self._http_handlers[route](handler, *args, **kw) def initialize(self): """Called when this process is spawned. Once this is called, it means a process is now routable. Subclasses should implement this to initialize state or possibly initiate connections to remote processes. """ def exited(self, pid): """Called when a linked process terminates or its connection is severed. :param pid: The pid of the linked process. 
:type pid: :class:`PID` """ def send(self, to, method, body=None): """Send a message to another process. Sending messages is done asynchronously and is not guaranteed to succeed. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param method: The method/mailbox name of the remote method. :type method: ``str`` :keyword body: The optional content to send with the message. :type body: ``bytes`` or None :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.send(self.pid, to, method, body) def link(self, to): """Link to another process. The ``link`` operation is not guaranteed to succeed. If it does, when the other process terminates, the ``exited`` method will be called with its pid. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.link(self.pid, to) def terminate(self): """Terminate this process. This unbinds it from the context to which it is bound. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() self._context.terminate(self.pid)
wickman/compactor
compactor/process.py
Process.install
python
def install(cls, mbox=None):
    """A decorator to indicate a remotely callable method on a process.

    .. code-block:: python

        from compactor.process import Process

        class PingProcess(Process):
          @Process.install('ping')
          def ping(self, from_pid, body):
            # do something

    The installed method should take ``from_pid`` and ``body`` parameters.
    ``from_pid`` is the process calling the method.  ``body`` is a ``bytes``
    stream that was delivered with the message, possibly empty.

    :keyword mbox: Incoming messages to this "mailbox" will be dispatched
        to this method.  If ``None`` (the default), the decorated
        function's ``__name__`` is used as the mailbox name.
    :type mbox: ``str`` or None
    """
    def wrap(fn):
        # Default the mailbox to the function name when not given
        # explicitly (resolves the long-standing TODO above).
        setattr(fn, cls.INSTALL_ATTRIBUTE,
                fn.__name__ if mbox is None else mbox)
        return fn
    return wrap
A decorator to indicate a remotely callable method on a process. .. code-block:: python from compactor.process import Process class PingProcess(Process): @Process.install('ping') def ping(self, from_pid, body): # do something The installed method should take ``from_pid`` and ``body`` parameters. ``from_pid`` is the process calling the method. ``body`` is a ``bytes`` stream that was delivered with the message, possibly empty. :param mbox: Incoming messages to this "mailbox" will be dispatched to this method. :type mbox: ``str``
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L49-L71
null
class Process(object): class Error(Exception): pass class UnboundProcess(Error): pass ROUTE_ATTRIBUTE = '__route__' INSTALL_ATTRIBUTE = '__mailbox__' @classmethod def route(cls, path): """A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str`` """ if not path.startswith('/'): raise ValueError('Routes must start with "/"') def wrap(fn): setattr(fn, cls.ROUTE_ATTRIBUTE, path) return fn return wrap # TODO(wickman) Make mbox optional, defaulting to function.__name__. # TODO(wickman) Make INSTALL_ATTRIBUTE a defaultdict(list) so that we can # route multiple endpoints to a single method. @classmethod def __init__(self, name): """Create a process with a given name. The process must still be bound to a context before it can send messages or link to other processes. :param name: The name of this process. :type name: ``str`` """ self.name = name self._delegates = {} self._http_handlers = dict(self.iter_routes()) self._message_handlers = dict(self.iter_handlers()) self._context = None def __iter_callables(self): # iterate over the methods in a way where we can differentiate methods from descriptors for method in type(self).__dict__.values(): if callable(method): # 'method' is the unbound method on the class -- we want to return the bound instancemethod try: yield getattr(self, method.__name__) except AttributeError: # This is possible for __name_mangled_attributes. 
continue def iter_routes(self): for function in self.__iter_callables(): if hasattr(function, self.ROUTE_ATTRIBUTE): yield getattr(function, self.ROUTE_ATTRIBUTE), function def iter_handlers(self): for function in self.__iter_callables(): if hasattr(function, self.INSTALL_ATTRIBUTE): yield getattr(function, self.INSTALL_ATTRIBUTE), function def _assert_bound(self): if not self._context: raise self.UnboundProcess('Cannot get pid of unbound process.') def bind(self, context): if not isinstance(context, Context): raise TypeError('Can only bind to a Context, got %s' % type(context)) self._context = context @property def pid(self): """The pid of this process. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return PID(self._context.ip, self._context.port, self.name) @property def context(self): """The context that this process is bound to. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return self._context @property def route_paths(self): return self._http_handlers.keys() @property def message_names(self): return self._message_handlers.keys() def delegate(self, name, pid): self._delegates[name] = pid def handle_message(self, name, from_pid, body): if name in self._message_handlers: self._message_handlers[name](from_pid, body) elif name in self._delegates: to = self._delegates[name] self._context.transport(to, name, body, from_pid) def handle_http(self, route, handler, *args, **kw): return self._http_handlers[route](handler, *args, **kw) def initialize(self): """Called when this process is spawned. Once this is called, it means a process is now routable. Subclasses should implement this to initialize state or possibly initiate connections to remote processes. """ def exited(self, pid): """Called when a linked process terminates or its connection is severed. :param pid: The pid of the linked process. 
:type pid: :class:`PID` """ def send(self, to, method, body=None): """Send a message to another process. Sending messages is done asynchronously and is not guaranteed to succeed. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param method: The method/mailbox name of the remote method. :type method: ``str`` :keyword body: The optional content to send with the message. :type body: ``bytes`` or None :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.send(self.pid, to, method, body) def link(self, to): """Link to another process. The ``link`` operation is not guaranteed to succeed. If it does, when the other process terminates, the ``exited`` method will be called with its pid. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.link(self.pid, to) def terminate(self): """Terminate this process. This unbinds it from the context to which it is bound. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() self._context.terminate(self.pid)
wickman/compactor
compactor/process.py
Process.pid
python
def pid(self): self._assert_bound() return PID(self._context.ip, self._context.port, self.name)
The pid of this process. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context.
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L120-L127
[ "def _assert_bound(self):\n if not self._context:\n raise self.UnboundProcess('Cannot get pid of unbound process.')\n" ]
class Process(object): class Error(Exception): pass class UnboundProcess(Error): pass ROUTE_ATTRIBUTE = '__route__' INSTALL_ATTRIBUTE = '__mailbox__' @classmethod def route(cls, path): """A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str`` """ if not path.startswith('/'): raise ValueError('Routes must start with "/"') def wrap(fn): setattr(fn, cls.ROUTE_ATTRIBUTE, path) return fn return wrap # TODO(wickman) Make mbox optional, defaulting to function.__name__. # TODO(wickman) Make INSTALL_ATTRIBUTE a defaultdict(list) so that we can # route multiple endpoints to a single method. @classmethod def install(cls, mbox): """A decorator to indicate a remotely callable method on a process. .. code-block:: python from compactor.process import Process class PingProcess(Process): @Process.install('ping') def ping(self, from_pid, body): # do something The installed method should take ``from_pid`` and ``body`` parameters. ``from_pid`` is the process calling the method. ``body`` is a ``bytes`` stream that was delivered with the message, possibly empty. :param mbox: Incoming messages to this "mailbox" will be dispatched to this method. :type mbox: ``str`` """ def wrap(fn): setattr(fn, cls.INSTALL_ATTRIBUTE, mbox) return fn return wrap def __init__(self, name): """Create a process with a given name. The process must still be bound to a context before it can send messages or link to other processes. :param name: The name of this process. 
:type name: ``str`` """ self.name = name self._delegates = {} self._http_handlers = dict(self.iter_routes()) self._message_handlers = dict(self.iter_handlers()) self._context = None def __iter_callables(self): # iterate over the methods in a way where we can differentiate methods from descriptors for method in type(self).__dict__.values(): if callable(method): # 'method' is the unbound method on the class -- we want to return the bound instancemethod try: yield getattr(self, method.__name__) except AttributeError: # This is possible for __name_mangled_attributes. continue def iter_routes(self): for function in self.__iter_callables(): if hasattr(function, self.ROUTE_ATTRIBUTE): yield getattr(function, self.ROUTE_ATTRIBUTE), function def iter_handlers(self): for function in self.__iter_callables(): if hasattr(function, self.INSTALL_ATTRIBUTE): yield getattr(function, self.INSTALL_ATTRIBUTE), function def _assert_bound(self): if not self._context: raise self.UnboundProcess('Cannot get pid of unbound process.') def bind(self, context): if not isinstance(context, Context): raise TypeError('Can only bind to a Context, got %s' % type(context)) self._context = context @property @property def context(self): """The context that this process is bound to. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. 
""" self._assert_bound() return self._context @property def route_paths(self): return self._http_handlers.keys() @property def message_names(self): return self._message_handlers.keys() def delegate(self, name, pid): self._delegates[name] = pid def handle_message(self, name, from_pid, body): if name in self._message_handlers: self._message_handlers[name](from_pid, body) elif name in self._delegates: to = self._delegates[name] self._context.transport(to, name, body, from_pid) def handle_http(self, route, handler, *args, **kw): return self._http_handlers[route](handler, *args, **kw) def initialize(self): """Called when this process is spawned. Once this is called, it means a process is now routable. Subclasses should implement this to initialize state or possibly initiate connections to remote processes. """ def exited(self, pid): """Called when a linked process terminates or its connection is severed. :param pid: The pid of the linked process. :type pid: :class:`PID` """ def send(self, to, method, body=None): """Send a message to another process. Sending messages is done asynchronously and is not guaranteed to succeed. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param method: The method/mailbox name of the remote method. :type method: ``str`` :keyword body: The optional content to send with the message. :type body: ``bytes`` or None :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.send(self.pid, to, method, body) def link(self, to): """Link to another process. The ``link`` operation is not guaranteed to succeed. If it does, when the other process terminates, the ``exited`` method will be called with its pid. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. 
:return: Nothing """ self._assert_bound() self._context.link(self.pid, to) def terminate(self): """Terminate this process. This unbinds it from the context to which it is bound. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() self._context.terminate(self.pid)
wickman/compactor
compactor/process.py
Process.send
python
def send(self, to, method, body=None): self._assert_bound() self._context.send(self.pid, to, method, body)
Send a message to another process. Sending messages is done asynchronously and is not guaranteed to succeed. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param method: The method/mailbox name of the remote method. :type method: ``str`` :keyword body: The optional content to send with the message. :type body: ``bytes`` or None :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L175-L193
[ "def _assert_bound(self):\n if not self._context:\n raise self.UnboundProcess('Cannot get pid of unbound process.')\n" ]
class Process(object): class Error(Exception): pass class UnboundProcess(Error): pass ROUTE_ATTRIBUTE = '__route__' INSTALL_ATTRIBUTE = '__mailbox__' @classmethod def route(cls, path): """A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str`` """ if not path.startswith('/'): raise ValueError('Routes must start with "/"') def wrap(fn): setattr(fn, cls.ROUTE_ATTRIBUTE, path) return fn return wrap # TODO(wickman) Make mbox optional, defaulting to function.__name__. # TODO(wickman) Make INSTALL_ATTRIBUTE a defaultdict(list) so that we can # route multiple endpoints to a single method. @classmethod def install(cls, mbox): """A decorator to indicate a remotely callable method on a process. .. code-block:: python from compactor.process import Process class PingProcess(Process): @Process.install('ping') def ping(self, from_pid, body): # do something The installed method should take ``from_pid`` and ``body`` parameters. ``from_pid`` is the process calling the method. ``body`` is a ``bytes`` stream that was delivered with the message, possibly empty. :param mbox: Incoming messages to this "mailbox" will be dispatched to this method. :type mbox: ``str`` """ def wrap(fn): setattr(fn, cls.INSTALL_ATTRIBUTE, mbox) return fn return wrap def __init__(self, name): """Create a process with a given name. The process must still be bound to a context before it can send messages or link to other processes. :param name: The name of this process. 
:type name: ``str`` """ self.name = name self._delegates = {} self._http_handlers = dict(self.iter_routes()) self._message_handlers = dict(self.iter_handlers()) self._context = None def __iter_callables(self): # iterate over the methods in a way where we can differentiate methods from descriptors for method in type(self).__dict__.values(): if callable(method): # 'method' is the unbound method on the class -- we want to return the bound instancemethod try: yield getattr(self, method.__name__) except AttributeError: # This is possible for __name_mangled_attributes. continue def iter_routes(self): for function in self.__iter_callables(): if hasattr(function, self.ROUTE_ATTRIBUTE): yield getattr(function, self.ROUTE_ATTRIBUTE), function def iter_handlers(self): for function in self.__iter_callables(): if hasattr(function, self.INSTALL_ATTRIBUTE): yield getattr(function, self.INSTALL_ATTRIBUTE), function def _assert_bound(self): if not self._context: raise self.UnboundProcess('Cannot get pid of unbound process.') def bind(self, context): if not isinstance(context, Context): raise TypeError('Can only bind to a Context, got %s' % type(context)) self._context = context @property def pid(self): """The pid of this process. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return PID(self._context.ip, self._context.port, self.name) @property def context(self): """The context that this process is bound to. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. 
""" self._assert_bound() return self._context @property def route_paths(self): return self._http_handlers.keys() @property def message_names(self): return self._message_handlers.keys() def delegate(self, name, pid): self._delegates[name] = pid def handle_message(self, name, from_pid, body): if name in self._message_handlers: self._message_handlers[name](from_pid, body) elif name in self._delegates: to = self._delegates[name] self._context.transport(to, name, body, from_pid) def handle_http(self, route, handler, *args, **kw): return self._http_handlers[route](handler, *args, **kw) def initialize(self): """Called when this process is spawned. Once this is called, it means a process is now routable. Subclasses should implement this to initialize state or possibly initiate connections to remote processes. """ def exited(self, pid): """Called when a linked process terminates or its connection is severed. :param pid: The pid of the linked process. :type pid: :class:`PID` """ def link(self, to): """Link to another process. The ``link`` operation is not guaranteed to succeed. If it does, when the other process terminates, the ``exited`` method will be called with its pid. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.link(self.pid, to) def terminate(self): """Terminate this process. This unbinds it from the context to which it is bound. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() self._context.terminate(self.pid)
wickman/compactor
compactor/process.py
Process.link
python
def link(self, to): self._assert_bound() self._context.link(self.pid, to)
Link to another process. The ``link`` operation is not guaranteed to succeed. If it does, when the other process terminates, the ``exited`` method will be called with its pid. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L195-L211
[ "def _assert_bound(self):\n if not self._context:\n raise self.UnboundProcess('Cannot get pid of unbound process.')\n" ]
class Process(object): class Error(Exception): pass class UnboundProcess(Error): pass ROUTE_ATTRIBUTE = '__route__' INSTALL_ATTRIBUTE = '__mailbox__' @classmethod def route(cls, path): """A decorator to indicate that a method should be a routable HTTP endpoint. .. code-block:: python from compactor.process import Process class WebProcess(Process): @Process.route('/hello/world') def hello_world(self, handler): return handler.write('<html><title>hello world</title></html>') The handler passed to the method is a tornado RequestHandler. WARNING: This interface is alpha and may change in the future if or when we remove tornado as a compactor dependency. :param path: The endpoint to route to this method. :type path: ``str`` """ if not path.startswith('/'): raise ValueError('Routes must start with "/"') def wrap(fn): setattr(fn, cls.ROUTE_ATTRIBUTE, path) return fn return wrap # TODO(wickman) Make mbox optional, defaulting to function.__name__. # TODO(wickman) Make INSTALL_ATTRIBUTE a defaultdict(list) so that we can # route multiple endpoints to a single method. @classmethod def install(cls, mbox): """A decorator to indicate a remotely callable method on a process. .. code-block:: python from compactor.process import Process class PingProcess(Process): @Process.install('ping') def ping(self, from_pid, body): # do something The installed method should take ``from_pid`` and ``body`` parameters. ``from_pid`` is the process calling the method. ``body`` is a ``bytes`` stream that was delivered with the message, possibly empty. :param mbox: Incoming messages to this "mailbox" will be dispatched to this method. :type mbox: ``str`` """ def wrap(fn): setattr(fn, cls.INSTALL_ATTRIBUTE, mbox) return fn return wrap def __init__(self, name): """Create a process with a given name. The process must still be bound to a context before it can send messages or link to other processes. :param name: The name of this process. 
:type name: ``str`` """ self.name = name self._delegates = {} self._http_handlers = dict(self.iter_routes()) self._message_handlers = dict(self.iter_handlers()) self._context = None def __iter_callables(self): # iterate over the methods in a way where we can differentiate methods from descriptors for method in type(self).__dict__.values(): if callable(method): # 'method' is the unbound method on the class -- we want to return the bound instancemethod try: yield getattr(self, method.__name__) except AttributeError: # This is possible for __name_mangled_attributes. continue def iter_routes(self): for function in self.__iter_callables(): if hasattr(function, self.ROUTE_ATTRIBUTE): yield getattr(function, self.ROUTE_ATTRIBUTE), function def iter_handlers(self): for function in self.__iter_callables(): if hasattr(function, self.INSTALL_ATTRIBUTE): yield getattr(function, self.INSTALL_ATTRIBUTE), function def _assert_bound(self): if not self._context: raise self.UnboundProcess('Cannot get pid of unbound process.') def bind(self, context): if not isinstance(context, Context): raise TypeError('Can only bind to a Context, got %s' % type(context)) self._context = context @property def pid(self): """The pid of this process. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() return PID(self._context.ip, self._context.port, self.name) @property def context(self): """The context that this process is bound to. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. 
""" self._assert_bound() return self._context @property def route_paths(self): return self._http_handlers.keys() @property def message_names(self): return self._message_handlers.keys() def delegate(self, name, pid): self._delegates[name] = pid def handle_message(self, name, from_pid, body): if name in self._message_handlers: self._message_handlers[name](from_pid, body) elif name in self._delegates: to = self._delegates[name] self._context.transport(to, name, body, from_pid) def handle_http(self, route, handler, *args, **kw): return self._http_handlers[route](handler, *args, **kw) def initialize(self): """Called when this process is spawned. Once this is called, it means a process is now routable. Subclasses should implement this to initialize state or possibly initiate connections to remote processes. """ def exited(self, pid): """Called when a linked process terminates or its connection is severed. :param pid: The pid of the linked process. :type pid: :class:`PID` """ def send(self, to, method, body=None): """Send a message to another process. Sending messages is done asynchronously and is not guaranteed to succeed. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param method: The method/mailbox name of the remote method. :type method: ``str`` :keyword body: The optional content to send with the message. :type body: ``bytes`` or None :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ self._assert_bound() self._context.send(self.pid, to, method, body) def terminate(self): """Terminate this process. This unbinds it from the context to which it is bound. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. """ self._assert_bound() self._context.terminate(self.pid)
wickman/compactor
compactor/process.py
ProtobufProcess.install
python
def install(cls, message_type): def wrap(fn): @functools.wraps(fn) def wrapped_fn(self, from_pid, message_str): message = message_type() message.MergeFromString(message_str) return fn(self, from_pid, message) return Process.install(message_type.DESCRIPTOR.full_name)(wrapped_fn) return wrap
A decorator to indicate a remotely callable method on a process using protocol buffers. .. code-block:: python from compactor.process import ProtobufProcess from messages_pb2 import RequestMessage, ResponseMessage class PingProcess(ProtobufProcess): @ProtobufProcess.install(RequestMessage) def ping(self, from_pid, message): # do something with message, a RequestMessage response = ResponseMessage(...) # send a protocol buffer which will get serialized on the wire. self.send(from_pid, response) The installed method should take ``from_pid`` and ``message`` parameters. ``from_pid`` is the process calling the method. ``message`` is a protocol buffer of the installed type. :param message_type: Incoming messages to this message_type will be dispatched to this method. :type message_type: A generated protocol buffer stub
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L227-L257
null
class ProtobufProcess(Process): @classmethod def send(self, to, message): """Send a message to another process. Same as ``Process.send`` except that ``message`` is a protocol buffer. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param message: The message to send :type method: A protocol buffer instance. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing """ super(ProtobufProcess, self).send(to, message.DESCRIPTOR.full_name, message.SerializeToString())
wickman/compactor
compactor/process.py
ProtobufProcess.send
python
def send(self, to, message): super(ProtobufProcess, self).send(to, message.DESCRIPTOR.full_name, message.SerializeToString())
Send a message to another process. Same as ``Process.send`` except that ``message`` is a protocol buffer. Returns immediately. :param to: The pid of the process to send a message. :type to: :class:`PID` :param message: The message to send :type method: A protocol buffer instance. :raises: Will raise a ``Process.UnboundProcess`` exception if the process is not bound to a context. :return: Nothing
train
https://github.com/wickman/compactor/blob/52714be3d84aa595a212feccb4d92ec250cede2a/compactor/process.py#L259-L274
[ "def send(self, to, method, body=None):\n \"\"\"Send a message to another process.\n\n Sending messages is done asynchronously and is not guaranteed to succeed.\n\n Returns immediately.\n\n :param to: The pid of the process to send a message.\n :type to: :class:`PID`\n :param method: The method/mailbox name of the remote method.\n :type method: ``str``\n :keyword body: The optional content to send with the message.\n :type body: ``bytes`` or None\n :raises: Will raise a ``Process.UnboundProcess`` exception if the\n process is not bound to a context.\n :return: Nothing\n \"\"\"\n self._assert_bound()\n self._context.send(self.pid, to, method, body)\n" ]
class ProtobufProcess(Process): @classmethod def install(cls, message_type): """A decorator to indicate a remotely callable method on a process using protocol buffers. .. code-block:: python from compactor.process import ProtobufProcess from messages_pb2 import RequestMessage, ResponseMessage class PingProcess(ProtobufProcess): @ProtobufProcess.install(RequestMessage) def ping(self, from_pid, message): # do something with message, a RequestMessage response = ResponseMessage(...) # send a protocol buffer which will get serialized on the wire. self.send(from_pid, response) The installed method should take ``from_pid`` and ``message`` parameters. ``from_pid`` is the process calling the method. ``message`` is a protocol buffer of the installed type. :param message_type: Incoming messages to this message_type will be dispatched to this method. :type message_type: A generated protocol buffer stub """ def wrap(fn): @functools.wraps(fn) def wrapped_fn(self, from_pid, message_str): message = message_type() message.MergeFromString(message_str) return fn(self, from_pid, message) return Process.install(message_type.DESCRIPTOR.full_name)(wrapped_fn) return wrap
pavelsof/ipalint
ipalint/read.py
Reader._save_stdin
python
def _save_stdin(self, stdin): self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path
Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L96-L113
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. """ if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. 
""" if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. """ permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. 
ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. """ if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. 
Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._open
python
def _open(self, file_path=None): if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f
Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L116-L133
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. 
""" if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. """ permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. 
ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. """ if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. 
Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader.get_dialect
python
def get_dialect(self): if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect()
Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L136-L174
[ "def _open(self, file_path=None):\n\t\"\"\"\n\tOpens the file specified by the given path. Raises ValueError if there\n\tis a problem with opening or reading the file.\n\t\"\"\"\n\tif file_path is None:\n\t\tfile_path = self.file_path\n\n\tif not os.path.exists(file_path):\n\t\traise ValueError('Could not find file: {}'.format(file_path))\n\n\ttry:\n\t\tf = open(file_path, encoding='utf-8', newline='')\n\texcept OSError as err:\n\t\tself.log.error(str(err))\n\t\traise ValueError('Could not open file: {}'.format(file_path))\n\n\treturn f\n", "def get_dialect(self):\n\t\"\"\"\n\tReturns a Dialect named tuple or None if the dataset file comprises a\n\tsingle column of data. If the dialect is not already known, then tries\n\tto determine it. Raises ValueError if it fails in the latter case.\n\t\"\"\"\n\tif self.is_single_col:\n\t\treturn None\n\n\tif self.delimiter and self.quotechar:\n\t\treturn Dialect(self.delimiter, self.quotechar,\n\t\t\t\t\tTrue if self.escapechar is None else False,\n\t\t\t\t\tself.escapechar)\n\n\text = os.path.basename(self.file_path).rsplit('.', maxsplit=1)\n\text = ext[1].lower() if len(ext) > 1 else None\n\n\tif ext in TSV_EXTENSIONS:\n\t\tself.delimiter = '\\t'\n\t\tself.quotechar = '\"'\n\n\telse:\n\t\tf = self._open()\n\t\tlines = f.read().splitlines()\n\t\tf.close()\n\n\t\tif lines:\n\t\t\tdialect = self._determine_dialect(lines)\n\t\telse:\n\t\t\tdialect = None\n\n\t\tif dialect is None:\n\t\t\tself.is_single_col = True\n\t\telse:\n\t\t\tself.delimiter = dialect.delimiter\n\t\t\tself.quotechar = dialect.quotechar\n\t\t\tself.escapechar = dialect.escapechar\n\n\treturn self.get_dialect()\n", "def _determine_dialect(self, lines):\n\t\"\"\"\n\tExpects a non-empty [] of strings; these would normally be the first\n\tfew lines of a csv file. 
Returns the most likely Dialect named tuple or\n\tNone if the data seems to form a single column.\n\n\tEnsures that using the returned dialect, all the lines given will have\n\tthe same number of columns.\n\n\tHelper for the get_dialect method.\n\t\"\"\"\n\tpermuts = [(quotechar, escapechar)\n\t\t\tfor quotechar in CSV_QUOTECHARS\n\t\t\tfor escapechar in CSV_ESCAPECHARS]\n\n\tfor delim in CSV_DELIMITERS:\n\t\tcounts = [line.count(delim) for line in lines]\n\n\t\tif min(counts) == 0:\n\t\t\tcontinue\n\n\t\tfor quotechar, escapechar in permuts:\n\t\t\tdoublequote = True if escapechar is None else False\n\n\t\t\treader = csv.reader(lines, delimiter=delim, quotechar=quotechar,\n\t\t\t\t\t\t\tdoublequote=doublequote, escapechar=escapechar)\n\n\t\t\ttry:\n\t\t\t\tassert len(set([len(line) for line in reader])) == 1\n\t\t\texcept AssertionError:\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\tbreak\n\t\telse:\n\t\t\tcontinue # no suitable quoting found\n\n\t\tbreak # found it!\n\n\telse:\n\t\treturn None\n\n\treturn Dialect(delim, quotechar, doublequote, escapechar)\n" ]
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. """ permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. 
""" reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. """ if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. 
Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._determine_dialect
python
def _determine_dialect(self, lines): permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar)
Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L177-L218
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. 
""" reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. """ if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. 
Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._get_csv_reader
python
def _get_csv_reader(self, f, dialect): reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader
Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L221-L250
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. 
""" permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. """ if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. 
""" dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._infer_ipa_col
python
def _infer_ipa_col(self, header): if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0]
Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L253-L286
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. 
""" permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. 
""" dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader.gen_ipa_data
python
def gen_ipa_data(self): dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close()
Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L289-L306
[ "def _open(self, file_path=None):\n\t\"\"\"\n\tOpens the file specified by the given path. Raises ValueError if there\n\tis a problem with opening or reading the file.\n\t\"\"\"\n\tif file_path is None:\n\t\tfile_path = self.file_path\n\n\tif not os.path.exists(file_path):\n\t\traise ValueError('Could not find file: {}'.format(file_path))\n\n\ttry:\n\t\tf = open(file_path, encoding='utf-8', newline='')\n\texcept OSError as err:\n\t\tself.log.error(str(err))\n\t\traise ValueError('Could not open file: {}'.format(file_path))\n\n\treturn f\n", "def get_dialect(self):\n\t\"\"\"\n\tReturns a Dialect named tuple or None if the dataset file comprises a\n\tsingle column of data. If the dialect is not already known, then tries\n\tto determine it. Raises ValueError if it fails in the latter case.\n\t\"\"\"\n\tif self.is_single_col:\n\t\treturn None\n\n\tif self.delimiter and self.quotechar:\n\t\treturn Dialect(self.delimiter, self.quotechar,\n\t\t\t\t\tTrue if self.escapechar is None else False,\n\t\t\t\t\tself.escapechar)\n\n\text = os.path.basename(self.file_path).rsplit('.', maxsplit=1)\n\text = ext[1].lower() if len(ext) > 1 else None\n\n\tif ext in TSV_EXTENSIONS:\n\t\tself.delimiter = '\\t'\n\t\tself.quotechar = '\"'\n\n\telse:\n\t\tf = self._open()\n\t\tlines = f.read().splitlines()\n\t\tf.close()\n\n\t\tif lines:\n\t\t\tdialect = self._determine_dialect(lines)\n\t\telse:\n\t\t\tdialect = None\n\n\t\tif dialect is None:\n\t\t\tself.is_single_col = True\n\t\telse:\n\t\t\tself.delimiter = dialect.delimiter\n\t\t\tself.quotechar = dialect.quotechar\n\t\t\tself.escapechar = dialect.escapechar\n\n\treturn self.get_dialect()\n", "def _gen_csv_data(self, f, dialect):\n\t\"\"\"\n\tYields (column data, row number) tuples from the given csv file\n\thandler, using the given Dialect named tuple instance. 
Depends on\n\tself.ipa_col being correctly set.\n\n\tHelper for the gen_ipa_data method.\n\t\"\"\"\n\treader = self._get_csv_reader(f, dialect)\n\n\tfor line in reader:\n\t\ttry:\n\t\t\tdatum = line[self.ipa_col]\n\t\texcept IndexError:\n\t\t\tmes = 'Could not find IPA data on line: {}'.format(line)\n\t\t\traise ValueError(mes)\n\n\t\tyield datum, reader.line_num\n", "def _gen_txt_data(self, f):\n\t\"\"\"\n\tYields (line, line number) tuples from the given file handler. Skips\n\tthe first line if the self.has_header flag is set.\n\n\tHelper for the gen_ipa_data method.\n\t\"\"\"\n\treader = iter(f)\n\n\tfor line_num, line in enumerate(reader):\n\t\tif line_num == 0 and self.has_header:\n\t\t\tcontinue\n\n\t\tdatum = line.rstrip('\\r\\n')\n\n\t\tyield datum, line_num+1\n" ]
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. 
""" permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. 
""" if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._gen_csv_data
python
def _gen_csv_data(self, f, dialect): reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num
Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L309-L326
[ "def _get_csv_reader(self, f, dialect):\n\t\"\"\"\n\tReturns a csv.reader for the given file handler and csv Dialect named\n\ttuple. If the file has a header, it already will be gone through.\n\n\tAlso, if self.ipa_col is not set, an attempt will be made to infer\n\twhich the IPA column is. ValueError would be raised otherwise.\n\t\"\"\"\n\treader = csv.reader(f,\n\t\t\t\tdelimiter = dialect.delimiter,\n\t\t\t\tquotechar = dialect.quotechar,\n\t\t\t\tdoublequote = dialect.doublequote,\n\t\t\t\tescapechar = dialect.escapechar)\n\n\tif self.has_header:\n\t\theader = next(reader)\n\t\tif not isinstance(self.ipa_col, int):\n\t\t\tself.ipa_col = self._infer_ipa_col(header)\n\n\telse:\n\t\tif not isinstance(self.ipa_col, int):\n\t\t\tif not self.ipa_col:\n\t\t\t\traise ValueError('Cannot infer IPA column without header')\n\n\t\t\ttry:\n\t\t\t\tself.ipa_col = int(self.ipa_col)\n\t\t\texcept ValueError:\n\t\t\t\traise ValueError('Cannot find column: {}'.format(self.ipa_col))\n\n\treturn reader\n" ]
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. 
""" permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. 
""" if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_txt_data(self, f): """ Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method. """ reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1 def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/read.py
Reader._gen_txt_data
python
def _gen_txt_data(self, f): reader = iter(f) for line_num, line in enumerate(reader): if line_num == 0 and self.has_header: continue datum = line.rstrip('\r\n') yield datum, line_num+1
Yields (line, line number) tuples from the given file handler. Skips the first line if the self.has_header flag is set. Helper for the gen_ipa_data method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/read.py#L329-L344
null
class Reader: """ Comprises the code for reading the dataset file which is to be linted. """ def __init__(self, dataset, has_header=True, ipa_col=None, delimiter=None, quotechar=None, escapechar=None): """ Constructor. Expects either the path to the file to be read or an input stream to read from. Optional args: has_header: whether the first line of the file will be ignored or not; ipa_col: the column from which to extract the IPA data; this could be either the column's index or name, or None (in which case the Reader will try to guess the column); delimiter and quotechar: will be used as csv.reader arguments if provided; if None, the Reader will try to guess the dialect. """ self.log = logging.getLogger(__name__) self.temp_dir = None if isinstance(dataset, str): self.file_path = dataset else: self.file_path = self._save_stdin(dataset) self.has_header = has_header self.ipa_col = ipa_col self.is_single_col = False self.delimiter = delimiter self.quotechar = quotechar self.escapechar = escapechar def _save_stdin(self, stdin): """ Creates a temporary dir (self.temp_dir) and saves the given input stream to a file within that dir. Returns the path to the file. The dir is removed in the __del__ method. """ self.temp_dir = TemporaryDirectory() file_path = os.path.join(self.temp_dir.name, 'dataset') try: with open(file_path, 'w') as f: for line in stdin: f.write(line) except TypeError: self.temp_dir.cleanup() raise ValueError('Could not read stdin') return file_path def _open(self, file_path=None): """ Opens the file specified by the given path. Raises ValueError if there is a problem with opening or reading the file. 
""" if file_path is None: file_path = self.file_path if not os.path.exists(file_path): raise ValueError('Could not find file: {}'.format(file_path)) try: f = open(file_path, encoding='utf-8', newline='') except OSError as err: self.log.error(str(err)) raise ValueError('Could not open file: {}'.format(file_path)) return f def get_dialect(self): """ Returns a Dialect named tuple or None if the dataset file comprises a single column of data. If the dialect is not already known, then tries to determine it. Raises ValueError if it fails in the latter case. """ if self.is_single_col: return None if self.delimiter and self.quotechar: return Dialect(self.delimiter, self.quotechar, True if self.escapechar is None else False, self.escapechar) ext = os.path.basename(self.file_path).rsplit('.', maxsplit=1) ext = ext[1].lower() if len(ext) > 1 else None if ext in TSV_EXTENSIONS: self.delimiter = '\t' self.quotechar = '"' else: f = self._open() lines = f.read().splitlines() f.close() if lines: dialect = self._determine_dialect(lines) else: dialect = None if dialect is None: self.is_single_col = True else: self.delimiter = dialect.delimiter self.quotechar = dialect.quotechar self.escapechar = dialect.escapechar return self.get_dialect() def _determine_dialect(self, lines): """ Expects a non-empty [] of strings; these would normally be the first few lines of a csv file. Returns the most likely Dialect named tuple or None if the data seems to form a single column. Ensures that using the returned dialect, all the lines given will have the same number of columns. Helper for the get_dialect method. 
""" permuts = [(quotechar, escapechar) for quotechar in CSV_QUOTECHARS for escapechar in CSV_ESCAPECHARS] for delim in CSV_DELIMITERS: counts = [line.count(delim) for line in lines] if min(counts) == 0: continue for quotechar, escapechar in permuts: doublequote = True if escapechar is None else False reader = csv.reader(lines, delimiter=delim, quotechar=quotechar, doublequote=doublequote, escapechar=escapechar) try: assert len(set([len(line) for line in reader])) == 1 except AssertionError: continue else: break else: continue # no suitable quoting found break # found it! else: return None return Dialect(delim, quotechar, doublequote, escapechar) def _get_csv_reader(self, f, dialect): """ Returns a csv.reader for the given file handler and csv Dialect named tuple. If the file has a header, it already will be gone through. Also, if self.ipa_col is not set, an attempt will be made to infer which the IPA column is. ValueError would be raised otherwise. """ reader = csv.reader(f, delimiter = dialect.delimiter, quotechar = dialect.quotechar, doublequote = dialect.doublequote, escapechar = dialect.escapechar) if self.has_header: header = next(reader) if not isinstance(self.ipa_col, int): self.ipa_col = self._infer_ipa_col(header) else: if not isinstance(self.ipa_col, int): if not self.ipa_col: raise ValueError('Cannot infer IPA column without header') try: self.ipa_col = int(self.ipa_col) except ValueError: raise ValueError('Cannot find column: {}'.format(self.ipa_col)) return reader def _infer_ipa_col(self, header): """ Returns the column (as index) containing the IPA data based on the header (the first line of the data file). Raises ValueError otherwise. If self.ipa_col is a string, it is assumed to be the column's name or index. Otherwise, several common IPA column names are tried. 
""" if self.ipa_col and isinstance(self.ipa_col, str): if self.ipa_col in header: return header.index(self.ipa_col) try: ipa_col = int(self.ipa_col) except ValueError: pass else: return ipa_col raise ValueError('Could not find column: {}'.format(self.ipa_col)) pot = [] for index, col_name in enumerate(header): col_name = col_name.lower() for name in IPA_COL_NAMES: if col_name.startswith(name): pot.append(index) if len(pot) == 0: raise ValueError('Could not find an IPA column') elif len(pot) > 1: raise ValueError('Could not decide which is the IPA column') return pot[0] def gen_ipa_data(self): """ Generator for iterating over the IPA strings found in the dataset file. Yields the IPA data string paired with the respective line number. """ dialect = self.get_dialect() f = self._open() try: if dialect: for res in self._gen_csv_data(f, dialect): yield res else: for res in self._gen_txt_data(f): yield res finally: f.close() def _gen_csv_data(self, f, dialect): """ Yields (column data, row number) tuples from the given csv file handler, using the given Dialect named tuple instance. Depends on self.ipa_col being correctly set. Helper for the gen_ipa_data method. """ reader = self._get_csv_reader(f, dialect) for line in reader: try: datum = line[self.ipa_col] except IndexError: mes = 'Could not find IPA data on line: {}'.format(line) raise ValueError(mes) yield datum, reader.line_num def __del__(self): """ Destructor. Removes the temporary directory, if such. """ if self.temp_dir: self.temp_dir.cleanup()
pavelsof/ipalint
ipalint/core.py
Core.lint
python
def lint(self, dataset=None, col=None, no_header=False,
		ignore_nfd=False, ignore_ws=False, linewise=False, no_lines=False):
	"""
	Runs the whole linting pipeline over the given dataset (a file path or
	an input stream) and returns the report as a string.

	The keyword args mirror the cli flags: col picks the IPA column,
	no_header treats the first row as data, ignore_nfd/ignore_ws silence
	the respective normalisation warnings, and linewise/no_lines control
	the report layout.
	"""
	reader = Reader(dataset, has_header=not no_header, ipa_col=col)

	recogniser = Recogniser()
	normaliser = Normaliser(nfc_chars=recogniser.get_nfc_chars())

	# normalise each string first, so the recogniser never sees
	# precomposed characters
	for string, line in reader.gen_ipa_data():
		recogniser.recognise(normaliser.normalise(string, line), line)

	reporter = Reporter()
	normaliser.report(reporter, ignore_nfd, ignore_ws)
	recogniser.report(reporter)

	return reporter.get_report(linewise, no_lines)
Returns a string containing all the issues found in the dataset defined by the given file path.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/core.py#L58-L77
[ "def get_nfc_chars(self):\n\t\"\"\"\n\tReturns the set of IPA symbols that are precomposed (decomposable)\n\tchars. These should not be decomposed during string normalisation,\n\tbecause they will not be recognised otherwise.\n\n\tIn IPA 2015 there is only one precomposed character: ç, the voiceless\n\tpalatal fricative.\n\t\"\"\"\n\tex = []\n\n\tfor char in self.ipa.keys():\n\t\tif len(char) == 1:\n\t\t\tdecomp = unicodedata.normalize('NFD', char)\n\t\t\tif len(decomp) == 2:\n\t\t\t\tex.append(char)\n\n\treturn set(ex)\n", "def recognise(self, string, line_num):\n\t\"\"\"\n\tSplits the string into chars and distributes these into the buckets of\n\tIPA and non-IPA symbols. Expects that there are no precomposed chars in\n\tthe string.\n\t\"\"\"\n\tsymbols = []\n\tunknown = []\n\n\tfor char in string:\n\t\tif char == SPACE:\n\t\t\tcontinue\n\n\t\ttry:\n\t\t\tname = unicodedata.name(char)\n\t\texcept ValueError:\n\t\t\tname = 'UNNAMED CHARACTER {}'.format(ord(char))\n\n\t\tif char in self.ipa:\n\t\t\tsymbol = Symbol(char, name, self.ipa[char])\n\t\t\tsymbols.append(symbol)\n\t\t\tself.ipa_symbols[symbol].append(line_num)\n\t\telse:\n\t\t\tsymbol = UnknownSymbol(char, name)\n\t\t\tunknown.append(symbol)\n\t\t\tself.unk_symbols[symbol].append(line_num)\n\n\treturn tuple(symbols), tuple(unknown)\n", "def report(self, reporter):\n\t\"\"\"\n\tAdds the problems that have been found so far to the given Reporter\n\tinstance.\n\t\"\"\"\n\tfor symbol in sorted(self.unk_symbols.keys()):\n\t\terr = '{} ({}) is not part of IPA'.format(symbol.char, symbol.name)\n\n\t\tif symbol.char in self.common_err:\n\t\t\trepl = self.common_err[symbol.char]\n\t\t\terr += ', suggested replacement is {}'.format(repl)\n\t\t\tif len(repl) == 1:\n\t\t\t\terr += ' ({})'.format(unicodedata.name(repl))\n\n\t\treporter.add(self.unk_symbols[symbol], err)\n", "def gen_ipa_data(self):\n\t\"\"\"\n\tGenerator for iterating over the IPA strings found in the dataset file.\n\tYields the IPA data string paired 
with the respective line number.\n\t\"\"\"\n\tdialect = self.get_dialect()\n\tf = self._open()\n\n\ttry:\n\t\tif dialect:\n\t\t\tfor res in self._gen_csv_data(f, dialect):\n\t\t\t\tyield res\n\t\telse:\n\t\t\tfor res in self._gen_txt_data(f):\n\t\t\t\tyield res\n\n\tfinally:\n\t\tf.close()\n", "def get_report(self, linewise=False, no_lines=False):\n\t\"\"\"\n\tReturns a string describing all the errors collected so far (the\n\treport). The first flag determines the type of report. The second flag\n\tis ignored if the first is set to True.\n\t\"\"\"\n\tif linewise:\n\t\treturn self._get_linewise_report()\n\telse:\n\t\treturn self._get_report(not no_lines)\n", "def normalise(self, string, line_num):\n\t\"\"\"\n\tStrips the whitespace and applies Unicode normalisation to the given\n\tstring. The second arg is used as an ID of the string when reporting\n\tits lint errors (if such).\n\t\"\"\"\n\tstripped = string.strip()\n\tif stripped != string:\n\t\tself.strip_errors.append(line_num)\n\n\tnfc_pos = [index\n\t\t\t\tfor index, char in enumerate(stripped)\n\t\t\t\tif char in self.nfc_chars]\n\n\tparts = []\n\tstart_pos = 0\n\n\tfor pos in nfc_pos:\n\t\tif pos > 0:\n\t\t\tparts.append(self.norm_f(stripped[start_pos:pos]))\n\n\t\tparts.append(stripped[pos])\n\t\tstart_pos = pos + 1\n\n\tif start_pos < len(stripped):\n\t\tparts.append(self.norm_f(stripped[start_pos:]))\n\n\tnorm = ''.join(parts)\n\n\tif norm != stripped:\n\t\tself.norm_errors.append(line_num)\n\n\treturn norm\n", "def report(self, reporter, ignore_nfd=False, ignore_ws=False):\n\t\"\"\"\n\tAdds the problems that have been found so far to the given Reporter\n\tinstance. The two keyword args can be used to restrict the error types\n\tto be reported.\n\t\"\"\"\n\tif self.strip_errors and not ignore_ws:\n\t\treporter.add(self.strip_errors, 'leading or trailing whitespace')\n\n\tif self.norm_errors and not ignore_nfd:\n\t\treporter.add(self.norm_errors, 'not in Unicode NFD')\n" ]
class Core: """ The controller singleton, an instance of which should be always present. This is what stays behind the cli and orchestrates the other modules. """ def __init__(self, verbose=False): """ Constructor. Configures the logging. The verbosity flag determines whether the min log level would be DEBUG or INFO. """ config = dict(DEFAULT_LOGGING) if verbose: config['root']['level'] = logging.DEBUG logging.config.dictConfig(config) self.log = logging.getLogger(__name__)
pavelsof/ipalint
ipalint/cli.py
Cli.run
python
def run(self, raw_args=None):
	"""
	Parses the given arguments (argparse falls back to sys.argv when these
	are None), runs the linter via a Core instance, prints the report, and
	exits through the parser.

	Any exception raised by the core is turned into a parser error, i.e.
	an error message on stderr and a non-zero exit code.
	"""
	parsed = vars(self.parser.parse_args(raw_args))

	try:
		report = Core().lint(**parsed)
	except Exception as err:
		self.parser.error(str(err))

	print(report)
	self.parser.exit()
Parses the given arguments (if these are None, then argparse's parser defaults to parsing sys.argv), inits a Core instance, calls its lint method with the respective arguments, and then exits.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/cli.py#L63-L79
[ "def lint(self, dataset=None, col=None, no_header=False,\n\t\t\tignore_nfd=False, ignore_ws=False, linewise=False, no_lines=False):\n\t\"\"\"\n\tReturns a string containing all the issues found in the dataset\n\tdefined by the given file path.\n\t\"\"\"\n\treader = Reader(dataset, has_header=not no_header, ipa_col=col)\n\n\trecog = Recogniser()\n\tnorm = Normaliser(nfc_chars=recog.get_nfc_chars())\n\n\tfor ipa_string, line_num in reader.gen_ipa_data():\n\t\tipa_string = norm.normalise(ipa_string, line_num)\n\t\trecog.recognise(ipa_string, line_num)\n\n\trep = Reporter()\n\tnorm.report(rep, ignore_nfd, ignore_ws)\n\trecog.report(rep)\n\n\treturn rep.get_report(linewise, no_lines)\n" ]
class Cli: """ Singleton that handles the user input, inits the whole machinery, and takes care of exiting the programme. """ def __init__(self): """ Constructor. Inits the argparse parser. """ usage = 'ipalint dataset [options]' desc = ('simple linter that checks datasets for ' 'IPA errors and inconsistencies') self.parser = argparse.ArgumentParser(usage=usage, description=desc, add_help=False) input_args = self.parser.add_argument_group('dataset arguments') input_args.add_argument('dataset', nargs='?', default=sys.stdin, help=( 'the dataset file to be linted; ' 'if omitted, ipalint reads from stdin ' '(thus, ipalint X and cat X | ipalint are equivalent)')) input_args.add_argument('--col', help=( 'specify the column containing the IPA data; ' 'this could be the column index (starting from 0) ' 'or the column name (if there is a header row)')) input_args.add_argument('--no-header', action='store_true', help=( 'do not skip the first row of the file; ' 'if this flag is not set, the first row will be skipped')) output_args = self.parser.add_argument_group('output arguments') output_args.add_argument('--ignore-nfd', action='store_true', help=( 'ignore warnings about strings that are not compliant with ' 'Unicode\'s NFD normal form')) output_args.add_argument('--ignore-ws', action='store_true', help=( 'ignore warnings about whitespace issues ' '(e.g. 
leading or trailing whitespace)')) output_args.add_argument('--linewise', action='store_true', help=( 'show errors line-by-line; ' 'by default each error is only shown once with ' 'the offending lines\' numbers stacked together')) output_args.add_argument('--no-lines', action='store_true', help=( 'only show the error messages, ' 'without the line numbers where the errors originate; ' 'ignored if --linewise is set')) meta_args = self.parser.add_argument_group('meta arguments') meta_args.add_argument('-h', '--help', action='help', help=( 'show this help message and exit')) meta_args.add_argument('-v', '--version', action='version', version=__version__, help='show the version number and exit')
pavelsof/ipalint
ipalint/report.py
Reporter.add
python
def add(self, lines, message):
	"""
	Registers a lint issue. The first arg is the [] of line numbers on
	which the issue is present; the second is the error message. Repeated
	messages are merged into a single entry, accumulating line numbers.
	"""
	key = Error(message)
	# setdefault keeps the first-seen insertion order of the OrderedDict
	self.errors.setdefault(key, []).extend(lines)
Adds a lint issue to the report. The first arg should be [] of lines on which the issue is present. The second arg should be the error message.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/report.py#L28-L38
null
class Reporter: """ An instance of this class is used to collect all the errors found by the various linters, so that they can be output together at the end. """ def __init__(self): """ Constructor. """ self.log = logging.getLogger(__name__) self.errors = OrderedDict() # error: [] of line numbers def clear(self): """ Removes the errors that have been collected so far. Useful for unit testing. """ self.errors = OrderedDict() def _get_linewise_report(self): """ Returns a report each line of which comprises a pair of an input line and an error. Unlike in the standard report, errors will appear as many times as they occur. Helper for the get_report method. """ d = defaultdict(list) # line: [] of errors for error, lines in self.errors.items(): for line_num in lines: d[line_num].append(error) return '\n'.join([ '{:>3} → {}'.format(line, error.string) for line in sorted(d.keys()) for error in d[line]]) def _get_report(self, with_line_nums=True): """ Returns a report which includes each distinct error only once, together with a list of the input lines where the error occurs. The latter will be omitted if flag is set to False. Helper for the get_report method. """ templ = '{} ← {}' if with_line_nums else '{}' return '\n'.join([ templ.format(error.string, ','.join(map(str, sorted(set(lines))))) for error, lines in self.errors.items()]) def get_report(self, linewise=False, no_lines=False): """ Returns a string describing all the errors collected so far (the report). The first flag determines the type of report. The second flag is ignored if the first is set to True. """ if linewise: return self._get_linewise_report() else: return self._get_report(not no_lines)
pavelsof/ipalint
ipalint/report.py
Reporter._get_linewise_report
python
def _get_linewise_report(self): d = defaultdict(list) # line: [] of errors for error, lines in self.errors.items(): for line_num in lines: d[line_num].append(error) return '\n'.join([ '{:>3} → {}'.format(line, error.string) for line in sorted(d.keys()) for error in d[line]])
Returns a report each line of which comprises a pair of an input line and an error. Unlike in the standard report, errors will appear as many times as they occur. Helper for the get_report method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/report.py#L49-L66
null
class Reporter: """ An instance of this class is used to collect all the errors found by the various linters, so that they can be output together at the end. """ def __init__(self): """ Constructor. """ self.log = logging.getLogger(__name__) self.errors = OrderedDict() # error: [] of line numbers def add(self, lines, message): """ Adds a lint issue to the report. The first arg should be [] of lines on which the issue is present. The second arg should be the error message. """ error = Error(message) if error not in self.errors: self.errors[error] = [] self.errors[error].extend(lines) def clear(self): """ Removes the errors that have been collected so far. Useful for unit testing. """ self.errors = OrderedDict() def _get_report(self, with_line_nums=True): """ Returns a report which includes each distinct error only once, together with a list of the input lines where the error occurs. The latter will be omitted if flag is set to False. Helper for the get_report method. """ templ = '{} ← {}' if with_line_nums else '{}' return '\n'.join([ templ.format(error.string, ','.join(map(str, sorted(set(lines))))) for error, lines in self.errors.items()]) def get_report(self, linewise=False, no_lines=False): """ Returns a string describing all the errors collected so far (the report). The first flag determines the type of report. The second flag is ignored if the first is set to True. """ if linewise: return self._get_linewise_report() else: return self._get_report(not no_lines)
pavelsof/ipalint
ipalint/report.py
Reporter._get_report
python
def _get_report(self, with_line_nums=True): templ = '{} ← {}' if with_line_nums else '{}' return '\n'.join([ templ.format(error.string, ','.join(map(str, sorted(set(lines))))) for error, lines in self.errors.items()])
Returns a report which includes each distinct error only once, together with a list of the input lines where the error occurs. The latter will be omitted if flag is set to False. Helper for the get_report method.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/report.py#L69-L81
null
class Reporter: """ An instance of this class is used to collect all the errors found by the various linters, so that they can be output together at the end. """ def __init__(self): """ Constructor. """ self.log = logging.getLogger(__name__) self.errors = OrderedDict() # error: [] of line numbers def add(self, lines, message): """ Adds a lint issue to the report. The first arg should be [] of lines on which the issue is present. The second arg should be the error message. """ error = Error(message) if error not in self.errors: self.errors[error] = [] self.errors[error].extend(lines) def clear(self): """ Removes the errors that have been collected so far. Useful for unit testing. """ self.errors = OrderedDict() def _get_linewise_report(self): """ Returns a report each line of which comprises a pair of an input line and an error. Unlike in the standard report, errors will appear as many times as they occur. Helper for the get_report method. """ d = defaultdict(list) # line: [] of errors for error, lines in self.errors.items(): for line_num in lines: d[line_num].append(error) return '\n'.join([ '{:>3} → {}'.format(line, error.string) for line in sorted(d.keys()) for error in d[line]]) def get_report(self, linewise=False, no_lines=False): """ Returns a string describing all the errors collected so far (the report). The first flag determines the type of report. The second flag is ignored if the first is set to True. """ if linewise: return self._get_linewise_report() else: return self._get_report(not no_lines)
pavelsof/ipalint
ipalint/report.py
Reporter.get_report
python
def get_report(self, linewise=False, no_lines=False):
	"""
	Returns a string describing all the errors collected so far (the
	report). With linewise set, each (line, error) pair gets its own row;
	otherwise each error appears once, with line numbers unless no_lines
	is set. The no_lines flag is ignored in linewise mode.
	"""
	return (self._get_linewise_report()
			if linewise
			else self._get_report(not no_lines))
Returns a string describing all the errors collected so far (the report). The first flag determines the type of report. The second flag is ignored if the first is set to True.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/report.py#L84-L93
[ "def _get_linewise_report(self):\n\t\"\"\"\n\tReturns a report each line of which comprises a pair of an input line\n\tand an error. Unlike in the standard report, errors will appear as many\n\ttimes as they occur.\n\n\tHelper for the get_report method.\n\t\"\"\"\n\td = defaultdict(list) # line: [] of errors\n\n\tfor error, lines in self.errors.items():\n\t\tfor line_num in lines:\n\t\t\td[line_num].append(error)\n\n\treturn '\\n'.join([\n\t\t'{:>3} → {}'.format(line, error.string)\n\t\tfor line in sorted(d.keys())\n\t\tfor error in d[line]])\n", "def _get_report(self, with_line_nums=True):\n\t\"\"\"\n\tReturns a report which includes each distinct error only once, together\n\twith a list of the input lines where the error occurs. The latter will\n\tbe omitted if flag is set to False.\n\n\tHelper for the get_report method.\n\t\"\"\"\n\ttempl = '{} ← {}' if with_line_nums else '{}'\n\n\treturn '\\n'.join([\n\t\ttempl.format(error.string, ','.join(map(str, sorted(set(lines)))))\n\t\tfor error, lines in self.errors.items()])\n" ]
class Reporter: """ An instance of this class is used to collect all the errors found by the various linters, so that they can be output together at the end. """ def __init__(self): """ Constructor. """ self.log = logging.getLogger(__name__) self.errors = OrderedDict() # error: [] of line numbers def add(self, lines, message): """ Adds a lint issue to the report. The first arg should be [] of lines on which the issue is present. The second arg should be the error message. """ error = Error(message) if error not in self.errors: self.errors[error] = [] self.errors[error].extend(lines) def clear(self): """ Removes the errors that have been collected so far. Useful for unit testing. """ self.errors = OrderedDict() def _get_linewise_report(self): """ Returns a report each line of which comprises a pair of an input line and an error. Unlike in the standard report, errors will appear as many times as they occur. Helper for the get_report method. """ d = defaultdict(list) # line: [] of errors for error, lines in self.errors.items(): for line_num in lines: d[line_num].append(error) return '\n'.join([ '{:>3} → {}'.format(line, error.string) for line in sorted(d.keys()) for error in d[line]]) def _get_report(self, with_line_nums=True): """ Returns a report which includes each distinct error only once, together with a list of the input lines where the error occurs. The latter will be omitted if flag is set to False. Helper for the get_report method. """ templ = '{} ← {}' if with_line_nums else '{}' return '\n'.join([ templ.format(error.string, ','.join(map(str, sorted(set(lines))))) for error, lines in self.errors.items()])
pavelsof/ipalint
ipalint/strnorm.py
Normaliser.normalise
python
def normalise(self, string, line_num):
	"""
	Strips the whitespace and applies Unicode normalisation to the given
	string, leaving the chars in self.nfc_chars untouched. The second arg
	identifies the string when recording its lint errors (if any) in
	self.strip_errors / self.norm_errors.
	"""
	trimmed = string.strip()
	if trimmed != string:
		self.strip_errors.append(line_num)

	pieces = []
	segment_start = 0

	# normalise the segments between the protected chars, copying the
	# protected chars through verbatim
	for idx, char in enumerate(trimmed):
		if char in self.nfc_chars:
			if idx > 0:
				pieces.append(self.norm_f(trimmed[segment_start:idx]))
			pieces.append(char)
			segment_start = idx + 1

	if segment_start < len(trimmed):
		pieces.append(self.norm_f(trimmed[segment_start:]))

	result = ''.join(pieces)

	if result != trimmed:
		self.norm_errors.append(line_num)

	return result
Strips the whitespace and applies Unicode normalisation to the given string. The second arg is used as an ID of the string when reporting its lint errors (if such).
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/strnorm.py#L28-L60
null
class Normaliser: """ Normalises strings and keeps track of those that (1) do not comply to Unicode's normal form; (2) have whitespace issues. """ def __init__(self, nfc_chars=[]): """ Constructor. The optional arg specifies the set of chars that should not be decomposed. """ self.log = logging.getLogger(__name__) self.norm_f = functools.partial(unicodedata.normalize, 'NFD') self.nfc_chars = set(nfc_chars) self.strip_errors = [] self.norm_errors = [] def report(self, reporter, ignore_nfd=False, ignore_ws=False): """ Adds the problems that have been found so far to the given Reporter instance. The two keyword args can be used to restrict the error types to be reported. """ if self.strip_errors and not ignore_ws: reporter.add(self.strip_errors, 'leading or trailing whitespace') if self.norm_errors and not ignore_nfd: reporter.add(self.norm_errors, 'not in Unicode NFD')
pavelsof/ipalint
ipalint/strnorm.py
Normaliser.report
python
def report(self, reporter, ignore_nfd=False, ignore_ws=False):
	"""
	Forwards the whitespace and normal-form problems collected so far to
	the given Reporter instance. Each keyword flag suppresses the
	respective category of errors.
	"""
	if not ignore_ws and self.strip_errors:
		reporter.add(self.strip_errors, 'leading or trailing whitespace')

	if not ignore_nfd and self.norm_errors:
		reporter.add(self.norm_errors, 'not in Unicode NFD')
Adds the problems that have been found so far to the given Reporter instance. The two keyword args can be used to restrict the error types to be reported.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/strnorm.py#L63-L73
[ "def add(self, lines, message):\n\t\"\"\"\n\tAdds a lint issue to the report. The first arg should be [] of lines on\n\twhich the issue is present. The second arg should be the error message.\n\t\"\"\"\n\terror = Error(message)\n\n\tif error not in self.errors:\n\t\tself.errors[error] = []\n\n\tself.errors[error].extend(lines)\n" ]
class Normaliser: """ Normalises strings and keeps track of those that (1) do not comply to Unicode's normal form; (2) have whitespace issues. """ def __init__(self, nfc_chars=[]): """ Constructor. The optional arg specifies the set of chars that should not be decomposed. """ self.log = logging.getLogger(__name__) self.norm_f = functools.partial(unicodedata.normalize, 'NFD') self.nfc_chars = set(nfc_chars) self.strip_errors = [] self.norm_errors = [] def normalise(self, string, line_num): """ Strips the whitespace and applies Unicode normalisation to the given string. The second arg is used as an ID of the string when reporting its lint errors (if such). """ stripped = string.strip() if stripped != string: self.strip_errors.append(line_num) nfc_pos = [index for index, char in enumerate(stripped) if char in self.nfc_chars] parts = [] start_pos = 0 for pos in nfc_pos: if pos > 0: parts.append(self.norm_f(stripped[start_pos:pos])) parts.append(stripped[pos]) start_pos = pos + 1 if start_pos < len(stripped): parts.append(self.norm_f(stripped[start_pos:])) norm = ''.join(parts) if norm != stripped: self.norm_errors.append(line_num) return norm
pavelsof/ipalint
ipalint/ipa.py
Recogniser._load_ipa_data
python
def _load_ipa_data(self, ipa_data_path): ipa = {} try: with open(ipa_data_path, newline='') as f: reader = csv.reader(f, delimiter='\t') for line in reader: if len(line) != 2: continue if line[0] in ipa: raise IPADataError('Bad IPA data file') ipa[line[0]] = line[1] except (IOError, ValueError) as err: self.log.error(str(err)) raise IPADataError('Could not open the IPA data file') return ipa
Loads and returns the {symbol: name} dictionary stored in the data/ipa.tsv file.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/ipa.py#L81-L104
null
class Recogniser:
	"""
	Knows how to recognise IPA symbols from non-IPA ones and keeps track of
	all the encountered symbols.
	"""
	
	def __init__(self):
		"""
		Constructor. Raises IPADataError if the IPA data cannot be loaded.
		"""
		self.log = logging.getLogger(__name__)
		
		# {symbol: name} of all valid IPA symbols and {bad: good} of common
		# non-IPA substitutes, both read from tab-separated data files
		self.ipa = self._load_ipa_data(IPA_DATA_PATH)
		self.common_err = self._load_common_err_data(COMMON_ERR_DATA_PATH)
		
		# buckets populated by recognise() and consumed by report()
		self.ipa_symbols = defaultdict(list)  # Symbol: [] of line_num
		self.unk_symbols = defaultdict(list)  # UnknownSymbol: [] of line_num
	
	def _load_common_err_data(self, common_err_data_path):
		"""
		Loads and returns the {bad: good} dictionary stored in the common
		errors data file. Note that the dict's keys are single characters
		while the values do not have to be. The method also asserts that all
		the values are valid IPA strings.
		
		Raises IPADataError if the file cannot be read or is malformed.
		"""
		common_err = {}
		
		try:
			with open(common_err_data_path, newline='') as f:
				reader = csv.reader(f, delimiter='\t')
				for line in reader:
					# rows without exactly two columns are silently skipped
					if len(line) != 2:
						continue
					
					try:
						# keys must be unique; every char of a replacement
						# must itself be a valid IPA symbol
						# NOTE(review): assert is stripped under python -O,
						# which would silently disable this validation
						assert line[0] not in common_err
						assert all([char in self.ipa for char in line[1]])
					except AssertionError:
						raise IPADataError('Bad common IPA errors file')
					
					common_err[line[0]] = line[1]
		
		except (IOError, ValueError) as err:
			self.log.error(str(err))
			raise IPADataError('Could not open the common IPA errors data file')
		
		return common_err
	
	def get_nfc_chars(self):
		"""
		Returns the set of IPA symbols that are precomposed (decomposable)
		chars. These should not be decomposed during string normalisation,
		because they will not be recognised otherwise.
		
		In IPA 2015 there is only one precomposed character: ç, the voiceless
		palatal fricative.
		"""
		ex = []
		
		for char in self.ipa.keys():
			# a single code point that expands to two under NFD is a
			# precomposed (base char + combining mark) symbol
			if len(char) == 1:
				decomp = unicodedata.normalize('NFD', char)
				if len(decomp) == 2:
					ex.append(char)
		
		return set(ex)
	
	def recognise(self, string, line_num):
		"""
		Splits the string into chars and distributes these into the buckets
		of IPA and non-IPA symbols. Expects that there are no precomposed
		chars in the string.
		
		Returns a (symbols, unknown) pair of tuples; each char is also
		recorded in self.ipa_symbols or self.unk_symbols under line_num.
		"""
		symbols = []
		unknown = []
		
		for char in string:
			# SPACE chars are skipped (module-level constant defined
			# elsewhere — presumably a token delimiter; confirm there)
			if char == SPACE:
				continue
			
			try:
				name = unicodedata.name(char)
			except ValueError:
				# some code points have no Unicode name
				name = 'UNNAMED CHARACTER {}'.format(ord(char))
			
			if char in self.ipa:
				symbol = Symbol(char, name, self.ipa[char])
				symbols.append(symbol)
				self.ipa_symbols[symbol].append(line_num)
			else:
				symbol = UnknownSymbol(char, name)
				unknown.append(symbol)
				self.unk_symbols[symbol].append(line_num)
		
		return tuple(symbols), tuple(unknown)
	
	def report(self, reporter):
		"""
		Adds the problems that have been found so far to the given Reporter
		instance.
		"""
		# sorted iteration keeps the report order deterministic across runs
		for symbol in sorted(self.unk_symbols.keys()):
			err = '{} ({}) is not part of IPA'.format(symbol.char, symbol.name)
			
			if symbol.char in self.common_err:
				repl = self.common_err[symbol.char]
				err += ', suggested replacement is {}'.format(repl)
				# only a single-char replacement has a single Unicode name
				if len(repl) == 1:
					err += ' ({})'.format(unicodedata.name(repl))
			
			reporter.add(self.unk_symbols[symbol], err)
pavelsof/ipalint
ipalint/ipa.py
Recogniser._load_common_err_data
python
def _load_common_err_data(self, common_err_data_path): common_err = {} try: with open(common_err_data_path, newline='') as f: reader = csv.reader(f, delimiter='\t') for line in reader: if len(line) != 2: continue try: assert line[0] not in common_err assert all([char in self.ipa for char in line[1]]) except AssertionError: raise IPADataError('Bad common IPA errors file') common_err[line[0]] = line[1] except (IOError, ValueError) as err: self.log.error(str(err)) raise IPADataError('Could not open the common IPA errors data file') return common_err
Loads and returns the {bad: good} dictionary stored in the common errors data file. Note that the dict's keys are single characters while the values do not have to be. The method also asserts that all the values are valid IPA strings.
train
https://github.com/pavelsof/ipalint/blob/763e5979ede6980cbfc746b06fd007b379762eeb/ipalint/ipa.py#L107-L135
null
class Recogniser:
	"""
	Knows how to recognise IPA symbols from non-IPA ones and keeps track of
	all the encountered symbols.
	"""
	
	def __init__(self):
		"""
		Constructor. Raises IPADataError if the IPA data cannot be loaded.
		"""
		self.log = logging.getLogger(__name__)
		
		# {symbol: name} of all valid IPA symbols and {bad: good} of common
		# non-IPA substitutes, both read from tab-separated data files
		self.ipa = self._load_ipa_data(IPA_DATA_PATH)
		self.common_err = self._load_common_err_data(COMMON_ERR_DATA_PATH)
		
		# buckets populated by recognise() and consumed by report()
		self.ipa_symbols = defaultdict(list)  # Symbol: [] of line_num
		self.unk_symbols = defaultdict(list)  # UnknownSymbol: [] of line_num
	
	def _load_ipa_data(self, ipa_data_path):
		"""
		Loads and returns the {symbol: name} dictionary stored in the
		data/ipa.tsv file.
		
		Raises IPADataError if the file cannot be read or if it contains a
		duplicate symbol.
		"""
		ipa = {}
		
		try:
			with open(ipa_data_path, newline='') as f:
				reader = csv.reader(f, delimiter='\t')
				for line in reader:
					# rows without exactly two columns are silently skipped
					if len(line) != 2:
						continue
					# a duplicate symbol means the data file is corrupt
					if line[0] in ipa:
						raise IPADataError('Bad IPA data file')
					ipa[line[0]] = line[1]
		except (IOError, ValueError) as err:
			self.log.error(str(err))
			raise IPADataError('Could not open the IPA data file')
		
		return ipa
	
	def get_nfc_chars(self):
		"""
		Returns the set of IPA symbols that are precomposed (decomposable)
		chars. These should not be decomposed during string normalisation,
		because they will not be recognised otherwise.
		
		In IPA 2015 there is only one precomposed character: ç, the voiceless
		palatal fricative.
		"""
		ex = []
		
		for char in self.ipa.keys():
			# a single code point that expands to two under NFD is a
			# precomposed (base char + combining mark) symbol
			if len(char) == 1:
				decomp = unicodedata.normalize('NFD', char)
				if len(decomp) == 2:
					ex.append(char)
		
		return set(ex)
	
	def recognise(self, string, line_num):
		"""
		Splits the string into chars and distributes these into the buckets
		of IPA and non-IPA symbols. Expects that there are no precomposed
		chars in the string.
		
		Returns a (symbols, unknown) pair of tuples; each char is also
		recorded in self.ipa_symbols or self.unk_symbols under line_num.
		"""
		symbols = []
		unknown = []
		
		for char in string:
			# SPACE chars are skipped (module-level constant defined
			# elsewhere — presumably a token delimiter; confirm there)
			if char == SPACE:
				continue
			
			try:
				name = unicodedata.name(char)
			except ValueError:
				# some code points have no Unicode name
				name = 'UNNAMED CHARACTER {}'.format(ord(char))
			
			if char in self.ipa:
				symbol = Symbol(char, name, self.ipa[char])
				symbols.append(symbol)
				self.ipa_symbols[symbol].append(line_num)
			else:
				symbol = UnknownSymbol(char, name)
				unknown.append(symbol)
				self.unk_symbols[symbol].append(line_num)
		
		return tuple(symbols), tuple(unknown)
	
	def report(self, reporter):
		"""
		Adds the problems that have been found so far to the given Reporter
		instance.
		"""
		# sorted iteration keeps the report order deterministic across runs
		for symbol in sorted(self.unk_symbols.keys()):
			err = '{} ({}) is not part of IPA'.format(symbol.char, symbol.name)
			
			if symbol.char in self.common_err:
				repl = self.common_err[symbol.char]
				err += ', suggested replacement is {}'.format(repl)
				# only a single-char replacement has a single Unicode name
				if len(repl) == 1:
					err += ' ({})'.format(unicodedata.name(repl))
			
			reporter.add(self.unk_symbols[symbol], err)