repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
sdispater/cachy
cachy/tagged_cache.py
TaggedCache.tagged_item_key
def tagged_item_key(self, key): """ Get a fully qualified key for a tagged item. :param key: The cache key :type key: str :rtype: str """ return '%s:%s' % (hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key)
python
def tagged_item_key(self, key): """ Get a fully qualified key for a tagged item. :param key: The cache key :type key: str :rtype: str """ return '%s:%s' % (hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key)
[ "def", "tagged_item_key", "(", "self", ",", "key", ")", ":", "return", "'%s:%s'", "%", "(", "hashlib", ".", "sha1", "(", "encode", "(", "self", ".", "_tags", ".", "get_namespace", "(", ")", ")", ")", ".", "hexdigest", "(", ")", ",", "key", ")" ]
Get a fully qualified key for a tagged item. :param key: The cache key :type key: str :rtype: str
[ "Get", "a", "fully", "qualified", "key", "for", "a", "tagged", "item", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/tagged_cache.py#L206-L215
train
48,500
sdispater/cachy
cachy/tagged_cache.py
TaggedCache._get_minutes
def _get_minutes(self, duration): """ Calculate the number of minutes with the given duration. :param duration: The duration :type duration: int or datetime :rtype: int or None """ if isinstance(duration, datetime.datetime): from_now = (duration - datetime.datetime.now()).total_seconds() from_now = math.ceil(from_now / 60) if from_now > 0: return from_now return return duration
python
def _get_minutes(self, duration): """ Calculate the number of minutes with the given duration. :param duration: The duration :type duration: int or datetime :rtype: int or None """ if isinstance(duration, datetime.datetime): from_now = (duration - datetime.datetime.now()).total_seconds() from_now = math.ceil(from_now / 60) if from_now > 0: return from_now return return duration
[ "def", "_get_minutes", "(", "self", ",", "duration", ")", ":", "if", "isinstance", "(", "duration", ",", "datetime", ".", "datetime", ")", ":", "from_now", "=", "(", "duration", "-", "datetime", ".", "datetime", ".", "now", "(", ")", ")", ".", "total_s...
Calculate the number of minutes with the given duration. :param duration: The duration :type duration: int or datetime :rtype: int or None
[ "Calculate", "the", "number", "of", "minutes", "with", "the", "given", "duration", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/tagged_cache.py#L225-L243
train
48,501
fabaff/python-netdata
example.py
main
async def main(): """Get the data from a Netdata instance.""" with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session, data='data') await data.get_data('system.cpu') print(json.dumps(data.values, indent=4, sort_keys=True)) # Print the current value of the system's CPU print("CPU System:", round(data.values['system'], 2)) with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session, data='alarms') await data.get_alarms() print(data.alarms) with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session) await data.get_allmetrics() print(data.metrics) # Print the current value for the system's CPU print("CPU System:", round(data.metrics['system.cpu'] ['dimensions']['system']['value'], 2))
python
async def main(): """Get the data from a Netdata instance.""" with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session, data='data') await data.get_data('system.cpu') print(json.dumps(data.values, indent=4, sort_keys=True)) # Print the current value of the system's CPU print("CPU System:", round(data.values['system'], 2)) with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session, data='alarms') await data.get_alarms() print(data.alarms) with aiohttp.ClientSession() as session: data = Netdata('localhost', loop, session) await data.get_allmetrics() print(data.metrics) # Print the current value for the system's CPU print("CPU System:", round(data.metrics['system.cpu'] ['dimensions']['system']['value'], 2))
[ "async", "def", "main", "(", ")", ":", "with", "aiohttp", ".", "ClientSession", "(", ")", "as", "session", ":", "data", "=", "Netdata", "(", "'localhost'", ",", "loop", ",", "session", ",", "data", "=", "'data'", ")", "await", "data", ".", "get_data", ...
Get the data from a Netdata instance.
[ "Get", "the", "data", "from", "a", "Netdata", "instance", "." ]
bca5d58f84a0fc849b9bb16a00959a0b33d13a67
https://github.com/fabaff/python-netdata/blob/bca5d58f84a0fc849b9bb16a00959a0b33d13a67/example.py#L9-L34
train
48,502
inveniosoftware/invenio-logging
invenio_logging/console.py
InvenioLoggingConsole.install_handler
def install_handler(self, app): """Install logging handler.""" # Configure python logging if app.config['LOGGING_CONSOLE_PYWARNINGS']: self.capture_pywarnings(logging.StreamHandler()) if app.config['LOGGING_CONSOLE_LEVEL'] is not None: for h in app.logger.handlers: h.setLevel(app.config['LOGGING_CONSOLE_LEVEL']) # Add request_id to log record app.logger.addFilter(add_request_id_filter)
python
def install_handler(self, app): """Install logging handler.""" # Configure python logging if app.config['LOGGING_CONSOLE_PYWARNINGS']: self.capture_pywarnings(logging.StreamHandler()) if app.config['LOGGING_CONSOLE_LEVEL'] is not None: for h in app.logger.handlers: h.setLevel(app.config['LOGGING_CONSOLE_LEVEL']) # Add request_id to log record app.logger.addFilter(add_request_id_filter)
[ "def", "install_handler", "(", "self", ",", "app", ")", ":", "# Configure python logging", "if", "app", ".", "config", "[", "'LOGGING_CONSOLE_PYWARNINGS'", "]", ":", "self", ".", "capture_pywarnings", "(", "logging", ".", "StreamHandler", "(", ")", ")", "if", ...
Install logging handler.
[ "Install", "logging", "handler", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/console.py#L46-L57
train
48,503
load-tools/netort
netort/process.py
execute
def execute(cmd, shell=False, poll_period=1.0, catch_out=False): """Execute UNIX command and wait for its completion Args: cmd (str or list): command to execute shell (bool): invoke inside shell environment catch_out (bool): collect process' output Returns: returncode (int): process return code stdout (str): collected process stdout (only if catch_out set to true) stderr (str): collected process stderr (only if catch_out set to true) """ # FIXME: move to module level log = logging.getLogger(__name__) log.debug("Starting: %s", cmd) stdout = "" stderr = "" if not shell and isinstance(cmd, string_types): cmd = shlex.split(cmd) if catch_out: process = subprocess.Popen( cmd, shell=shell, stderr=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) else: process = subprocess.Popen(cmd, shell=shell, close_fds=True) stdout, stderr = process.communicate() if stderr: log.error("There were errors:\n%s", stderr) if stdout: log.debug("Process output:\n%s", stdout) returncode = process.returncode log.debug("Process exit code: %s", returncode) return returncode, stdout, stderr
python
def execute(cmd, shell=False, poll_period=1.0, catch_out=False): """Execute UNIX command and wait for its completion Args: cmd (str or list): command to execute shell (bool): invoke inside shell environment catch_out (bool): collect process' output Returns: returncode (int): process return code stdout (str): collected process stdout (only if catch_out set to true) stderr (str): collected process stderr (only if catch_out set to true) """ # FIXME: move to module level log = logging.getLogger(__name__) log.debug("Starting: %s", cmd) stdout = "" stderr = "" if not shell and isinstance(cmd, string_types): cmd = shlex.split(cmd) if catch_out: process = subprocess.Popen( cmd, shell=shell, stderr=subprocess.PIPE, stdout=subprocess.PIPE, close_fds=True) else: process = subprocess.Popen(cmd, shell=shell, close_fds=True) stdout, stderr = process.communicate() if stderr: log.error("There were errors:\n%s", stderr) if stdout: log.debug("Process output:\n%s", stdout) returncode = process.returncode log.debug("Process exit code: %s", returncode) return returncode, stdout, stderr
[ "def", "execute", "(", "cmd", ",", "shell", "=", "False", ",", "poll_period", "=", "1.0", ",", "catch_out", "=", "False", ")", ":", "# FIXME: move to module level", "log", "=", "logging", ".", "getLogger", "(", "__name__", ")", "log", ".", "debug", "(", ...
Execute UNIX command and wait for its completion Args: cmd (str or list): command to execute shell (bool): invoke inside shell environment catch_out (bool): collect process' output Returns: returncode (int): process return code stdout (str): collected process stdout (only if catch_out set to true) stderr (str): collected process stderr (only if catch_out set to true)
[ "Execute", "UNIX", "command", "and", "wait", "for", "its", "completion" ]
b5233a70cea74108857ea24ba5c37975057ca00f
https://github.com/load-tools/netort/blob/b5233a70cea74108857ea24ba5c37975057ca00f/netort/process.py#L8-L49
train
48,504
sdispater/cachy
cachy/stores/file_store.py
FileStore.flush
def flush(self): """ Remove all items from the cache. """ if os.path.isdir(self._directory): for root, dirs, files in os.walk(self._directory, topdown=False): for name in files: os.remove(os.path.join(root, name)) for name in dirs: os.rmdir(os.path.join(root, name))
python
def flush(self): """ Remove all items from the cache. """ if os.path.isdir(self._directory): for root, dirs, files in os.walk(self._directory, topdown=False): for name in files: os.remove(os.path.join(root, name)) for name in dirs: os.rmdir(os.path.join(root, name))
[ "def", "flush", "(", "self", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "self", ".", "_directory", ")", ":", "for", "root", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "self", ".", "_directory", ",", "topdown", "=", "False...
Remove all items from the cache.
[ "Remove", "all", "items", "from", "the", "cache", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/stores/file_store.py#L178-L188
train
48,505
sdispater/cachy
cachy/stores/file_store.py
FileStore._path
def _path(self, key): """ Get the full path for the given cache key. :param key: The cache key :type key: str :rtype: str """ hash_type, parts_count = self._HASHES[self._hash_type] h = hash_type(encode(key)).hexdigest() parts = [h[i:i+2] for i in range(0, len(h), 2)][:parts_count] return os.path.join(self._directory, os.path.sep.join(parts), h)
python
def _path(self, key): """ Get the full path for the given cache key. :param key: The cache key :type key: str :rtype: str """ hash_type, parts_count = self._HASHES[self._hash_type] h = hash_type(encode(key)).hexdigest() parts = [h[i:i+2] for i in range(0, len(h), 2)][:parts_count] return os.path.join(self._directory, os.path.sep.join(parts), h)
[ "def", "_path", "(", "self", ",", "key", ")", ":", "hash_type", ",", "parts_count", "=", "self", ".", "_HASHES", "[", "self", ".", "_hash_type", "]", "h", "=", "hash_type", "(", "encode", "(", "key", ")", ")", ".", "hexdigest", "(", ")", "parts", "...
Get the full path for the given cache key. :param key: The cache key :type key: str :rtype: str
[ "Get", "the", "full", "path", "for", "the", "given", "cache", "key", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/stores/file_store.py#L190-L204
train
48,506
sdispater/cachy
cachy/contracts/taggable_store.py
TaggableStore.tags
def tags(self, *names): """ Begin executing a new tags operation. :param names: The tags :type names: tuple :rtype: cachy.tagged_cache.TaggedCache """ if len(names) == 1 and isinstance(names[0], list): names = names[0] return TaggedCache(self, TagSet(self, names))
python
def tags(self, *names): """ Begin executing a new tags operation. :param names: The tags :type names: tuple :rtype: cachy.tagged_cache.TaggedCache """ if len(names) == 1 and isinstance(names[0], list): names = names[0] return TaggedCache(self, TagSet(self, names))
[ "def", "tags", "(", "self", ",", "*", "names", ")", ":", "if", "len", "(", "names", ")", "==", "1", "and", "isinstance", "(", "names", "[", "0", "]", ",", "list", ")", ":", "names", "=", "names", "[", "0", "]", "return", "TaggedCache", "(", "se...
Begin executing a new tags operation. :param names: The tags :type names: tuple :rtype: cachy.tagged_cache.TaggedCache
[ "Begin", "executing", "a", "new", "tags", "operation", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/contracts/taggable_store.py#L10-L22
train
48,507
inveniosoftware/invenio-logging
invenio_logging/sentry6.py
Sentry6.get_user_info
def get_user_info(self, request): """Implement custom getter.""" if not current_user.is_authenticated: return {} user_info = { 'id': current_user.get_id(), } if 'SENTRY_USER_ATTRS' in current_app.config: for attr in current_app.config['SENTRY_USER_ATTRS']: if hasattr(current_user, attr): user_info[attr] = getattr(current_user, attr) return user_info
python
def get_user_info(self, request): """Implement custom getter.""" if not current_user.is_authenticated: return {} user_info = { 'id': current_user.get_id(), } if 'SENTRY_USER_ATTRS' in current_app.config: for attr in current_app.config['SENTRY_USER_ATTRS']: if hasattr(current_user, attr): user_info[attr] = getattr(current_user, attr) return user_info
[ "def", "get_user_info", "(", "self", ",", "request", ")", ":", "if", "not", "current_user", ".", "is_authenticated", ":", "return", "{", "}", "user_info", "=", "{", "'id'", ":", "current_user", ".", "get_id", "(", ")", ",", "}", "if", "'SENTRY_USER_ATTRS'"...
Implement custom getter.
[ "Implement", "custom", "getter", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/sentry6.py#L21-L35
train
48,508
load-tools/netort
netort/data_manager/clients/luna.py
LunaClient.create_job
def create_job(self): """ Create public Luna job Returns: job_id (basestring): Luna job id """ my_user_agent = None try: my_user_agent = pkg_resources.require('netort')[0].version except pkg_resources.DistributionNotFound: my_user_agent = 'DistributionNotFound' finally: headers = { "User-Agent": "Uploader/{uploader_ua}, {upward_ua}".format( upward_ua=self.meta.get('user_agent', ''), uploader_ua=my_user_agent ) } req = requests.Request( 'POST', "{api_address}{path}".format( api_address=self.api_address, path=self.create_job_path ), headers=headers ) req.data = { 'test_start': self.job.test_start } prepared_req = req.prepare() logger.debug('Prepared create_job request:\n%s', pretty_print(prepared_req)) response = send_chunk(self.session, prepared_req) logger.debug('Luna create job status: %s', response.status_code) logger.debug('Answ data: %s', response.content) job_id = response.content.decode('utf-8') if isinstance(response.content, bytes) else response.content if not job_id: self.failed.set() raise ValueError('Luna returned answer without jobid: %s', response.content) else: logger.info('Luna job created: %s', job_id) return job_id
python
def create_job(self): """ Create public Luna job Returns: job_id (basestring): Luna job id """ my_user_agent = None try: my_user_agent = pkg_resources.require('netort')[0].version except pkg_resources.DistributionNotFound: my_user_agent = 'DistributionNotFound' finally: headers = { "User-Agent": "Uploader/{uploader_ua}, {upward_ua}".format( upward_ua=self.meta.get('user_agent', ''), uploader_ua=my_user_agent ) } req = requests.Request( 'POST', "{api_address}{path}".format( api_address=self.api_address, path=self.create_job_path ), headers=headers ) req.data = { 'test_start': self.job.test_start } prepared_req = req.prepare() logger.debug('Prepared create_job request:\n%s', pretty_print(prepared_req)) response = send_chunk(self.session, prepared_req) logger.debug('Luna create job status: %s', response.status_code) logger.debug('Answ data: %s', response.content) job_id = response.content.decode('utf-8') if isinstance(response.content, bytes) else response.content if not job_id: self.failed.set() raise ValueError('Luna returned answer without jobid: %s', response.content) else: logger.info('Luna job created: %s', job_id) return job_id
[ "def", "create_job", "(", "self", ")", ":", "my_user_agent", "=", "None", "try", ":", "my_user_agent", "=", "pkg_resources", ".", "require", "(", "'netort'", ")", "[", "0", "]", ".", "version", "except", "pkg_resources", ".", "DistributionNotFound", ":", "my...
Create public Luna job Returns: job_id (basestring): Luna job id
[ "Create", "public", "Luna", "job" ]
b5233a70cea74108857ea24ba5c37975057ca00f
https://github.com/load-tools/netort/blob/b5233a70cea74108857ea24ba5c37975057ca00f/netort/data_manager/clients/luna.py#L89-L130
train
48,509
load-tools/netort
netort/data_manager/manager.py
DataManager.__filter
def __filter(filterable, filter_, logic_operation='and'): """ filtering DataFrame using filter_ key-value conditions applying logic_operation only find rows strictly fitting the filter_ criterion""" condition = [] if not filter_: return filterable elif filter_.get('type') == '__ANY__': return filterable else: for key, value in filter_.items(): condition.append('{key} == "{value}"'.format(key=key, value=value)) try: res = filterable.query(" {operation} ".format(operation=logic_operation).join(condition)) except pd.core.computation.ops.UndefinedVariableError: return pd.DataFrame() else: return res
python
def __filter(filterable, filter_, logic_operation='and'): """ filtering DataFrame using filter_ key-value conditions applying logic_operation only find rows strictly fitting the filter_ criterion""" condition = [] if not filter_: return filterable elif filter_.get('type') == '__ANY__': return filterable else: for key, value in filter_.items(): condition.append('{key} == "{value}"'.format(key=key, value=value)) try: res = filterable.query(" {operation} ".format(operation=logic_operation).join(condition)) except pd.core.computation.ops.UndefinedVariableError: return pd.DataFrame() else: return res
[ "def", "__filter", "(", "filterable", ",", "filter_", ",", "logic_operation", "=", "'and'", ")", ":", "condition", "=", "[", "]", "if", "not", "filter_", ":", "return", "filterable", "elif", "filter_", ".", "get", "(", "'type'", ")", "==", "'__ANY__'", "...
filtering DataFrame using filter_ key-value conditions applying logic_operation only find rows strictly fitting the filter_ criterion
[ "filtering", "DataFrame", "using", "filter_", "key", "-", "value", "conditions", "applying", "logic_operation", "only", "find", "rows", "strictly", "fitting", "the", "filter_", "criterion" ]
b5233a70cea74108857ea24ba5c37975057ca00f
https://github.com/load-tools/netort/blob/b5233a70cea74108857ea24ba5c37975057ca00f/netort/data_manager/manager.py#L279-L295
train
48,510
inveniosoftware/invenio-logging
invenio_logging/sentry.py
InvenioLoggingSentry.install_handler
def install_handler(self, app): """Install log handler.""" from raven.contrib.celery import register_logger_signal, \ register_signal from raven.contrib.flask import Sentry, make_client from raven.handlers.logging import SentryHandler # Installs sentry in app.extensions['sentry'] level = getattr(logging, app.config['LOGGING_SENTRY_LEVEL']) # Get the Sentry class. cls = app.config['LOGGING_SENTRY_CLASS'] if cls: if isinstance(cls, six.string_types): cls = import_string(cls) else: cls = Sentry sentry = cls( app, logging=True, level=level ) app.logger.addHandler(SentryHandler(client=sentry.client, level=level)) # Capture warnings from warnings module if app.config['LOGGING_SENTRY_PYWARNINGS']: self.capture_pywarnings( SentryHandler(sentry.client)) # Setup Celery logging to Sentry if app.config['LOGGING_SENTRY_CELERY']: try: register_logger_signal(sentry.client, loglevel=level) except TypeError: # Compatibility mode for Raven<=5.1.0 register_logger_signal(sentry.client) register_signal(sentry.client) # Werkzeug only adds a stream handler if there's no other handlers # defined, so when Sentry adds a log handler no output is # received from Werkzeug unless we install a console handler # here on the werkzeug logger. if app.debug: logger = logging.getLogger('werkzeug') logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler())
python
def install_handler(self, app): """Install log handler.""" from raven.contrib.celery import register_logger_signal, \ register_signal from raven.contrib.flask import Sentry, make_client from raven.handlers.logging import SentryHandler # Installs sentry in app.extensions['sentry'] level = getattr(logging, app.config['LOGGING_SENTRY_LEVEL']) # Get the Sentry class. cls = app.config['LOGGING_SENTRY_CLASS'] if cls: if isinstance(cls, six.string_types): cls = import_string(cls) else: cls = Sentry sentry = cls( app, logging=True, level=level ) app.logger.addHandler(SentryHandler(client=sentry.client, level=level)) # Capture warnings from warnings module if app.config['LOGGING_SENTRY_PYWARNINGS']: self.capture_pywarnings( SentryHandler(sentry.client)) # Setup Celery logging to Sentry if app.config['LOGGING_SENTRY_CELERY']: try: register_logger_signal(sentry.client, loglevel=level) except TypeError: # Compatibility mode for Raven<=5.1.0 register_logger_signal(sentry.client) register_signal(sentry.client) # Werkzeug only adds a stream handler if there's no other handlers # defined, so when Sentry adds a log handler no output is # received from Werkzeug unless we install a console handler # here on the werkzeug logger. if app.debug: logger = logging.getLogger('werkzeug') logger.setLevel(logging.INFO) logger.addHandler(logging.StreamHandler())
[ "def", "install_handler", "(", "self", ",", "app", ")", ":", "from", "raven", ".", "contrib", ".", "celery", "import", "register_logger_signal", ",", "register_signal", "from", "raven", ".", "contrib", ".", "flask", "import", "Sentry", ",", "make_client", "fro...
Install log handler.
[ "Install", "log", "handler", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/sentry.py#L56-L103
train
48,511
inveniosoftware/invenio-logging
invenio_logging/sentry.py
RequestIdProcessor.process
def process(self, data, **kwargs): """Process event data.""" data = super(RequestIdProcessor, self).process(data, **kwargs) if g and hasattr(g, 'request_id'): tags = data.get('tags', {}) tags['request_id'] = g.request_id data['tags'] = tags return data
python
def process(self, data, **kwargs): """Process event data.""" data = super(RequestIdProcessor, self).process(data, **kwargs) if g and hasattr(g, 'request_id'): tags = data.get('tags', {}) tags['request_id'] = g.request_id data['tags'] = tags return data
[ "def", "process", "(", "self", ",", "data", ",", "*", "*", "kwargs", ")", ":", "data", "=", "super", "(", "RequestIdProcessor", ",", "self", ")", ".", "process", "(", "data", ",", "*", "*", "kwargs", ")", "if", "g", "and", "hasattr", "(", "g", ",...
Process event data.
[ "Process", "event", "data", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/sentry.py#L109-L116
train
48,512
sdispater/cachy
cachy/repository.py
Repository.put
def put(self, key, val, minutes): """ Store an item in the cache. :param key: The cache key :type key: str :param val: The cache value :type val: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int|datetime """ minutes = self._get_minutes(minutes) if minutes is not None: self._store.put(key, val, minutes)
python
def put(self, key, val, minutes): """ Store an item in the cache. :param key: The cache key :type key: str :param val: The cache value :type val: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int|datetime """ minutes = self._get_minutes(minutes) if minutes is not None: self._store.put(key, val, minutes)
[ "def", "put", "(", "self", ",", "key", ",", "val", ",", "minutes", ")", ":", "minutes", "=", "self", ".", "_get_minutes", "(", "minutes", ")", "if", "minutes", "is", "not", "None", ":", "self", ".", "_store", ".", "put", "(", "key", ",", "val", "...
Store an item in the cache. :param key: The cache key :type key: str :param val: The cache value :type val: mixed :param minutes: The lifetime in minutes of the cached value :type minutes: int|datetime
[ "Store", "an", "item", "in", "the", "cache", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/repository.py#L72-L88
train
48,513
sdispater/cachy
cachy/repository.py
Repository._get_key
def _get_key(self, fn, args, kwargs): """ Calculate a cache key given a function, args and kwargs. :param fn: The function :type fn: callable or str :param args: The function args :type args: tuple :param kwargs: The function kwargs :type kwargs: dict :rtype: str """ if args: serialized_arguments = ( self._store.serialize(args[1:]) + self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) ) else: serialized_arguments = self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) if isinstance(fn, types.MethodType): key = self._hash('%s.%s.%s' % (fn.__self__.__class__.__name__, args[0].__name__, serialized_arguments)) elif isinstance(fn, types.FunctionType): key = self._hash('%s.%s' % (fn.__name__, serialized_arguments)) else: key = '%s:' % fn + self._hash(serialized_arguments) return key
python
def _get_key(self, fn, args, kwargs): """ Calculate a cache key given a function, args and kwargs. :param fn: The function :type fn: callable or str :param args: The function args :type args: tuple :param kwargs: The function kwargs :type kwargs: dict :rtype: str """ if args: serialized_arguments = ( self._store.serialize(args[1:]) + self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) ) else: serialized_arguments = self._store.serialize([(k, kwargs[k]) for k in sorted(kwargs.keys())]) if isinstance(fn, types.MethodType): key = self._hash('%s.%s.%s' % (fn.__self__.__class__.__name__, args[0].__name__, serialized_arguments)) elif isinstance(fn, types.FunctionType): key = self._hash('%s.%s' % (fn.__name__, serialized_arguments)) else: key = '%s:' % fn + self._hash(serialized_arguments) return key
[ "def", "_get_key", "(", "self", ",", "fn", ",", "args", ",", "kwargs", ")", ":", "if", "args", ":", "serialized_arguments", "=", "(", "self", ".", "_store", ".", "serialize", "(", "args", "[", "1", ":", "]", ")", "+", "self", ".", "_store", ".", ...
Calculate a cache key given a function, args and kwargs. :param fn: The function :type fn: callable or str :param args: The function args :type args: tuple :param kwargs: The function kwargs :type kwargs: dict :rtype: str
[ "Calculate", "a", "cache", "key", "given", "a", "function", "args", "and", "kwargs", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/repository.py#L262-L297
train
48,514
xlcteam/pynxc
pynxc/second_pass.py
SecondPassVisitor.flush_main
def flush_main(self): """Flushes the implicit main function if there is no main function defined.""" if self.has_main: return self.in_main = True self.write('task main()') self.INDENT() if self.debug: print 'Flushing main:', self.fv.main for node in self.fv.main: self.v(node) self.NEWLINE() self.DEDENT() self.in_main = False
python
def flush_main(self): """Flushes the implicit main function if there is no main function defined.""" if self.has_main: return self.in_main = True self.write('task main()') self.INDENT() if self.debug: print 'Flushing main:', self.fv.main for node in self.fv.main: self.v(node) self.NEWLINE() self.DEDENT() self.in_main = False
[ "def", "flush_main", "(", "self", ")", ":", "if", "self", ".", "has_main", ":", "return", "self", ".", "in_main", "=", "True", "self", ".", "write", "(", "'task main()'", ")", "self", ".", "INDENT", "(", ")", "if", "self", ".", "debug", ":", "print",...
Flushes the implicit main function if there is no main function defined.
[ "Flushes", "the", "implicit", "main", "function", "if", "there", "is", "no", "main", "function", "defined", "." ]
8932d3a7c0962577c8ead220621f63f800e3b411
https://github.com/xlcteam/pynxc/blob/8932d3a7c0962577c8ead220621f63f800e3b411/pynxc/second_pass.py#L659-L679
train
48,515
load-tools/netort
netort/data_processing.py
get_nowait_from_queue
def get_nowait_from_queue(queue): """ Collect all immediately available items from a queue """ data = [] for _ in range(queue.qsize()): try: data.append(queue.get_nowait()) except q.Empty: break return data
python
def get_nowait_from_queue(queue): """ Collect all immediately available items from a queue """ data = [] for _ in range(queue.qsize()): try: data.append(queue.get_nowait()) except q.Empty: break return data
[ "def", "get_nowait_from_queue", "(", "queue", ")", ":", "data", "=", "[", "]", "for", "_", "in", "range", "(", "queue", ".", "qsize", "(", ")", ")", ":", "try", ":", "data", ".", "append", "(", "queue", ".", "get_nowait", "(", ")", ")", "except", ...
Collect all immediately available items from a queue
[ "Collect", "all", "immediately", "available", "items", "from", "a", "queue" ]
b5233a70cea74108857ea24ba5c37975057ca00f
https://github.com/load-tools/netort/blob/b5233a70cea74108857ea24ba5c37975057ca00f/netort/data_processing.py#L10-L18
train
48,516
sdispater/cachy
cachy/redis_tagged_cache.py
RedisTaggedCache._push_forever_keys
def _push_forever_keys(self, namespace, key): """ Store a copy of the full key for each namespace segment. :type namespace: str :type key: str """ full_key = '%s%s:%s' % (self.get_prefix(), hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key) for segment in namespace.split('|'): self._store.connection().lpush(self._forever_key(segment), full_key)
python
def _push_forever_keys(self, namespace, key): """ Store a copy of the full key for each namespace segment. :type namespace: str :type key: str """ full_key = '%s%s:%s' % (self.get_prefix(), hashlib.sha1(encode(self._tags.get_namespace())).hexdigest(), key) for segment in namespace.split('|'): self._store.connection().lpush(self._forever_key(segment), full_key)
[ "def", "_push_forever_keys", "(", "self", ",", "namespace", ",", "key", ")", ":", "full_key", "=", "'%s%s:%s'", "%", "(", "self", ".", "get_prefix", "(", ")", ",", "hashlib", ".", "sha1", "(", "encode", "(", "self", ".", "_tags", ".", "get_namespace", ...
Store a copy of the full key for each namespace segment. :type namespace: str :type key: str
[ "Store", "a", "copy", "of", "the", "full", "key", "for", "each", "namespace", "segment", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/redis_tagged_cache.py#L37-L49
train
48,517
sdispater/cachy
cachy/redis_tagged_cache.py
RedisTaggedCache._delete_forever_keys
def _delete_forever_keys(self): """ Delete all of the items that were stored forever. """ for segment in self._tags.get_namespace().split('|'): segment = self._forever_key(segment) self._delete_forever_values(segment) self._store.connection().delete(segment)
python
def _delete_forever_keys(self): """ Delete all of the items that were stored forever. """ for segment in self._tags.get_namespace().split('|'): segment = self._forever_key(segment) self._delete_forever_values(segment) self._store.connection().delete(segment)
[ "def", "_delete_forever_keys", "(", "self", ")", ":", "for", "segment", "in", "self", ".", "_tags", ".", "get_namespace", "(", ")", ".", "split", "(", "'|'", ")", ":", "segment", "=", "self", ".", "_forever_key", "(", "segment", ")", "self", ".", "_del...
Delete all of the items that were stored forever.
[ "Delete", "all", "of", "the", "items", "that", "were", "stored", "forever", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/redis_tagged_cache.py#L51-L59
train
48,518
sdispater/cachy
cachy/redis_tagged_cache.py
RedisTaggedCache._delete_forever_values
def _delete_forever_values(self, forever_key): """ Delete all of the keys that have been stored forever. :type forever_key: str """ forever = self._store.connection().lrange(forever_key, 0, -1) if len(forever) > 0: self._store.connection().delete(*forever)
python
def _delete_forever_values(self, forever_key): """ Delete all of the keys that have been stored forever. :type forever_key: str """ forever = self._store.connection().lrange(forever_key, 0, -1) if len(forever) > 0: self._store.connection().delete(*forever)
[ "def", "_delete_forever_values", "(", "self", ",", "forever_key", ")", ":", "forever", "=", "self", ".", "_store", ".", "connection", "(", ")", ".", "lrange", "(", "forever_key", ",", "0", ",", "-", "1", ")", "if", "len", "(", "forever", ")", ">", "0...
Delete all of the keys that have been stored forever. :type forever_key: str
[ "Delete", "all", "of", "the", "keys", "that", "have", "been", "stored", "forever", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/redis_tagged_cache.py#L61-L70
train
48,519
inveniosoftware/invenio-logging
invenio_logging/ext.py
InvenioLoggingBase.capture_pywarnings
def capture_pywarnings(handler): """Log python system warnings.""" logger = logging.getLogger('py.warnings') # Check for previously installed handlers. for h in logger.handlers: if isinstance(h, handler.__class__): return logger.addHandler(handler) logger.setLevel(logging.WARNING)
python
def capture_pywarnings(handler): """Log python system warnings.""" logger = logging.getLogger('py.warnings') # Check for previously installed handlers. for h in logger.handlers: if isinstance(h, handler.__class__): return logger.addHandler(handler) logger.setLevel(logging.WARNING)
[ "def", "capture_pywarnings", "(", "handler", ")", ":", "logger", "=", "logging", ".", "getLogger", "(", "'py.warnings'", ")", "# Check for previously installed handlers.", "for", "h", "in", "logger", ".", "handlers", ":", "if", "isinstance", "(", "h", ",", "hand...
Log python system warnings.
[ "Log", "python", "system", "warnings", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/ext.py#L34-L42
train
48,520
fabaff/python-netdata
netdata/__init__.py
Netdata.get_data
async def get_data(self, resource): """Get detail for a resource from the data endpoint.""" url = '{}{}'.format( self.base_url, self.endpoint.format(resource=resource)) try: with async_timeout.timeout(5, loop=self._loop): response = await self._session.get(url) _LOGGER.info( "Response from Netdata: %s", response.status) data = await response.json() _LOGGER.debug(data) self.values = {k: v for k, v in zip( data['labels'], data['data'][0])} except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror): _LOGGER.error("Can not load data from Netdata") raise exceptions.NetdataConnectionError()
python
async def get_data(self, resource): """Get detail for a resource from the data endpoint.""" url = '{}{}'.format( self.base_url, self.endpoint.format(resource=resource)) try: with async_timeout.timeout(5, loop=self._loop): response = await self._session.get(url) _LOGGER.info( "Response from Netdata: %s", response.status) data = await response.json() _LOGGER.debug(data) self.values = {k: v for k, v in zip( data['labels'], data['data'][0])} except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror): _LOGGER.error("Can not load data from Netdata") raise exceptions.NetdataConnectionError()
[ "async", "def", "get_data", "(", "self", ",", "resource", ")", ":", "url", "=", "'{}{}'", ".", "format", "(", "self", ".", "base_url", ",", "self", ".", "endpoint", ".", "format", "(", "resource", "=", "resource", ")", ")", "try", ":", "with", "async...
Get detail for a resource from the data endpoint.
[ "Get", "detail", "for", "a", "resource", "from", "the", "data", "endpoint", "." ]
bca5d58f84a0fc849b9bb16a00959a0b33d13a67
https://github.com/fabaff/python-netdata/blob/bca5d58f84a0fc849b9bb16a00959a0b33d13a67/netdata/__init__.py#L39-L57
train
48,521
fabaff/python-netdata
netdata/__init__.py
Netdata.get_alarms
async def get_alarms(self): """Get alarms for a Netdata instance.""" url = '{}{}'.format(self.base_url, self.endpoint) try: with async_timeout.timeout(5, loop=self._loop): response = await self._session.get(url) _LOGGER.debug( "Response from Netdata: %s", response.status) data = await response.text() _LOGGER.debug(data) self.alarms = data except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror): _LOGGER.error("Can not load data from Netdata") raise exceptions.NetdataConnectionError()
python
async def get_alarms(self): """Get alarms for a Netdata instance.""" url = '{}{}'.format(self.base_url, self.endpoint) try: with async_timeout.timeout(5, loop=self._loop): response = await self._session.get(url) _LOGGER.debug( "Response from Netdata: %s", response.status) data = await response.text() _LOGGER.debug(data) self.alarms = data except (asyncio.TimeoutError, aiohttp.ClientError, socket.gaierror): _LOGGER.error("Can not load data from Netdata") raise exceptions.NetdataConnectionError()
[ "async", "def", "get_alarms", "(", "self", ")", ":", "url", "=", "'{}{}'", ".", "format", "(", "self", ".", "base_url", ",", "self", ".", "endpoint", ")", "try", ":", "with", "async_timeout", ".", "timeout", "(", "5", ",", "loop", "=", "self", ".", ...
Get alarms for a Netdata instance.
[ "Get", "alarms", "for", "a", "Netdata", "instance", "." ]
bca5d58f84a0fc849b9bb16a00959a0b33d13a67
https://github.com/fabaff/python-netdata/blob/bca5d58f84a0fc849b9bb16a00959a0b33d13a67/netdata/__init__.py#L59-L75
train
48,522
inveniosoftware/invenio-logging
invenio_logging/utils.py
AddRequestIdFilter.filter
def filter(self, record): """If request_id is set in flask.g, add it to log record.""" if g and hasattr(g, 'request_id'): record.request_id = g.request_id return True
python
def filter(self, record): """If request_id is set in flask.g, add it to log record.""" if g and hasattr(g, 'request_id'): record.request_id = g.request_id return True
[ "def", "filter", "(", "self", ",", "record", ")", ":", "if", "g", "and", "hasattr", "(", "g", ",", "'request_id'", ")", ":", "record", ".", "request_id", "=", "g", ".", "request_id", "return", "True" ]
If request_id is set in flask.g, add it to log record.
[ "If", "request_id", "is", "set", "in", "flask", ".", "g", "add", "it", "to", "log", "record", "." ]
59ee171ad4f9809f62a822964b5c68e5be672dd8
https://github.com/inveniosoftware/invenio-logging/blob/59ee171ad4f9809f62a822964b5c68e5be672dd8/invenio_logging/utils.py#L19-L23
train
48,523
sdispater/cachy
cachy/cache_manager.py
CacheManager.store
def store(self, name=None): """ Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: Repository """ if name is None: name = self.get_default_driver() self._stores[name] = self._get(name) return self._stores[name]
python
def store(self, name=None): """ Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: Repository """ if name is None: name = self.get_default_driver() self._stores[name] = self._get(name) return self._stores[name]
[ "def", "store", "(", "self", ",", "name", "=", "None", ")", ":", "if", "name", "is", "None", ":", "name", "=", "self", ".", "get_default_driver", "(", ")", "self", ".", "_stores", "[", "name", "]", "=", "self", ".", "_get", "(", "name", ")", "ret...
Get a cache store instance by name. :param name: The cache store name :type name: str :rtype: Repository
[ "Get", "a", "cache", "store", "instance", "by", "name", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L43-L57
train
48,524
sdispater/cachy
cachy/cache_manager.py
CacheManager._get
def _get(self, name): """ Attempt to get the store from the local cache. :param name: The store name :type name: str :rtype: Repository """ return self._stores.get(name, self._resolve(name))
python
def _get(self, name): """ Attempt to get the store from the local cache. :param name: The store name :type name: str :rtype: Repository """ return self._stores.get(name, self._resolve(name))
[ "def", "_get", "(", "self", ",", "name", ")", ":", "return", "self", ".", "_stores", ".", "get", "(", "name", ",", "self", ".", "_resolve", "(", "name", ")", ")" ]
Attempt to get the store from the local cache. :param name: The store name :type name: str :rtype: Repository
[ "Attempt", "to", "get", "the", "store", "from", "the", "local", "cache", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L70-L79
train
48,525
sdispater/cachy
cachy/cache_manager.py
CacheManager._resolve
def _resolve(self, name): """ Resolve the given store :param name: The store to resolve :type name: str :rtype: Repository """ config = self._get_config(name) if not config: raise RuntimeError('Cache store [%s] is not defined.' % name) if config['driver'] in self._custom_creators: repository = self._call_custom_creator(config) else: repository = getattr(self, '_create_%s_driver' % config['driver'])(config) if 'serializer' in config: serializer = self._resolve_serializer(config['serializer']) else: serializer = self._serializer repository.get_store().set_serializer(serializer) return repository
python
def _resolve(self, name): """ Resolve the given store :param name: The store to resolve :type name: str :rtype: Repository """ config = self._get_config(name) if not config: raise RuntimeError('Cache store [%s] is not defined.' % name) if config['driver'] in self._custom_creators: repository = self._call_custom_creator(config) else: repository = getattr(self, '_create_%s_driver' % config['driver'])(config) if 'serializer' in config: serializer = self._resolve_serializer(config['serializer']) else: serializer = self._serializer repository.get_store().set_serializer(serializer) return repository
[ "def", "_resolve", "(", "self", ",", "name", ")", ":", "config", "=", "self", ".", "_get_config", "(", "name", ")", "if", "not", "config", ":", "raise", "RuntimeError", "(", "'Cache store [%s] is not defined.'", "%", "name", ")", "if", "config", "[", "'dri...
Resolve the given store :param name: The store to resolve :type name: str :rtype: Repository
[ "Resolve", "the", "given", "store" ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L81-L107
train
48,526
sdispater/cachy
cachy/cache_manager.py
CacheManager._call_custom_creator
def _call_custom_creator(self, config): """ Call a custom driver creator. :param config: The driver configuration :type config: dict :rtype: Repository """ creator = self._custom_creators[config['driver']](config) if isinstance(creator, Store): creator = self.repository(creator) if not isinstance(creator, Repository): raise RuntimeError('Custom creator should return a Repository instance.') return creator
python
def _call_custom_creator(self, config): """ Call a custom driver creator. :param config: The driver configuration :type config: dict :rtype: Repository """ creator = self._custom_creators[config['driver']](config) if isinstance(creator, Store): creator = self.repository(creator) if not isinstance(creator, Repository): raise RuntimeError('Custom creator should return a Repository instance.') return creator
[ "def", "_call_custom_creator", "(", "self", ",", "config", ")", ":", "creator", "=", "self", ".", "_custom_creators", "[", "config", "[", "'driver'", "]", "]", "(", "config", ")", "if", "isinstance", "(", "creator", ",", "Store", ")", ":", "creator", "="...
Call a custom driver creator. :param config: The driver configuration :type config: dict :rtype: Repository
[ "Call", "a", "custom", "driver", "creator", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L109-L126
train
48,527
sdispater/cachy
cachy/cache_manager.py
CacheManager._create_file_driver
def _create_file_driver(self, config): """ Create an instance of the file cache driver. :param config: The driver configuration :type config: dict :rtype: Repository """ kwargs = { 'directory': config['path'] } if 'hash_type' in config: kwargs['hash_type'] = config['hash_type'] return self.repository(FileStore(**kwargs))
python
def _create_file_driver(self, config): """ Create an instance of the file cache driver. :param config: The driver configuration :type config: dict :rtype: Repository """ kwargs = { 'directory': config['path'] } if 'hash_type' in config: kwargs['hash_type'] = config['hash_type'] return self.repository(FileStore(**kwargs))
[ "def", "_create_file_driver", "(", "self", ",", "config", ")", ":", "kwargs", "=", "{", "'directory'", ":", "config", "[", "'path'", "]", "}", "if", "'hash_type'", "in", "config", ":", "kwargs", "[", "'hash_type'", "]", "=", "config", "[", "'hash_type'", ...
Create an instance of the file cache driver. :param config: The driver configuration :type config: dict :rtype: Repository
[ "Create", "an", "instance", "of", "the", "file", "cache", "driver", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L139-L155
train
48,528
sdispater/cachy
cachy/cache_manager.py
CacheManager.get_default_driver
def get_default_driver(self): """ Get the default cache driver name. :rtype: str :raises: RuntimeError """ if 'default' in self._config: return self._config['default'] if len(self._config['stores']) == 1: return list(self._config['stores'].keys())[0] raise RuntimeError('Missing "default" cache in configuration.')
python
def get_default_driver(self): """ Get the default cache driver name. :rtype: str :raises: RuntimeError """ if 'default' in self._config: return self._config['default'] if len(self._config['stores']) == 1: return list(self._config['stores'].keys())[0] raise RuntimeError('Missing "default" cache in configuration.')
[ "def", "get_default_driver", "(", "self", ")", ":", "if", "'default'", "in", "self", ".", "_config", ":", "return", "self", ".", "_config", "[", "'default'", "]", "if", "len", "(", "self", ".", "_config", "[", "'stores'", "]", ")", "==", "1", ":", "r...
Get the default cache driver name. :rtype: str :raises: RuntimeError
[ "Get", "the", "default", "cache", "driver", "name", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L214-L228
train
48,529
sdispater/cachy
cachy/cache_manager.py
CacheManager._resolve_serializer
def _resolve_serializer(self, serializer): """ Resolve the given serializer. :param serializer: The serializer to resolve :type serializer: str or Serializer :rtype: Serializer """ if isinstance(serializer, Serializer): return serializer if serializer in self._serializers: return self._serializers[serializer] raise RuntimeError('Unsupported serializer')
python
def _resolve_serializer(self, serializer): """ Resolve the given serializer. :param serializer: The serializer to resolve :type serializer: str or Serializer :rtype: Serializer """ if isinstance(serializer, Serializer): return serializer if serializer in self._serializers: return self._serializers[serializer] raise RuntimeError('Unsupported serializer')
[ "def", "_resolve_serializer", "(", "self", ",", "serializer", ")", ":", "if", "isinstance", "(", "serializer", ",", "Serializer", ")", ":", "return", "serializer", "if", "serializer", "in", "self", ".", "_serializers", ":", "return", "self", ".", "_serializers...
Resolve the given serializer. :param serializer: The serializer to resolve :type serializer: str or Serializer :rtype: Serializer
[ "Resolve", "the", "given", "serializer", "." ]
ee4b044d6aafa80125730a00b1f679a7bd852b8a
https://github.com/sdispater/cachy/blob/ee4b044d6aafa80125730a00b1f679a7bd852b8a/cachy/cache_manager.py#L255-L270
train
48,530
SamLau95/nbinteract
nbinteract/questions.py
multiple_choice
def multiple_choice(question, choices, answers): """ Generates a multiple choice question that allows the user to select an answer choice and shows whether choice was correct. Args: question (str): Question text displayed above choices. choices (list str): Answer choices that user can select. answers (int | iterable int): Either an integer or iterable of integers. Each integer in answers corresponds to the index of the correct choice in `choices`. Returns: None >>> multiple_choice(question="What is 10 + 2 * 5?", ... choices=['12', '60', '20'], ... answers=2) #doctest: +SKIP <What is 10 + 2 * 5?> <Button> <12> <Button> <60> <Button> <20> (Correct) >>> multiple_choice(question="Select all prime numbers.", ... choices=['12', '3', '31'], ... answers=[1, 2]) #doctest: +SKIP <Select all prime numbers.> <Button> <12> <Button> <3> (Correct) <Button> <31> (Correct) """ if not isinstance(answers, (int, collections.Iterable)): raise TypeError( 'The `answers` arg is expected to be of type ' '(int | iterable int) but got {} instead.'.format(type(answers)) ) @curry def check_answer(index, button): is_correct = ( index == answers if isinstance(answers, int) else index in answers ) button.style.button_color = GREEN if is_correct else RED answer_choices = [] for index, choice in enumerate(choices): button = widgets.Button( layout=widgets.Layout(width='20px', height='20px', padding='0') ) button.on_click(check_answer(index)) button_and_question = widgets.HBox( [button, widgets.HTML(TEXT_STYLE.format(choice))], layout=widgets.Layout(align_items='center') ) answer_choices.append(button_and_question) question_html = [widgets.HTML(TEXT_STYLE.format(question))] display(widgets.VBox(question_html + answer_choices))
python
def multiple_choice(question, choices, answers): """ Generates a multiple choice question that allows the user to select an answer choice and shows whether choice was correct. Args: question (str): Question text displayed above choices. choices (list str): Answer choices that user can select. answers (int | iterable int): Either an integer or iterable of integers. Each integer in answers corresponds to the index of the correct choice in `choices`. Returns: None >>> multiple_choice(question="What is 10 + 2 * 5?", ... choices=['12', '60', '20'], ... answers=2) #doctest: +SKIP <What is 10 + 2 * 5?> <Button> <12> <Button> <60> <Button> <20> (Correct) >>> multiple_choice(question="Select all prime numbers.", ... choices=['12', '3', '31'], ... answers=[1, 2]) #doctest: +SKIP <Select all prime numbers.> <Button> <12> <Button> <3> (Correct) <Button> <31> (Correct) """ if not isinstance(answers, (int, collections.Iterable)): raise TypeError( 'The `answers` arg is expected to be of type ' '(int | iterable int) but got {} instead.'.format(type(answers)) ) @curry def check_answer(index, button): is_correct = ( index == answers if isinstance(answers, int) else index in answers ) button.style.button_color = GREEN if is_correct else RED answer_choices = [] for index, choice in enumerate(choices): button = widgets.Button( layout=widgets.Layout(width='20px', height='20px', padding='0') ) button.on_click(check_answer(index)) button_and_question = widgets.HBox( [button, widgets.HTML(TEXT_STYLE.format(choice))], layout=widgets.Layout(align_items='center') ) answer_choices.append(button_and_question) question_html = [widgets.HTML(TEXT_STYLE.format(question))] display(widgets.VBox(question_html + answer_choices))
[ "def", "multiple_choice", "(", "question", ",", "choices", ",", "answers", ")", ":", "if", "not", "isinstance", "(", "answers", ",", "(", "int", ",", "collections", ".", "Iterable", ")", ")", ":", "raise", "TypeError", "(", "'The `answers` arg is expected to b...
Generates a multiple choice question that allows the user to select an answer choice and shows whether choice was correct. Args: question (str): Question text displayed above choices. choices (list str): Answer choices that user can select. answers (int | iterable int): Either an integer or iterable of integers. Each integer in answers corresponds to the index of the correct choice in `choices`. Returns: None >>> multiple_choice(question="What is 10 + 2 * 5?", ... choices=['12', '60', '20'], ... answers=2) #doctest: +SKIP <What is 10 + 2 * 5?> <Button> <12> <Button> <60> <Button> <20> (Correct) >>> multiple_choice(question="Select all prime numbers.", ... choices=['12', '3', '31'], ... answers=[1, 2]) #doctest: +SKIP <Select all prime numbers.> <Button> <12> <Button> <3> (Correct) <Button> <31> (Correct)
[ "Generates", "a", "multiple", "choice", "question", "that", "allows", "the", "user", "to", "select", "an", "answer", "choice", "and", "shows", "whether", "choice", "was", "correct", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/questions.py#L15-L75
train
48,531
SamLau95/nbinteract
nbinteract/questions.py
short_answer
def short_answer(question, answers, explanation=None): """ Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button> """ # Input textbox textbox = widgets.Text(placeholder='Write your answer here') # Submit button submit_button = widgets.Button(description='Submit') # Space right of the submit button to show checkmark/x-mark visual_correct = widgets.HTML() # Space below input line to display error if function call errored error_space = widgets.HTML() # Space below input line to display explanation if answer is correct explain_space = widgets.HTML() # correctness function linked to the submit button def check_answer(_): response = textbox.value if isinstance(answers, collections.Callable): try: error_space.value = '' correct = answers(response) except Exception as e: correct = False error_space.value = 'Error in checking answer: {}'.format(e) elif isinstance(answers, str): correct = response == answers elif isinstance(answers, collections.Iterable): correct = response in answers else: raise TypeError('The `answers` arg is an incorrect 
type.') visual_correct.value = CHECK_ICON if correct else X_ICON if correct and explanation: explain_space.value = explanation submit_button.on_click(check_answer) question_tag = widgets.HTML(TEXT_STYLE.format(question)) user_input_line = widgets.HBox([textbox, submit_button, visual_correct]) display( widgets.VBox([ question_tag, user_input_line, error_space, explain_space ]) )
python
def short_answer(question, answers, explanation=None): """ Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button> """ # Input textbox textbox = widgets.Text(placeholder='Write your answer here') # Submit button submit_button = widgets.Button(description='Submit') # Space right of the submit button to show checkmark/x-mark visual_correct = widgets.HTML() # Space below input line to display error if function call errored error_space = widgets.HTML() # Space below input line to display explanation if answer is correct explain_space = widgets.HTML() # correctness function linked to the submit button def check_answer(_): response = textbox.value if isinstance(answers, collections.Callable): try: error_space.value = '' correct = answers(response) except Exception as e: correct = False error_space.value = 'Error in checking answer: {}'.format(e) elif isinstance(answers, str): correct = response == answers elif isinstance(answers, collections.Iterable): correct = response in answers else: raise TypeError('The `answers` arg is an incorrect 
type.') visual_correct.value = CHECK_ICON if correct else X_ICON if correct and explanation: explain_space.value = explanation submit_button.on_click(check_answer) question_tag = widgets.HTML(TEXT_STYLE.format(question)) user_input_line = widgets.HBox([textbox, submit_button, visual_correct]) display( widgets.VBox([ question_tag, user_input_line, error_space, explain_space ]) )
[ "def", "short_answer", "(", "question", ",", "answers", ",", "explanation", "=", "None", ")", ":", "# Input textbox", "textbox", "=", "widgets", ".", "Text", "(", "placeholder", "=", "'Write your answer here'", ")", "# Submit button", "submit_button", "=", "widget...
Generates a short answer question that allows user to input an answer in a textbox and a submit button to check the answer. Args: question (str): The question being asked. answers (str | list str | func): If a string, only that string will be marked correct. If a list of string, any string in the list will be marked correct. If a function, any input that causes the function to return True will be marked correct. explanation (str): The explanation to the question is displayed when the user inputs the correct answer. Returns: None >>> short_answer('What is 1 + 1?', '2', ... explanation='1+1 is 2') #doctest: +SKIP <What is 1+1?> <Input box, Submit button> >>> short_answer('Enter the first name of a member of the Beatles.', ... ['John', 'Paul', 'George', 'Ringo']) #doctest: +SKIP <Enter the first name of a member of the Beatles.> <Input box, Submit button> >>> short_answer('Enter an even number.', ... lambda x: int(x) % 2 == 0) #doctest: +SKIP <Enter an even number.> <Input box, Submit button>
[ "Generates", "a", "short", "answer", "question", "that", "allows", "user", "to", "input", "an", "answer", "in", "a", "textbox", "and", "a", "submit", "button", "to", "check", "the", "answer", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/questions.py#L88-L159
train
48,532
SamLau95/nbinteract
nbinteract/exporters.py
publish
def publish(spec, nb_name, template='full', save_first=True): """ Converts nb_name to an HTML file. Preserves widget functionality. Outputs a link to download HTML file after conversion if called in a notebook environment. Equivalent to running `nbinteract ${spec} ${nb_name}` on the command line. Args: spec (str): BinderHub spec for Jupyter image. Must be in the format: `${username}/${repo}/${branch}`. nb_name (str): Complete name of the notebook file to convert. Can be a relative path (eg. './foo/test.ipynb'). Kwargs: template (str): Template to use for conversion. Valid templates: - 'full': Outputs a complete standalone HTML page with default styling. Automatically loads the nbinteract JS library. - 'partial': Outputs an HTML partial that can be embedded in another page. Automatically loads the nbinteract JS library but has no styling. - 'plain': Outputs an HTML partial used to embed in an HTML page where the nbinteract JS library is already loaded. Does not load JS library or styling save_first (bool): If True, saves the currently opened notebook before converting nb_name. Used to ensure notebook is written to filesystem before starting conversion. Does nothing if not in a notebook environment. Returns: None """ if not os.path.isfile(nb_name): raise ValueError( "{} isn't a path to a file. Double check your " "filename and try again.".format(nb_name) ) if save_first: _save_nb(nb_name) print('Converting notebook...') try: check_output( ['nbinteract', '--template', template, '--spec', spec, nb_name], stderr=STDOUT ) except CalledProcessError as err: logging.warning( ERROR_MESSAGE.format( filename=nb_name, error=str(err.output, 'utf-8') ) ) return html_filename = os.path.splitext(nb_name)[0] + '.html' display(Markdown(CONVERT_SUCCESS_MD.format(url=html_filename)))
python
def publish(spec, nb_name, template='full', save_first=True): """ Converts nb_name to an HTML file. Preserves widget functionality. Outputs a link to download HTML file after conversion if called in a notebook environment. Equivalent to running `nbinteract ${spec} ${nb_name}` on the command line. Args: spec (str): BinderHub spec for Jupyter image. Must be in the format: `${username}/${repo}/${branch}`. nb_name (str): Complete name of the notebook file to convert. Can be a relative path (eg. './foo/test.ipynb'). Kwargs: template (str): Template to use for conversion. Valid templates: - 'full': Outputs a complete standalone HTML page with default styling. Automatically loads the nbinteract JS library. - 'partial': Outputs an HTML partial that can be embedded in another page. Automatically loads the nbinteract JS library but has no styling. - 'plain': Outputs an HTML partial used to embed in an HTML page where the nbinteract JS library is already loaded. Does not load JS library or styling save_first (bool): If True, saves the currently opened notebook before converting nb_name. Used to ensure notebook is written to filesystem before starting conversion. Does nothing if not in a notebook environment. Returns: None """ if not os.path.isfile(nb_name): raise ValueError( "{} isn't a path to a file. Double check your " "filename and try again.".format(nb_name) ) if save_first: _save_nb(nb_name) print('Converting notebook...') try: check_output( ['nbinteract', '--template', template, '--spec', spec, nb_name], stderr=STDOUT ) except CalledProcessError as err: logging.warning( ERROR_MESSAGE.format( filename=nb_name, error=str(err.output, 'utf-8') ) ) return html_filename = os.path.splitext(nb_name)[0] + '.html' display(Markdown(CONVERT_SUCCESS_MD.format(url=html_filename)))
[ "def", "publish", "(", "spec", ",", "nb_name", ",", "template", "=", "'full'", ",", "save_first", "=", "True", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "nb_name", ")", ":", "raise", "ValueError", "(", "\"{} isn't a path to a file. Doub...
Converts nb_name to an HTML file. Preserves widget functionality. Outputs a link to download HTML file after conversion if called in a notebook environment. Equivalent to running `nbinteract ${spec} ${nb_name}` on the command line. Args: spec (str): BinderHub spec for Jupyter image. Must be in the format: `${username}/${repo}/${branch}`. nb_name (str): Complete name of the notebook file to convert. Can be a relative path (eg. './foo/test.ipynb'). Kwargs: template (str): Template to use for conversion. Valid templates: - 'full': Outputs a complete standalone HTML page with default styling. Automatically loads the nbinteract JS library. - 'partial': Outputs an HTML partial that can be embedded in another page. Automatically loads the nbinteract JS library but has no styling. - 'plain': Outputs an HTML partial used to embed in an HTML page where the nbinteract JS library is already loaded. Does not load JS library or styling save_first (bool): If True, saves the currently opened notebook before converting nb_name. Used to ensure notebook is written to filesystem before starting conversion. Does nothing if not in a notebook environment. Returns: None
[ "Converts", "nb_name", "to", "an", "HTML", "file", ".", "Preserves", "widget", "functionality", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/exporters.py#L154-L215
train
48,533
SamLau95/nbinteract
nbinteract/exporters.py
_save_nb
def _save_nb(nb_name): """ Attempts to save notebook. If unsuccessful, shows a warning. """ display(Javascript('IPython.notebook.save_checkpoint();')) display(Javascript('IPython.notebook.save_notebook();')) print('Saving notebook...', end=' ') if _wait_for_save(nb_name): print("Saved '{}'.".format(nb_name)) else: logging.warning( "Could not save your notebook (timed out waiting for " "IPython save). Make sure your notebook is saved " "and export again." )
python
def _save_nb(nb_name): """ Attempts to save notebook. If unsuccessful, shows a warning. """ display(Javascript('IPython.notebook.save_checkpoint();')) display(Javascript('IPython.notebook.save_notebook();')) print('Saving notebook...', end=' ') if _wait_for_save(nb_name): print("Saved '{}'.".format(nb_name)) else: logging.warning( "Could not save your notebook (timed out waiting for " "IPython save). Make sure your notebook is saved " "and export again." )
[ "def", "_save_nb", "(", "nb_name", ")", ":", "display", "(", "Javascript", "(", "'IPython.notebook.save_checkpoint();'", ")", ")", "display", "(", "Javascript", "(", "'IPython.notebook.save_notebook();'", ")", ")", "print", "(", "'Saving notebook...'", ",", "end", "...
Attempts to save notebook. If unsuccessful, shows a warning.
[ "Attempts", "to", "save", "notebook", ".", "If", "unsuccessful", "shows", "a", "warning", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/exporters.py#L218-L233
train
48,534
SamLau95/nbinteract
nbinteract/exporters.py
_wait_for_save
def _wait_for_save(nb_name, timeout=5): """Waits for nb_name to update, waiting up to TIMEOUT seconds. Returns True if a save was detected, and False otherwise. """ modification_time = os.path.getmtime(nb_name) start_time = time.time() while time.time() < start_time + timeout: if ( os.path.getmtime(nb_name) > modification_time and os.path.getsize(nb_name) > 0 ): return True time.sleep(0.2) return False
python
def _wait_for_save(nb_name, timeout=5): """Waits for nb_name to update, waiting up to TIMEOUT seconds. Returns True if a save was detected, and False otherwise. """ modification_time = os.path.getmtime(nb_name) start_time = time.time() while time.time() < start_time + timeout: if ( os.path.getmtime(nb_name) > modification_time and os.path.getsize(nb_name) > 0 ): return True time.sleep(0.2) return False
[ "def", "_wait_for_save", "(", "nb_name", ",", "timeout", "=", "5", ")", ":", "modification_time", "=", "os", ".", "path", ".", "getmtime", "(", "nb_name", ")", "start_time", "=", "time", ".", "time", "(", ")", "while", "time", ".", "time", "(", ")", ...
Waits for nb_name to update, waiting up to TIMEOUT seconds. Returns True if a save was detected, and False otherwise.
[ "Waits", "for", "nb_name", "to", "update", "waiting", "up", "to", "TIMEOUT", "seconds", ".", "Returns", "True", "if", "a", "save", "was", "detected", "and", "False", "otherwise", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/exporters.py#L236-L249
train
48,535
SamLau95/nbinteract
nbinteract/util.py
maybe_call
def maybe_call(maybe_fn, kwargs: dict, prefix: str = None) -> 'Any': """ If maybe_fn is a function, get its arguments from kwargs and call it, also searching for prefixed kwargs if prefix is specified. Otherwise, return maybe_fn. Used to allow both functions and iterables to be passed into plotting functions. >>> def square(x): return x * x >>> maybe_call(square, {'x': 10}) 100 >>> data = [1, 2, 3] >>> maybe_call(data, {'x': 10}) [1, 2, 3] """ if not callable(maybe_fn): return maybe_fn args = get_fn_args(maybe_fn, kwargs, prefix=prefix) return maybe_fn(**args)
python
def maybe_call(maybe_fn, kwargs: dict, prefix: str = None) -> 'Any': """ If maybe_fn is a function, get its arguments from kwargs and call it, also searching for prefixed kwargs if prefix is specified. Otherwise, return maybe_fn. Used to allow both functions and iterables to be passed into plotting functions. >>> def square(x): return x * x >>> maybe_call(square, {'x': 10}) 100 >>> data = [1, 2, 3] >>> maybe_call(data, {'x': 10}) [1, 2, 3] """ if not callable(maybe_fn): return maybe_fn args = get_fn_args(maybe_fn, kwargs, prefix=prefix) return maybe_fn(**args)
[ "def", "maybe_call", "(", "maybe_fn", ",", "kwargs", ":", "dict", ",", "prefix", ":", "str", "=", "None", ")", "->", "'Any'", ":", "if", "not", "callable", "(", "maybe_fn", ")", ":", "return", "maybe_fn", "args", "=", "get_fn_args", "(", "maybe_fn", ",...
If maybe_fn is a function, get its arguments from kwargs and call it, also searching for prefixed kwargs if prefix is specified. Otherwise, return maybe_fn. Used to allow both functions and iterables to be passed into plotting functions. >>> def square(x): return x * x >>> maybe_call(square, {'x': 10}) 100 >>> data = [1, 2, 3] >>> maybe_call(data, {'x': 10}) [1, 2, 3]
[ "If", "maybe_fn", "is", "a", "function", "get", "its", "arguments", "from", "kwargs", "and", "call", "it", "also", "searching", "for", "prefixed", "kwargs", "if", "prefix", "is", "specified", ".", "Otherwise", "return", "maybe_fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L12-L33
train
48,536
SamLau95/nbinteract
nbinteract/util.py
maybe_curry
def maybe_curry(maybe_fn, first_arg) -> 'Function | Any': """ If maybe_fn is a function, curries it and passes in first_arg. Otherwise returns maybe_fn. """ if not callable(maybe_fn): return maybe_fn return tz.curry(maybe_fn)(first_arg)
python
def maybe_curry(maybe_fn, first_arg) -> 'Function | Any': """ If maybe_fn is a function, curries it and passes in first_arg. Otherwise returns maybe_fn. """ if not callable(maybe_fn): return maybe_fn return tz.curry(maybe_fn)(first_arg)
[ "def", "maybe_curry", "(", "maybe_fn", ",", "first_arg", ")", "->", "'Function | Any'", ":", "if", "not", "callable", "(", "maybe_fn", ")", ":", "return", "maybe_fn", "return", "tz", ".", "curry", "(", "maybe_fn", ")", "(", "first_arg", ")" ]
If maybe_fn is a function, curries it and passes in first_arg. Otherwise returns maybe_fn.
[ "If", "maybe_fn", "is", "a", "function", "curries", "it", "and", "passes", "in", "first_arg", ".", "Otherwise", "returns", "maybe_fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L36-L43
train
48,537
SamLau95/nbinteract
nbinteract/util.py
get_fn_args
def get_fn_args(fn, kwargs: dict, prefix: str = None): """ Given function and a dict of kwargs return a dict containing only the args used by the function. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if a required arg is missing from the kwargs. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> def foo(a, b=10): return a + b >>> p(get_fn_args(foo, kwargs)) {'a': 1, 'b': 2} >>> def bar(a, b, d): return a + b + d >>> p(get_fn_args(bar, kwargs, prefix='x')) {'a': 1, 'b': 2, 'd': 4} >>> p(get_fn_args(bar, kwargs)) Traceback (most recent call last): ValueError: The following args are missing for the function bar: ['d'] """ all_args = get_all_args(fn) required_args = get_required_args(fn) fn_kwargs = pick_kwargs(kwargs, all_args, prefix) missing_args = [arg for arg in required_args if arg not in fn_kwargs] if missing_args: raise ValueError( 'The following args are missing for the function ' '{}: {}.'.format(fn.__name__, missing_args) ) return fn_kwargs
python
def get_fn_args(fn, kwargs: dict, prefix: str = None): """ Given function and a dict of kwargs return a dict containing only the args used by the function. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if a required arg is missing from the kwargs. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> def foo(a, b=10): return a + b >>> p(get_fn_args(foo, kwargs)) {'a': 1, 'b': 2} >>> def bar(a, b, d): return a + b + d >>> p(get_fn_args(bar, kwargs, prefix='x')) {'a': 1, 'b': 2, 'd': 4} >>> p(get_fn_args(bar, kwargs)) Traceback (most recent call last): ValueError: The following args are missing for the function bar: ['d'] """ all_args = get_all_args(fn) required_args = get_required_args(fn) fn_kwargs = pick_kwargs(kwargs, all_args, prefix) missing_args = [arg for arg in required_args if arg not in fn_kwargs] if missing_args: raise ValueError( 'The following args are missing for the function ' '{}: {}.'.format(fn.__name__, missing_args) ) return fn_kwargs
[ "def", "get_fn_args", "(", "fn", ",", "kwargs", ":", "dict", ",", "prefix", ":", "str", "=", "None", ")", ":", "all_args", "=", "get_all_args", "(", "fn", ")", "required_args", "=", "get_required_args", "(", "fn", ")", "fn_kwargs", "=", "pick_kwargs", "(...
Given function and a dict of kwargs return a dict containing only the args used by the function. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if a required arg is missing from the kwargs. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> def foo(a, b=10): return a + b >>> p(get_fn_args(foo, kwargs)) {'a': 1, 'b': 2} >>> def bar(a, b, d): return a + b + d >>> p(get_fn_args(bar, kwargs, prefix='x')) {'a': 1, 'b': 2, 'd': 4} >>> p(get_fn_args(bar, kwargs)) Traceback (most recent call last): ValueError: The following args are missing for the function bar: ['d']
[ "Given", "function", "and", "a", "dict", "of", "kwargs", "return", "a", "dict", "containing", "only", "the", "args", "used", "by", "the", "function", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L51-L88
train
48,538
SamLau95/nbinteract
nbinteract/util.py
get_all_args
def get_all_args(fn) -> list: """ Returns a list of all arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_all_args(foo) ['x', 'y', 'z'] """ sig = inspect.signature(fn) return list(sig.parameters)
python
def get_all_args(fn) -> list: """ Returns a list of all arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_all_args(foo) ['x', 'y', 'z'] """ sig = inspect.signature(fn) return list(sig.parameters)
[ "def", "get_all_args", "(", "fn", ")", "->", "list", ":", "sig", "=", "inspect", ".", "signature", "(", "fn", ")", "return", "list", "(", "sig", ".", "parameters", ")" ]
Returns a list of all arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_all_args(foo) ['x', 'y', 'z']
[ "Returns", "a", "list", "of", "all", "arguments", "for", "the", "function", "fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L91-L100
train
48,539
SamLau95/nbinteract
nbinteract/util.py
get_required_args
def get_required_args(fn) -> list: """ Returns a list of required arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_required_args(foo) ['x', 'y'] >>> def bar(x, y=100, *args, **kwargs): return x >>> get_required_args(bar) ['x'] """ sig = inspect.signature(fn) return [ name for name, param in sig.parameters.items() if param.default == inspect._empty and param.kind not in VAR_ARGS ]
python
def get_required_args(fn) -> list: """ Returns a list of required arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_required_args(foo) ['x', 'y'] >>> def bar(x, y=100, *args, **kwargs): return x >>> get_required_args(bar) ['x'] """ sig = inspect.signature(fn) return [ name for name, param in sig.parameters.items() if param.default == inspect._empty and param.kind not in VAR_ARGS ]
[ "def", "get_required_args", "(", "fn", ")", "->", "list", ":", "sig", "=", "inspect", ".", "signature", "(", "fn", ")", "return", "[", "name", "for", "name", ",", "param", "in", "sig", ".", "parameters", ".", "items", "(", ")", "if", "param", ".", ...
Returns a list of required arguments for the function fn. >>> def foo(x, y, z=100): return x + y + z >>> get_required_args(foo) ['x', 'y'] >>> def bar(x, y=100, *args, **kwargs): return x >>> get_required_args(bar) ['x']
[ "Returns", "a", "list", "of", "required", "arguments", "for", "the", "function", "fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L103-L119
train
48,540
SamLau95/nbinteract
nbinteract/util.py
pick_kwargs
def pick_kwargs(kwargs: dict, required_args: list, prefix: str = None): """ Given a dict of kwargs and a list of required_args, return a dict containing only the args in required_args. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> p(pick_kwargs(kwargs, ['a', 'd'])) {'a': 1} >>> p(pick_kwargs(kwargs, ['a', 'd'], prefix='x')) {'a': 1, 'd': 4} >>> pick_kwargs({'a': 1, 'x__a': 2}, ['a'], prefix='x') Traceback (most recent call last): ValueError: Both prefixed and unprefixed args were specified for the following parameters: ['a'] """ picked = {k: v for k, v in kwargs.items() if k in required_args} prefixed = {} if prefix: prefix = prefix + '__' prefixed = { _remove_prefix(k, prefix): v for k, v in kwargs.items() if k.startswith(prefix) and _remove_prefix(k, prefix) in required_args } conflicting_args = [k for k in picked if k in prefixed] if conflicting_args: raise ValueError( 'Both prefixed and unprefixed args were specified ' 'for the following parameters: {}'.format(conflicting_args) ) return tz.merge(picked, prefixed)
python
def pick_kwargs(kwargs: dict, required_args: list, prefix: str = None): """ Given a dict of kwargs and a list of required_args, return a dict containing only the args in required_args. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> p(pick_kwargs(kwargs, ['a', 'd'])) {'a': 1} >>> p(pick_kwargs(kwargs, ['a', 'd'], prefix='x')) {'a': 1, 'd': 4} >>> pick_kwargs({'a': 1, 'x__a': 2}, ['a'], prefix='x') Traceback (most recent call last): ValueError: Both prefixed and unprefixed args were specified for the following parameters: ['a'] """ picked = {k: v for k, v in kwargs.items() if k in required_args} prefixed = {} if prefix: prefix = prefix + '__' prefixed = { _remove_prefix(k, prefix): v for k, v in kwargs.items() if k.startswith(prefix) and _remove_prefix(k, prefix) in required_args } conflicting_args = [k for k in picked if k in prefixed] if conflicting_args: raise ValueError( 'Both prefixed and unprefixed args were specified ' 'for the following parameters: {}'.format(conflicting_args) ) return tz.merge(picked, prefixed)
[ "def", "pick_kwargs", "(", "kwargs", ":", "dict", ",", "required_args", ":", "list", ",", "prefix", ":", "str", "=", "None", ")", ":", "picked", "=", "{", "k", ":", "v", "for", "k", ",", "v", "in", "kwargs", ".", "items", "(", ")", "if", "k", "...
Given a dict of kwargs and a list of required_args, return a dict containing only the args in required_args. If prefix is specified, also search for args that begin with '{prefix}__'. Removes prefix in returned dict. Raises ValueError if both prefixed and unprefixed arg are given in kwargs. >>> from pprint import pprint as p # Use pprint to sort dict keys >>> kwargs = {'a': 1, 'b': 2, 'c': 3, 'x__d': 4} >>> p(pick_kwargs(kwargs, ['a', 'd'])) {'a': 1} >>> p(pick_kwargs(kwargs, ['a', 'd'], prefix='x')) {'a': 1, 'd': 4} >>> pick_kwargs({'a': 1, 'x__a': 2}, ['a'], prefix='x') Traceback (most recent call last): ValueError: Both prefixed and unprefixed args were specified for the following parameters: ['a']
[ "Given", "a", "dict", "of", "kwargs", "and", "a", "list", "of", "required_args", "return", "a", "dict", "containing", "only", "the", "args", "in", "required_args", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/util.py#L122-L163
train
48,541
SamLau95/nbinteract
nbinteract/plotting.py
use_options
def use_options(allowed): """ Decorator that logs warnings when unpermitted options are passed into its wrapped function. Requires that wrapped function has an keyword-only argument named `options`. If wrapped function has {options} in its docstring, fills in with the docs for allowed options. Args: allowed (list str): list of option keys allowed. If the wrapped function is called with an option not in allowed, log a warning. All values in allowed must also be present in `defaults`. Returns: Wrapped function with options validation. >>> @use_options(['title']) ... def test(*, options={}): return options['title'] >>> test(options={'title': 'Hello'}) 'Hello' >>> # test(options={'not_allowed': 123}) # Also logs error message '' """ def update_docstring(f): _update_option_docstring(f, allowed) @functools.wraps(f) def check_options(*args, **kwargs): options = kwargs.get('options', {}) not_allowed = [ option for option in options if option not in allowed # Don't validate private options and not option.startswith('_') ] if not_allowed: logging.warning( 'The following options are not supported by ' 'this function and will likely result in ' 'undefined behavior: {}.'.format(not_allowed) ) return f(*args, **kwargs) return check_options return update_docstring
python
def use_options(allowed): """ Decorator that logs warnings when unpermitted options are passed into its wrapped function. Requires that wrapped function has an keyword-only argument named `options`. If wrapped function has {options} in its docstring, fills in with the docs for allowed options. Args: allowed (list str): list of option keys allowed. If the wrapped function is called with an option not in allowed, log a warning. All values in allowed must also be present in `defaults`. Returns: Wrapped function with options validation. >>> @use_options(['title']) ... def test(*, options={}): return options['title'] >>> test(options={'title': 'Hello'}) 'Hello' >>> # test(options={'not_allowed': 123}) # Also logs error message '' """ def update_docstring(f): _update_option_docstring(f, allowed) @functools.wraps(f) def check_options(*args, **kwargs): options = kwargs.get('options', {}) not_allowed = [ option for option in options if option not in allowed # Don't validate private options and not option.startswith('_') ] if not_allowed: logging.warning( 'The following options are not supported by ' 'this function and will likely result in ' 'undefined behavior: {}.'.format(not_allowed) ) return f(*args, **kwargs) return check_options return update_docstring
[ "def", "use_options", "(", "allowed", ")", ":", "def", "update_docstring", "(", "f", ")", ":", "_update_option_docstring", "(", "f", ",", "allowed", ")", "@", "functools", ".", "wraps", "(", "f", ")", "def", "check_options", "(", "*", "args", ",", "*", ...
Decorator that logs warnings when unpermitted options are passed into its wrapped function. Requires that wrapped function has an keyword-only argument named `options`. If wrapped function has {options} in its docstring, fills in with the docs for allowed options. Args: allowed (list str): list of option keys allowed. If the wrapped function is called with an option not in allowed, log a warning. All values in allowed must also be present in `defaults`. Returns: Wrapped function with options validation. >>> @use_options(['title']) ... def test(*, options={}): return options['title'] >>> test(options={'title': 'Hello'}) 'Hello' >>> # test(options={'not_allowed': 123}) # Also logs error message ''
[ "Decorator", "that", "logs", "warnings", "when", "unpermitted", "options", "are", "passed", "into", "its", "wrapped", "function", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L123-L172
train
48,542
SamLau95/nbinteract
nbinteract/plotting.py
hist
def hist(hist_function, *, options={}, **interact_params): """ Generates an interactive histogram that allows users to change the parameters of the input hist_function. Args: hist_function (Array | (*args -> Array int | Array float)): Function that takes in parameters to interact with and returns an array of numbers. These numbers will be plotted in the resulting histogram. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of `hist_function`. Returns: VBox with two children: the interactive controls and the figure. >>> def gen_random(n_points): ... return np.random.normal(size=n_points) >>> hist(gen_random, n_points=(0, 1000, 10)) VBox(...) """ params = { 'marks': [{ 'sample': _array_or_placeholder(hist_function), 'bins': _get_option('bins'), 'normalized': _get_option('normalized'), 'scales': ( lambda opts: {'sample': opts['x_sc'], 'count': opts['y_sc']} ), }], } fig = options.get('_fig', False) or _create_fig(options=options) [hist] = _create_marks( fig=fig, marks=[bq.Hist], options=options, params=params ) _add_marks(fig, [hist]) def wrapped(**interact_params): hist.sample = util.maybe_call(hist_function, interact_params) controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
python
def hist(hist_function, *, options={}, **interact_params): """ Generates an interactive histogram that allows users to change the parameters of the input hist_function. Args: hist_function (Array | (*args -> Array int | Array float)): Function that takes in parameters to interact with and returns an array of numbers. These numbers will be plotted in the resulting histogram. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of `hist_function`. Returns: VBox with two children: the interactive controls and the figure. >>> def gen_random(n_points): ... return np.random.normal(size=n_points) >>> hist(gen_random, n_points=(0, 1000, 10)) VBox(...) """ params = { 'marks': [{ 'sample': _array_or_placeholder(hist_function), 'bins': _get_option('bins'), 'normalized': _get_option('normalized'), 'scales': ( lambda opts: {'sample': opts['x_sc'], 'count': opts['y_sc']} ), }], } fig = options.get('_fig', False) or _create_fig(options=options) [hist] = _create_marks( fig=fig, marks=[bq.Hist], options=options, params=params ) _add_marks(fig, [hist]) def wrapped(**interact_params): hist.sample = util.maybe_call(hist_function, interact_params) controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
[ "def", "hist", "(", "hist_function", ",", "*", ",", "options", "=", "{", "}", ",", "*", "*", "interact_params", ")", ":", "params", "=", "{", "'marks'", ":", "[", "{", "'sample'", ":", "_array_or_placeholder", "(", "hist_function", ")", ",", "'bins'", ...
Generates an interactive histogram that allows users to change the parameters of the input hist_function. Args: hist_function (Array | (*args -> Array int | Array float)): Function that takes in parameters to interact with and returns an array of numbers. These numbers will be plotted in the resulting histogram. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of `hist_function`. Returns: VBox with two children: the interactive controls and the figure. >>> def gen_random(n_points): ... return np.random.normal(size=n_points) >>> hist(gen_random, n_points=(0, 1000, 10)) VBox(...)
[ "Generates", "an", "interactive", "histogram", "that", "allows", "users", "to", "change", "the", "parameters", "of", "the", "input", "hist_function", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L182-L229
train
48,543
SamLau95/nbinteract
nbinteract/plotting.py
bar
def bar(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive bar chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for categories of bar chart. If function, must take parameters to interact with and return an array of strings or numbers. These will become the categories on the x-axis of the bar chart. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for heights of bars. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the heights of the bars on the y-axis. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> bar(['a', 'b', 'c'], [4, 7, 10]) VBox(...) >>> def categories(n): return np.arange(n) >>> def heights(xs, offset): ... return xs + offset >>> bar(categories, heights, n=(0, 10), offset=(1, 10)) VBox(...) >>> def multiply(xs, n): ... return xs * n >>> bar(categories, multiply, x__n=(0, 10), y__n=(1, 10)) VBox(...) 
""" params = { 'marks': [{ 'x': _array_or_placeholder(x_fn, PLACEHOLDER_RANGE), 'y': _array_or_placeholder(y_fn) }] } fig = options.get('_fig', False) or _create_fig( x_sc=bq.OrdinalScale, options=options ) [bar] = _create_marks( fig=fig, marks=[bq.Bars], options=options, params=params ) _add_marks(fig, [bar]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') bar.x = x_data y_bound = util.maybe_curry(y_fn, x_data) bar.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
python
def bar(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive bar chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for categories of bar chart. If function, must take parameters to interact with and return an array of strings or numbers. These will become the categories on the x-axis of the bar chart. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for heights of bars. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the heights of the bars on the y-axis. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> bar(['a', 'b', 'c'], [4, 7, 10]) VBox(...) >>> def categories(n): return np.arange(n) >>> def heights(xs, offset): ... return xs + offset >>> bar(categories, heights, n=(0, 10), offset=(1, 10)) VBox(...) >>> def multiply(xs, n): ... return xs * n >>> bar(categories, multiply, x__n=(0, 10), y__n=(1, 10)) VBox(...) 
""" params = { 'marks': [{ 'x': _array_or_placeholder(x_fn, PLACEHOLDER_RANGE), 'y': _array_or_placeholder(y_fn) }] } fig = options.get('_fig', False) or _create_fig( x_sc=bq.OrdinalScale, options=options ) [bar] = _create_marks( fig=fig, marks=[bq.Bars], options=options, params=params ) _add_marks(fig, [bar]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') bar.x = x_data y_bound = util.maybe_curry(y_fn, x_data) bar.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
[ "def", "bar", "(", "x_fn", ",", "y_fn", ",", "*", ",", "options", "=", "{", "}", ",", "*", "*", "interact_params", ")", ":", "params", "=", "{", "'marks'", ":", "[", "{", "'x'", ":", "_array_or_placeholder", "(", "x_fn", ",", "PLACEHOLDER_RANGE", ")"...
Generates an interactive bar chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for categories of bar chart. If function, must take parameters to interact with and return an array of strings or numbers. These will become the categories on the x-axis of the bar chart. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for heights of bars. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the heights of the bars on the y-axis. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> bar(['a', 'b', 'c'], [4, 7, 10]) VBox(...) >>> def categories(n): return np.arange(n) >>> def heights(xs, offset): ... return xs + offset >>> bar(categories, heights, n=(0, 10), offset=(1, 10)) VBox(...) >>> def multiply(xs, n): ... return xs * n >>> bar(categories, multiply, x__n=(0, 10), y__n=(1, 10)) VBox(...)
[ "Generates", "an", "interactive", "bar", "chart", "that", "allows", "users", "to", "change", "the", "parameters", "of", "the", "inputs", "x_fn", "and", "y_fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L235-L305
train
48,544
SamLau95/nbinteract
nbinteract/plotting.py
scatter_drag
def scatter_drag( x_points: 'Array', y_points: 'Array', *, fig=None, show_eqn=True, options={} ): """ Generates an interactive scatter plot with the best fit line plotted over the points. The points can be dragged by the user and the line will automatically update. Args: x_points (Array Number): x-values of points to plot y_points (Array Number): y-values of points to plot Kwargs: show_eqn (bool): If True (default), displays the best fit line's equation above the scatterplot. {options} Returns: VBox with two children: the equation widget and the figure. >>> xs = np.arange(10) >>> ys = np.arange(10) + np.random.rand(10) >>> scatter_drag(xs, ys) VBox(...) """ params = { 'marks': [{ 'x': x_points, 'y': y_points, 'enable_move': True, }, { 'colors': [GOLDENROD], }] } fig = options.get('_fig', False) or _create_fig(options=options) [scat, lin] = _create_marks( fig=fig, marks=[bq.Scatter, bq.Lines], options=options, params=params ) _add_marks(fig, [scat, lin]) equation = widgets.Label() # create line fit to data and display equation def update_line(change=None): x_sc = scat.scales['x'] lin.x = [ x_sc.min if x_sc.min is not None else np.min(scat.x), x_sc.max if x_sc.max is not None else np.max(scat.x), ] poly = np.polyfit(scat.x, scat.y, deg=1) lin.y = np.polyval(poly, lin.x) if show_eqn: equation.value = 'y = {:.2f}x + {:.2f}'.format(poly[0], poly[1]) update_line() scat.observe(update_line, names=['x', 'y']) return widgets.VBox([equation, fig])
python
def scatter_drag( x_points: 'Array', y_points: 'Array', *, fig=None, show_eqn=True, options={} ): """ Generates an interactive scatter plot with the best fit line plotted over the points. The points can be dragged by the user and the line will automatically update. Args: x_points (Array Number): x-values of points to plot y_points (Array Number): y-values of points to plot Kwargs: show_eqn (bool): If True (default), displays the best fit line's equation above the scatterplot. {options} Returns: VBox with two children: the equation widget and the figure. >>> xs = np.arange(10) >>> ys = np.arange(10) + np.random.rand(10) >>> scatter_drag(xs, ys) VBox(...) """ params = { 'marks': [{ 'x': x_points, 'y': y_points, 'enable_move': True, }, { 'colors': [GOLDENROD], }] } fig = options.get('_fig', False) or _create_fig(options=options) [scat, lin] = _create_marks( fig=fig, marks=[bq.Scatter, bq.Lines], options=options, params=params ) _add_marks(fig, [scat, lin]) equation = widgets.Label() # create line fit to data and display equation def update_line(change=None): x_sc = scat.scales['x'] lin.x = [ x_sc.min if x_sc.min is not None else np.min(scat.x), x_sc.max if x_sc.max is not None else np.max(scat.x), ] poly = np.polyfit(scat.x, scat.y, deg=1) lin.y = np.polyval(poly, lin.x) if show_eqn: equation.value = 'y = {:.2f}x + {:.2f}'.format(poly[0], poly[1]) update_line() scat.observe(update_line, names=['x', 'y']) return widgets.VBox([equation, fig])
[ "def", "scatter_drag", "(", "x_points", ":", "'Array'", ",", "y_points", ":", "'Array'", ",", "*", ",", "fig", "=", "None", ",", "show_eqn", "=", "True", ",", "options", "=", "{", "}", ")", ":", "params", "=", "{", "'marks'", ":", "[", "{", "'x'", ...
Generates an interactive scatter plot with the best fit line plotted over the points. The points can be dragged by the user and the line will automatically update. Args: x_points (Array Number): x-values of points to plot y_points (Array Number): y-values of points to plot Kwargs: show_eqn (bool): If True (default), displays the best fit line's equation above the scatterplot. {options} Returns: VBox with two children: the equation widget and the figure. >>> xs = np.arange(10) >>> ys = np.arange(10) + np.random.rand(10) >>> scatter_drag(xs, ys) VBox(...)
[ "Generates", "an", "interactive", "scatter", "plot", "with", "the", "best", "fit", "line", "plotted", "over", "the", "points", ".", "The", "points", "can", "be", "dragged", "by", "the", "user", "and", "the", "line", "will", "automatically", "update", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L312-L377
train
48,545
SamLau95/nbinteract
nbinteract/plotting.py
scatter
def scatter(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive scatter chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the scatter plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the scatter plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> def x_values(n): return np.random.choice(100, n) >>> def y_values(xs): return np.random.choice(100, len(xs)) >>> scatter(x_values, y_values, n=(0,200)) VBox(...) """ params = { 'marks': [{ 'x': _array_or_placeholder(x_fn), 'y': _array_or_placeholder(y_fn), 'marker': _get_option('marker'), }] } fig = options.get('_fig', False) or _create_fig(options=options) [scat] = _create_marks( fig=fig, marks=[bq.Scatter], options=options, params=params ) _add_marks(fig, [scat]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') scat.x = x_data y_bound = util.maybe_curry(y_fn, x_data) scat.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
python
def scatter(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive scatter chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the scatter plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the scatter plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> def x_values(n): return np.random.choice(100, n) >>> def y_values(xs): return np.random.choice(100, len(xs)) >>> scatter(x_values, y_values, n=(0,200)) VBox(...) """ params = { 'marks': [{ 'x': _array_or_placeholder(x_fn), 'y': _array_or_placeholder(y_fn), 'marker': _get_option('marker'), }] } fig = options.get('_fig', False) or _create_fig(options=options) [scat] = _create_marks( fig=fig, marks=[bq.Scatter], options=options, params=params ) _add_marks(fig, [scat]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') scat.x = x_data y_bound = util.maybe_curry(y_fn, x_data) scat.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
[ "def", "scatter", "(", "x_fn", ",", "y_fn", ",", "*", ",", "options", "=", "{", "}", ",", "*", "*", "interact_params", ")", ":", "params", "=", "{", "'marks'", ":", "[", "{", "'x'", ":", "_array_or_placeholder", "(", "x_fn", ")", ",", "'y'", ":", ...
Generates an interactive scatter chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the scatter plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the scatter plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> def x_values(n): return np.random.choice(100, n) >>> def y_values(xs): return np.random.choice(100, len(xs)) >>> scatter(x_values, y_values, n=(0,200)) VBox(...)
[ "Generates", "an", "interactive", "scatter", "chart", "that", "allows", "users", "to", "change", "the", "parameters", "of", "the", "inputs", "x_fn", "and", "y_fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L384-L444
train
48,546
SamLau95/nbinteract
nbinteract/plotting.py
line
def line(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the line plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the line plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> line([1, 2, 3], [4, 7, 10]) VBox(...) >>> def x_values(max): return np.arange(0, max) >>> def y_values(xs, sd): ... return xs + np.random.normal(len(xs), scale=sd) >>> line(x_values, y_values, max=(10, 50), sd=(1, 10)) VBox(...) """ fig = options.get('_fig', False) or _create_fig(options=options) [line] = (_create_marks(fig=fig, marks=[bq.Lines], options=options)) _add_marks(fig, [line]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') line.x = x_data y_bound = util.maybe_curry(y_fn, x_data) line.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
python
def line(x_fn, y_fn, *, options={}, **interact_params): """ Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the line plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the line plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> line([1, 2, 3], [4, 7, 10]) VBox(...) >>> def x_values(max): return np.arange(0, max) >>> def y_values(xs, sd): ... return xs + np.random.normal(len(xs), scale=sd) >>> line(x_values, y_values, max=(10, 50), sd=(1, 10)) VBox(...) """ fig = options.get('_fig', False) or _create_fig(options=options) [line] = (_create_marks(fig=fig, marks=[bq.Lines], options=options)) _add_marks(fig, [line]) def wrapped(**interact_params): x_data = util.maybe_call(x_fn, interact_params, prefix='x') line.x = x_data y_bound = util.maybe_curry(y_fn, x_data) line.y = util.maybe_call(y_bound, interact_params, prefix='y') controls = widgets.interactive(wrapped, **interact_params) return widgets.VBox([controls, fig])
[ "def", "line", "(", "x_fn", ",", "y_fn", ",", "*", ",", "options", "=", "{", "}", ",", "*", "*", "interact_params", ")", ":", "fig", "=", "options", ".", "get", "(", "'_fig'", ",", "False", ")", "or", "_create_fig", "(", "options", "=", "options", ...
Generates an interactive line chart that allows users to change the parameters of the inputs x_fn and y_fn. Args: x_fn (Array | (*args -> Array str | Array int | Array float)): If array, uses array values for x-coordinates. If function, must take parameters to interact with and return an array of strings or numbers. These will become the x-coordinates of the line plot. y_fn (Array | (Array, *args -> Array int | Array float)): If array, uses array values for y-coordinates. If function, must take in the output of x_fn as its first parameter and optionally other parameters to interact with. Must return an array of numbers. These will become the y-coordinates of the line plot. Kwargs: {options} interact_params (dict): Keyword arguments in the same format as `ipywidgets.interact`. One argument is required for each argument of both `x_fn` and `y_fn`. If `x_fn` and `y_fn` have conflicting parameter names, prefix the corresponding kwargs with `x__` and `y__`. Returns: VBox with two children: the interactive controls and the figure. >>> line([1, 2, 3], [4, 7, 10]) VBox(...) >>> def x_values(max): return np.arange(0, max) >>> def y_values(xs, sd): ... return xs + np.random.normal(len(xs), scale=sd) >>> line(x_values, y_values, max=(10, 50), sd=(1, 10)) VBox(...)
[ "Generates", "an", "interactive", "line", "chart", "that", "allows", "users", "to", "change", "the", "parameters", "of", "the", "inputs", "x_fn", "and", "y_fn", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L451-L506
train
48,547
SamLau95/nbinteract
nbinteract/plotting.py
_merge_with_defaults
def _merge_with_defaults(params): """ Performs a 2-level deep merge of params with _default_params with corrent merging of params for each mark. This is a bit complicated since params['marks'] is a list and we need to make sure each mark gets the default params. """ marks_params = [ tz.merge(default, param) for default, param in zip(itertools.repeat(_default_params['marks']), params['marks']) ] if 'marks' in params else [_default_params['marks']] merged_without_marks = tz.merge_with( tz.merge, tz.dissoc(_default_params, 'marks'), tz.dissoc(params, 'marks') ) return tz.merge(merged_without_marks, {'marks': marks_params})
python
def _merge_with_defaults(params): """ Performs a 2-level deep merge of params with _default_params with corrent merging of params for each mark. This is a bit complicated since params['marks'] is a list and we need to make sure each mark gets the default params. """ marks_params = [ tz.merge(default, param) for default, param in zip(itertools.repeat(_default_params['marks']), params['marks']) ] if 'marks' in params else [_default_params['marks']] merged_without_marks = tz.merge_with( tz.merge, tz.dissoc(_default_params, 'marks'), tz.dissoc(params, 'marks') ) return tz.merge(merged_without_marks, {'marks': marks_params})
[ "def", "_merge_with_defaults", "(", "params", ")", ":", "marks_params", "=", "[", "tz", ".", "merge", "(", "default", ",", "param", ")", "for", "default", ",", "param", "in", "zip", "(", "itertools", ".", "repeat", "(", "_default_params", "[", "'marks'", ...
Performs a 2-level deep merge of params with _default_params with corrent merging of params for each mark. This is a bit complicated since params['marks'] is a list and we need to make sure each mark gets the default params.
[ "Performs", "a", "2", "-", "level", "deep", "merge", "of", "params", "with", "_default_params", "with", "corrent", "merging", "of", "params", "for", "each", "mark", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L636-L654
train
48,548
SamLau95/nbinteract
nbinteract/plotting.py
_create_fig
def _create_fig( *, x_sc=bq.LinearScale, y_sc=bq.LinearScale, x_ax=bq.Axis, y_ax=bq.Axis, fig=bq.Figure, options={}, params={} ): """ Initializes scales and axes for a bqplot figure and returns the resulting blank figure. Each plot component is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { plot_component: { trait: value, ... } } For example, to change the grid lines of the x-axis: params={ 'x_ax': {'grid_lines' : 'solid'} } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'x_ax': {'scale': lambda opts: opts['x_sc'] } } """ params = _merge_with_defaults(params) x_sc = x_sc(**_call_params(params['x_sc'], options)) y_sc = y_sc(**_call_params(params['y_sc'], options)) options = tz.merge(options, {'x_sc': x_sc, 'y_sc': y_sc}) x_ax = x_ax(**_call_params(params['x_ax'], options)) y_ax = y_ax(**_call_params(params['y_ax'], options)) options = tz.merge(options, {'x_ax': x_ax, 'y_ax': y_ax, 'marks': []}) fig = fig(**_call_params(params['fig'], options)) return fig
python
def _create_fig( *, x_sc=bq.LinearScale, y_sc=bq.LinearScale, x_ax=bq.Axis, y_ax=bq.Axis, fig=bq.Figure, options={}, params={} ): """ Initializes scales and axes for a bqplot figure and returns the resulting blank figure. Each plot component is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { plot_component: { trait: value, ... } } For example, to change the grid lines of the x-axis: params={ 'x_ax': {'grid_lines' : 'solid'} } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'x_ax': {'scale': lambda opts: opts['x_sc'] } } """ params = _merge_with_defaults(params) x_sc = x_sc(**_call_params(params['x_sc'], options)) y_sc = y_sc(**_call_params(params['y_sc'], options)) options = tz.merge(options, {'x_sc': x_sc, 'y_sc': y_sc}) x_ax = x_ax(**_call_params(params['x_ax'], options)) y_ax = y_ax(**_call_params(params['y_ax'], options)) options = tz.merge(options, {'x_ax': x_ax, 'y_ax': y_ax, 'marks': []}) fig = fig(**_call_params(params['fig'], options)) return fig
[ "def", "_create_fig", "(", "*", ",", "x_sc", "=", "bq", ".", "LinearScale", ",", "y_sc", "=", "bq", ".", "LinearScale", ",", "x_ax", "=", "bq", ".", "Axis", ",", "y_ax", "=", "bq", ".", "Axis", ",", "fig", "=", "bq", ".", "Figure", ",", "options"...
Initializes scales and axes for a bqplot figure and returns the resulting blank figure. Each plot component is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { plot_component: { trait: value, ... } } For example, to change the grid lines of the x-axis: params={ 'x_ax': {'grid_lines' : 'solid'} } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'x_ax': {'scale': lambda opts: opts['x_sc'] } }
[ "Initializes", "scales", "and", "axes", "for", "a", "bqplot", "figure", "and", "returns", "the", "resulting", "blank", "figure", ".", "Each", "plot", "component", "is", "passed", "in", "as", "a", "class", ".", "The", "plot", "options", "should", "be", "pas...
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L657-L694
train
48,549
SamLau95/nbinteract
nbinteract/plotting.py
_create_marks
def _create_marks(fig, marks=[bq.Mark], options={}, params={}): """ Initializes and returns marks for a figure as a list. Each mark is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { 'mark': [{ trait: value, ... }, ...] } For example, when initializing two marks you can assign different colors to each one: params={ 'marks': [ {'colors': [DARK_BLUE]}, {'colors': [GOLDENROD]}, ] } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'marks': {'scale': lambda opts: opts['x_sc'] } } """ params = _merge_with_defaults(params) # Although fig provides scale_x and scale_y properties, the scales on the # axes are the only ones that are actually used. x_ax, y_ax = fig.axes x_sc, y_sc = x_ax.scale, y_ax.scale options = tz.merge(options, {'x_sc': x_sc, 'y_sc': y_sc}) marks = [ mark_cls(**_call_params(mark_params, options)) for mark_cls, mark_params in zip(marks, params['marks']) ] return marks
python
def _create_marks(fig, marks=[bq.Mark], options={}, params={}): """ Initializes and returns marks for a figure as a list. Each mark is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { 'mark': [{ trait: value, ... }, ...] } For example, when initializing two marks you can assign different colors to each one: params={ 'marks': [ {'colors': [DARK_BLUE]}, {'colors': [GOLDENROD]}, ] } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'marks': {'scale': lambda opts: opts['x_sc'] } } """ params = _merge_with_defaults(params) # Although fig provides scale_x and scale_y properties, the scales on the # axes are the only ones that are actually used. x_ax, y_ax = fig.axes x_sc, y_sc = x_ax.scale, y_ax.scale options = tz.merge(options, {'x_sc': x_sc, 'y_sc': y_sc}) marks = [ mark_cls(**_call_params(mark_params, options)) for mark_cls, mark_params in zip(marks, params['marks']) ] return marks
[ "def", "_create_marks", "(", "fig", ",", "marks", "=", "[", "bq", ".", "Mark", "]", ",", "options", "=", "{", "}", ",", "params", "=", "{", "}", ")", ":", "params", "=", "_merge_with_defaults", "(", "params", ")", "# Although fig provides scale_x and scale...
Initializes and returns marks for a figure as a list. Each mark is passed in as a class. The plot options should be passed into options. Any additional parameters to initialize plot components are passed into params as a dict of { 'mark': [{ trait: value, ... }, ...] } For example, when initializing two marks you can assign different colors to each one: params={ 'marks': [ {'colors': [DARK_BLUE]}, {'colors': [GOLDENROD]}, ] } If the param value is a function, it will be called with the options dict augmented with all previously created plot elements. This permits dependencies on plot elements: params={ 'marks': {'scale': lambda opts: opts['x_sc'] } }
[ "Initializes", "and", "returns", "marks", "for", "a", "figure", "as", "a", "list", ".", "Each", "mark", "is", "passed", "in", "as", "a", "class", ".", "The", "plot", "options", "should", "be", "passed", "into", "options", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L697-L733
train
48,550
SamLau95/nbinteract
nbinteract/plotting.py
_array_or_placeholder
def _array_or_placeholder( maybe_iterable, placeholder=PLACEHOLDER_ZEROS ) -> np.array: """ Return maybe_iterable's contents or a placeholder array. Used to give bqplot its required initial points to plot even if we're using a function to generate points. """ if isinstance(maybe_iterable, collections.Iterable): return np.array([i for i in maybe_iterable]) return placeholder
python
def _array_or_placeholder( maybe_iterable, placeholder=PLACEHOLDER_ZEROS ) -> np.array: """ Return maybe_iterable's contents or a placeholder array. Used to give bqplot its required initial points to plot even if we're using a function to generate points. """ if isinstance(maybe_iterable, collections.Iterable): return np.array([i for i in maybe_iterable]) return placeholder
[ "def", "_array_or_placeholder", "(", "maybe_iterable", ",", "placeholder", "=", "PLACEHOLDER_ZEROS", ")", "->", "np", ".", "array", ":", "if", "isinstance", "(", "maybe_iterable", ",", "collections", ".", "Iterable", ")", ":", "return", "np", ".", "array", "("...
Return maybe_iterable's contents or a placeholder array. Used to give bqplot its required initial points to plot even if we're using a function to generate points.
[ "Return", "maybe_iterable", "s", "contents", "or", "a", "placeholder", "array", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/plotting.py#L743-L754
train
48,551
SamLau95/nbinteract
nbinteract/cli.py
binder_spec_from_github_url
def binder_spec_from_github_url(github_url): """ Converts GitHub origin into a Binder spec. For example: git@github.com:SamLau95/nbinteract.git -> SamLau95/nbinteract/master https://github.com/Calebs97/riemann_book -> Calebs97/riemann_book/master """ tokens = re.split(r'/|:', github_url.replace('.git', '')) # The username and reponame are the last two tokens return '{}/{}/master'.format(tokens[-2], tokens[-1])
python
def binder_spec_from_github_url(github_url):
    """
    Converts GitHub origin into a Binder spec.

    For example:

    git@github.com:SamLau95/nbinteract.git -> SamLau95/nbinteract/master
    https://github.com/Calebs97/riemann_book -> Calebs97/riemann_book/master
    """
    # Drop the optional .git suffix, then split on both '/' and ':' so that
    # SSH (colon-separated) and HTTPS (slash-separated) origins both work.
    pieces = re.split(r'/|:', github_url.replace('.git', ''))
    # The username and repo name are always the final two pieces.
    username, reponame = pieces[-2], pieces[-1]
    return '{}/{}/master'.format(username, reponame)
[ "def", "binder_spec_from_github_url", "(", "github_url", ")", ":", "tokens", "=", "re", ".", "split", "(", "r'/|:'", ",", "github_url", ".", "replace", "(", "'.git'", ",", "''", ")", ")", "# The username and reponame are the last two tokens", "return", "'{}/{}/maste...
Converts GitHub origin into a Binder spec. For example: git@github.com:SamLau95/nbinteract.git -> SamLau95/nbinteract/master https://github.com/Calebs97/riemann_book -> Calebs97/riemann_book/master
[ "Converts", "GitHub", "origin", "into", "a", "Binder", "spec", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L93-L103
train
48,552
SamLau95/nbinteract
nbinteract/cli.py
main
def main(): """ Parses command line options and runs nbinteract. """ arguments = docopt(__doc__) if arguments['init']: return_code = init() sys.exit(return_code) run_converter(arguments)
python
def main():
    """
    Parses command line options and runs nbinteract.
    """
    arguments = docopt(__doc__)

    # `nbinteract init` sets up the project and exits with init's status code;
    # every other invocation runs the notebook converter.
    if not arguments['init']:
        run_converter(arguments)
        return
    sys.exit(init())
[ "def", "main", "(", ")", ":", "arguments", "=", "docopt", "(", "__doc__", ")", "if", "arguments", "[", "'init'", "]", ":", "return_code", "=", "init", "(", ")", "sys", ".", "exit", "(", "return_code", ")", "run_converter", "(", "arguments", ")" ]
Parses command line options and runs nbinteract.
[ "Parses", "command", "line", "options", "and", "runs", "nbinteract", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L162-L171
train
48,553
SamLau95/nbinteract
nbinteract/cli.py
run_converter
def run_converter(arguments): """ Converts notebooks to HTML files. Returns list of output file paths """ # Get spec from config file if os.path.isfile(CONFIG_FILE): with open(CONFIG_FILE, encoding='utf-8') as f: config = json.load(f) arguments['--spec'] = arguments['--spec'] or config['spec'] check_arguments(arguments) notebooks = flatmap( expand_folder, arguments['NOTEBOOKS'], recursive=arguments['--recursive'] ) exporter = init_exporter( extract_images=arguments['--images'], spec=arguments['--spec'], template_file=arguments['--template'], button_at_top=(not arguments['--no-top-button']), execute=arguments['--execute'], ) log('Converting notebooks to HTML...') output_files = [] for notebook in notebooks: output_file = convert( notebook, exporter=exporter, output_folder=arguments['--output'], images_folder=arguments['--images'] ) output_files.append(output_file) log('Converted {} to {}'.format(notebook, output_file)) log('Done!') if arguments['--images']: log('Resulting images located in {}'.format(arguments['--images'])) return output_files
python
def run_converter(arguments):
    """
    Converts notebooks to HTML files. Returns list of output file paths
    """
    # A config file (written by `nbinteract init`) can supply the Binder
    # spec when it isn't given on the command line.
    if os.path.isfile(CONFIG_FILE):
        with open(CONFIG_FILE, encoding='utf-8') as config_file:
            config = json.load(config_file)
        arguments['--spec'] = arguments['--spec'] or config['spec']

    check_arguments(arguments)

    notebooks = flatmap(
        expand_folder,
        arguments['NOTEBOOKS'],
        recursive=arguments['--recursive']
    )

    exporter = init_exporter(
        extract_images=arguments['--images'],
        spec=arguments['--spec'],
        template_file=arguments['--template'],
        button_at_top=(not arguments['--no-top-button']),
        execute=arguments['--execute'],
    )

    log('Converting notebooks to HTML...')
    output_files = []
    for notebook in notebooks:
        converted = convert(
            notebook,
            exporter=exporter,
            output_folder=arguments['--output'],
            images_folder=arguments['--images']
        )
        output_files.append(converted)
        log('Converted {} to {}'.format(notebook, converted))

    log('Done!')
    if arguments['--images']:
        log('Resulting images located in {}'.format(arguments['--images']))
    return output_files
[ "def", "run_converter", "(", "arguments", ")", ":", "# Get spec from config file", "if", "os", ".", "path", ".", "isfile", "(", "CONFIG_FILE", ")", ":", "with", "open", "(", "CONFIG_FILE", ",", "encoding", "=", "'utf-8'", ")", "as", "f", ":", "config", "="...
Converts notebooks to HTML files. Returns list of output file paths
[ "Converts", "notebooks", "to", "HTML", "files", ".", "Returns", "list", "of", "output", "file", "paths" ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L174-L218
train
48,554
SamLau95/nbinteract
nbinteract/cli.py
expand_folder
def expand_folder(notebook_or_folder, recursive=False): """ If notebook_or_folder is a folder, returns a list containing all notebooks in the folder. Otherwise, returns a list containing the notebook name. If recursive is True, recurses into subdirectories. """ is_file = os.path.isfile(notebook_or_folder) is_dir = os.path.isdir(notebook_or_folder) if not (is_file or is_dir): raise ValueError( '{} is neither an existing file nor a folder.' .format(notebook_or_folder) ) if is_file: return [notebook_or_folder] # Now we know the input is a directory if not recursive: return glob('{}/*.ipynb'.format(notebook_or_folder)) # Recursive case return [ os.path.join(folder, filename) for folder, _, filenames in os.walk(notebook_or_folder) # Skip folders that start with . if not os.path.basename(folder).startswith('.') for filename in fnmatch.filter(filenames, '*.ipynb') ]
python
def expand_folder(notebook_or_folder, recursive=False):
    """
    If notebook_or_folder is a folder, returns a list containing all
    notebooks in the folder. Otherwise, returns a list containing the
    notebook name. If recursive is True, recurses into subdirectories.
    """
    # A plain file is returned as a singleton list, unchanged.
    if os.path.isfile(notebook_or_folder):
        return [notebook_or_folder]
    if not os.path.isdir(notebook_or_folder):
        raise ValueError(
            '{} is neither an existing file nor a folder.'
            .format(notebook_or_folder)
        )

    # Non-recursive: only notebooks directly inside the folder.
    if not recursive:
        return glob('{}/*.ipynb'.format(notebook_or_folder))

    # Recursive: walk the tree, skipping hidden folders (names starting with
    # '.', e.g. .ipynb_checkpoints).
    found = []
    for folder, _, filenames in os.walk(notebook_or_folder):
        if os.path.basename(folder).startswith('.'):
            continue
        for filename in fnmatch.filter(filenames, '*.ipynb'):
            found.append(os.path.join(folder, filename))
    return found
[ "def", "expand_folder", "(", "notebook_or_folder", ",", "recursive", "=", "False", ")", ":", "is_file", "=", "os", ".", "path", ".", "isfile", "(", "notebook_or_folder", ")", "is_dir", "=", "os", ".", "path", ".", "isdir", "(", "notebook_or_folder", ")", "...
If notebook_or_folder is a folder, returns a list containing all notebooks in the folder. Otherwise, returns a list containing the notebook name. If recursive is True, recurses into subdirectories.
[ "If", "notebook_or_folder", "is", "a", "folder", "returns", "a", "list", "containing", "all", "notebooks", "in", "the", "folder", ".", "Otherwise", "returns", "a", "list", "containing", "the", "notebook", "name", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L360-L389
train
48,555
SamLau95/nbinteract
nbinteract/cli.py
init_exporter
def init_exporter(extract_images, execute, **exporter_config): """ Returns an initialized exporter. """ config = Config(InteractExporter=exporter_config) preprocessors = [] if extract_images: # Use ExtractOutputPreprocessor to extract the images to separate files preprocessors.append( 'nbconvert.preprocessors.ExtractOutputPreprocessor' ) if execute: # Use the NbiExecutePreprocessor to correctly generate widget output # for interact() calls. preprocessors.append('nbinteract.preprocessors.NbiExecutePreprocessor') config.InteractExporter.preprocessors = preprocessors exporter = InteractExporter(config=config) return exporter
python
def init_exporter(extract_images, execute, **exporter_config):
    """
    Returns an initialized exporter.
    """
    config = Config(InteractExporter=exporter_config)

    # Optional preprocessors, paired with the flag that enables each one:
    # - ExtractOutputPreprocessor extracts images to separate files.
    # - NbiExecutePreprocessor correctly generates widget output for
    #   interact() calls.
    optional_preprocessors = [
        ('nbconvert.preprocessors.ExtractOutputPreprocessor', extract_images),
        ('nbinteract.preprocessors.NbiExecutePreprocessor', execute),
    ]
    config.InteractExporter.preprocessors = [
        name for name, enabled in optional_preprocessors if enabled
    ]

    return InteractExporter(config=config)
[ "def", "init_exporter", "(", "extract_images", ",", "execute", ",", "*", "*", "exporter_config", ")", ":", "config", "=", "Config", "(", "InteractExporter", "=", "exporter_config", ")", "preprocessors", "=", "[", "]", "if", "extract_images", ":", "# Use ExtractO...
Returns an initialized exporter.
[ "Returns", "an", "initialized", "exporter", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L392-L412
train
48,556
SamLau95/nbinteract
nbinteract/cli.py
make_exporter_resources
def make_exporter_resources(nb_name, out_folder, images_folder=None): """ Creates resources dict for the exporter """ resources = defaultdict(str) resources['metadata'] = defaultdict(str) resources['metadata']['name'] = nb_name resources['metadata']['path'] = out_folder # This results in images like AB_5_1.png for a notebook called AB.ipynb resources['unique_key'] = nb_name resources['output_files_dir'] = images_folder return resources
python
def make_exporter_resources(nb_name, out_folder, images_folder=None):
    """
    Creates resources dict for the exporter
    """
    # defaultdict(str) lets the exporter read unset keys as empty strings.
    metadata = defaultdict(str)
    metadata['name'] = nb_name
    metadata['path'] = out_folder

    resources = defaultdict(str)
    resources['metadata'] = metadata
    # This results in images like AB_5_1.png for a notebook called AB.ipynb
    resources['unique_key'] = nb_name
    resources['output_files_dir'] = images_folder
    return resources
[ "def", "make_exporter_resources", "(", "nb_name", ",", "out_folder", ",", "images_folder", "=", "None", ")", ":", "resources", "=", "defaultdict", "(", "str", ")", "resources", "[", "'metadata'", "]", "=", "defaultdict", "(", "str", ")", "resources", "[", "'...
Creates resources dict for the exporter
[ "Creates", "resources", "dict", "for", "the", "exporter" ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L415-L428
train
48,557
SamLau95/nbinteract
nbinteract/cli.py
convert
def convert(notebook_path, exporter, output_folder=None, images_folder=None): """ Converts notebook into an HTML file, outputting notebooks into output_folder if set and images into images_folder if set. Returns the path to the resulting HTML file. """ if output_folder: os.makedirs(output_folder, exist_ok=True) if images_folder: os.makedirs(images_folder, exist_ok=True) # Computes notebooks/ch1 and <name>.ipynb from notebooks/ch1/<name>.ipynb path, filename = os.path.split(notebook_path) # Computes <name> from <name>.ipynb basename, _ = os.path.splitext(filename) # Computes <name>.html from notebooks/<name>.ipynb outfile_name = basename + '.html' # If output_folder is not set, we default to the original folder of the # notebook. out_folder = path if not output_folder else output_folder outfile_path = os.path.join(out_folder, outfile_name) notebook = nbformat.read(notebook_path, as_version=4) html, resources = exporter.from_notebook_node( notebook, resources=make_exporter_resources(basename, out_folder, images_folder), ) # Write out HTML with open(outfile_path, 'w', encoding='utf-8') as outfile: outfile.write(html) # Write out images. If images_folder wasn't specified, resources['outputs'] # is None so this loop won't run for image_path, image_data in resources.get('outputs', {}).items(): with open(image_path, 'wb') as outimage: outimage.write(image_data) return outfile_path
python
def convert(notebook_path, exporter, output_folder=None, images_folder=None):
    """
    Converts notebook into an HTML file, outputting notebooks into
    output_folder if set and images into images_folder if set.

    Returns the path to the resulting HTML file.
    """
    for folder in (output_folder, images_folder):
        if folder:
            os.makedirs(folder, exist_ok=True)

    # notebooks/ch1/<name>.ipynb -> ('notebooks/ch1', '<name>.ipynb')
    path, filename = os.path.split(notebook_path)
    # '<name>.ipynb' -> '<name>'
    basename, _ = os.path.splitext(filename)

    # Default to writing the HTML next to the original notebook.
    out_folder = output_folder if output_folder else path
    outfile_path = os.path.join(out_folder, basename + '.html')

    notebook = nbformat.read(notebook_path, as_version=4)
    html, resources = exporter.from_notebook_node(
        notebook,
        resources=make_exporter_resources(basename, out_folder, images_folder),
    )

    # Write out HTML
    with open(outfile_path, 'w', encoding='utf-8') as outfile:
        outfile.write(html)

    # Write out images. resources['outputs'] is only populated when images
    # were extracted; the .get() default makes this loop a no-op otherwise.
    for image_path, image_data in resources.get('outputs', {}).items():
        with open(image_path, 'wb') as outimage:
            outimage.write(image_data)

    return outfile_path
[ "def", "convert", "(", "notebook_path", ",", "exporter", ",", "output_folder", "=", "None", ",", "images_folder", "=", "None", ")", ":", "if", "output_folder", ":", "os", ".", "makedirs", "(", "output_folder", ",", "exist_ok", "=", "True", ")", "if", "imag...
Converts notebook into an HTML file, outputting notebooks into output_folder if set and images into images_folder if set. Returns the path to the resulting HTML file.
[ "Converts", "notebook", "into", "an", "HTML", "file", "outputting", "notebooks", "into", "output_folder", "if", "set", "and", "images", "into", "images_folder", "if", "set", "." ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/nbinteract/cli.py#L431-L472
train
48,558
SamLau95/nbinteract
docs/convert_notebooks_to_html_partial.py
convert_notebooks_to_html_partial
def convert_notebooks_to_html_partial(notebook_paths, url_map): """ Converts notebooks in notebook_paths to HTML partials """ for notebook_path in notebook_paths: # Computes <name>.ipynb from notebooks/01/<name>.ipynb path, filename = os.path.split(notebook_path) # Computes examples from notebooks/examples chapter = os.path.split(path)[1] if os.sep in path else '' # Computes <name> from <name>.ipynb basename, _ = os.path.splitext(filename) # Computes <name>.html from notebooks/<name>.ipynb outfile_name = basename + '.html' # This results in images like AB_5_1.png for a notebook called AB.ipynb unique_image_key = basename # This sets the img tag URL in the rendered HTML. output_files_dir = '/' + NOTEBOOK_IMAGE_DIR # Path to output final HTML file outfile_path = os.path.join(chapter, outfile_name) if chapter: os.makedirs(chapter, exist_ok=True) extract_output_config = { 'unique_key': unique_image_key, 'output_files_dir': output_files_dir, } notebook = nbformat.read(notebook_path, 4) notebook.cells.insert(0, _preamble_cell(path)) html, resources = html_exporter.from_notebook_node( notebook, resources=extract_output_config, ) if outfile_path not in url_map: print( '[Warning]: {} not found in _data/toc.yml. This page will ' 'not appear in the textbook table of contents.' .format(outfile_path) ) prev_page = url_map.get(outfile_path, {}).get('prev', 'false') next_page = url_map.get(outfile_path, {}).get('next', 'false') final_output = wrapper.format( html=html, prev_page=prev_page, next_page=next_page, ) # Write out HTML with open(outfile_path, 'w', encoding='utf-8') as outfile: outfile.write(final_output) # Write out images for relative_path, image_data in resources['outputs'].items(): image_name = os.path.basename(relative_path) final_image_path = os.path.join(NOTEBOOK_IMAGE_DIR, image_name) with open(final_image_path, 'wb') as outimage: outimage.write(image_data) print(outfile_path + " written.")
python
def convert_notebooks_to_html_partial(notebook_paths, url_map): """ Converts notebooks in notebook_paths to HTML partials """ for notebook_path in notebook_paths: # Computes <name>.ipynb from notebooks/01/<name>.ipynb path, filename = os.path.split(notebook_path) # Computes examples from notebooks/examples chapter = os.path.split(path)[1] if os.sep in path else '' # Computes <name> from <name>.ipynb basename, _ = os.path.splitext(filename) # Computes <name>.html from notebooks/<name>.ipynb outfile_name = basename + '.html' # This results in images like AB_5_1.png for a notebook called AB.ipynb unique_image_key = basename # This sets the img tag URL in the rendered HTML. output_files_dir = '/' + NOTEBOOK_IMAGE_DIR # Path to output final HTML file outfile_path = os.path.join(chapter, outfile_name) if chapter: os.makedirs(chapter, exist_ok=True) extract_output_config = { 'unique_key': unique_image_key, 'output_files_dir': output_files_dir, } notebook = nbformat.read(notebook_path, 4) notebook.cells.insert(0, _preamble_cell(path)) html, resources = html_exporter.from_notebook_node( notebook, resources=extract_output_config, ) if outfile_path not in url_map: print( '[Warning]: {} not found in _data/toc.yml. This page will ' 'not appear in the textbook table of contents.' .format(outfile_path) ) prev_page = url_map.get(outfile_path, {}).get('prev', 'false') next_page = url_map.get(outfile_path, {}).get('next', 'false') final_output = wrapper.format( html=html, prev_page=prev_page, next_page=next_page, ) # Write out HTML with open(outfile_path, 'w', encoding='utf-8') as outfile: outfile.write(final_output) # Write out images for relative_path, image_data in resources['outputs'].items(): image_name = os.path.basename(relative_path) final_image_path = os.path.join(NOTEBOOK_IMAGE_DIR, image_name) with open(final_image_path, 'wb') as outimage: outimage.write(image_data) print(outfile_path + " written.")
[ "def", "convert_notebooks_to_html_partial", "(", "notebook_paths", ",", "url_map", ")", ":", "for", "notebook_path", "in", "notebook_paths", ":", "# Computes <name>.ipynb from notebooks/01/<name>.ipynb", "path", ",", "filename", "=", "os", ".", "path", ".", "split", "("...
Converts notebooks in notebook_paths to HTML partials
[ "Converts", "notebooks", "in", "notebook_paths", "to", "HTML", "partials" ]
9f346452283831aad3f4416c04879f1d187ec3b7
https://github.com/SamLau95/nbinteract/blob/9f346452283831aad3f4416c04879f1d187ec3b7/docs/convert_notebooks_to_html_partial.py#L73-L135
train
48,559
kylef/refract.py
refract/registry.py
Registry.find_element_class
def find_element_class(self, element_name): """ Finds an element class for the given element name contained within the registry. Returns Element when there is no matching element subclass. >>> registry.find_element_class('string') String >>> registry.find_element_class('unknown') Element """ for element in self.elements: if element.element == element_name: return element return Element
python
def find_element_class(self, element_name): """ Finds an element class for the given element name contained within the registry. Returns Element when there is no matching element subclass. >>> registry.find_element_class('string') String >>> registry.find_element_class('unknown') Element """ for element in self.elements: if element.element == element_name: return element return Element
[ "def", "find_element_class", "(", "self", ",", "element_name", ")", ":", "for", "element", "in", "self", ".", "elements", ":", "if", "element", ".", "element", "==", "element_name", ":", "return", "element", "return", "Element" ]
Finds an element class for the given element name contained within the registry. Returns Element when there is no matching element subclass. >>> registry.find_element_class('string') String >>> registry.find_element_class('unknown') Element
[ "Finds", "an", "element", "class", "for", "the", "given", "element", "name", "contained", "within", "the", "registry", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/registry.py#L39-L57
train
48,560
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
_transform
def _transform(transformer_chain: Sequence[Tuple[DataTransformer, Type]], data: S, context: PipelineContext = None) -> T: """Transform data to a new type. Args: transformer_chain: A sequence of (transformer, type) pairs to convert the data. data: The data to be transformed. context: The context of the transformations (mutable). Returns: The transformed data. """ for transformer, target_type in transformer_chain: # noinspection PyTypeChecker data = transformer.transform(target_type, data, context) return data
python
def _transform(transformer_chain: Sequence[Tuple[DataTransformer, Type]], data: S, context: PipelineContext = None) -> T:
    """Transform data to a new type.

    Args:
        transformer_chain: A sequence of (transformer, type) pairs to convert the data.
        data: The data to be transformed.
        context: The context of the transformations (mutable).

    Returns:
        The transformed data.
    """
    # Feed the data through each transformer in order; each step hands its
    # output to the next.
    result = data
    for transformer, target_type in transformer_chain:
        # noinspection PyTypeChecker
        result = transformer.transform(target_type, result, context)
    return result
[ "def", "_transform", "(", "transformer_chain", ":", "Sequence", "[", "Tuple", "[", "DataTransformer", ",", "Type", "]", "]", ",", "data", ":", "S", ",", "context", ":", "PipelineContext", "=", "None", ")", "->", "T", ":", "for", "transformer", ",", "targ...
Transform data to a new type. Args: transformer_chain: A sequence of (transformer, type) pairs to convert the data. data: The data to be transformed. context: The context of the transformations (mutable). Returns: The transformed data.
[ "Transform", "data", "to", "a", "new", "type", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L100-L114
train
48,561
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
_SinkHandler.put
def put(self, item: T, context: PipelineContext = None) -> None: """Puts an objects into the data sink. The objects may be transformed into a new type for insertion if necessary. Args: item: The objects to be inserted into the data sink. context: The context of the insertion (mutable). """ LOGGER.info("Converting item \"{item}\" for sink \"{sink}\"".format(item=item, sink=self._sink)) item = self._transform(data=item, context=context) LOGGER.info("Puting item \"{item}\" into sink \"{sink}\"".format(item=item, sink=self._sink)) self._sink.put(self._store_type, item, context)
python
def put(self, item: T, context: PipelineContext = None) -> None:
    """Puts an objects into the data sink. The objects may be transformed into a new type for insertion if necessary.

    Args:
        item: The objects to be inserted into the data sink.
        context: The context of the insertion (mutable).
    """
    LOGGER.info("Converting item \"{item}\" for sink \"{sink}\"".format(item=item, sink=self._sink))
    # Convert the item to the sink's stored type before insertion.
    converted = self._transform(data=item, context=context)

    LOGGER.info("Puting item \"{item}\" into sink \"{sink}\"".format(item=converted, sink=self._sink))
    self._sink.put(self._store_type, converted, context)
[ "def", "put", "(", "self", ",", "item", ":", "T", ",", "context", ":", "PipelineContext", "=", "None", ")", "->", "None", ":", "LOGGER", ".", "info", "(", "\"Converting item \\\"{item}\\\" for sink \\\"{sink}\\\"\"", ".", "format", "(", "item", "=", "item", ...
Puts an objects into the data sink. The objects may be transformed into a new type for insertion if necessary. Args: item: The objects to be inserted into the data sink. context: The context of the insertion (mutable).
[ "Puts", "an", "objects", "into", "the", "data", "sink", ".", "The", "objects", "may", "be", "transformed", "into", "a", "new", "type", "for", "insertion", "if", "necessary", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L130-L140
train
48,562
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
_SourceHandler.get_many
def get_many(self, query: Mapping[str, Any], context: PipelineContext = None, streaming: bool = False) -> Iterable[T]: """Gets a query from the data source, where the query contains multiple elements to be extracted. 1) Extracts the query from the data source. 2) Inserts the result into any data sinks. 3) Transforms the results into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested. context: The context for the extraction (mutable). streaming: Specifies whether the results should be returned as a generator (default False). Returns: The requested objects or a generator of the objects if streaming is True. """ result = self._source.get_many(self._source_type, deepcopy(query), context) LOGGER.info("Got results \"{result}\" from query \"{query}\" of source \"{source}\"".format(result=result, query=query, source=self._source)) if not streaming: LOGGER.info("Non-streaming get_many request. Ensuring results \"{result}\" are a Iterable".format(result=result)) result = list(result) LOGGER.info("Sending results \"{result}\" to sinks before converting".format(result=result)) for sink in self._before_transform: sink.put_many(result, context) LOGGER.info("Converting results \"{result}\" to request type".format(result=result)) result = [self._transform(data=item, context=context) for item in result] LOGGER.info("Sending results \"{result}\" to sinks after converting".format(result=result)) for sink in self._after_transform: sink.put_many(result, context) return result else: LOGGER.info("Streaming get_many request. Returning result generator for results \"{result}\"".format(result=result)) return self._get_many_generator(result)
python
def get_many(self, query: Mapping[str, Any], context: PipelineContext = None, streaming: bool = False) -> Iterable[T]:
    """Gets a query from the data source, where the query contains multiple elements to be extracted.

    1) Extracts the query from the data source.
    2) Inserts the result into any data sinks.
    3) Transforms the results into the requested type if it wasn't already.
    4) Inserts the transformed result into any data sinks.

    Args:
        query: The query being requested.
        context: The context for the extraction (mutable).
        streaming: Specifies whether the results should be returned as a generator (default False).

    Returns:
        The requested objects or a generator of the objects if streaming is True.
    """
    # deepcopy shields the caller's query from mutation by the source.
    result = self._source.get_many(self._source_type, deepcopy(query), context)
    LOGGER.info("Got results \"{result}\" from query \"{query}\" of source \"{source}\"".format(result=result, query=query, source=self._source))

    if streaming:
        # Streaming: defer sink insertion and transformation to a generator.
        LOGGER.info("Streaming get_many request. Returning result generator for results \"{result}\"".format(result=result))
        return self._get_many_generator(result)

    LOGGER.info("Non-streaming get_many request. Ensuring results \"{result}\" are a Iterable".format(result=result))
    result = list(result)

    LOGGER.info("Sending results \"{result}\" to sinks before converting".format(result=result))
    for sink in self._before_transform:
        sink.put_many(result, context)

    LOGGER.info("Converting results \"{result}\" to request type".format(result=result))
    result = [self._transform(data=element, context=context) for element in result]

    LOGGER.info("Sending results \"{result}\" to sinks after converting".format(result=result))
    for sink in self._after_transform:
        sink.put_many(result, context)

    return result
[ "def", "get_many", "(", "self", ",", "query", ":", "Mapping", "[", "str", ",", "Any", "]", ",", "context", ":", "PipelineContext", "=", "None", ",", "streaming", ":", "bool", "=", "False", ")", "->", "Iterable", "[", "T", "]", ":", "result", "=", "...
Gets a query from the data source, where the query contains multiple elements to be extracted. 1) Extracts the query from the data source. 2) Inserts the result into any data sinks. 3) Transforms the results into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested. context: The context for the extraction (mutable). streaming: Specifies whether the results should be returned as a generator (default False). Returns: The requested objects or a generator of the objects if streaming is True.
[ "Gets", "a", "query", "from", "the", "data", "source", "where", "the", "query", "contains", "multiple", "elements", "to", "be", "extracted", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L216-L253
train
48,563
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
DataPipeline.get
def get(self, type: Type[T], query: Mapping[str, Any]) -> T: """Gets a query from the data pipeline. 1) Extracts the query the sequence of data sources. 2) Inserts the result into the data sinks (if appropriate). 3) Transforms the result into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested. context: The context for the extraction (mutable). Returns: The requested object. """ LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._get_types[type] except KeyError: try: LOGGER.info("Building new SourceHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._get_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers if handlers is None: raise NoConversionError("No source can provide \"{type}\"".format(type=type.__name__)) LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Querying SourceHandlers for \"{type}\"".format(type=type.__name__)) for handler in handlers: try: return handler.get(query, context) except NotFoundError: pass raise NotFoundError("No source returned a query result!")
python
def get(self, type: Type[T], query: Mapping[str, Any]) -> T: """Gets a query from the data pipeline. 1) Extracts the query the sequence of data sources. 2) Inserts the result into the data sinks (if appropriate). 3) Transforms the result into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested. context: The context for the extraction (mutable). Returns: The requested object. """ LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._get_types[type] except KeyError: try: LOGGER.info("Building new SourceHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._get_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers if handlers is None: raise NoConversionError("No source can provide \"{type}\"".format(type=type.__name__)) LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Querying SourceHandlers for \"{type}\"".format(type=type.__name__)) for handler in handlers: try: return handler.get(query, context) except NotFoundError: pass raise NotFoundError("No source returned a query result!")
[ "def", "get", "(", "self", ",", "type", ":", "Type", "[", "T", "]", ",", "query", ":", "Mapping", "[", "str", ",", "Any", "]", ")", "->", "T", ":", "LOGGER", ".", "info", "(", "\"Getting SourceHandlers for \\\"{type}\\\"\"", ".", "format", "(", "type",...
Gets a query from the data pipeline. 1) Extracts the query the sequence of data sources. 2) Inserts the result into the data sinks (if appropriate). 3) Transforms the result into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested. context: The context for the extraction (mutable). Returns: The requested object.
[ "Gets", "a", "query", "from", "the", "data", "pipeline", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L424-L463
train
48,564
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
DataPipeline.get_many
def get_many(self, type: Type[T], query: Mapping[str, Any], streaming: bool = False) -> Iterable[T]: """Gets a query from the data pipeline, which contains a request for multiple objects. 1) Extracts the query the sequence of data sources. 2) Inserts the results into the data sinks (if appropriate). 3) Transforms the results into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). streaming: Specifies whether the results should be returned as a generator (default False). Returns: The requested objects or a generator of the objects if streaming is True. """ LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._get_types[type] except KeyError: try: LOGGER.info("Building new SourceHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._get_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers if handlers is None: raise NoConversionError("No source can provide \"{type}\"".format(type=type.__name__)) LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Querying SourceHandlers for \"{type}\"".format(type=type.__name__)) for handler in handlers: try: return handler.get_many(query, context, streaming) except NotFoundError: pass raise NotFoundError("No source returned a query result!")
python
def get_many(self, type: Type[T], query: Mapping[str, Any], streaming: bool = False) -> Iterable[T]: """Gets a query from the data pipeline, which contains a request for multiple objects. 1) Extracts the query the sequence of data sources. 2) Inserts the results into the data sinks (if appropriate). 3) Transforms the results into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). streaming: Specifies whether the results should be returned as a generator (default False). Returns: The requested objects or a generator of the objects if streaming is True. """ LOGGER.info("Getting SourceHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._get_types[type] except KeyError: try: LOGGER.info("Building new SourceHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._get_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers if handlers is None: raise NoConversionError("No source can provide \"{type}\"".format(type=type.__name__)) LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Querying SourceHandlers for \"{type}\"".format(type=type.__name__)) for handler in handlers: try: return handler.get_many(query, context, streaming) except NotFoundError: pass raise NotFoundError("No source returned a query result!")
[ "def", "get_many", "(", "self", ",", "type", ":", "Type", "[", "T", "]", ",", "query", ":", "Mapping", "[", "str", ",", "Any", "]", ",", "streaming", ":", "bool", "=", "False", ")", "->", "Iterable", "[", "T", "]", ":", "LOGGER", ".", "info", "...
Gets a query from the data pipeline, which contains a request for multiple objects. 1) Extracts the query the sequence of data sources. 2) Inserts the results into the data sinks (if appropriate). 3) Transforms the results into the requested type if it wasn't already. 4) Inserts the transformed result into any data sinks. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). streaming: Specifies whether the results should be returned as a generator (default False). Returns: The requested objects or a generator of the objects if streaming is True.
[ "Gets", "a", "query", "from", "the", "data", "pipeline", "which", "contains", "a", "request", "for", "multiple", "objects", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L465-L505
train
48,565
meraki-analytics/datapipelines-python
datapipelines/pipelines.py
DataPipeline.put
def put(self, type: Type[T], item: T) -> None: """Puts an objects into the data pipeline. The object may be transformed into a new type for insertion if necessary. Args: item: The object to be inserted into the data pipeline. """ LOGGER.info("Getting SinkHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._put_types[type] except KeyError: try: LOGGER.info("Building new SinkHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._put_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Sending item \"{item}\" to SourceHandlers".format(item=item)) if handlers is not None: for handler in handlers: handler.put(item, context)
python
def put(self, type: Type[T], item: T) -> None: """Puts an objects into the data pipeline. The object may be transformed into a new type for insertion if necessary. Args: item: The object to be inserted into the data pipeline. """ LOGGER.info("Getting SinkHandlers for \"{type}\"".format(type=type.__name__)) try: handlers = self._put_types[type] except KeyError: try: LOGGER.info("Building new SinkHandlers for \"{type}\"".format(type=type.__name__)) handlers = self._put_handlers(type) except NoConversionError: handlers = None self._get_types[type] = handlers LOGGER.info("Creating new PipelineContext") context = self._new_context() LOGGER.info("Sending item \"{item}\" to SourceHandlers".format(item=item)) if handlers is not None: for handler in handlers: handler.put(item, context)
[ "def", "put", "(", "self", ",", "type", ":", "Type", "[", "T", "]", ",", "item", ":", "T", ")", "->", "None", ":", "LOGGER", ".", "info", "(", "\"Getting SinkHandlers for \\\"{type}\\\"\"", ".", "format", "(", "type", "=", "type", ".", "__name__", ")",...
Puts an objects into the data pipeline. The object may be transformed into a new type for insertion if necessary. Args: item: The object to be inserted into the data pipeline.
[ "Puts", "an", "objects", "into", "the", "data", "pipeline", ".", "The", "object", "may", "be", "transformed", "into", "a", "new", "type", "for", "insertion", "if", "necessary", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/pipelines.py#L507-L530
train
48,566
jmvrbanac/Specter
specter/reporting/dots.py
DotsReporter.print_error
def print_error(self, wrapper): """ A crude way of output the errors for now. This needs to be cleaned up into something better. """ level = 0 parent = wrapper.parent while parent: print_test_msg(parent.name, level, TestStatus.FAIL, self.use_color) level += 1 parent = parent.parent print_test_msg(wrapper.name, level, TestStatus.FAIL, self.use_color) print_test_args(wrapper.execute_kwargs, level, TestStatus.FAIL, self.use_color) if wrapper.error: for line in wrapper.error: print_test_msg( line, level + 2, TestStatus.FAIL, self.use_color ) print_expects(wrapper, level, use_color=self.use_color)
python
def print_error(self, wrapper): """ A crude way of output the errors for now. This needs to be cleaned up into something better. """ level = 0 parent = wrapper.parent while parent: print_test_msg(parent.name, level, TestStatus.FAIL, self.use_color) level += 1 parent = parent.parent print_test_msg(wrapper.name, level, TestStatus.FAIL, self.use_color) print_test_args(wrapper.execute_kwargs, level, TestStatus.FAIL, self.use_color) if wrapper.error: for line in wrapper.error: print_test_msg( line, level + 2, TestStatus.FAIL, self.use_color ) print_expects(wrapper, level, use_color=self.use_color)
[ "def", "print_error", "(", "self", ",", "wrapper", ")", ":", "level", "=", "0", "parent", "=", "wrapper", ".", "parent", "while", "parent", ":", "print_test_msg", "(", "parent", ".", "name", ",", "level", ",", "TestStatus", ".", "FAIL", ",", "self", "....
A crude way of output the errors for now. This needs to be cleaned up into something better.
[ "A", "crude", "way", "of", "output", "the", "errors", "for", "now", ".", "This", "needs", "to", "be", "cleaned", "up", "into", "something", "better", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/reporting/dots.py#L29-L53
train
48,567
kylef/refract.py
refract/refraction.py
refract
def refract(structure) -> Element: """ Refracts the given value. >>> refract('string') String(content='string') >>> refract(1) Number(content=1) >>> refract(True) Boolean(content=True) >>> refract(None) Null() >>> refract([1, 2]) Array(content=[Number(content=1), Number(content=2)]) >>> refract({'name': 'Doe'}) Object(content=[Member( key=String(content='name'), value=String(content='Doe') )]) """ if isinstance(structure, Element): return structure elif isinstance(structure, str): return String(content=structure) elif isinstance(structure, bool): return Boolean(content=structure) elif isinstance(structure, (int, float)): return Number(content=structure) elif isinstance(structure, (list, tuple)): return Array(content=list(map(refract, structure))) elif isinstance(structure, dict): return Object(content=[Member(key=refract(k), value=refract(v)) for (k, v) in structure.items()]) elif structure is None: return Null() raise ValueError('Unsupported Value Type')
python
def refract(structure) -> Element: """ Refracts the given value. >>> refract('string') String(content='string') >>> refract(1) Number(content=1) >>> refract(True) Boolean(content=True) >>> refract(None) Null() >>> refract([1, 2]) Array(content=[Number(content=1), Number(content=2)]) >>> refract({'name': 'Doe'}) Object(content=[Member( key=String(content='name'), value=String(content='Doe') )]) """ if isinstance(structure, Element): return structure elif isinstance(structure, str): return String(content=structure) elif isinstance(structure, bool): return Boolean(content=structure) elif isinstance(structure, (int, float)): return Number(content=structure) elif isinstance(structure, (list, tuple)): return Array(content=list(map(refract, structure))) elif isinstance(structure, dict): return Object(content=[Member(key=refract(k), value=refract(v)) for (k, v) in structure.items()]) elif structure is None: return Null() raise ValueError('Unsupported Value Type')
[ "def", "refract", "(", "structure", ")", "->", "Element", ":", "if", "isinstance", "(", "structure", ",", "Element", ")", ":", "return", "structure", "elif", "isinstance", "(", "structure", ",", "str", ")", ":", "return", "String", "(", "content", "=", "...
Refracts the given value. >>> refract('string') String(content='string') >>> refract(1) Number(content=1) >>> refract(True) Boolean(content=True) >>> refract(None) Null() >>> refract([1, 2]) Array(content=[Number(content=1), Number(content=2)]) >>> refract({'name': 'Doe'}) Object(content=[Member( key=String(content='name'), value=String(content='Doe') )])
[ "Refracts", "the", "given", "value", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/refraction.py#L8-L50
train
48,568
kylef/refract.py
refract/json.py
JSONSerialiser.serialise
def serialise(self, element: Element, **kwargs) -> str: """ Serialises the given element into JSON. >>> JSONSerialiser().serialise(String(content='Hello')) '{"element": "string", "content": "Hello"}' """ return json.dumps(self.serialise_dict(element), **kwargs)
python
def serialise(self, element: Element, **kwargs) -> str: """ Serialises the given element into JSON. >>> JSONSerialiser().serialise(String(content='Hello')) '{"element": "string", "content": "Hello"}' """ return json.dumps(self.serialise_dict(element), **kwargs)
[ "def", "serialise", "(", "self", ",", "element", ":", "Element", ",", "*", "*", "kwargs", ")", "->", "str", ":", "return", "json", ".", "dumps", "(", "self", ".", "serialise_dict", "(", "element", ")", ",", "*", "*", "kwargs", ")" ]
Serialises the given element into JSON. >>> JSONSerialiser().serialise(String(content='Hello')) '{"element": "string", "content": "Hello"}'
[ "Serialises", "the", "given", "element", "into", "JSON", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/json.py#L62-L70
train
48,569
kylef/refract.py
refract/json.py
JSONDeserialiser.deserialise
def deserialise(self, element_json: str) -> Element: """ Deserialises the given JSON into an element. >>> json = '{"element": "string", "content": "Hello"' >>> JSONDeserialiser().deserialise(json) String(content='Hello') """ return self.deserialise_dict(json.loads(element_json))
python
def deserialise(self, element_json: str) -> Element: """ Deserialises the given JSON into an element. >>> json = '{"element": "string", "content": "Hello"' >>> JSONDeserialiser().deserialise(json) String(content='Hello') """ return self.deserialise_dict(json.loads(element_json))
[ "def", "deserialise", "(", "self", ",", "element_json", ":", "str", ")", "->", "Element", ":", "return", "self", ".", "deserialise_dict", "(", "json", ".", "loads", "(", "element_json", ")", ")" ]
Deserialises the given JSON into an element. >>> json = '{"element": "string", "content": "Hello"' >>> JSONDeserialiser().deserialise(json) String(content='Hello')
[ "Deserialises", "the", "given", "JSON", "into", "an", "element", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/json.py#L140-L149
train
48,570
kylef/refract.py
refract/json.py
CompactJSONSerialiser.serialise
def serialise(self, element: Element) -> str: """ Serialises the given element into Compact JSON. >>> CompactJSONSerialiser().serialise(String(content='Hello')) '["string", null, null, "Hello"]' """ return json.dumps(self.serialise_element(element))
python
def serialise(self, element: Element) -> str: """ Serialises the given element into Compact JSON. >>> CompactJSONSerialiser().serialise(String(content='Hello')) '["string", null, null, "Hello"]' """ return json.dumps(self.serialise_element(element))
[ "def", "serialise", "(", "self", ",", "element", ":", "Element", ")", "->", "str", ":", "return", "json", ".", "dumps", "(", "self", ".", "serialise_element", "(", "element", ")", ")" ]
Serialises the given element into Compact JSON. >>> CompactJSONSerialiser().serialise(String(content='Hello')) '["string", null, null, "Hello"]'
[ "Serialises", "the", "given", "element", "into", "Compact", "JSON", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/json.py#L236-L244
train
48,571
kylef/refract.py
refract/json.py
CompactJSONDeserialiser.deserialise
def deserialise(self, content) -> Element: """ Deserialises the given compact JSON into an element. >>> deserialiser = CompactJSONDeserialiser() >>> deserialiser.deserialise('["string", null, null, "Hi"]') String(content='Hi') """ content = json.loads(content) if not isinstance(content, list): raise ValueError('Given content was not compact JSON refract') return self.deserialise_element(content)
python
def deserialise(self, content) -> Element: """ Deserialises the given compact JSON into an element. >>> deserialiser = CompactJSONDeserialiser() >>> deserialiser.deserialise('["string", null, null, "Hi"]') String(content='Hi') """ content = json.loads(content) if not isinstance(content, list): raise ValueError('Given content was not compact JSON refract') return self.deserialise_element(content)
[ "def", "deserialise", "(", "self", ",", "content", ")", "->", "Element", ":", "content", "=", "json", ".", "loads", "(", "content", ")", "if", "not", "isinstance", "(", "content", ",", "list", ")", ":", "raise", "ValueError", "(", "'Given content was not c...
Deserialises the given compact JSON into an element. >>> deserialiser = CompactJSONDeserialiser() >>> deserialiser.deserialise('["string", null, null, "Hi"]') String(content='Hi')
[ "Deserialises", "the", "given", "compact", "JSON", "into", "an", "element", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/json.py#L306-L319
train
48,572
jmvrbanac/Specter
specter/runner.py
SpecterRunner.combine_coverage_reports
def combine_coverage_reports(self, omit, parallel): """ Method to force the combination of parallel coverage reports.""" tmp_cov = coverage.coverage(omit=omit, data_suffix=parallel) tmp_cov.load() tmp_cov.combine() tmp_cov.save()
python
def combine_coverage_reports(self, omit, parallel): """ Method to force the combination of parallel coverage reports.""" tmp_cov = coverage.coverage(omit=omit, data_suffix=parallel) tmp_cov.load() tmp_cov.combine() tmp_cov.save()
[ "def", "combine_coverage_reports", "(", "self", ",", "omit", ",", "parallel", ")", ":", "tmp_cov", "=", "coverage", ".", "coverage", "(", "omit", "=", "omit", ",", "data_suffix", "=", "parallel", ")", "tmp_cov", ".", "load", "(", ")", "tmp_cov", ".", "co...
Method to force the combination of parallel coverage reports.
[ "Method", "to", "force", "the", "combination", "of", "parallel", "coverage", "reports", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/runner.py#L120-L125
train
48,573
meraki-analytics/datapipelines-python
datapipelines/transformers.py
DataTransformer.transforms
def transforms(self) -> Mapping[Type, Iterable[Type]]: """The available data transformers.""" try: return getattr(self.__class__, "transform")._transforms except AttributeError: return {}
python
def transforms(self) -> Mapping[Type, Iterable[Type]]: """The available data transformers.""" try: return getattr(self.__class__, "transform")._transforms except AttributeError: return {}
[ "def", "transforms", "(", "self", ")", "->", "Mapping", "[", "Type", ",", "Iterable", "[", "Type", "]", "]", ":", "try", ":", "return", "getattr", "(", "self", ".", "__class__", ",", "\"transform\"", ")", ".", "_transforms", "except", "AttributeError", "...
The available data transformers.
[ "The", "available", "data", "transformers", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/transformers.py#L19-L24
train
48,574
meraki-analytics/datapipelines-python
datapipelines/transformers.py
DataTransformer.transform
def transform(self, target_type: Type[T], value: F, context: PipelineContext = None) -> T: """Transforms an object to a new type. Args: target_type: The type to be converted to. value: The object to be transformed. context: The context of the transformation (mutable). """ pass
python
def transform(self, target_type: Type[T], value: F, context: PipelineContext = None) -> T: """Transforms an object to a new type. Args: target_type: The type to be converted to. value: The object to be transformed. context: The context of the transformation (mutable). """ pass
[ "def", "transform", "(", "self", ",", "target_type", ":", "Type", "[", "T", "]", ",", "value", ":", "F", ",", "context", ":", "PipelineContext", "=", "None", ")", "->", "T", ":", "pass" ]
Transforms an object to a new type. Args: target_type: The type to be converted to. value: The object to be transformed. context: The context of the transformation (mutable).
[ "Transforms", "an", "object", "to", "a", "new", "type", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/transformers.py#L27-L35
train
48,575
jmvrbanac/Specter
specter/spec.py
CaseWrapper.serialize
def serialize(self): """ Serializes the CaseWrapper object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed. """ expects = [exp.serialize() for exp in self.expects] converted_dict = { 'id': self.id, 'name': self.pretty_name, 'raw_name': self.name, 'doc': self.doc, 'error': self.error, 'skipped': self.skipped, 'skip_reason': self.skip_reason, 'execute_kwargs': self.safe_execute_kwargs, 'metadata': self.metadata, 'start': self.start_time, 'end': self.end_time, 'expects': expects, 'success': self.success } return remove_empty_entries_from_dict(converted_dict)
python
def serialize(self): """ Serializes the CaseWrapper object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed. """ expects = [exp.serialize() for exp in self.expects] converted_dict = { 'id': self.id, 'name': self.pretty_name, 'raw_name': self.name, 'doc': self.doc, 'error': self.error, 'skipped': self.skipped, 'skip_reason': self.skip_reason, 'execute_kwargs': self.safe_execute_kwargs, 'metadata': self.metadata, 'start': self.start_time, 'end': self.end_time, 'expects': expects, 'success': self.success } return remove_empty_entries_from_dict(converted_dict)
[ "def", "serialize", "(", "self", ")", ":", "expects", "=", "[", "exp", ".", "serialize", "(", ")", "for", "exp", "in", "self", ".", "expects", "]", "converted_dict", "=", "{", "'id'", ":", "self", ".", "id", ",", "'name'", ":", "self", ".", "pretty...
Serializes the CaseWrapper object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed.
[ "Serializes", "the", "CaseWrapper", "object", "for", "collection", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/spec.py#L53-L76
train
48,576
jmvrbanac/Specter
specter/spec.py
Describe._run_hooks
def _run_hooks(self): """Calls any registered hooks providing the current state.""" for hook in self.hooks: getattr(self, hook)(self._state)
python
def _run_hooks(self): """Calls any registered hooks providing the current state.""" for hook in self.hooks: getattr(self, hook)(self._state)
[ "def", "_run_hooks", "(", "self", ")", ":", "for", "hook", "in", "self", ".", "hooks", ":", "getattr", "(", "self", ",", "hook", ")", "(", "self", ".", "_state", ")" ]
Calls any registered hooks providing the current state.
[ "Calls", "any", "registered", "hooks", "providing", "the", "current", "state", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/spec.py#L301-L304
train
48,577
kylef/refract.py
refract/elements/array.py
Array.append
def append(self, element): """ Append an element onto the array. >>> array = Array() >>> array.append('test') """ from refract.refraction import refract self.content.append(refract(element))
python
def append(self, element): """ Append an element onto the array. >>> array = Array() >>> array.append('test') """ from refract.refraction import refract self.content.append(refract(element))
[ "def", "append", "(", "self", ",", "element", ")", ":", "from", "refract", ".", "refraction", "import", "refract", "self", ".", "content", ".", "append", "(", "refract", "(", "element", ")", ")" ]
Append an element onto the array. >>> array = Array() >>> array.append('test')
[ "Append", "an", "element", "onto", "the", "array", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/elements/array.py#L60-L69
train
48,578
kylef/refract.py
refract/elements/array.py
Array.insert
def insert(self, index: int, element): """ Insert an element at a given position. >>> array = Array() >>> array.insert(0, Element()) """ from refract.refraction import refract self.content.insert(index, refract(element))
python
def insert(self, index: int, element): """ Insert an element at a given position. >>> array = Array() >>> array.insert(0, Element()) """ from refract.refraction import refract self.content.insert(index, refract(element))
[ "def", "insert", "(", "self", ",", "index", ":", "int", ",", "element", ")", ":", "from", "refract", ".", "refraction", "import", "refract", "self", ".", "content", ".", "insert", "(", "index", ",", "refract", "(", "element", ")", ")" ]
Insert an element at a given position. >>> array = Array() >>> array.insert(0, Element())
[ "Insert", "an", "element", "at", "a", "given", "position", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/elements/array.py#L71-L80
train
48,579
kylef/refract.py
refract/elements/array.py
Array.index
def index(self, element: Element) -> int: """ Return the index in the array of the first item whose value is element. It is an error if there is no such item. >>> element = String('hello') >>> array = Array(content=[element]) >>> array.index(element) 0 """ from refract.refraction import refract return self.content.index(refract(element))
python
def index(self, element: Element) -> int: """ Return the index in the array of the first item whose value is element. It is an error if there is no such item. >>> element = String('hello') >>> array = Array(content=[element]) >>> array.index(element) 0 """ from refract.refraction import refract return self.content.index(refract(element))
[ "def", "index", "(", "self", ",", "element", ":", "Element", ")", "->", "int", ":", "from", "refract", ".", "refraction", "import", "refract", "return", "self", ".", "content", ".", "index", "(", "refract", "(", "element", ")", ")" ]
Return the index in the array of the first item whose value is element. It is an error if there is no such item. >>> element = String('hello') >>> array = Array(content=[element]) >>> array.index(element) 0
[ "Return", "the", "index", "in", "the", "array", "of", "the", "first", "item", "whose", "value", "is", "element", ".", "It", "is", "an", "error", "if", "there", "is", "no", "such", "item", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/elements/array.py#L82-L94
train
48,580
meraki-analytics/datapipelines-python
datapipelines/sources.py
DataSource.provides
def provides(self): # type: Union[Iterable[Type[T]], Type[Any]] """The types of objects the data store provides.""" types = set() any_dispatch = False try: types.update(getattr(self.__class__, "get")._provides) any_dispatch = True except AttributeError: pass try: types.update(getattr(self.__class__, "get_many")._provides) any_dispatch = True except AttributeError: pass return types if any_dispatch else TYPE_WILDCARD
python
def provides(self): # type: Union[Iterable[Type[T]], Type[Any]] """The types of objects the data store provides.""" types = set() any_dispatch = False try: types.update(getattr(self.__class__, "get")._provides) any_dispatch = True except AttributeError: pass try: types.update(getattr(self.__class__, "get_many")._provides) any_dispatch = True except AttributeError: pass return types if any_dispatch else TYPE_WILDCARD
[ "def", "provides", "(", "self", ")", ":", "# type: Union[Iterable[Type[T]], Type[Any]]", "types", "=", "set", "(", ")", "any_dispatch", "=", "False", "try", ":", "types", ".", "update", "(", "getattr", "(", "self", ".", "__class__", ",", "\"get\"", ")", ".",...
The types of objects the data store provides.
[ "The", "types", "of", "objects", "the", "data", "store", "provides", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/sources.py#L19-L33
train
48,581
meraki-analytics/datapipelines-python
datapipelines/sources.py
DataSource.get_many
def get_many(self, type: Type[T], query: Mapping[str, Any], context: PipelineContext = None) -> Iterable[T]: """Gets a query from the data source, which contains a request for multiple objects. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). Returns: The requested objects. """ pass
python
def get_many(self, type: Type[T], query: Mapping[str, Any], context: PipelineContext = None) -> Iterable[T]: """Gets a query from the data source, which contains a request for multiple objects. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). Returns: The requested objects. """ pass
[ "def", "get_many", "(", "self", ",", "type", ":", "Type", "[", "T", "]", ",", "query", ":", "Mapping", "[", "str", ",", "Any", "]", ",", "context", ":", "PipelineContext", "=", "None", ")", "->", "Iterable", "[", "T", "]", ":", "pass" ]
Gets a query from the data source, which contains a request for multiple objects. Args: query: The query being requested (contains a request for multiple objects). context: The context for the extraction (mutable). Returns: The requested objects.
[ "Gets", "a", "query", "from", "the", "data", "source", "which", "contains", "a", "request", "for", "multiple", "objects", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/sources.py#L49-L59
train
48,582
jmvrbanac/Specter
specter/reporting/__init__.py
ReporterPluginManager.subscribe_all_to_spec
def subscribe_all_to_spec(self, spec): """ Will automatically not subscribe reporters that are not parallel or serial depending on the current mode. """ for reporter in self.reporters: if self.can_use_reporter(reporter, self.parallel): reporter.subscribe_to_spec(spec)
python
def subscribe_all_to_spec(self, spec): """ Will automatically not subscribe reporters that are not parallel or serial depending on the current mode. """ for reporter in self.reporters: if self.can_use_reporter(reporter, self.parallel): reporter.subscribe_to_spec(spec)
[ "def", "subscribe_all_to_spec", "(", "self", ",", "spec", ")", ":", "for", "reporter", "in", "self", ".", "reporters", ":", "if", "self", ".", "can_use_reporter", "(", "reporter", ",", "self", ".", "parallel", ")", ":", "reporter", ".", "subscribe_to_spec", ...
Will automatically not subscribe reporters that are not parallel or serial depending on the current mode.
[ "Will", "automatically", "not", "subscribe", "reporters", "that", "are", "not", "parallel", "or", "serial", "depending", "on", "the", "current", "mode", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/reporting/__init__.py#L77-L83
train
48,583
jmvrbanac/Specter
specter/reporting/console.py
ConsoleReporter.output
def output(self, msg, indent, status=None): """ Alias for print_indent_msg with color determined by status.""" color = None if self.use_color: color = get_color_from_status(status) print_indent_msg(msg, indent, color)
python
def output(self, msg, indent, status=None): """ Alias for print_indent_msg with color determined by status.""" color = None if self.use_color: color = get_color_from_status(status) print_indent_msg(msg, indent, color)
[ "def", "output", "(", "self", ",", "msg", ",", "indent", ",", "status", "=", "None", ")", ":", "color", "=", "None", "if", "self", ".", "use_color", ":", "color", "=", "get_color_from_status", "(", "status", ")", "print_indent_msg", "(", "msg", ",", "i...
Alias for print_indent_msg with color determined by status.
[ "Alias", "for", "print_indent_msg", "with", "color", "determined", "by", "status", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/reporting/console.py#L139-L144
train
48,584
jmvrbanac/Specter
specter/util.py
get_real_last_traceback
def get_real_last_traceback(exception): """ An unfortunate evil... All because Python's traceback cannot determine where my executed code is coming from... """ traceback_blocks = [] _n, _n, exc_traceback = sys.exc_info() tb_list = get_all_tracebacks(exc_traceback)[1:] # Remove already captured tracebacks # TODO(jmv): This must be a better way of doing this. Need to revisit. tb_list = [tb for tb in tb_list if tb not in CAPTURED_TRACEBACKS] CAPTURED_TRACEBACKS.extend(tb_list) for traceback in tb_list: lines, path, line_num = get_source_from_frame(traceback.tb_frame) traceback_lines = get_numbered_source(lines, traceback.tb_lineno, line_num) traceback_lines.insert(0, ' - {0}'.format(path)) traceback_lines.insert(1, ' ------------------') traceback_lines.append(' ------------------') traceback_blocks.append(traceback_lines) traced_lines = ['Error Traceback:'] traced_lines.extend(itertools.chain.from_iterable(traceback_blocks)) traced_lines.append(' - Error | {0}: {1}'.format( type(exception).__name__, exception)) return traced_lines
python
def get_real_last_traceback(exception): """ An unfortunate evil... All because Python's traceback cannot determine where my executed code is coming from... """ traceback_blocks = [] _n, _n, exc_traceback = sys.exc_info() tb_list = get_all_tracebacks(exc_traceback)[1:] # Remove already captured tracebacks # TODO(jmv): This must be a better way of doing this. Need to revisit. tb_list = [tb for tb in tb_list if tb not in CAPTURED_TRACEBACKS] CAPTURED_TRACEBACKS.extend(tb_list) for traceback in tb_list: lines, path, line_num = get_source_from_frame(traceback.tb_frame) traceback_lines = get_numbered_source(lines, traceback.tb_lineno, line_num) traceback_lines.insert(0, ' - {0}'.format(path)) traceback_lines.insert(1, ' ------------------') traceback_lines.append(' ------------------') traceback_blocks.append(traceback_lines) traced_lines = ['Error Traceback:'] traced_lines.extend(itertools.chain.from_iterable(traceback_blocks)) traced_lines.append(' - Error | {0}: {1}'.format( type(exception).__name__, exception)) return traced_lines
[ "def", "get_real_last_traceback", "(", "exception", ")", ":", "traceback_blocks", "=", "[", "]", "_n", ",", "_n", ",", "exc_traceback", "=", "sys", ".", "exc_info", "(", ")", "tb_list", "=", "get_all_tracebacks", "(", "exc_traceback", ")", "[", "1", ":", "...
An unfortunate evil... All because Python's traceback cannot determine where my executed code is coming from...
[ "An", "unfortunate", "evil", "...", "All", "because", "Python", "s", "traceback", "cannot", "determine", "where", "my", "executed", "code", "is", "coming", "from", "..." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/util.py#L155-L183
train
48,585
kylef/refract.py
refract/contrib/apielements.py
HTTPMessage.assets
def assets(self) -> List[Asset]: """ Returns the assets in the transaction. """ return list(filter(is_element(Asset), self.content))
python
def assets(self) -> List[Asset]: """ Returns the assets in the transaction. """ return list(filter(is_element(Asset), self.content))
[ "def", "assets", "(", "self", ")", "->", "List", "[", "Asset", "]", ":", "return", "list", "(", "filter", "(", "is_element", "(", "Asset", ")", ",", "self", ".", "content", ")", ")" ]
Returns the assets in the transaction.
[ "Returns", "the", "assets", "in", "the", "transaction", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/contrib/apielements.py#L79-L84
train
48,586
jmvrbanac/Specter
specter/vendor/ast_decompiler.py
decompile
def decompile(ast, indentation=4, line_length=100, starting_indentation=0): """Decompiles an AST into Python code. Arguments: - ast: code to decompile, using AST objects as generated by the standard library ast module - indentation: indentation level of lines - line_length: if lines become longer than this length, ast_decompiler will try to break them up (but it will not necessarily succeed in all cases) - starting_indentation: indentation level at which to start producing code """ decompiler = Decompiler( indentation=indentation, line_length=line_length, starting_indentation=starting_indentation, ) return decompiler.run(ast)
python
def decompile(ast, indentation=4, line_length=100, starting_indentation=0): """Decompiles an AST into Python code. Arguments: - ast: code to decompile, using AST objects as generated by the standard library ast module - indentation: indentation level of lines - line_length: if lines become longer than this length, ast_decompiler will try to break them up (but it will not necessarily succeed in all cases) - starting_indentation: indentation level at which to start producing code """ decompiler = Decompiler( indentation=indentation, line_length=line_length, starting_indentation=starting_indentation, ) return decompiler.run(ast)
[ "def", "decompile", "(", "ast", ",", "indentation", "=", "4", ",", "line_length", "=", "100", ",", "starting_indentation", "=", "0", ")", ":", "decompiler", "=", "Decompiler", "(", "indentation", "=", "indentation", ",", "line_length", "=", "line_length", ",...
Decompiles an AST into Python code. Arguments: - ast: code to decompile, using AST objects as generated by the standard library ast module - indentation: indentation level of lines - line_length: if lines become longer than this length, ast_decompiler will try to break them up (but it will not necessarily succeed in all cases) - starting_indentation: indentation level at which to start producing code
[ "Decompiles", "an", "AST", "into", "Python", "code", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/vendor/ast_decompiler.py#L94-L110
train
48,587
jmvrbanac/Specter
specter/vendor/ast_decompiler.py
Decompiler.write_expression_list
def write_expression_list(self, nodes, separator=', ', allow_newlines=True, need_parens=True, final_separator_if_multiline=True): """Writes a list of nodes, separated by separator. If allow_newlines, will write the expression over multiple lines if necessary to say within max_line_length. If need_parens, will surround the expression with parentheses in this case. If final_separator_if_multiline, will write a separator at the end of the list if it is divided over multiple lines. """ first = True last_line = len(self.lines) current_line = list(self.current_line) for node in nodes: if first: first = False else: self.write(separator) self.visit(node) if allow_newlines and (self.current_line_length() > self.max_line_length or last_line != len(self.lines)): break else: return # stayed within the limit # reset state del self.lines[last_line:] self.current_line = current_line separator = separator.rstrip() if need_parens: self.write('(') self.write_newline() with self.add_indentation(): num_nodes = len(nodes) for i, node in enumerate(nodes): self.write_indentation() self.visit(node) if final_separator_if_multiline or i < num_nodes - 1: self.write(separator) self.write_newline() self.write_indentation() if need_parens: self.write(')')
python
def write_expression_list(self, nodes, separator=', ', allow_newlines=True, need_parens=True, final_separator_if_multiline=True): """Writes a list of nodes, separated by separator. If allow_newlines, will write the expression over multiple lines if necessary to say within max_line_length. If need_parens, will surround the expression with parentheses in this case. If final_separator_if_multiline, will write a separator at the end of the list if it is divided over multiple lines. """ first = True last_line = len(self.lines) current_line = list(self.current_line) for node in nodes: if first: first = False else: self.write(separator) self.visit(node) if allow_newlines and (self.current_line_length() > self.max_line_length or last_line != len(self.lines)): break else: return # stayed within the limit # reset state del self.lines[last_line:] self.current_line = current_line separator = separator.rstrip() if need_parens: self.write('(') self.write_newline() with self.add_indentation(): num_nodes = len(nodes) for i, node in enumerate(nodes): self.write_indentation() self.visit(node) if final_separator_if_multiline or i < num_nodes - 1: self.write(separator) self.write_newline() self.write_indentation() if need_parens: self.write(')')
[ "def", "write_expression_list", "(", "self", ",", "nodes", ",", "separator", "=", "', '", ",", "allow_newlines", "=", "True", ",", "need_parens", "=", "True", ",", "final_separator_if_multiline", "=", "True", ")", ":", "first", "=", "True", "last_line", "=", ...
Writes a list of nodes, separated by separator. If allow_newlines, will write the expression over multiple lines if necessary to say within max_line_length. If need_parens, will surround the expression with parentheses in this case. If final_separator_if_multiline, will write a separator at the end of the list if it is divided over multiple lines.
[ "Writes", "a", "list", "of", "nodes", "separated", "by", "separator", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/vendor/ast_decompiler.py#L164-L208
train
48,588
kylef/refract.py
refract/elements/base.py
Element.children
def children(self): """ Returns all of the children elements. """ if isinstance(self.content, list): return self.content elif isinstance(self.content, Element): return [self.content] else: return []
python
def children(self): """ Returns all of the children elements. """ if isinstance(self.content, list): return self.content elif isinstance(self.content, Element): return [self.content] else: return []
[ "def", "children", "(", "self", ")", ":", "if", "isinstance", "(", "self", ".", "content", ",", "list", ")", ":", "return", "self", ".", "content", "elif", "isinstance", "(", "self", ".", "content", ",", "Element", ")", ":", "return", "[", "self", "....
Returns all of the children elements.
[ "Returns", "all", "of", "the", "children", "elements", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/elements/base.py#L193-L203
train
48,589
kylef/refract.py
refract/elements/base.py
Element.recursive_children
def recursive_children(self): """ Generator returning all recursive children elements. """ for child in self.children: yield child for recursive_child in child.recursive_children: yield recursive_child
python
def recursive_children(self): """ Generator returning all recursive children elements. """ for child in self.children: yield child for recursive_child in child.recursive_children: yield recursive_child
[ "def", "recursive_children", "(", "self", ")", ":", "for", "child", "in", "self", ".", "children", ":", "yield", "child", "for", "recursive_child", "in", "child", ".", "recursive_children", ":", "yield", "recursive_child" ]
Generator returning all recursive children elements.
[ "Generator", "returning", "all", "recursive", "children", "elements", "." ]
f58ddf619038b580ab50c2e7f867d59d153eabbb
https://github.com/kylef/refract.py/blob/f58ddf619038b580ab50c2e7f867d59d153eabbb/refract/elements/base.py#L206-L215
train
48,590
jmvrbanac/Specter
specter/expect.py
skip
def skip(reason): """The skip decorator allows for you to always bypass a test. :param reason: Expects a string """ def decorator(test_func): if not isinstance(test_func, (type, ClassObjType)): func_data = None if test_func.__name__ == 'DECORATOR_ONCALL': # Call down and save the results func_data = test_func() @functools.wraps(test_func) def skip_wrapper(*args, **kwargs): other_data = { 'real_func': func_data[0] if func_data else test_func, 'metadata': func_data[1] if func_data else None } raise TestSkippedException(test_func, reason, other_data) test_func = skip_wrapper return test_func return decorator
python
def skip(reason): """The skip decorator allows for you to always bypass a test. :param reason: Expects a string """ def decorator(test_func): if not isinstance(test_func, (type, ClassObjType)): func_data = None if test_func.__name__ == 'DECORATOR_ONCALL': # Call down and save the results func_data = test_func() @functools.wraps(test_func) def skip_wrapper(*args, **kwargs): other_data = { 'real_func': func_data[0] if func_data else test_func, 'metadata': func_data[1] if func_data else None } raise TestSkippedException(test_func, reason, other_data) test_func = skip_wrapper return test_func return decorator
[ "def", "skip", "(", "reason", ")", ":", "def", "decorator", "(", "test_func", ")", ":", "if", "not", "isinstance", "(", "test_func", ",", "(", "type", ",", "ClassObjType", ")", ")", ":", "func_data", "=", "None", "if", "test_func", ".", "__name__", "==...
The skip decorator allows for you to always bypass a test. :param reason: Expects a string
[ "The", "skip", "decorator", "allows", "for", "you", "to", "always", "bypass", "a", "test", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/expect.py#L238-L259
train
48,591
jmvrbanac/Specter
specter/expect.py
skip_if
def skip_if(condition, reason=None): """The skip_if decorator allows for you to bypass a test on conditions :param condition: Expects a boolean :param reason: Expects a string """ if condition: return skip(reason) def wrapper(func): return func return wrapper
python
def skip_if(condition, reason=None): """The skip_if decorator allows for you to bypass a test on conditions :param condition: Expects a boolean :param reason: Expects a string """ if condition: return skip(reason) def wrapper(func): return func return wrapper
[ "def", "skip_if", "(", "condition", ",", "reason", "=", "None", ")", ":", "if", "condition", ":", "return", "skip", "(", "reason", ")", "def", "wrapper", "(", "func", ")", ":", "return", "func", "return", "wrapper" ]
The skip_if decorator allows for you to bypass a test on conditions :param condition: Expects a boolean :param reason: Expects a string
[ "The", "skip_if", "decorator", "allows", "for", "you", "to", "bypass", "a", "test", "on", "conditions" ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/expect.py#L262-L273
train
48,592
jmvrbanac/Specter
specter/expect.py
incomplete
def incomplete(test_func): """The incomplete decorator behaves much like a normal skip; however, tests that are marked as incomplete get tracked under a different metric. This allows for you to create a skeleton around all of your features and specifications, and track what tests have been written and what tests are left outstanding. .. code-block:: python # Example of using the incomplete decorator @incomplete def it_should_do_something(self): pass """ if not isinstance(test_func, (type, ClassObjType)): @functools.wraps(test_func) def skip_wrapper(*args, **kwargs): raise TestIncompleteException(test_func, _('Test is incomplete')) return skip_wrapper
python
def incomplete(test_func): """The incomplete decorator behaves much like a normal skip; however, tests that are marked as incomplete get tracked under a different metric. This allows for you to create a skeleton around all of your features and specifications, and track what tests have been written and what tests are left outstanding. .. code-block:: python # Example of using the incomplete decorator @incomplete def it_should_do_something(self): pass """ if not isinstance(test_func, (type, ClassObjType)): @functools.wraps(test_func) def skip_wrapper(*args, **kwargs): raise TestIncompleteException(test_func, _('Test is incomplete')) return skip_wrapper
[ "def", "incomplete", "(", "test_func", ")", ":", "if", "not", "isinstance", "(", "test_func", ",", "(", "type", ",", "ClassObjType", ")", ")", ":", "@", "functools", ".", "wraps", "(", "test_func", ")", "def", "skip_wrapper", "(", "*", "args", ",", "*"...
The incomplete decorator behaves much like a normal skip; however, tests that are marked as incomplete get tracked under a different metric. This allows for you to create a skeleton around all of your features and specifications, and track what tests have been written and what tests are left outstanding. .. code-block:: python # Example of using the incomplete decorator @incomplete def it_should_do_something(self): pass
[ "The", "incomplete", "decorator", "behaves", "much", "like", "a", "normal", "skip", ";", "however", "tests", "that", "are", "marked", "as", "incomplete", "get", "tracked", "under", "a", "different", "metric", ".", "This", "allows", "for", "you", "to", "creat...
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/expect.py#L276-L294
train
48,593
jmvrbanac/Specter
specter/expect.py
ExpectAssert.serialize
def serialize(self): """Serializes the ExpectAssert object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed. """ converted_dict = { 'success': self.success, 'assertion': str(self), 'required': self.required } return converted_dict
python
def serialize(self): """Serializes the ExpectAssert object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed. """ converted_dict = { 'success': self.success, 'assertion': str(self), 'required': self.required } return converted_dict
[ "def", "serialize", "(", "self", ")", ":", "converted_dict", "=", "{", "'success'", ":", "self", ".", "success", ",", "'assertion'", ":", "str", "(", "self", ")", ",", "'required'", ":", "self", ".", "required", "}", "return", "converted_dict" ]
Serializes the ExpectAssert object for collection. Warning, this will only grab the available information. It is strongly that you only call this once all specs and tests have completed.
[ "Serializes", "the", "ExpectAssert", "object", "for", "collection", "." ]
1f5a729b0aa16242add8c1c754efa268335e3944
https://github.com/jmvrbanac/Specter/blob/1f5a729b0aa16242add8c1c754efa268335e3944/specter/expect.py#L41-L53
train
48,594
meraki-analytics/datapipelines-python
datapipelines/sinks.py
DataSink.accepts
def accepts(self): # type: Union[Iterable[Type[T]], Type[Any]] """The types of objects the data sink can store.""" types = set() any_dispatch = False try: types.update(getattr(self.__class__, "put")._accepts) any_dispatch = True except AttributeError: pass try: types.update(getattr(self.__class__, "put_many")._accepts) any_dispatch = True except AttributeError: pass return types if any_dispatch else TYPE_WILDCARD
python
def accepts(self): # type: Union[Iterable[Type[T]], Type[Any]] """The types of objects the data sink can store.""" types = set() any_dispatch = False try: types.update(getattr(self.__class__, "put")._accepts) any_dispatch = True except AttributeError: pass try: types.update(getattr(self.__class__, "put_many")._accepts) any_dispatch = True except AttributeError: pass return types if any_dispatch else TYPE_WILDCARD
[ "def", "accepts", "(", "self", ")", ":", "# type: Union[Iterable[Type[T]], Type[Any]]", "types", "=", "set", "(", ")", "any_dispatch", "=", "False", "try", ":", "types", ".", "update", "(", "getattr", "(", "self", ".", "__class__", ",", "\"put\"", ")", ".", ...
The types of objects the data sink can store.
[ "The", "types", "of", "objects", "the", "data", "sink", "can", "store", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/sinks.py#L16-L30
train
48,595
meraki-analytics/datapipelines-python
datapipelines/sinks.py
DataSink.put_many
def put_many(self, type: Type[T], items: Iterable[T], context: PipelineContext = None) -> None: """Puts multiple objects of the same type into the data sink. Args: type: The type of the objects being inserted. items: The objects to be inserted. context: The context of the insertion (mutable). """ pass
python
def put_many(self, type: Type[T], items: Iterable[T], context: PipelineContext = None) -> None: """Puts multiple objects of the same type into the data sink. Args: type: The type of the objects being inserted. items: The objects to be inserted. context: The context of the insertion (mutable). """ pass
[ "def", "put_many", "(", "self", ",", "type", ":", "Type", "[", "T", "]", ",", "items", ":", "Iterable", "[", "T", "]", ",", "context", ":", "PipelineContext", "=", "None", ")", "->", "None", ":", "pass" ]
Puts multiple objects of the same type into the data sink. Args: type: The type of the objects being inserted. items: The objects to be inserted. context: The context of the insertion (mutable).
[ "Puts", "multiple", "objects", "of", "the", "same", "type", "into", "the", "data", "sink", "." ]
dc38d7976a012039a15d67cd8b07ae77eb1e4a4c
https://github.com/meraki-analytics/datapipelines-python/blob/dc38d7976a012039a15d67cd8b07ae77eb1e4a4c/datapipelines/sinks.py#L44-L52
train
48,596
internetarchive/warc
warc/warc.py
WARCHeader.init_defaults
def init_defaults(self): """Initializes important headers to default values, if not already specified. The WARC-Record-ID header is set to a newly generated UUID. The WARC-Date header is set to the current datetime. The Content-Type is set based on the WARC-Type header. The Content-Length is initialized to 0. """ if "WARC-Record-ID" not in self: self['WARC-Record-ID'] = "<urn:uuid:%s>" % uuid.uuid1() if "WARC-Date" not in self: self['WARC-Date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ') if "Content-Type" not in self: self['Content-Type'] = WARCHeader.CONTENT_TYPES.get(self.type, "application/octet-stream")
python
def init_defaults(self): """Initializes important headers to default values, if not already specified. The WARC-Record-ID header is set to a newly generated UUID. The WARC-Date header is set to the current datetime. The Content-Type is set based on the WARC-Type header. The Content-Length is initialized to 0. """ if "WARC-Record-ID" not in self: self['WARC-Record-ID'] = "<urn:uuid:%s>" % uuid.uuid1() if "WARC-Date" not in self: self['WARC-Date'] = datetime.datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ') if "Content-Type" not in self: self['Content-Type'] = WARCHeader.CONTENT_TYPES.get(self.type, "application/octet-stream")
[ "def", "init_defaults", "(", "self", ")", ":", "if", "\"WARC-Record-ID\"", "not", "in", "self", ":", "self", "[", "'WARC-Record-ID'", "]", "=", "\"<urn:uuid:%s>\"", "%", "uuid", ".", "uuid1", "(", ")", "if", "\"WARC-Date\"", "not", "in", "self", ":", "self...
Initializes important headers to default values, if not already specified. The WARC-Record-ID header is set to a newly generated UUID. The WARC-Date header is set to the current datetime. The Content-Type is set based on the WARC-Type header. The Content-Length is initialized to 0.
[ "Initializes", "important", "headers", "to", "default", "values", "if", "not", "already", "specified", ".", "The", "WARC", "-", "Record", "-", "ID", "header", "is", "set", "to", "a", "newly", "generated", "UUID", ".", "The", "WARC", "-", "Date", "header", ...
8f05a000a23bbd6501217e37cfd862ffdf19da7f
https://github.com/internetarchive/warc/blob/8f05a000a23bbd6501217e37cfd862ffdf19da7f/warc/warc.py#L75-L88
train
48,597
internetarchive/warc
warc/warc.py
WARCHeader.write_to
def write_to(self, f): """Writes this header to a file, in the format specified by WARC. """ f.write(self.version + "\r\n") for name, value in self.items(): name = name.title() # Use standard forms for commonly used patterns name = name.replace("Warc-", "WARC-").replace("-Ip-", "-IP-").replace("-Id", "-ID").replace("-Uri", "-URI") f.write(name) f.write(": ") f.write(value) f.write("\r\n") # Header ends with an extra CRLF f.write("\r\n")
python
def write_to(self, f): """Writes this header to a file, in the format specified by WARC. """ f.write(self.version + "\r\n") for name, value in self.items(): name = name.title() # Use standard forms for commonly used patterns name = name.replace("Warc-", "WARC-").replace("-Ip-", "-IP-").replace("-Id", "-ID").replace("-Uri", "-URI") f.write(name) f.write(": ") f.write(value) f.write("\r\n") # Header ends with an extra CRLF f.write("\r\n")
[ "def", "write_to", "(", "self", ",", "f", ")", ":", "f", ".", "write", "(", "self", ".", "version", "+", "\"\\r\\n\"", ")", "for", "name", ",", "value", "in", "self", ".", "items", "(", ")", ":", "name", "=", "name", ".", "title", "(", ")", "# ...
Writes this header to a file, in the format specified by WARC.
[ "Writes", "this", "header", "to", "a", "file", "in", "the", "format", "specified", "by", "WARC", "." ]
8f05a000a23bbd6501217e37cfd862ffdf19da7f
https://github.com/internetarchive/warc/blob/8f05a000a23bbd6501217e37cfd862ffdf19da7f/warc/warc.py#L90-L104
train
48,598
internetarchive/warc
warc/warc.py
WARCRecord.from_response
def from_response(response): """Creates a WARCRecord from given response object. This must be called before reading the response. The response can be read after this method is called. :param response: An instance of :class:`requests.models.Response`. """ # Get the httplib.HTTPResponse object http_response = response.raw._original_response # HTTP status line, headers and body as strings status_line = "HTTP/1.1 %d %s" % (http_response.status, http_response.reason) headers = str(http_response.msg) body = http_response.read() # Monkey-patch the response object so that it is possible to read from it later. response.raw._fp = StringIO(body) # Build the payload to create warc file. payload = status_line + "\r\n" + headers + "\r\n" + body headers = { "WARC-Type": "response", "WARC-Target-URI": response.request.full_url.encode('utf-8') } return WARCRecord(payload=payload, headers=headers)
python
def from_response(response): """Creates a WARCRecord from given response object. This must be called before reading the response. The response can be read after this method is called. :param response: An instance of :class:`requests.models.Response`. """ # Get the httplib.HTTPResponse object http_response = response.raw._original_response # HTTP status line, headers and body as strings status_line = "HTTP/1.1 %d %s" % (http_response.status, http_response.reason) headers = str(http_response.msg) body = http_response.read() # Monkey-patch the response object so that it is possible to read from it later. response.raw._fp = StringIO(body) # Build the payload to create warc file. payload = status_line + "\r\n" + headers + "\r\n" + body headers = { "WARC-Type": "response", "WARC-Target-URI": response.request.full_url.encode('utf-8') } return WARCRecord(payload=payload, headers=headers)
[ "def", "from_response", "(", "response", ")", ":", "# Get the httplib.HTTPResponse object", "http_response", "=", "response", ".", "raw", ".", "_original_response", "# HTTP status line, headers and body as strings", "status_line", "=", "\"HTTP/1.1 %d %s\"", "%", "(", "http_re...
Creates a WARCRecord from given response object. This must be called before reading the response. The response can be read after this method is called. :param response: An instance of :class:`requests.models.Response`.
[ "Creates", "a", "WARCRecord", "from", "given", "response", "object", "." ]
8f05a000a23bbd6501217e37cfd862ffdf19da7f
https://github.com/internetarchive/warc/blob/8f05a000a23bbd6501217e37cfd862ffdf19da7f/warc/warc.py#L216-L242
train
48,599