text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def database_backup(self, data_directory, *args, **kwargs):
"""Uploads a PostgreSQL file cluster to S3 or Windows Azure Blob
Service
Mechanism: just wraps _upload_pg_cluster_dir with
start/stop backup actions with exception handling.
In particular there is a 'finally' block to stop the backup in
most situations.
"""
upload_good = False
backup_stop_good = False
while_offline = False
start_backup_info = None
if 'while_offline' in kwargs:
while_offline = kwargs.pop('while_offline')
try:
if not while_offline:
start_backup_info = PgBackupStatements.run_start_backup()
version = PgBackupStatements.pg_version()['version']
else:
if os.path.exists(os.path.join(data_directory,
'postmaster.pid')):
hint = ('Shut down postgres. '
'If there is a stale lockfile, '
'then remove it after being very sure postgres '
'is not running.')
raise UserException(
msg='while_offline set, but pg looks to be running',
detail='Found a postmaster.pid lockfile, and aborting',
hint=hint)
ctrl_data = PgControlDataParser(data_directory)
start_backup_info = ctrl_data.last_xlog_file_name_and_offset()
version = ctrl_data.pg_version()
ret_tuple = self._upload_pg_cluster_dir(
start_backup_info, data_directory, version=version, *args,
**kwargs)
spec, uploaded_to, expanded_size_bytes = ret_tuple
upload_good = True
finally:
if not upload_good:
logger.warning(
'blocking on sending WAL segments',
detail=('The backup was not completed successfully, '
'but we have to wait anyway. '
'See README: TODO about pg_cancel_backup'))
if not while_offline:
stop_backup_info = PgBackupStatements.run_stop_backup()
else:
stop_backup_info = start_backup_info
backup_stop_good = True
# XXX: Ugly, this is more of a 'worker' task because it might
# involve retries and error messages, something that is not
# treated by the "operator" category of modules. So
# basically, if this small upload fails, the whole upload
# fails!
if upload_good and backup_stop_good:
# Try to write a sentinel file to the cluster backup
# directory that indicates that the base backup upload has
# definitely run its course and also communicates what WAL
# segments are needed to get to consistency.
sentinel_content = json.dumps(
{'wal_segment_backup_stop':
stop_backup_info['file_name'],
'wal_segment_offset_backup_stop':
stop_backup_info['file_offset'],
'expanded_size_bytes': expanded_size_bytes,
'spec': spec})
# XXX: should use the storage operators.
#
# XXX: distinguish sentinels by *PREFIX* not suffix,
# which makes searching harder. (For the next version
# bump).
uri_put_file(self.creds,
uploaded_to + '_backup_stop_sentinel.json',
BytesIO(sentinel_content.encode("utf8")),
content_type='application/json')
else:
# NB: Other exceptions should be raised before this that
# have more informative results, it is intended that this
# exception never will get raised.
raise UserCritical('could not complete backup process') | [
"def",
"database_backup",
"(",
"self",
",",
"data_directory",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"upload_good",
"=",
"False",
"backup_stop_good",
"=",
"False",
"while_offline",
"=",
"False",
"start_backup_info",
"=",
"None",
"if",
"'while_off... | 44.629213 | 20.797753 |
def includeme(config):
"""
Activate the ``pyramid_retry`` execution policy in your application.
This will add the :func:`pyramid_retry.RetryableErrorPolicy` with
``attempts`` pulled from the ``retry.attempts`` setting.
The ``last_retry_attempt`` and ``retryable_error`` view predicates
are registered.
This should be included in your Pyramid application via
``config.include('pyramid_retry')``.
"""
settings = config.get_settings()
config.add_view_predicate('last_retry_attempt', LastAttemptPredicate)
config.add_view_predicate('retryable_error', RetryableErrorPredicate)
def register():
attempts = int(settings.get('retry.attempts') or 3)
settings['retry.attempts'] = attempts
activate_hook = settings.get('retry.activate_hook')
activate_hook = config.maybe_dotted(activate_hook)
policy = RetryableExecutionPolicy(
attempts,
activate_hook=activate_hook,
)
config.set_execution_policy(policy)
# defer registration to allow time to modify settings
config.action(None, register, order=PHASE1_CONFIG) | [
"def",
"includeme",
"(",
"config",
")",
":",
"settings",
"=",
"config",
".",
"get_settings",
"(",
")",
"config",
".",
"add_view_predicate",
"(",
"'last_retry_attempt'",
",",
"LastAttemptPredicate",
")",
"config",
".",
"add_view_predicate",
"(",
"'retryable_error'",
... | 32.735294 | 22.558824 |
def dataframe(self):
"""Build and cache a dataframe from query results"""
if self._dataframe is None:
try:
import pandas as pd
except ImportError:
raise RuntimeError('To enable dataframe support, '
'run \'pip install datadotworld[pandas]\'')
self._dataframe = pd.DataFrame.from_records(self._iter_rows(),
coerce_float=True)
return self._dataframe | [
"def",
"dataframe",
"(",
"self",
")",
":",
"if",
"self",
".",
"_dataframe",
"is",
"None",
":",
"try",
":",
"import",
"pandas",
"as",
"pd",
"except",
"ImportError",
":",
"raise",
"RuntimeError",
"(",
"'To enable dataframe support, '",
"'run \\'pip install datadotwo... | 37.071429 | 23.214286 |
def events(self):
""" All events in calendar within specified time range """
if self._events is None:
self._events = self.parent.calendar.events(
calendarId="primary", singleEvents=True, orderBy="startTime",
timeMin=self.since, timeMax=self.until)
return self._events | [
"def",
"events",
"(",
"self",
")",
":",
"if",
"self",
".",
"_events",
"is",
"None",
":",
"self",
".",
"_events",
"=",
"self",
".",
"parent",
".",
"calendar",
".",
"events",
"(",
"calendarId",
"=",
"\"primary\"",
",",
"singleEvents",
"=",
"True",
",",
... | 47 | 15.857143 |
def _print_MatMul(self, expr):
"""
Matrix multiplication printer. The sympy one turns everything into a
dot product without type-checking.
"""
from sympy import MatrixExpr
links = []
for i, j in zip(expr.args[1:], expr.args[:-1]):
if isinstance(i, MatrixExpr) and isinstance(j, MatrixExpr):
links.append(').dot(')
else:
links.append('*')
printouts = [self._print(i) for i in expr.args]
result = [printouts[0]]
for link, printout in zip(links, printouts[1:]):
result.extend([link, printout])
return '({0})'.format(''.join(result)) | [
"def",
"_print_MatMul",
"(",
"self",
",",
"expr",
")",
":",
"from",
"sympy",
"import",
"MatrixExpr",
"links",
"=",
"[",
"]",
"for",
"i",
",",
"j",
"in",
"zip",
"(",
"expr",
".",
"args",
"[",
"1",
":",
"]",
",",
"expr",
".",
"args",
"[",
":",
"-... | 39.352941 | 11.823529 |
def get_object_id_from_graph(access_token=None):
'''Return the object ID for the Graph user who owns the access token.
Args:
access_token (str): A Microsoft Graph access token. (Not an Azure access token.)
If not provided, attempt to get it from MSI_ENDPOINT.
Returns:
An object ID string for a user or service principal.
'''
if access_token is None:
access_token = get_graph_token_from_msi()
endpoint = 'https://' + GRAPH_RESOURCE_HOST + '/v1.0/me/'
headers = {'Authorization': 'Bearer ' + access_token, 'Host': GRAPH_RESOURCE_HOST}
ret = requests.get(endpoint, headers=headers)
return ret.json()['id'] | [
"def",
"get_object_id_from_graph",
"(",
"access_token",
"=",
"None",
")",
":",
"if",
"access_token",
"is",
"None",
":",
"access_token",
"=",
"get_graph_token_from_msi",
"(",
")",
"endpoint",
"=",
"'https://'",
"+",
"GRAPH_RESOURCE_HOST",
"+",
"'/v1.0/me/'",
"headers... | 39.882353 | 27.764706 |
def locked(self):
""" Determines if the queue is locked. """
if len(self.failed) == 0:
return False
for fail in self.failed:
for job in self.active_jobs:
if fail.alias in job.depends_on:
return True | [
"def",
"locked",
"(",
"self",
")",
":",
"if",
"len",
"(",
"self",
".",
"failed",
")",
"==",
"0",
":",
"return",
"False",
"for",
"fail",
"in",
"self",
".",
"failed",
":",
"for",
"job",
"in",
"self",
".",
"active_jobs",
":",
"if",
"fail",
".",
"ali... | 34.375 | 8.875 |
async def _request(self, method: str, url: str, headers: Mapping[str, str],
body: bytes = b'') -> Tuple[int, Mapping[str, str], bytes]:
"""Make an HTTP request."""
if method == "GET" and not body:
real_body = None
else:
real_body = body
request = httpclient.HTTPRequest(url, method, headers, real_body)
# Since Tornado has designed AsyncHTTPClient to be a singleton, there's
# no reason not to simply instantiate it every time.
client = httpclient.AsyncHTTPClient()
response = await client.fetch(request, raise_error=False)
return response.code, response.headers, response.body | [
"async",
"def",
"_request",
"(",
"self",
",",
"method",
":",
"str",
",",
"url",
":",
"str",
",",
"headers",
":",
"Mapping",
"[",
"str",
",",
"str",
"]",
",",
"body",
":",
"bytes",
"=",
"b''",
")",
"->",
"Tuple",
"[",
"int",
",",
"Mapping",
"[",
... | 52.615385 | 20.846154 |
def req(self, url, method='get', params=None, data=None, auth=False):
"""
请求API
:type url: str
:param url: API
:type method: str
:param method: HTTP METHOD
:type params: dict
:param params: query
:type data: dict
:param data: body
:type auth: bool
:param auth: if True and session expired will raise exception
:rtype: requests.Response
:return: Response
"""
self.logger.debug('fetch api<%s:%s>' % (method, url))
if auth and self.user_alias is None:
raise Exception('cannot fetch api<%s> without session' % url)
s = requests.Session()
r = s.request(method, url, params=params, data=data, cookies=self.cookies, headers=self.headers,
timeout=self.timeout)
s.close()
if r.url is not url and RE_SESSION_EXPIRE.search(r.url) is not None:
self.expire()
if auth:
raise Exception('auth expired, could not fetch with<%s>' % url)
return r | [
"def",
"req",
"(",
"self",
",",
"url",
",",
"method",
"=",
"'get'",
",",
"params",
"=",
"None",
",",
"data",
"=",
"None",
",",
"auth",
"=",
"False",
")",
":",
"self",
".",
"logger",
".",
"debug",
"(",
"'fetch api<%s:%s>'",
"%",
"(",
"method",
",",
... | 32.058824 | 21.411765 |
def _prepare(cls, context: Dict) -> Dict:
"""This is where you should alter the context to fit the action.
Default behaviour will leave the context as it is.
"""
if not context and cls.raise_empty_context:
raise PolyaxonActionException('{} received invalid payload context.'.format(cls.name))
return context | [
"def",
"_prepare",
"(",
"cls",
",",
"context",
":",
"Dict",
")",
"->",
"Dict",
":",
"if",
"not",
"context",
"and",
"cls",
".",
"raise_empty_context",
":",
"raise",
"PolyaxonActionException",
"(",
"'{} received invalid payload context.'",
".",
"format",
"(",
"cls... | 39.222222 | 20.666667 |
def tsToDf(tso):
"""
Create Pandas DataFrame from TimeSeries object.
Use: Must first extractTs to get a time series. Then pick one item from time series and pass it through
:param dict tso: Time series entry
:return dict dfs: Pandas dataframes
"""
dfs = {}
try:
dfs = ts_to_df(tso)
except Exception as e:
print("Error: Unable to create data frame")
logger_start.warn("ts_to_df: tso malformed: {}".format(e))
return dfs | [
"def",
"tsToDf",
"(",
"tso",
")",
":",
"dfs",
"=",
"{",
"}",
"try",
":",
"dfs",
"=",
"ts_to_df",
"(",
"tso",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Error: Unable to create data frame\"",
")",
"logger_start",
".",
"warn",
"(",
"\"ts_... | 31.266667 | 19.666667 |
def MethodCalled(self, mock_method):
"""Remove a method call from the group.
If the method is not in the set, an UnexpectedMethodCallError will be
raised.
Args:
mock_method: a mock method that should be equal to a method in the group.
Returns:
The mock method from the group
Raises:
UnexpectedMethodCallError if the mock_method was not in the group.
"""
# Check to see if this method exists, and if so, remove it from the set
# and return it.
for method in self._methods:
if method == mock_method:
# Remove the called mock_method instead of the method in the group.
# The called method will match any comparators when equality is checked
# during removal. The method in the group could pass a comparator to
# another comparator during the equality check.
self._methods.remove(mock_method)
# If this group is not empty, put it back at the head of the queue.
if not self.IsSatisfied():
mock_method._call_queue.appendleft(self)
return self, method
raise UnexpectedMethodCallError(mock_method, self) | [
"def",
"MethodCalled",
"(",
"self",
",",
"mock_method",
")",
":",
"# Check to see if this method exists, and if so, remove it from the set",
"# and return it.",
"for",
"method",
"in",
"self",
".",
"_methods",
":",
"if",
"method",
"==",
"mock_method",
":",
"# Remove the ca... | 33.757576 | 25.030303 |
def get_rules(family='ipv4'):
'''
Return a data structure of the current, in-memory rules
CLI Example:
.. code-block:: bash
salt '*' nftables.get_rules
salt '*' nftables.get_rules family=ipv6
'''
nft_family = _NFTABLES_FAMILIES[family]
rules = []
cmd = '{0} --numeric --numeric --numeric ' \
'list tables {1}'. format(_nftables_cmd(),
nft_family)
out = __salt__['cmd.run'](cmd, python_shell=False)
if not out:
return rules
tables = re.split('\n+', out)
for table in tables:
table_name = table.split(' ')[1]
cmd = '{0} --numeric --numeric --numeric ' \
'list table {1} {2}'.format(_nftables_cmd(),
nft_family, table_name)
out = __salt__['cmd.run'](cmd, python_shell=False)
rules.append(out)
return rules | [
"def",
"get_rules",
"(",
"family",
"=",
"'ipv4'",
")",
":",
"nft_family",
"=",
"_NFTABLES_FAMILIES",
"[",
"family",
"]",
"rules",
"=",
"[",
"]",
"cmd",
"=",
"'{0} --numeric --numeric --numeric '",
"'list tables {1}'",
".",
"format",
"(",
"_nftables_cmd",
"(",
")... | 28.645161 | 20.580645 |
def load(target:str, namespace:str=None, default=nodefault, executable:bool=False, separators:Sequence[str]=('.', ':'),
protect:bool=True):
"""This helper function loads an object identified by a dotted-notation string.
For example::
# Load class Foo from example.objects
load('example.objects:Foo')
# Load the result of the class method ``new`` of the Foo object
load('example.objects:Foo.new', executable=True)
If a plugin namespace is provided simple name references are allowed. For example::
# Load the plugin named 'routing' from the 'web.dispatch' namespace
load('routing', 'web.dispatch')
The ``executable``, ``protect``, and first tuple element of ``separators`` are passed to the traverse function.
Providing a namespace does not prevent full object lookup (dot-colon notation) from working.
"""
assert check_argument_types()
if namespace and ':' not in target:
allowable = dict((i.name, i) for i in iter_entry_points(namespace))
if target not in allowable:
raise LookupError('Unknown plugin "' + target + '"; found: ' + ', '.join(allowable))
return allowable[target].load()
parts, _, target = target.partition(separators[1])
try:
obj = __import__(parts)
except ImportError:
if default is not nodefault:
return default
raise
return traverse(
obj,
separators[0].join(parts.split(separators[0])[1:] + target.split(separators[0])),
default = default,
executable = executable,
protect = protect
) if target else obj | [
"def",
"load",
"(",
"target",
":",
"str",
",",
"namespace",
":",
"str",
"=",
"None",
",",
"default",
"=",
"nodefault",
",",
"executable",
":",
"bool",
"=",
"False",
",",
"separators",
":",
"Sequence",
"[",
"str",
"]",
"=",
"(",
"'.'",
",",
"':'",
"... | 30.604167 | 28.020833 |
def base_boxes(self):
"""
Get the list of vagrant base boxes
"""
return sorted(list(set([name for name, provider in self._box_list()]))) | [
"def",
"base_boxes",
"(",
"self",
")",
":",
"return",
"sorted",
"(",
"list",
"(",
"set",
"(",
"[",
"name",
"for",
"name",
",",
"provider",
"in",
"self",
".",
"_box_list",
"(",
")",
"]",
")",
")",
")"
] | 32.8 | 12 |
def usb(
ctx, enable, disable, list, enable_all, touch_eject, no_touch_eject,
autoeject_timeout, chalresp_timeout, lock_code, force):
"""
Enable or disable applications over USB.
"""
def ensure_not_all_disabled(ctx, usb_enabled):
for app in APPLICATION:
if app & usb_enabled:
return
ctx.fail('Can not disable all applications over USB.')
if not (list or
enable_all or
enable or
disable or
touch_eject or
no_touch_eject or
autoeject_timeout or
chalresp_timeout):
ctx.fail('No configuration options chosen.')
enable = APPLICATION.__members__.keys() if enable_all else enable
_ensure_not_invalid_options(ctx, enable, disable)
if touch_eject and no_touch_eject:
ctx.fail('Invalid options.')
dev = ctx.obj['dev']
usb_supported = dev.config.usb_supported
usb_enabled = dev.config.usb_enabled
flags = dev.config.device_flags
if not usb_supported:
ctx.fail('USB interface not supported.')
if list:
_list_apps(ctx, usb_enabled)
if touch_eject:
flags |= FLAGS.MODE_FLAG_EJECT
if no_touch_eject:
flags &= ~FLAGS.MODE_FLAG_EJECT
for app in enable:
if APPLICATION[app] & usb_supported:
usb_enabled |= APPLICATION[app]
else:
ctx.fail('{} not supported over USB.'.format(app))
for app in disable:
if APPLICATION[app] & usb_supported:
usb_enabled &= ~APPLICATION[app]
else:
ctx.fail('{} not supported over USB.'.format(app))
ensure_not_all_disabled(ctx, usb_enabled)
f_confirm = '{}{}{}{}{}{}Configure USB interface?'.format(
'Enable {}.\n'.format(
', '.join(
[str(APPLICATION[app]) for app in enable])) if enable else '',
'Disable {}.\n'.format(
', '.join(
[str(APPLICATION[app]) for app in disable])) if disable else '',
'Set touch eject.\n' if touch_eject else '',
'Disable touch eject.\n' if no_touch_eject else '',
'Set autoeject timeout to {}.\n'.format(
autoeject_timeout) if autoeject_timeout else '',
'Set challenge-response timeout to {}.\n'.format(
chalresp_timeout) if chalresp_timeout else '')
is_locked = dev.config.configuration_locked
if force and is_locked and not lock_code:
ctx.fail('Configuration is locked - please supply the --lock-code '
'option.')
if lock_code and not is_locked:
ctx.fail('Configuration is not locked - please remove the '
'--lock-code option.')
force or click.confirm(f_confirm, abort=True, err=True)
if is_locked and not lock_code:
lock_code = prompt_lock_code()
if lock_code:
lock_code = _parse_lock_code(ctx, lock_code)
try:
dev.write_config(
device_config(
usb_enabled=usb_enabled,
flags=flags,
auto_eject_timeout=autoeject_timeout,
chalresp_timeout=chalresp_timeout),
reboot=True,
lock_key=lock_code)
except Exception as e:
logger.error('Failed to write config', exc_info=e)
ctx.fail('Failed to configure USB applications.') | [
"def",
"usb",
"(",
"ctx",
",",
"enable",
",",
"disable",
",",
"list",
",",
"enable_all",
",",
"touch_eject",
",",
"no_touch_eject",
",",
"autoeject_timeout",
",",
"chalresp_timeout",
",",
"lock_code",
",",
"force",
")",
":",
"def",
"ensure_not_all_disabled",
"... | 31.786408 | 18.893204 |
def ReadAttachments(self, document_link, options=None):
"""Reads all attachments in a document.
:param str document_link:
The link to the document.
:param dict options:
The request options for the request.
:return:
Query Iterable of Attachments.
:rtype:
query_iterable.QueryIterable
"""
if options is None:
options = {}
return self.QueryAttachments(document_link, None, options) | [
"def",
"ReadAttachments",
"(",
"self",
",",
"document_link",
",",
"options",
"=",
"None",
")",
":",
"if",
"options",
"is",
"None",
":",
"options",
"=",
"{",
"}",
"return",
"self",
".",
"QueryAttachments",
"(",
"document_link",
",",
"None",
",",
"options",
... | 27.166667 | 17.277778 |
def list_corpora(self):
"""Show corpora available for the CLTK to download."""
try:
# corpora = LANGUAGE_CORPORA[self.language]
corpora = self.all_corpora
corpus_names = [corpus['name'] for corpus in corpora]
return corpus_names
except (NameError, KeyError) as error:
msg = 'Corpus not available for language "{}": {}'.format(self.language, error)
logger.error(msg)
raise CorpusImportError(msg) | [
"def",
"list_corpora",
"(",
"self",
")",
":",
"try",
":",
"# corpora = LANGUAGE_CORPORA[self.language]",
"corpora",
"=",
"self",
".",
"all_corpora",
"corpus_names",
"=",
"[",
"corpus",
"[",
"'name'",
"]",
"for",
"corpus",
"in",
"corpora",
"]",
"return",
"corpus_... | 44.727273 | 14.909091 |
def decimate(self, fraction=0.5, N=None, boundaries=False, verbose=True):
"""
Downsample the number of vertices in a mesh.
:param float fraction: the desired target of reduction.
:param int N: the desired number of final points (**fraction** is recalculated based on it).
:param bool boundaries: (True), decide whether to leave boundaries untouched or not.
.. note:: Setting ``fraction=0.1`` leaves 10% of the original nr of vertices.
.. hint:: |skeletonize| |skeletonize.py|_
"""
poly = self.polydata(True)
if N: # N = desired number of points
Np = poly.GetNumberOfPoints()
fraction = float(N) / Np
if fraction >= 1:
return self
decimate = vtk.vtkDecimatePro()
decimate.SetInputData(poly)
decimate.SetTargetReduction(1 - fraction)
decimate.PreserveTopologyOff()
if boundaries:
decimate.BoundaryVertexDeletionOff()
else:
decimate.BoundaryVertexDeletionOn()
decimate.Update()
if verbose:
print("Nr. of pts, input:", poly.GetNumberOfPoints(), end="")
print(" output:", decimate.GetOutput().GetNumberOfPoints())
return self.updateMesh(decimate.GetOutput()) | [
"def",
"decimate",
"(",
"self",
",",
"fraction",
"=",
"0.5",
",",
"N",
"=",
"None",
",",
"boundaries",
"=",
"False",
",",
"verbose",
"=",
"True",
")",
":",
"poly",
"=",
"self",
".",
"polydata",
"(",
"True",
")",
"if",
"N",
":",
"# N = desired number ... | 39.96875 | 19.46875 |
def get_sdf(identifier, namespace='cid', domain='compound',operation=None, searchtype=None, **kwargs):
"""Request wrapper that automatically parses SDF response and supresses NotFoundError."""
try:
return get(identifier, namespace, domain, operation, 'SDF', searchtype, **kwargs).decode()
except NotFoundError as e:
log.info(e)
return None | [
"def",
"get_sdf",
"(",
"identifier",
",",
"namespace",
"=",
"'cid'",
",",
"domain",
"=",
"'compound'",
",",
"operation",
"=",
"None",
",",
"searchtype",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"try",
":",
"return",
"get",
"(",
"identifier",
","... | 52.714286 | 29.142857 |
def get_path_name(self):
"""Gets path and name of song
:return: Name of path, name of file (or folder)
"""
path = fix_raw_path(os.path.dirname(os.path.abspath(self.path)))
name = os.path.basename(self.path)
return path, name | [
"def",
"get_path_name",
"(",
"self",
")",
":",
"path",
"=",
"fix_raw_path",
"(",
"os",
".",
"path",
".",
"dirname",
"(",
"os",
".",
"path",
".",
"abspath",
"(",
"self",
".",
"path",
")",
")",
")",
"name",
"=",
"os",
".",
"path",
".",
"basename",
... | 33.25 | 15 |
def buildCommand(self,fileName,count,args):
"""
This is an internal method, building the command for a particular file.
"""
# Escape all placeholders in the file path:
fileNameWithPath = self.escapePlaceholders(fileName)
# The command is split into 'parts', which are separated by blank spaces:
commandParts = args.command.split(' ')
processedParts = []
# Each part of the command is processed separately:
for part in commandParts:
processedParts.append(self.buildPart(part,fileNameWithPath,count,args))
# The parts are put together at the end and the new command is returned:
return self.unescapePlaceholders(' '.join(processedParts)) | [
"def",
"buildCommand",
"(",
"self",
",",
"fileName",
",",
"count",
",",
"args",
")",
":",
"# Escape all placeholders in the file path:",
"fileNameWithPath",
"=",
"self",
".",
"escapePlaceholders",
"(",
"fileName",
")",
"# The command is split into 'parts', which are separat... | 48.2 | 20.066667 |
def chmod(path, mode, recursive=True):
""" alternative to os.
"""
if recursive:
cmd = 'chmod -R %s %s' % (mode, path)
else:
cmd = 'chmod %s %s' % (mode, path)
return sh(cmd) | [
"def",
"chmod",
"(",
"path",
",",
"mode",
",",
"recursive",
"=",
"True",
")",
":",
"if",
"recursive",
":",
"cmd",
"=",
"'chmod -R %s %s'",
"%",
"(",
"mode",
",",
"path",
")",
"else",
":",
"cmd",
"=",
"'chmod %s %s'",
"%",
"(",
"mode",
",",
"path",
... | 20.2 | 16.5 |
def aggregate_stat(origin_stat, new_stat):
""" aggregate new_stat to origin_stat.
Args:
origin_stat (dict): origin stat dict, will be updated with new_stat dict.
new_stat (dict): new stat dict.
"""
for key in new_stat:
if key not in origin_stat:
origin_stat[key] = new_stat[key]
elif key == "start_at":
# start datetime
origin_stat[key] = min(origin_stat[key], new_stat[key])
else:
origin_stat[key] += new_stat[key] | [
"def",
"aggregate_stat",
"(",
"origin_stat",
",",
"new_stat",
")",
":",
"for",
"key",
"in",
"new_stat",
":",
"if",
"key",
"not",
"in",
"origin_stat",
":",
"origin_stat",
"[",
"key",
"]",
"=",
"new_stat",
"[",
"key",
"]",
"elif",
"key",
"==",
"\"start_at\... | 31.625 | 16.3125 |
def stats():
'''
Returns statistics about the locate database
CLI Example:
.. code-block:: bash
salt '*' locate.stats
'''
ret = {}
cmd = 'locate -S'
out = __salt__['cmd.run'](cmd).splitlines()
for line in out:
comps = line.strip().split()
if line.startswith('Database'):
ret['database'] = comps[1].replace(':', '')
continue
ret[' '.join(comps[1:])] = comps[0]
return ret | [
"def",
"stats",
"(",
")",
":",
"ret",
"=",
"{",
"}",
"cmd",
"=",
"'locate -S'",
"out",
"=",
"__salt__",
"[",
"'cmd.run'",
"]",
"(",
"cmd",
")",
".",
"splitlines",
"(",
")",
"for",
"line",
"in",
"out",
":",
"comps",
"=",
"line",
".",
"strip",
"(",... | 22.5 | 20.8 |
def all(self, cache=False):
"""
can use cache to return objects
"""
if cache:
return [get_object(self.modelb, obj_id, cache=True, use_local=True) for obj_id in self.keys(True)]
else:
return self | [
"def",
"all",
"(",
"self",
",",
"cache",
"=",
"False",
")",
":",
"if",
"cache",
":",
"return",
"[",
"get_object",
"(",
"self",
".",
"modelb",
",",
"obj_id",
",",
"cache",
"=",
"True",
",",
"use_local",
"=",
"True",
")",
"for",
"obj_id",
"in",
"self... | 31.375 | 18.875 |
def fill_altgoids(go2obj):
"""Given a go2obj containing key GO IDs, fill with all alternate GO IDs."""
alt2obj = {altgo:goobj for goobj in go2obj.values() for altgo in goobj.alt_ids}
for goid, goobj in alt2obj.items():
go2obj[goid] = goobj | [
"def",
"fill_altgoids",
"(",
"go2obj",
")",
":",
"alt2obj",
"=",
"{",
"altgo",
":",
"goobj",
"for",
"goobj",
"in",
"go2obj",
".",
"values",
"(",
")",
"for",
"altgo",
"in",
"goobj",
".",
"alt_ids",
"}",
"for",
"goid",
",",
"goobj",
"in",
"alt2obj",
".... | 51 | 14 |
def get_version(version=None):
"""Returns a PEP 386-compliant version number from VERSION.
:param version: A tuple that represent a version.
:type version: tuple
:returns: a PEP 386-compliant version number.
:rtype: str
"""
if version is None:
version_list = inasafe_version.split('.')
version = tuple(version_list + [inasafe_release_status] + ['0'])
if len(version) != 5:
msg = 'Version must be a tuple of length 5. I got %s' % (version,)
raise RuntimeError(msg)
if version[3] not in ('alpha', 'beta', 'rc', 'final'):
msg = 'Version tuple not as expected. I got %s' % (version,)
raise RuntimeError(msg)
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
# This crashes on windows
if version[3] == 'alpha' and version[4] == '0':
# Currently failed on windows and mac
if 'win32' in sys.platform or 'darwin' in sys.platform:
sub = '.dev-master'
else:
try:
git_hash = current_git_hash()
if git_hash:
sub = '.dev-%s' % git_hash
except WindowsError:
sub = '.dev-master'
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return main + sub | [
"def",
"get_version",
"(",
"version",
"=",
"None",
")",
":",
"if",
"version",
"is",
"None",
":",
"version_list",
"=",
"inasafe_version",
".",
"split",
"(",
"'.'",
")",
"version",
"=",
"tuple",
"(",
"version_list",
"+",
"[",
"inasafe_release_status",
"]",
"... | 32 | 18.625 |
def increment_times_modified(self):
"""
Increments the number of times this resource has been modified by all
processes.
"""
rc = self.conn.client.incr(self.times_modified_key)
self.conn.client.pexpire(self.times_modified_key,
phonon.s_to_ms(TTL)) | [
"def",
"increment_times_modified",
"(",
"self",
")",
":",
"rc",
"=",
"self",
".",
"conn",
".",
"client",
".",
"incr",
"(",
"self",
".",
"times_modified_key",
")",
"self",
".",
"conn",
".",
"client",
".",
"pexpire",
"(",
"self",
".",
"times_modified_key",
... | 40.125 | 14.125 |
def read(fname, merge_duplicate_shots=False, encoding='windows-1252'):
"""Read a PocketTopo .TXT file and produce a `TxtFile` object which represents it"""
return PocketTopoTxtParser(fname, merge_duplicate_shots, encoding).parse() | [
"def",
"read",
"(",
"fname",
",",
"merge_duplicate_shots",
"=",
"False",
",",
"encoding",
"=",
"'windows-1252'",
")",
":",
"return",
"PocketTopoTxtParser",
"(",
"fname",
",",
"merge_duplicate_shots",
",",
"encoding",
")",
".",
"parse",
"(",
")"
] | 81.333333 | 24 |
def firstVariant():
"""first variant of Variants
Read-only
"""
def fget(self):
if self.variants:
return self.variants[0]
else:
variant = Variant()
return variant
return locals() | [
"def",
"firstVariant",
"(",
")",
":",
"def",
"fget",
"(",
"self",
")",
":",
"if",
"self",
".",
"variants",
":",
"return",
"self",
".",
"variants",
"[",
"0",
"]",
"else",
":",
"variant",
"=",
"Variant",
"(",
")",
"return",
"variant",
"return",
"locals... | 22.076923 | 15.384615 |
def migrate(connection, dsn):
""" Collects all migrations and applies missed.
Args:
connection (sqlalchemy connection):
"""
all_migrations = _get_all_migrations()
logger.debug('Collected migrations: {}'.format(all_migrations))
for version, modname in all_migrations:
if _is_missed(connection, version) and version <= SCHEMA_VERSION:
logger.info('Missed migration: {} migration is missed. Migrating...'.format(version))
module = __import__(modname, fromlist='dummy')
# run each migration under its own transaction. This allows us to apply valid migrations
# and break on invalid.
trans = connection.begin()
try:
module.Migration().migrate(connection)
_update_version(connection, version)
trans.commit()
except:
trans.rollback()
logger.error("Failed to migrate '{}' on {} ".format(version, dsn))
raise | [
"def",
"migrate",
"(",
"connection",
",",
"dsn",
")",
":",
"all_migrations",
"=",
"_get_all_migrations",
"(",
")",
"logger",
".",
"debug",
"(",
"'Collected migrations: {}'",
".",
"format",
"(",
"all_migrations",
")",
")",
"for",
"version",
",",
"modname",
"in"... | 38.423077 | 21.653846 |
def get(self, sid):
"""
Constructs a DeploymentContext
:param sid: The sid
:returns: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
:rtype: twilio.rest.serverless.v1.service.environment.deployment.DeploymentContext
"""
return DeploymentContext(
self._version,
service_sid=self._solution['service_sid'],
environment_sid=self._solution['environment_sid'],
sid=sid,
) | [
"def",
"get",
"(",
"self",
",",
"sid",
")",
":",
"return",
"DeploymentContext",
"(",
"self",
".",
"_version",
",",
"service_sid",
"=",
"self",
".",
"_solution",
"[",
"'service_sid'",
"]",
",",
"environment_sid",
"=",
"self",
".",
"_solution",
"[",
"'enviro... | 32.8 | 21.733333 |
def is_default(self):
"""
``True`` if the item's value is its default value or if no value and no default value are set.
If the item is backed by an environment variable, this will be ``True`` only
if the environment variable is set and is different to the
default value of the item.
"""
envvar_value = self._get_envvar_value()
if envvar_value is not not_set:
return envvar_value == self.default
else:
return self._value is not_set or self._value == self.default | [
"def",
"is_default",
"(",
"self",
")",
":",
"envvar_value",
"=",
"self",
".",
"_get_envvar_value",
"(",
")",
"if",
"envvar_value",
"is",
"not",
"not_set",
":",
"return",
"envvar_value",
"==",
"self",
".",
"default",
"else",
":",
"return",
"self",
".",
"_va... | 42.076923 | 20.846154 |
def render_view(view_name, **args):
'''Process view and return root Node'''
try:
root_xml = get_view_root(view_name)
return render(root_xml, **args)
except CoreError as error:
error.add_view_info(ViewInfo(view_name, None))
raise
except:
info = exc_info()
error = ViewError('Unknown error occured during rendering', ViewInfo(view_name, None))
error.add_cause(info[1])
raise error from info[1] | [
"def",
"render_view",
"(",
"view_name",
",",
"*",
"*",
"args",
")",
":",
"try",
":",
"root_xml",
"=",
"get_view_root",
"(",
"view_name",
")",
"return",
"render",
"(",
"root_xml",
",",
"*",
"*",
"args",
")",
"except",
"CoreError",
"as",
"error",
":",
"e... | 35.307692 | 16.076923 |
def _logger_api(self):
"""Add API logging handler."""
from .tcex_logger import TcExLogHandler, TcExLogFormatter
api = TcExLogHandler(self.session)
api.set_name('api')
api.setLevel(logging.DEBUG)
api.setFormatter(TcExLogFormatter())
self.log.addHandler(api) | [
"def",
"_logger_api",
"(",
"self",
")",
":",
"from",
".",
"tcex_logger",
"import",
"TcExLogHandler",
",",
"TcExLogFormatter",
"api",
"=",
"TcExLogHandler",
"(",
"self",
".",
"session",
")",
"api",
".",
"set_name",
"(",
"'api'",
")",
"api",
".",
"setLevel",
... | 33.888889 | 12.777778 |
def associate_notification_template(self, job_template,
notification_template, status):
"""Associate a notification template from this job template.
=====API DOCS=====
Associate a notification template from this job template.
:param job_template: The job template to associate to.
:type job_template: str
:param notification_template: The notification template to be associated.
:type notification_template: str
:param status: type of notification this notification template should be associated to.
:type status: str
:returns: Dictionary of only one key "changed", which indicates whether the association succeeded.
:rtype: dict
=====API DOCS=====
"""
return self._assoc('notification_templates_%s' % status,
job_template, notification_template) | [
"def",
"associate_notification_template",
"(",
"self",
",",
"job_template",
",",
"notification_template",
",",
"status",
")",
":",
"return",
"self",
".",
"_assoc",
"(",
"'notification_templates_%s'",
"%",
"status",
",",
"job_template",
",",
"notification_template",
")... | 45.45 | 24.7 |
def ttSparseALS(cooP, shape, x0=None, ttRank=1, tol=1e-5, maxnsweeps=20, verbose=True, alpha=1e-2):
'''
TT completion via Alternating Least Squares algorithm.
Parameters:
:dict: cooP
dictionary with two records
- 'indices': numpy.array of P x d shape,
contains index subspace of P known elements;
each string is an index of one element.
- 'values': numpy array of size P,
contains P known values.
:list, numpy.array: shape
full-format shape of tensor to be completed [dimensions]
:tt.vector: x0 = None
initial approximation of completed tensor
If it is specified, parameters 'shape' and 'ttRank' will be ignored
:int, numpy.array: ttRank = 1
assumed rank of completed tensor
:float: tol = 1e-5
tolerance for functional value
:int: maxnsweeps = 20
maximal number of sweeps [sequential optimization of all d cores
in right or left direction]
:boolean: verbose = True
switcher of messages from function
:float: alpha: = 1e-2
regularizer of least squares problem for each slice of current TT core.
[rcond parameter for np.linalg.lstsq]
Returns:
:tt.vector: xNew
completed TT vector
:list: fit
list of functional values at each sweep
'''
indices = cooP['indices']
values = cooP['values']
[P, d] = indices.shape
assert P == len(values)
timeVal = time.clock()
if x0 is None:
x = tt.rand(shape, r = ttRank)
x = x.round(0.)
x = (1./x.norm())*x
else:
x = copy.deepcopy(x0)
assert d == x.d
# TODO: also check if cooP indices are aligned with shape
normP = np.linalg.norm(values)
values /= normP
fitList = []
sweepTimeList = []
initTime = time.clock() - timeVal
timeVal = time.clock()
coreList = tt.vector.to_list(x)
#coreList = orthLRFull(coreList, mu = d, splitResult = False)
# orthTime = time.clock() - timeVal
if verbose:
print("Initialization time: %.3f seconds (proc.time)" % (initTime))
# print "Orthogonalizing time: %.3f seconds (proc.time)" % (orthTime)
for sweep in xrange(maxnsweeps):
sweepStart = time.clock()
# list left + right
[kStart, kEnd, kStep] = [0, d, 1]
# select direction of sweep
'''
if sweep % 2 == 0: # left to rigth
[kStart, kEnd, kStep] = [0, d, 1]
else: # right to left
[kStart, kEnd, kStep] = [d-1, -1, -1]
'''
# fix k-th core to update
for k in xrange(kStart, kEnd, kStep):
[r1, n, r2] = coreList[k].shape
core = np.zeros([r1, n, r2])
leftU = []
rightV = []
if k > 0:
leftU = coreList[:k]
if k < d-1:
rightV = coreList[k+1:]
for i in xrange(n):
thetaI = np.where(indices[:, k] == i)[0]
if len(thetaI) > 0:
A = np.zeros([len(thetaI), r1*r2])
for j in xrange(len(thetaI)):
tmp = getRow(leftU, rightV, indices[thetaI[j], :])
A[j:j+1, :] += tmp # .flatten(order = 'F')
vecCoreSlice, _, _, _ = np.linalg.lstsq(A, values[thetaI])#, rcond = alpha)
# 0.5*np.linalg.norm(np.dot(A, vecCoreSlice) - values[thetaI])**2.
core[:, i, :] += reshape(vecCoreSlice, [r1, r2]) ####
'''
if k < (d-1):
core = reshape(core, [r1*n, r2])
Q, R = np.linalg.qr(core)
rnew = Q.shape[1]
core = reshape(Q, [r1, n, rnew])
coreList[k+1] = np.einsum('ijk,li->ljk', coreList[k+1], R)
'''
coreList[k] = core.copy()
'''
else:
if (k > 0):
core = reshape(core, [r1, n*r2])
Q, R = np.linalg.qr(core.T)
rnew = Q.shape[1]
core = reshape(Q.T, [rnew, n, r2])
coreList[k-1] = np.einsum('ijk,lk->ijl', coreList[k-1], R)
'''
xNew = tt.vector.from_list(coreList)
fit = computeFunctional(xNew, cooP)
fitList.append(fit)
if fit < tol:
break
if sweep > 0:
if abs(fit - fitList[-2]) < tol:
break
sweepTimeList.append(time.clock() - sweepStart)
if verbose:
print("sweep %d/%d\t fit value: %.5e\t time: %.3f seconds (proc.time)" % (sweep+1, maxnsweeps, fit, sweepTimeList[-1]))
if verbose:
print("Total sweep time: %.3f seconds (proc.time)\t Total time: %.3f seconds (proc.time)" % (sum(sweepTimeList), sum(sweepTimeList) + initTime))# + orthTime)
info = {'fit': fitList, 'initTime': initTime, 'sweepTime': sweepTimeList} # 'orthTime': orthTime,
xNew *= normP
values *= normP
return xNew, info | [
"def",
"ttSparseALS",
"(",
"cooP",
",",
"shape",
",",
"x0",
"=",
"None",
",",
"ttRank",
"=",
"1",
",",
"tol",
"=",
"1e-5",
",",
"maxnsweeps",
"=",
"20",
",",
"verbose",
"=",
"True",
",",
"alpha",
"=",
"1e-2",
")",
":",
"indices",
"=",
"cooP",
"["... | 37.873134 | 19.08209 |
def list_commands(self, page_size=None):
"""
Lists the commands visible to this client.
Commands are returned in lexicographical order.
:rtype: :class:`.Command` iterator
"""
params = {}
if page_size is not None:
params['limit'] = page_size
return pagination.Iterator(
client=self._client,
path='/mdb/{}/commands'.format(self._instance),
params=params,
response_class=mdb_pb2.ListCommandsResponse,
items_key='command',
item_mapper=Command,
) | [
"def",
"list_commands",
"(",
"self",
",",
"page_size",
"=",
"None",
")",
":",
"params",
"=",
"{",
"}",
"if",
"page_size",
"is",
"not",
"None",
":",
"params",
"[",
"'limit'",
"]",
"=",
"page_size",
"return",
"pagination",
".",
"Iterator",
"(",
"client",
... | 27.666667 | 15.47619 |
def _Open(self, path_spec, mode='rb'):
"""Opens the file system defined by path specification.
Args:
path_spec (PathSpec): a path specification.
mode (Optional[str])): file access mode. The default is 'rb' read-only
binary.
Raises:
AccessError: if the access to open the file was denied.
IOError: if the file system object could not be opened.
PathSpecError: if the path specification is incorrect.
ValueError: if the path specification is invalid.
"""
if not path_spec.HasParent():
raise errors.PathSpecError(
'Unsupported path specification without parent.')
file_object = resolver.Resolver.OpenFileObject(
path_spec.parent, resolver_context=self._resolver_context)
try:
fsapfs_container = pyfsapfs.container()
fsapfs_container.open_file_object(file_object)
except:
file_object.close()
raise
self._file_object = file_object
self._fsapfs_container = fsapfs_container | [
"def",
"_Open",
"(",
"self",
",",
"path_spec",
",",
"mode",
"=",
"'rb'",
")",
":",
"if",
"not",
"path_spec",
".",
"HasParent",
"(",
")",
":",
"raise",
"errors",
".",
"PathSpecError",
"(",
"'Unsupported path specification without parent.'",
")",
"file_object",
... | 32.566667 | 20.3 |
def set_uri(self, uri, size, checksum, readable=True, writable=False,
storage_class=None):
"""Set a location of a file."""
self.uri = uri
self.size = size
self.checksum = checksum
self.writable = writable
self.readable = readable
self.storage_class = \
current_app.config['FILES_REST_DEFAULT_STORAGE_CLASS'] \
if storage_class is None else \
storage_class
return self | [
"def",
"set_uri",
"(",
"self",
",",
"uri",
",",
"size",
",",
"checksum",
",",
"readable",
"=",
"True",
",",
"writable",
"=",
"False",
",",
"storage_class",
"=",
"None",
")",
":",
"self",
".",
"uri",
"=",
"uri",
"self",
".",
"size",
"=",
"size",
"se... | 36.230769 | 12.923077 |
def toggle_value(request, name):
"""
For manual shortcut links to perform toggle actions
"""
obj = service.system.namespace.get(name, None)
if not obj or service.read_only:
raise Http404
new_status = obj.status = not obj.status
if service.redirect_from_setters:
return HttpResponseRedirect(reverse('set_ready', args=(name, new_status)))
else:
return set_ready(request, name, new_status) | [
"def",
"toggle_value",
"(",
"request",
",",
"name",
")",
":",
"obj",
"=",
"service",
".",
"system",
".",
"namespace",
".",
"get",
"(",
"name",
",",
"None",
")",
"if",
"not",
"obj",
"or",
"service",
".",
"read_only",
":",
"raise",
"Http404",
"new_status... | 35.916667 | 12.25 |
def siret_validator():
"""Validate a SIRET: check its length (14), its final code, and pass it
through the Luhn algorithm."""
def _validate_siret(form, field, siret=""):
"""SIRET validator.
A WTForm validator wants a form and a field as parameters. We
also want to give directly a siret, for a scripting use.
"""
if field is not None:
siret = (field.data or "").strip()
if len(siret) != 14:
msg = _("SIRET must have exactly 14 characters ({count})").format(
count=len(siret)
)
raise validators.ValidationError(msg)
if not all(("0" <= c <= "9") for c in siret):
if not siret[-3:] in SIRET_CODES:
msg = _(
"SIRET looks like special SIRET but geographical "
"code seems invalid (%(code)s)",
code=siret[-3:],
)
raise validators.ValidationError(msg)
elif not luhn(siret):
msg = _("SIRET number is invalid (length is ok: verify numbers)")
raise validators.ValidationError(msg)
return _validate_siret | [
"def",
"siret_validator",
"(",
")",
":",
"def",
"_validate_siret",
"(",
"form",
",",
"field",
",",
"siret",
"=",
"\"\"",
")",
":",
"\"\"\"SIRET validator.\n\n A WTForm validator wants a form and a field as parameters. We\n also want to give directly a siret, for a scr... | 35 | 18.666667 |
def create_archiver(typename):
"""Returns Archivers in common configurations.
:API: public
The typename must correspond to one of the following:
'tar' Returns a tar archiver that applies no compression and emits .tar files.
'tgz' Returns a tar archiver that applies gzip compression and emits .tar.gz files.
'tbz2' Returns a tar archiver that applies bzip2 compression and emits .tar.bz2 files.
'zip' Returns a zip archiver that applies standard compression and emits .zip files.
'jar' Returns a jar archiver that applies no compression and emits .jar files.
Note this is provided as a light way of zipping input files into a jar, without the
need to prepare Manifest etc. For more advanced usages, please refer to :class:
`pants.backend.jvm.subsystems.jar_tool.JarTool` or :class:
`pants.backend.jvm.tasks.jar_task.JarTask`.
"""
archiver = _ARCHIVER_BY_TYPE.get(typename)
if not archiver:
raise ValueError('No archiver registered for {!r}'.format(typename))
return archiver | [
"def",
"create_archiver",
"(",
"typename",
")",
":",
"archiver",
"=",
"_ARCHIVER_BY_TYPE",
".",
"get",
"(",
"typename",
")",
"if",
"not",
"archiver",
":",
"raise",
"ValueError",
"(",
"'No archiver registered for {!r}'",
".",
"format",
"(",
"typename",
")",
")",
... | 50.5 | 27.95 |
def get_abbr_impl():
"""Return abbreviated implementation name."""
impl = platform.python_implementation()
if impl == 'PyPy':
return 'pp'
elif impl == 'Jython':
return 'jy'
elif impl == 'IronPython':
return 'ip'
elif impl == 'CPython':
return 'cp'
raise LookupError('Unknown Python implementation: ' + impl) | [
"def",
"get_abbr_impl",
"(",
")",
":",
"impl",
"=",
"platform",
".",
"python_implementation",
"(",
")",
"if",
"impl",
"==",
"'PyPy'",
":",
"return",
"'pp'",
"elif",
"impl",
"==",
"'Jython'",
":",
"return",
"'jy'",
"elif",
"impl",
"==",
"'IronPython'",
":",... | 27.384615 | 17.307692 |
def _print_refs(self, refs, total, prefix=' ',
level=1, minsize=0, minpct=0.1):
"""
Print individual referents recursively.
"""
lrefs = list(refs)
lrefs.sort(key=lambda x: x.size)
lrefs.reverse()
for ref in lrefs:
if ref.size > minsize and (ref.size*100.0/total) > minpct:
self.stream.write('%-50s %-14s %3d%% [%d]\n' % (
trunc(prefix+str(ref.name), 50),
pp(ref.size),
int(ref.size*100.0/total),
level
))
self._print_refs(ref.refs, total, prefix=prefix+' ',
level=level+1) | [
"def",
"_print_refs",
"(",
"self",
",",
"refs",
",",
"total",
",",
"prefix",
"=",
"' '",
",",
"level",
"=",
"1",
",",
"minsize",
"=",
"0",
",",
"minpct",
"=",
"0.1",
")",
":",
"lrefs",
"=",
"list",
"(",
"refs",
")",
"lrefs",
".",
"sort",
"(",
... | 39.333333 | 12.555556 |
def graph(self, ASres=None, padding=0, vspread=0.75, title="Multi-Traceroute Probe (MTR)", timestamp="", rtt=1, **kargs):
"""x.graph(ASres=conf.AS_resolver, other args):
ASres = None : Use AS default resolver => 'conf.AS_resolver'
ASres = AS_resolver() : default whois AS resolver (riswhois.ripe.net)
ASres = AS_resolver_cymru(): use whois.cymru.com whois database
ASres = AS_resolver(server="whois.ra.net")
padding: Show packets with padding as a red 3D-Box.
vspread: Vertical separation between nodes on graph.
title: Title text for the rendering graphic.
timestamp: Title Time Stamp text to appear below the Title text.
rtt: Display Round-Trip Times (msec) for Hops along trace edges.
format: Output type (svg, ps, gif, jpg, etc.), passed to dot's "-T" option.
figsize: w,h tuple in inches. See matplotlib documentation.
target: filename. If None, uses matplotlib to display.
prog: Which graphviz program to use."""
if self._asres is None:
self._asres = conf.AS_resolver
if (self._graphdef is None or # Remake the graph if there are any changes
self._graphasres != self._asres or
self._graphpadding != padding):
self.make_dot_graph(ASres, padding, vspread, title, timestamp, rtt)
return do_graph(self._graphdef, **kargs) | [
"def",
"graph",
"(",
"self",
",",
"ASres",
"=",
"None",
",",
"padding",
"=",
"0",
",",
"vspread",
"=",
"0.75",
",",
"title",
"=",
"\"Multi-Traceroute Probe (MTR)\"",
",",
"timestamp",
"=",
"\"\"",
",",
"rtt",
"=",
"1",
",",
"*",
"*",
"kargs",
")",
":... | 60.869565 | 24.173913 |
def read_pure_water_scattering_from_file(self, file_name):
"""Read the pure water scattering from a csv formatted file
:param file_name: filename and path of the csv file
"""
lg.info('Reading water scattering from file')
try:
self.b_water = self._read_iop_from_file(file_name)
except:
lg.exception('Problem reading file :: ' + file_name) | [
"def",
"read_pure_water_scattering_from_file",
"(",
"self",
",",
"file_name",
")",
":",
"lg",
".",
"info",
"(",
"'Reading water scattering from file'",
")",
"try",
":",
"self",
".",
"b_water",
"=",
"self",
".",
"_read_iop_from_file",
"(",
"file_name",
")",
"except... | 40.1 | 18.9 |
def bootstrap_jar_classfiles(self):
"""Returns a set of classfiles from the JVM bootstrap jars."""
bootstrap_jar_classfiles = set()
for jar_file in self._find_all_bootstrap_jars():
for cls in self._jar_classfiles(jar_file):
bootstrap_jar_classfiles.add(cls)
return bootstrap_jar_classfiles | [
"def",
"bootstrap_jar_classfiles",
"(",
"self",
")",
":",
"bootstrap_jar_classfiles",
"=",
"set",
"(",
")",
"for",
"jar_file",
"in",
"self",
".",
"_find_all_bootstrap_jars",
"(",
")",
":",
"for",
"cls",
"in",
"self",
".",
"_jar_classfiles",
"(",
"jar_file",
")... | 44.714286 | 5 |
def parse(self, argv=None):
"""
Parse argv of terminal
:param argv: default is sys.argv
"""
if not argv:
argv = sys.argv
elif isinstance(argv, str):
argv = argv.split()
self._argv = argv[1:]
if not self._argv:
self.validate_options()
if self._command_func:
self._command_func(**self._results)
return True
return False
cmd = self._argv[0]
if not cmd.startswith('-'):
# parse subcommands
for command in self._command_list:
if isinstance(command, Command) and command._name == cmd:
command._parent = self
return command.parse(self._argv)
_positional_index = 0
while self._argv:
arg = self._argv[0]
self._argv = self._argv[1:]
if not self.parse_options(arg):
self._args_results.append(arg)
if len(self._positional_list) > _positional_index:
# positional arguments
key = self._positional_list[_positional_index]
self._results[key] = arg
_positional_index += 1
# validate
self.validate_options()
if self._parent and isinstance(self._parent, Command):
self._parent._args_results = self._args_results
if self._command_func:
self._command_func(**self._results)
return True
return False | [
"def",
"parse",
"(",
"self",
",",
"argv",
"=",
"None",
")",
":",
"if",
"not",
"argv",
":",
"argv",
"=",
"sys",
".",
"argv",
"elif",
"isinstance",
"(",
"argv",
",",
"str",
")",
":",
"argv",
"=",
"argv",
".",
"split",
"(",
")",
"self",
".",
"_arg... | 29.941176 | 16.019608 |
def _copy_old_features(new_eopatch, old_eopatch, copy_features):
""" Copy features from old EOPatch
:param new_eopatch: New EOPatch container where the old features will be copied to
:type new_eopatch: EOPatch
:param old_eopatch: Old EOPatch container where the old features are located
:type old_eopatch: EOPatch
:param copy_features: List of tuples of type (FeatureType, str) or (FeatureType, str, str) that are copied
over into the new EOPatch. The first string is the feature name, and the second one (optional) is a new name
to be used for the feature
:type copy_features: list((FeatureType, str) or (FeatureType, str, str))
"""
if copy_features:
existing_features = set(new_eopatch.get_feature_list())
for copy_feature_type, copy_feature_name, copy_new_feature_name in copy_features:
new_feature = copy_feature_type, copy_new_feature_name
if new_feature in existing_features:
raise ValueError('Feature {} of {} already exists in the new EOPatch! '
'Use a different name!'.format(copy_new_feature_name, copy_feature_type))
else:
existing_features.add(new_feature)
new_eopatch[copy_feature_type][copy_new_feature_name] = \
old_eopatch[copy_feature_type][copy_feature_name]
return new_eopatch | [
"def",
"_copy_old_features",
"(",
"new_eopatch",
",",
"old_eopatch",
",",
"copy_features",
")",
":",
"if",
"copy_features",
":",
"existing_features",
"=",
"set",
"(",
"new_eopatch",
".",
"get_feature_list",
"(",
")",
")",
"for",
"copy_feature_type",
",",
"copy_fea... | 52.5 | 32.178571 |
def delete(self, *, if_unused=True):
"""
Delete the exchange.
This method is a :ref:`coroutine <coroutine>`.
:keyword bool if_unused: If true, the exchange will only be deleted if
it has no queues bound to it.
"""
self.sender.send_ExchangeDelete(self.name, if_unused)
yield from self.synchroniser.wait(spec.ExchangeDeleteOK)
self.reader.ready() | [
"def",
"delete",
"(",
"self",
",",
"*",
",",
"if_unused",
"=",
"True",
")",
":",
"self",
".",
"sender",
".",
"send_ExchangeDelete",
"(",
"self",
".",
"name",
",",
"if_unused",
")",
"yield",
"from",
"self",
".",
"synchroniser",
".",
"wait",
"(",
"spec",... | 34.25 | 17.25 |
def visitInlineShapeDefinition(self, ctx: ShExDocParser.InlineShapeDefinitionContext):
""" shapeDefinition: qualifier* '{' oneOfShape? '}' """
if ctx.qualifier():
for q in ctx.qualifier():
self.visit(q)
if ctx.oneOfShape():
oneof_parser = ShexOneOfShapeParser(self.context)
oneof_parser.visit(ctx.oneOfShape())
self.shape.expression = oneof_parser.expression | [
"def",
"visitInlineShapeDefinition",
"(",
"self",
",",
"ctx",
":",
"ShExDocParser",
".",
"InlineShapeDefinitionContext",
")",
":",
"if",
"ctx",
".",
"qualifier",
"(",
")",
":",
"for",
"q",
"in",
"ctx",
".",
"qualifier",
"(",
")",
":",
"self",
".",
"visit",... | 48.666667 | 14.777778 |
def trustworthiness(X, X_embedded, n_neighbors=5, precomputed=False):
"""Expresses to what extent the local structure is retained.
The trustworthiness is within [0, 1]. It is defined as
.. math::
T(k) = 1 - \frac{2}{nk (2n - 3k - 1)} \sum^n_{i=1}
\sum_{j \in U^{(k)}_i} (r(i, j) - k)
where :math:`r(i, j)` is the rank of the embedded datapoint j
according to the pairwise distances between the embedded datapoints,
:math:`U^{(k)}_i` is the set of points that are in the k nearest
neighbors in the embedded space but not in the original space.
* "Neighborhood Preservation in Nonlinear Projection Methods: An
Experimental Study"
J. Venna, S. Kaski
* "Learning a Parametric Embedding by Preserving Local Structure"
L.J.P. van der Maaten
Parameters
----------
X : array, shape (n_samples, n_features) or (n_samples, n_samples)
If the metric is 'precomputed' X must be a square distance
matrix. Otherwise it contains a sample per row.
X_embedded : array, shape (n_samples, n_components)
Embedding of the training data in low-dimensional space.
n_neighbors : int, optional (default: 5)
Number of neighbors k that will be considered.
precomputed : bool, optional (default: False)
Set this flag if X is a precomputed square distance matrix.
Returns
-------
trustworthiness : float
Trustworthiness of the low-dimensional embedding.
"""
if precomputed:
dist_X = X
else:
dist_X = pairwise_distances(X, squared=True)
dist_X_embedded = pairwise_distances(X_embedded, squared=True)
ind_X = np.argsort(dist_X, axis=1)
ind_X_embedded = np.argsort(dist_X_embedded, axis=1)[:, 1:n_neighbors + 1]
n_samples = X.shape[0]
t = 0.0
ranks = np.zeros(n_neighbors)
for i in range(n_samples):
for j in range(n_neighbors):
ranks[j] = np.where(ind_X[i] == ind_X_embedded[i, j])[0][0]
ranks -= n_neighbors
t += np.sum(ranks[ranks > 0])
t = 1.0 - t * (2.0 / (n_samples * n_neighbors *
(2.0 * n_samples - 3.0 * n_neighbors - 1.0)))
return t | [
"def",
"trustworthiness",
"(",
"X",
",",
"X_embedded",
",",
"n_neighbors",
"=",
"5",
",",
"precomputed",
"=",
"False",
")",
":",
"if",
"precomputed",
":",
"dist_X",
"=",
"X",
"else",
":",
"dist_X",
"=",
"pairwise_distances",
"(",
"X",
",",
"squared",
"="... | 35.75 | 23.133333 |
def start_market(self):
"""
start the market thread and register backtest broker thread
QAMarket 继承QATrader, QATrader 中有 trade_engine属性 , trade_engine类型是QA_Engine从 QA_Thread继承
"""
# 启动 trade_engine 线程
self.market.start()
# 注册 backtest_broker ,并且启动和它关联线程QAThread 存放在 kernels 词典中, { 'broker_name': QAThread }
#self.market.register(self.broker_name, self.broker)
self.market.connect(self.broker_name) | [
"def",
"start_market",
"(",
"self",
")",
":",
"# 启动 trade_engine 线程",
"self",
".",
"market",
".",
"start",
"(",
")",
"# 注册 backtest_broker ,并且启动和它关联线程QAThread 存放在 kernels 词典中, { 'broker_name': QAThread }",
"#self.market.register(self.broker_name, self.broker)",
"self",
".",
"mark... | 41.818182 | 22 |
def _init(self, parser):
"""Initialize/Build the ``argparse.ArgumentParser`` and subparsers.
This internal version of ``init`` is used to ensure that all
subcommands have a properly initialized parser.
Args
----
parser : argparse.ArgumentParser
The parser for this command.
"""
assert isinstance(parser, argparse.ArgumentParser)
self._init_parser(parser)
self._attach_arguments()
self._attach_subcommands()
self.initialized = True | [
"def",
"_init",
"(",
"self",
",",
"parser",
")",
":",
"assert",
"isinstance",
"(",
"parser",
",",
"argparse",
".",
"ArgumentParser",
")",
"self",
".",
"_init_parser",
"(",
"parser",
")",
"self",
".",
"_attach_arguments",
"(",
")",
"self",
".",
"_attach_sub... | 29.166667 | 17.944444 |
def get_product_url(self, force_http=False):
"""
Creates base url of product location on AWS.
:param force_http: True if HTTP base URL should be used and False otherwise
:type force_http: str
:return: url of product location
:rtype: str
"""
base_url = self.base_http_url if force_http else self.base_url
return '{}products/{}/{}'.format(base_url, self.date.replace('-', '/'), self.product_id) | [
"def",
"get_product_url",
"(",
"self",
",",
"force_http",
"=",
"False",
")",
":",
"base_url",
"=",
"self",
".",
"base_http_url",
"if",
"force_http",
"else",
"self",
".",
"base_url",
"return",
"'{}products/{}/{}'",
".",
"format",
"(",
"base_url",
",",
"self",
... | 41.363636 | 19.727273 |
def _init_content_type_params(self):
""" Return the Content-Type request header parameters
Convert all of the semi-colon separated parameters into
a dict of key/vals. If for some stupid reason duplicate
& conflicting params are present then the last one
wins.
If a particular content-type param is non-compliant
by not being a simple key=val pair then it is skipped.
If no content-type header or params are present then
return an empty dict.
:return: dict
"""
ret = {}
if self.content_type:
params = self.content_type.split(';')[1:]
for param in params:
try:
key, val = param.split('=')
ret[naked(key)] = naked(val)
except ValueError:
continue
return ret | [
"def",
"_init_content_type_params",
"(",
"self",
")",
":",
"ret",
"=",
"{",
"}",
"if",
"self",
".",
"content_type",
":",
"params",
"=",
"self",
".",
"content_type",
".",
"split",
"(",
"';'",
")",
"[",
"1",
":",
"]",
"for",
"param",
"in",
"params",
":... | 29.689655 | 20.586207 |
def unlock(self):
"""Closes the session to the database."""
if not hasattr(self, 'session'):
raise RuntimeError('Error detected! The session that you want to close does not exist any more!')
logger.debug("Closed database session of '%s'" % self._database)
self.session.close()
del self.session | [
"def",
"unlock",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'session'",
")",
":",
"raise",
"RuntimeError",
"(",
"'Error detected! The session that you want to close does not exist any more!'",
")",
"logger",
".",
"debug",
"(",
"\"Closed database... | 44.714286 | 22 |
def filename(self):
"""
Returns readable filename for a transcript
"""
client_id, __ = os.path.splitext(self.video.client_video_id)
file_name = u'{name}-{language}.{format}'.format(
name=client_id,
language=self.language_code,
format=self.file_format
).replace('\n', ' ')
return file_name | [
"def",
"filename",
"(",
"self",
")",
":",
"client_id",
",",
"__",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"self",
".",
"video",
".",
"client_video_id",
")",
"file_name",
"=",
"u'{name}-{language}.{format}'",
".",
"format",
"(",
"name",
"=",
"client_i... | 31.5 | 12.833333 |
async def factbook(self, root):
"""Region's World Factbook Entry.
Returns
-------
an :class:`ApiQuery` of str
"""
# This lib might have been a mistake, but the line below
# definitely isn't.
return html.unescape(html.unescape(root.find('FACTBOOK').text)) | [
"async",
"def",
"factbook",
"(",
"self",
",",
"root",
")",
":",
"# This lib might have been a mistake, but the line below",
"# definitely isn't.",
"return",
"html",
".",
"unescape",
"(",
"html",
".",
"unescape",
"(",
"root",
".",
"find",
"(",
"'FACTBOOK'",
")",
".... | 31 | 17.2 |
def process(self, metric):
"""
Send a metric to Riemann.
"""
event = self._metric_to_riemann_event(metric)
try:
self.client.send_event(event)
except Exception as e:
self.log.error(
"RiemannHandler: Error sending event to Riemann: %s", e) | [
"def",
"process",
"(",
"self",
",",
"metric",
")",
":",
"event",
"=",
"self",
".",
"_metric_to_riemann_event",
"(",
"metric",
")",
"try",
":",
"self",
".",
"client",
".",
"send_event",
"(",
"event",
")",
"except",
"Exception",
"as",
"e",
":",
"self",
"... | 31.6 | 11.8 |
def close(self):
"""
Disconnect from the controller.
"""
logger.info("Closing connection to %s:%s", self._host, self._port)
self._ioloop_future.cancel()
try:
yield from self._ioloop_future
except asyncio.CancelledError:
pass | [
"def",
"close",
"(",
"self",
")",
":",
"logger",
".",
"info",
"(",
"\"Closing connection to %s:%s\"",
",",
"self",
".",
"_host",
",",
"self",
".",
"_port",
")",
"self",
".",
"_ioloop_future",
".",
"cancel",
"(",
")",
"try",
":",
"yield",
"from",
"self",
... | 29.5 | 11.9 |
def create_char(self, location, bitmap):
"""Create a new character.
The HD44780 supports up to 8 custom characters (location 0-7).
:param location: The place in memory where the character is stored.
Values need to be integers between 0 and 7.
:type location: int
:param bitmap: The bitmap containing the character. This should be a
tuple of 8 numbers, each representing a 5 pixel row.
:type bitmap: tuple of int
:raises AssertionError: Raised when an invalid location is passed in or
when bitmap has an incorrect size.
Example:
.. sourcecode:: python
>>> smiley = (
... 0b00000,
... 0b01010,
... 0b01010,
... 0b00000,
... 0b10001,
... 0b10001,
... 0b01110,
... 0b00000,
... )
>>> lcd.create_char(0, smiley)
"""
assert 0 <= location <= 7, 'Only locations 0-7 are valid.'
assert len(bitmap) == 8, 'Bitmap should have exactly 8 rows.'
# Store previous position
pos = self.cursor_pos
# Write character to CGRAM
self.command(c.LCD_SETCGRAMADDR | location << 3)
for row in bitmap:
self._send_data(row)
# Restore cursor pos
self.cursor_pos = pos | [
"def",
"create_char",
"(",
"self",
",",
"location",
",",
"bitmap",
")",
":",
"assert",
"0",
"<=",
"location",
"<=",
"7",
",",
"'Only locations 0-7 are valid.'",
"assert",
"len",
"(",
"bitmap",
")",
"==",
"8",
",",
"'Bitmap should have exactly 8 rows.'",
"# Store... | 31.090909 | 19.840909 |
def render_queries(queries, sort):
"""
Returns a StringIO containing the formatted SQL queries.
_sort_ is a field to sort by.
"""
output = StringIO()
if sort == 'order':
print >>output, " time query"
for query in queries:
print >>output, " %8s %s" % (query["time"], query["sql"])
return output
if sort == 'time':
def sorter(x, y):
return cmp(x[1][1], y[1][1])
elif sort == 'queries':
def sorter(x, y):
return cmp(x[1][0], y[1][0])
else:
raise RuntimeError("Unknown sort: %s" % sort)
print >>output, " queries time query"
results = {}
for query in queries:
try:
result = results[query["sql"]]
result[0] += 1
result[1] += Decimal(query["time"])
except KeyError:
results[query["sql"]] = [1, Decimal(query["time"])]
results = sorted(results.iteritems(), cmp=sorter, reverse=True)
for result in results:
print >>output, " %8d %8.3f %s" % (
result[1][0], result[1][1], result[0]
)
return output | [
"def",
"render_queries",
"(",
"queries",
",",
"sort",
")",
":",
"output",
"=",
"StringIO",
"(",
")",
"if",
"sort",
"==",
"'order'",
":",
"print",
">>",
"output",
",",
"\" time query\"",
"for",
"query",
"in",
"queries",
":",
"print",
">>",
"output",
"... | 31.428571 | 14.685714 |
def _minigui_report_search_status(self, leaves):
"""Prints the current MCTS search status to stderr.
Reports the current search path, root node's child_Q, root node's
child_N, the most visited path in a format that can be parsed by
one of the STDERR_HANDLERS in minigui.ts.
Args:
leaves: list of leaf MCTSNodes returned by tree_search().
"""
root = self._player.get_root()
msg = {
"id": hex(id(root)),
"n": int(root.N),
"q": float(root.Q),
}
msg["childQ"] = [int(round(q * 1000)) for q in root.child_Q]
msg["childN"] = [int(n) for n in root.child_N]
ranked_children = root.rank_children()
variations = {}
for i in ranked_children[:15]:
if root.child_N[i] == 0 or i not in root.children:
break
c = coords.to_gtp(coords.from_flat(i))
child = root.children[i]
nodes = child.most_visited_path_nodes()
moves = [coords.to_gtp(coords.from_flat(m.fmove)) for m in nodes]
variations[c] = {
"n": int(root.child_N[i]),
"q": float(root.child_Q[i]),
"moves": [c] + moves,
}
if leaves:
path = []
leaf = leaves[0]
while leaf != root:
path.append(leaf.fmove)
leaf = leaf.parent
if path:
path.reverse()
variations["live"] = {
"n": int(root.child_N[path[0]]),
"q": float(root.child_Q[path[0]]),
"moves": [coords.to_gtp(coords.from_flat(m)) for m in path]
}
if variations:
msg["variations"] = variations
dbg("mg-update:%s" % json.dumps(msg, sort_keys=True)) | [
"def",
"_minigui_report_search_status",
"(",
"self",
",",
"leaves",
")",
":",
"root",
"=",
"self",
".",
"_player",
".",
"get_root",
"(",
")",
"msg",
"=",
"{",
"\"id\"",
":",
"hex",
"(",
"id",
"(",
"root",
")",
")",
",",
"\"n\"",
":",
"int",
"(",
"r... | 33.163636 | 18.709091 |
def raise_on_invalid_zip(func: Callable) -> Callable:
    """Raise an exception when there's no data (via a bad ZIP code).

    The wrapped coroutine is awaited and its payload inspected; an empty
    ``data['Location']['periods']`` list means the ZIP code returned no
    data, in which case InvalidZipError is raised instead of returning
    the useless payload.
    """
    import functools

    # BUG FIX: without wraps() the decorated coroutine reported the name
    # 'decorator', losing the wrapped function's metadata.
    @functools.wraps(func)
    async def decorator(*args: list, **kwargs: dict) -> dict:
        """Await the wrapped call and validate the returned payload."""
        data = await func(*args, **kwargs)
        if not data['Location']['periods']:
            raise InvalidZipError('No data returned for ZIP code')
        return data
    return decorator
"def",
"raise_on_invalid_zip",
"(",
"func",
":",
"Callable",
")",
"->",
"Callable",
":",
"async",
"def",
"decorator",
"(",
"*",
"args",
":",
"list",
",",
"*",
"*",
"kwargs",
":",
"dict",
")",
"->",
"dict",
":",
"\"\"\"Decorate.\"\"\"",
"data",
"=",
"awai... | 39.8 | 14.6 |
def toggle_eventtype(self):
        """Apply the master "check all" state to every event-type button.

        Reads the checked state of the check-all control once and pushes
        it onto each button in the event type scroll area.
        """
        target_state = self.check_all_eventtype.isChecked()
        for button in self.idx_eventtype_list:
            button.setChecked(target_state)
"def",
"toggle_eventtype",
"(",
"self",
")",
":",
"check",
"=",
"self",
".",
"check_all_eventtype",
".",
"isChecked",
"(",
")",
"for",
"btn",
"in",
"self",
".",
"idx_eventtype_list",
":",
"btn",
".",
"setChecked",
"(",
"check",
")"
] | 38.666667 | 11.166667 |
def intersection(L1, L2):
    """Compute the intersection point of two lines via Cramer's rule.

    Each line is given by its coefficients ``(A, B, C)`` for the equation
    ``A*x + B*y = C``.

    Args:
        L1: coefficients (A, B, C) of the first line
        L2: coefficients (A, B, C) of the second line

    Returns:
        (x, y) of the intersection point, or ``False`` when the system's
        determinant is zero (parallel or coincident lines).
    """
    det = L1[0] * L2[1] - L1[1] * L2[0]
    if det == 0:
        return False
    det_x = L1[2] * L2[1] - L1[1] * L2[2]
    det_y = L1[0] * L2[2] - L1[2] * L2[0]
    return det_x / det, det_y / det
"def",
"intersection",
"(",
"L1",
",",
"L2",
")",
":",
"D",
"=",
"L1",
"[",
"0",
"]",
"*",
"L2",
"[",
"1",
"]",
"-",
"L1",
"[",
"1",
"]",
"*",
"L2",
"[",
"0",
"]",
"Dx",
"=",
"L1",
"[",
"2",
"]",
"*",
"L2",
"[",
"1",
"]",
"-",
"L1",
... | 25.526316 | 16.157895 |
def numToDigits(num, places):
    """
    Render *num* as a string of exactly *places* characters: shorter
    numbers are left-padded with zeros, longer ones keep only their
    trailing *places* characters.
    """
    s = str(num)
    deficit = places - len(s)
    if deficit > 0:
        # Too short: left-pad with zeros up to the requested width.
        return "0" * deficit + s
    if deficit < 0:
        # Too long: keep only the trailing characters.
        return s[-deficit:]
    return s
"def",
"numToDigits",
"(",
"num",
",",
"places",
")",
":",
"s",
"=",
"str",
"(",
"num",
")",
"if",
"len",
"(",
"s",
")",
"<",
"places",
":",
"return",
"(",
"\"0\"",
"*",
"(",
"places",
"-",
"len",
"(",
"s",
")",
")",
")",
"+",
"s",
"elif",
... | 23.818182 | 13.272727 |
def main(argv=None):
  """Routine to page text or determine window size via command line.

  Args:
    argv: full argument list; defaults to sys.argv, argv[1:] is parsed.

  Returns:
    0 for help/size-only invocations, None after paging text.

  Raises:
    Usage: if options cannot be parsed or are invalid.
  """
  if argv is None:
    argv = sys.argv
  try:
    # BUG FIX: the long option was registered as '--nodelay', but the
    # handler below only accepts '--delay', so '--nodelay' always raised
    # Usage('Invalid arguments.'). Register the name that is handled.
    opts, args = getopt.getopt(argv[1:], 'dhs', ['delay', 'help', 'size'])
  except getopt.error as msg:
    raise Usage(msg)
  # Print usage and return, regardless of presence of other args.
  for opt, _ in opts:
    if opt in ('-h', '--help'):
      print(__doc__)
      print(help_msg)
      return 0
  isdelay = False
  for opt, _ in opts:
    # Prints the size of the terminal and returns.
    # Mutually exclusive to the paging of text and overrides that behaviour.
    if opt in ('-s', '--size'):
      print('Length: %d, Width: %d' % TerminalSize())
      return 0
    elif opt in ('-d', '--delay'):
      isdelay = True
    else:
      raise Usage('Invalid arguments.')
  # Page text supplied in either specified file or stdin.
  if len(args) == 1:
    with open(args[0]) as f:
      fd = f.read()
  else:
    fd = sys.stdin.read()
  Pager(fd, delay=isdelay).Page()
"def",
"main",
"(",
"argv",
"=",
"None",
")",
":",
"if",
"argv",
"is",
"None",
":",
"argv",
"=",
"sys",
".",
"argv",
"try",
":",
"opts",
",",
"args",
"=",
"getopt",
".",
"getopt",
"(",
"argv",
"[",
"1",
":",
"]",
",",
"'dhs'",
",",
"[",
"'nod... | 25.894737 | 22.131579 |
def map(cls, visitor, value, value_type):
        """Visit ``value`` according to ``value_type``; common visitor API.

        Args:
            visitor (Visitor): options instance holding the unpack/apply/
                collect/reduce callbacks as well as traversal & filtering
                options.
            value (Object): object being visited.
            value_type (RecordType): type object controlling the visiting.

        Returns:
            the reduced visit result, ``visitor.apply`` for plain values,
            or ``StopVisiting.return_value`` when unpack short-circuits.
        """
        unpacked = visitor.unpack(value, value_type, visitor)

        # unpack() may short-circuit the whole traversal.
        if unpacked == cls.StopVisiting or isinstance(
            unpacked, cls.StopVisiting
        ):
            return unpacked.return_value

        # unpack() returns either (record-props, collection-items) or just
        # the record props.
        if isinstance(unpacked, tuple):
            record_part, collection_part = unpacked
        else:
            record_part, collection_part = unpacked, None

        # Recurse into the collection items, if any.
        if collection_part:
            mapped_coll = visitor.collect(
                cls.map_collection(visitor, collection_part, value_type),
                value_type,
                visitor,
            )
        else:
            mapped_coll = None

        # Recurse into the regular properties.
        if record_part:
            mapped_props = cls.map_record(visitor, record_part, value_type)
        elif mapped_coll is None:
            # Neither properties nor a collection: treat as a plain value.
            return visitor.apply(value, None, visitor)
        else:
            mapped_props = None

        return visitor.reduce(
            mapped_props, mapped_coll, value_type, visitor,
        )
"def",
"map",
"(",
"cls",
",",
"visitor",
",",
"value",
",",
"value_type",
")",
":",
"unpacked",
"=",
"visitor",
".",
"unpack",
"(",
"value",
",",
"value_type",
",",
"visitor",
")",
"if",
"unpacked",
"==",
"cls",
".",
"StopVisiting",
"or",
"isinstance",
... | 30.653061 | 18.469388 |
def check_codes(self, expected_codes, received_code, info):
        """
        Raise unless the received code matches one of the expected codes.

        :param expected_codes: tuple of expected codes
        :type expected_codes: :py:class:`tuple`

        :param received_code: received code for matching
        :type received_code: :py:class:`aioftp.Code`

        :param info: list of response lines from server
        :type info: :py:class:`list`

        :raises aioftp.StatusCodeError: if received code does not match any
            expected code
        """
        matched = any(received_code.matches(code) for code in expected_codes)
        if not matched:
            raise errors.StatusCodeError(expected_codes, received_code, info)
"def",
"check_codes",
"(",
"self",
",",
"expected_codes",
",",
"received_code",
",",
"info",
")",
":",
"if",
"not",
"any",
"(",
"map",
"(",
"received_code",
".",
"matches",
",",
"expected_codes",
")",
")",
":",
"raise",
"errors",
".",
"StatusCodeError",
"(... | 37.444444 | 20.555556 |
def present(
        name,
        user=None,
        fingerprint=None,
        key=None,
        port=None,
        enc=None,
        config=None,
        hash_known_hosts=True,
        timeout=5,
        fingerprint_hash_type=None):
    '''
    Verifies that the specified host is known by the specified user
    On many systems, specifically those running with openssh 4 or older, the
    ``enc`` option must be set, only openssh 5 and above can detect the key
    type.
    name
        The name of the remote host (e.g. "github.com")
        Note that only a single hostname is supported, if foo.example.com and
        bar.example.com have the same host you will need two separate Salt
        States to represent them.
    user
        The user who owns the ssh authorized keys file to modify
    fingerprint
        The fingerprint of the key which must be present in the known_hosts
        file (optional if key specified)
    key
        The public key which must be present in the known_hosts file
        (optional if fingerprint specified)
    port
        optional parameter, port which will be used to when requesting the
        public key from the remote host, defaults to port 22.
    enc
        Defines what type of key is being used, can be ed25519, ecdsa ssh-rsa
        or ssh-dss
    config
        The location of the authorized keys file relative to the user's home
        directory, defaults to ".ssh/known_hosts". If no user is specified,
        defaults to "/etc/ssh/ssh_known_hosts". If present, must be an
        absolute path when a user is not specified.
    hash_known_hosts : True
        Hash all hostnames and addresses in the known hosts file.
    timeout : int
        Set the timeout for connection attempts. If ``timeout`` seconds have
        elapsed since a connection was initiated to a host or since the last
        time anything was read from that host, then the connection is closed
        and the host in question considered unavailable. Default is 5 seconds.
        .. versionadded:: 2016.3.0
    fingerprint_hash_type
        The public key fingerprint hash type that the public key fingerprint
        was originally hashed with. This defaults to ``sha256`` if not specified.
        .. versionadded:: 2016.11.4
        .. versionchanged:: 2017.7.0: default changed from ``md5`` to ``sha256``
    '''
    # In test (dry-run) mode 'result' starts as None; otherwise assume success.
    ret = {'name': name,
           'changes': {},
           'result': None if __opts__['test'] else True,
           'comment': ''}
    # Default known_hosts location: system-wide file when no user is given,
    # otherwise the per-user file relative to the user's home directory.
    if not user:
        config = config or '/etc/ssh/ssh_known_hosts'
    else:
        config = config or '.ssh/known_hosts'
    if not user and not os.path.isabs(config):
        comment = 'If not specifying a "user", specify an absolute "config".'
        ret['result'] = False
        return dict(ret, comment=comment)
    if __opts__['test']:
        # NOTE(review): the key/fingerprint argument-validation checks below
        # only run in test mode here; confirm that is intentional (upstream
        # Salt performs them unconditionally).
        if key and fingerprint:
            comment = 'Specify either "key" or "fingerprint", not both.'
            ret['result'] = False
            return dict(ret, comment=comment)
        elif key and not enc:
            comment = 'Required argument "enc" if using "key" argument.'
            ret['result'] = False
            return dict(ret, comment=comment)
        # Dry run: report what would happen without modifying the file.
        try:
            result = __salt__['ssh.check_known_host'](user, name,
                                                      key=key,
                                                      fingerprint=fingerprint,
                                                      config=config,
                                                      port=port,
                                                      fingerprint_hash_type=fingerprint_hash_type)
        except CommandNotFoundError as err:
            ret['result'] = False
            ret['comment'] = 'ssh.check_known_host error: {0}'.format(err)
            return ret
        if result == 'exists':
            comment = 'Host {0} is already in {1}'.format(name, config)
            ret['result'] = True
            return dict(ret, comment=comment)
        elif result == 'add':
            comment = 'Key for {0} is set to be added to {1}'.format(name,
                                                                     config)
            return dict(ret, comment=comment)
        else:  # 'update'
            comment = 'Key for {0} is set to be updated in {1}'.format(name,
                                                                       config)
            return dict(ret, comment=comment)
    # Apply mode: write/refresh the entry in the known_hosts file.
    result = __salt__['ssh.set_known_host'](
        user=user,
        hostname=name,
        fingerprint=fingerprint,
        key=key,
        port=port,
        enc=enc,
        config=config,
        hash_known_hosts=hash_known_hosts,
        timeout=timeout,
        fingerprint_hash_type=fingerprint_hash_type)
    if result['status'] == 'exists':
        return dict(ret,
                    comment='{0} already exists in {1}'.format(name, config))
    elif result['status'] == 'error':
        return dict(ret, result=False, comment=result['error'])
    else:  # 'updated'
        if key:
            new_key = result['new'][0]['key']
            return dict(ret,
                        changes={'old': result['old'], 'new': result['new']},
                        comment='{0}\'s key saved to {1} (key: {2})'.format(
                            name, config, new_key))
        else:
            fingerprint = result['new'][0]['fingerprint']
            return dict(ret,
                        changes={'old': result['old'], 'new': result['new']},
                        comment='{0}\'s key saved to {1} (fingerprint: {2})'.format(
                            name, config, fingerprint))
"def",
"present",
"(",
"name",
",",
"user",
"=",
"None",
",",
"fingerprint",
"=",
"None",
",",
"key",
"=",
"None",
",",
"port",
"=",
"None",
",",
"enc",
"=",
"None",
",",
"config",
"=",
"None",
",",
"hash_known_hosts",
"=",
"True",
",",
"timeout",
... | 37.666667 | 24.183673 |
def filter(self, term_doc_matrix):
        '''
        Remove low-PMI bigrams and infrequent terms from a term-doc matrix.

        Parameters
        ----------
        term_doc_matrix : TermDocMatrix

        Returns
        -------
        TermDocMatrix pmi-filtered term doc matrix
        '''
        term_freqs = term_doc_matrix.get_term_freq_df()
        if len(term_freqs) == 0:
            return term_doc_matrix
        # Terms to drop: bigrams below the PMI threshold plus anything whose
        # total frequency across categories is under the minimum.
        to_remove = set(
            get_low_pmi_bigrams(self._threshold_coef, term_freqs).index
            | term_freqs[term_freqs.sum(axis=1) < self._min_freq].index
        )
        filtered = term_doc_matrix.remove_terms(to_remove)
        try:
            # Recomputing the frequency table fails if filtering emptied out
            # an entire category.
            filtered.get_term_freq_df()
        except ValueError:
            raise AtLeastOneCategoryHasNoTermsException()
        return filtered
"def",
"filter",
"(",
"self",
",",
"term_doc_matrix",
")",
":",
"df",
"=",
"term_doc_matrix",
".",
"get_term_freq_df",
"(",
")",
"if",
"len",
"(",
"df",
")",
"==",
"0",
":",
"return",
"term_doc_matrix",
"low_pmi_bigrams",
"=",
"get_low_pmi_bigrams",
"(",
"se... | 29.619048 | 22.095238 |
def com_google_fonts_check_ttx_roundtrip(font):
  """Checking with fontTools.ttx

  Saves the font to XML via ttx, imports it back, and yields FAIL/INFO
  results for any messages ttx emitted in either direction; yields PASS
  when the round trip is clean.
  """
  from fontTools import ttx
  import sys
  ttFont = ttx.TTFont(font)
  failed = False

  class TTXLogger:
    # Captures everything written to stdout/stderr while active.
    msgs = []

    def __init__(self):
      self.original_stderr = sys.stderr
      self.original_stdout = sys.stdout
      sys.stderr = self
      sys.stdout = self

    def write(self, data):
      if data not in self.msgs:
        self.msgs.append(data)

    def restore(self):
      sys.stderr = self.original_stderr
      sys.stdout = self.original_stdout

  from xml.parsers.expat import ExpatError
  xml_file = font + ".xml"
  logger = TTXLogger()
  try:
    ttFont.saveXML(xml_file)
    export_error_msgs = logger.msgs

    if len(export_error_msgs):
      failed = True
      yield INFO, ("While converting TTF into an XML file,"
                   " ttx emited the messages listed below.")
      for msg in export_error_msgs:
        yield FAIL, msg.strip()

    f = ttx.TTFont()
    f.importXML(font + ".xml")
    import_error_msgs = [msg for msg in logger.msgs if msg not in export_error_msgs]

    if len(import_error_msgs):
      failed = True
      yield INFO, ("While importing an XML file and converting"
                   " it back to TTF, ttx emited the messages"
                   " listed below.")
      for msg in import_error_msgs:
        yield FAIL, msg.strip()
  except ExpatError as e:
    failed = True
    yield FAIL, ("TTX had some problem parsing the generated XML file."
                 " This most likely mean there's some problem in the font."
                 " Please inspect the output of ttx in order to find more"
                 " on what went wrong. A common problem is the presence of"
                 " control characteres outside the accepted character range"
                 " as defined in the XML spec. FontTools has got a bug which"
                 " causes TTX to generate corrupt XML files in those cases."
                 " So, check the entries of the name table and remove any"
                 " control chars that you find there."
                 " The full ttx error message was:\n"
                 "======\n{}\n======".format(e))
  finally:
    # BUG FIX: always restore stdout/stderr. Previously restore() was only
    # reached on the success path, so a ttx failure left both streams
    # permanently redirected into the (dead) TTXLogger.
    logger.restore()

  if not failed:
    yield PASS, "Hey! It all looks good!"

  # and then we need to cleanup our mess...
  if os.path.exists(xml_file):
    os.remove(xml_file)
"def",
"com_google_fonts_check_ttx_roundtrip",
"(",
"font",
")",
":",
"from",
"fontTools",
"import",
"ttx",
"import",
"sys",
"ttFont",
"=",
"ttx",
".",
"TTFont",
"(",
"font",
")",
"failed",
"=",
"False",
"class",
"TTXLogger",
":",
"msgs",
"=",
"[",
"]",
"d... | 32.1 | 19.371429 |
def disco_loop(opc, version, queue, real_out, dup_lines=False,
               show_bytes=False):
    """Disassemble every code object in *queue*, breadth-first.

    Each disassembled code object may reference further code objects in
    its co_consts; those are appended to the queue so they get handled
    too. Discovery order (not definition-before-use order, which a
    disassembler's own output format would require) keeps overall memory
    consumption lower at run time.

    Args:
        opc: opcode table to disassemble against.
        version: bytecode version being disassembled.
        queue: deque of code objects still to process.
        real_out: stream receiving the formatted disassembly.
        dup_lines: passed through to Bytecode.
        show_bytes: also show raw instruction bytes.
    """
    while queue:
        code_obj = queue.popleft()
        # The module-level code object needs no separate header.
        if code_obj.co_name not in ('<module>', '?'):
            real_out.write("\n" + format_code_info(code_obj, version) + "\n")
        listing = Bytecode(code_obj, opc, dup_lines=dup_lines)
        real_out.write(listing.dis(show_bytes=show_bytes) + "\n")
        for const in code_obj.co_consts:
            if iscode(const):
                queue.append(const)
"def",
"disco_loop",
"(",
"opc",
",",
"version",
",",
"queue",
",",
"real_out",
",",
"dup_lines",
"=",
"False",
",",
"show_bytes",
"=",
"False",
")",
":",
"while",
"len",
"(",
"queue",
")",
">",
"0",
":",
"co",
"=",
"queue",
".",
"popleft",
"(",
")... | 39.16 | 19.32 |
def decaying(start, stop, decay):
    """Yield an infinite series of linearly decaying values.

    Values begin at float(start) and drop by *decay* each step, clamped
    so they never fall below *stop*.
    """
    value = float(start)
    while True:
        yield value if value > stop else stop
        value -= decay
"def",
"decaying",
"(",
"start",
",",
"stop",
",",
"decay",
")",
":",
"curr",
"=",
"float",
"(",
"start",
")",
"while",
"True",
":",
"yield",
"max",
"(",
"curr",
",",
"stop",
")",
"curr",
"-=",
"(",
"decay",
")"
] | 26.571429 | 16.714286 |
def _construct_result(left, result, index, name, dtype=None):
"""
If the raw op result has a non-None name (e.g. it is an Index object) and
the name argument is None, then passing name to the constructor will
not be enough; we still need to override the name attribute.
"""
out = left._constructor(result, index=index, dtype=dtype)
out = out.__finalize__(left)
out.name = name
return out | [
"def",
"_construct_result",
"(",
"left",
",",
"result",
",",
"index",
",",
"name",
",",
"dtype",
"=",
"None",
")",
":",
"out",
"=",
"left",
".",
"_constructor",
"(",
"result",
",",
"index",
"=",
"index",
",",
"dtype",
"=",
"dtype",
")",
"out",
"=",
... | 41.4 | 19 |
def plotdata(self, key, part='re', scale='log', steps=50):
        """Build plottable arrays for a parameter's running.

        Returns a tuple of arrays (x, y) suitable for plt.plot, where x is
        the scale in GeV and y is the parameter of interest.

        Parameters:
        - key: dictionary key of the parameter to be plotted (e.g. a WCxf
          coefficient name or a SM parameter like 'g')
        - part: 're' (default) for the real part, 'im' for the imaginary part
        - scale: 'log' for logarithmically spaced x steps, 'linear' for
          linearly spaced ones
        - steps: number of x steps to take (default: 50)
        """
        if scale == 'log':
            grid = np.logspace(log(self.scale_min),
                               log(self.scale_max),
                               steps,
                               base=e)
        elif scale == 'linear':
            grid = np.linspace(self.scale_min,
                               self.scale_max,
                               steps)
        # Evaluate the running at every grid point, then pull out `key`.
        values = np.array([d[key] for d in self.fun(grid)])
        if part == 're':
            return grid, values.real
        elif part == 'im':
            return grid, values.imag
"def",
"plotdata",
"(",
"self",
",",
"key",
",",
"part",
"=",
"'re'",
",",
"scale",
"=",
"'log'",
",",
"steps",
"=",
"50",
")",
":",
"if",
"scale",
"==",
"'log'",
":",
"x",
"=",
"np",
".",
"logspace",
"(",
"log",
"(",
"self",
".",
"scale_min",
... | 39.428571 | 14.571429 |
def color_pipeline(raw, setup, bpp=12):
    """Convert a raw Bayer frame into a float RGB image using camera setup.

    Processing order from:
    http://www.visionresearch.com/phantomzone/viewtopic.php?f=20&t=572#p3884

    Only white balance (2), debayering (3), a color matrix (4) and gamma (9)
    are actually applied; the other numbered steps just print the relevant
    setup fields for inspection.
    """
    # 1. Offset the raw image by the amount in flare
    print("fFlare: ", setup.fFlare)
    # 2. White balance the raw picture using the white balance component of cmatrix
    BayerPatterns = {3: "gbrg", 4: "rggb"}
    pattern = BayerPatterns[setup.CFA]
    raw = whitebalance_raw(raw.astype(np.float32), setup, pattern).astype(np.uint16)
    # 3. Debayer the image
    rgb_image = cv2.cvtColor(raw, cv2.COLOR_BAYER_GB2RGB)
    # convert to float
    rgb_image = rgb_image.astype(np.float32) / (2 ** bpp - 1)
    # return rgb_image
    # 4. Apply the color correction matrix component of cmatrix
    #
    # From the documentation:
    # ...should decompose this
    # matrix in two components: a diagonal one with the white balance to be
    # applied before interpolation and a normalized one to be applied after
    # interpolation.
    cmCalib = np.asarray(setup.cmCalib).reshape(3, 3)
    # normalize matrix
    ccm = cmCalib / cmCalib.sum(axis=1)[:, np.newaxis]
    # or should it be normalized this way?
    ccm2 = cmCalib.copy()
    ccm2[0][0] = 1 - ccm2[0][1] - ccm2[0][2]
    ccm2[1][1] = 1 - ccm2[1][0] - ccm2[1][2]
    ccm2[2][2] = 1 - ccm2[2][0] - ccm2[2][1]
    print("cmCalib", cmCalib)
    print("ccm: ", ccm)
    print("ccm2", ccm2)
    # NOTE(review): neither ccm nor ccm2 is used below; the hard-coded matrix
    # m is applied instead — confirm whether that is intentional.
    m = np.asarray(
        [
            1.4956012040024347,
            -0.5162879962189262,
            0.020686792216491584,
            -0.09884672458400766,
            0.757682383759598,
            0.34116434082440983,
            -0.04121405804689133,
            -0.5527871476076358,
            1.5940012056545272,
        ]
    ).reshape(3, 3)
    rgb_image = np.dot(rgb_image, m.T)
    # rgb_reshaped = rgb_image.reshape((rgb_image.shape[0] * rgb_image.shape[1], rgb_image.shape[2]))
    # rgb_image = np.dot(m, rgb_reshaped.T).T.reshape(rgb_image.shape)
    # 5. Apply the user RGB matrix umatrix
    # cmUser = np.asarray(setup.cmUser).reshape(3, 3)
    # rgb_image = np.dot(rgb_image, cmUser.T)
    # 6. Offset the image by the amount in offset
    print("fOffset: ", setup.fOffset)
    # 7. Apply the global gain
    print("fGain: ", setup.fGain)
    # 8. Apply the per-component gains red, green, blue
    print("fGainR, fGainG, fGainB: ", setup.fGainR, setup.fGainG, setup.fGainB)
    # 9. Apply the gamma curves; the green channel uses gamma, red uses gamma + rgamma and blue uses gamma + bgamma
    print("fGamma, fGammaR, fGammaB: ", setup.fGamma, setup.fGammaR, setup.fGammaB)
    rgb_image = apply_gamma(rgb_image, setup)
    # 10. Apply the tone curve to each of the red, green, blue channels
    fTone = np.asarray(setup.fTone)
    print(setup.ToneLabel, setup.TonePoints, fTone)
    # 11. Add the pedestals to each color channel, and linearly rescale to keep the white point the same.
    print("fPedestalR, fPedestalG, fPedestalB: ", setup.fPedestalR, setup.fPedestalG, setup.fPedestalB)
    # 12. Convert to YCrCb using REC709 coefficients
    # 13. Scale the Cr and Cb components by chroma.
    print("fChroma: ", setup.fChroma)
    # 14. Rotate the Cr and Cb components around the origin in the CrCb plane by hue degrees.
    print("fHue: ", setup.fHue)
    return rgb_image
"def",
"color_pipeline",
"(",
"raw",
",",
"setup",
",",
"bpp",
"=",
"12",
")",
":",
"# 1. Offset the raw image by the amount in flare",
"print",
"(",
"\"fFlare: \"",
",",
"setup",
".",
"fFlare",
")",
"# 2. White balance the raw picture using the white balance component of c... | 33.989474 | 23.368421 |
def _getarray(loci, tree):
"""
parse the loci file list and return presence/absence matrix
ordered by the tips on the tree
"""
## order tips
tree.ladderize()
## get tip names
snames = tree.get_leaf_names()
## make an empty matrix
lxs = np.zeros((len(snames), len(loci)), dtype=np.int)
## fill the matrix
for loc in xrange(len(loci)):
for seq in loci[loc].split("\n")[:-1]:
lxs[snames.index(seq.split()[0]), loc] += 1
return lxs, snames | [
"def",
"_getarray",
"(",
"loci",
",",
"tree",
")",
":",
"## order tips",
"tree",
".",
"ladderize",
"(",
")",
"## get tip names",
"snames",
"=",
"tree",
".",
"get_leaf_names",
"(",
")",
"## make an empty matrix",
"lxs",
"=",
"np",
".",
"zeros",
"(",
"(",
"l... | 23.47619 | 19.333333 |
def createResource(self):
        """
        When invoked by L{PrefixURLMixin}, return a L{websharing.SharingIndex}
        for my application.

        A legacy L{IPublicPage} adapter, if present, takes precedence but
        triggers a deprecation warning.
        """
        legacy_page = ixmantissa.IPublicPage(self.application, None)
        if legacy_page is None:
            return SharingIndex(self.application.open())
        warn(
            "Use the sharing system to provide public pages, not IPublicPage",
            category=DeprecationWarning,
            stacklevel=2)
        return legacy_page.getResource()
"def",
"createResource",
"(",
"self",
")",
":",
"pp",
"=",
"ixmantissa",
".",
"IPublicPage",
"(",
"self",
".",
"application",
",",
"None",
")",
"if",
"pp",
"is",
"not",
"None",
":",
"warn",
"(",
"\"Use the sharing system to provide public pages, not IPublicPage\""... | 37.230769 | 14.769231 |
def predict(self, X):
        """
        Predict with the model for all the MCMC samples.

        Returns posterior means and standard deviations at X — note this
        differs from GPy, where variances are returned.

        Parameters:
            X: input locations, shape (n, d); a 1-d array is treated as a
               single point and promoted to shape (1, d).

        Returns:
            (means, stds): lists with one (n, 1) array per HMC sample.
        """
        if X.ndim == 1:
            X = X[None, :]
        # Remember the current parameters so the model can be restored.
        ps = self.model.param_array.copy()
        means = []
        stds = []
        try:
            for s in self.hmc_samples:
                if self.model._fixes_ is None:
                    self.model[:] = s
                else:
                    self.model[self.model._fixes_] = s
                self.model._trigger_params_changed()
                m, v = self.model.predict(X)
                means.append(m)
                # Clip away tiny negative variances before the square root.
                stds.append(np.sqrt(np.clip(v, 1e-10, np.inf)))
        finally:
            # BUG FIX: restore the original parameters even when predict()
            # raises, so a failure cannot leave the model mutated.
            self.model.param_array[:] = ps
            self.model._trigger_params_changed()
        return means, stds
return means, stds | [
"def",
"predict",
"(",
"self",
",",
"X",
")",
":",
"if",
"X",
".",
"ndim",
"==",
"1",
":",
"X",
"=",
"X",
"[",
"None",
",",
":",
"]",
"ps",
"=",
"self",
".",
"model",
".",
"param_array",
".",
"copy",
"(",
")",
"means",
"=",
"[",
"]",
"stds"... | 37.761905 | 17.095238 |
def create_search_index():
    """Build the people-search caches from blockchain profile data.

    Walks every user in the namespace collection, extracts name, twitter
    handle, openbazaar id and username into per-user search profiles, then
    writes deduplicated name/twitter/username lists into MongoDB
    collections used as caches.
    """
    # create people name cache
    counter = 0
    people_names = []
    twitter_handles = []
    usernames = []
    log.debug("-" * 5)
    log.debug("Creating search index")
    for user in namespace.find():
        # the profile/info to be inserted
        search_profile = {}
        counter += 1
        if(counter % 1000 == 0):
            log.debug("Processed entries: %s" % counter)
        # Skip entries whose username fails validation.
        if validUsername(user['username']):
            pass
        else:
            continue
        profile = get_json(user['profile'])
        hasBazaarId=False
        # search for openbazaar id in the profile
        if 'account' in profile:
            for accounts in profile['account']:
                if accounts['service'] == 'openbazaar':
                    hasBazaarId = True
                    search_profile['openbazaar']=accounts['identifier']
        if (hasBazaarId == False):
            search_profile['openbazaar'] = None
        if 'name' in profile:
            try:
                name = profile['name']
            except:
                continue
            # 'name' may be a dict with a 'formatted' key or a plain string.
            try:
                name = name['formatted'].lower()
            except:
                name = name.lower()
            people_names.append(name)
            search_profile['name'] = name
        else:
            search_profile['name'] = None
        if 'twitter' in profile:
            twitter_handle = profile['twitter']
            # Likewise 'twitter' may be a dict with 'username' or a string.
            try:
                twitter_handle = twitter_handle['username'].lower()
            except:
                try:
                    twitter_handle = profile['twitter'].lower()
                except:
                    continue
            twitter_handles.append(twitter_handle)
            search_profile['twitter_handle'] = twitter_handle
        else:
            search_profile['twitter_handle'] = None
        search_profile['fullyQualifiedName'] = user['fqu']
        search_profile['username'] = user['username']
        usernames.append(user['fqu'])
        search_profile['profile'] = profile
        search_profiles.save(search_profile)
    # dedup names
    people_names = list(set(people_names))
    people_names = {'name': people_names}
    twitter_handles = list(set(twitter_handles))
    twitter_handles = {'twitter_handle': twitter_handles}
    usernames = list(set(usernames))
    usernames = {'username': usernames}
    # save final dedup results to mongodb (using it as a cache)
    people_cache.save(people_names)
    twitter_cache.save(twitter_handles)
    username_cache.save(usernames)
    optimize_db()
    log.debug('Created name/twitter/username search index')
"def",
"create_search_index",
"(",
")",
":",
"# create people name cache",
"counter",
"=",
"0",
"people_names",
"=",
"[",
"]",
"twitter_handles",
"=",
"[",
"]",
"usernames",
"=",
"[",
"]",
"log",
".",
"debug",
"(",
"\"-\"",
"*",
"5",
")",
"log",
".",
"de... | 26.237624 | 19.950495 |
def extract_args(cls, *args):
        """
        Accept a model class and a crudbuilder — either one, or both, in
        any order — and return them as a ``[model, crudbuilder]`` pair.
        A missing argument comes back as None.
        """
        model = None
        crudbuilder = None
        for candidate in args:
            if issubclass(candidate, models.Model):
                model = candidate
            else:
                crudbuilder = candidate
        return [model, crudbuilder]
"def",
"extract_args",
"(",
"cls",
",",
"*",
"args",
")",
":",
"model",
"=",
"None",
"crudbuilder",
"=",
"None",
"for",
"arg",
"in",
"args",
":",
"if",
"issubclass",
"(",
"arg",
",",
"models",
".",
"Model",
")",
":",
"model",
"=",
"arg",
"else",
":... | 26.666667 | 15.733333 |
def print_warning(cls):
        """Print the missing-progress-bar warning, at most once per class.
        """
        if cls.warning:
            return
        cls.warning = True
        print('Can\'t create progress bar:', str(TQDM_IMPORT_ERROR),
              file=sys.stderr)
"def",
"print_warning",
"(",
"cls",
")",
":",
"if",
"not",
"cls",
".",
"warning",
":",
"cls",
".",
"warning",
"=",
"True",
"print",
"(",
"'Can\\'t create progress bar:'",
",",
"str",
"(",
"TQDM_IMPORT_ERROR",
")",
",",
"file",
"=",
"sys",
".",
"stderr",
... | 38.142857 | 11.142857 |
def init():
    '''
    Return the list of svn remotes and their configuration information
    '''
    # Base cache directory for all svnfs checkouts.
    bp_ = os.path.join(__opts__['cachedir'], 'svnfs')
    new_remote = False
    repos = []
    # Global per-remote defaults, taken from the master's svnfs_* options.
    per_remote_defaults = {}
    for param in PER_REMOTE_OVERRIDES:
        per_remote_defaults[param] = \
            six.text_type(__opts__['svnfs_{0}'.format(param)])
    for remote in __opts__['svnfs_remotes']:
        repo_conf = copy.deepcopy(per_remote_defaults)
        if isinstance(remote, dict):
            # Dict form: the remote URL maps to per-remote overrides.
            repo_url = next(iter(remote))
            per_remote_conf = dict(
                [(key, six.text_type(val)) for key, val in
                 six.iteritems(salt.utils.data.repack_dictlist(remote[repo_url]))]
            )
            if not per_remote_conf:
                log.error(
                    'Invalid per-remote configuration for remote %s. If no '
                    'per-remote parameters are being specified, there may be '
                    'a trailing colon after the URL, which should be removed. '
                    'Check the master configuration file.', repo_url
                )
                _failhard()
            per_remote_errors = False
            for param in (x for x in per_remote_conf
                          if x not in PER_REMOTE_OVERRIDES):
                log.error(
                    'Invalid configuration parameter \'%s\' for remote %s. '
                    'Valid parameters are: %s. See the documentation for '
                    'further information.',
                    param, repo_url, ', '.join(PER_REMOTE_OVERRIDES)
                )
                per_remote_errors = True
            if per_remote_errors:
                _failhard()
            repo_conf.update(per_remote_conf)
        else:
            repo_url = remote
        if not isinstance(repo_url, six.string_types):
            log.error(
                'Invalid svnfs remote %s. Remotes must be strings, you may '
                'need to enclose the URL in quotes', repo_url
            )
            _failhard()
        try:
            repo_conf['mountpoint'] = salt.utils.url.strip_proto(
                repo_conf['mountpoint']
            )
        except TypeError:
            # mountpoint not specified
            pass
        # Each remote gets a cache subdirectory keyed by the hash of its URL.
        hash_type = getattr(hashlib, __opts__.get('hash_type', 'md5'))
        repo_hash = hash_type(repo_url).hexdigest()
        rp_ = os.path.join(bp_, repo_hash)
        if not os.path.isdir(rp_):
            os.makedirs(rp_)
        if not os.listdir(rp_):
            # Only attempt a new checkout if the directory is empty.
            try:
                CLIENT.checkout(repo_url, rp_)
                # NOTE(review): the bare path is appended here in addition to
                # repo_conf below — confirm both entries are intended.
                repos.append(rp_)
                new_remote = True
            except pysvn._pysvn.ClientError as exc:
                log.error(
                    'Failed to initialize svnfs remote \'%s\': %s',
                    repo_url, exc
                )
                _failhard()
        else:
            # Confirm that there is an svn checkout at the necessary path by
            # running pysvn.Client().status()
            try:
                CLIENT.status(rp_)
            except pysvn._pysvn.ClientError as exc:
                log.error(
                    'Cache path %s (corresponding remote: %s) exists but is '
                    'not a valid subversion checkout. You will need to '
                    'manually delete this directory on the master to continue '
                    'to use this svnfs remote.', rp_, repo_url
                )
                _failhard()
        repo_conf.update({
            'repo': rp_,
            'url': repo_url,
            'hash': repo_hash,
            'cachedir': rp_,
            'lockfile': os.path.join(rp_, 'update.lk')
        })
        repos.append(repo_conf)
    if new_remote:
        # Persist the hash -> URL mapping so admins can identify cache dirs.
        remote_map = os.path.join(__opts__['cachedir'], 'svnfs/remote_map.txt')
        try:
            with salt.utils.files.fopen(remote_map, 'w+') as fp_:
                timestamp = datetime.now().strftime('%d %b %Y %H:%M:%S.%f')
                fp_.write('# svnfs_remote map as of {0}\n'.format(timestamp))
                for repo_conf in repos:
                    fp_.write(
                        salt.utils.stringutils.to_str(
                            '{0} = {1}\n'.format(
                                repo_conf['hash'], repo_conf['url']
                            )
                        )
                    )
        except OSError:
            pass
        else:
            log.info('Wrote new svnfs_remote map to %s', remote_map)
    return repos
"def",
"init",
"(",
")",
":",
"bp_",
"=",
"os",
".",
"path",
".",
"join",
"(",
"__opts__",
"[",
"'cachedir'",
"]",
",",
"'svnfs'",
")",
"new_remote",
"=",
"False",
"repos",
"=",
"[",
"]",
"per_remote_defaults",
"=",
"{",
"}",
"for",
"param",
"in",
... | 36.512195 | 20.463415 |
def getProvIden(self, provstack):
        '''
        Return the iden for a provenance stack, persisting it on first sight.
        '''
        iden = _providen(provstack)
        misc, frames = provstack
        # Frames arrive with their info as (k, v) tuples; rebuild the dicts
        # before serializing.
        rebuilt = [(frame_type, dict(info)) for (frame_type, info) in frames]
        packed = s_msgpack.en((misc, rebuilt))
        # put() with overwrite=False reports whether the iden was new.
        if self.slab.put(iden, packed, overwrite=False, db=self.db):
            self.provseq.save([iden])
        return iden
"def",
"getProvIden",
"(",
"self",
",",
"provstack",
")",
":",
"iden",
"=",
"_providen",
"(",
"provstack",
")",
"misc",
",",
"frames",
"=",
"provstack",
"# Convert each frame back from (k, v) tuples to a dict",
"dictframes",
"=",
"[",
"(",
"typ",
",",
"{",
"k",
... | 40 | 23.142857 |
def compact(self):
        """Collapse runs of repeated measurements in place.

        After compaction the instance evaluates to the same value at every
        time, but entries whose value merely repeats the previous one are
        removed.
        """
        duplicates = []
        last_seen = object()
        for timestamp, measurement in self:
            if measurement == last_seen:
                duplicates.append(timestamp)
            last_seen = measurement
        # Deletion is deferred so we never mutate while iterating.
        for timestamp in duplicates:
            del self[timestamp]
"def",
"compact",
"(",
"self",
")",
":",
"previous_value",
"=",
"object",
"(",
")",
"redundant",
"=",
"[",
"]",
"for",
"time",
",",
"value",
"in",
"self",
":",
"if",
"value",
"==",
"previous_value",
":",
"redundant",
".",
"append",
"(",
"time",
")",
... | 32.769231 | 11.923077 |
def get_session_id(self):
        """
        Get a unique id (shortish string) to allow simple aggregation of
        log records from multiple sources. This id is used for the life of
        the running program to allow extraction from all logs.

        WARNING - this can give duplicate sessions when 2 apps hit it
        at the same time.
        """
        session_file = self.log_folder + os.sep + '_sessions.txt'
        max_session = '0'
        try:
            # BUG FIX: the old code mixed `for _ in f` with f.readline(),
            # which interleaves two readers and skips lines. Read every
            # line and keep the last non-blank one.
            with open(session_file, 'r') as f:
                for line in f:
                    if line.strip():
                        max_session = line.strip()
        except Exception:
            # Missing/unreadable session file: start fresh.
            max_session = '1'
        # Random increment keeps ids unique-ish; still racy across
        # processes (see WARNING above).
        this_session = str(int(max_session) + random.randint(9, 100)).zfill(9)
        with open(session_file, 'a') as f2:
            f2.write(this_session + '\n')
        return this_session
"def",
"get_session_id",
"(",
"self",
")",
":",
"max_session",
"=",
"'0'",
"try",
":",
"with",
"open",
"(",
"self",
".",
"log_folder",
"+",
"os",
".",
"sep",
"+",
"'_sessions.txt'",
",",
"'r'",
")",
"as",
"f",
":",
"for",
"_",
"in",
"f",
":",
"txt"... | 43 | 20.090909 |
def set_service_status(service, status):
    '''Update the status of a particular service in the database.

    Merges a ServiceStates row keyed by service type, commits the change
    and closes the session.
    '''
    record = db.ServiceStates()
    record.type = service
    record.status = status
    session = db.get_session()
    session.merge(record)
    session.commit()
    session.close()
"def",
"set_service_status",
"(",
"service",
",",
"status",
")",
":",
"srv",
"=",
"db",
".",
"ServiceStates",
"(",
")",
"srv",
".",
"type",
"=",
"service",
"srv",
".",
"status",
"=",
"status",
"dbs",
"=",
"db",
".",
"get_session",
"(",
")",
"dbs",
".... | 23.636364 | 20.909091 |
def child_set(self, child, **kwargs):
    """Apply key/value pairs as child properties of the given child.

    Keyword names use ``_`` in place of ``-``; each name is translated
    back to its dashed property form before being set.
    """
    for prop_name, prop_value in kwargs.items():
        dashed = prop_name.replace('_', '-')
        self.child_set_property(child, dashed, prop_value)
"def",
"child_set",
"(",
"self",
",",
"child",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"name",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"name",
"=",
"name",
".",
"replace",
"(",
"'_'",
",",
"'-'",
")",
"self",
".",
"child_set_... | 41.666667 | 10.166667 |
def output_reduce_list(path_list, force=False):
    """Generate structure files with protons from a list of structure files.

    Runs ``output_reduce`` on each path and collects the returned output
    paths, skipping entries for which no output was produced.
    """
    results = (output_reduce(p, force=force) for p in path_list)
    return [out for out in results if out]
"def",
"output_reduce_list",
"(",
"path_list",
",",
"force",
"=",
"False",
")",
":",
"output_paths",
"=",
"[",
"]",
"for",
"path",
"in",
"path_list",
":",
"output_path",
"=",
"output_reduce",
"(",
"path",
",",
"force",
"=",
"force",
")",
"if",
"output_path... | 39.625 | 11.5 |
def render_done(self, form, **kwargs):
    """Finish the wizard once every step has passed.

    Re-validates each step's form against the stored data to prevent
    manipulation; the first form that fails validation is handed to
    `render_revalidation_failure`. When every form is valid, `done` is
    called with the validated forms and the wizard storage is reset
    before the response is returned.
    """
    revalidated = []
    for step in self.get_form_list():
        step_form = self.get_form(
            step=step,
            data=self.storage.get_step_data(step),
            files=self.storage.get_step_files(step))
        # Bail out on the first form that no longer validates.
        if not step_form.is_valid():
            return self.render_revalidation_failure(step, step_form, **kwargs)
        revalidated.append(step_form)
    # Reset only after done() has run, so done() can still read the
    # storage; this prevents rendering done with the same data twice.
    response = self.done(revalidated, **kwargs)
    self.storage.reset()
    return response
"def",
"render_done",
"(",
"self",
",",
"form",
",",
"*",
"*",
"kwargs",
")",
":",
"final_form_list",
"=",
"[",
"]",
"# walk through the form list and try to validate the data again.",
"for",
"form_key",
"in",
"self",
".",
"get_form_list",
"(",
")",
":",
"form_obj... | 47.26087 | 17.869565 |
def json(self, url, method='get', params=None, data=None):
    """Issue a request and return the decoded JSON body.

    :type url: str
    :param url: API endpoint
    :type method: str
    :param method: HTTP method
    :type params: dict
    :param params: query-string parameters
    :type data: dict
    :param data: request body
    :rtype: dict
    :return: parsed JSON response
    """
    return self.req(url, method, params, data).json()
"def",
"json",
"(",
"self",
",",
"url",
",",
"method",
"=",
"'get'",
",",
"params",
"=",
"None",
",",
"data",
"=",
"None",
")",
":",
"r",
"=",
"self",
".",
"req",
"(",
"url",
",",
"method",
",",
"params",
",",
"data",
")",
"return",
"r",
".",
... | 20.863636 | 18.772727 |
def formatTime(self, record, datefmt=None):
    """Format the log timestamp.

    Overrides ``logging.Formatter.formatTime``: converts the record's
    creation time via ``self.converter`` and renders it either with the
    caller-supplied *datefmt* or with a default ISO-8601-style format.

    :param record: log record whose ``created`` (epoch seconds, float)
        and ``msecs`` attributes are read
    :param datefmt: optional ``strftime`` format string
    :return: formatted timestamp string
    """
    # Keep the sub-second part separately; gmtime() below truncates it.
    _seconds_fraction = record.created - int(record.created)
    # NOTE(review): mktime() interprets the UTC struct_time from gmtime()
    # as *local* time, so _datetime_utc is shifted from true UTC by the
    # local timezone offset -- confirm self.converter expects this.
    _datetime_utc = time.mktime(time.gmtime(record.created))
    _datetime_utc += _seconds_fraction
    # self.converter is expected to return an object with strftime()
    # (e.g. a datetime); its exact semantics are defined elsewhere.
    _created = self.converter(_datetime_utc)
    if datefmt:
        time_string = _created.strftime(datefmt)
    else:
        # Default format already carries %f microseconds; the line after
        # additionally appends ",mmm" from record.msecs.
        time_string = _created.strftime('%Y-%m-%dT%H:%M:%S.%fZ')
        time_string = "%s,%03d" % (time_string, record.msecs)
    return time_string
"def",
"formatTime",
"(",
"self",
",",
"record",
",",
"datefmt",
"=",
"None",
")",
":",
"_seconds_fraction",
"=",
"record",
".",
"created",
"-",
"int",
"(",
"record",
".",
"created",
")",
"_datetime_utc",
"=",
"time",
".",
"mktime",
"(",
"time",
".",
"... | 41.769231 | 17.538462 |
def isValidSemver(version):
    """Semantic version number - determines whether the version is qualified. The format is MAJOR.Minor.PATCH, more with https://semver.org/"""
    # Guard clause: only non-empty strings can be semantic versions.
    if not version or not isinstance(version, string_types):
        return False
    try:
        semver.parse(version)
    except (TypeError, ValueError):
        return False
    return True
"def",
"isValidSemver",
"(",
"version",
")",
":",
"if",
"version",
"and",
"isinstance",
"(",
"version",
",",
"string_types",
")",
":",
"try",
":",
"semver",
".",
"parse",
"(",
"version",
")",
"except",
"(",
"TypeError",
",",
"ValueError",
")",
":",
"retu... | 38.2 | 14.7 |
def validate_IPykernel(venv_dir):
    """Validates that this env contains an IPython kernel and returns info to start it

    :param venv_dir: path of the environment to inspect
    Returns: tuple
        (ARGV, language, resource_dir) on success, or ([], None, None)
        when no suitable python interpreter / ipython kernel is found.
    """
    # Accept any of the common interpreter names inside the env.
    python_exe_name = None
    for exe in ("python", "python2", "python3"):
        python_exe_name = find_exe(venv_dir, exe)
        if python_exe_name is not None:
            break
    if python_exe_name is None:
        return [], None, None
    # Make some checks for ipython first, because calling the import is expensive
    if all(find_exe(venv_dir, exe) is None
           for exe in ("ipython", "ipython2", "ipython3")):
        return [], None, None
    # check if this is really an ipython **kernel**
    import subprocess
    try:
        # BUG FIX: the command used to be '"import ipykernel"', which makes
        # python -c evaluate a harmless string literal and always succeed;
        # pass the bare statement so a missing ipykernel is actually detected.
        subprocess.check_call([python_exe_name, '-c', 'import ipykernel'])
    except Exception:
        # BUG FIX: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
        # not installed? -> not useable in any case...
        return [], None, None
    argv = [python_exe_name, "-m", "ipykernel", "-f", "{connection_file}"]
    resources_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "logos", "python")
    return argv, "python", resources_dir
"def",
"validate_IPykernel",
"(",
"venv_dir",
")",
":",
"python_exe_name",
"=",
"find_exe",
"(",
"venv_dir",
",",
"\"python\"",
")",
"if",
"python_exe_name",
"is",
"None",
":",
"python_exe_name",
"=",
"find_exe",
"(",
"venv_dir",
",",
"\"python2\"",
")",
"if",
... | 38.387097 | 18.806452 |
def daterange(start, stop, step=1, inclusive=False):
    """In the spirit of :func:`range` and :func:`xrange`, the `daterange`
    generator that yields a sequence of :class:`~datetime.date`
    objects, starting at *start*, incrementing by *step*, until *stop*
    is reached.

    When *inclusive* is True, the final date may be *stop*, **if**
    *step* falls evenly on it. By default, *step* is one day. See
    details below for many more details.

    Args:
        start (datetime.date): The starting date. The first value in
            the sequence.
        stop (datetime.date): The stopping date. By default not
            included in return. Can be `None` to yield an infinite
            sequence.
        step (int): The value to increment *start* by to reach
            *stop*. Can be an :class:`int` number of days, a
            :class:`datetime.timedelta`, or a :class:`tuple` of integers,
            `(year, month, day)`. Positive and negative *step* values
            are supported.
        inclusive (bool): Whether or not the *stop* date can be
            returned. *stop* is only returned when a *step* falls evenly
            on it.

    >>> christmas = date(year=2015, month=12, day=25)
    >>> boxing_day = date(year=2015, month=12, day=26)
    >>> new_year = date(year=2016, month=1, day=1)
    >>> for day in daterange(christmas, new_year):
    ...     print(repr(day))
    datetime.date(2015, 12, 25)
    datetime.date(2015, 12, 26)
    datetime.date(2015, 12, 27)
    datetime.date(2015, 12, 28)
    datetime.date(2015, 12, 29)
    datetime.date(2015, 12, 30)
    datetime.date(2015, 12, 31)
    >>> for day in daterange(christmas, boxing_day):
    ...     print(repr(day))
    datetime.date(2015, 12, 25)
    >>> for day in daterange(date(2017, 5, 1), date(2017, 8, 1),
    ...                      step=(0, 1, 0), inclusive=True):
    ...     print(repr(day))
    datetime.date(2017, 5, 1)
    datetime.date(2017, 6, 1)
    datetime.date(2017, 7, 1)
    datetime.date(2017, 8, 1)

    *Be careful when using stop=None, as this will yield an infinite
    sequence of dates.*
    """
    if not isinstance(start, date):
        raise TypeError("start expected datetime.date instance")
    if stop and not isinstance(stop, date):
        raise TypeError("stop expected datetime.date instance or None")
    # step may be an int (days), a timedelta, or a (year, month, day) tuple.
    try:
        y_step, m_step, d_step = step
    except TypeError:
        y_step, m_step, d_step = 0, 0, step
    else:
        y_step, m_step = int(y_step), int(m_step)
    if isinstance(d_step, int):
        d_step = timedelta(days=int(d_step))
    elif isinstance(d_step, timedelta):
        pass
    else:
        raise ValueError('step expected int, timedelta, or tuple'
                         ' (year, month, day), not: %r' % step)
    if stop is None:
        # BUG FIX: the sentinel must accept (now, stop) like the
        # operator-based comparators below; the old one-argument lambda
        # raised TypeError on the first iteration whenever stop was None.
        finished = lambda now, stop: False
    elif start < stop:
        finished = operator.gt if inclusive else operator.ge
    else:
        # Counting backwards: invert the termination comparison.
        finished = operator.lt if inclusive else operator.le
    now = start

    while not finished(now, stop):
        yield now
        if y_step or m_step:
            # BUG FIX: do the month arithmetic zero-based. The old
            # `divmod(now.month + m_step, 12)` with `cur_month or 12`
            # mapped Nov + 1 month to December of the *following* year
            # (and Jan - 1 month to December of the *same* year).
            m_y_step, cur_month = divmod(now.month + m_step - 1, 12)
            now = now.replace(year=now.year + y_step + m_y_step,
                              month=cur_month + 1)
        now = now + d_step
    return
"def",
"daterange",
"(",
"start",
",",
"stop",
",",
"step",
"=",
"1",
",",
"inclusive",
"=",
"False",
")",
":",
"if",
"not",
"isinstance",
"(",
"start",
",",
"date",
")",
":",
"raise",
"TypeError",
"(",
"\"start expected datetime.date instance\"",
")",
"if... | 37.964706 | 18.647059 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.