text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def get_completed(self):
"""Determine if the game was completed.
If there's a postgame, it will indicate completion.
If there is no postgame, guess based on resignation.
"""
postgame = self.get_postgame()
if postgame:
return postgame.complete
else:
return True if self._cache['resigned'] else False | [
"def",
"get_completed",
"(",
"self",
")",
":",
"postgame",
"=",
"self",
".",
"get_postgame",
"(",
")",
"if",
"postgame",
":",
"return",
"postgame",
".",
"complete",
"else",
":",
"return",
"True",
"if",
"self",
".",
"_cache",
"[",
"'resigned'",
"]",
"else... | 33.545455 | 15.363636 |
def close(self):
"""Close the connection."""
if self.sock:
self.sock.close()
self.sock = 0
self.eof = 1 | [
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"sock",
":",
"self",
".",
"sock",
".",
"close",
"(",
")",
"self",
".",
"sock",
"=",
"0",
"self",
".",
"eof",
"=",
"1"
] | 23.666667 | 15.5 |
def get_value_tuple(self):
"""
Returns a tuple of the color's values (in order). For example,
an LabColor object will return (lab_l, lab_a, lab_b), where each
member of the tuple is the float value for said variable.
"""
retval = tuple()
for val in self.VALUES:
retval += (getattr(self, val),)
return retval | [
"def",
"get_value_tuple",
"(",
"self",
")",
":",
"retval",
"=",
"tuple",
"(",
")",
"for",
"val",
"in",
"self",
".",
"VALUES",
":",
"retval",
"+=",
"(",
"getattr",
"(",
"self",
",",
"val",
")",
",",
")",
"return",
"retval"
] | 37.4 | 14.8 |
def benchmark_command(cmd, progress):
"""Benchmark one command execution"""
full_cmd = '/usr/bin/time --format="%U %M" {0}'.format(cmd)
print '{0:6.2f}% Running {1}'.format(100.0 * progress, full_cmd)
(_, err) = subprocess.Popen(
['/bin/sh', '-c', full_cmd],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE
).communicate('')
values = err.strip().split(' ')
if len(values) == 2:
try:
return (float(values[0]), float(values[1]))
except: # pylint:disable=I0011,W0702
pass # Handled by the code after the "if"
print err
raise Exception('Error during benchmarking') | [
"def",
"benchmark_command",
"(",
"cmd",
",",
"progress",
")",
":",
"full_cmd",
"=",
"'/usr/bin/time --format=\"%U %M\" {0}'",
".",
"format",
"(",
"cmd",
")",
"print",
"'{0:6.2f}% Running {1}'",
".",
"format",
"(",
"100.0",
"*",
"progress",
",",
"full_cmd",
")",
... | 33.75 | 15.6 |
def p_classDeclaration(p):
# pylint: disable=line-too-long
"""classDeclaration : CLASS className '{' classFeatureList '}' ';'
| CLASS className superClass '{' classFeatureList '}' ';'
| CLASS className alias '{' classFeatureList '}' ';'
| CLASS className alias superClass '{' classFeatureList '}' ';'
| qualifierList CLASS className '{' classFeatureList '}' ';'
| qualifierList CLASS className superClass '{' classFeatureList '}' ';'
| qualifierList CLASS className alias '{' classFeatureList '}' ';'
| qualifierList CLASS className alias superClass '{' classFeatureList '}' ';'
""" # noqa: E501
superclass = None
alias = None
quals = []
if isinstance(p[1], six.string_types): # no class qualifiers
cname = p[2]
if p[3][0] == '$': # alias present
alias = p[3]
if p[4] == '{': # no superclass
cfl = p[5]
else: # superclass
superclass = p[4]
cfl = p[6]
else: # no alias
if p[3] == '{': # no superclass
cfl = p[4]
else: # superclass
superclass = p[3]
cfl = p[5]
else: # class qualifiers
quals = p[1]
cname = p[3]
if p[4][0] == '$': # alias present
alias = p[4]
if p[5] == '{': # no superclass
cfl = p[6]
else: # superclass
superclass = p[5]
cfl = p[7]
else: # no alias
if p[4] == '{': # no superclass
cfl = p[5]
else: # superclass
superclass = p[4]
cfl = p[6]
quals = OrderedDict([(x.name, x) for x in quals])
methods = OrderedDict()
props = OrderedDict()
for item in cfl:
item.class_origin = cname
if isinstance(item, CIMMethod):
methods[item.name] = item
else:
props[item.name] = item
p[0] = CIMClass(cname, properties=props, methods=methods,
superclass=superclass, qualifiers=quals)
if alias:
p.parser.aliases[alias] = p[0] | [
"def",
"p_classDeclaration",
"(",
"p",
")",
":",
"# pylint: disable=line-too-long",
"# noqa: E501",
"superclass",
"=",
"None",
"alias",
"=",
"None",
"quals",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"p",
"[",
"1",
"]",
",",
"six",
".",
"string_types",
")",
... | 39.12069 | 16.413793 |
def free_params(self, value):
"""Set the free parameters. Note that this bypasses enforce_bounds.
"""
value = scipy.asarray(value, dtype=float)
self.K_up_to_date = False
self.k.free_params = value[:self.k.num_free_params]
self.noise_k.free_params = value[self.k.num_free_params:self.k.num_free_params + self.noise_k.num_free_params]
if self.mu is not None:
self.mu.free_params = value[self.k.num_free_params + self.noise_k.num_free_params:] | [
"def",
"free_params",
"(",
"self",
",",
"value",
")",
":",
"value",
"=",
"scipy",
".",
"asarray",
"(",
"value",
",",
"dtype",
"=",
"float",
")",
"self",
".",
"K_up_to_date",
"=",
"False",
"self",
".",
"k",
".",
"free_params",
"=",
"value",
"[",
":",
... | 55.555556 | 20.888889 |
async def disconnect_message(self, message, context):
"""Handle a disconnect message.
See :meth:`AbstractDeviceAdapter.disconnect`.
"""
conn_string = message.get('connection_string')
client_id = context.user_data
await self.disconnect(client_id, conn_string) | [
"async",
"def",
"disconnect_message",
"(",
"self",
",",
"message",
",",
"context",
")",
":",
"conn_string",
"=",
"message",
".",
"get",
"(",
"'connection_string'",
")",
"client_id",
"=",
"context",
".",
"user_data",
"await",
"self",
".",
"disconnect",
"(",
"... | 30 | 17.6 |
def read_config(ctx, param, config_path):
"""Callback that is used whenever --config is passed."""
if sys.argv[1] == 'init':
return
cfg = ctx.ensure_object(Config)
if config_path is None:
config_path = path.join(sys.path[0], 'v2ex_config.json')
if not path.exists(config_path):
sys.exit("Can't find config file at {0}.\nPlease read "
"https://github.com/lord63/v2ex_daily_mission "
"to follow the guide.".format(config_path))
cfg.load_config(config_path)
return config_path | [
"def",
"read_config",
"(",
"ctx",
",",
"param",
",",
"config_path",
")",
":",
"if",
"sys",
".",
"argv",
"[",
"1",
"]",
"==",
"'init'",
":",
"return",
"cfg",
"=",
"ctx",
".",
"ensure_object",
"(",
"Config",
")",
"if",
"config_path",
"is",
"None",
":",... | 42.076923 | 13.615385 |
def upload(ui, repo, name, **opts):
"""upload diffs to the code review server
Uploads the current modifications for a given change to the server.
"""
if codereview_disabled:
raise hg_util.Abort(codereview_disabled)
repo.ui.quiet = True
cl, err = LoadCL(ui, repo, name, web=True)
if err != "":
raise hg_util.Abort(err)
if not cl.local:
raise hg_util.Abort("cannot upload non-local change")
cl.Upload(ui, repo)
print "%s%s\n" % (server_url_base, cl.name)
return 0 | [
"def",
"upload",
"(",
"ui",
",",
"repo",
",",
"name",
",",
"*",
"*",
"opts",
")",
":",
"if",
"codereview_disabled",
":",
"raise",
"hg_util",
".",
"Abort",
"(",
"codereview_disabled",
")",
"repo",
".",
"ui",
".",
"quiet",
"=",
"True",
"cl",
",",
"err"... | 27.294118 | 16.823529 |
def export_keys(output_path, stash, passphrase, backend):
"""Export all keys to a file
"""
stash = _get_stash(backend, stash, passphrase)
try:
click.echo('Exporting stash to {0}...'.format(output_path))
stash.export(output_path=output_path)
click.echo('Export complete!')
except GhostError as ex:
sys.exit(ex) | [
"def",
"export_keys",
"(",
"output_path",
",",
"stash",
",",
"passphrase",
",",
"backend",
")",
":",
"stash",
"=",
"_get_stash",
"(",
"backend",
",",
"stash",
",",
"passphrase",
")",
"try",
":",
"click",
".",
"echo",
"(",
"'Exporting stash to {0}...'",
".",
... | 32 | 15 |
def fetch(self, task_channel=values.unset):
"""
Fetch a WorkspaceRealTimeStatisticsInstance
:param unicode task_channel: Filter real-time and cumulative statistics by TaskChannel.
:returns: Fetched WorkspaceRealTimeStatisticsInstance
:rtype: twilio.rest.taskrouter.v1.workspace.workspace_real_time_statistics.WorkspaceRealTimeStatisticsInstance
"""
return self._proxy.fetch(task_channel=task_channel, ) | [
"def",
"fetch",
"(",
"self",
",",
"task_channel",
"=",
"values",
".",
"unset",
")",
":",
"return",
"self",
".",
"_proxy",
".",
"fetch",
"(",
"task_channel",
"=",
"task_channel",
",",
")"
] | 45.1 | 26.9 |
def get_family_search_session(self, proxy=None):
"""Gets the ``OsidSession`` associated with the family search service.
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.relationship.FamilySearchSession) - a
``FamilySearchSession``
raise: NullArgument - ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_family_search()`` is
``false``
*compliance: optional -- This method must be implemented if ``supports_family_search()`` is ``true``.*
"""
if not self.supports_family_search():
raise Unimplemented()
try:
from . import sessions
except ImportError:
raise OperationFailed()
proxy = self._convert_proxy(proxy)
try:
session = sessions.FamilySearchSession(proxy=proxy, runtime=self._runtime)
except AttributeError:
raise OperationFailed()
return session | [
"def",
"get_family_search_session",
"(",
"self",
",",
"proxy",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"supports_family_search",
"(",
")",
":",
"raise",
"Unimplemented",
"(",
")",
"try",
":",
"from",
".",
"import",
"sessions",
"except",
"ImportError... | 40.28 | 17.2 |
def install_board_with_programmer(mcu,
programmer,
f_cpu=16000000,
core='arduino',
replace_existing=False,
):
"""install board with programmer."""
bunch = AutoBunch()
board_id = '{mcu}_{f_cpu}_{programmer}'.format(f_cpu=f_cpu,
mcu=mcu,
programmer=programmer,
)
bunch.name = '{mcu}@{f} Prog:{programmer}'.format(f=strfreq(f_cpu),
mcu=mcu,
programmer=programmer,
)
bunch.upload.using = programmer
bunch.build.mcu = mcu
bunch.build.f_cpu = str(f_cpu) + 'L'
bunch.build.core = core
install_board(board_id, bunch, replace_existing=replace_existing) | [
"def",
"install_board_with_programmer",
"(",
"mcu",
",",
"programmer",
",",
"f_cpu",
"=",
"16000000",
",",
"core",
"=",
"'arduino'",
",",
"replace_existing",
"=",
"False",
",",
")",
":",
"bunch",
"=",
"AutoBunch",
"(",
")",
"board_id",
"=",
"'{mcu}_{f_cpu}_{pr... | 42.75 | 17.416667 |
def remove_images():
"""Removes all dangling images as well as all images referenced in a dusty spec; forceful removal is not used"""
client = get_docker_client()
removed = _remove_dangling_images()
dusty_images = get_dusty_images()
all_images = client.images(all=True)
for image in all_images:
if set(image['RepoTags']).intersection(dusty_images):
try:
client.remove_image(image['Id'])
except Exception as e:
logging.info("Couldn't remove image {}".format(image['RepoTags']))
else:
log_to_client("Removed Image {}".format(image['RepoTags']))
removed.append(image)
return removed | [
"def",
"remove_images",
"(",
")",
":",
"client",
"=",
"get_docker_client",
"(",
")",
"removed",
"=",
"_remove_dangling_images",
"(",
")",
"dusty_images",
"=",
"get_dusty_images",
"(",
")",
"all_images",
"=",
"client",
".",
"images",
"(",
"all",
"=",
"True",
... | 43.75 | 14.25 |
def _parse_list(element, definition):
"""Parse xml element by definition given by list.
Find all elements matched by the string given as the first value
in the list (as XPath or @attribute).
If there is a second argument it will be handled as a definitions
for the elements matched or the text when not.
:param element: ElementTree element
:param definition: definition schema
:type definition: list
:return: parsed xml
:rtype: list
"""
if len(definition) == 0:
raise XmlToJsonException('List definition needs some definition')
tag = definition[0]
tag_def = definition[1] if len(definition) > 1 else None
sub_list = []
for el in element.findall(tag):
sub_list.append(xml_to_json(el, tag_def))
return sub_list | [
"def",
"_parse_list",
"(",
"element",
",",
"definition",
")",
":",
"if",
"len",
"(",
"definition",
")",
"==",
"0",
":",
"raise",
"XmlToJsonException",
"(",
"'List definition needs some definition'",
")",
"tag",
"=",
"definition",
"[",
"0",
"]",
"tag_def",
"=",... | 28.62963 | 20.259259 |
def make_password(length, chars=string.letters + string.digits + '#$%&!'):
"""
Generate and return a random password
:param length: Desired length
:param chars: Character set to use
"""
return get_random_string(length, chars) | [
"def",
"make_password",
"(",
"length",
",",
"chars",
"=",
"string",
".",
"letters",
"+",
"string",
".",
"digits",
"+",
"'#$%&!'",
")",
":",
"return",
"get_random_string",
"(",
"length",
",",
"chars",
")"
] | 30.375 | 10.875 |
def _resolve_attribute(self, attribute):
"""Recursively replaces references to other attributes with their value.
Args:
attribute (str): The name of the attribute to resolve.
Returns:
str: The resolved value of 'attribute'.
"""
value = self.attributes[attribute]
if not value:
return None
resolved_value = re.sub('\$\((.*?)\)',self._resolve_attribute_match, value)
return resolved_value | [
"def",
"_resolve_attribute",
"(",
"self",
",",
"attribute",
")",
":",
"value",
"=",
"self",
".",
"attributes",
"[",
"attribute",
"]",
"if",
"not",
"value",
":",
"return",
"None",
"resolved_value",
"=",
"re",
".",
"sub",
"(",
"'\\$\\((.*?)\\)'",
",",
"self"... | 31.666667 | 20 |
def run_step(context):
"""Write payload out to json file.
Args:
context: pypyr.context.Context. Mandatory.
The following context keys expected:
- fileWriteJson
- path. mandatory. path-like. Write output file to
here. Will create directories in path for you.
- payload. optional. Write this key to output file. If not
specified, output entire context.
Returns:
None.
Raises:
pypyr.errors.KeyNotInContextError: fileWriteJson or
fileWriteJson['path'] missing in context.
pypyr.errors.KeyInContextHasNoValueError: fileWriteJson or
fileWriteJson['path'] exists but is None.
"""
logger.debug("started")
context.assert_child_key_has_value('fileWriteJson', 'path', __name__)
out_path = context.get_formatted_string(context['fileWriteJson']['path'])
# doing it like this to safeguard against accidentally dumping all context
# with potentially sensitive values in it to disk if payload exists but is
# None.
is_payload_specified = 'payload' in context['fileWriteJson']
logger.debug(f"opening destination file for writing: {out_path}")
os.makedirs(os.path.abspath(os.path.dirname(out_path)), exist_ok=True)
with open(out_path, 'w') as outfile:
if is_payload_specified:
payload = context['fileWriteJson']['payload']
formatted_iterable = context.get_formatted_iterable(payload)
else:
formatted_iterable = context.get_formatted_iterable(context)
json.dump(formatted_iterable, outfile, indent=2, ensure_ascii=False)
logger.info(f"formatted context content and wrote to {out_path}")
logger.debug("done") | [
"def",
"run_step",
"(",
"context",
")",
":",
"logger",
".",
"debug",
"(",
"\"started\"",
")",
"context",
".",
"assert_child_key_has_value",
"(",
"'fileWriteJson'",
",",
"'path'",
",",
"__name__",
")",
"out_path",
"=",
"context",
".",
"get_formatted_string",
"(",... | 39.863636 | 25.886364 |
def iter_series(self, workbook, row, col):
"""
Yield series dictionaries with values resolved to the final excel formulas.
"""
for series in self.__series:
series = dict(series)
series["values"] = series["values"].get_formula(workbook, row, col)
if "categories" in series:
series["categories"] = series["categories"].get_formula(workbook, row, col)
yield series | [
"def",
"iter_series",
"(",
"self",
",",
"workbook",
",",
"row",
",",
"col",
")",
":",
"for",
"series",
"in",
"self",
".",
"__series",
":",
"series",
"=",
"dict",
"(",
"series",
")",
"series",
"[",
"\"values\"",
"]",
"=",
"series",
"[",
"\"values\"",
... | 44.8 | 16.4 |
def Upload(self,directory,filename):
"""Uploads/Updates/Replaces files"""
db = self._loadDB(directory)
logger.debug("wp: Attempting upload of %s"%(filename))
# See if this already exists in our DB
if db.has_key(filename):
pid=db[filename]
logger.debug('wp: Found %s in DB with post id %s'%(filename,pid))
else:
pid=None
fullfile=os.path.join(directory,filename)
fid=open(fullfile,'r');
# Read meta data and content into dictionary
post=self._readMetaAndContent(fid)
#Connect to WP
self._connectToWP()
# If no pid, it means post is fresh off the press
# and not uploaded yet!
if not pid:
# Get a PID by uploading
pid=self.wp.call(NewPost(post))
if pid:
logger.debug("wp: Uploaded post with pid %s",pid)
db[filename]=pid
self._saveDB(directory,db)
return True
else:
logger.error("wp: Couldn't upload post")
return False
else:
# Already has PID, replace post
logger.debug("wp: Replacing post with pid %s",pid)
#FIXME: Check return value?!
self.wp.call(EditPost(pid,post))
return True
return False | [
"def",
"Upload",
"(",
"self",
",",
"directory",
",",
"filename",
")",
":",
"db",
"=",
"self",
".",
"_loadDB",
"(",
"directory",
")",
"logger",
".",
"debug",
"(",
"\"wp: Attempting upload of %s\"",
"%",
"(",
"filename",
")",
")",
"# See if this already exists i... | 29.644444 | 18.444444 |
def list_containers(self):
'''return a list of containers, determined by finding the metadata field
"type" with value "container." We alert the user to no containers
if results is empty, and exit
{'metadata': {'items':
[
{'key': 'type', 'value': 'container'}, ...
]
}
}
'''
results = []
for image in self._bucket.list_blobs():
if image.metadata is not None:
if "type" in image.metadata:
if image.metadata['type'] == "container":
results.append(image)
if len(results) == 0:
bot.info("No containers found, based on metadata type:container")
return results | [
"def",
"list_containers",
"(",
"self",
")",
":",
"results",
"=",
"[",
"]",
"for",
"image",
"in",
"self",
".",
"_bucket",
".",
"list_blobs",
"(",
")",
":",
"if",
"image",
".",
"metadata",
"is",
"not",
"None",
":",
"if",
"\"type\"",
"in",
"image",
".",... | 31.833333 | 21.25 |
def apply_t0(self, hits):
"""Apply only t0s"""
if HAVE_NUMBA:
apply_t0_nb(
hits.time, hits.dom_id, hits.channel_id, self._lookup_tables
)
else:
n = len(hits)
cal = np.empty(n)
lookup = self._calib_by_dom_and_channel
for i in range(n):
calib = lookup[hits['dom_id'][i]][hits['channel_id'][i]]
cal[i] = calib[6]
hits.time += cal
return hits | [
"def",
"apply_t0",
"(",
"self",
",",
"hits",
")",
":",
"if",
"HAVE_NUMBA",
":",
"apply_t0_nb",
"(",
"hits",
".",
"time",
",",
"hits",
".",
"dom_id",
",",
"hits",
".",
"channel_id",
",",
"self",
".",
"_lookup_tables",
")",
"else",
":",
"n",
"=",
"len"... | 32.533333 | 17.2 |
def get_or_add_childTnLst(self):
"""Return parent element for a new `p:video` child element.
The `p:video` element causes play controls to appear under a video
shape (pic shape containing video). There can be more than one video
shape on a slide, which causes the precondition to vary. It needs to
handle the case when there is no `p:sld/p:timing` element and when
that element already exists. If the case isn't simple, it just nukes
what's there and adds a fresh one. This could theoretically remove
desired existing timing information, but there isn't any evidence
available to me one way or the other, so I've taken the simple
approach.
"""
childTnLst = self._childTnLst
if childTnLst is None:
childTnLst = self._add_childTnLst()
return childTnLst | [
"def",
"get_or_add_childTnLst",
"(",
"self",
")",
":",
"childTnLst",
"=",
"self",
".",
"_childTnLst",
"if",
"childTnLst",
"is",
"None",
":",
"childTnLst",
"=",
"self",
".",
"_add_childTnLst",
"(",
")",
"return",
"childTnLst"
] | 50.529412 | 22.294118 |
def _render_log():
"""Totally tap into Towncrier internals to get an in-memory result.
"""
config = load_config(ROOT)
definitions = config['types']
fragments, fragment_filenames = find_fragments(
pathlib.Path(config['directory']).absolute(),
config['sections'],
None,
definitions,
)
rendered = render_fragments(
pathlib.Path(config['template']).read_text(encoding='utf-8'),
config['issue_format'],
split_fragments(fragments, definitions),
definitions,
config['underlines'][1:],
)
return rendered | [
"def",
"_render_log",
"(",
")",
":",
"config",
"=",
"load_config",
"(",
"ROOT",
")",
"definitions",
"=",
"config",
"[",
"'types'",
"]",
"fragments",
",",
"fragment_filenames",
"=",
"find_fragments",
"(",
"pathlib",
".",
"Path",
"(",
"config",
"[",
"'director... | 30.789474 | 15.526316 |
def _insertBPoint(self, index, type, anchor, bcpIn, bcpOut, **kwargs):
"""
Subclasses may override this method.
"""
# insert a simple line segment at the given anchor
# look it up as a bPoint and change the bcpIn and bcpOut there
# this avoids code duplication
self._insertSegment(index=index, type="line",
points=[anchor], smooth=False)
bPoints = self.bPoints
index += 1
if index >= len(bPoints):
# its an append instead of an insert
# so take the last bPoint
index = -1
bPoint = bPoints[index]
bPoint.bcpIn = bcpIn
bPoint.bcpOut = bcpOut
bPoint.type = type | [
"def",
"_insertBPoint",
"(",
"self",
",",
"index",
",",
"type",
",",
"anchor",
",",
"bcpIn",
",",
"bcpOut",
",",
"*",
"*",
"kwargs",
")",
":",
"# insert a simple line segment at the given anchor",
"# look it up as a bPoint and change the bcpIn and bcpOut there",
"# this a... | 37.684211 | 12 |
def stop(self):
"""Synchronously stop the background loop from outside.
This method will block until the background loop is completely stopped
so it cannot be called from inside the loop itself.
This method is safe to call multiple times. If the loop is not
currently running it will return without doing anything.
"""
if not self.loop:
return
if self.inside_loop():
raise InternalError("BackgroundEventLoop.stop() called from inside event loop; "
"would have deadlocked.")
try:
self.run_coroutine(self._stop_internal())
self.thread.join()
except:
self._logger.exception("Error stopping BackgroundEventLoop")
raise
finally:
self.thread = None
self.loop = None
self.tasks = set() | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"loop",
":",
"return",
"if",
"self",
".",
"inside_loop",
"(",
")",
":",
"raise",
"InternalError",
"(",
"\"BackgroundEventLoop.stop() called from inside event loop; \"",
"\"would have deadlocked.\"",
")"... | 32.814815 | 23.703704 |
def credentials_required(view_func):
"""
This decorator should be used with views that need simple authentication
against Django's authentication framework.
"""
@wraps(view_func, assigned=available_attrs(view_func))
def decorator(request, *args, **kwargs):
if settings.LOCALSHOP_USE_PROXIED_IP:
try:
ip_addr = request.META['HTTP_X_FORWARDED_FOR']
except KeyError:
return HttpResponseForbidden('No permission')
else:
# HTTP_X_FORWARDED_FOR can be a comma-separated list of IPs.
# The client's IP will be the first one.
ip_addr = ip_addr.split(",")[0].strip()
else:
ip_addr = request.META['REMOTE_ADDR']
if CIDR.objects.has_access(ip_addr, with_credentials=False):
return view_func(request, *args, **kwargs)
if not CIDR.objects.has_access(ip_addr, with_credentials=True):
return HttpResponseForbidden('No permission')
# Just return the original view because already logged in
if request.user.is_authenticated():
return view_func(request, *args, **kwargs)
user = authenticate_user(request)
if user is not None:
login(request, user)
return view_func(request, *args, **kwargs)
return HttpResponseUnauthorized(content='Authorization Required')
return decorator | [
"def",
"credentials_required",
"(",
"view_func",
")",
":",
"@",
"wraps",
"(",
"view_func",
",",
"assigned",
"=",
"available_attrs",
"(",
"view_func",
")",
")",
"def",
"decorator",
"(",
"request",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",... | 39.222222 | 19.388889 |
def get_user_details(user_id):
"""Get information about number of changesets, blocks and mapping days of a
user, using both the OSM API and the Mapbox comments APIself.
"""
reasons = []
try:
url = OSM_USERS_API.format(user_id=requests.compat.quote(user_id))
user_request = requests.get(url)
if user_request.status_code == 200:
user_data = user_request.content
xml_data = ET.fromstring(user_data).getchildren()[0].getchildren()
changesets = [i for i in xml_data if i.tag == 'changesets'][0]
blocks = [i for i in xml_data if i.tag == 'blocks'][0]
if int(changesets.get('count')) <= 5:
reasons.append('New mapper')
elif int(changesets.get('count')) <= 30:
url = MAPBOX_USERS_API.format(
user_id=requests.compat.quote(user_id)
)
user_request = requests.get(url)
if user_request.status_code == 200:
mapping_days = int(
user_request.json().get('extra').get('mapping_days')
)
if mapping_days <= 5:
reasons.append('New mapper')
if int(blocks.getchildren()[0].get('count')) > 1:
reasons.append('User has multiple blocks')
except Exception as e:
message = 'Could not verify user of the changeset: {}, {}'
print(message.format(user_id, str(e)))
return reasons | [
"def",
"get_user_details",
"(",
"user_id",
")",
":",
"reasons",
"=",
"[",
"]",
"try",
":",
"url",
"=",
"OSM_USERS_API",
".",
"format",
"(",
"user_id",
"=",
"requests",
".",
"compat",
".",
"quote",
"(",
"user_id",
")",
")",
"user_request",
"=",
"requests"... | 46.90625 | 15.28125 |
def start(self, **kwargs):
"""
Start this container. Similar to the ``docker start`` command, but
doesn't support attach options.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
return self.client.api.start(self.id, **kwargs) | [
"def",
"start",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"client",
".",
"api",
".",
"start",
"(",
"self",
".",
"id",
",",
"*",
"*",
"kwargs",
")"
] | 32.4 | 14.2 |
def set_password(name, password):
'''
Set the password for a named user. The password must be a properly defined
hash, the password hash can be generated with this command:
``openssl passwd -1 <plaintext password>``
CLI Example:
.. code-block:: bash
salt '*' shadow.set_password root $1$UYCIxa628.9qXjpQCjM4a..
'''
s_file = '/etc/shadow'
ret = {}
if not os.path.isfile(s_file):
return ret
lines = []
with salt.utils.files.fopen(s_file, 'rb') as ifile:
for line in ifile:
comps = line.strip().split(':')
if comps[0] != name:
lines.append(line)
continue
comps[1] = password
line = ':'.join(comps)
lines.append('{0}\n'.format(line))
with salt.utils.files.fopen(s_file, 'w+') as ofile:
lines = [salt.utils.stringutils.to_str(_l) for _l in lines]
ofile.writelines(lines)
uinfo = info(name)
return uinfo['passwd'] == password | [
"def",
"set_password",
"(",
"name",
",",
"password",
")",
":",
"s_file",
"=",
"'/etc/shadow'",
"ret",
"=",
"{",
"}",
"if",
"not",
"os",
".",
"path",
".",
"isfile",
"(",
"s_file",
")",
":",
"return",
"ret",
"lines",
"=",
"[",
"]",
"with",
"salt",
".... | 31.741935 | 18.645161 |
def local_time(unix_time, utc_offset, microseconds):
"""
Returns a UNIX time as a broken down time
for a particular transition type.
:type unix_time: int
:type utc_offset: int
:type microseconds: int
:rtype: tuple
"""
year = EPOCH_YEAR
seconds = int(math.floor(unix_time))
# Shift to a base year that is 400-year aligned.
if seconds >= 0:
seconds -= 10957 * SECS_PER_DAY
year += 30 # == 2000
else:
seconds += (146097 - 10957) * SECS_PER_DAY
year -= 370 # == 1600
seconds += utc_offset
# Handle years in chunks of 400/100/4/1
year += 400 * (seconds // SECS_PER_400_YEARS)
seconds %= SECS_PER_400_YEARS
if seconds < 0:
seconds += SECS_PER_400_YEARS
year -= 400
leap_year = 1 # 4-century aligned
sec_per_100years = SECS_PER_100_YEARS[leap_year]
while seconds >= sec_per_100years:
seconds -= sec_per_100years
year += 100
leap_year = 0 # 1-century, non 4-century aligned
sec_per_100years = SECS_PER_100_YEARS[leap_year]
sec_per_4years = SECS_PER_4_YEARS[leap_year]
while seconds >= sec_per_4years:
seconds -= sec_per_4years
year += 4
leap_year = 1 # 4-year, non century aligned
sec_per_4years = SECS_PER_4_YEARS[leap_year]
sec_per_year = SECS_PER_YEAR[leap_year]
while seconds >= sec_per_year:
seconds -= sec_per_year
year += 1
leap_year = 0 # non 4-year aligned
sec_per_year = SECS_PER_YEAR[leap_year]
# Handle months and days
month = TM_DECEMBER + 1
day = seconds // SECS_PER_DAY + 1
seconds %= SECS_PER_DAY
while month != TM_JANUARY + 1:
month_offset = MONTHS_OFFSETS[leap_year][month]
if day > month_offset:
day -= month_offset
break
month -= 1
# Handle hours, minutes, seconds and microseconds
hour = seconds // SECS_PER_HOUR
seconds %= SECS_PER_HOUR
minute = seconds // SECS_PER_MIN
second = seconds % SECS_PER_MIN
return (year, month, day, hour, minute, second, microseconds) | [
"def",
"local_time",
"(",
"unix_time",
",",
"utc_offset",
",",
"microseconds",
")",
":",
"year",
"=",
"EPOCH_YEAR",
"seconds",
"=",
"int",
"(",
"math",
".",
"floor",
"(",
"unix_time",
")",
")",
"# Shift to a base year that is 400-year aligned.",
"if",
"seconds",
... | 28.219178 | 16.191781 |
def send_button(recipient):
"""
Shortcuts are supported
page.send(recipient, Template.Buttons("hello", [
{'type': 'web_url', 'title': 'Open Web URL', 'value': 'https://www.oculus.com/en-us/rift/'},
{'type': 'postback', 'title': 'tigger Postback', 'value': 'DEVELOPED_DEFINED_PAYLOAD'},
{'type': 'phone_number', 'title': 'Call Phone Number', 'value': '+16505551234'},
]))
"""
page.send(recipient, Template.Buttons("hello", [
Template.ButtonWeb("Open Web URL", "https://www.oculus.com/en-us/rift/"),
Template.ButtonPostBack("trigger Postback", "DEVELOPED_DEFINED_PAYLOAD"),
Template.ButtonPhoneNumber("Call Phone Number", "+16505551234")
])) | [
"def",
"send_button",
"(",
"recipient",
")",
":",
"page",
".",
"send",
"(",
"recipient",
",",
"Template",
".",
"Buttons",
"(",
"\"hello\"",
",",
"[",
"Template",
".",
"ButtonWeb",
"(",
"\"Open Web URL\"",
",",
"\"https://www.oculus.com/en-us/rift/\"",
")",
",",
... | 50.142857 | 28 |
def validate_minimum(value, minimum, is_exclusive, **kwargs):
"""
Validator function for validating that a value does not violate it's
minimum allowed value. This validation can be inclusive, or exclusive of
the minimum depending on the value of `is_exclusive`.
"""
if is_exclusive:
comparison_text = "greater than"
compare_fn = operator.gt
else:
comparison_text = "greater than or equal to"
compare_fn = operator.ge
if not compare_fn(value, minimum):
raise ValidationError(
MESSAGES['minimum']['invalid'].format(value, comparison_text, minimum),
) | [
"def",
"validate_minimum",
"(",
"value",
",",
"minimum",
",",
"is_exclusive",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"is_exclusive",
":",
"comparison_text",
"=",
"\"greater than\"",
"compare_fn",
"=",
"operator",
".",
"gt",
"else",
":",
"comparison_text",
"=... | 36.823529 | 18.352941 |
def gen_triplets_master(wv_master, geometry=None, debugplot=0):
"""Compute information associated to triplets in master table.
Determine all the possible triplets that can be generated from the
array `wv_master`. In addition, the relative position of the
central line of each triplet is also computed.
Parameters
----------
wv_master : 1d numpy array, float
Array with wavelengths corresponding to the master table
(Angstroms).
geometry : tuple (4 integers) or None
x, y, dx, dy values employed to set the window geometry.
debugplot : int
Determines whether intermediate computations and/or plots
are displayed. The valid codes are defined in
numina.array.display.pause_debugplot.
Returns
-------
ntriplets_master : int
Number of triplets built from master table.
ratios_master_sorted : 1d numpy array, float
Array with values of the relative position of the central line
of each triplet, sorted in ascending order.
triplets_master_sorted_list : list of tuples
List with tuples of three numbers, corresponding to the three
line indices in the master table. The list is sorted to be in
correspondence with `ratios_master_sorted`.
"""
nlines_master = wv_master.size
# Check that the wavelengths in the master table are sorted
wv_previous = wv_master[0]
for i in range(1, nlines_master):
if wv_previous >= wv_master[i]:
raise ValueError('Wavelengths:\n--> ' +
str(wv_previous) + '\n--> ' + str(wv_master[i]) +
'\nin master table are duplicated or not sorted')
wv_previous = wv_master[i]
# Generate all the possible triplets with the numbers of the lines
# in the master table. Each triplet is defined as a tuple of three
# numbers corresponding to the three line indices in the master
# table. The collection of tuples is stored in an ordinary python
# list.
iter_comb_triplets = itertools.combinations(range(nlines_master), 3)
triplets_master_list = [val for val in iter_comb_triplets]
# Verify that the number of triplets coincides with the expected
# value.
ntriplets_master = len(triplets_master_list)
if ntriplets_master == comb(nlines_master, 3, exact=True):
if abs(debugplot) >= 10:
print('>>> Total number of lines in master table:',
nlines_master)
print('>>> Number of triplets in master table...:',
ntriplets_master)
else:
raise ValueError('Invalid number of combinations')
# For each triplet, compute the relative position of the central
# line.
ratios_master = np.zeros(ntriplets_master)
for index, value in enumerate(triplets_master_list):
i1, i2, i3 = value
delta1 = wv_master[i2] - wv_master[i1]
delta2 = wv_master[i3] - wv_master[i1]
ratios_master[index] = delta1 / delta2
# Compute the array of indices that index the above ratios in
# sorted order.
isort_ratios_master = np.argsort(ratios_master)
# Simultaneous sort of position ratios and triplets.
ratios_master_sorted = ratios_master[isort_ratios_master]
triplets_master_sorted_list = [triplets_master_list[i]
for i in isort_ratios_master]
if abs(debugplot) in [21, 22]:
# compute and plot histogram with position ratios
bins_in = np.linspace(0.0, 1.0, 41)
hist, bins_out = np.histogram(ratios_master, bins=bins_in)
#
from numina.array.display.matplotlib_qt import plt
fig = plt.figure()
ax = fig.add_subplot(111)
width_hist = 0.8*(bins_out[1]-bins_out[0])
center = (bins_out[:-1]+bins_out[1:])/2
ax.bar(center, hist, align='center', width=width_hist)
ax.set_xlabel('distance ratio in each triplet')
ax.set_ylabel('Number of triplets')
ax.set_title("Number of lines/triplets: " +
str(nlines_master) + "/" + str(ntriplets_master))
# set window geometry
set_window_geometry(geometry)
pause_debugplot(debugplot, pltshow=True, tight_layout=True)
return ntriplets_master, ratios_master_sorted, triplets_master_sorted_list | [
"def",
"gen_triplets_master",
"(",
"wv_master",
",",
"geometry",
"=",
"None",
",",
"debugplot",
"=",
"0",
")",
":",
"nlines_master",
"=",
"wv_master",
".",
"size",
"# Check that the wavelengths in the master table are sorted",
"wv_previous",
"=",
"wv_master",
"[",
"0"... | 41.607843 | 20.480392 |
def check_days(text):
"""Suggest the preferred forms."""
err = "MAU102"
msg = "Days of the week should be capitalized. '{}' is the preferred form."
list = [
["Monday", ["monday"]],
["Tuesday", ["tuesday"]],
["Wednesday", ["wednesday"]],
["Thursday", ["thursday"]],
["Friday", ["friday"]],
["Saturday", ["saturday"]],
["Sunday", ["sunday"]],
]
return preferred_forms_check(text, list, err, msg, ignore_case=False) | [
"def",
"check_days",
"(",
"text",
")",
":",
"err",
"=",
"\"MAU102\"",
"msg",
"=",
"\"Days of the week should be capitalized. '{}' is the preferred form.\"",
"list",
"=",
"[",
"[",
"\"Monday\"",
",",
"[",
"\"monday\"",
"]",
"]",
",",
"[",
"\"Tuesday\"",
",",
"[",
... | 30.176471 | 18.176471 |
def get_symbols_list(self):
'''Return a list of GdxSymb found in the GdxFile.'''
slist = []
rc, nSymb, nElem = gdxcc.gdxSystemInfo(self.gdx_handle)
assert rc, 'Unable to retrieve "%s" info' % self.filename
self.number_symbols = nSymb
self.number_elements = nElem
slist = [None]*(nSymb+1)
for j in range(0,nSymb+1):
sinfo = self.get_sid_info(j)
if j==0:
sinfo['name'] = 'universal_set'
slist[j] = GdxSymb(self,sinfo)
return slist | [
"def",
"get_symbols_list",
"(",
"self",
")",
":",
"slist",
"=",
"[",
"]",
"rc",
",",
"nSymb",
",",
"nElem",
"=",
"gdxcc",
".",
"gdxSystemInfo",
"(",
"self",
".",
"gdx_handle",
")",
"assert",
"rc",
",",
"'Unable to retrieve \"%s\" info'",
"%",
"self",
".",
... | 38.5 | 12.5 |
def consume(self, kind):
"""Consume one token and verify it is of the expected kind."""
next_token = self.stream.move()
assert next_token.kind == kind | [
"def",
"consume",
"(",
"self",
",",
"kind",
")",
":",
"next_token",
"=",
"self",
".",
"stream",
".",
"move",
"(",
")",
"assert",
"next_token",
".",
"kind",
"==",
"kind"
] | 42.75 | 4.75 |
def database_set_properties(object_id, input_params={}, always_retry=True, **kwargs):
"""
Invokes the /database-xxxx/setProperties API method.
For more info, see: https://wiki.dnanexus.com/API-Specification-v1.0.0/Properties#API-method%3A-%2Fclass-xxxx%2FsetProperties
"""
return DXHTTPRequest('/%s/setProperties' % object_id, input_params, always_retry=always_retry, **kwargs) | [
"def",
"database_set_properties",
"(",
"object_id",
",",
"input_params",
"=",
"{",
"}",
",",
"always_retry",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"DXHTTPRequest",
"(",
"'/%s/setProperties'",
"%",
"object_id",
",",
"input_params",
",",
"alw... | 56 | 36.857143 |
def start(self, stages=None):
"""
Makes the ``Piper`` ready to return results. This involves starting the
the provided ``NuMap`` instance. If multiple ``Pipers`` share a
``NuMap`` instance the order in which these ``Pipers`` are started is
important. The valid order is upstream before downstream. The ``NuMap``
instance can only be started once, but the process can be done in 2
stages. This methods "stages" argument is a ``tuple`` which can contain
any the numbers ``0`` and/or ``1`` and/or ``2`` specifying which stage
of the start routine should be carried out:
- stage 0 - creates the needed ``itertools.tee`` objects.
- stage 1 - activates ``NuMap`` pool. A call to ``next`` will block..
- stage 2 - activates ``NuMap`` pool managers.
If this ``Piper`` shares a ``NuMap`` with other ``Pipers`` the proper
way to start them is to start them in a valid postorder with stages
``(0, 1)`` and ``(2,)`` separately.
Arguments:
- stages(tuple) [default: ``(0,)`` if linear; ``(0,1,2)`` if parallel]
Performs the specified stages of the start of a ``Piper`` instance.
Stage ``0`` is necessary and sufficient to start a linear ``Piper``
which uses an ``itertools.imap``. Stages ``1`` and ``2`` are
required to start any parallel ``Piper`` instance.
"""
# defaults differ linear vs. parallel
stages = stages or ((0,) if self.imap is imap else (0, 1, 2))
if not self.connected:
self.log.error('Piper %s is not connected.' % self)
raise PiperError('Piper %s is not connected.' % self)
if not self.started:
if 0 in stages:
self.tees.extend(tee(self, self.tee_num))
if hasattr(self.imap, 'start'):
# parallel piper
self.imap.start(stages)
if 2 in stages:
self.log.debug('Piper %s has been started using %s' % \
(self, self.imap))
self.started = True
else:
# linear piper
self.log.debug('Piper %s has been started using %s' % \
(self, self.imap))
self.started = True | [
"def",
"start",
"(",
"self",
",",
"stages",
"=",
"None",
")",
":",
"# defaults differ linear vs. parallel",
"stages",
"=",
"stages",
"or",
"(",
"(",
"0",
",",
")",
"if",
"self",
".",
"imap",
"is",
"imap",
"else",
"(",
"0",
",",
"1",
",",
"2",
")",
... | 46.68 | 23.32 |
def create_node(hostname, username, password, name, address):
'''
Create a new node if it does not already exist.
hostname
The host/address of the bigip device
username
The iControl REST username
password
The iControl REST password
name
The name of the node to create
address
The address of the node
'''
ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}
if __opts__['test']:
return _test_output(ret, 'create', params={
'hostname': hostname,
'username': username,
'password': password,
'name': name,
'address': address
}
)
#is this node currently configured?
existing = __salt__['bigip.list_node'](hostname, username, password, name)
# if it exists
if existing['code'] == 200:
ret['result'] = True
ret['comment'] = 'A node by this name currently exists. No change made.'
# if it doesn't exist
elif existing['code'] == 404:
response = __salt__['bigip.create_node'](hostname, username, password, name, address)
ret['result'] = True
ret['changes']['old'] = {}
ret['changes']['new'] = response['content']
ret['comment'] = 'Node was successfully created.'
# else something else was returned
else:
ret = _load_result(existing, ret)
return ret | [
"def",
"create_node",
"(",
"hostname",
",",
"username",
",",
"password",
",",
"name",
",",
"address",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"False",
",",
"'comment'",
":",
"''",
"}",
... | 27.019608 | 22.352941 |
def _cmd(self, cmd, *args, **kw):
'''
write a single command, with variable number of arguments. after the
command, the device must return ACK
'''
ok = kw.setdefault('ok', False)
self._wakeup()
if args:
cmd = "%s %s" % (cmd, ' '.join(str(a) for a in args))
for i in xrange(3):
log.info("send: " + cmd)
self.port.write(cmd + '\n')
if ok:
ack = self.port.read(len(self.OK)) # read OK
log_raw('read', ack)
if ack == self.OK:
return
else:
ack = self.port.read(len(self.ACK)) # read ACK
log_raw('read', ack)
if ack == self.ACK:
return
raise NoDeviceException('Can not access weather station') | [
"def",
"_cmd",
"(",
"self",
",",
"cmd",
",",
"*",
"args",
",",
"*",
"*",
"kw",
")",
":",
"ok",
"=",
"kw",
".",
"setdefault",
"(",
"'ok'",
",",
"False",
")",
"self",
".",
"_wakeup",
"(",
")",
"if",
"args",
":",
"cmd",
"=",
"\"%s %s\"",
"%",
"(... | 34.791667 | 16.291667 |
def new_text_cell(text=None):
"""Create a new text cell."""
cell = NotebookNode()
if text is not None:
cell.text = unicode(text)
cell.cell_type = u'text'
return cell | [
"def",
"new_text_cell",
"(",
"text",
"=",
"None",
")",
":",
"cell",
"=",
"NotebookNode",
"(",
")",
"if",
"text",
"is",
"not",
"None",
":",
"cell",
".",
"text",
"=",
"unicode",
"(",
"text",
")",
"cell",
".",
"cell_type",
"=",
"u'text'",
"return",
"cel... | 26.714286 | 12.285714 |
def save(name, filter=False):
'''
Save the register to <salt cachedir>/thorium/saves/<name>, or to an
absolute path.
If an absolute path is specified, then the directory will be created
non-recursively if it doesn't exist.
USAGE:
.. code-block:: yaml
foo:
file.save
/tmp/foo:
file.save
'''
ret = {'name': name,
'changes': {},
'comment': '',
'result': True}
if name.startswith('/'):
tgt_dir = os.path.dirname(name)
fn_ = name
else:
tgt_dir = os.path.join(__opts__['cachedir'], 'thorium', 'saves')
fn_ = os.path.join(tgt_dir, name)
if not os.path.isdir(tgt_dir):
os.makedirs(tgt_dir)
with salt.utils.files.fopen(fn_, 'w+') as fp_:
if filter is True:
salt.utils.json.dump(salt.utils.data.simple_types_filter(__reg__), fp_)
else:
salt.utils.json.dump(__reg__, fp_)
return ret | [
"def",
"save",
"(",
"name",
",",
"filter",
"=",
"False",
")",
":",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'comment'",
":",
"''",
",",
"'result'",
":",
"True",
"}",
"if",
"name",
".",
"startswith",
"(",
"'/'... | 26.277778 | 22.333333 |
def separate_operations(document_ast: DocumentNode) -> Dict[str, DocumentNode]:
"""Separate operations in a given AST document.
This function accepts a single AST document which may contain many operations and
fragments and returns a collection of AST documents each of which contains a single
operation as well the fragment definitions it refers to.
"""
# Populate metadata and build a dependency graph.
visitor = SeparateOperations()
visit(document_ast, visitor)
operations = visitor.operations
fragments = visitor.fragments
positions = visitor.positions
dep_graph = visitor.dep_graph
# For each operation, produce a new synthesized AST which includes only what is
# necessary for completing that operation.
separated_document_asts = {}
for operation in operations:
operation_name = op_name(operation)
dependencies: Set[str] = set()
collect_transitive_dependencies(dependencies, dep_graph, operation_name)
# The list of definition nodes to be included for this operation, sorted to
# retain the same order as the original document.
definitions: List[ExecutableDefinitionNode] = [operation]
for name in dependencies:
definitions.append(fragments[name])
definitions.sort(key=lambda n: positions.get(n, 0))
separated_document_asts[operation_name] = DocumentNode(definitions=definitions)
return separated_document_asts | [
"def",
"separate_operations",
"(",
"document_ast",
":",
"DocumentNode",
")",
"->",
"Dict",
"[",
"str",
",",
"DocumentNode",
"]",
":",
"# Populate metadata and build a dependency graph.",
"visitor",
"=",
"SeparateOperations",
"(",
")",
"visit",
"(",
"document_ast",
","... | 42.5 | 21.441176 |
def get(self, hostport):
"""Get a Peer for the given destination.
A new Peer is added to the peer heap and returned if one does
not already exist for the given host-port. Otherwise, the
existing Peer is returned.
"""
assert hostport, "hostport is required"
assert isinstance(hostport, basestring), "hostport must be a string"
if hostport not in self._peers:
self._add(hostport)
return self._peers[hostport] | [
"def",
"get",
"(",
"self",
",",
"hostport",
")",
":",
"assert",
"hostport",
",",
"\"hostport is required\"",
"assert",
"isinstance",
"(",
"hostport",
",",
"basestring",
")",
",",
"\"hostport must be a string\"",
"if",
"hostport",
"not",
"in",
"self",
".",
"_peer... | 34.285714 | 18.071429 |
def with_metaclass(meta, *bases):
"""Python 2 and 3 compatible way to do meta classes"""
class metaclass(meta):
def __new__(cls, name, this_bases, d):
return meta(name, bases, d)
return type.__new__(metaclass, "temporary_class", (), {}) | [
"def",
"with_metaclass",
"(",
"meta",
",",
"*",
"bases",
")",
":",
"class",
"metaclass",
"(",
"meta",
")",
":",
"def",
"__new__",
"(",
"cls",
",",
"name",
",",
"this_bases",
",",
"d",
")",
":",
"return",
"meta",
"(",
"name",
",",
"bases",
",",
"d",... | 32.875 | 16.125 |
def update(self, json_state):
"""Update the json data from a dictionary.
Only updates if it already exists in the device.
"""
self._json_state.update(
{k: json_state[k] for k in json_state if self._json_state.get(k)})
self._update_name() | [
"def",
"update",
"(",
"self",
",",
"json_state",
")",
":",
"self",
".",
"_json_state",
".",
"update",
"(",
"{",
"k",
":",
"json_state",
"[",
"k",
"]",
"for",
"k",
"in",
"json_state",
"if",
"self",
".",
"_json_state",
".",
"get",
"(",
"k",
")",
"}",... | 35.375 | 15.75 |
def create_table_level(self):
"""Create the QTableView that will hold the level model."""
self.table_level = QTableView()
self.table_level.setEditTriggers(QTableWidget.NoEditTriggers)
self.table_level.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.table_level.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.table_level.setFrameStyle(QFrame.Plain)
self.table_level.horizontalHeader().sectionResized.connect(
self._index_resized)
self.table_level.verticalHeader().sectionResized.connect(
self._header_resized)
self.table_level.setItemDelegate(QItemDelegate())
self.layout.addWidget(self.table_level, 0, 0)
self.table_level.setContentsMargins(0, 0, 0, 0)
self.table_level.horizontalHeader().sectionClicked.connect(
self.sortByIndex) | [
"def",
"create_table_level",
"(",
"self",
")",
":",
"self",
".",
"table_level",
"=",
"QTableView",
"(",
")",
"self",
".",
"table_level",
".",
"setEditTriggers",
"(",
"QTableWidget",
".",
"NoEditTriggers",
")",
"self",
".",
"table_level",
".",
"setHorizontalScrol... | 63.4375 | 22.9375 |
def from_config_specs(cls, config_specs, prepare=True):
"""
Alternate constructor that merges config attributes from
``$HOME/.bangrc`` and :attr:`config_specs` into a single
:class:`Config` object.
The first (and potentially *only* spec) in :attr:`config_specs` should
be main configuration file for the stack to be deployed. The returned
object's :attr:`filepath` will be set to the absolute path of the first
config file.
If multiple config specs are supplied, their values are merged together
in the order specified in :attr:`config_specs` - That is, later values
override earlier values.
:param config_specs: List of config specs.
:type config_specs: :class:`list` of :class:`str`
:param bool prepare: Flag to control whether or not :meth:`prepare` is
called automatically before returning the object.
:rtype: :class:`Config`
"""
bangrc = parse_bangrc()
config_dir = bangrc.get(A.CONFIG_DIR, DEFAULT_CONFIG_DIR)
config_paths = [
resolve_config_spec(cs, config_dir) for cs in config_specs
]
config = cls()
config.update(bangrc)
if config_paths:
config.filepath = config_paths[0]
for c in config_paths:
with open(c) as f:
deep_merge_dicts(config, yaml.safe_load(f))
if prepare:
config.prepare()
return config | [
"def",
"from_config_specs",
"(",
"cls",
",",
"config_specs",
",",
"prepare",
"=",
"True",
")",
":",
"bangrc",
"=",
"parse_bangrc",
"(",
")",
"config_dir",
"=",
"bangrc",
".",
"get",
"(",
"A",
".",
"CONFIG_DIR",
",",
"DEFAULT_CONFIG_DIR",
")",
"config_paths",... | 37.923077 | 22.487179 |
def do_set(self, args: argparse.Namespace) -> None:
"""Set a settable parameter or show current settings of parameters"""
# Check if param was passed in
if not args.param:
return self.show(args)
param = utils.norm_fold(args.param.strip())
# Check if value was passed in
if not args.value:
return self.show(args, param)
value = args.value
# Check if param points to just one settable
if param not in self.settable:
hits = [p for p in self.settable if p.startswith(param)]
if len(hits) == 1:
param = hits[0]
else:
return self.show(args, param)
# Update the settable's value
current_value = getattr(self, param)
value = utils.cast(current_value, value)
setattr(self, param, value)
self.poutput('{} - was: {}\nnow: {}\n'.format(param, current_value, value))
# See if we need to call a change hook for this settable
if current_value != value:
onchange_hook = getattr(self, '_onchange_{}'.format(param), None)
if onchange_hook is not None:
onchange_hook(old=current_value, new=value) | [
"def",
"do_set",
"(",
"self",
",",
"args",
":",
"argparse",
".",
"Namespace",
")",
"->",
"None",
":",
"# Check if param was passed in",
"if",
"not",
"args",
".",
"param",
":",
"return",
"self",
".",
"show",
"(",
"args",
")",
"param",
"=",
"utils",
".",
... | 36.69697 | 16.787879 |
def _getScriptSettingsFromIniFile(policy_info):
'''
helper function to parse/read a GPO Startup/Shutdown script file
psscript.ini and script.ini file definitions are here
https://msdn.microsoft.com/en-us/library/ff842529.aspx
https://msdn.microsoft.com/en-us/library/dd303238.aspx
'''
_existingData = None
if os.path.isfile(policy_info['ScriptIni']['IniPath']):
with salt.utils.files.fopen(policy_info['ScriptIni']['IniPath'], 'rb') as fhr:
_existingData = fhr.read()
if _existingData:
try:
_existingData = deserialize(_existingData.decode('utf-16-le').lstrip('\ufeff'))
log.debug('Have deserialized data %s', _existingData)
except Exception as error:
log.exception('An error occurred attempting to deserialize data for %s', policy_info['Policy'])
raise CommandExecutionError(error)
if 'Section' in policy_info['ScriptIni'] and policy_info['ScriptIni']['Section'].lower() in [z.lower() for z in _existingData.keys()]:
if 'SettingName' in policy_info['ScriptIni']:
log.debug('Need to look for %s', policy_info['ScriptIni']['SettingName'])
if policy_info['ScriptIni']['SettingName'].lower() in [z.lower() for z in _existingData[policy_info['ScriptIni']['Section']].keys()]:
return _existingData[policy_info['ScriptIni']['Section']][policy_info['ScriptIni']['SettingName'].lower()]
else:
return None
else:
return _existingData[policy_info['ScriptIni']['Section']]
else:
return None
return None | [
"def",
"_getScriptSettingsFromIniFile",
"(",
"policy_info",
")",
":",
"_existingData",
"=",
"None",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"policy_info",
"[",
"'ScriptIni'",
"]",
"[",
"'IniPath'",
"]",
")",
":",
"with",
"salt",
".",
"utils",
".",
"fi... | 53.78125 | 32.84375 |
def _prefer_package(self, package):
""" Prefer a serializtion handler over other handlers.
:param str package: The name of the package to use
:raises ValueError: When the given package name is not one of the available
supported serializtion packages for this handler
:return: The name of the serialization handler
:rtype: str
"""
if isinstance(package, str) and package != self.imported:
if package not in self.packages:
raise ValueError(
f"preferred package {package!r} does not exist, allowed are "
f"{self.packages!r}"
)
# clear out current serialization handler (if exists)
if hasattr(self, "_handler"):
del self._handler
# manually update imported handlers with a given preference
self._imported = self._discover_import(prefer=package)
return package
return self.imported | [
"def",
"_prefer_package",
"(",
"self",
",",
"package",
")",
":",
"if",
"isinstance",
"(",
"package",
",",
"str",
")",
"and",
"package",
"!=",
"self",
".",
"imported",
":",
"if",
"package",
"not",
"in",
"self",
".",
"packages",
":",
"raise",
"ValueError",... | 43.130435 | 18.130435 |
def _get_team_abbreviation(self, team):
"""
Retrieve team's abbreviation.
The team's abbreviation is embedded within the 'school_name' tag and
requires special parsing as it is located in the middle of a URI. The
abbreviation is returned for the requested school.
Parameters
----------
team : PyQuery object
A PyQuery object representing a single row in a table on the
conference page.
Returns
-------
string
Returns a string of the team's abbreviation, such as 'PURDUE'.
"""
name_tag = team('th[data-stat="school_name"] a')
team_abbreviation = re.sub(r'.*/cfb/schools/', '', str(name_tag))
team_abbreviation = re.sub(r'/.*', '', team_abbreviation)
return team_abbreviation | [
"def",
"_get_team_abbreviation",
"(",
"self",
",",
"team",
")",
":",
"name_tag",
"=",
"team",
"(",
"'th[data-stat=\"school_name\"] a'",
")",
"team_abbreviation",
"=",
"re",
".",
"sub",
"(",
"r'.*/cfb/schools/'",
",",
"''",
",",
"str",
"(",
"name_tag",
")",
")"... | 35.565217 | 22 |
def props_to_image(regionprops, shape, prop):
r"""
Creates an image with each region colored according the specified ``prop``,
as obtained by ``regionprops_3d``.
Parameters
----------
regionprops : list
This is a list of properties for each region that is computed
by PoreSpy's ``regionprops_3D`` or Skimage's ``regionsprops``.
shape : array_like
The shape of the original image for which ``regionprops`` was obtained.
prop : string
The region property of interest. Can be a scalar item such as 'volume'
in which case the the regions will be colored by their respective
volumes, or can be an image-type property such as 'border' or
'convex_image', which will return an image composed of the sub-images.
Returns
-------
image : ND-array
An ND-image the same size as the original image, with each region
represented by the values specified in ``prop``.
See Also
--------
props_to_DataFrame
regionprops_3d
"""
im = sp.zeros(shape=shape)
for r in regionprops:
if prop == 'convex':
mask = r.convex_image
else:
mask = r.image
temp = mask * r[prop]
s = bbox_to_slices(r.bbox)
im[s] += temp
return im | [
"def",
"props_to_image",
"(",
"regionprops",
",",
"shape",
",",
"prop",
")",
":",
"im",
"=",
"sp",
".",
"zeros",
"(",
"shape",
"=",
"shape",
")",
"for",
"r",
"in",
"regionprops",
":",
"if",
"prop",
"==",
"'convex'",
":",
"mask",
"=",
"r",
".",
"con... | 30.238095 | 23.928571 |
def monthly_build_list_regex(self):
"""Return the regex for the folder containing builds of a month."""
# Regex for possible builds for the given date
return r'nightly/%(YEAR)s/%(MONTH)s/' % {
'YEAR': self.date.year,
'MONTH': str(self.date.month).zfill(2)} | [
"def",
"monthly_build_list_regex",
"(",
"self",
")",
":",
"# Regex for possible builds for the given date",
"return",
"r'nightly/%(YEAR)s/%(MONTH)s/'",
"%",
"{",
"'YEAR'",
":",
"self",
".",
"date",
".",
"year",
",",
"'MONTH'",
":",
"str",
"(",
"self",
".",
"date",
... | 49.833333 | 7.333333 |
async def do_run_task(context, run_cancellable, to_cancellable_process):
"""Run the task logic.
Returns the integer status of the task.
args:
context (scriptworker.context.Context): the scriptworker context.
run_cancellable (typing.Callable): wraps future such that it'll cancel upon worker shutdown
to_cancellable_process (typing.Callable): wraps ``TaskProcess`` such that it will stop if the worker is shutting
down
Raises:
Exception: on unexpected exception.
Returns:
int: exit status
"""
status = 0
try:
if context.config['verify_chain_of_trust']:
chain = ChainOfTrust(context, context.config['cot_job_type'])
await run_cancellable(verify_chain_of_trust(chain))
status = await run_task(context, to_cancellable_process)
generate_cot(context)
except asyncio.CancelledError:
log.info("CoT cancelled asynchronously")
raise WorkerShutdownDuringTask
except ScriptWorkerException as e:
status = worst_level(status, e.exit_code)
log.error("Hit ScriptWorkerException: {}".format(e))
except Exception as e:
log.exception("SCRIPTWORKER_UNEXPECTED_EXCEPTION task {}".format(e))
raise
return status | [
"async",
"def",
"do_run_task",
"(",
"context",
",",
"run_cancellable",
",",
"to_cancellable_process",
")",
":",
"status",
"=",
"0",
"try",
":",
"if",
"context",
".",
"config",
"[",
"'verify_chain_of_trust'",
"]",
":",
"chain",
"=",
"ChainOfTrust",
"(",
"contex... | 35.885714 | 24.142857 |
def doc_unwrap(raw_doc):
"""
Applies two transformations to raw_doc:
1. N consecutive newlines are converted into N-1 newlines.
2. A lone newline is converted to a space, which basically unwraps text.
Returns a new string, or None if the input was None.
"""
if raw_doc is None:
return None
docstring = ''
consecutive_newlines = 0
# Remove all leading and trailing whitespace in the documentation block
for c in raw_doc.strip():
if c == '\n':
consecutive_newlines += 1
if consecutive_newlines > 1:
docstring += c
else:
if consecutive_newlines == 1:
docstring += ' '
consecutive_newlines = 0
docstring += c
return docstring | [
"def",
"doc_unwrap",
"(",
"raw_doc",
")",
":",
"if",
"raw_doc",
"is",
"None",
":",
"return",
"None",
"docstring",
"=",
"''",
"consecutive_newlines",
"=",
"0",
"# Remove all leading and trailing whitespace in the documentation block",
"for",
"c",
"in",
"raw_doc",
".",
... | 31.791667 | 14.875 |
def colour_hsv(self):
"""Return colour as HSV value"""
hexvalue = self.status()[self.DPS][self.DPS_INDEX_COLOUR]
return BulbDevice._hexvalue_to_hsv(hexvalue) | [
"def",
"colour_hsv",
"(",
"self",
")",
":",
"hexvalue",
"=",
"self",
".",
"status",
"(",
")",
"[",
"self",
".",
"DPS",
"]",
"[",
"self",
".",
"DPS_INDEX_COLOUR",
"]",
"return",
"BulbDevice",
".",
"_hexvalue_to_hsv",
"(",
"hexvalue",
")"
] | 44.5 | 14 |
def ks_unif_durbin_matrix(samples, statistic):
"""
Calculates the probability that the statistic is less than the given value,
using a fairly accurate implementation of the Durbin's matrix formula.
Not an exact transliteration of the Marsaglia code, but using the same
ideas. Assumes samples > 0. See: doi:10.18637/jss.v008.i18.
"""
# Construct the Durbin matrix.
h, k = modf(samples * statistic)
k = int(k)
h = 1 - h
m = 2 * k + 1
A = tri(m, k=1)
hs = h ** arange(1, m + 1)
A[:, 0] -= hs
A[-1] -= hs[::-1]
if h > .5:
A[-1, 0] += (2 * h - 1) ** m
A /= fromfunction(lambda i, j: gamma(fmax(1, i - j + 2)), (m, m))
# Calculate A ** n, expressed as P * 2 ** eP to avoid overflows.
P = identity(m)
s = samples
eA, eP = 0, 0
while s != 1:
s, b = divmod(s, 2)
if b == 1:
P = dot(P, A)
eP += eA
if P[k, k] > factor:
P /= factor
eP += shift
A = dot(A, A)
eA *= 2
if A[k, k] > factor:
A /= factor
eA += shift
P = dot(P, A)
eP += eA
# Calculate n! / n ** n * P[k, k].
x = P[k, k]
for i in arange(1, samples + 1):
x *= i / samples
if x < factorr:
x *= factor
eP -= shift
return x * 2 ** eP | [
"def",
"ks_unif_durbin_matrix",
"(",
"samples",
",",
"statistic",
")",
":",
"# Construct the Durbin matrix.",
"h",
",",
"k",
"=",
"modf",
"(",
"samples",
"*",
"statistic",
")",
"k",
"=",
"int",
"(",
"k",
")",
"h",
"=",
"1",
"-",
"h",
"m",
"=",
"2",
"... | 28.191489 | 18.617021 |
def DEFINE_choice(self, name, default, choices, help, constant=False):
"""A helper for defining choice string options."""
self.AddOption(
type_info.Choice(
name=name, default=default, choices=choices, description=help),
constant=constant) | [
"def",
"DEFINE_choice",
"(",
"self",
",",
"name",
",",
"default",
",",
"choices",
",",
"help",
",",
"constant",
"=",
"False",
")",
":",
"self",
".",
"AddOption",
"(",
"type_info",
".",
"Choice",
"(",
"name",
"=",
"name",
",",
"default",
"=",
"default",... | 44.833333 | 19.166667 |
def configure(self, options, conf):
"""
Configure plugin.
"""
super(LeakDetectorPlugin, self).configure(options, conf)
if options.leak_detector_level:
self.reporting_level = int(options.leak_detector_level)
self.report_delta = options.leak_detector_report_delta
self.patch_mock = options.leak_detector_patch_mock
self.ignore_patterns = options.leak_detector_ignore_patterns
self.save_traceback = options.leak_detector_save_traceback
self.multiprocessing_enabled = bool(getattr(options, 'multiprocess_workers', False)) | [
"def",
"configure",
"(",
"self",
",",
"options",
",",
"conf",
")",
":",
"super",
"(",
"LeakDetectorPlugin",
",",
"self",
")",
".",
"configure",
"(",
"options",
",",
"conf",
")",
"if",
"options",
".",
"leak_detector_level",
":",
"self",
".",
"reporting_leve... | 49.833333 | 18.166667 |
def row(cls, pitch, pa, pitch_list, ball_tally, strike_tally):
    """
    Pitching Result
    Pitch f/x fields: https://fastballs.wordpress.com/category/pitchfx-glossary/
    :param pitch: pitch object(type:Beautifulsoup)
    :param pa: At bat data for pa(dict)
    :param pitch_list: Pitching
    :param ball_tally: Ball telly
    :param strike_tally: Strike telly
    :return: {
        'retro_game_id': Retrosheet Game id
        'game_type': Game Type(S/R/F/D/L/W)
        'game_type_des': Game Type Description
        (Spring Training or Regular Season or Wild-card Game or Divisional Series or LCS or World Series)
        'st_fl': Spring Training FLAG(T or F)
        'regseason_fl': Regular Season FLAG(T or F)
        'playoff_fl': Play Off Flag(T or F)
        'local_game_time': Game Time(UTC -5)
        'game_id': Game Id
        'home_team_id': Home Team Id
        'home_team_lg': Home Team league(AL or NL)
        'away_team_id': Away Team Id
        'away_team_lg': Away Team league(AL or NL)
        'home_team_name': Home Team Name
        'away_team_name': Away Team Name
        'home_team_name_full': Home Team Name(Full Name)
        'away_team_name_full': Away Team Name(Full Name)
        'interleague_fl': Inter League Flag(T or F)
        'park_id': Park Id
        'park_name': Park Name
        'park_loc': Park Location
        'inning_number': Inning Number
        'bat_home_id': Batter Id
        'outs_ct': Out count
        'pit_mlbid': Pitcher Id
        'pit_first_name': Pitcher First Name
        'pit_last_name': Pitcher Last Name
        'pit_box_name': Pitcher Box name
        'pit_hand_cd': Pitcher Throw Hand(R or L)
        'bat_first_name': Batter First Name
        'bat_last_name': Batter Last Name
        'bat_box_name': Batter Box name
        'ab_number': At Bat Sequence Number in Game
        'start_bases': Bases(Before At Bat)
        (___, 1__, 12_, 123, etc...)
        'end_bases': Bases(After At Bat)
        (___, 1__, 12_, 123, etc...)
        'event_outs_ct': Event Out Count
        'pa_ball_ct': Plate appearance Ball count
        'pa_strike_ct': Plate appearance Strike count
        'pitch_seq': Pitch Sequence(Strike or Ball) ex: B, SSB, BBSBS etc...
        'pa_terminal_fl': Plate appearance Terminate Flag(T or F)
        'pa_event_cd': Event Code for Retrosheet http://www.retrosheet.org/datause.txt
        'pitch_res': Pitch Response(S or B or X) X = In Play
        'pitch_des': Pitch Description
        'pitch_id': Pitch Id
        'x': Point for X(inches)
        'y': Point for Y(inches)
        'start_speed': The pitch speed(MPH) at the initial point
        'end_speed': The pitch speed(MPH) at the current batters
        'sz_top': The distance in feet from the ground to the top of the current batter’s
        'sz_bot': The distance in feet from the ground to the bottom of the current batter’s
        'pfx_x': The horizontal movement, in inches, of the pitch between the release point and home plate
        'pfx_z': The vertical movement, in inches, of the pitch between the release point and home plate
        'px': The left/right distance, in feet, of the pitch from the middle of the plate as it crossed home plate
        'pz': The height of the pitch in feet as it crossed the front of home plate
        'x0': The left/right distance, in feet, of the pitch, measured at the initial point
        'y0': The distance in feet from home plate where the PITCHf/x system is set to measure the initial parameters
        'z0': The height, in feet, of the pitch, measured at the initial point
        'vx0': The velocity of the pitch, in feet per second, in three dimensions, measured at the initial point
        'vy0': The velocity of the pitch, in feet per second, in three dimensions, measured at the initial point
        'vz0': The velocity of the pitch, in feet per second, in three dimensions, measured at the initial point
        'ax': The acceleration of the pitch, in feet per second per second, in three dimensions, measured at the initial point
        'ay': The acceleration of the pitch, in feet per second per second, in three dimensions, measured at the initial point
        'az': The acceleration of the pitch, in feet per second per second, in three dimensions, measured at the initial point
        'break_y': The distance in feet from the ground to the top of the current batter’s
        'break_angle': The angle, in degrees, from vertical to the straight line path from the release point to where the pitch crossed the front of home plate, as seen from the catcher’s/umpire’s perspective
        'break_length': The measurement of the greatest distance, in inches, between the trajectory of the pitch at any point between the release point and the front of home plate
        'pitch_type': Pitch Type
        'pitch_type_seq': Pitch type Sequence, ex:FF|CU|FF
        'type_confidence': Pitch type confidence
        'zone': Pitch Zone
        'spin_dir': Pitch Spin Dir
        'spin_rate': Pitch Spin Rate
        'sv_id': Pitch in the air(From Datetime_To Datetime)
        'event_num': Event Sequence Number(atbat, pitch, action)
    }
    """
    # Result code ('S'/'B'/'X') and pitch type for THIS pitch; each is
    # appended to the running per-plate-appearance sequence taken from
    # pitch_list.  NOTE: the comprehension variable `pitch` shadows the
    # `pitch` parameter only inside the comprehension scope (Python 3).
    pitch_res = MlbamUtil.get_attribute_stats(pitch, 'type', str, MlbamConst.UNKNOWN_FULL)
    pitch_seq = [pitch['pitch_res'] for pitch in pitch_list]
    pitch_seq.extend([pitch_res])
    pitch_type = MlbamUtil.get_attribute_stats(pitch, 'pitch_type', str, MlbamConst.UNKNOWN_SHORT)
    pitch_type_seq = [pitch['pitch_type'] for pitch in pitch_list]
    pitch_type_seq.extend([pitch_type])
    # OrderedDict keeps the output column order stable for CSV-style writers.
    pitching = OrderedDict()
    # --- game / venue context, copied straight from the plate-appearance row ---
    pitching['retro_game_id'] = pa['retro_game_id']
    pitching['year'] = pa['year']
    pitching['month'] = pa['month']
    pitching['day'] = pa['day']
    pitching['st_fl'] = pa['st_fl']
    pitching['regseason_fl'] = pa['regseason_fl']
    pitching['playoff_fl'] = pa['playoff_fl']
    pitching['game_type'] = pa['game_type']
    pitching['game_type_des'] = pa['game_type_des']
    pitching['local_game_time'] = pa['local_game_time']
    pitching['game_id'] = pa['game_id']
    pitching['home_team_id'] = pa['home_team_id']
    pitching['away_team_id'] = pa['away_team_id']
    pitching['home_team_lg'] = pa['home_team_lg']
    pitching['away_team_lg'] = pa['away_team_lg']
    pitching['interleague_fl'] = pa['interleague_fl']
    pitching['park_id'] = pa['park_id']
    pitching['park_name'] = pa['park_name']
    pitching['park_location'] = pa['park_location']
    # --- matchup state (inning, pitcher, batter) ---
    pitching['inning_number'] = pa['inning_number']
    pitching['bat_home_id'] = pa['bat_home_id']
    pitching['outs_ct'] = pa['outs_ct']
    pitching['pit_mlbid'] = pa['pit_mlbid']
    pitching['pit_first_name'] = pa['pit_first_name']
    pitching['pit_last_name'] = pa['pit_last_name']
    pitching['pit_box_name'] = pa['pit_box_name']
    pitching['pit_hand_cd'] = pa['pit_hand_cd']
    pitching['bat_mlbid'] = pa['bat_mlbid']
    pitching['bat_first_name'] = pa['bat_first_name']
    pitching['bat_last_name'] = pa['bat_last_name']
    pitching['bat_box_name'] = pa['bat_box_name']
    pitching['bat_hand_cd'] = pa['bat_hand_cd']
    pitching['ab_number'] = pa['ab_number']
    pitching['start_bases'] = pa['start_bases']
    pitching['end_bases'] = pa['end_bases']
    pitching['event_outs_ct'] = pa['event_outs_ct']
    # --- count / sequence state of the plate appearance ---
    pitching['pa_ball_ct'] = ball_tally
    pitching['pa_strike_ct'] = strike_tally
    pitching['pitch_seq'] = ''.join(pitch_seq)
    pitching['pa_terminal_fl'] = cls.is_pa_terminal(ball_tally, strike_tally, pitch_res, pa['event_cd'])
    pitching['pa_event_cd'] = pa['event_cd']
    # --- PITCHf/x measurements parsed from the pitch element ---
    pitching['pitch_res'] = pitch_res
    pitching['pitch_des'] = MlbamUtil.get_attribute_stats(pitch, 'des', str, MlbamConst.UNKNOWN_FULL)
    pitching['pitch_id'] = MlbamUtil.get_attribute_stats(pitch, 'id', int, None)
    pitching['x'] = MlbamUtil.get_attribute_stats(pitch, 'x', float, None)
    pitching['y'] = MlbamUtil.get_attribute_stats(pitch, 'y', float, None)
    pitching['start_speed'] = MlbamUtil.get_attribute_stats(pitch, 'start_speed', float, None)
    pitching['end_speed'] = MlbamUtil.get_attribute_stats(pitch, 'end_speed', float, None)
    pitching['sz_top'] = MlbamUtil.get_attribute_stats(pitch, 'sz_top', float, None)
    pitching['sz_bot'] = MlbamUtil.get_attribute_stats(pitch, 'sz_bot', float, None)
    pitching['pfx_x'] = MlbamUtil.get_attribute_stats(pitch, 'pfx_x', float, None)
    pitching['pfx_z'] = MlbamUtil.get_attribute_stats(pitch, 'pfx_z', float, None)
    pitching['px'] = MlbamUtil.get_attribute_stats(pitch, 'px', float, None)
    pitching['pz'] = MlbamUtil.get_attribute_stats(pitch, 'pz', float, None)
    pitching['x0'] = MlbamUtil.get_attribute_stats(pitch, 'x0', float, None)
    pitching['y0'] = MlbamUtil.get_attribute_stats(pitch, 'y0', float, None)
    pitching['z0'] = MlbamUtil.get_attribute_stats(pitch, 'z0', float, None)
    pitching['vx0'] = MlbamUtil.get_attribute_stats(pitch, 'vx0', float, None)
    pitching['vy0'] = MlbamUtil.get_attribute_stats(pitch, 'vy0', float, None)
    pitching['vz0'] = MlbamUtil.get_attribute_stats(pitch, 'vz0', float, None)
    pitching['ax'] = MlbamUtil.get_attribute_stats(pitch, 'ax', float, None)
    pitching['ay'] = MlbamUtil.get_attribute_stats(pitch, 'ay', float, None)
    pitching['az'] = MlbamUtil.get_attribute_stats(pitch, 'az', float, None)
    pitching['break_y'] = MlbamUtil.get_attribute_stats(pitch, 'break_y', float, None)
    pitching['break_angle'] = MlbamUtil.get_attribute_stats(pitch, 'break_angle', float, None)
    pitching['break_length'] = MlbamUtil.get_attribute_stats(pitch, 'break_length', float, None)
    pitching['pitch_type'] = pitch_type
    pitching['pitch_type_seq'] = '|'.join(pitch_type_seq)
    pitching['type_confidence'] = MlbamUtil.get_attribute_stats(pitch, 'type_confidence', float, None)
    pitching['zone'] = MlbamUtil.get_attribute_stats(pitch, 'zone', float, None)
    pitching['spin_dir'] = MlbamUtil.get_attribute_stats(pitch, 'spin_dir', float, None)
    pitching['spin_rate'] = MlbamUtil.get_attribute_stats(pitch, 'spin_rate', float, None)
    pitching['sv_id'] = MlbamUtil.get_attribute_stats(pitch, 'sv_id', str, None)
    pitching['event_num'] = MlbamUtil.get_attribute_stats(pitch, 'event_num', int, -1)
    return pitching
"def",
"row",
"(",
"cls",
",",
"pitch",
",",
"pa",
",",
"pitch_list",
",",
"ball_tally",
",",
"strike_tally",
")",
":",
"pitch_res",
"=",
"MlbamUtil",
".",
"get_attribute_stats",
"(",
"pitch",
",",
"'type'",
",",
"str",
",",
"MlbamConst",
".",
"UNKNOWN_FUL... | 63.176471 | 25.8 |
def search_domain(self, searchterm):
    """Search for domains

    :type searchterm: str
    :rtype: list
    """
    domain_types = self.__mispdomaintypes()
    return self.__search(type_attribute=domain_types, value=searchterm)
"def",
"search_domain",
"(",
"self",
",",
"searchterm",
")",
":",
"return",
"self",
".",
"__search",
"(",
"type_attribute",
"=",
"self",
".",
"__mispdomaintypes",
"(",
")",
",",
"value",
"=",
"searchterm",
")"
] | 31.428571 | 16.285714 |
def example_reading_spec(self):
    """Data fields to store on disk and their decoders."""
    # Subclasses can override and/or extend.
    if self.is_processed_rewards_discrete:
        reward_dtype = tf.int64
    else:
        reward_dtype = tf.float32
    data_fields = {
        TIMESTEP_FIELD: tf.FixedLenFeature((1,), tf.int64),
        RAW_REWARD_FIELD: tf.FixedLenFeature((1,), tf.float32),
        PROCESSED_REWARD_FIELD: tf.FixedLenFeature((1,), reward_dtype),
        DONE_FIELD: tf.FixedLenFeature((1,), tf.int64),  # we wrote this as int.
        # Special treatment because we need to determine type and shape, also
        # enables classes to override.
        OBSERVATION_FIELD: self.observation_spec,
        ACTION_FIELD: self.action_spec,
    }
    data_items_to_decoders = {}
    for field in data_fields:
        data_items_to_decoders[field] = (
            tf.contrib.slim.tfexample_decoder.Tensor(field))
    return data_fields, data_items_to_decoders
"def",
"example_reading_spec",
"(",
"self",
")",
":",
"# Subclasses can override and/or extend.",
"processed_reward_type",
"=",
"tf",
".",
"float32",
"if",
"self",
".",
"is_processed_rewards_discrete",
":",
"processed_reward_type",
"=",
"tf",
".",
"int64",
"data_fields",
... | 34.62963 | 21 |
def literalize(self):
    """
    Return an equivalent expression in which NOT occurs only on literals.

    The transformation is applied recursively to every subexpression;
    if nothing changes, ``self`` is returned unmodified.
    """
    if self.isliteral:
        return self
    new_args = tuple(sub.literalize() for sub in self.args)
    if all(new is old for new, old in zip(new_args, self.args)):
        return self
    return self.__class__(*new_args)
return self.__class__(*args) | [
"def",
"literalize",
"(",
"self",
")",
":",
"if",
"self",
".",
"isliteral",
":",
"return",
"self",
"args",
"=",
"tuple",
"(",
"arg",
".",
"literalize",
"(",
")",
"for",
"arg",
"in",
"self",
".",
"args",
")",
"if",
"all",
"(",
"arg",
"is",
"self",
... | 32.75 | 16.083333 |
def denorm(self, arr):
    """Reverse the normalization done to a batch of images.

    Arguments:
        arr: of shape/size (N,3,sz,sz)
    """
    # Exact type check (not isinstance) is deliberate: subclasses are
    # converted through to_np just like the original implementation did.
    if type(arr) is not np.ndarray:
        arr = to_np(arr)
    # Promote a single image (3,sz,sz) to a batch of one.
    if arr.ndim == 3:
        arr = arr[None]
    # Move channels last: (N,3,sz,sz) -> (N,sz,sz,3) before denormalizing.
    return self.transform.denorm(np.rollaxis(arr, 1, 4))
"def",
"denorm",
"(",
"self",
",",
"arr",
")",
":",
"if",
"type",
"(",
"arr",
")",
"is",
"not",
"np",
".",
"ndarray",
":",
"arr",
"=",
"to_np",
"(",
"arr",
")",
"if",
"len",
"(",
"arr",
".",
"shape",
")",
"==",
"3",
":",
"arr",
"=",
"arr",
... | 34.888889 | 13.555556 |
def to_dict(self):
    """
    Create a JSON-serializable representation of the ISA.
    The dictionary representation is of the form::
        {
            "1Q": {
                "0": {
                    "type": "Xhalves"
                },
                "1": {
                    "type": "Xhalves",
                    "dead": True
                },
                ...
            },
            "2Q": {
                "1-4": {
                    "type": "CZ"
                },
                "1-5": {
                    "type": "CZ"
                },
                ...
            },
            ...
        }
    :return: A dictionary representation of self.
    :rtype: Dict[str, Any]
    """
    def _trimmed(obj, default_type):
        # type: (Union[Qubit,Edge], str) -> dict
        # Serialize only the fields that differ from their defaults.
        entry = {}
        if obj.type != default_type:
            entry["type"] = obj.type
        if obj.dead:
            entry["dead"] = obj.dead
        return entry

    one_q = {}
    for q in self.qubits:
        one_q["{}".format(q.id)] = _trimmed(q, DEFAULT_QUBIT_TYPE)
    two_q = {}
    for edge in self.edges:
        two_q["{}-{}".format(*edge.targets)] = _trimmed(edge, DEFAULT_EDGE_TYPE)
    return {"1Q": one_q, "2Q": two_q}
"def",
"to_dict",
"(",
"self",
")",
":",
"def",
"_maybe_configure",
"(",
"o",
",",
"t",
")",
":",
"# type: (Union[Qubit,Edge], str) -> dict",
"\"\"\"\n Exclude default values from generated dictionary.\n\n :param Union[Qubit,Edge] o: The object to serialize\n ... | 28.388889 | 18.462963 |
def check_url_accessibility(url, timeout=10):
    '''
    Check whether the URL accessible and returns HTTP 200 OK or not
    if not raises ValidationError
    '''
    # Bare 'localhost' is rewritten to an explicit loopback URL.
    if url == 'localhost':
        url = 'http://127.0.0.1'
    try:
        response = urllib2.urlopen(url, timeout=timeout)
        if response.getcode() == 200:
            return True
    except Exception:
        # Any network/HTTP error falls through to the failure report below.
        pass
    fail("URL '%s' is not accessible from this machine" % url)
"def",
"check_url_accessibility",
"(",
"url",
",",
"timeout",
"=",
"10",
")",
":",
"if",
"(",
"url",
"==",
"'localhost'",
")",
":",
"url",
"=",
"'http://127.0.0.1'",
"try",
":",
"req",
"=",
"urllib2",
".",
"urlopen",
"(",
"url",
",",
"timeout",
"=",
"t... | 30.857143 | 19.428571 |
def windowed_sum_slow(arrays, span, t=None, indices=None, tpowers=0,
                      period=None, subtract_mid=False):
    """Compute the windowed sum of the given arrays.

    Reference implementation: deliberately simple (and slow), used to
    validate the optimized ``windowed_sum()``.

    Parameters
    ----------
    arrays : tuple of arrays
        arrays to window
    span : int or array of ints
        The span to use for the sum at each point. If array is provided,
        it must be broadcastable with ``indices``
    indices : array
        the indices of the center of the desired windows. If ``None``,
        the indices are assumed to be ``range(len(arrays[0]))`` though
        these are not actually instantiated.
    t : array (optional)
        Times associated with the arrays
    tpowers : list (optional)
        Powers of t for each array sum
    period : float (optional)
        Period to use, if times are periodic. If supplied, input times
        must be arranged such that (t % period) is sorted!
    subtract_mid : boolean
        If true, then subtract the middle value from each sum

    Returns
    -------
    arrays : tuple of ndarrays
        arrays containing the windowed sum of each input array
    """
    span = np.asarray(span, dtype=int)
    if not np.all(span > 0):
        raise ValueError("span values must be positive")
    arrays = tuple(np.asarray(a) for a in arrays)
    N = arrays[0].size
    if any(a.shape != (N,) for a in arrays):
        raise ValueError("sizes of provided arrays must match")

    t_was_given = t is not None
    if t_was_given:
        t = np.asarray(t)
        if t.shape != (N,):
            raise ValueError("shape of t must match shape of arrays")
    else:
        t = np.ones(N)

    # Broadcast tpowers to one power per array.
    tpowers = tpowers + np.zeros(len(arrays))
    if len(tpowers) != len(arrays):
        raise ValueError("tpowers must be broadcastable with number of arrays")

    if period:
        if not t_was_given:
            raise ValueError("periodic requires t to be provided")
        t = t % period

    if indices is None:
        indices = np.arange(N)
    spans, indices = np.broadcast_arrays(span, indices)

    results = []
    for power, values in zip(tpowers, arrays):
        sums = []
        for center, width in np.broadcast(indices, spans):
            lo = center - width // 2
            if period:
                # Indices wrap around; each wrap adds one period to t.
                total = sum(values[j % N]
                            * (t[j % N] + (j // N) * period) ** power
                            for j in range(lo, lo + width)
                            if not (subtract_mid and j == center))
            else:
                # Clip the window to the valid index range.
                total = sum(values[j] * t[j] ** power
                            for j in range(max(0, lo), min(N, lo + width))
                            if not (subtract_mid and j == center))
            sums.append(total)
        results.append(np.asarray(sums))
    return tuple(results)
"def",
"windowed_sum_slow",
"(",
"arrays",
",",
"span",
",",
"t",
"=",
"None",
",",
"indices",
"=",
"None",
",",
"tpowers",
"=",
"0",
",",
"period",
"=",
"None",
",",
"subtract_mid",
"=",
"False",
")",
":",
"span",
"=",
"np",
".",
"asarray",
"(",
"... | 36.123457 | 19.493827 |
def truncate_selection(self, position_from):
    """Unselect read-only parts in shell, like prompt"""
    limit = self.get_position(position_from)
    cursor = self.textCursor()
    start = cursor.selectionStart()
    end = cursor.selectionEnd()
    # Clamp whichever selection end precedes the other to the limit.
    if start < end:
        start = max(limit, start)
    else:
        end = max(limit, end)
    self.set_selection(start, end)
"def",
"truncate_selection",
"(",
"self",
",",
"position_from",
")",
":",
"position_from",
"=",
"self",
".",
"get_position",
"(",
"position_from",
")",
"cursor",
"=",
"self",
".",
"textCursor",
"(",
")",
"start",
",",
"end",
"=",
"cursor",
".",
"selectionSta... | 43.4 | 11.1 |
def measured_current(self):
    """
    The measured current that the battery is supplying (in microamps)
    """
    cached, value = self.get_attr_int(self._measured_current, 'current_now')
    self._measured_current = cached
    return value
"def",
"measured_current",
"(",
"self",
")",
":",
"self",
".",
"_measured_current",
",",
"value",
"=",
"self",
".",
"get_attr_int",
"(",
"self",
".",
"_measured_current",
",",
"'current_now'",
")",
"return",
"value"
] | 39.666667 | 20.333333 |
def doorient(self):
    """
    Load the 1-D calibration az/el/ra/dec grids from the HDF5 file and
    apply the camera's configured geometric corrections (transpose,
    horizontal flip, vertical flip, CCW rotation), storing the results
    on ``self.az``, ``self.el``, ``self.ra``, ``self.dec``.

    NOTE: we need to retrieve values in case no modifications are done.
    (since we'd get a closed h5py handle)
    """
    assert self.cal1Dfn.is_file(
    ), 'please specify filename for each camera under [cam]/cal1Dname: in .ini file {}'.format(self.cal1Dfn)
    with h5py.File(self.cal1Dfn, 'r') as f:
        # [()] reads the full dataset into memory before the file closes.
        az = f['az'][()]
        el = f['el'][()]
        ra = f['ra'][()]
        dec = f['dec'][()]
    assert az.ndim == el.ndim == 2
    assert az.shape == el.shape
    # Apply each enabled correction to all four grids at once, instead of
    # repeating the four assignments per transform (original duplicated the
    # code four times per branch).
    grids = [az, el, ra, dec]
    if self.transpose:
        # (fixed log typo: 'tranposing' -> 'transposing')
        logging.debug(
            'transposing cam #{} az/el/ra/dec data. '.format(self.name))
        grids = [g.T for g in grids]
    if self.fliplr:
        logging.debug(
            'flipping horizontally cam #{} az/el/ra/dec data.'.format(self.name))
        grids = [np.fliplr(g) for g in grids]
    if self.flipud:
        logging.debug(
            'flipping vertically cam #{} az/el/ra/dec data.'.format(self.name))
        grids = [np.flipud(g) for g in grids]
    if self.rotccw != 0:
        logging.debug(
            'rotating cam #{} az/el/ra/dec data.'.format(self.name))
        grids = [np.rot90(g, self.rotccw) for g in grids]
    self.az, self.el, self.ra, self.dec = grids
"def",
"doorient",
"(",
"self",
")",
":",
"assert",
"self",
".",
"cal1Dfn",
".",
"is_file",
"(",
")",
",",
"'please specify filename for each camera under [cam]/cal1Dname: in .ini file {}'",
".",
"format",
"(",
"self",
".",
"cal1Dfn",
")",
"with",
"h5py",
".",
"F... | 32.9 | 17.34 |
def rel_path(self, other):
    """Return a path to "other" relative to this directory.
    """
    # This complicated and expensive method, which constructs relative
    # paths between arbitrary Node.FS objects, is no longer used
    # by SCons itself.  It was introduced to store dependency paths
    # in .sconsign files relative to the target, but that ended up
    # being significantly inefficient.
    #
    # We're continuing to support the method because some SConstruct
    # files out there started using it when it was available, and
    # we're all about backwards compatibility..
    # Results are memoized per (self, other) pair in self._memo.
    try:
        memo_dict = self._memo['rel_path']
    except KeyError:
        memo_dict = {}
        self._memo['rel_path'] = memo_dict
    else:
        try:
            return memo_dict[other]
        except KeyError:
            pass
    if self is other:
        result = '.'
    elif not other in self._path_elements:
        # "other" is not on the path from the root to self: recurse on
        # other's parent directory and append other's own name.
        try:
            other_dir = other.get_dir()
        except AttributeError:
            # "other" isn't a Node (e.g. a plain string/Value): fall back
            # to its string representation.
            result = str(other)
        else:
            if other_dir is None:
                result = other.name
            else:
                dir_rel_path = self.rel_path(other_dir)
                if dir_rel_path == '.':
                    result = other.name
                else:
                    result = dir_rel_path + OS_SEP + other.name
    else:
        # "other" is an ancestor of self: climb with '..' segments, then
        # descend along other's remaining path elements.
        i = self._path_elements.index(other) + 1
        path_elems = ['..'] * (len(self._path_elements) - i) \
                     + [n.name for n in other._path_elements[i:]]
        result = OS_SEP.join(path_elems)
    memo_dict[other] = result
    return result
"def",
"rel_path",
"(",
"self",
",",
"other",
")",
":",
"# This complicated and expensive method, which constructs relative",
"# paths between arbitrary Node.FS objects, is no longer used",
"# by SCons itself. It was introduced to store dependency paths",
"# in .sconsign files relative to the... | 33.358491 | 19.396226 |
def update_all(self, rs=None, since=None):
    """Sync all objects for the relations rs (if None, sync all resources).

    :param rs: iterable of resources to sync; ``None`` selects every
        resource via ``resource.all_resources()``.
    :param since: optional lower bound forwarded to ``sync_resource``.
    """
    # Resolve the default *before* logging: the original logged
    # ' '.join(r.tag for r in rs) first, which raised TypeError whenever
    # rs was left as None.
    if rs is None:
        rs = resource.all_resources()
    self._log.info("Updating resources: %s", ' '.join(r.tag for r in rs))
    ctx = self._ContextClass(self)
    for r in rs:
        # Bind r as a default argument so the closure is safe even if
        # _atomic_update defers the call (late-binding pitfall).
        self._atomic_update(lambda r=r: ctx.sync_resource(r, since=since))
"def",
"update_all",
"(",
"self",
",",
"rs",
"=",
"None",
",",
"since",
"=",
"None",
")",
":",
"self",
".",
"_log",
".",
"info",
"(",
"\"Updating resources: %s\"",
",",
"' '",
".",
"join",
"(",
"r",
".",
"tag",
"for",
"r",
"in",
"rs",
")",
")",
"... | 43.444444 | 21.222222 |
def validate_leafref_path(ctx, stmt, path_spec, path,
                          accept_non_leaf_target=False,
                          accept_non_config_target=False):
    """Return the leaf that the path points to and the expanded path arg,
    or None on error.

    :param ctx: compilation context; errors are appended to ctx.errors
    :param stmt: the statement carrying the leafref (used for error reporting
        and for resolving relative paths)
    :param path_spec: parsed path as (up, dn, derefup, derefdn), or None
        when the path failed to parse
    :param path: the path statement itself
    :param accept_non_leaf_target: allow the target to be a non-leaf node
    :param accept_non_config_target: allow a config leafref to target state data
    :return: (target_node, expanded_path, path_list) on success, None on error
    """
    pathpos = path.pos
    # Unprefixed paths in typedefs in YANG 1 were underspecified.  In
    # YANG 1.1 the semantics are defined.  The code below is compatible
    # with old pyang for YANG 1 modules.
    # If an un-prefixed identifier is found, it defaults to the
    # module where the path is defined, except if found within
    # a grouping, in which case it defaults to the module where the
    # grouping is used.
    if (path.parent.parent is not None and
        path.parent.parent.keyword == 'typedef'):
        if path.i_module.i_version == '1':
            local_module = path.i_module
        else:
            local_module = stmt.i_module
    elif stmt.keyword == 'module':
        local_module = stmt
    else:
        local_module = stmt.i_module
    if stmt.keyword == 'typedef':
        in_typedef = True
    else:
        in_typedef = False
    # Resolve an (optionally prefixed) identifier to a (module, name) pair.
    def find_identifier(identifier):
        if util.is_prefixed(identifier):
            (prefix, name) = identifier
            pmodule = prefix_to_module(path.i_module, prefix, stmt.pos,
                                       ctx.errors)
            if pmodule is None:
                raise NotFound
            return (pmodule, name)
        elif in_typedef and stmt.i_module.i_version != '1':
            # YANG 1.1 forbids unprefixed identifiers inside typedefs;
            # abort and re-check in the expanded tree.
            raise Abort
        else: # local identifier
            return (local_module, identifier)
    def is_identifier(x):
        if util.is_local(x):
            return True
        if type(x) == type(()) and len(x) == 2:
            return True
        return False
    def is_predicate(x):
        if type(x) == type(()) and len(x) == 4 and x[0] == 'predicate':
            return True
        return False
    # Walk `up` levels up from ptr, then down along the identifiers/predicates
    # in `dn`.  Returns (key_list, keys, target_node, path_list).
    def follow_path(ptr, up, dn):
        path_list = []
        last_skipped = None
        if up == -1: # absolute path
            (pmodule, name) = find_identifier(dn[0])
            ptr = search_child(pmodule.i_children, pmodule.i_modulename, name)
            if not is_submodule_included(path, ptr):
                ptr = None
            if ptr is None:
                # check all our submodules
                for inc in path.i_orig_module.search('include'):
                    submod = ctx.get_module(inc.arg)
                    if submod is not None:
                        ptr = search_child(submod.i_children,
                                           submod.arg, name)
                        if ptr is not None:
                            break
                if ptr is None:
                    err_add(ctx.errors, pathpos, 'LEAFREF_IDENTIFIER_NOT_FOUND',
                            (pmodule.arg, name, stmt.arg, stmt.pos))
                    raise NotFound
            path_list.append(('dn', ptr))
            dn = dn[1:]
        else:
            while up > 0:
                if ptr is None:
                    err_add(ctx.errors, pathpos, 'LEAFREF_TOO_MANY_UP',
                            (stmt.arg, stmt.pos))
                    raise NotFound
                if ptr.keyword in ('augment', 'grouping'):
                    # don't check the path here - check in the expanded tree
                    raise Abort
                ptr = ptr.parent
                if ptr is None:
                    err_add(ctx.errors, pathpos, 'LEAFREF_TOO_MANY_UP',
                            (stmt.arg, stmt.pos))
                    raise NotFound
                # Skip schema-only nodes that don't count as data levels.
                while ptr.keyword in ['case', 'choice', 'input', 'output']:
                    if ptr.keyword in ['input', 'output']:
                        last_skipped = ptr.keyword
                    ptr = ptr.parent
                    if ptr is None:
                        err_add(ctx.errors, pathpos, 'LEAFREF_TOO_MANY_UP',
                                (stmt.arg, stmt.pos))
                        raise NotFound
                # continue after the case, maybe also skip the choice
                if ptr is None:
                    err_add(ctx.errors, pathpos, 'LEAFREF_TOO_MANY_UP',
                            (stmt.arg, stmt.pos))
                    raise NotFound
                path_list.append(('up', ptr))
                up = up - 1
            if ptr is None: # or ptr.keyword == 'grouping':
                err_add(ctx.errors, pathpos, 'LEAFREF_TOO_MANY_UP',
                        (stmt.arg, stmt.pos))
                raise NotFound
            if ptr.keyword in ('augment', 'grouping'):
                # don't check the path here - check in the expanded tree
                raise Abort
        i = 0
        key_list = None
        keys = []
        while i < len(dn):
            if is_identifier(dn[i]) == True:
                (pmodule, name) = find_identifier(dn[i])
                module_name = pmodule.i_modulename
            elif ptr.keyword == 'list': # predicate on a list, good
                key_list = ptr
                keys = []
                # check each predicate
                while i < len(dn) and is_predicate(dn[i]) == True:
                    # unpack the predicate
                    (_tag, keyleaf, pup, pdn) = dn[i]
                    (pmodule, pname) = find_identifier(keyleaf)
                    # make sure the keyleaf is really a key in the list
                    pleaf = search_child(ptr.i_key, pmodule.i_modulename, pname)
                    if pleaf is None:
                        err_add(ctx.errors, pathpos, 'LEAFREF_NO_KEY',
                                (pmodule.arg, pname, stmt.arg, stmt.pos))
                        raise NotFound
                    # make sure it's not already referenced
                    if keyleaf in keys:
                        err_add(ctx.errors, pathpos, 'LEAFREF_MULTIPLE_KEYS',
                                (pmodule.arg, pname, stmt.arg, stmt.pos))
                        raise NotFound
                    keys.append((pmodule.arg, pname))
                    if pup == 0:
                        i = i + 1
                        break
                    # check what this predicate refers to; make sure it's
                    # another leaf; either of type leafref to keyleaf, OR same
                    # type as the keyleaf
                    (xkey_list, x_key, xleaf, _x) = follow_path(stmt, pup, pdn)
                    stmt.i_derefed_leaf = xleaf
                    if xleaf.keyword != 'leaf':
                        err_add(ctx.errors, pathpos,
                                'LEAFREF_BAD_PREDICATE_PTR',
                                (pmodule.arg, pname, xleaf.arg, xleaf.pos))
                        raise NotFound
                    i = i + 1
                continue
            else:
                err_add(ctx.errors, pathpos, 'LEAFREF_BAD_PREDICATE',
                        (ptr.i_module.arg, ptr.arg, stmt.arg, stmt.pos))
                raise NotFound
            if ptr.keyword in _keyword_with_children:
                ptr = search_data_node(ptr.i_children, module_name, name,
                                       last_skipped)
                if not is_submodule_included(path, ptr):
                    ptr = None
                if ptr is None:
                    err_add(ctx.errors, pathpos, 'LEAFREF_IDENTIFIER_NOT_FOUND',
                            (module_name, name, stmt.arg, stmt.pos))
                    raise NotFound
            else:
                err_add(ctx.errors, pathpos, 'LEAFREF_IDENTIFIER_BAD_NODE',
                        (module_name, name, stmt.arg, stmt.pos,
                         util.keyword_to_str(ptr.keyword)))
                raise NotFound
            path_list.append(('dn', ptr))
            i = i + 1
        return (key_list, keys, ptr, path_list)
    try:
        if path_spec is None: # e.g. invalid path
            return None
        (up, dn, derefup, derefdn) = path_spec
        if derefup > 0:
            # first follow the deref
            (key_list, keys, ptr, _x) = follow_path(stmt, derefup, derefdn)
            if ptr.keyword != 'leaf':
                err_add(ctx.errors, pathpos, 'LEAFREF_DEREF_NOT_LEAFREF',
                        (ptr.arg, ptr.pos))
                return None
            if ptr.i_leafref is None:
                err_add(ctx.errors, pathpos, 'LEAFREF_DEREF_NOT_LEAFREF',
                        (ptr.arg, ptr.pos))
                return None
            stmt.i_derefed_leaf = ptr
            # make sure the referenced leaf is expanded
            if ptr.i_leafref_expanded is False:
                v_reference_leaf_leafref(ctx, ptr)
            if ptr.i_leafref_ptr is None:
                return None
            (derefed_stmt, _pos) = ptr.i_leafref_ptr
            if derefed_stmt is None:
                # FIXME: what is this??
                return None
            if not hasattr(derefed_stmt, 'i_is_key'):
                # it follows from the YANG spec which says that predicates
                # are only used for constraining keys that the derefed stmt
                # must be a key
                err_add(ctx.errors, pathpos, 'LEAFREF_DEREF_NOT_KEY',
                        (ptr.arg, ptr.pos,
                         derefed_stmt.arg, derefed_stmt.pos))
                return None
            # split ptr's leafref path into two parts:
            # '/a/b/c' --> '/a/b', 'c'
            m = re_path.match(ptr.i_leafref.i_expanded_path)
            s1 = m.group(1)
            s2 = m.group(2)
            # split the deref path into two parts:
            # 'deref(../a)/b' --> '../a', 'b'
            m = re_deref.match(path.arg)
            d1 = m.group(1)
            d2 = m.group(2)
            expanded_path = "%s[%s = current()/%s]/%s" % \
                            (s1, s2, d1, d2)
            (key_list, keys, ptr, path_list) = follow_path(derefed_stmt, up, dn)
        else:
            (key_list, keys, ptr, path_list) = follow_path(stmt, up, dn)
            expanded_path = path.arg
        # ptr is now the node that the leafref path points to
        # check that it is a leaf
        if (ptr.keyword not in ('leaf', 'leaf-list') and
            not accept_non_leaf_target):
            err_add(ctx.errors, pathpos, 'LEAFREF_NOT_LEAF',
                    (stmt.arg, stmt.pos))
            return None
        if (key_list == ptr.parent and
            (ptr.i_module.i_modulename, ptr.arg) in keys):
            err_add(ctx.errors, pathpos, 'LEAFREF_MULTIPLE_KEYS',
                    (ptr.i_module.i_modulename, ptr.arg, stmt.arg, stmt.pos))
        if ((hasattr(stmt, 'i_config') and stmt.i_config == True) and
            hasattr(ptr, 'i_config') and ptr.i_config == False
            and not accept_non_config_target):
            err_add(ctx.errors, pathpos, 'LEAFREF_BAD_CONFIG',
                    (stmt.arg, ptr.arg, ptr.pos))
        if ptr == stmt:
            err_add(ctx.errors, pathpos, 'CIRCULAR_DEPENDENCY',
                    ('leafref', path.arg))
            return None
        return ptr, expanded_path, path_list
    except NotFound:
        return None
    except Abort:
        return None
"def",
"validate_leafref_path",
"(",
"ctx",
",",
"stmt",
",",
"path_spec",
",",
"path",
",",
"accept_non_leaf_target",
"=",
"False",
",",
"accept_non_config_target",
"=",
"False",
")",
":",
"pathpos",
"=",
"path",
".",
"pos",
"# Unprefixed paths in typedefs in YANG ... | 43.312253 | 16.146245 |
def add_tagged_report_number(reading_line,
                             len_reportnum,
                             reportnum,
                             startpos,
                             true_replacement_index,
                             extras):
    """Splice a tagged, standardised institutional REPORT-NUMBER back into
    a reference line being rebuilt.
    @param reading_line: (string) The reference line before capitalization
     was performed, and before REPORT-NUMBERs and TITLEs were stripped out.
    @param len_reportnum: (integer) the length of the matched REPORT-NUMBER.
    @param reportnum: (string) the replacement text for the matched
     REPORT-NUMBER.
    @param startpos: (integer) the pointer to the next position in the
     reading-line from which to start rebuilding.
    @param true_replacement_index: (integer) the replacement index of the
     matched REPORT-NUMBER in the reading-line, with stripped punctuation
     and whitespace accounted for.
    @param extras: (integer) extras to be added into the replacement index.
    @return: (tuple) containing a string (the rebuilt line segment) and an
     integer (the next 'startpos' in the reading-line).
    """
    # Copy the line up to the report number.  When there is at least one
    # character between startpos and the match, stop one character early so
    # an opening brace before the number is dropped.
    copy_end = true_replacement_index
    if true_replacement_index - startpos - 1 >= 0:
        copy_end = true_replacement_index - 1
    rebuilt_line = reading_line[startpos:copy_end]
    # Insert the tagged, standardised REPORT-NUMBER.
    rebuilt_line += u"<cds.REPORTNUMBER>%(reportnum)s</cds.REPORTNUMBER>" \
                    % {'reportnum': reportnum}
    # Advance past the match, then past a closing brace if one follows.
    startpos = true_replacement_index + len_reportnum + extras
    if startpos < len(reading_line) and reading_line[startpos] in (u"]", u")"):
        startpos += 1
    return rebuilt_line, startpos
"def",
"add_tagged_report_number",
"(",
"reading_line",
",",
"len_reportnum",
",",
"reportnum",
",",
"startpos",
",",
"true_replacement_index",
",",
"extras",
")",
":",
"rebuilt_line",
"=",
"u\"\"",
"# The segment of the line that's being rebuilt to",
"# include the tagged & ... | 48.807692 | 23.326923 |
def compute_venn3_colors(set_colors):
    '''
    Given three base colors, computes combinations of colors corresponding to all regions of the venn diagram.
    returns a list of 7 elements, providing colors for regions (100, 010, 110, 001, 101, 011, 111).
    >>> compute_venn3_colors(['r', 'g', 'b'])
    (array([ 1.,  0.,  0.]),..., array([ 0.4,  0.2,  0.4]))
    '''
    converter = ColorConverter()
    base = [np.array(converter.to_rgb(col)) for col in set_colors]
    c100, c010, c001 = base[0], base[1], base[2]
    # Pairwise and triple mixes fill the overlap regions.
    return (c100, c010, mix_colors(c100, c010), c001,
            mix_colors(c100, c001), mix_colors(c010, c001),
            mix_colors(c100, c010, c001))
"def",
"compute_venn3_colors",
"(",
"set_colors",
")",
":",
"ccv",
"=",
"ColorConverter",
"(",
")",
"base_colors",
"=",
"[",
"np",
".",
"array",
"(",
"ccv",
".",
"to_rgb",
"(",
"c",
")",
")",
"for",
"c",
"in",
"set_colors",
"]",
"return",
"(",
"base_co... | 59.583333 | 40.083333 |
async def get_departures(
    self,
    station_id: str,
    direction_id: Optional[str] = None,
    max_journeys: int = 20,
    products: Optional[List[str]] = None,
) -> Dict[str, Any]:
    """Fetch departure data for a station from the rmv.de XML API.

    Args:
        station_id: RMV station identifier to query.
        direction_id: Optional id used to filter departures by direction.
        max_journeys: Maximum number of journeys to request.
        products: Transport products to include; defaults to ALL_PRODUCTS.

    Returns:
        Parsed departure data as returned by ``self.data()``.

    Raises:
        RMVtransportApiConnectionError: on timeout or HTTP client error.
        RMVtransportError: when the XML payload is malformed or missing
            expected elements.
    """
    self.station_id: str = station_id
    self.direction_id: str = direction_id
    self.max_journeys: int = max_journeys
    # Build the product filter string used in the query URL.
    self.products_filter: str = _product_filter(products or ALL_PRODUCTS)
    base_url: str = _base_url()
    params: Dict[str, Union[str, int]] = {
        "selectDate": "today",
        "time": "now",
        "input": self.station_id,
        "maxJourneys": self.max_journeys,
        "boardType": "dep",
        "productsFilter": self.products_filter,
        "disableEquivs": "discard_nearby",
        "output": "xml",
        "start": "yes",
    }
    if self.direction_id:
        params["dirInput"] = self.direction_id
    url = base_url + urllib.parse.urlencode(params)
    # Network fetch, bounded by the instance-level timeout.
    try:
        with async_timeout.timeout(self._timeout):
            async with self._session.get(url) as response:
                _LOGGER.debug(f"Response from RMV API: {response.status}")
                xml = await response.read()
                _LOGGER.debug(xml)
    except (asyncio.TimeoutError, aiohttp.ClientError):
        _LOGGER.error("Can not load data from RMV API")
        raise RMVtransportApiConnectionError()
    # Parse the raw XML into an objectified tree.
    # pylint: disable=I1101
    try:
        self.obj = objectify.fromstring(xml)
    except (TypeError, etree.XMLSyntaxError):
        _LOGGER.debug(f"Get from string: {xml[:100]}")
        print(f"Get from string: {xml}")
        raise RMVtransportError()
    # Extract the timestamp and station metadata from the tree.
    try:
        self.now = self.current_time()
        self.station = self._station()
    except (TypeError, AttributeError):
        _LOGGER.debug(
            f"Time/Station TypeError or AttributeError {objectify.dump(self.obj)}"
        )
        raise RMVtransportError()
    # Rebuild the journey list from scratch on every fetch.
    self.journeys.clear()
    try:
        for journey in self.obj.SBRes.JourneyList.Journey:
            self.journeys.append(RMVJourney(journey, self.now))
    except AttributeError:
        _LOGGER.debug(f"Extract journeys: {objectify.dump(self.obj.SBRes)}")
        raise RMVtransportError()
    return self.data()
"async",
"def",
"get_departures",
"(",
"self",
",",
"station_id",
":",
"str",
",",
"direction_id",
":",
"Optional",
"[",
"str",
"]",
"=",
"None",
",",
"max_journeys",
":",
"int",
"=",
"20",
",",
"products",
":",
"Optional",
"[",
"List",
"[",
"str",
"]"... | 35.014706 | 17.264706 |
def ListChildren(self, urn, limit=None, age=NEWEST_TIME):
    """Lists bunch of directories efficiently.

    Args:
      urn: Urn to list children.
      limit: Max number of children to list.
      age: The age of the items to retrieve. Should be one of ALL_TIMES,
        NEWEST_TIME or a range.

    Returns:
      RDFURNs instances of each child.
    """
    # Delegate to the batched variant and unpack the single result pair.
    results = list(self.MultiListChildren([urn], limit=limit, age=age))
    _, children_urns = results[0]
    return children_urns
"def",
"ListChildren",
"(",
"self",
",",
"urn",
",",
"limit",
"=",
"None",
",",
"age",
"=",
"NEWEST_TIME",
")",
":",
"_",
",",
"children_urns",
"=",
"list",
"(",
"self",
".",
"MultiListChildren",
"(",
"[",
"urn",
"]",
",",
"limit",
"=",
"limit",
",",... | 30.866667 | 17.466667 |
def export_partlist_to_file(input, output, timeout=20, showgui=False):
    '''
    call eagle and export sch or brd to partlist text file

    :param input: .sch or .brd file name
    :param output: text file name
    :param timeout: int
    :param showgui: Bool, True -> do not hide eagle GUI
    :rtype: None
    '''
    # Normalize both paths before handing them to the eagle driver.
    src = norm_path(input)
    dst = norm_path(output)
    cmds = export_command(output=dst, output_type='partlist')
    command_eagle(input=src, timeout=timeout, commands=cmds, showgui=showgui)
"def",
"export_partlist_to_file",
"(",
"input",
",",
"output",
",",
"timeout",
"=",
"20",
",",
"showgui",
"=",
"False",
")",
":",
"input",
"=",
"norm_path",
"(",
"input",
")",
"output",
"=",
"norm_path",
"(",
"output",
")",
"commands",
"=",
"export_command... | 32.875 | 22.625 |
def detect():
    """Does this compiler support OpenMP parallelization?

    Probes the default compiler by compiling a call to
    ``omp_get_num_threads()``; if the first probe fails, retries after
    linking against libgomp (GCC's OpenMP runtime library).

    Returns:
        bool: True if OpenMP appears usable with this compiler.
    """
    compiler = new_compiler()
    hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
    if not hasopenmp:
        # Some toolchains only find the OpenMP runtime once libgomp is
        # explicitly added to the link line.
        compiler.add_library('gomp')
        hasopenmp = hasfunction(compiler, 'omp_get_num_threads()')
    # NOTE: the original also set an unused local `needs_gomp`; removed as
    # dead code (it was never read or returned).
    return hasopenmp
"def",
"detect",
"(",
")",
":",
"compiler",
"=",
"new_compiler",
"(",
")",
"hasopenmp",
"=",
"hasfunction",
"(",
"compiler",
",",
"'omp_get_num_threads()'",
")",
"needs_gomp",
"=",
"hasopenmp",
"if",
"not",
"hasopenmp",
":",
"compiler",
".",
"add_library",
"("... | 32.5 | 16.8 |
def to_params(self):
    """
    Convert the instance dictionary into a sorted list of pairs
    (name, valrepr) where valrepr is the string representation of
    the underlying value.
    """
    attrs = self.__dict__
    pairs = []
    for name in sorted(attrs):
        # Skip private/internal attributes.
        if name.startswith('_'):
            continue
        pairs.append((name, repr(attrs[name])))
    return pairs
"def",
"to_params",
"(",
"self",
")",
":",
"dic",
"=",
"self",
".",
"__dict__",
"return",
"[",
"(",
"k",
",",
"repr",
"(",
"dic",
"[",
"k",
"]",
")",
")",
"for",
"k",
"in",
"sorted",
"(",
"dic",
")",
"if",
"not",
"k",
".",
"startswith",
"(",
... | 36.555556 | 12.777778 |
def check_statement(self, stmt, max_paths=1, max_path_length=5):
    """Check a single Statement against the model.

    Parameters
    ----------
    stmt : indra.statements.Statement
        The Statement to check.
    max_paths : Optional[int]
        The maximum number of specific paths to return for each Statement
        to be explained. Default: 1
    max_path_length : Optional[int]
        The maximum length of specific paths to return. Default: 5

    Returns
    -------
    PathResult
        Result object whose ``path_found`` flag indicates whether the
        model satisfies the Statement (the original docstring said
        "boolean"; the code returns PathResult instances on every path).
    """
    # Make sure the influence map is initialized
    self.get_im()
    # Check if this is one of the statement types that we can check
    if not isinstance(stmt, (Modification, RegulateAmount,
                             RegulateActivity, Influence)):
        return PathResult(False, 'STATEMENT_TYPE_NOT_HANDLED',
                          max_paths, max_path_length)
    # Get the polarity for the statement: +1 for additive/activating
    # statements, -1 for removing/inhibiting/decreasing ones.
    if isinstance(stmt, Modification):
        target_polarity = -1 if isinstance(stmt, RemoveModification) else 1
    elif isinstance(stmt, RegulateActivity):
        target_polarity = 1 if stmt.is_activation else -1
    elif isinstance(stmt, RegulateAmount):
        target_polarity = -1 if isinstance(stmt, DecreaseAmount) else 1
    elif isinstance(stmt, Influence):
        target_polarity = -1 if stmt.overall_polarity() == -1 else 1
    # Get the subject and object (works also for Modifications)
    subj, obj = stmt.agent_list()
    # Get a list of monomer patterns matching the subject FIXME Currently
    # this will match rules with the corresponding monomer pattern on it.
    # In future, this statement should (possibly) also match rules in which
    # 1) the agent is in its active form, or 2) the agent is tagged as the
    # enzyme in a rule of the appropriate activity (e.g., a phosphorylation
    # rule) FIXME
    if subj is not None:
        subj_mps = list(pa.grounded_monomer_patterns(self.model, subj,
                                                     ignore_activities=True))
        if not subj_mps:
            logger.debug('No monomers found corresponding to agent %s' %
                         subj)
            return PathResult(False, 'SUBJECT_MONOMERS_NOT_FOUND',
                              max_paths, max_path_length)
    else:
        # A None subject is allowed; [None] lets the product loop below
        # still pair each observable with a (null) subject pattern.
        subj_mps = [None]
    # Observables may not be found for an activation since there may be no
    # rule in the model activating the object, and the object may not have
    # an "active" site of the appropriate type
    obs_names = self.stmt_to_obs[stmt]
    if not obs_names:
        logger.debug("No observables for stmt %s, returning False" % stmt)
        return PathResult(False, 'OBSERVABLES_NOT_FOUND',
                          max_paths, max_path_length)
    for subj_mp, obs_name in itertools.product(subj_mps, obs_names):
        # NOTE: Returns on the path found for the first enz_mp/obs combo
        result = self._find_im_paths(subj_mp, obs_name, target_polarity,
                                     max_paths, max_path_length)
        # If a path was found, then we return it; otherwise, that means
        # there was no path for this observable, so we have to try the next
        # one
        if result.path_found:
            return result
    # If we got here, then there was no path for any observable
    return PathResult(False, 'NO_PATHS_FOUND',
                      max_paths, max_path_length)
"def",
"check_statement",
"(",
"self",
",",
"stmt",
",",
"max_paths",
"=",
"1",
",",
"max_path_length",
"=",
"5",
")",
":",
"# Make sure the influence map is initialized",
"self",
".",
"get_im",
"(",
")",
"# Check if this is one of the statement types that we can check",
... | 50.916667 | 21.625 |
def get_trace(self, trace_id, project_id=None):
    """
    Gets a single trace by its ID.

    Args:
        trace_id (str): ID of the trace to return.
        project_id (str): ID of the Cloud project where the trace data
            is stored; falls back to ``self.project`` when omitted.

    Returns:
        A Trace dict.
    """
    # Fall back to the client's default project when none was given.
    effective_project = self.project if project_id is None else project_id
    return self.trace_api.get_trace(
        project_id=effective_project, trace_id=trace_id)
"def",
"get_trace",
"(",
"self",
",",
"trace_id",
",",
"project_id",
"=",
"None",
")",
":",
"if",
"project_id",
"is",
"None",
":",
"project_id",
"=",
"self",
".",
"project",
"return",
"self",
".",
"trace_api",
".",
"get_trace",
"(",
"project_id",
"=",
"p... | 27.823529 | 20.647059 |
def register(self, url, doc):
    """Register a DOI via the DataCite API.

    :param url: Specify the URL for the API.
    :param doc: Set metadata for DOI.
    :returns: `True` if is registered successfully.
    """
    log_extra = {'pid': self.pid}
    try:
        self.pid.register()
        # Push the DOI metadata first, then mint the DOI itself.
        self.api.metadata_post(doc)
        self.api.doi_post(self.pid.pid_value, url)
    except (DataCiteError, HttpError):
        logger.exception("Failed to register in DataCite", extra=log_extra)
        raise
    logger.info("Successfully registered in DataCite", extra=log_extra)
    return True
"def",
"register",
"(",
"self",
",",
"url",
",",
"doc",
")",
":",
"try",
":",
"self",
".",
"pid",
".",
"register",
"(",
")",
"# Set metadata for DOI",
"self",
".",
"api",
".",
"metadata_post",
"(",
"doc",
")",
"# Mint DOI",
"self",
".",
"api",
".",
"... | 36 | 12.8 |
async def close(self):
    """
    Terminate the ICE agent, ending ICE processing and streams.
    """
    # Idempotent: a second call returns immediately.
    if self.__isClosed:
        return
    self.__isClosed = True
    self.__setSignalingState('closed')
    # stop senders / receivers
    for transceiver in self.__transceivers:
        await transceiver.stop()
    if self.__sctp:
        await self.__sctp.stop()
    # stop transports (each transceiver's transport and, below it, the
    # underlying transport it wraps — both layers are shut down)
    for transceiver in self.__transceivers:
        await transceiver._transport.stop()
        await transceiver._transport.transport.stop()
    if self.__sctp:
        await self.__sctp.transport.stop()
        await self.__sctp.transport.transport.stop()
    self.__updateIceConnectionState()
    # no more events will be emitted, so remove all event listeners
    # to facilitate garbage collection.
    self.remove_all_listeners()
"async",
"def",
"close",
"(",
"self",
")",
":",
"if",
"self",
".",
"__isClosed",
":",
"return",
"self",
".",
"__isClosed",
"=",
"True",
"self",
".",
"__setSignalingState",
"(",
"'closed'",
")",
"# stop senders / receivers",
"for",
"transceiver",
"in",
"self",
... | 33.148148 | 13.888889 |
def WaitUntilComplete(self, poll_freq=2, timeout=None):
    """Poll until status is completed.

    If status is 'notStarted' or 'executing' continue polling.
    If status is 'succeeded' return.
    Else raise exception.

    Args:
        poll_freq: polling interval in seconds.
        timeout: maximum seconds to wait while status is 'executing';
            None waits indefinitely.

    Raises:
        clc.RequestTimeoutException: still 'executing' past `timeout`.
        clc.CLCException: request ended as 'failed', 'resumed' or 'unknown'.
    """
    start_time = time.time()
    while not self.time_completed:
        status = self.Status()
        if status == 'executing':
            if not self.time_executed:
                self.time_executed = time.time()
            if clc.v2.time_utils.TimeoutExpired(start_time, timeout):
                raise clc.RequestTimeoutException(
                    'Timeout waiting for Request: {0}'.format(self.id), status)
        elif status == 'succeeded':
            self.time_completed = time.time()
        # BUG FIX: the original membership test was
        # ("failed", "resumed" or "unknown"), which evaluates to
        # ("failed", "resumed") and silently dropped the 'unknown' status.
        elif status in ('failed', 'resumed', 'unknown'):
            # TODO - need to ID best reaction for resumed status (e.g. manual intervention)
            self.time_completed = time.time()
            raise clc.CLCException("%s %s execution %s" % (self.context_key, self.context_val, status))
        time.sleep(poll_freq)
"def",
"WaitUntilComplete",
"(",
"self",
",",
"poll_freq",
"=",
"2",
",",
"timeout",
"=",
"None",
")",
":",
"start_time",
"=",
"time",
".",
"time",
"(",
")",
"while",
"not",
"self",
".",
"time_completed",
":",
"status",
"=",
"self",
".",
"Status",
"(",... | 35.884615 | 21.538462 |
def update_metadata(session, path=DATA_PATH):
    """Update metadata files (only ladders right now)."""
    games_file = os.path.join(path, 'games.json')
    with open(games_file) as handle:
        games = json.loads(handle.read())
    # Write one pretty-printed JSON file per metadata key.
    metadata = get_metadata(session, games)
    for key, data in metadata.items():
        out_file = os.path.join(path, '{}.json'.format(key))
        with open(out_file, 'w') as handle:
            handle.write(json.dumps(data, indent=2))
"def",
"update_metadata",
"(",
"session",
",",
"path",
"=",
"DATA_PATH",
")",
":",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"path",
",",
"'games.json'",
")",
")",
"as",
"handle",
":",
"games",
"=",
"json",
".",
"loads",
"(",
"handle"... | 48.875 | 15.5 |
def get_hyperparameter_configurations(self, num, r, searchspace_json, random_state):  # pylint: disable=invalid-name
    """Randomly generate num hyperparameter configurations from search space

    Parameters
    ----------
    num: int
        the number of hyperparameter configurations

    Returns
    -------
    list
        a list of hyperparameter configurations. Format: [[key1, value1], [key2, value2], ...]
    """
    global _KEY  # pylint: disable=global-statement
    assert self.i == 0
    configs = dict()
    for _ in range(num):
        params_id = create_bracket_parameter_id(self.bracket_id, self.i)
        params = json2paramater(searchspace_json, random_state)
        # Attach the resource budget r under the shared key.
        params[_KEY] = r
        configs[params_id] = params
    self._record_hyper_configs(configs)
    return [[params_id, params] for params_id, params in configs.items()]
"def",
"get_hyperparameter_configurations",
"(",
"self",
",",
"num",
",",
"r",
",",
"searchspace_json",
",",
"random_state",
")",
":",
"# pylint: disable=invalid-name",
"global",
"_KEY",
"# pylint: disable=global-statement",
"assert",
"self",
".",
"i",
"==",
"0",
"hyp... | 42.130435 | 24 |
def highlight_text(self, text, start, end):
    """
    Highlights given text.

    :param text: Text.
    :type text: QString
    :param start: Text start index.
    :type start: int
    :param end: Text end index.
    :type end: int
    :return: Method success.
    :rtype: bool
    """
    for rule in self.__rules:
        position = rule.pattern.indexIn(text, start)
        # Walk successive matches of this rule within [start, end).
        while start <= position < end:
            matched_len = rule.pattern.matchedLength()
            text_format = (self.formats.get_format(rule.name) or
                           self.formats.get_format("default"))
            # Clamp the formatted span so it never extends past `end`.
            self.setFormat(position, min(matched_len, end - position), text_format)
            position = rule.pattern.indexIn(text, position + matched_len)
    return True
"def",
"highlight_text",
"(",
"self",
",",
"text",
",",
"start",
",",
"end",
")",
":",
"for",
"rule",
"in",
"self",
".",
"__rules",
":",
"index",
"=",
"rule",
".",
"pattern",
".",
"indexIn",
"(",
"text",
",",
"start",
")",
"while",
"index",
">=",
"... | 34.590909 | 16.590909 |
def adapter(data, headers, **kwargs):
    """Wrap the formatting inside a function for TabularOutputFormatter."""
    # Escape literal newlines/tabs so each record stays on one TSV line.
    escapes = (('\n', r'\n'), ('\t', r'\t'))
    for row in chain((headers,), data):
        cells = (replace(cell, escapes) for cell in row)
        yield "\t".join(cells)
"def",
"adapter",
"(",
"data",
",",
"headers",
",",
"*",
"*",
"kwargs",
")",
":",
"for",
"row",
"in",
"chain",
"(",
"(",
"headers",
",",
")",
",",
"data",
")",
":",
"yield",
"\"\\t\"",
".",
"join",
"(",
"(",
"replace",
"(",
"r",
",",
"(",
"(",
... | 58.25 | 11.5 |
def remove_child_vault(self, vault_id, child_id):
    """Removes a child from a vault.

    arg:    vault_id (osid.id.Id): the ``Id`` of a vault
    arg:    child_id (osid.id.Id): the ``Id`` of the child
    raise:  NotFound - ``vault_id`` not parent of ``child_id``
    raise:  NullArgument - ``vault_id`` or ``child_id`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    # Implemented from template for
    # osid.resource.BinHierarchyDesignSession.remove_child_bin_template
    if self._catalog_session is None:
        return self._hierarchy_session.remove_child(id_=vault_id, child_id=child_id)
    return self._catalog_session.remove_child_catalog(catalog_id=vault_id, child_id=child_id)
"def",
"remove_child_vault",
"(",
"self",
",",
"vault_id",
",",
"child_id",
")",
":",
"# Implemented from template for",
"# osid.resource.BinHierarchyDesignSession.remove_child_bin_template",
"if",
"self",
".",
"_catalog_session",
"is",
"not",
"None",
":",
"return",
"self",... | 52.117647 | 23.352941 |
async def _loadNodeValu(self, full, valu):
    '''
    Load a node from storage into the tree.
    ( used by initialization routines to build the tree)
    '''
    node = self.root
    # Descend through the tree, creating any missing intermediate nodes.
    for path in iterpath(full):
        kidname = path[-1]
        kid = node.kids.get(kidname)
        if kid is None:
            kid = await self._initNodePath(node, path, None)
        node = kid
    node.valu = valu
    return node
"async",
"def",
"_loadNodeValu",
"(",
"self",
",",
"full",
",",
"valu",
")",
":",
"node",
"=",
"self",
".",
"root",
"for",
"path",
"in",
"iterpath",
"(",
"full",
")",
":",
"name",
"=",
"path",
"[",
"-",
"1",
"]",
"step",
"=",
"node",
".",
"kids",... | 25.222222 | 20.777778 |
def copy_default_data_file(filename, module=None):
    """Copies file from default data directory to local directory."""
    if module is None:
        module = __get_filetypes_module()
    source_path = get_default_data_path(filename, module=module)
    shutil.copy(source_path, ".")
shutil.copy(fullpath, ".") | [
"def",
"copy_default_data_file",
"(",
"filename",
",",
"module",
"=",
"None",
")",
":",
"if",
"module",
"is",
"None",
":",
"module",
"=",
"__get_filetypes_module",
"(",
")",
"fullpath",
"=",
"get_default_data_path",
"(",
"filename",
",",
"module",
"=",
"module... | 45.5 | 10 |
def _getSensorInputRecord(self, inputRecord):
    """
    inputRecord - dict containing the input to the sensor

    Return a 'SensorInput' object, which represents the 'parsed'
    representation of the input record
    """
    sensor = self._getSensorRegion()
    # Deep-copy so the caller's record and the region outputs cannot be
    # mutated through the returned SensorInput.
    dataRow = copy.deepcopy(sensor.getSelf().getOutputValues('sourceOut'))
    dataDict = copy.deepcopy(inputRecord)
    encodings = sensor.getSelf().getOutputValues('sourceEncodings')
    category = int(sensor.getOutputData('categoryOut')[0])
    resetSignal = sensor.getOutputData('resetOut')[0]
    return SensorInput(dataRow=dataRow,
                       dataDict=dataDict,
                       dataEncodings=encodings,
                       sequenceReset=resetSignal,
                       category=category)
"def",
"_getSensorInputRecord",
"(",
"self",
",",
"inputRecord",
")",
":",
"sensor",
"=",
"self",
".",
"_getSensorRegion",
"(",
")",
"dataRow",
"=",
"copy",
".",
"deepcopy",
"(",
"sensor",
".",
"getSelf",
"(",
")",
".",
"getOutputValues",
"(",
"'sourceOut'",... | 42.210526 | 14.842105 |
def list(self, opts):
    """List all confs or if a conf is given, all the stanzas in it.

    Unflagged arguments are conf, stanza, key — in this order, and all
    are optional. With no arguments every conf name is printed; with a
    conf name, matching stanzas (and optionally a single key) are shown.
    """
    argv = opts.args
    count = len(argv)
    # Presence flags for the optional positional arguments.
    # (Original used the anti-idiom `True if count > 0 else False`.)
    cpres = count > 0
    spres = count > 1
    kpres = count > 2
    if not cpres:
        # List out the available confs
        for conf in self.service.confs:
            print(conf.name)
    else:
        # Print out detail on the requested conf
        # check for optional stanza, or key requested (or all)
        name = argv[0]
        conf = self.service.confs[name]
        for stanza in conf:
            if (spres and argv[1] == stanza.name) or not spres:
                print("[%s]" % stanza.name)
                for key, value in six.iteritems(stanza.content):
                    if (kpres and argv[2] == key) or not kpres:
                        print("%s = %s" % (key, value))
                print()
"def",
"list",
"(",
"self",
",",
"opts",
")",
":",
"argv",
"=",
"opts",
".",
"args",
"count",
"=",
"len",
"(",
"argv",
")",
"# unflagged arguments are conf, stanza, key. In this order",
"# but all are optional",
"cpres",
"=",
"True",
"if",
"count",
">",
"0",
"... | 37.724138 | 16.827586 |
def send_fetch_request(self, payloads=None, fail_on_error=True,
                       callback=None,
                       max_wait_time=DEFAULT_FETCH_SERVER_WAIT_MSECS,
                       min_bytes=DEFAULT_FETCH_MIN_BYTES):
    """
    Encode and send a FetchRequest

    Payloads are grouped by topic and partition so they can be pipelined
    to the same brokers.

    NOTE(review): this is a Twisted inlineCallbacks-style generator —
    results arrive via ``yield`` and are returned with ``returnValue()``.

    Raises
    ======
    FailedPayloadsError, LeaderUnavailableError, PartitionUnavailableError
    """
    # The broker-side max wait (milliseconds) must be at least 100 ms
    # below the client timeout (seconds), or the client would give up
    # before the broker replies.
    if (max_wait_time / 1000) > (self.timeout - 0.1):
        raise ValueError(
            "%r: max_wait_time: %d must be less than client.timeout by "
            "at least 100 milliseconds.", self, max_wait_time)
    encoder = partial(KafkaCodec.encode_fetch_request,
                      max_wait_time=max_wait_time,
                      min_bytes=min_bytes)
    # resps is a list of FetchResponse() objects, each of which can hold
    # 1-n messages.
    resps = yield self._send_broker_aware_request(
        payloads, encoder,
        KafkaCodec.decode_fetch_response)
    returnValue(self._handle_responses(resps, fail_on_error, callback))
"def",
"send_fetch_request",
"(",
"self",
",",
"payloads",
"=",
"None",
",",
"fail_on_error",
"=",
"True",
",",
"callback",
"=",
"None",
",",
"max_wait_time",
"=",
"DEFAULT_FETCH_SERVER_WAIT_MSECS",
",",
"min_bytes",
"=",
"DEFAULT_FETCH_MIN_BYTES",
")",
":",
"if",... | 39.933333 | 22.133333 |
def add_flair_template(self, subreddit, text='', css_class='',
                       text_editable=False, is_link=False):
    """Add a flair template to the given subreddit.

    :returns: The json response from the server.
    """
    flair_type = 'LINK_FLAIR' if is_link else 'USER_FLAIR'
    data = {'r': six.text_type(subreddit),
            'text': text,
            'css_class': css_class,
            'text_editable': six.text_type(text_editable),
            'flair_type': flair_type}
    return self.request_json(self.config['flairtemplate'], data=data)
"def",
"add_flair_template",
"(",
"self",
",",
"subreddit",
",",
"text",
"=",
"''",
",",
"css_class",
"=",
"''",
",",
"text_editable",
"=",
"False",
",",
"is_link",
"=",
"False",
")",
":",
"data",
"=",
"{",
"'r'",
":",
"six",
".",
"text_type",
"(",
"... | 43.384615 | 18.615385 |
def reload(self, reload_timeout, save_config):
    """Reload the device.

    Drives the device through a reload using a finite state machine over
    console output patterns (reload confirmation, optional ROMMON boot,
    root-system user reconfiguration, and reconnect).

    :param reload_timeout: seconds allowed for the PROCEED->boot phase.
    :param save_config: NOTE(review): accepted but never referenced in
        this body — confirm whether config saving is handled elsewhere.
    :return: result of ``fsm.run()``.
    """
    PROCEED = re.compile(re.escape("Proceed with reload? [confirm]"))
    CONTINUE = re.compile(re.escape("Do you wish to continue?[confirm(y/n)]"))
    DONE = re.compile(re.escape("[Done]"))
    CONFIGURATION_COMPLETED = re.compile("SYSTEM CONFIGURATION COMPLETED")
    CONFIGURATION_IN_PROCESS = re.compile("SYSTEM CONFIGURATION IN PROCESS")
    # CONSOLE = re.compile("ios con[0|1]/RS?P[0-1]/CPU0 is now available")
    CONSOLE = re.compile("con[0|1]/(?:RS?P)?[0-1]/CPU0 is now available")
    CONSOLE_STBY = re.compile("con[0|1]/(?:RS?P)?[0-1]/CPU0 is in standby")
    RECONFIGURE_USERNAME_PROMPT = "[Nn][Oo] root-system username is configured"
    ROOT_USERNAME_PROMPT = "Enter root-system username\: "
    ROOT_PASSWORD_PROMPT = "Enter secret( again)?\: "
    # BOOT=disk0:asr9k-os-mbi-6.1.1/0x100305/mbiasr9k-rsp3.vm,1; \
    # disk0:asr9k-os-mbi-5.3.4/0x100305/mbiasr9k-rsp3.vm,2;
    # Candidate Boot Image num 0 is disk0:asr9k-os-mbi-6.1.1/0x100305/mbiasr9k-rsp3.vm
    # Candidate Boot Image num 1 is disk0:asr9k-os-mbi-5.3.4/0x100305/mbiasr9k-rsp3.vm
    CANDIDATE_BOOT_IMAGE = "Candidate Boot Image num 0 is .*vm"
    NOT_COMMITTED = re.compile(re.escape("Some active software packages are not yet committed. Proceed?[confirm]"))
    RELOAD_NA = re.compile("Reload to the ROM monitor disallowed from a telnet line")
    # Event indices (referenced by the state numbers in `transitions`):
    #       0          1      2        3                          4                  5
    events = [RELOAD_NA, DONE, PROCEED, CONFIGURATION_IN_PROCESS, self.rommon_re, self.press_return_re,
              #   6               7                   8                     9
              CONSOLE, CONFIGURATION_COMPLETED, RECONFIGURE_USERNAME_PROMPT, ROOT_USERNAME_PROMPT,
              #   10                    11              12     13    14           15
              ROOT_PASSWORD_PROMPT, self.username_re, TIMEOUT, EOF, self.reload_cmd, CANDIDATE_BOOT_IMAGE,
              #   16            17
              NOT_COMMITTED, CONSOLE_STBY, CONTINUE]
    # Each transition: (event, [valid source states], next state, action, timeout).
    transitions = [
        (RELOAD_NA, [0], -1, a_reload_na, 0),
        (CONTINUE, [0], 0, partial(a_send, "y\r"), 0),
        # temp for testing
        (NOT_COMMITTED, [0], -1, a_not_committed, 10),
        (DONE, [0], 2, None, 120),
        (PROCEED, [2], 3, partial(a_send, "\r"), reload_timeout),
        # this needs to be verified
        (self.rommon_re, [0, 3], 3, partial(a_send_boot, "boot"), 600),
        (CANDIDATE_BOOT_IMAGE, [0, 3], 4, a_message_callback, 600),
        (CONSOLE, [0, 1, 3, 4], 5, None, 600),
        # This is required. Otherwise nothing more is displayed on the console
        (self.press_return_re, [5], 6, partial(a_send, "\r"), 300),
        # configure root username and password the same as used for device connection.
        (RECONFIGURE_USERNAME_PROMPT, [6, 7, 10], 8, None, 10),
        (ROOT_USERNAME_PROMPT, [8], 9, partial(a_send_username, self.device.node_info.username), 1),
        (ROOT_PASSWORD_PROMPT, [9], 9, partial(a_send_password, self.device.node_info.password), 1),
        (CONFIGURATION_IN_PROCESS, [6, 9], 10, None, 1200),
        (CONFIGURATION_COMPLETED, [10], -1, a_reconnect, 0),
        (CONSOLE_STBY, [4], -1, ConnectionStandbyConsole("Standby Console"), 0),
        (self.username_re, [7, 9], -1, a_return_and_reconnect, 0),
        (TIMEOUT, [0, 1, 2], -1, ConnectionAuthenticationError("Unable to reload"), 0),
        (EOF, [0, 1, 2, 3, 4, 5], -1, ConnectionError("Device disconnected"), 0),
        (TIMEOUT, [6], 7, partial(a_send, "\r"), 180),
        (TIMEOUT, [7], -1, ConnectionAuthenticationError("Unable to reconnect after reloading"), 0),
        (TIMEOUT, [10], -1, a_reconnect, 0),
    ]
    fsm = FSM("RELOAD", self.device, events, transitions, timeout=600)
    return fsm.run()
"def",
"reload",
"(",
"self",
",",
"reload_timeout",
",",
"save_config",
")",
":",
"PROCEED",
"=",
"re",
".",
"compile",
"(",
"re",
".",
"escape",
"(",
"\"Proceed with reload? [confirm]\"",
")",
")",
"CONTINUE",
"=",
"re",
".",
"compile",
"(",
"re",
".",
... | 64.564516 | 33.693548 |
def create_memory_layer(
        layer_name, geometry, coordinate_reference_system=None, fields=None):
    """Create a vector memory layer.

    :param layer_name: The name of the layer.
    :type layer_name: str

    :param geometry: The geometry of the layer.
    :rtype geometry: QgsWkbTypes (note:
        from C++ QgsWkbTypes::GeometryType enum)

    :param coordinate_reference_system: The CRS of the memory layer.
    :type coordinate_reference_system: QgsCoordinateReferenceSystem

    :param fields: Fields of the vector layer. Default to None.
    :type fields: QgsFields

    :return: The memory layer.
    :rtype: QgsVectorLayer
    """
    # Map the generic geometry enum onto the concrete multi-part WKB type.
    wkb_by_geometry = {
        QgsWkbTypes.PointGeometry: QgsWkbTypes.MultiPoint,
        QgsWkbTypes.LineGeometry: QgsWkbTypes.MultiLineString,
        QgsWkbTypes.PolygonGeometry: QgsWkbTypes.MultiPolygon,
        QgsWkbTypes.NullGeometry: QgsWkbTypes.NoGeometry,
    }
    if geometry not in wkb_by_geometry:
        raise MemoryLayerCreationError(
            'Layer geometry must be one of: Point, Line, '
            'Polygon or Null, I got %s' % geometry)
    wkb_type = wkb_by_geometry[geometry]

    if coordinate_reference_system is None:
        coordinate_reference_system = QgsCoordinateReferenceSystem()
    if fields is None:
        fields = QgsFields()
    elif not isinstance(fields, QgsFields):
        # fields is a plain list; copy the entries into a QgsFields container.
        converted = QgsFields()
        for field in fields:
            converted.append(field)
        fields = converted

    memory_layer = QgsMemoryProviderUtils.createMemoryLayer(
        name=layer_name,
        fields=fields,
        geometryType=wkb_type,
        crs=coordinate_reference_system)
    memory_layer.dataProvider().createSpatialIndex()
    memory_layer.keywords = {'inasafe_fields': {}}
    return memory_layer
"def",
"create_memory_layer",
"(",
"layer_name",
",",
"geometry",
",",
"coordinate_reference_system",
"=",
"None",
",",
"fields",
"=",
"None",
")",
":",
"if",
"geometry",
"==",
"QgsWkbTypes",
".",
"PointGeometry",
":",
"wkb_type",
"=",
"QgsWkbTypes",
".",
"Multi... | 34 | 15.945455 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.