text stringlengths 89 104k | code_tokens list | avg_line_len float64 7.91 980 | score float64 0 630 |
|---|---|---|---|
def idle_task(self):
'''handle mavlink packets'''
if self.accelcal_count != -1:
if self.accelcal_wait_enter and self.empty_input_count != self.mpstate.empty_input_count:
self.accelcal_wait_enter = False
self.accelcal_count += 1
# tell the APM that user has done as requested
self.master.mav.command_ack_send(self.accelcal_count, 1)
if self.accelcal_count >= 6:
self.accelcal_count = -1
if self.compassmot_running:
if self.mpstate.empty_input_count != self.empty_input_count:
# user has hit enter, stop the process
self.compassmot_running = False
print("sending stop")
self.master.mav.command_ack_send(0, 1) | [
"def",
"idle_task",
"(",
"self",
")",
":",
"if",
"self",
".",
"accelcal_count",
"!=",
"-",
"1",
":",
"if",
"self",
".",
"accelcal_wait_enter",
"and",
"self",
".",
"empty_input_count",
"!=",
"self",
".",
"mpstate",
".",
"empty_input_count",
":",
"self",
".",
"accelcal_wait_enter",
"=",
"False",
"self",
".",
"accelcal_count",
"+=",
"1",
"# tell the APM that user has done as requested",
"self",
".",
"master",
".",
"mav",
".",
"command_ack_send",
"(",
"self",
".",
"accelcal_count",
",",
"1",
")",
"if",
"self",
".",
"accelcal_count",
">=",
"6",
":",
"self",
".",
"accelcal_count",
"=",
"-",
"1",
"if",
"self",
".",
"compassmot_running",
":",
"if",
"self",
".",
"mpstate",
".",
"empty_input_count",
"!=",
"self",
".",
"empty_input_count",
":",
"# user has hit enter, stop the process",
"self",
".",
"compassmot_running",
"=",
"False",
"print",
"(",
"\"sending stop\"",
")",
"self",
".",
"master",
".",
"mav",
".",
"command_ack_send",
"(",
"0",
",",
"1",
")"
] | 47.941176 | 16.411765 |
def submit_design_run(self, data_view_id, num_candidates, effort, target=None, constraints=[], sampler="Default"):
"""
Submits a new experimental design run.
:param data_view_id: The ID number of the data view to which the
run belongs, as a string
:type data_view_id: str
:param num_candidates: The number of candidates to return
:type num_candidates: int
:param target: An :class:``Target`` instance representing
the design run optimization target
:type target: :class:``Target``
:param constraints: An array of design constraints (instances of
objects which extend :class:``BaseConstraint``)
:type constraints: list of :class:``BaseConstraint``
:param sampler: The name of the sampler to use during the design run:
either "Default" or "This view"
:type sampler: str
:return: A :class:`DesignRun` instance containing the UID of the
new run
"""
if effort > 30:
raise CitrinationClientError("Parameter effort must be less than 30 to trigger a design run")
if target is not None:
target = target.to_dict()
constraint_dicts = [c.to_dict() for c in constraints]
body = {
"num_candidates": num_candidates,
"target": target,
"effort": effort,
"constraints": constraint_dicts,
"sampler": sampler
}
url = routes.submit_data_view_design(data_view_id)
response = self._post_json(url, body).json()
return DesignRun(response["data"]["design_run"]["uid"]) | [
"def",
"submit_design_run",
"(",
"self",
",",
"data_view_id",
",",
"num_candidates",
",",
"effort",
",",
"target",
"=",
"None",
",",
"constraints",
"=",
"[",
"]",
",",
"sampler",
"=",
"\"Default\"",
")",
":",
"if",
"effort",
">",
"30",
":",
"raise",
"CitrinationClientError",
"(",
"\"Parameter effort must be less than 30 to trigger a design run\"",
")",
"if",
"target",
"is",
"not",
"None",
":",
"target",
"=",
"target",
".",
"to_dict",
"(",
")",
"constraint_dicts",
"=",
"[",
"c",
".",
"to_dict",
"(",
")",
"for",
"c",
"in",
"constraints",
"]",
"body",
"=",
"{",
"\"num_candidates\"",
":",
"num_candidates",
",",
"\"target\"",
":",
"target",
",",
"\"effort\"",
":",
"effort",
",",
"\"constraints\"",
":",
"constraint_dicts",
",",
"\"sampler\"",
":",
"sampler",
"}",
"url",
"=",
"routes",
".",
"submit_data_view_design",
"(",
"data_view_id",
")",
"response",
"=",
"self",
".",
"_post_json",
"(",
"url",
",",
"body",
")",
".",
"json",
"(",
")",
"return",
"DesignRun",
"(",
"response",
"[",
"\"data\"",
"]",
"[",
"\"design_run\"",
"]",
"[",
"\"uid\"",
"]",
")"
] | 38.761905 | 21.714286 |
def sample(self, size=(), rule="R", antithetic=None):
"""
Create pseudo-random generated samples.
By default, the samples are created using standard (pseudo-)random
samples. However, if needed, the samples can also be created by either
low-discrepancy sequences, and/or variance reduction techniques.
Changing the sampling scheme, use the following ``rule`` flag:
+-------+-------------------------------------------------+
| key | Description |
+=======+=================================================+
| ``C`` | Roots of the first order Chebyshev polynomials. |
+-------+-------------------------------------------------+
| ``NC``| Chebyshev nodes adjusted to ensure nested. |
+-------+-------------------------------------------------+
| ``K`` | Korobov lattice. |
+-------+-------------------------------------------------+
| ``R`` | Classical (Pseudo-)Random samples. |
+-------+-------------------------------------------------+
| ``RG``| Regular spaced grid. |
+-------+-------------------------------------------------+
| ``NG``| Nested regular spaced grid. |
+-------+-------------------------------------------------+
| ``L`` | Latin hypercube samples. |
+-------+-------------------------------------------------+
| ``S`` | Sobol low-discrepancy sequence. |
+-------+-------------------------------------------------+
| ``H`` | Halton low-discrepancy sequence. |
+-------+-------------------------------------------------+
| ``M`` | Hammersley low-discrepancy sequence. |
+-------+-------------------------------------------------+
All samples are created on the ``[0, 1]``-hypercube, which then is
mapped into the domain of the distribution using the inverse Rosenblatt
transformation.
Args:
size (numpy.ndarray):
The size of the samples to generate.
rule (str):
Indicator defining the sampling scheme.
antithetic (bool, numpy.ndarray):
If provided, will be used to setup antithetic variables. If
array, defines the axes to mirror.
Returns:
(numpy.ndarray):
Random samples with shape ``(len(self),)+self.shape``.
"""
size_ = numpy.prod(size, dtype=int)
dim = len(self)
if dim > 1:
if isinstance(size, (tuple, list, numpy.ndarray)):
shape = (dim,) + tuple(size)
else:
shape = (dim, size)
else:
shape = size
from . import sampler
out = sampler.generator.generate_samples(
order=size_, domain=self, rule=rule, antithetic=antithetic)
try:
out = out.reshape(shape)
except:
if len(self) == 1:
out = out.flatten()
else:
out = out.reshape(dim, int(out.size/dim))
return out | [
"def",
"sample",
"(",
"self",
",",
"size",
"=",
"(",
")",
",",
"rule",
"=",
"\"R\"",
",",
"antithetic",
"=",
"None",
")",
":",
"size_",
"=",
"numpy",
".",
"prod",
"(",
"size",
",",
"dtype",
"=",
"int",
")",
"dim",
"=",
"len",
"(",
"self",
")",
"if",
"dim",
">",
"1",
":",
"if",
"isinstance",
"(",
"size",
",",
"(",
"tuple",
",",
"list",
",",
"numpy",
".",
"ndarray",
")",
")",
":",
"shape",
"=",
"(",
"dim",
",",
")",
"+",
"tuple",
"(",
"size",
")",
"else",
":",
"shape",
"=",
"(",
"dim",
",",
"size",
")",
"else",
":",
"shape",
"=",
"size",
"from",
".",
"import",
"sampler",
"out",
"=",
"sampler",
".",
"generator",
".",
"generate_samples",
"(",
"order",
"=",
"size_",
",",
"domain",
"=",
"self",
",",
"rule",
"=",
"rule",
",",
"antithetic",
"=",
"antithetic",
")",
"try",
":",
"out",
"=",
"out",
".",
"reshape",
"(",
"shape",
")",
"except",
":",
"if",
"len",
"(",
"self",
")",
"==",
"1",
":",
"out",
"=",
"out",
".",
"flatten",
"(",
")",
"else",
":",
"out",
"=",
"out",
".",
"reshape",
"(",
"dim",
",",
"int",
"(",
"out",
".",
"size",
"/",
"dim",
")",
")",
"return",
"out"
] | 44.410959 | 23.315068 |
def make_rendition(self, width, height):
'''build a rendition
0 x 0 -> will give master URL
only width -> will make a renditions with master's aspect ratio
width x height -> will make an image potentialy cropped
'''
image = Image.open(self.master)
format = image.format
target_w = float(width)
target_h = float(height)
if (target_w == 0):
target_w = self.master_width
if (target_h == 0):
target_h = self.master_height
rendition_key = '%dx%d' % (target_w, target_h)
if rendition_key in self.renditions:
return self.renditions[rendition_key]
if (target_w != self.master_width or target_h != self.master_height):
r = target_w / target_h
R = float(self.master_width) / self.master_height
if r != R:
if r > R:
crop_w = self.master_width
crop_h = crop_w / r
x = 0
y = int(self.master_height - crop_h) >> 1
else:
crop_h = self.master_height
crop_w = crop_h * r
x = int(self.master_width - crop_w) >> 1
y = 0
image = image.crop((x, y, int(crop_w + x), int(crop_h + y)))
image.thumbnail((int(target_w), int(target_h)), Image.ANTIALIAS)
filename, ext = os.path.splitext(self.get_master_filename())
rendition_name = '%s/%s_%s%s' % (
IMAGE_DIRECTORY,
filename,
rendition_key,
ext
)
fd = BytesIO()
image.save(fd, format)
default_storage.save(rendition_name, fd)
self.renditions[rendition_key] = rendition_name
self.save()
return rendition_name
return self.master.name | [
"def",
"make_rendition",
"(",
"self",
",",
"width",
",",
"height",
")",
":",
"image",
"=",
"Image",
".",
"open",
"(",
"self",
".",
"master",
")",
"format",
"=",
"image",
".",
"format",
"target_w",
"=",
"float",
"(",
"width",
")",
"target_h",
"=",
"float",
"(",
"height",
")",
"if",
"(",
"target_w",
"==",
"0",
")",
":",
"target_w",
"=",
"self",
".",
"master_width",
"if",
"(",
"target_h",
"==",
"0",
")",
":",
"target_h",
"=",
"self",
".",
"master_height",
"rendition_key",
"=",
"'%dx%d'",
"%",
"(",
"target_w",
",",
"target_h",
")",
"if",
"rendition_key",
"in",
"self",
".",
"renditions",
":",
"return",
"self",
".",
"renditions",
"[",
"rendition_key",
"]",
"if",
"(",
"target_w",
"!=",
"self",
".",
"master_width",
"or",
"target_h",
"!=",
"self",
".",
"master_height",
")",
":",
"r",
"=",
"target_w",
"/",
"target_h",
"R",
"=",
"float",
"(",
"self",
".",
"master_width",
")",
"/",
"self",
".",
"master_height",
"if",
"r",
"!=",
"R",
":",
"if",
"r",
">",
"R",
":",
"crop_w",
"=",
"self",
".",
"master_width",
"crop_h",
"=",
"crop_w",
"/",
"r",
"x",
"=",
"0",
"y",
"=",
"int",
"(",
"self",
".",
"master_height",
"-",
"crop_h",
")",
">>",
"1",
"else",
":",
"crop_h",
"=",
"self",
".",
"master_height",
"crop_w",
"=",
"crop_h",
"*",
"r",
"x",
"=",
"int",
"(",
"self",
".",
"master_width",
"-",
"crop_w",
")",
">>",
"1",
"y",
"=",
"0",
"image",
"=",
"image",
".",
"crop",
"(",
"(",
"x",
",",
"y",
",",
"int",
"(",
"crop_w",
"+",
"x",
")",
",",
"int",
"(",
"crop_h",
"+",
"y",
")",
")",
")",
"image",
".",
"thumbnail",
"(",
"(",
"int",
"(",
"target_w",
")",
",",
"int",
"(",
"target_h",
")",
")",
",",
"Image",
".",
"ANTIALIAS",
")",
"filename",
",",
"ext",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"self",
".",
"get_master_filename",
"(",
")",
")",
"rendition_name",
"=",
"'%s/%s_%s%s'",
"%",
"(",
"IMAGE_DIRECTORY",
",",
"filename",
",",
"rendition_key",
",",
"ext",
")",
"fd",
"=",
"BytesIO",
"(",
")",
"image",
".",
"save",
"(",
"fd",
",",
"format",
")",
"default_storage",
".",
"save",
"(",
"rendition_name",
",",
"fd",
")",
"self",
".",
"renditions",
"[",
"rendition_key",
"]",
"=",
"rendition_name",
"self",
".",
"save",
"(",
")",
"return",
"rendition_name",
"return",
"self",
".",
"master",
".",
"name"
] | 32.067797 | 19.254237 |
def watch_active_servings(dk_api, kitchen, period):
"""
returns a string.
:param dk_api: -- api object
:param kitchen: string
:param period: integer
:rtype: string
"""
print 'period', period
# try:
# p = int(period)
# except ValueError:
# return 'DKCloudCommand.watch_active_servings requires an integer for the period'
if period <= 0:
return 'DKCloudCommand.watch_active_servings requires a positive period'
DKActiveServingWatcherSingleton().set_sleep_time(period)
DKActiveServingWatcherSingleton().set_api(dk_api)
DKActiveServingWatcherSingleton().set_kitchen(kitchen)
DKActiveServingWatcherSingleton().start_watcher()
return "" | [
"def",
"watch_active_servings",
"(",
"dk_api",
",",
"kitchen",
",",
"period",
")",
":",
"print",
"'period'",
",",
"period",
"# try:",
"# p = int(period)",
"# except ValueError:",
"# return 'DKCloudCommand.watch_active_servings requires an integer for the period'",
"if",
"period",
"<=",
"0",
":",
"return",
"'DKCloudCommand.watch_active_servings requires a positive period'",
"DKActiveServingWatcherSingleton",
"(",
")",
".",
"set_sleep_time",
"(",
"period",
")",
"DKActiveServingWatcherSingleton",
"(",
")",
".",
"set_api",
"(",
"dk_api",
")",
"DKActiveServingWatcherSingleton",
"(",
")",
".",
"set_kitchen",
"(",
"kitchen",
")",
"DKActiveServingWatcherSingleton",
"(",
")",
".",
"start_watcher",
"(",
")",
"return",
"\"\""
] | 35.227273 | 19.318182 |
def ssh_to_task(task) -> paramiko.SSHClient:
"""Create ssh connection to task's machine
returns Paramiko SSH client connected to host.
"""
username = task.ssh_username
hostname = task.public_ip
ssh_key_fn = get_keypair_fn()
print(f"ssh -i {ssh_key_fn} {username}@{hostname}")
pkey = paramiko.RSAKey.from_private_key_file(ssh_key_fn)
ssh_client = paramiko.SSHClient()
ssh_client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
assert ssh_client
counter = 1
while True:
try:
ssh_client.connect(hostname=hostname, username=username, pkey=pkey)
if counter % 11 == 0: # occasionally re-obtain public ip, machine could've gotten restarted
hostname = task.public_ip
break
except Exception as e:
print(
f'{task.name}: Exception connecting to {hostname} via ssh (could be a timeout): {e}')
time.sleep(RETRY_INTERVAL_SEC)
return ssh_client | [
"def",
"ssh_to_task",
"(",
"task",
")",
"->",
"paramiko",
".",
"SSHClient",
":",
"username",
"=",
"task",
".",
"ssh_username",
"hostname",
"=",
"task",
".",
"public_ip",
"ssh_key_fn",
"=",
"get_keypair_fn",
"(",
")",
"print",
"(",
"f\"ssh -i {ssh_key_fn} {username}@{hostname}\"",
")",
"pkey",
"=",
"paramiko",
".",
"RSAKey",
".",
"from_private_key_file",
"(",
"ssh_key_fn",
")",
"ssh_client",
"=",
"paramiko",
".",
"SSHClient",
"(",
")",
"ssh_client",
".",
"set_missing_host_key_policy",
"(",
"paramiko",
".",
"AutoAddPolicy",
"(",
")",
")",
"assert",
"ssh_client",
"counter",
"=",
"1",
"while",
"True",
":",
"try",
":",
"ssh_client",
".",
"connect",
"(",
"hostname",
"=",
"hostname",
",",
"username",
"=",
"username",
",",
"pkey",
"=",
"pkey",
")",
"if",
"counter",
"%",
"11",
"==",
"0",
":",
"# occasionally re-obtain public ip, machine could've gotten restarted",
"hostname",
"=",
"task",
".",
"public_ip",
"break",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"f'{task.name}: Exception connecting to {hostname} via ssh (could be a timeout): {e}'",
")",
"time",
".",
"sleep",
"(",
"RETRY_INTERVAL_SEC",
")",
"return",
"ssh_client"
] | 29.833333 | 23.333333 |
def traverse_data(obj, key_target):
''' will traverse nested list and dicts until key_target equals the current dict key '''
if isinstance(obj, str) and '.json' in str(obj):
obj = json.load(open(obj, 'r'))
if isinstance(obj, list):
queue = obj.copy()
elif isinstance(obj, dict):
queue = [obj.copy()]
else:
sys.exit('obj needs to be a list or dict')
count = 0
''' BFS '''
while not queue or count != 1000:
count += 1
curr_obj = queue.pop()
if isinstance(curr_obj, dict):
for key, value in curr_obj.items():
if key == key_target:
return curr_obj
else:
queue.append(curr_obj[key])
elif isinstance(curr_obj, list):
for co in curr_obj:
queue.append(co)
if count == 1000:
sys.exit('traverse_data needs to be updated...')
return False | [
"def",
"traverse_data",
"(",
"obj",
",",
"key_target",
")",
":",
"if",
"isinstance",
"(",
"obj",
",",
"str",
")",
"and",
"'.json'",
"in",
"str",
"(",
"obj",
")",
":",
"obj",
"=",
"json",
".",
"load",
"(",
"open",
"(",
"obj",
",",
"'r'",
")",
")",
"if",
"isinstance",
"(",
"obj",
",",
"list",
")",
":",
"queue",
"=",
"obj",
".",
"copy",
"(",
")",
"elif",
"isinstance",
"(",
"obj",
",",
"dict",
")",
":",
"queue",
"=",
"[",
"obj",
".",
"copy",
"(",
")",
"]",
"else",
":",
"sys",
".",
"exit",
"(",
"'obj needs to be a list or dict'",
")",
"count",
"=",
"0",
"''' BFS '''",
"while",
"not",
"queue",
"or",
"count",
"!=",
"1000",
":",
"count",
"+=",
"1",
"curr_obj",
"=",
"queue",
".",
"pop",
"(",
")",
"if",
"isinstance",
"(",
"curr_obj",
",",
"dict",
")",
":",
"for",
"key",
",",
"value",
"in",
"curr_obj",
".",
"items",
"(",
")",
":",
"if",
"key",
"==",
"key_target",
":",
"return",
"curr_obj",
"else",
":",
"queue",
".",
"append",
"(",
"curr_obj",
"[",
"key",
"]",
")",
"elif",
"isinstance",
"(",
"curr_obj",
",",
"list",
")",
":",
"for",
"co",
"in",
"curr_obj",
":",
"queue",
".",
"append",
"(",
"co",
")",
"if",
"count",
"==",
"1000",
":",
"sys",
".",
"exit",
"(",
"'traverse_data needs to be updated...'",
")",
"return",
"False"
] | 34.259259 | 13.444444 |
def _product_file_hash(self, product=None):
"""
Get the hash of the each product file
"""
if self.hasher is None:
return None
else:
products = self._rectify_products(product)
product_file_hash = [
util_hash.hash_file(p, hasher=self.hasher, base='hex')
for p in products
]
return product_file_hash | [
"def",
"_product_file_hash",
"(",
"self",
",",
"product",
"=",
"None",
")",
":",
"if",
"self",
".",
"hasher",
"is",
"None",
":",
"return",
"None",
"else",
":",
"products",
"=",
"self",
".",
"_rectify_products",
"(",
"product",
")",
"product_file_hash",
"=",
"[",
"util_hash",
".",
"hash_file",
"(",
"p",
",",
"hasher",
"=",
"self",
".",
"hasher",
",",
"base",
"=",
"'hex'",
")",
"for",
"p",
"in",
"products",
"]",
"return",
"product_file_hash"
] | 32 | 11.538462 |
def dimensions(self):
"""Iterate over the dimension columns, regardless of parent/child status
"""
from ambry.valuetype.core import ROLE
for c in self.columns:
if c.role == ROLE.DIMENSION:
yield c | [
"def",
"dimensions",
"(",
"self",
")",
":",
"from",
"ambry",
".",
"valuetype",
".",
"core",
"import",
"ROLE",
"for",
"c",
"in",
"self",
".",
"columns",
":",
"if",
"c",
".",
"role",
"==",
"ROLE",
".",
"DIMENSION",
":",
"yield",
"c"
] | 25 | 17.1 |
def assignBranchRegisters(inodes, registerMaker):
"""Assign temporary registers to each of the branch nodes.
"""
for node in inodes:
node.reg = registerMaker(node, temporary=True) | [
"def",
"assignBranchRegisters",
"(",
"inodes",
",",
"registerMaker",
")",
":",
"for",
"node",
"in",
"inodes",
":",
"node",
".",
"reg",
"=",
"registerMaker",
"(",
"node",
",",
"temporary",
"=",
"True",
")"
] | 39 | 8 |
def delete_cluster_role_binding(self, name, **kwargs): # noqa: E501
"""delete_cluster_role_binding # noqa: E501
delete a ClusterRoleBinding # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_cluster_role_binding(name, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the ClusterRoleBinding (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:param V1DeleteOptions body:
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501
else:
(data) = self.delete_cluster_role_binding_with_http_info(name, **kwargs) # noqa: E501
return data | [
"def",
"delete_cluster_role_binding",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async_req'",
")",
":",
"return",
"self",
".",
"delete_cluster_role_binding_with_http_info",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"delete_cluster_role_binding_with_http_info",
"(",
"name",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"return",
"data"
] | 93.481481 | 66.407407 |
def money_receipts(pronac, dt):
"""
Checks how many items are in a same receipt when payment type is
withdraw/money
- is_outlier: True if there are any receipts that have more than one
- itens_que_compartilham_comprovantes: List of items that share receipt
"""
df = verified_repeated_receipts_for_pronac(pronac)
comprovantes_saque = df[df['tpFormaDePagamento'] == 3.0]
return metric_return(comprovantes_saque) | [
"def",
"money_receipts",
"(",
"pronac",
",",
"dt",
")",
":",
"df",
"=",
"verified_repeated_receipts_for_pronac",
"(",
"pronac",
")",
"comprovantes_saque",
"=",
"df",
"[",
"df",
"[",
"'tpFormaDePagamento'",
"]",
"==",
"3.0",
"]",
"return",
"metric_return",
"(",
"comprovantes_saque",
")"
] | 40.363636 | 19.272727 |
def stop(self):
""" Stop the container gracefully.
First all entrypoints are asked to ``stop()``.
This ensures that no new worker threads are started.
It is the extensions' responsibility to gracefully shut down when
``stop()`` is called on them and only return when they have stopped.
After all entrypoints have stopped the container waits for any
active workers to complete.
After all active workers have stopped the container stops all
dependency providers.
At this point there should be no more managed threads. In case there
are any managed threads, they are killed by the container.
"""
if self._died.ready():
_log.debug('already stopped %s', self)
return
if self._being_killed:
# this race condition can happen when a container is hosted by a
# runner and yields during its kill method; if it's unlucky in
# scheduling the runner will try to stop() it before self._died
# has a result
_log.debug('already being killed %s', self)
try:
self._died.wait()
except:
pass # don't re-raise if we died with an exception
return
_log.debug('stopping %s', self)
with _log_time('stopped %s', self):
# entrypoint have to be stopped before dependencies to ensure
# that running workers can successfully complete
self.entrypoints.all.stop()
# there might still be some running workers, which we have to
# wait for to complete before we can stop dependencies
self._worker_pool.waitall()
# it should be safe now to stop any dependency as there is no
# active worker which could be using it
self.dependencies.all.stop()
# finally, stop remaining extensions
self.subextensions.all.stop()
# any any managed threads they spawned
self._kill_managed_threads()
self.started = False
# if `kill` is called after `stop`, they race to send this
if not self._died.ready():
self._died.send(None) | [
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"_died",
".",
"ready",
"(",
")",
":",
"_log",
".",
"debug",
"(",
"'already stopped %s'",
",",
"self",
")",
"return",
"if",
"self",
".",
"_being_killed",
":",
"# this race condition can happen when a container is hosted by a",
"# runner and yields during its kill method; if it's unlucky in",
"# scheduling the runner will try to stop() it before self._died",
"# has a result",
"_log",
".",
"debug",
"(",
"'already being killed %s'",
",",
"self",
")",
"try",
":",
"self",
".",
"_died",
".",
"wait",
"(",
")",
"except",
":",
"pass",
"# don't re-raise if we died with an exception",
"return",
"_log",
".",
"debug",
"(",
"'stopping %s'",
",",
"self",
")",
"with",
"_log_time",
"(",
"'stopped %s'",
",",
"self",
")",
":",
"# entrypoint have to be stopped before dependencies to ensure",
"# that running workers can successfully complete",
"self",
".",
"entrypoints",
".",
"all",
".",
"stop",
"(",
")",
"# there might still be some running workers, which we have to",
"# wait for to complete before we can stop dependencies",
"self",
".",
"_worker_pool",
".",
"waitall",
"(",
")",
"# it should be safe now to stop any dependency as there is no",
"# active worker which could be using it",
"self",
".",
"dependencies",
".",
"all",
".",
"stop",
"(",
")",
"# finally, stop remaining extensions",
"self",
".",
"subextensions",
".",
"all",
".",
"stop",
"(",
")",
"# any any managed threads they spawned",
"self",
".",
"_kill_managed_threads",
"(",
")",
"self",
".",
"started",
"=",
"False",
"# if `kill` is called after `stop`, they race to send this",
"if",
"not",
"self",
".",
"_died",
".",
"ready",
"(",
")",
":",
"self",
".",
"_died",
".",
"send",
"(",
"None",
")"
] | 36.311475 | 22.557377 |
def cublasDsymm(handle, side, uplo, m, n, alpha, A, lda, B, ldb, beta, C, ldc):
"""
Matrix-matrix product for real symmetric matrix.
"""
status = _libcublas.cublasDsymm_v2(handle,
_CUBLAS_SIDE_MODE[side],
_CUBLAS_FILL_MODE[uplo],
m, n, ctypes.byref(ctypes.c_double(alpha)),
int(A), lda, int(B), ldb,
ctypes.byref(ctypes.c_double(beta)),
int(C), ldc)
cublasCheckStatus(status) | [
"def",
"cublasDsymm",
"(",
"handle",
",",
"side",
",",
"uplo",
",",
"m",
",",
"n",
",",
"alpha",
",",
"A",
",",
"lda",
",",
"B",
",",
"ldb",
",",
"beta",
",",
"C",
",",
"ldc",
")",
":",
"status",
"=",
"_libcublas",
".",
"cublasDsymm_v2",
"(",
"handle",
",",
"_CUBLAS_SIDE_MODE",
"[",
"side",
"]",
",",
"_CUBLAS_FILL_MODE",
"[",
"uplo",
"]",
",",
"m",
",",
"n",
",",
"ctypes",
".",
"byref",
"(",
"ctypes",
".",
"c_double",
"(",
"alpha",
")",
")",
",",
"int",
"(",
"A",
")",
",",
"lda",
",",
"int",
"(",
"B",
")",
",",
"ldb",
",",
"ctypes",
".",
"byref",
"(",
"ctypes",
".",
"c_double",
"(",
"beta",
")",
")",
",",
"int",
"(",
"C",
")",
",",
"ldc",
")",
"cublasCheckStatus",
"(",
"status",
")"
] | 44.642857 | 21.785714 |
def cluster_types(types, max_clust=12):
"""
Generates a dictionary mapping each binary number in types to an integer
from 0 to max_clust. Hierarchical clustering is used to determine which
which binary numbers should map to the same integer.
"""
if len(types) < max_clust:
max_clust = len(types)
# Do actual clustering
cluster_dict = do_clustering(types, max_clust)
cluster_ranks = rank_clusters(cluster_dict)
# Create a dictionary mapping binary numbers to indices
ranks = {}
for key in cluster_dict:
for typ in cluster_dict[key]:
ranks[typ] = cluster_ranks[key]
return ranks | [
"def",
"cluster_types",
"(",
"types",
",",
"max_clust",
"=",
"12",
")",
":",
"if",
"len",
"(",
"types",
")",
"<",
"max_clust",
":",
"max_clust",
"=",
"len",
"(",
"types",
")",
"# Do actual clustering",
"cluster_dict",
"=",
"do_clustering",
"(",
"types",
",",
"max_clust",
")",
"cluster_ranks",
"=",
"rank_clusters",
"(",
"cluster_dict",
")",
"# Create a dictionary mapping binary numbers to indices",
"ranks",
"=",
"{",
"}",
"for",
"key",
"in",
"cluster_dict",
":",
"for",
"typ",
"in",
"cluster_dict",
"[",
"key",
"]",
":",
"ranks",
"[",
"typ",
"]",
"=",
"cluster_ranks",
"[",
"key",
"]",
"return",
"ranks"
] | 30.47619 | 18.380952 |
def eliminate_implications(s):
"""Change >>, <<, and <=> into &, |, and ~. That is, return an Expr
that is equivalent to s, but has only &, |, and ~ as logical operators.
>>> eliminate_implications(A >> (~B << C))
((~B | ~C) | ~A)
>>> eliminate_implications(A ^ B)
((A & ~B) | (~A & B))
"""
if not s.args or is_symbol(s.op): return s ## (Atoms are unchanged.)
args = map(eliminate_implications, s.args)
a, b = args[0], args[-1]
if s.op == '>>':
return (b | ~a)
elif s.op == '<<':
return (a | ~b)
elif s.op == '<=>':
return (a | ~b) & (b | ~a)
elif s.op == '^':
assert len(args) == 2 ## TODO: relax this restriction
return (a & ~b) | (~a & b)
else:
assert s.op in ('&', '|', '~')
return Expr(s.op, *args) | [
"def",
"eliminate_implications",
"(",
"s",
")",
":",
"if",
"not",
"s",
".",
"args",
"or",
"is_symbol",
"(",
"s",
".",
"op",
")",
":",
"return",
"s",
"## (Atoms are unchanged.)",
"args",
"=",
"map",
"(",
"eliminate_implications",
",",
"s",
".",
"args",
")",
"a",
",",
"b",
"=",
"args",
"[",
"0",
"]",
",",
"args",
"[",
"-",
"1",
"]",
"if",
"s",
".",
"op",
"==",
"'>>'",
":",
"return",
"(",
"b",
"|",
"~",
"a",
")",
"elif",
"s",
".",
"op",
"==",
"'<<'",
":",
"return",
"(",
"a",
"|",
"~",
"b",
")",
"elif",
"s",
".",
"op",
"==",
"'<=>'",
":",
"return",
"(",
"a",
"|",
"~",
"b",
")",
"&",
"(",
"b",
"|",
"~",
"a",
")",
"elif",
"s",
".",
"op",
"==",
"'^'",
":",
"assert",
"len",
"(",
"args",
")",
"==",
"2",
"## TODO: relax this restriction",
"return",
"(",
"a",
"&",
"~",
"b",
")",
"|",
"(",
"~",
"a",
"&",
"b",
")",
"else",
":",
"assert",
"s",
".",
"op",
"in",
"(",
"'&'",
",",
"'|'",
",",
"'~'",
")",
"return",
"Expr",
"(",
"s",
".",
"op",
",",
"*",
"args",
")"
] | 34.913043 | 14.217391 |
def get_setting(connection, key):
"""Get key from connection or default to settings."""
if key in connection.settings_dict:
return connection.settings_dict[key]
else:
return getattr(settings, key) | [
"def",
"get_setting",
"(",
"connection",
",",
"key",
")",
":",
"if",
"key",
"in",
"connection",
".",
"settings_dict",
":",
"return",
"connection",
".",
"settings_dict",
"[",
"key",
"]",
"else",
":",
"return",
"getattr",
"(",
"settings",
",",
"key",
")"
] | 36.5 | 7.666667 |
def list_absent(name, value, delimiter=DEFAULT_TARGET_DELIM):
    '''
    Delete a value from a grain formed as a list.
    .. versionadded:: 2014.1.0
    name
        The grain name.
    value
        The value to delete from the grain list.
    delimiter
        A delimiter different from the default ``:`` can be provided.
        .. versionadded:: v2015.8.2
    The grain should be `list type <http://docs.python.org/2/tutorial/datastructures.html#data-structures>`_
    .. code-block:: yaml
        roles:
          grains.list_absent:
            - value: db
    For multiple grains, the syntax looks like:
    .. code-block:: yaml
        roles:
          grains.list_absent:
            - value:
              - web
              - dev
    '''
    # Normalise any custom delimiter to the standard ':' so grains.get can
    # resolve nested grain paths.
    name = re.sub(delimiter, DEFAULT_TARGET_DELIM, name)
    ret = {'name': name,
           'changes': {},
           'result': True,
           'comment': ''}
    grain = __salt__['grains.get'](name, None)
    # Guard clauses: missing grain and non-list grain are terminal states.
    if not grain:
        ret['comment'] = 'Grain {0} does not exist'.format(name)
        return ret
    if not isinstance(grain, list):
        ret['result'] = False
        ret['comment'] = 'Grain {0} is not a valid list'.format(name)
        return ret
    # Accept a scalar as shorthand for a one-element list of values.
    if not isinstance(value, list):
        value = [value]
    comments = []
    for val in value:
        if val not in grain:
            comments.append('Value {1} is absent from '
                            'grain {0}'.format(name, val))
            continue
        if __opts__['test']:
            # Dry run: report the pending deletion without touching the grain.
            ret['result'] = None
            comments.append('Value {1} in grain {0} is set '
                            'to be deleted'.format(name, val))
        else:
            __salt__['grains.remove'](name, val)
            comments.append('Value {1} was deleted from '
                            'grain {0}'.format(name, val))
        # Shared bookkeeping for both real and test-mode deletions; replaces
        # the previously duplicated create-if-missing logic.
        ret['changes'].setdefault('deleted', []).append(val)
    ret['comment'] = '\n'.join(comments)
    return ret
"def",
"list_absent",
"(",
"name",
",",
"value",
",",
"delimiter",
"=",
"DEFAULT_TARGET_DELIM",
")",
":",
"name",
"=",
"re",
".",
"sub",
"(",
"delimiter",
",",
"DEFAULT_TARGET_DELIM",
",",
"name",
")",
"ret",
"=",
"{",
"'name'",
":",
"name",
",",
"'changes'",
":",
"{",
"}",
",",
"'result'",
":",
"True",
",",
"'comment'",
":",
"''",
"}",
"comments",
"=",
"[",
"]",
"grain",
"=",
"__salt__",
"[",
"'grains.get'",
"]",
"(",
"name",
",",
"None",
")",
"if",
"grain",
":",
"if",
"isinstance",
"(",
"grain",
",",
"list",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"list",
")",
":",
"value",
"=",
"[",
"value",
"]",
"for",
"val",
"in",
"value",
":",
"if",
"val",
"not",
"in",
"grain",
":",
"comments",
".",
"append",
"(",
"'Value {1} is absent from '",
"'grain {0}'",
".",
"format",
"(",
"name",
",",
"val",
")",
")",
"elif",
"__opts__",
"[",
"'test'",
"]",
":",
"ret",
"[",
"'result'",
"]",
"=",
"None",
"comments",
".",
"append",
"(",
"'Value {1} in grain {0} is set '",
"'to be deleted'",
".",
"format",
"(",
"name",
",",
"val",
")",
")",
"if",
"'deleted'",
"not",
"in",
"ret",
"[",
"'changes'",
"]",
".",
"keys",
"(",
")",
":",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'deleted'",
":",
"[",
"]",
"}",
"ret",
"[",
"'changes'",
"]",
"[",
"'deleted'",
"]",
".",
"append",
"(",
"val",
")",
"elif",
"val",
"in",
"grain",
":",
"__salt__",
"[",
"'grains.remove'",
"]",
"(",
"name",
",",
"val",
")",
"comments",
".",
"append",
"(",
"'Value {1} was deleted from '",
"'grain {0}'",
".",
"format",
"(",
"name",
",",
"val",
")",
")",
"if",
"'deleted'",
"not",
"in",
"ret",
"[",
"'changes'",
"]",
".",
"keys",
"(",
")",
":",
"ret",
"[",
"'changes'",
"]",
"=",
"{",
"'deleted'",
":",
"[",
"]",
"}",
"ret",
"[",
"'changes'",
"]",
"[",
"'deleted'",
"]",
".",
"append",
"(",
"val",
")",
"ret",
"[",
"'comment'",
"]",
"=",
"'\\n'",
".",
"join",
"(",
"comments",
")",
"return",
"ret",
"else",
":",
"ret",
"[",
"'result'",
"]",
"=",
"False",
"ret",
"[",
"'comment'",
"]",
"=",
"'Grain {0} is not a valid list'",
".",
"format",
"(",
"name",
")",
"else",
":",
"ret",
"[",
"'comment'",
"]",
"=",
"'Grain {0} does not exist'",
".",
"format",
"(",
"name",
")",
"return",
"ret"
] | 31.891892 | 21.864865 |
def find_all(self, collection):
    """Fetch every available item from a collection.

    Args:
        collection: Name of the collection attribute on ``self.db``.
            See main class documentation.

    Returns:
        Whatever the collection's ``find()`` yields for all items.
    """
    return getattr(self.db, collection).find()
"def",
"find_all",
"(",
"self",
",",
"collection",
")",
":",
"obj",
"=",
"getattr",
"(",
"self",
".",
"db",
",",
"collection",
")",
"result",
"=",
"obj",
".",
"find",
"(",
")",
"return",
"result"
] | 28.666667 | 15.5 |
def log(self, level, *args, **kwargs):
    """Forward a log call to the wrapped logger at the named level.

    .. seealso:: Proxy:
        :class:`.Logger`.level
    """
    handler = getattr(self.__logger, level)
    handler(*args, **kwargs)
"def",
"log",
"(",
"self",
",",
"level",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"target",
"=",
"getattr",
"(",
"self",
".",
"__logger",
",",
"level",
")",
"target",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")"
] | 26.5 | 9.5 |
def lock_file(self, fpath, after_setup=False, wait=False):
    """Locks the specified file.

    :param str|unicode fpath: File path.
    :param bool after_setup:
        True - lock after logging/daemon setup
        False - lock before starting
    :param bool wait:
        True - block until the lock is available
        False - exit if already locked
    """
    # Base command depends on whether we should block on a held lock;
    # the '2' suffix moves the lock to the post-setup phase.
    if wait:
        option = 'flock-wait'
    else:
        option = 'flock'
    if after_setup:
        option += '2'
    self._set(option, fpath)
    return self._section
"def",
"lock_file",
"(",
"self",
",",
"fpath",
",",
"after_setup",
"=",
"False",
",",
"wait",
"=",
"False",
")",
":",
"command",
"=",
"'flock-wait'",
"if",
"wait",
"else",
"'flock'",
"if",
"after_setup",
":",
"command",
"=",
"'%s2'",
"%",
"command",
"self",
".",
"_set",
"(",
"command",
",",
"fpath",
")",
"return",
"self",
".",
"_section"
] | 23.954545 | 18.090909 |
def relpath(path, start):
    """Compute the path to *path* relative to *start*.

    Note: Modeled after python2.6 :meth:`os.path.relpath`.
    """
    parts = path_list(path)
    base = path_list(start)
    # Count leading components shared by both paths.
    shared = 0
    for ours, theirs in zip(parts, base):
        if ours != theirs:
            break
        shared += 1
    # One parent hop for each unshared component of *start*, then the
    # unshared tail of *path*.
    hops = len(base) - shared
    return path_join(*([PARENT] * hops + parts[shared:]))
"def",
"relpath",
"(",
"path",
",",
"start",
")",
":",
"path_items",
"=",
"path_list",
"(",
"path",
")",
"start_items",
"=",
"path_list",
"(",
"start",
")",
"# Find common parts of path.",
"common",
"=",
"[",
"]",
"for",
"pth",
",",
"stt",
"in",
"zip",
"(",
"path_items",
",",
"start_items",
")",
":",
"if",
"pth",
"!=",
"stt",
":",
"break",
"common",
".",
"append",
"(",
"pth",
")",
"# Shared parts index in both lists.",
"common_ind",
"=",
"len",
"(",
"common",
")",
"parent_num",
"=",
"len",
"(",
"start_items",
")",
"-",
"common_ind",
"# Start with parent traversal and add relative parts.",
"rel_items",
"=",
"[",
"PARENT",
"]",
"*",
"parent_num",
"+",
"path_items",
"[",
"common_ind",
":",
"]",
"return",
"path_join",
"(",
"*",
"rel_items",
")"
] | 28 | 16.863636 |
def _import_modules(dir_path):
    """ Attempts to import modules in the specified directory path.

    Temporarily prepends *dir_path* to ``sys.path`` and calls
    ``__import__`` on each immediate subdirectory name and on each file
    matching ``_RE_PY_EXT`` (presumably ``*.py`` -- confirm against the
    pattern's definition), excluding files matched by ``_RE_INIT_PY``.
    Imports are performed purely for their side effects.

    `dir_path`
        Base directory path to attempt to import modules.
    """
    def _import_module(module):
        """ Imports the specified module.

        Returns False when *module* was already imported during this call;
        the return value is not used by the caller.
        """
        # already loaded, skip
        if module in mods_loaded:
            return False
        __import__(module)
        mods_loaded.append(module)
    # Names imported so far during this invocation (closed over above).
    mods_loaded = []
    # check if provided path exists
    if not os.path.isdir(dir_path):
        return
    try:
        # update import search path: make dir_path the highest priority
        sys.path.insert(0, dir_path)
        # check for modules in the dir path
        for entry in os.listdir(dir_path):
            path = os.path.join(dir_path, entry)
            if os.path.isdir(path):  # directory: import by name
                _import_module(entry)
            elif _RE_PY_EXT.search(entry):  # python file
                if not _RE_INIT_PY.match(entry):  # exclude init
                    name = _RE_PY_EXT.sub('', entry)
                    _import_module(name)
    finally:
        # remove inserted path, even if an import raised
        # NOTE(review): assumes nothing else touched sys.path[0] meanwhile.
        sys.path.pop(0)
"def",
"_import_modules",
"(",
"dir_path",
")",
":",
"def",
"_import_module",
"(",
"module",
")",
":",
"\"\"\" Imports the specified module.\n \"\"\"",
"# already loaded, skip",
"if",
"module",
"in",
"mods_loaded",
":",
"return",
"False",
"__import__",
"(",
"module",
")",
"mods_loaded",
".",
"append",
"(",
"module",
")",
"mods_loaded",
"=",
"[",
"]",
"# check if provided path exists",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"dir_path",
")",
":",
"return",
"try",
":",
"# update import search path",
"sys",
".",
"path",
".",
"insert",
"(",
"0",
",",
"dir_path",
")",
"# check for modules in the dir path",
"for",
"entry",
"in",
"os",
".",
"listdir",
"(",
"dir_path",
")",
":",
"path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"dir_path",
",",
"entry",
")",
"if",
"os",
".",
"path",
".",
"isdir",
"(",
"path",
")",
":",
"# directory",
"_import_module",
"(",
"entry",
")",
"elif",
"_RE_PY_EXT",
".",
"search",
"(",
"entry",
")",
":",
"# python file",
"if",
"not",
"_RE_INIT_PY",
".",
"match",
"(",
"entry",
")",
":",
"# exclude init",
"name",
"=",
"_RE_PY_EXT",
".",
"sub",
"(",
"''",
",",
"entry",
")",
"_import_module",
"(",
"name",
")",
"finally",
":",
"# remove inserted path",
"sys",
".",
"path",
".",
"pop",
"(",
"0",
")"
] | 26.829268 | 17.170732 |
def _parse_file(self):
    """Preprocess the C source with the cross toolchain and parse it into an AST.

    Builds preprocessor arguments from the architecture's include paths,
    runs only the preprocessor (-E), and neutralises the GCC extensions
    (``__attribute__``, ``__extension__``) that are not ISO C, then stores
    the parsed AST on ``self.ast``.
    """
    cpp_args = utilities.build_includes(self.arch.includes())
    # Preprocess only, and strip GCC-specific keywords so the ISO C parser
    # accepts the output.
    cpp_args.extend(['-E', '-D__attribute__(x)=', '-D__extension__='])
    self.ast = parse_file(self.filepath, use_cpp=True,
                          cpp_path='arm-none-eabi-gcc', cpp_args=cpp_args)
"def",
"_parse_file",
"(",
"self",
")",
":",
"# We need to set the CPU type to pull in the right register definitions",
"# only preprocess the file (-E) and get rid of gcc extensions that aren't",
"# supported in ISO C.",
"args",
"=",
"utilities",
".",
"build_includes",
"(",
"self",
".",
"arch",
".",
"includes",
"(",
")",
")",
"# args.append('-mcpu=%s' % self.arch.property('chip'))",
"args",
".",
"append",
"(",
"'-E'",
")",
"args",
".",
"append",
"(",
"'-D__attribute__(x)='",
")",
"args",
".",
"append",
"(",
"'-D__extension__='",
")",
"self",
".",
"ast",
"=",
"parse_file",
"(",
"self",
".",
"filepath",
",",
"use_cpp",
"=",
"True",
",",
"cpp_path",
"=",
"'arm-none-eabi-gcc'",
",",
"cpp_args",
"=",
"args",
")"
] | 45.846154 | 24.076923 |
def check_cluster(
    cluster_config,
    data_path,
    java_home,
    check_replicas,
    batch_size,
    minutes,
    start_time,
    end_time,
):
    """Check the integrity of the Kafka log files across a cluster.

    ``start_time`` and ``end_time`` should be in the format specified by
    TIME_FORMAT_REGEX. One worker process is started per broker; on
    Ctrl-C every worker is terminated and the program exits with status 1.

    :param data_path: the path to the log folder on the broker
    :type data_path: str
    :param java_home: the JAVA_HOME of the broker
    :type java_home: str
    :param check_replicas: also checks the replica files
    :type check_replicas: bool
    :param batch_size: the size of the batch
    :type batch_size: int
    :param minutes: check the files modified in the last N minutes
    :type minutes: int
    :param start_time: check the files modified after start_time
    :type start_time: str
    :param end_time: check the files modified before end_time
    :type end_time: str
    """
    brokers = get_broker_list(cluster_config)
    broker_files = find_files(data_path, brokers, minutes, start_time, end_time)
    if not check_replicas:
        # Keep only files for partitions this broker leads.
        broker_files = filter_leader_files(cluster_config, broker_files)
    workers = []
    print("Starting {n} parallel processes".format(n=len(broker_files)))
    try:
        for _broker, host, files in broker_files:
            print(
                " Broker: {host}, {n} files to check".format(
                    host=host,
                    n=len(files)),
            )
            worker = Process(
                name="dump_process_" + host,
                target=check_files_on_host,
                args=(java_home, host, files, batch_size),
            )
            worker.start()
            workers.append(worker)
        print("Processes running:")
        for worker in workers:
            worker.join()
    except KeyboardInterrupt:
        # Best-effort teardown of every worker before bailing out.
        print("Terminating all processes")
        for worker in workers:
            worker.terminate()
            worker.join()
        print("All processes terminated")
        sys.exit(1)
"def",
"check_cluster",
"(",
"cluster_config",
",",
"data_path",
",",
"java_home",
",",
"check_replicas",
",",
"batch_size",
",",
"minutes",
",",
"start_time",
",",
"end_time",
",",
")",
":",
"brokers",
"=",
"get_broker_list",
"(",
"cluster_config",
")",
"broker_files",
"=",
"find_files",
"(",
"data_path",
",",
"brokers",
",",
"minutes",
",",
"start_time",
",",
"end_time",
")",
"if",
"not",
"check_replicas",
":",
"# remove replicas",
"broker_files",
"=",
"filter_leader_files",
"(",
"cluster_config",
",",
"broker_files",
")",
"processes",
"=",
"[",
"]",
"print",
"(",
"\"Starting {n} parallel processes\"",
".",
"format",
"(",
"n",
"=",
"len",
"(",
"broker_files",
")",
")",
")",
"try",
":",
"for",
"broker",
",",
"host",
",",
"files",
"in",
"broker_files",
":",
"print",
"(",
"\" Broker: {host}, {n} files to check\"",
".",
"format",
"(",
"host",
"=",
"host",
",",
"n",
"=",
"len",
"(",
"files",
")",
")",
",",
")",
"p",
"=",
"Process",
"(",
"name",
"=",
"\"dump_process_\"",
"+",
"host",
",",
"target",
"=",
"check_files_on_host",
",",
"args",
"=",
"(",
"java_home",
",",
"host",
",",
"files",
",",
"batch_size",
")",
",",
")",
"p",
".",
"start",
"(",
")",
"processes",
".",
"append",
"(",
"p",
")",
"print",
"(",
"\"Processes running:\"",
")",
"for",
"process",
"in",
"processes",
":",
"process",
".",
"join",
"(",
")",
"except",
"KeyboardInterrupt",
":",
"print",
"(",
"\"Terminating all processes\"",
")",
"for",
"process",
"in",
"processes",
":",
"process",
".",
"terminate",
"(",
")",
"process",
".",
"join",
"(",
")",
"print",
"(",
"\"All processes terminated\"",
")",
"sys",
".",
"exit",
"(",
"1",
")"
] | 32.766667 | 17.566667 |
def create_comment_browser(self, layout):
    """Build a comment list browser, insert it into *layout* and return it.

    :param layout: the layout to insert the browser into
    :type layout: QLayout
    :returns: the newly created browser
    :rtype: :class:`jukeboxcore.gui.widgets.browser.ListBrowser`
    :raises: None
    """
    browser = CommentBrowser(1, headers=['Comments:'])
    # Place the browser at index 1 of the layout.
    layout.insertWidget(1, browser)
    return browser
"def",
"create_comment_browser",
"(",
"self",
",",
"layout",
")",
":",
"brws",
"=",
"CommentBrowser",
"(",
"1",
",",
"headers",
"=",
"[",
"'Comments:'",
"]",
")",
"layout",
".",
"insertWidget",
"(",
"1",
",",
"brws",
")",
"return",
"brws"
] | 37.333333 | 13.5 |
def send_mass_video(self, group_or_users, media_id, title=None,
                    description=None, is_to_all=False, preview=False,
                    send_ignore_reprint=0, client_msg_id=None):
    """Mass-send a video message.

    See the official documentation:
    https://mp.weixin.qq.com/wiki?id=mp1481187827_i0l21

    :param group_or_users: an integer group id for group sends, or a
        list/tuple of OpenIDs for per-user sends. Pass None together with
        ``is_to_all=True`` to target every follower.
    :param media_id: media ID of the video; can be uploaded with
        :func:`upload_video`.
    :param title: optional video title.
    :param description: optional video description.
    :param is_to_all: when True the message goes to all users; when False
        it is delivered according to ``group_or_users``.
    :type is_to_all: bool
    :param preview: send a preview; ``group_or_users`` should then be a
        single OpenID string.
    :type preview: bool
    :param send_ignore_reprint: whether to keep sending when an article is
        judged to be a reprint: 1 continues (if the original permits
        reprinting), 0 aborts. Defaults to 0.
    :type send_ignore_reprint: int
    :param client_msg_id: caller-side mass-send msgid, at most 64 bytes.
    :type client_msg_id: str
    :return: the decoded JSON response.
    """
    payload = {'media_id': media_id}
    if title:
        payload['title'] = title
    if description:
        payload['description'] = description
    return self._send_mass_message(
        group_or_users,
        'mpvideo',
        {'mpvideo': payload},
        is_to_all,
        preview,
        send_ignore_reprint,
        client_msg_id,
    )
"def",
"send_mass_video",
"(",
"self",
",",
"group_or_users",
",",
"media_id",
",",
"title",
"=",
"None",
",",
"description",
"=",
"None",
",",
"is_to_all",
"=",
"False",
",",
"preview",
"=",
"False",
",",
"send_ignore_reprint",
"=",
"0",
",",
"client_msg_id",
"=",
"None",
")",
":",
"video_data",
"=",
"{",
"'media_id'",
":",
"media_id",
"}",
"if",
"title",
":",
"video_data",
"[",
"'title'",
"]",
"=",
"title",
"if",
"description",
":",
"video_data",
"[",
"'description'",
"]",
"=",
"description",
"return",
"self",
".",
"_send_mass_message",
"(",
"group_or_users",
",",
"'mpvideo'",
",",
"{",
"'mpvideo'",
":",
"video_data",
"}",
",",
"is_to_all",
",",
"preview",
",",
"send_ignore_reprint",
",",
"client_msg_id",
",",
")"
] | 36.531915 | 19.765957 |
def template_global(self, name=None):
    """A decorator that registers a custom template global function.

    You can specify a name for the global function, otherwise the function
    name will be used. Example::

        @app.template_global()
        def double(n):
            return 2 * n

    .. versionadded:: 0.10

    :param name: the optional name of the global function, otherwise the
                 function name will be used.
    """
    def register(func):
        # Delegate the actual registration, then hand the function back
        # unchanged so the decorator is transparent.
        self.add_template_global(func, name=name)
        return func
    return register
"def",
"template_global",
"(",
"self",
",",
"name",
"=",
"None",
")",
":",
"def",
"decorator",
"(",
"f",
")",
":",
"self",
".",
"add_template_global",
"(",
"f",
",",
"name",
"=",
"name",
")",
"return",
"f",
"return",
"decorator"
] | 33.611111 | 17.333333 |
def list_all_credit_card_payments(cls, **kwargs):
    """List CreditCardPayments.

    Return a list of CreditCardPayments.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True

    >>> thread = api.list_all_credit_card_payments(async=True)
    >>> result = thread.get()

    :param async bool
    :param int page: page number
    :param int size: page size
    :param str sort: page order
    :return: page[CreditCardPayment]
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # The underlying call is identical either way; when 'async' is truthy
    # it yields the request thread instead of the data.
    result = cls._list_all_credit_card_payments_with_http_info(**kwargs)
    return result
"def",
"list_all_credit_card_payments",
"(",
"cls",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async'",
")",
":",
"return",
"cls",
".",
"_list_all_credit_card_payments_with_http_info",
"(",
"*",
"*",
"kwargs",
")",
"else",
":",
"(",
"data",
")",
"=",
"cls",
".",
"_list_all_credit_card_payments_with_http_info",
"(",
"*",
"*",
"kwargs",
")",
"return",
"data"
] | 39.565217 | 15.869565 |
def cli_head(context, path=None):
    """
    Performs a HEAD on the item (account, container, or object).

    With no *path* the account itself is headed; a path with no inner
    slash names a container; anything else is split into
    ``container/object``.
    See :py:mod:`swiftly.cli.head` for context usage information.
    See :py:class:`CLIHead` for more information.
    """
    # Strip leading slashes; an empty path collapses to None (account HEAD).
    path = path.lstrip('/') if path else None
    with context.client_manager.with_client() as client:
        if not path:
            # Account-level HEAD.
            status, reason, headers, contents = client.head_account(
                headers=context.headers, query=context.query, cdn=context.cdn)
            mute = context.muted_account_headers
        elif '/' not in path.rstrip('/'):
            # Container-level HEAD; a single trailing slash is tolerated.
            path = path.rstrip('/')
            status, reason, headers, contents = client.head_container(
                path, headers=context.headers, query=context.query,
                cdn=context.cdn)
            mute = context.muted_container_headers
        else:
            # Object-level HEAD: split at the first '/' into
            # (container, object).
            status, reason, headers, contents = client.head_object(
                *path.split('/', 1), headers=context.headers,
                query=context.query, cdn=context.cdn)
            mute = context.muted_object_headers
        if hasattr(contents, 'read'):
            # Drain any streamed response body before inspecting the status.
            contents = contents.read()
        if status // 100 != 2:
            # Non-2xx: optionally swallow 404s, otherwise raise with a
            # message describing which level of HEAD failed.
            if status == 404 and context.ignore_404:
                return
            if not path:
                raise ReturnCode('heading account: %s %s' % (status, reason))
            elif '/' not in path:
                raise ReturnCode(
                    'heading container %r: %s %s' % (path, status, reason))
            else:
                raise ReturnCode(
                    'heading object %r: %s %s' % (path, status, reason))
        else:
            # Success: emit the response headers (minus the muted ones)
            # to the context's stdout.
            with context.io_manager.with_stdout() as fp:
                context.write_headers(fp, headers, mute)
"def",
"cli_head",
"(",
"context",
",",
"path",
"=",
"None",
")",
":",
"path",
"=",
"path",
".",
"lstrip",
"(",
"'/'",
")",
"if",
"path",
"else",
"None",
"with",
"context",
".",
"client_manager",
".",
"with_client",
"(",
")",
"as",
"client",
":",
"if",
"not",
"path",
":",
"status",
",",
"reason",
",",
"headers",
",",
"contents",
"=",
"client",
".",
"head_account",
"(",
"headers",
"=",
"context",
".",
"headers",
",",
"query",
"=",
"context",
".",
"query",
",",
"cdn",
"=",
"context",
".",
"cdn",
")",
"mute",
"=",
"context",
".",
"muted_account_headers",
"elif",
"'/'",
"not",
"in",
"path",
".",
"rstrip",
"(",
"'/'",
")",
":",
"path",
"=",
"path",
".",
"rstrip",
"(",
"'/'",
")",
"status",
",",
"reason",
",",
"headers",
",",
"contents",
"=",
"client",
".",
"head_container",
"(",
"path",
",",
"headers",
"=",
"context",
".",
"headers",
",",
"query",
"=",
"context",
".",
"query",
",",
"cdn",
"=",
"context",
".",
"cdn",
")",
"mute",
"=",
"context",
".",
"muted_container_headers",
"else",
":",
"status",
",",
"reason",
",",
"headers",
",",
"contents",
"=",
"client",
".",
"head_object",
"(",
"*",
"path",
".",
"split",
"(",
"'/'",
",",
"1",
")",
",",
"headers",
"=",
"context",
".",
"headers",
",",
"query",
"=",
"context",
".",
"query",
",",
"cdn",
"=",
"context",
".",
"cdn",
")",
"mute",
"=",
"context",
".",
"muted_object_headers",
"if",
"hasattr",
"(",
"contents",
",",
"'read'",
")",
":",
"contents",
"=",
"contents",
".",
"read",
"(",
")",
"if",
"status",
"//",
"100",
"!=",
"2",
":",
"if",
"status",
"==",
"404",
"and",
"context",
".",
"ignore_404",
":",
"return",
"if",
"not",
"path",
":",
"raise",
"ReturnCode",
"(",
"'heading account: %s %s'",
"%",
"(",
"status",
",",
"reason",
")",
")",
"elif",
"'/'",
"not",
"in",
"path",
":",
"raise",
"ReturnCode",
"(",
"'heading container %r: %s %s'",
"%",
"(",
"path",
",",
"status",
",",
"reason",
")",
")",
"else",
":",
"raise",
"ReturnCode",
"(",
"'heading object %r: %s %s'",
"%",
"(",
"path",
",",
"status",
",",
"reason",
")",
")",
"else",
":",
"with",
"context",
".",
"io_manager",
".",
"with_stdout",
"(",
")",
"as",
"fp",
":",
"context",
".",
"write_headers",
"(",
"fp",
",",
"headers",
",",
"mute",
")"
] | 41.170732 | 17.365854 |
def substitute(dict_, source):
    """Replace every occurrence of the keys of *dict_* within *source*.

    Args:
        dict_: mapping ``{text: replacement}``; keys are treated as literal
            text (they are ``re.escape``-d), not as regular expressions.
        source: the string to rewrite.

    Returns:
        *source* with all keys replaced in a single left-to-right pass.
    """
    # An empty mapping would produce an empty alternation that matches the
    # empty string at every position and then KeyErrors on lookup.
    if not dict_:
        return source
    # Sort keys longest-first so overlapping keys (e.g. 'ab' vs 'a')
    # deterministically prefer the longest match: regex alternation tries
    # alternatives left to right.
    alternatives = (re.escape(key) for key in sorted(dict_, key=len, reverse=True))
    pattern = re.compile('|'.join(alternatives))
    return pattern.sub(lambda match: dict_[match.group()], source)
"def",
"substitute",
"(",
"dict_",
",",
"source",
")",
":",
"d_esc",
"=",
"(",
"re",
".",
"escape",
"(",
"k",
")",
"for",
"k",
"in",
"dict_",
".",
"keys",
"(",
")",
")",
"pattern",
"=",
"re",
".",
"compile",
"(",
"'|'",
".",
"join",
"(",
"d_esc",
")",
")",
"return",
"pattern",
".",
"sub",
"(",
"lambda",
"x",
":",
"dict_",
"[",
"x",
".",
"group",
"(",
")",
"]",
",",
"source",
")"
] | 32.888889 | 11.444444 |
def dump(self):
    """Print a human-readable summary of the ATR to stdout."""
    # Interface bytes: for each index emit TA/TB/TC/TD (in that order),
    # matching the interleaved order the bytes appear in the ATR.
    banks = (('TA', self.TA), ('TB', self.TB), ('TC', self.TC), ('TD', self.TD))
    for idx in range(len(self.TA)):
        for label, values in banks:
            if values[idx] is not None:
                print("%s%d: %x" % (label, idx + 1, values[idx]))
    print('supported protocols ' + ','.join(self.getSupportedProtocols()))
    print('T=0 supported: ' + str(self.isT0Supported()))
    print('T=1 supported: ' + str(self.isT1Supported()))
    # Only print the checksum when one is present (and non-zero).
    if self.getChecksum():
        print('checksum: %d' % self.getChecksum())
    print('\tclock rate conversion factor: ' +
          str(self.getClockRateConversion()))
    print('\tbit rate adjustment factor: ' + str(self.getBitRateFactor()))
    print('\tmaximum programming current: ' +
          str(self.getProgrammingCurrent()))
    print('\tprogramming voltage: ' + str(self.getProgrammingVoltage()))
    print('\tguard time: ' + str(self.getGuardTime()))
    print('nb of interface bytes: %d' % self.getInterfaceBytesCount())
    print('nb of historical bytes: %d' % self.getHistoricalBytesCount())
"def",
"dump",
"(",
"self",
")",
":",
"for",
"i",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"self",
".",
"TA",
")",
")",
":",
"if",
"self",
".",
"TA",
"[",
"i",
"]",
"is",
"not",
"None",
":",
"print",
"(",
"\"TA%d: %x\"",
"%",
"(",
"i",
"+",
"1",
",",
"self",
".",
"TA",
"[",
"i",
"]",
")",
")",
"if",
"self",
".",
"TB",
"[",
"i",
"]",
"is",
"not",
"None",
":",
"print",
"(",
"\"TB%d: %x\"",
"%",
"(",
"i",
"+",
"1",
",",
"self",
".",
"TB",
"[",
"i",
"]",
")",
")",
"if",
"self",
".",
"TC",
"[",
"i",
"]",
"is",
"not",
"None",
":",
"print",
"(",
"\"TC%d: %x\"",
"%",
"(",
"i",
"+",
"1",
",",
"self",
".",
"TC",
"[",
"i",
"]",
")",
")",
"if",
"self",
".",
"TD",
"[",
"i",
"]",
"is",
"not",
"None",
":",
"print",
"(",
"\"TD%d: %x\"",
"%",
"(",
"i",
"+",
"1",
",",
"self",
".",
"TD",
"[",
"i",
"]",
")",
")",
"print",
"(",
"'supported protocols '",
"+",
"','",
".",
"join",
"(",
"self",
".",
"getSupportedProtocols",
"(",
")",
")",
")",
"print",
"(",
"'T=0 supported: '",
"+",
"str",
"(",
"self",
".",
"isT0Supported",
"(",
")",
")",
")",
"print",
"(",
"'T=1 supported: '",
"+",
"str",
"(",
"self",
".",
"isT1Supported",
"(",
")",
")",
")",
"if",
"self",
".",
"getChecksum",
"(",
")",
":",
"print",
"(",
"'checksum: %d'",
"%",
"self",
".",
"getChecksum",
"(",
")",
")",
"print",
"(",
"'\\tclock rate conversion factor: '",
"+",
"str",
"(",
"self",
".",
"getClockRateConversion",
"(",
")",
")",
")",
"print",
"(",
"'\\tbit rate adjustment factor: '",
"+",
"str",
"(",
"self",
".",
"getBitRateFactor",
"(",
")",
")",
")",
"print",
"(",
"'\\tmaximum programming current: '",
"+",
"str",
"(",
"self",
".",
"getProgrammingCurrent",
"(",
")",
")",
")",
"print",
"(",
"'\\tprogramming voltage: '",
"+",
"str",
"(",
"self",
".",
"getProgrammingVoltage",
"(",
")",
")",
")",
"print",
"(",
"'\\tguard time: '",
"+",
"str",
"(",
"self",
".",
"getGuardTime",
"(",
")",
")",
")",
"print",
"(",
"'nb of interface bytes: %d'",
"%",
"self",
".",
"getInterfaceBytesCount",
"(",
")",
")",
"print",
"(",
"'nb of historical bytes: %d'",
"%",
"self",
".",
"getHistoricalBytesCount",
"(",
")",
")"
] | 40.875 | 21.03125 |
def replace_apply_state(meta_graph, state_ops, feed_map):
    """Replaces state ops with non state Placeholder ops for the apply graph."""
    for node in meta_graph.graph_def.node:
        # Only rewrite nodes that are state ops AND whose output tensor is
        # rewired through the feed map.
        if node.op not in state_ops or node.name + ":0" not in feed_map:
            continue
        node.op = "Placeholder"
        # Placeholder only understands shape and dtype; strip every other
        # attribute (snapshot the keys first, then delete).
        for key in [k for k in node.attr if k != "shape"]:
            del node.attr[key]
        node.attr["dtype"].type = types_pb2.DT_RESOURCE
"def",
"replace_apply_state",
"(",
"meta_graph",
",",
"state_ops",
",",
"feed_map",
")",
":",
"for",
"node",
"in",
"meta_graph",
".",
"graph_def",
".",
"node",
":",
"keys_to_purge",
"=",
"[",
"]",
"tensor_name",
"=",
"node",
".",
"name",
"+",
"\":0\"",
"# Verify that the node is a state op and that its due to be rewired",
"# in the feedmap.",
"if",
"node",
".",
"op",
"in",
"state_ops",
"and",
"tensor_name",
"in",
"feed_map",
":",
"node",
".",
"op",
"=",
"\"Placeholder\"",
"for",
"key",
"in",
"node",
".",
"attr",
":",
"# Only shape and dtype are required for Placeholder. Remove other",
"# attributes.",
"if",
"key",
"!=",
"\"shape\"",
":",
"keys_to_purge",
".",
"append",
"(",
"key",
")",
"for",
"key",
"in",
"keys_to_purge",
":",
"del",
"node",
".",
"attr",
"[",
"key",
"]",
"node",
".",
"attr",
"[",
"\"dtype\"",
"]",
".",
"type",
"=",
"types_pb2",
".",
"DT_RESOURCE"
] | 41.176471 | 14 |
def send(self, message_type, task_id, message):
    """ Sends a message to the UDP receiver

    Parameter
    ---------
    message_type: monitoring.MessageType (enum)
        In this case message type is RESOURCE_INFO most often
    task_id: int
        Task identifier of the task for which resource monitoring is being
        reported. NOTE(review): accepted but not included in the datagram.
    message: object
        Arbitrary pickle-able object that is to be sent

    Returns:
        # bytes sent on success, None if pickling failed, False on a
        socket timeout
    """
    try:
        # Datagram layout: (manager id, epoch timestamp, type, payload).
        buffer = pickle.dumps((self.source_id,
                               int(time.time()),
                               message_type,
                               message))
    except Exception as exc:
        print("Exception during pickling {}".format(exc))
        return
    try:
        sent = self.sock.sendto(buffer, (self.ip, self.port))
    except socket.timeout:
        print("Could not send message within timeout limit")
        return False
    return sent
"def",
"send",
"(",
"self",
",",
"message_type",
",",
"task_id",
",",
"message",
")",
":",
"x",
"=",
"0",
"try",
":",
"buffer",
"=",
"pickle",
".",
"dumps",
"(",
"(",
"self",
".",
"source_id",
",",
"# Identifier for manager",
"int",
"(",
"time",
".",
"time",
"(",
")",
")",
",",
"# epoch timestamp",
"message_type",
",",
"message",
")",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"(",
"\"Exception during pickling {}\"",
".",
"format",
"(",
"e",
")",
")",
"return",
"try",
":",
"x",
"=",
"self",
".",
"sock",
".",
"sendto",
"(",
"buffer",
",",
"(",
"self",
".",
"ip",
",",
"self",
".",
"port",
")",
")",
"except",
"socket",
".",
"timeout",
":",
"print",
"(",
"\"Could not send message within timeout limit\"",
")",
"return",
"False",
"return",
"x"
] | 33.25 | 21.9375 |
def _set_rowcount(self, query_results):
    """Set ``rowcount`` from finished query results.

    Normally this is the number of rows returned by the query, but for a
    DML statement the number of modified rows takes precedence. Defaults
    to 0 when neither count is available.

    :type query_results:
        :class:`~google.cloud.bigquery.query._QueryResults`
    :param query_results: results of a query
    """
    count = 0
    fetched = query_results.total_rows
    modified = query_results.num_dml_affected_rows
    if fetched is not None and fetched > 0:
        count = fetched
    # DML row counts win over the fetched-row total.
    if modified is not None and modified > 0:
        count = modified
    self.rowcount = count
"def",
"_set_rowcount",
"(",
"self",
",",
"query_results",
")",
":",
"total_rows",
"=",
"0",
"num_dml_affected_rows",
"=",
"query_results",
".",
"num_dml_affected_rows",
"if",
"query_results",
".",
"total_rows",
"is",
"not",
"None",
"and",
"query_results",
".",
"total_rows",
">",
"0",
":",
"total_rows",
"=",
"query_results",
".",
"total_rows",
"if",
"num_dml_affected_rows",
"is",
"not",
"None",
"and",
"num_dml_affected_rows",
">",
"0",
":",
"total_rows",
"=",
"num_dml_affected_rows",
"self",
".",
"rowcount",
"=",
"total_rows"
] | 41.315789 | 20.578947 |
def enable_svc_check(self, service):
    """Enable checks for a service

    Format of the line that triggers function call::

        ENABLE_SVC_CHECK;<host_name>;<service_description>

    :param service: service to edit
    :type service: alignak.objects.service.Service
    :return: None
    """
    # Already enabled: nothing to change, no status brok to send.
    if service.active_checks_enabled:
        return
    service.modified_attributes |= \
        DICT_MODATTR["MODATTR_ACTIVE_CHECKS_ENABLED"].value
    service.active_checks_enabled = True
    self.send_an_element(service.get_update_status_brok())
"def",
"enable_svc_check",
"(",
"self",
",",
"service",
")",
":",
"if",
"not",
"service",
".",
"active_checks_enabled",
":",
"service",
".",
"modified_attributes",
"|=",
"DICT_MODATTR",
"[",
"\"MODATTR_ACTIVE_CHECKS_ENABLED\"",
"]",
".",
"value",
"service",
".",
"active_checks_enabled",
"=",
"True",
"self",
".",
"send_an_element",
"(",
"service",
".",
"get_update_status_brok",
"(",
")",
")"
] | 38.866667 | 14.8 |
def users_lookupByEmail(self, *, email: str, **kwargs) -> SlackResponse:
    """Look up a workspace user by email address.

    Args:
        email (str): An email address belonging to a user in the workspace.
            e.g. 'spengler@ghostbusters.example.com'
    """
    params = dict(kwargs, email=email)
    return self.api_call("users.lookupByEmail", http_verb="GET", params=params)
"def",
"users_lookupByEmail",
"(",
"self",
",",
"*",
",",
"email",
":",
"str",
",",
"*",
"*",
"kwargs",
")",
"->",
"SlackResponse",
":",
"kwargs",
".",
"update",
"(",
"{",
"\"email\"",
":",
"email",
"}",
")",
"return",
"self",
".",
"api_call",
"(",
"\"users.lookupByEmail\"",
",",
"http_verb",
"=",
"\"GET\"",
",",
"params",
"=",
"kwargs",
")"
] | 44.222222 | 22 |
def watch(self, pipeline=None, full_document='default', resume_after=None,
max_await_time_ms=None, batch_size=None, collation=None,
start_at_operation_time=None, session=None):
"""Watch changes on this database.
Performs an aggregation with an implicit initial ``$changeStream``
stage and returns a
:class:`~pymongo.change_stream.DatabaseChangeStream` cursor which
iterates over changes on all collections in this database.
Introduced in MongoDB 4.0.
.. code-block:: python
with db.watch() as stream:
for change in stream:
print(change)
The :class:`~pymongo.change_stream.DatabaseChangeStream` iterable
blocks until the next change document is returned or an error is
raised. If the
:meth:`~pymongo.change_stream.DatabaseChangeStream.next` method
encounters a network error when retrieving a batch from the server,
it will automatically attempt to recreate the cursor such that no
change events are missed. Any error encountered during the resume
attempt indicates there may be an outage and will be raised.
.. code-block:: python
try:
with db.watch(
[{'$match': {'operationType': 'insert'}}]) as stream:
for insert_change in stream:
print(insert_change)
except pymongo.errors.PyMongoError:
# The ChangeStream encountered an unrecoverable error or the
# resume attempt failed to recreate the cursor.
logging.error('...')
For a precise description of the resume process see the
`change streams specification`_.
:Parameters:
- `pipeline` (optional): A list of aggregation pipeline stages to
append to an initial ``$changeStream`` stage. Not all
pipeline stages are valid after a ``$changeStream`` stage, see the
MongoDB documentation on change streams for the supported stages.
- `full_document` (optional): The fullDocument to pass as an option
to the ``$changeStream`` stage. Allowed values: 'default',
'updateLookup'. Defaults to 'default'.
When set to 'updateLookup', the change notification for partial
updates will include both a delta describing the changes to the
document, as well as a copy of the entire document that was
changed from some time after the change occurred.
- `resume_after` (optional): The logical starting point for this
change stream.
- `max_await_time_ms` (optional): The maximum time in milliseconds
for the server to wait for changes before responding to a getMore
operation.
- `batch_size` (optional): The maximum number of documents to return
per batch.
- `collation` (optional): The :class:`~pymongo.collation.Collation`
to use for the aggregation.
- `start_at_operation_time` (optional): If provided, the resulting
change stream will only return changes that occurred at or after
the specified :class:`~bson.timestamp.Timestamp`. Requires
MongoDB >= 4.0.
- `session` (optional): a
:class:`~pymongo.client_session.ClientSession`.
:Returns:
A :class:`~pymongo.change_stream.DatabaseChangeStream` cursor.
.. versionadded:: 3.7
.. mongodoc:: changeStreams
.. _change streams specification:
https://github.com/mongodb/specifications/blob/master/source/change-streams/change-streams.rst
"""
return DatabaseChangeStream(
self, pipeline, full_document, resume_after, max_await_time_ms,
batch_size, collation, start_at_operation_time, session
) | [
"def",
"watch",
"(",
"self",
",",
"pipeline",
"=",
"None",
",",
"full_document",
"=",
"'default'",
",",
"resume_after",
"=",
"None",
",",
"max_await_time_ms",
"=",
"None",
",",
"batch_size",
"=",
"None",
",",
"collation",
"=",
"None",
",",
"start_at_operation_time",
"=",
"None",
",",
"session",
"=",
"None",
")",
":",
"return",
"DatabaseChangeStream",
"(",
"self",
",",
"pipeline",
",",
"full_document",
",",
"resume_after",
",",
"max_await_time_ms",
",",
"batch_size",
",",
"collation",
",",
"start_at_operation_time",
",",
"session",
")"
] | 46.154762 | 25.357143 |
def _xorterm_prime(lexer):
"""Return an xor term' expression, eliminates left recursion."""
tok = next(lexer)
# '^' PRODTERM XORTERM'
if isinstance(tok, OP_xor):
prodterm = _prodterm(lexer)
xorterm_prime = _xorterm_prime(lexer)
if xorterm_prime is None:
return prodterm
else:
return ('xor', prodterm, xorterm_prime)
# null
else:
lexer.unpop_token(tok)
return None | [
"def",
"_xorterm_prime",
"(",
"lexer",
")",
":",
"tok",
"=",
"next",
"(",
"lexer",
")",
"# '^' PRODTERM XORTERM'",
"if",
"isinstance",
"(",
"tok",
",",
"OP_xor",
")",
":",
"prodterm",
"=",
"_prodterm",
"(",
"lexer",
")",
"xorterm_prime",
"=",
"_xorterm_prime",
"(",
"lexer",
")",
"if",
"xorterm_prime",
"is",
"None",
":",
"return",
"prodterm",
"else",
":",
"return",
"(",
"'xor'",
",",
"prodterm",
",",
"xorterm_prime",
")",
"# null",
"else",
":",
"lexer",
".",
"unpop_token",
"(",
"tok",
")",
"return",
"None"
] | 29.666667 | 14.333333 |
def parse_options():
"""
Parses command-line option
"""
try:
opts, args = getopt.getopt(sys.argv[1:], 'ac:e:hilms:t:vx',
['adapt', 'comp=', 'enum=', 'exhaust', 'help', 'incr', 'blo',
'minimize', 'solver=', 'trim=', 'verbose'])
except getopt.GetoptError as err:
sys.stderr.write(str(err).capitalize())
usage()
sys.exit(1)
adapt = False
exhaust = False
cmode = None
to_enum = 1
incr = False
blo = False
minz = False
solver = 'g3'
trim = 0
verbose = 1
for opt, arg in opts:
if opt in ('-a', '--adapt'):
adapt = True
elif opt in ('-c', '--comp'):
cmode = str(arg)
elif opt in ('-e', '--enum'):
to_enum = str(arg)
if to_enum != 'all':
to_enum = int(to_enum)
else:
to_enum = 0
elif opt in ('-h', '--help'):
usage()
sys.exit(0)
elif opt in ('-i', '--incr'):
incr = True
elif opt in ('-l', '--blo'):
blo = True
elif opt in ('-m', '--minimize'):
minz = True
elif opt in ('-s', '--solver'):
solver = str(arg)
elif opt in ('-t', '--trim'):
trim = int(arg)
elif opt in ('-v', '--verbose'):
verbose += 1
elif opt in ('-x', '--exhaust'):
exhaust = True
else:
assert False, 'Unhandled option: {0} {1}'.format(opt, arg)
return adapt, blo, cmode, to_enum, exhaust, incr, minz, solver, trim, \
verbose, args | [
"def",
"parse_options",
"(",
")",
":",
"try",
":",
"opts",
",",
"args",
"=",
"getopt",
".",
"getopt",
"(",
"sys",
".",
"argv",
"[",
"1",
":",
"]",
",",
"'ac:e:hilms:t:vx'",
",",
"[",
"'adapt'",
",",
"'comp='",
",",
"'enum='",
",",
"'exhaust'",
",",
"'help'",
",",
"'incr'",
",",
"'blo'",
",",
"'minimize'",
",",
"'solver='",
",",
"'trim='",
",",
"'verbose'",
"]",
")",
"except",
"getopt",
".",
"GetoptError",
"as",
"err",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"str",
"(",
"err",
")",
".",
"capitalize",
"(",
")",
")",
"usage",
"(",
")",
"sys",
".",
"exit",
"(",
"1",
")",
"adapt",
"=",
"False",
"exhaust",
"=",
"False",
"cmode",
"=",
"None",
"to_enum",
"=",
"1",
"incr",
"=",
"False",
"blo",
"=",
"False",
"minz",
"=",
"False",
"solver",
"=",
"'g3'",
"trim",
"=",
"0",
"verbose",
"=",
"1",
"for",
"opt",
",",
"arg",
"in",
"opts",
":",
"if",
"opt",
"in",
"(",
"'-a'",
",",
"'--adapt'",
")",
":",
"adapt",
"=",
"True",
"elif",
"opt",
"in",
"(",
"'-c'",
",",
"'--comp'",
")",
":",
"cmode",
"=",
"str",
"(",
"arg",
")",
"elif",
"opt",
"in",
"(",
"'-e'",
",",
"'--enum'",
")",
":",
"to_enum",
"=",
"str",
"(",
"arg",
")",
"if",
"to_enum",
"!=",
"'all'",
":",
"to_enum",
"=",
"int",
"(",
"to_enum",
")",
"else",
":",
"to_enum",
"=",
"0",
"elif",
"opt",
"in",
"(",
"'-h'",
",",
"'--help'",
")",
":",
"usage",
"(",
")",
"sys",
".",
"exit",
"(",
"0",
")",
"elif",
"opt",
"in",
"(",
"'-i'",
",",
"'--incr'",
")",
":",
"incr",
"=",
"True",
"elif",
"opt",
"in",
"(",
"'-l'",
",",
"'--blo'",
")",
":",
"blo",
"=",
"True",
"elif",
"opt",
"in",
"(",
"'-m'",
",",
"'--minimize'",
")",
":",
"minz",
"=",
"True",
"elif",
"opt",
"in",
"(",
"'-s'",
",",
"'--solver'",
")",
":",
"solver",
"=",
"str",
"(",
"arg",
")",
"elif",
"opt",
"in",
"(",
"'-t'",
",",
"'--trim'",
")",
":",
"trim",
"=",
"int",
"(",
"arg",
")",
"elif",
"opt",
"in",
"(",
"'-v'",
",",
"'--verbose'",
")",
":",
"verbose",
"+=",
"1",
"elif",
"opt",
"in",
"(",
"'-x'",
",",
"'--exhaust'",
")",
":",
"exhaust",
"=",
"True",
"else",
":",
"assert",
"False",
",",
"'Unhandled option: {0} {1}'",
".",
"format",
"(",
"opt",
",",
"arg",
")",
"return",
"adapt",
",",
"blo",
",",
"cmode",
",",
"to_enum",
",",
"exhaust",
",",
"incr",
",",
"minz",
",",
"solver",
",",
"trim",
",",
"verbose",
",",
"args"
] | 27.517241 | 16.862069 |
def semantic_parent(self):
"""Return the semantic parent for this cursor."""
if not hasattr(self, '_semantic_parent'):
self._semantic_parent = conf.lib.clang_getCursorSemanticParent(self)
return self._semantic_parent | [
"def",
"semantic_parent",
"(",
"self",
")",
":",
"if",
"not",
"hasattr",
"(",
"self",
",",
"'_semantic_parent'",
")",
":",
"self",
".",
"_semantic_parent",
"=",
"conf",
".",
"lib",
".",
"clang_getCursorSemanticParent",
"(",
"self",
")",
"return",
"self",
".",
"_semantic_parent"
] | 41.333333 | 17.833333 |
def print_descr(rect, annot):
"""Print a short description to the right of an annot rect."""
annot.parent.insertText(rect.br + (10, 0),
"'%s' annotation" % annot.type[1], color = red) | [
"def",
"print_descr",
"(",
"rect",
",",
"annot",
")",
":",
"annot",
".",
"parent",
".",
"insertText",
"(",
"rect",
".",
"br",
"+",
"(",
"10",
",",
"0",
")",
",",
"\"'%s' annotation\"",
"%",
"annot",
".",
"type",
"[",
"1",
"]",
",",
"color",
"=",
"red",
")"
] | 52 | 11 |
def _plot_graph(G, vertex_color, vertex_size, highlight,
edges, edge_color, edge_width,
indices, colorbar, limits, ax, title, backend):
r"""Plot a graph with signals as color or vertex size.
Parameters
----------
vertex_color : array_like or color
Signal to plot as vertex color (length is the number of vertices).
If None, vertex color is set to `graph.plotting['vertex_color']`.
Alternatively, a color can be set in any format accepted by matplotlib.
Each vertex color can by specified by an RGB(A) array of dimension
`n_vertices` x 3 (or 4).
vertex_size : array_like or int
Signal to plot as vertex size (length is the number of vertices).
Vertex size ranges from 0.5 to 2 times `graph.plotting['vertex_size']`.
If None, vertex size is set to `graph.plotting['vertex_size']`.
Alternatively, a size can be passed as an integer.
The pyqtgraph backend only accepts an integer size.
highlight : iterable
List of indices of vertices to be highlighted.
Useful for example to show where a filter was localized.
Only available with the matplotlib backend.
edges : bool
Whether to draw edges in addition to vertices.
Default to True if less than 10,000 edges to draw.
Note that drawing many edges can be slow.
edge_color : array_like or color
Signal to plot as edge color (length is the number of edges).
Edge color is given by `graph.plotting['edge_color']` and transparency
ranges from 0.2 to 0.9.
If None, edge color is set to `graph.plotting['edge_color']`.
Alternatively, a color can be set in any format accepted by matplotlib.
Each edge color can by specified by an RGB(A) array of dimension
`n_edges` x 3 (or 4).
Only available with the matplotlib backend.
edge_width : array_like or int
Signal to plot as edge width (length is the number of edges).
Edge width ranges from 0.5 to 2 times `graph.plotting['edge_width']`.
If None, edge width is set to `graph.plotting['edge_width']`.
Alternatively, a width can be passed as an integer.
Only available with the matplotlib backend.
indices : bool
Whether to print the node indices (in the adjacency / Laplacian matrix
and signal vectors) on top of each node.
Useful to locate a node of interest.
Only available with the matplotlib backend.
colorbar : bool
Whether to plot a colorbar indicating the signal's amplitude.
Only available with the matplotlib backend.
limits : [vmin, vmax]
Map colors from vmin to vmax.
Defaults to signal minimum and maximum value.
Only available with the matplotlib backend.
ax : :class:`matplotlib.axes.Axes`
Axes where to draw the graph. Optional, created if not passed.
Only available with the matplotlib backend.
title : str
Title of the figure.
backend: {'matplotlib', 'pyqtgraph', None}
Defines the drawing backend to use.
Defaults to :data:`pygsp.plotting.BACKEND`.
Returns
-------
fig : :class:`matplotlib.figure.Figure`
The figure the plot belongs to. Only with the matplotlib backend.
ax : :class:`matplotlib.axes.Axes`
The axes the plot belongs to. Only with the matplotlib backend.
Notes
-----
The orientation of directed edges is not shown. If edges exist in both
directions, they will be drawn on top of each other.
Examples
--------
>>> import matplotlib
>>> graph = graphs.Sensor(20, seed=42)
>>> graph.compute_fourier_basis(n_eigenvectors=4)
>>> _, _, weights = graph.get_edge_list()
>>> fig, ax = graph.plot(graph.U[:, 1], vertex_size=graph.dw,
... edge_color=weights)
>>> graph.plotting['vertex_size'] = 300
>>> graph.plotting['edge_width'] = 5
>>> graph.plotting['edge_style'] = '--'
>>> fig, ax = graph.plot(edge_width=weights, edge_color=(0, .8, .8, .5),
... vertex_color='black')
>>> fig, ax = graph.plot(vertex_size=graph.dw, indices=True,
... highlight=[17, 3, 16], edges=False)
"""
if not hasattr(G, 'coords') or G.coords is None:
raise AttributeError('Graph has no coordinate set. '
'Please run G.set_coordinates() first.')
check_2d_3d = (G.coords.ndim != 2) or (G.coords.shape[1] not in [2, 3])
if G.coords.ndim != 1 and check_2d_3d:
raise AttributeError('Coordinates should be in 1D, 2D or 3D space.')
if G.coords.shape[0] != G.N:
raise AttributeError('Graph needs G.N = {} coordinates.'.format(G.N))
if backend is None:
backend = BACKEND
def check_shape(signal, name, length, many=False):
if (signal.ndim == 0) or (signal.shape[0] != length):
txt = '{}: signal should have length {}.'
txt = txt.format(name, length)
raise ValueError(txt)
if (not many) and (signal.ndim != 1):
txt = '{}: can plot only one signal (not {}).'
txt = txt.format(name, signal.shape[1])
raise ValueError(txt)
def normalize(x):
"""Scale values in [intercept, 1]. Return 0.5 if constant.
Set intercept value in G.plotting["normalize_intercept"]
with value in [0, 1], default is .25.
"""
ptp = x.ptp()
if ptp == 0:
return np.full(x.shape, 0.5)
else:
intercept = G.plotting['normalize_intercept']
return (1. - intercept) * (x - x.min()) / ptp + intercept
def is_color(color):
if backend == 'matplotlib':
mpl, _, _ = _import_plt()
if mpl.colors.is_color_like(color):
return True # single color
try:
return all(map(mpl.colors.is_color_like, color)) # color list
except TypeError:
return False # e.g., color is an int
else:
return False # No support for pyqtgraph (yet).
if vertex_color is None:
limits = [0, 0]
colorbar = False
if backend == 'matplotlib':
vertex_color = (G.plotting['vertex_color'],)
elif is_color(vertex_color):
limits = [0, 0]
colorbar = False
else:
vertex_color = np.asanyarray(vertex_color).squeeze()
check_shape(vertex_color, 'Vertex color', G.n_vertices,
many=(G.coords.ndim == 1))
if vertex_size is None:
vertex_size = G.plotting['vertex_size']
elif not np.isscalar(vertex_size):
vertex_size = np.asanyarray(vertex_size).squeeze()
check_shape(vertex_size, 'Vertex size', G.n_vertices)
vertex_size = G.plotting['vertex_size'] * 4 * normalize(vertex_size)**2
if edges is None:
edges = G.Ne < 10e3
if edge_color is None:
edge_color = (G.plotting['edge_color'],)
elif not is_color(edge_color):
edge_color = np.asanyarray(edge_color).squeeze()
check_shape(edge_color, 'Edge color', G.n_edges)
edge_color = 0.9 * normalize(edge_color)
edge_color = [
np.tile(G.plotting['edge_color'][:3], [len(edge_color), 1]),
edge_color[:, np.newaxis],
]
edge_color = np.concatenate(edge_color, axis=1)
if edge_width is None:
edge_width = G.plotting['edge_width']
elif not np.isscalar(edge_width):
edge_width = np.array(edge_width).squeeze()
check_shape(edge_width, 'Edge width', G.n_edges)
edge_width = G.plotting['edge_width'] * 2 * normalize(edge_width)
if limits is None:
limits = [1.05*vertex_color.min(), 1.05*vertex_color.max()]
if title is None:
title = G.__repr__(limit=4)
if backend == 'pyqtgraph':
if vertex_color is None:
_qtg_plot_graph(G, edges=edges, vertex_size=vertex_size,
title=title)
else:
_qtg_plot_signal(G, signal=vertex_color, vertex_size=vertex_size,
edges=edges, limits=limits, title=title)
elif backend == 'matplotlib':
return _plt_plot_graph(G, vertex_color=vertex_color,
vertex_size=vertex_size, highlight=highlight,
edges=edges, indices=indices, colorbar=colorbar,
edge_color=edge_color, edge_width=edge_width,
limits=limits, ax=ax, title=title)
else:
raise ValueError('Unknown backend {}.'.format(backend)) | [
"def",
"_plot_graph",
"(",
"G",
",",
"vertex_color",
",",
"vertex_size",
",",
"highlight",
",",
"edges",
",",
"edge_color",
",",
"edge_width",
",",
"indices",
",",
"colorbar",
",",
"limits",
",",
"ax",
",",
"title",
",",
"backend",
")",
":",
"if",
"not",
"hasattr",
"(",
"G",
",",
"'coords'",
")",
"or",
"G",
".",
"coords",
"is",
"None",
":",
"raise",
"AttributeError",
"(",
"'Graph has no coordinate set. '",
"'Please run G.set_coordinates() first.'",
")",
"check_2d_3d",
"=",
"(",
"G",
".",
"coords",
".",
"ndim",
"!=",
"2",
")",
"or",
"(",
"G",
".",
"coords",
".",
"shape",
"[",
"1",
"]",
"not",
"in",
"[",
"2",
",",
"3",
"]",
")",
"if",
"G",
".",
"coords",
".",
"ndim",
"!=",
"1",
"and",
"check_2d_3d",
":",
"raise",
"AttributeError",
"(",
"'Coordinates should be in 1D, 2D or 3D space.'",
")",
"if",
"G",
".",
"coords",
".",
"shape",
"[",
"0",
"]",
"!=",
"G",
".",
"N",
":",
"raise",
"AttributeError",
"(",
"'Graph needs G.N = {} coordinates.'",
".",
"format",
"(",
"G",
".",
"N",
")",
")",
"if",
"backend",
"is",
"None",
":",
"backend",
"=",
"BACKEND",
"def",
"check_shape",
"(",
"signal",
",",
"name",
",",
"length",
",",
"many",
"=",
"False",
")",
":",
"if",
"(",
"signal",
".",
"ndim",
"==",
"0",
")",
"or",
"(",
"signal",
".",
"shape",
"[",
"0",
"]",
"!=",
"length",
")",
":",
"txt",
"=",
"'{}: signal should have length {}.'",
"txt",
"=",
"txt",
".",
"format",
"(",
"name",
",",
"length",
")",
"raise",
"ValueError",
"(",
"txt",
")",
"if",
"(",
"not",
"many",
")",
"and",
"(",
"signal",
".",
"ndim",
"!=",
"1",
")",
":",
"txt",
"=",
"'{}: can plot only one signal (not {}).'",
"txt",
"=",
"txt",
".",
"format",
"(",
"name",
",",
"signal",
".",
"shape",
"[",
"1",
"]",
")",
"raise",
"ValueError",
"(",
"txt",
")",
"def",
"normalize",
"(",
"x",
")",
":",
"\"\"\"Scale values in [intercept, 1]. Return 0.5 if constant.\n\n Set intercept value in G.plotting[\"normalize_intercept\"]\n with value in [0, 1], default is .25.\n \"\"\"",
"ptp",
"=",
"x",
".",
"ptp",
"(",
")",
"if",
"ptp",
"==",
"0",
":",
"return",
"np",
".",
"full",
"(",
"x",
".",
"shape",
",",
"0.5",
")",
"else",
":",
"intercept",
"=",
"G",
".",
"plotting",
"[",
"'normalize_intercept'",
"]",
"return",
"(",
"1.",
"-",
"intercept",
")",
"*",
"(",
"x",
"-",
"x",
".",
"min",
"(",
")",
")",
"/",
"ptp",
"+",
"intercept",
"def",
"is_color",
"(",
"color",
")",
":",
"if",
"backend",
"==",
"'matplotlib'",
":",
"mpl",
",",
"_",
",",
"_",
"=",
"_import_plt",
"(",
")",
"if",
"mpl",
".",
"colors",
".",
"is_color_like",
"(",
"color",
")",
":",
"return",
"True",
"# single color",
"try",
":",
"return",
"all",
"(",
"map",
"(",
"mpl",
".",
"colors",
".",
"is_color_like",
",",
"color",
")",
")",
"# color list",
"except",
"TypeError",
":",
"return",
"False",
"# e.g., color is an int",
"else",
":",
"return",
"False",
"# No support for pyqtgraph (yet).",
"if",
"vertex_color",
"is",
"None",
":",
"limits",
"=",
"[",
"0",
",",
"0",
"]",
"colorbar",
"=",
"False",
"if",
"backend",
"==",
"'matplotlib'",
":",
"vertex_color",
"=",
"(",
"G",
".",
"plotting",
"[",
"'vertex_color'",
"]",
",",
")",
"elif",
"is_color",
"(",
"vertex_color",
")",
":",
"limits",
"=",
"[",
"0",
",",
"0",
"]",
"colorbar",
"=",
"False",
"else",
":",
"vertex_color",
"=",
"np",
".",
"asanyarray",
"(",
"vertex_color",
")",
".",
"squeeze",
"(",
")",
"check_shape",
"(",
"vertex_color",
",",
"'Vertex color'",
",",
"G",
".",
"n_vertices",
",",
"many",
"=",
"(",
"G",
".",
"coords",
".",
"ndim",
"==",
"1",
")",
")",
"if",
"vertex_size",
"is",
"None",
":",
"vertex_size",
"=",
"G",
".",
"plotting",
"[",
"'vertex_size'",
"]",
"elif",
"not",
"np",
".",
"isscalar",
"(",
"vertex_size",
")",
":",
"vertex_size",
"=",
"np",
".",
"asanyarray",
"(",
"vertex_size",
")",
".",
"squeeze",
"(",
")",
"check_shape",
"(",
"vertex_size",
",",
"'Vertex size'",
",",
"G",
".",
"n_vertices",
")",
"vertex_size",
"=",
"G",
".",
"plotting",
"[",
"'vertex_size'",
"]",
"*",
"4",
"*",
"normalize",
"(",
"vertex_size",
")",
"**",
"2",
"if",
"edges",
"is",
"None",
":",
"edges",
"=",
"G",
".",
"Ne",
"<",
"10e3",
"if",
"edge_color",
"is",
"None",
":",
"edge_color",
"=",
"(",
"G",
".",
"plotting",
"[",
"'edge_color'",
"]",
",",
")",
"elif",
"not",
"is_color",
"(",
"edge_color",
")",
":",
"edge_color",
"=",
"np",
".",
"asanyarray",
"(",
"edge_color",
")",
".",
"squeeze",
"(",
")",
"check_shape",
"(",
"edge_color",
",",
"'Edge color'",
",",
"G",
".",
"n_edges",
")",
"edge_color",
"=",
"0.9",
"*",
"normalize",
"(",
"edge_color",
")",
"edge_color",
"=",
"[",
"np",
".",
"tile",
"(",
"G",
".",
"plotting",
"[",
"'edge_color'",
"]",
"[",
":",
"3",
"]",
",",
"[",
"len",
"(",
"edge_color",
")",
",",
"1",
"]",
")",
",",
"edge_color",
"[",
":",
",",
"np",
".",
"newaxis",
"]",
",",
"]",
"edge_color",
"=",
"np",
".",
"concatenate",
"(",
"edge_color",
",",
"axis",
"=",
"1",
")",
"if",
"edge_width",
"is",
"None",
":",
"edge_width",
"=",
"G",
".",
"plotting",
"[",
"'edge_width'",
"]",
"elif",
"not",
"np",
".",
"isscalar",
"(",
"edge_width",
")",
":",
"edge_width",
"=",
"np",
".",
"array",
"(",
"edge_width",
")",
".",
"squeeze",
"(",
")",
"check_shape",
"(",
"edge_width",
",",
"'Edge width'",
",",
"G",
".",
"n_edges",
")",
"edge_width",
"=",
"G",
".",
"plotting",
"[",
"'edge_width'",
"]",
"*",
"2",
"*",
"normalize",
"(",
"edge_width",
")",
"if",
"limits",
"is",
"None",
":",
"limits",
"=",
"[",
"1.05",
"*",
"vertex_color",
".",
"min",
"(",
")",
",",
"1.05",
"*",
"vertex_color",
".",
"max",
"(",
")",
"]",
"if",
"title",
"is",
"None",
":",
"title",
"=",
"G",
".",
"__repr__",
"(",
"limit",
"=",
"4",
")",
"if",
"backend",
"==",
"'pyqtgraph'",
":",
"if",
"vertex_color",
"is",
"None",
":",
"_qtg_plot_graph",
"(",
"G",
",",
"edges",
"=",
"edges",
",",
"vertex_size",
"=",
"vertex_size",
",",
"title",
"=",
"title",
")",
"else",
":",
"_qtg_plot_signal",
"(",
"G",
",",
"signal",
"=",
"vertex_color",
",",
"vertex_size",
"=",
"vertex_size",
",",
"edges",
"=",
"edges",
",",
"limits",
"=",
"limits",
",",
"title",
"=",
"title",
")",
"elif",
"backend",
"==",
"'matplotlib'",
":",
"return",
"_plt_plot_graph",
"(",
"G",
",",
"vertex_color",
"=",
"vertex_color",
",",
"vertex_size",
"=",
"vertex_size",
",",
"highlight",
"=",
"highlight",
",",
"edges",
"=",
"edges",
",",
"indices",
"=",
"indices",
",",
"colorbar",
"=",
"colorbar",
",",
"edge_color",
"=",
"edge_color",
",",
"edge_width",
"=",
"edge_width",
",",
"limits",
"=",
"limits",
",",
"ax",
"=",
"ax",
",",
"title",
"=",
"title",
")",
"else",
":",
"raise",
"ValueError",
"(",
"'Unknown backend {}.'",
".",
"format",
"(",
"backend",
")",
")"
] | 41.661765 | 19.759804 |
def write_image(filename, image):
""" Write image data to PNG, JPG file
:param filename: name of PNG or JPG file to write data to
:type filename: str
:param image: image data to write to file
:type image: numpy array
"""
data_format = get_data_format(filename)
if data_format is MimeType.JPG:
LOGGER.warning('Warning: jpeg is a lossy format therefore saved data will be modified.')
return Image.fromarray(image).save(filename) | [
"def",
"write_image",
"(",
"filename",
",",
"image",
")",
":",
"data_format",
"=",
"get_data_format",
"(",
"filename",
")",
"if",
"data_format",
"is",
"MimeType",
".",
"JPG",
":",
"LOGGER",
".",
"warning",
"(",
"'Warning: jpeg is a lossy format therefore saved data will be modified.'",
")",
"return",
"Image",
".",
"fromarray",
"(",
"image",
")",
".",
"save",
"(",
"filename",
")"
] | 38.333333 | 14.5 |
def track_locations(locations):
""" Return an iterator tweets from users in these locations.
See https://dev.twitter.com/streaming/overview/request-parameters#locations
Params:
locations...list of bounding box locations of the form:
southwest_longitude, southwest_latitude, northeast_longitude, northeast_latitude, ...
"""
if len(locations) % 4 != 0:
raise Exception('length of bounding box list should be a multiple of four')
results = twapi.request('statuses/filter', {'locations': ','.join('%f' % l for l in locations)})
return results.get_iterator() | [
"def",
"track_locations",
"(",
"locations",
")",
":",
"if",
"len",
"(",
"locations",
")",
"%",
"4",
"!=",
"0",
":",
"raise",
"Exception",
"(",
"'length of bounding box list should be a multiple of four'",
")",
"results",
"=",
"twapi",
".",
"request",
"(",
"'statuses/filter'",
",",
"{",
"'locations'",
":",
"','",
".",
"join",
"(",
"'%f'",
"%",
"l",
"for",
"l",
"in",
"locations",
")",
"}",
")",
"return",
"results",
".",
"get_iterator",
"(",
")"
] | 54.090909 | 24.727273 |
def _all_combos(self):
"""
RETURN AN ITERATOR OF ALL COORDINATES
"""
combos = _product(self.dims)
if not combos:
return
calc = [(coalesce(_product(self.dims[i+1:]), 1), mm) for i, mm in enumerate(self.dims)]
for c in xrange(combos):
yield tuple(int(c / dd) % mm for dd, mm in calc) | [
"def",
"_all_combos",
"(",
"self",
")",
":",
"combos",
"=",
"_product",
"(",
"self",
".",
"dims",
")",
"if",
"not",
"combos",
":",
"return",
"calc",
"=",
"[",
"(",
"coalesce",
"(",
"_product",
"(",
"self",
".",
"dims",
"[",
"i",
"+",
"1",
":",
"]",
")",
",",
"1",
")",
",",
"mm",
")",
"for",
"i",
",",
"mm",
"in",
"enumerate",
"(",
"self",
".",
"dims",
")",
"]",
"for",
"c",
"in",
"xrange",
"(",
"combos",
")",
":",
"yield",
"tuple",
"(",
"int",
"(",
"c",
"/",
"dd",
")",
"%",
"mm",
"for",
"dd",
",",
"mm",
"in",
"calc",
")"
] | 29.333333 | 19.166667 |
def _reset_docs(self):
"""
Helper to clear the docs on RESET or filter mismatch.
"""
_LOGGER.debug("resetting documents")
self.change_map.clear()
self.resume_token = None
# Mark each document as deleted. If documents are not deleted
# they will be sent again by the server.
for snapshot in self.doc_tree.keys():
name = snapshot.reference._document_path
self.change_map[name] = ChangeType.REMOVED
self.current = False | [
"def",
"_reset_docs",
"(",
"self",
")",
":",
"_LOGGER",
".",
"debug",
"(",
"\"resetting documents\"",
")",
"self",
".",
"change_map",
".",
"clear",
"(",
")",
"self",
".",
"resume_token",
"=",
"None",
"# Mark each document as deleted. If documents are not deleted",
"# they will be sent again by the server.",
"for",
"snapshot",
"in",
"self",
".",
"doc_tree",
".",
"keys",
"(",
")",
":",
"name",
"=",
"snapshot",
".",
"reference",
".",
"_document_path",
"self",
".",
"change_map",
"[",
"name",
"]",
"=",
"ChangeType",
".",
"REMOVED",
"self",
".",
"current",
"=",
"False"
] | 33.866667 | 14.666667 |
def read_partial_map(filenames, column, fullsky=True, **kwargs):
"""
Read a partial HEALPix file(s) and return pixels and values/map. Can
handle 3D healpix maps (pix, value, zdim). Returned array has
shape (dimz,npix).
Parameters:
-----------
filenames : list of input filenames
column : column of interest
fullsky : partial or fullsky map
kwargs : passed to fitsio.read
Returns:
--------
(nside,pix,map) : pixel array and healpix map (partial or fullsky)
"""
# Make sure that PIXEL is in columns
#kwargs['columns'] = ['PIXEL',column]
kwargs['columns'] = ['PIXEL'] + np.atleast_1d(column).tolist()
filenames = np.atleast_1d(filenames)
header = fitsio.read_header(filenames[0],ext=kwargs.get('ext',1))
data = ugali.utils.fileio.load_files(filenames,**kwargs)
pix = data['PIXEL']
value = data[column]
nside = header['NSIDE']
npix = hp.nside2npix(nside)
ndupes = len(pix) - len(np.unique(pix))
if ndupes > 0:
msg = '%i duplicate pixels during load.'%(ndupes)
raise Exception(msg)
if fullsky and not np.isscalar(column):
raise Exception("Cannot make fullsky map from list of columns.")
if fullsky:
shape = list(value.shape)
shape[0] = npix
hpxmap = hp.UNSEEN * np.ones(shape,dtype=value.dtype)
hpxmap[pix] = value
return (nside,pix,hpxmap.T)
else:
return (nside,pix,value.T) | [
"def",
"read_partial_map",
"(",
"filenames",
",",
"column",
",",
"fullsky",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"# Make sure that PIXEL is in columns",
"#kwargs['columns'] = ['PIXEL',column]",
"kwargs",
"[",
"'columns'",
"]",
"=",
"[",
"'PIXEL'",
"]",
"+",
"np",
".",
"atleast_1d",
"(",
"column",
")",
".",
"tolist",
"(",
")",
"filenames",
"=",
"np",
".",
"atleast_1d",
"(",
"filenames",
")",
"header",
"=",
"fitsio",
".",
"read_header",
"(",
"filenames",
"[",
"0",
"]",
",",
"ext",
"=",
"kwargs",
".",
"get",
"(",
"'ext'",
",",
"1",
")",
")",
"data",
"=",
"ugali",
".",
"utils",
".",
"fileio",
".",
"load_files",
"(",
"filenames",
",",
"*",
"*",
"kwargs",
")",
"pix",
"=",
"data",
"[",
"'PIXEL'",
"]",
"value",
"=",
"data",
"[",
"column",
"]",
"nside",
"=",
"header",
"[",
"'NSIDE'",
"]",
"npix",
"=",
"hp",
".",
"nside2npix",
"(",
"nside",
")",
"ndupes",
"=",
"len",
"(",
"pix",
")",
"-",
"len",
"(",
"np",
".",
"unique",
"(",
"pix",
")",
")",
"if",
"ndupes",
">",
"0",
":",
"msg",
"=",
"'%i duplicate pixels during load.'",
"%",
"(",
"ndupes",
")",
"raise",
"Exception",
"(",
"msg",
")",
"if",
"fullsky",
"and",
"not",
"np",
".",
"isscalar",
"(",
"column",
")",
":",
"raise",
"Exception",
"(",
"\"Cannot make fullsky map from list of columns.\"",
")",
"if",
"fullsky",
":",
"shape",
"=",
"list",
"(",
"value",
".",
"shape",
")",
"shape",
"[",
"0",
"]",
"=",
"npix",
"hpxmap",
"=",
"hp",
".",
"UNSEEN",
"*",
"np",
".",
"ones",
"(",
"shape",
",",
"dtype",
"=",
"value",
".",
"dtype",
")",
"hpxmap",
"[",
"pix",
"]",
"=",
"value",
"return",
"(",
"nside",
",",
"pix",
",",
"hpxmap",
".",
"T",
")",
"else",
":",
"return",
"(",
"nside",
",",
"pix",
",",
"value",
".",
"T",
")"
] | 31.478261 | 18.782609 |
def save_user(self, uid, user_password, user_email='', user_channels=None, user_roles=None, user_views=None, disable_account=False):
    '''
    a method to add or update an authorized user to the bucket

    The record is sent with an HTTP PUT, so calling this again with an
    existing uid updates the account in place ("save" = upsert).  The user
    is always granted a channel and a role named after their own uid, in
    addition to any extra channels/roles supplied.

    :param uid: string with id to assign to user
    :param user_password: string with password to assign to user
    :param user_email: [optional] string with email of user for future lookup
    :param user_channels: [optional] list of strings with channels to subscribe to user
    :param user_roles: [optional] list of strings with roles to assign to user
    :param user_views: [optional] list of query criteria to create as views for user
    :param disable_account: boolean to disable access to records by user
    :return: integer with status code of user account creation
    '''
    # https://developer.couchbase.com/documentation/mobile/1.5/references/sync-gateway/admin-rest-api/index.html#/user/put__db___user__name_
    # https://developer.couchbase.com/documentation/mobile/1.5/guides/sync-gateway/authorizing-users/index.html
    title = '%s.save_user' % self.__class__.__name__
    # validate inputs (only truthy values are validated; empty optionals pass)
    input_fields = {
        'uid': uid,
        'user_password': user_password,
        'user_email': user_email,
        'user_channels': user_channels,
        'user_roles': user_roles
    }
    for key, value in input_fields.items():
        if value:
            object_title = '%s(%s=%s)' % (title, key, str(value))
            self.fields.validate(value, '.%s' % key, object_title)
    # construct url
    url = self.bucket_url + '/_user/%s' % uid
    # create default settings: the uid doubles as the user's own channel and role
    json_data = {
        'admin_channels': [ uid ],
        'admin_roles': [ uid ],
        'name': uid,
        'password': user_password,
        'disabled': disable_account
    }
    # add optional additional channels and roles
    if user_email:
        json_data['email'] = user_email
    if user_channels:
        json_data['admin_channels'].extend(user_channels)
    if user_roles:
        json_data['admin_roles'].extend(user_roles)
    # send request
    response = requests.put(url, json=json_data)
    # create indices -- views are only built on success and only for
    # non-public buckets (presumably public buckets need no per-user views;
    # NOTE(review): confirm against self.public semantics)
    if response.status_code in (200, 201) and not self.public:
        self.create_view(uid=uid)
        if user_views:
            for criteria in user_views:
                self.create_view(query_criteria=criteria, uid=uid)
    # report outcome
    self.printer('User "%s" updated in bucket "%s"' % (uid, self.bucket_name))
    return response.status_code
"def",
"save_user",
"(",
"self",
",",
"uid",
",",
"user_password",
",",
"user_email",
"=",
"''",
",",
"user_channels",
"=",
"None",
",",
"user_roles",
"=",
"None",
",",
"user_views",
"=",
"None",
",",
"disable_account",
"=",
"False",
")",
":",
"# https://developer.couchbase.com/documentation/mobile/1.5/references/sync-gateway/admin-rest-api/index.html#/user/put__db___user__name_",
"# https://developer.couchbase.com/documentation/mobile/1.5/guides/sync-gateway/authorizing-users/index.html",
"title",
"=",
"'%s.save_user'",
"%",
"self",
".",
"__class__",
".",
"__name__",
"# validate inputs",
"input_fields",
"=",
"{",
"'uid'",
":",
"uid",
",",
"'user_password'",
":",
"user_password",
",",
"'user_email'",
":",
"user_email",
",",
"'user_channels'",
":",
"user_channels",
",",
"'user_roles'",
":",
"user_roles",
"}",
"for",
"key",
",",
"value",
"in",
"input_fields",
".",
"items",
"(",
")",
":",
"if",
"value",
":",
"object_title",
"=",
"'%s(%s=%s)'",
"%",
"(",
"title",
",",
"key",
",",
"str",
"(",
"value",
")",
")",
"self",
".",
"fields",
".",
"validate",
"(",
"value",
",",
"'.%s'",
"%",
"key",
",",
"object_title",
")",
"# construct url",
"url",
"=",
"self",
".",
"bucket_url",
"+",
"'/_user/%s'",
"%",
"uid",
"# create default settings",
"json_data",
"=",
"{",
"'admin_channels'",
":",
"[",
"uid",
"]",
",",
"'admin_roles'",
":",
"[",
"uid",
"]",
",",
"'name'",
":",
"uid",
",",
"'password'",
":",
"user_password",
",",
"'disabled'",
":",
"disable_account",
"}",
"# add optional additional channels and roles",
"if",
"user_email",
":",
"json_data",
"[",
"'email'",
"]",
"=",
"user_email",
"if",
"user_channels",
":",
"json_data",
"[",
"'admin_channels'",
"]",
".",
"extend",
"(",
"user_channels",
")",
"if",
"user_roles",
":",
"json_data",
"[",
"'admin_roles'",
"]",
".",
"extend",
"(",
"user_roles",
")",
"# send request",
"response",
"=",
"requests",
".",
"put",
"(",
"url",
",",
"json",
"=",
"json_data",
")",
"# create indices",
"if",
"response",
".",
"status_code",
"in",
"(",
"200",
",",
"201",
")",
"and",
"not",
"self",
".",
"public",
":",
"self",
".",
"create_view",
"(",
"uid",
"=",
"uid",
")",
"if",
"user_views",
":",
"for",
"criteria",
"in",
"user_views",
":",
"self",
".",
"create_view",
"(",
"query_criteria",
"=",
"criteria",
",",
"uid",
"=",
"uid",
")",
"# report outcome",
"self",
".",
"printer",
"(",
"'User \"%s\" updated in bucket \"%s\"'",
"%",
"(",
"uid",
",",
"self",
".",
"bucket_name",
")",
")",
"return",
"response",
".",
"status_code"
] | 39.761194 | 25.134328 |
def declare_type(self, declared_type):  # type: (TypeDef) -> TypeDef
    """Add this type to our collection, if needed.

    The collection is a dict keyed by type name; a type already registered
    under the same name is kept rather than overwritten.

    :param declared_type: the TypeDef to register
    :return: ``declared_type`` unchanged, for convenient chaining
    """
    # Bug fix: the dict is keyed by *name*, so membership must be tested
    # against the name.  Testing the TypeDef object itself never matched a
    # key, so every call silently overwrote the previous registration.
    if declared_type.name not in self.collected_types:
        self.collected_types[declared_type.name] = declared_type
    return declared_type
"def",
"declare_type",
"(",
"self",
",",
"declared_type",
")",
":",
"# type: (TypeDef) -> TypeDef",
"if",
"declared_type",
"not",
"in",
"self",
".",
"collected_types",
":",
"self",
".",
"collected_types",
"[",
"declared_type",
".",
"name",
"]",
"=",
"declared_type",
"return",
"declared_type"
] | 54.8 | 16.2 |
def arduino_path():
    """Expanded, absolute Arduino root path.

    Resolution order: the module-level ``_ARDUINO_PATH`` override, then the
    ``ARDUINO_HOME`` environment variable, then ``arduino_default_path()``.
    Asserts that a candidate was found and that it exists on disk.
    """
    root = _ARDUINO_PATH or os.environ.get('ARDUINO_HOME') or arduino_default_path()
    assert root, str(root)
    root = path(root).expand().abspath()
    assert root.exists(), 'arduino path not found:' + str(root)
    return root
"def",
"arduino_path",
"(",
")",
":",
"x",
"=",
"_ARDUINO_PATH",
"if",
"not",
"x",
":",
"x",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'ARDUINO_HOME'",
")",
"if",
"not",
"x",
":",
"x",
"=",
"arduino_default_path",
"(",
")",
"assert",
"x",
",",
"str",
"(",
"x",
")",
"x",
"=",
"path",
"(",
"x",
")",
".",
"expand",
"(",
")",
".",
"abspath",
"(",
")",
"assert",
"x",
".",
"exists",
"(",
")",
",",
"'arduino path not found:'",
"+",
"str",
"(",
"x",
")",
"return",
"x"
] | 21.3125 | 23.3125 |
def render_embed_css(self, css_embed: Iterable[bytes]) -> bytes:
    """Render the embedded CSS for the final page as one ``<style>`` block.

    The collected fragments are joined with newlines inside a single
    ``<style type="text/css">`` tag.  Override this method in a sub-classed
    controller to change the output.
    """
    body = b"\n".join(css_embed)
    return b'<style type="text/css">\n' + body + b"\n</style>"
"def",
"render_embed_css",
"(",
"self",
",",
"css_embed",
":",
"Iterable",
"[",
"bytes",
"]",
")",
"->",
"bytes",
":",
"return",
"b'<style type=\"text/css\">\\n'",
"+",
"b\"\\n\"",
".",
"join",
"(",
"css_embed",
")",
"+",
"b\"\\n</style>\""
] | 47.428571 | 22.857143 |
def parameterize_notebook(nb, parameters, report_mode=False):
    """Assigned parameters into the appropriate place in the input notebook

    Parameters
    ----------
    nb : NotebookNode
        Executable notebook object
    parameters : dict
        Arbitrary keyword arguments to pass as notebook parameters
    report_mode : bool, optional
        Flag to set report mode

    Returns
    -------
    NotebookNode
        A deep copy of ``nb`` containing an ``injected-parameters`` code
        cell with the parameter assignments, and the parameters recorded
        under ``metadata.papermill['parameters']``.
    """
    # Load from a file if 'parameters' is a string.
    if isinstance(parameters, six.string_types):
        parameters = read_yaml_file(parameters)
    # Copy the nb object to avoid polluting the input
    nb = copy.deepcopy(nb)
    kernel_name = nb.metadata.kernelspec.name
    language = nb.metadata.kernelspec.language
    # Generate parameter content based on the kernel_name -- the injected
    # cell must be written in the notebook's own language.
    param_content = translate_parameters(kernel_name, language, parameters)
    newcell = nbformat.v4.new_code_cell(source=param_content)
    newcell.metadata['tags'] = ['injected-parameters']
    if report_mode:
        # In report mode the injected source is hidden in the rendered output.
        newcell.metadata['jupyter'] = newcell.get('jupyter', {})
        newcell.metadata['jupyter']['source_hidden'] = True
    param_cell_index = _find_first_tagged_cell_index(nb, 'parameters')
    injected_cell_index = _find_first_tagged_cell_index(nb, 'injected-parameters')
    # Placement rules, in priority order: replace a previous injection,
    # else insert right after the 'parameters' cell, else prepend.
    if injected_cell_index >= 0:
        # Replace the injected cell with a new version
        before = nb.cells[:injected_cell_index]
        after = nb.cells[injected_cell_index + 1 :]
    elif param_cell_index >= 0:
        # Add an injected cell after the parameter cell
        before = nb.cells[: param_cell_index + 1]
        after = nb.cells[param_cell_index + 1 :]
    else:
        # Inject to the top of the notebook
        logger.warning("Input notebook does not contain a cell with tag 'parameters'")
        before = []
        after = nb.cells
    nb.cells = before + [newcell] + after
    nb.metadata.papermill['parameters'] = parameters
    return nb
"def",
"parameterize_notebook",
"(",
"nb",
",",
"parameters",
",",
"report_mode",
"=",
"False",
")",
":",
"# Load from a file if 'parameters' is a string.",
"if",
"isinstance",
"(",
"parameters",
",",
"six",
".",
"string_types",
")",
":",
"parameters",
"=",
"read_yaml_file",
"(",
"parameters",
")",
"# Copy the nb object to avoid polluting the input",
"nb",
"=",
"copy",
".",
"deepcopy",
"(",
"nb",
")",
"kernel_name",
"=",
"nb",
".",
"metadata",
".",
"kernelspec",
".",
"name",
"language",
"=",
"nb",
".",
"metadata",
".",
"kernelspec",
".",
"language",
"# Generate parameter content based on the kernel_name",
"param_content",
"=",
"translate_parameters",
"(",
"kernel_name",
",",
"language",
",",
"parameters",
")",
"newcell",
"=",
"nbformat",
".",
"v4",
".",
"new_code_cell",
"(",
"source",
"=",
"param_content",
")",
"newcell",
".",
"metadata",
"[",
"'tags'",
"]",
"=",
"[",
"'injected-parameters'",
"]",
"if",
"report_mode",
":",
"newcell",
".",
"metadata",
"[",
"'jupyter'",
"]",
"=",
"newcell",
".",
"get",
"(",
"'jupyter'",
",",
"{",
"}",
")",
"newcell",
".",
"metadata",
"[",
"'jupyter'",
"]",
"[",
"'source_hidden'",
"]",
"=",
"True",
"param_cell_index",
"=",
"_find_first_tagged_cell_index",
"(",
"nb",
",",
"'parameters'",
")",
"injected_cell_index",
"=",
"_find_first_tagged_cell_index",
"(",
"nb",
",",
"'injected-parameters'",
")",
"if",
"injected_cell_index",
">=",
"0",
":",
"# Replace the injected cell with a new version",
"before",
"=",
"nb",
".",
"cells",
"[",
":",
"injected_cell_index",
"]",
"after",
"=",
"nb",
".",
"cells",
"[",
"injected_cell_index",
"+",
"1",
":",
"]",
"elif",
"param_cell_index",
">=",
"0",
":",
"# Add an injected cell after the parameter cell",
"before",
"=",
"nb",
".",
"cells",
"[",
":",
"param_cell_index",
"+",
"1",
"]",
"after",
"=",
"nb",
".",
"cells",
"[",
"param_cell_index",
"+",
"1",
":",
"]",
"else",
":",
"# Inject to the top of the notebook",
"logger",
".",
"warning",
"(",
"\"Input notebook does not contain a cell with tag 'parameters'\"",
")",
"before",
"=",
"[",
"]",
"after",
"=",
"nb",
".",
"cells",
"nb",
".",
"cells",
"=",
"before",
"+",
"[",
"newcell",
"]",
"+",
"after",
"nb",
".",
"metadata",
".",
"papermill",
"[",
"'parameters'",
"]",
"=",
"parameters",
"return",
"nb"
] | 36.230769 | 20.115385 |
def iterate(self):
    """
    Drive one iteration of the proxy's event processing.

    Must be called regularly when using an external event loop.  Raises
    RuntimeError if the run loop has not been started, or if this instance
    is running its own driver loop (which iterates by itself).
    """
    if not self._inLoop:
        raise RuntimeError('run loop not started')
    if self._driverLoop:
        raise RuntimeError('iterate not valid in driver run loop')
    self.proxy.iterate()
"def",
"iterate",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_inLoop",
":",
"raise",
"RuntimeError",
"(",
"'run loop not started'",
")",
"elif",
"self",
".",
"_driverLoop",
":",
"raise",
"RuntimeError",
"(",
"'iterate not valid in driver run loop'",
")",
"self",
".",
"proxy",
".",
"iterate",
"(",
")"
] | 35.222222 | 14.111111 |
def _etextno_to_uri_subdirectory(etextno):
"""Returns the subdirectory that an etextno will be found in a gutenberg
mirror. Generally, one finds the subdirectory by separating out each digit
of the etext number, and uses it for a directory. The exception here is for
etext numbers less than 10, which are prepended with a 0 for the directory
traversal.
>>> _etextno_to_uri_subdirectory(1)
'0/1'
>>> _etextno_to_uri_subdirectory(19)
'1/19'
>>> _etextno_to_uri_subdirectory(15453)
'1/5/4/5/15453'
"""
str_etextno = str(etextno).zfill(2)
all_but_last_digit = list(str_etextno[:-1])
subdir_part = "/".join(all_but_last_digit)
subdir = "{}/{}".format(subdir_part, etextno) # etextno not zfilled
return subdir | [
"def",
"_etextno_to_uri_subdirectory",
"(",
"etextno",
")",
":",
"str_etextno",
"=",
"str",
"(",
"etextno",
")",
".",
"zfill",
"(",
"2",
")",
"all_but_last_digit",
"=",
"list",
"(",
"str_etextno",
"[",
":",
"-",
"1",
"]",
")",
"subdir_part",
"=",
"\"/\"",
".",
"join",
"(",
"all_but_last_digit",
")",
"subdir",
"=",
"\"{}/{}\"",
".",
"format",
"(",
"subdir_part",
",",
"etextno",
")",
"# etextno not zfilled",
"return",
"subdir"
] | 39.736842 | 17.789474 |
def execute(self, eopatch):
    """ Add cloud binary mask and (optionally) cloud probability map to input eopatch

    :param eopatch: Input `EOPatch` instance
    :return: `EOPatch` with additional cloud maps
    :raises ValueError: if the eopatch contains no data features at all
    """
    # Downsample or make request
    if not eopatch.data:
        raise ValueError('EOPatch must contain some data feature')
    if self.data_feature in eopatch.data:
        new_data, rescale = self._downscaling(eopatch.data[self.data_feature], eopatch.meta_info)
        reference_shape = eopatch.data[self.data_feature].shape[:3]
    else:
        new_data, new_dates = self._make_request(eopatch.bbox, eopatch.meta_info, eopatch.timestamp)
        removed_frames = eopatch.consolidate_timestamps(new_dates)
        for rm_frame in removed_frames:
            LOGGER.warning('Removed data for frame %s from '
                           'eopatch due to unavailability of %s!', str(rm_frame), self.data_feature)
        # Get reference shape from first item in data dictionary
        reference_shape = next(iter(eopatch.data.values())).shape[:3]
        rescale = self._get_rescale_factors(reference_shape[1:3], eopatch.meta_info)
    clf_probs_lr = self.classifier.get_cloud_probability_maps(new_data)
    clf_mask_lr = self.classifier.get_mask_from_prob(clf_probs_lr)
    # Add cloud mask as a feature to EOPatch
    clf_mask_hr = self._upsampling(clf_mask_lr, rescale, reference_shape, interp='nearest')
    # Fix: `np.bool` was a deprecated alias of the builtin `bool` and was
    # removed in NumPy 1.24; astype(bool) is the equivalent supported form.
    eopatch.mask[self.cm_feature] = clf_mask_hr.astype(bool)
    # If the feature name for cloud probability maps is specified, add as feature
    if self.cprobs_feature is not None:
        clf_probs_hr = self._upsampling(clf_probs_lr, rescale, reference_shape, interp='linear')
        eopatch.data[self.cprobs_feature] = clf_probs_hr.astype(np.float32)
    return eopatch
"def",
"execute",
"(",
"self",
",",
"eopatch",
")",
":",
"# Downsample or make request",
"if",
"not",
"eopatch",
".",
"data",
":",
"raise",
"ValueError",
"(",
"'EOPatch must contain some data feature'",
")",
"if",
"self",
".",
"data_feature",
"in",
"eopatch",
".",
"data",
":",
"new_data",
",",
"rescale",
"=",
"self",
".",
"_downscaling",
"(",
"eopatch",
".",
"data",
"[",
"self",
".",
"data_feature",
"]",
",",
"eopatch",
".",
"meta_info",
")",
"reference_shape",
"=",
"eopatch",
".",
"data",
"[",
"self",
".",
"data_feature",
"]",
".",
"shape",
"[",
":",
"3",
"]",
"else",
":",
"new_data",
",",
"new_dates",
"=",
"self",
".",
"_make_request",
"(",
"eopatch",
".",
"bbox",
",",
"eopatch",
".",
"meta_info",
",",
"eopatch",
".",
"timestamp",
")",
"removed_frames",
"=",
"eopatch",
".",
"consolidate_timestamps",
"(",
"new_dates",
")",
"for",
"rm_frame",
"in",
"removed_frames",
":",
"LOGGER",
".",
"warning",
"(",
"'Removed data for frame %s from '",
"'eopatch due to unavailability of %s!'",
",",
"str",
"(",
"rm_frame",
")",
",",
"self",
".",
"data_feature",
")",
"# Get reference shape from first item in data dictionary",
"reference_shape",
"=",
"next",
"(",
"iter",
"(",
"eopatch",
".",
"data",
".",
"values",
"(",
")",
")",
")",
".",
"shape",
"[",
":",
"3",
"]",
"rescale",
"=",
"self",
".",
"_get_rescale_factors",
"(",
"reference_shape",
"[",
"1",
":",
"3",
"]",
",",
"eopatch",
".",
"meta_info",
")",
"clf_probs_lr",
"=",
"self",
".",
"classifier",
".",
"get_cloud_probability_maps",
"(",
"new_data",
")",
"clf_mask_lr",
"=",
"self",
".",
"classifier",
".",
"get_mask_from_prob",
"(",
"clf_probs_lr",
")",
"# Add cloud mask as a feature to EOPatch",
"clf_mask_hr",
"=",
"self",
".",
"_upsampling",
"(",
"clf_mask_lr",
",",
"rescale",
",",
"reference_shape",
",",
"interp",
"=",
"'nearest'",
")",
"eopatch",
".",
"mask",
"[",
"self",
".",
"cm_feature",
"]",
"=",
"clf_mask_hr",
".",
"astype",
"(",
"np",
".",
"bool",
")",
"# If the feature name for cloud probability maps is specified, add as feature",
"if",
"self",
".",
"cprobs_feature",
"is",
"not",
"None",
":",
"clf_probs_hr",
"=",
"self",
".",
"_upsampling",
"(",
"clf_probs_lr",
",",
"rescale",
",",
"reference_shape",
",",
"interp",
"=",
"'linear'",
")",
"eopatch",
".",
"data",
"[",
"self",
".",
"cprobs_feature",
"]",
"=",
"clf_probs_hr",
".",
"astype",
"(",
"np",
".",
"float32",
")",
"return",
"eopatch"
] | 52.5 | 29.388889 |
def set_cache_url(self):
    """
    Build and store the cache url: the scheme, a colon, then the sorted,
    comma separated list of email addresses.
    """
    sorted_emails = sorted(self.addresses)
    self.cache_url = u"{0}:{1}".format(self.scheme, u",".join(sorted_emails))
"def",
"set_cache_url",
"(",
"self",
")",
":",
"emails",
"=",
"u\",\"",
".",
"join",
"(",
"sorted",
"(",
"self",
".",
"addresses",
")",
")",
"self",
".",
"cache_url",
"=",
"u\"%s:%s\"",
"%",
"(",
"self",
".",
"scheme",
",",
"emails",
")"
] | 35.166667 | 10.166667 |
def get_algs_from_ciphersuite_name(ciphersuite_name):
    """
    Parse a ciphersuite name and return the 5-tuple
    (kx_alg, cipher_alg, hmac_alg, hash_alg, tls1_3): the Key Exchange
    Algorithm class, the Cipher class, the HMAC class (None for AEAD
    suites), the Hash class, and a flag marking TLS 1.3 suites.
    (The old docstring said "3-tuple"; five values have always been
    returned.)
    """
    tls1_3 = False
    if ciphersuite_name.startswith("TLS"):
        s = ciphersuite_name[4:]
        if s.endswith("CCM") or s.endswith("CCM_8"):
            # CCM suites hard-code SHA256 and, being AEAD, use no HMAC.
            kx_name, s = s.split("_WITH_")
            kx_alg = _tls_kx_algs.get(kx_name)
            hash_alg = _tls_hash_algs.get("SHA256")
            cipher_alg = _tls_cipher_algs.get(s)
            hmac_alg = None
        else:
            if "WITH" in s:
                kx_name, s = s.split("_WITH_")
                kx_alg = _tls_kx_algs.get(kx_name)
            else:
                # TLS 1.3 suite names carry no key-exchange component.
                tls1_3 = True
                kx_alg = _tls_kx_algs.get("TLS13")
            # The hash is the last '_'-separated token; the cipher is
            # everything before it.
            hash_name = s.split('_')[-1]
            hash_alg = _tls_hash_algs.get(hash_name)
            cipher_name = s[:-(len(hash_name) + 1)]
            if tls1_3:
                cipher_name += "_TLS13"
            cipher_alg = _tls_cipher_algs.get(cipher_name)
            hmac_alg = None
            if cipher_alg is not None and cipher_alg.type != "aead":
                hmac_name = "HMAC-%s" % hash_name
                hmac_alg = _tls_hmac_algs.get(hmac_name)
    elif ciphersuite_name.startswith("SSL"):
        s = ciphersuite_name[7:]
        kx_alg = _tls_kx_algs.get("SSLv2")
        cipher_name, hash_name = s.split("_WITH_")
        # Bug fix: str.rstrip("_EXPORT40") strips any trailing characters
        # from the set {_,E,X,P,O,R,T,4,0}, not the literal suffix, which
        # can mangle cipher names.  Strip the exact suffix instead.
        is_export = cipher_name.endswith("_EXPORT40")
        if is_export:
            cipher_name = cipher_name[:-len("_EXPORT40")]
        cipher_alg = _tls_cipher_algs.get(cipher_name)
        kx_alg.export = is_export
        hmac_alg = _tls_hmac_algs.get("HMAC-NULL")
        hash_alg = _tls_hash_algs.get(hash_name)
    return kx_alg, cipher_alg, hmac_alg, hash_alg, tls1_3
"def",
"get_algs_from_ciphersuite_name",
"(",
"ciphersuite_name",
")",
":",
"tls1_3",
"=",
"False",
"if",
"ciphersuite_name",
".",
"startswith",
"(",
"\"TLS\"",
")",
":",
"s",
"=",
"ciphersuite_name",
"[",
"4",
":",
"]",
"if",
"s",
".",
"endswith",
"(",
"\"CCM\"",
")",
"or",
"s",
".",
"endswith",
"(",
"\"CCM_8\"",
")",
":",
"kx_name",
",",
"s",
"=",
"s",
".",
"split",
"(",
"\"_WITH_\"",
")",
"kx_alg",
"=",
"_tls_kx_algs",
".",
"get",
"(",
"kx_name",
")",
"hash_alg",
"=",
"_tls_hash_algs",
".",
"get",
"(",
"\"SHA256\"",
")",
"cipher_alg",
"=",
"_tls_cipher_algs",
".",
"get",
"(",
"s",
")",
"hmac_alg",
"=",
"None",
"else",
":",
"if",
"\"WITH\"",
"in",
"s",
":",
"kx_name",
",",
"s",
"=",
"s",
".",
"split",
"(",
"\"_WITH_\"",
")",
"kx_alg",
"=",
"_tls_kx_algs",
".",
"get",
"(",
"kx_name",
")",
"else",
":",
"tls1_3",
"=",
"True",
"kx_alg",
"=",
"_tls_kx_algs",
".",
"get",
"(",
"\"TLS13\"",
")",
"hash_name",
"=",
"s",
".",
"split",
"(",
"'_'",
")",
"[",
"-",
"1",
"]",
"hash_alg",
"=",
"_tls_hash_algs",
".",
"get",
"(",
"hash_name",
")",
"cipher_name",
"=",
"s",
"[",
":",
"-",
"(",
"len",
"(",
"hash_name",
")",
"+",
"1",
")",
"]",
"if",
"tls1_3",
":",
"cipher_name",
"+=",
"\"_TLS13\"",
"cipher_alg",
"=",
"_tls_cipher_algs",
".",
"get",
"(",
"cipher_name",
")",
"hmac_alg",
"=",
"None",
"if",
"cipher_alg",
"is",
"not",
"None",
"and",
"cipher_alg",
".",
"type",
"!=",
"\"aead\"",
":",
"hmac_name",
"=",
"\"HMAC-%s\"",
"%",
"hash_name",
"hmac_alg",
"=",
"_tls_hmac_algs",
".",
"get",
"(",
"hmac_name",
")",
"elif",
"ciphersuite_name",
".",
"startswith",
"(",
"\"SSL\"",
")",
":",
"s",
"=",
"ciphersuite_name",
"[",
"7",
":",
"]",
"kx_alg",
"=",
"_tls_kx_algs",
".",
"get",
"(",
"\"SSLv2\"",
")",
"cipher_name",
",",
"hash_name",
"=",
"s",
".",
"split",
"(",
"\"_WITH_\"",
")",
"cipher_alg",
"=",
"_tls_cipher_algs",
".",
"get",
"(",
"cipher_name",
".",
"rstrip",
"(",
"\"_EXPORT40\"",
")",
")",
"kx_alg",
".",
"export",
"=",
"cipher_name",
".",
"endswith",
"(",
"\"_EXPORT40\"",
")",
"hmac_alg",
"=",
"_tls_hmac_algs",
".",
"get",
"(",
"\"HMAC-NULL\"",
")",
"hash_alg",
"=",
"_tls_hash_algs",
".",
"get",
"(",
"hash_name",
")",
"return",
"kx_alg",
",",
"cipher_alg",
",",
"hmac_alg",
",",
"hash_alg",
",",
"tls1_3"
] | 36.638298 | 16.638298 |
def extract_file_config(content):
    """
    Pull out the file-specific config specified in the docstring.

    Scans *content* for comment lines of the form
    ``# sphinx_gallery_<name> = <python literal>`` and returns a dict
    mapping each name to its evaluated value; lines whose value is not a
    valid literal are logged and skipped.
    """
    pattern = re.compile(
        r"^\s*#\s*sphinx_gallery_([A-Za-z0-9_]+)\s*=\s*(.+)\s*$",
        re.MULTILINE)
    file_conf = {}
    for found in pattern.finditer(content):
        option_name, raw_value = found.group(1), found.group(2)
        try:
            parsed = ast.literal_eval(raw_value)
        except (SyntaxError, ValueError):
            logger.warning(
                'Sphinx-gallery option %s was passed invalid value %s',
                option_name, raw_value)
        else:
            file_conf[option_name] = parsed
    return file_conf
"def",
"extract_file_config",
"(",
"content",
")",
":",
"prop_pat",
"=",
"re",
".",
"compile",
"(",
"r\"^\\s*#\\s*sphinx_gallery_([A-Za-z0-9_]+)\\s*=\\s*(.+)\\s*$\"",
",",
"re",
".",
"MULTILINE",
")",
"file_conf",
"=",
"{",
"}",
"for",
"match",
"in",
"re",
".",
"finditer",
"(",
"prop_pat",
",",
"content",
")",
":",
"name",
"=",
"match",
".",
"group",
"(",
"1",
")",
"value",
"=",
"match",
".",
"group",
"(",
"2",
")",
"try",
":",
"value",
"=",
"ast",
".",
"literal_eval",
"(",
"value",
")",
"except",
"(",
"SyntaxError",
",",
"ValueError",
")",
":",
"logger",
".",
"warning",
"(",
"'Sphinx-gallery option %s was passed invalid value %s'",
",",
"name",
",",
"value",
")",
"else",
":",
"file_conf",
"[",
"name",
"]",
"=",
"value",
"return",
"file_conf"
] | 30.428571 | 15.285714 |
def get_service_plan_for_service(self, service_name):
    """
    Return the service plans available for a given service.

    Looks the service up by its entity label; returns the list of plan
    resources for the first match, or None when no service matches.
    """
    for candidate in self.get_services()['resources']:
        entity = candidate['entity']
        if entity['label'] == service_name:
            return self.api.get(entity['service_plans_url'])['resources']
"def",
"get_service_plan_for_service",
"(",
"self",
",",
"service_name",
")",
":",
"services",
"=",
"self",
".",
"get_services",
"(",
")",
"for",
"service",
"in",
"services",
"[",
"'resources'",
"]",
":",
"if",
"service",
"[",
"'entity'",
"]",
"[",
"'label'",
"]",
"==",
"service_name",
":",
"response",
"=",
"self",
".",
"api",
".",
"get",
"(",
"service",
"[",
"'entity'",
"]",
"[",
"'service_plans_url'",
"]",
")",
"return",
"response",
"[",
"'resources'",
"]"
] | 44.666667 | 11.555556 |
def readTable(self, tableName: str) -> None:
    """
    Read the table corresponding to the specified name, equivalent to the
    AMPL statement:

    .. code-block:: ampl

        read table tableName;

    Args:
        tableName: Name of the table to be read.
    """
    # Delegate to the underlying AMPL implementation while holding the
    # interpreter lock, so concurrent API calls cannot interleave.
    lock_and_call(
        lambda: self._impl.readTable(tableName),
        self._lock
    )
"def",
"readTable",
"(",
"self",
",",
"tableName",
")",
":",
"lock_and_call",
"(",
"lambda",
":",
"self",
".",
"_impl",
".",
"readTable",
"(",
"tableName",
")",
",",
"self",
".",
"_lock",
")"
] | 24 | 20 |
def terminate(self):
    """Properly terminates this player instance. Preferably use this instead of relying on python's garbage
    collector to cause this to be called from the object's destructor.
    """
    # Detach the handle first so any concurrent use of this instance sees it
    # as already closed while teardown proceeds.
    self.handle, handle = None, self.handle
    if threading.current_thread() is self._event_thread:
        # Handle special case to allow event handle to be detached.
        # This is necessary since otherwise the event thread would deadlock itself.
        grim_reaper = threading.Thread(target=lambda: _mpv_terminate_destroy(handle))
        grim_reaper.start()
    else:
        _mpv_terminate_destroy(handle)
    if self._event_thread:
        # NOTE(review): when terminate() runs on the event thread itself this
        # joins the current thread -- confirm the event thread has exited (or
        # exits on destroy) before this point in that path.
        self._event_thread.join()
"def",
"terminate",
"(",
"self",
")",
":",
"self",
".",
"handle",
",",
"handle",
"=",
"None",
",",
"self",
".",
"handle",
"if",
"threading",
".",
"current_thread",
"(",
")",
"is",
"self",
".",
"_event_thread",
":",
"# Handle special case to allow event handle to be detached.",
"# This is necessary since otherwise the event thread would deadlock itself.",
"grim_reaper",
"=",
"threading",
".",
"Thread",
"(",
"target",
"=",
"lambda",
":",
"_mpv_terminate_destroy",
"(",
"handle",
")",
")",
"grim_reaper",
".",
"start",
"(",
")",
"else",
":",
"_mpv_terminate_destroy",
"(",
"handle",
")",
"if",
"self",
".",
"_event_thread",
":",
"self",
".",
"_event_thread",
".",
"join",
"(",
")"
] | 52.214286 | 18.071429 |
def list_vpnservices(retrieve_all=True, profile=None, **kwargs):
    '''
    Fetches a list of all configured VPN services for a tenant

    CLI Example:

    .. code-block:: bash

        salt '*' neutron.list_vpnservices

    :param retrieve_all: True or False, default: True (Optional)
    :param profile: Profile to build on (Optional)
    :return: List of VPN service
    '''
    connection = _auth(profile)
    return connection.list_vpnservices(retrieve_all, **kwargs)
"def",
"list_vpnservices",
"(",
"retrieve_all",
"=",
"True",
",",
"profile",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"conn",
"=",
"_auth",
"(",
"profile",
")",
"return",
"conn",
".",
"list_vpnservices",
"(",
"retrieve_all",
",",
"*",
"*",
"kwargs",
")"
] | 28 | 24.125 |
def run(self, shell=True, cmdline=False, echo=True):
    """Run the configured FIO job over SSH.

    :param shell: run the remote command through a shell
    :param cmdline: when True, also print the constructed command line
    :param echo: forwarded to cij.ssh.command
    :return: 1 when the environment check fails, otherwise the command status
    """
    if env():
        return 1
    command = ["fio"] + self.__parse_parms()
    if cmdline:
        cij.emph("cij.fio.run: shell: %r, cmd: %r" % (shell, command))
    return cij.ssh.command(command, shell, echo)
"def",
"run",
"(",
"self",
",",
"shell",
"=",
"True",
",",
"cmdline",
"=",
"False",
",",
"echo",
"=",
"True",
")",
":",
"if",
"env",
"(",
")",
":",
"return",
"1",
"cmd",
"=",
"[",
"\"fio\"",
"]",
"+",
"self",
".",
"__parse_parms",
"(",
")",
"if",
"cmdline",
":",
"cij",
".",
"emph",
"(",
"\"cij.fio.run: shell: %r, cmd: %r\"",
"%",
"(",
"shell",
",",
"cmd",
")",
")",
"return",
"cij",
".",
"ssh",
".",
"command",
"(",
"cmd",
",",
"shell",
",",
"echo",
")"
] | 26.818182 | 21.636364 |
def new_conn(self):
    """
    Create a new ConnectionWrapper instance bound to this pool.

    :return: a freshly opened ConnectionWrapper built from this pool's
        stored connection arguments
    """
    # (A stray, duplicated empty ':return:' docstring statement that
    # followed the real docstring was a no-op and has been removed.)
    logger.debug("Opening new connection to rethinkdb with args=%s" % self._conn_args)
    return ConnectionWrapper(self._pool, **self._conn_args)
"def",
"new_conn",
"(",
"self",
")",
":",
"\"\"\"\n :return:\n \"\"\"",
"logger",
".",
"debug",
"(",
"\"Opening new connection to rethinkdb with args=%s\"",
"%",
"self",
".",
"_conn_args",
")",
"return",
"ConnectionWrapper",
"(",
"self",
".",
"_pool",
",",
"*",
"*",
"self",
".",
"_conn_args",
")"
] | 29.5 | 14.9 |
def identifier(self, mask: str = '##-##/##') -> str:
    """Generate a random identifier by mask.

    With this method you can generate any identifiers that
    you need. Simply select the mask that you need.

    :param mask:
        The mask. Here ``@`` is a placeholder for characters and ``#`` is
        placeholder for digits.
    :return: An identifier.

    :Example:
        07-97/04
    """
    code = self.random.custom_code(mask=mask)
    return code
"def",
"identifier",
"(",
"self",
",",
"mask",
":",
"str",
"=",
"'##-##/##'",
")",
"->",
"str",
":",
"return",
"self",
".",
"random",
".",
"custom_code",
"(",
"mask",
"=",
"mask",
")"
] | 31.8 | 19.466667 |
def record_entering(self, time, code, frame_key, parent_stats):
    """Entered to a function call.

    Stores the entry timestamp for the (code, frame) pair and bumps the
    hit counter on the corresponding child statistics node.
    """
    child_stats = parent_stats.ensure_child(code, RecordingStatistics)
    self._times_entered[(code, frame_key)] = time
    child_stats.own_hits += 1
"def",
"record_entering",
"(",
"self",
",",
"time",
",",
"code",
",",
"frame_key",
",",
"parent_stats",
")",
":",
"stats",
"=",
"parent_stats",
".",
"ensure_child",
"(",
"code",
",",
"RecordingStatistics",
")",
"self",
".",
"_times_entered",
"[",
"(",
"code",
",",
"frame_key",
")",
"]",
"=",
"time",
"stats",
".",
"own_hits",
"+=",
"1"
] | 50.4 | 15.4 |
def paste(**kwargs):
    """Return the system clipboard contents.

    ``kwargs`` is accepted for interface compatibility but unused.
    """
    window = Tk()
    window.withdraw()
    try:
        # selection_get presumably raises TclError when the clipboard is
        # empty -- callers should be prepared for that.
        return window.selection_get(selection='CLIPBOARD')
    finally:
        # Fix: the original created a hidden Tk root on every call and never
        # destroyed it, leaking a window (and Tcl interpreter) per call.
        window.destroy()
"def",
"paste",
"(",
"*",
"*",
"kwargs",
")",
":",
"window",
"=",
"Tk",
"(",
")",
"window",
".",
"withdraw",
"(",
")",
"d",
"=",
"window",
".",
"selection_get",
"(",
"selection",
"=",
"'CLIPBOARD'",
")",
"return",
"d"
] | 27.833333 | 17.166667 |
def stop(self):
    """
    Stops the service.

    Shutdown is best effort: the log close, the remote shutdown command,
    stream closing and process termination each swallow their expected
    failure modes so that stop() does not raise during normal teardown.
    """
    # Only close the log if it is a real file object we own -- PIPE and the
    # native devnull sentinel are not closable file objects.
    if self.log_file != PIPE and not (self.log_file == DEVNULL and _HAS_NATIVE_DEVNULL):
        try:
            self.log_file.close()
        except Exception:
            pass
    if self.process is None:
        return
    # Ask the service to shut itself down over its remote interface first.
    try:
        self.send_remote_shutdown_command()
    except TypeError:
        pass
    try:
        if self.process:
            # Close the subprocess pipes; a stream is None when not piped,
            # hence the AttributeError guard on .close().
            for stream in [self.process.stdin,
                           self.process.stdout,
                           self.process.stderr]:
                try:
                    stream.close()
                except AttributeError:
                    pass
            # Escalate: polite terminate, wait for exit, then hard kill.
            self.process.terminate()
            self.process.wait()
            self.process.kill()
            self.process = None
    except OSError:
        pass
"def",
"stop",
"(",
"self",
")",
":",
"if",
"self",
".",
"log_file",
"!=",
"PIPE",
"and",
"not",
"(",
"self",
".",
"log_file",
"==",
"DEVNULL",
"and",
"_HAS_NATIVE_DEVNULL",
")",
":",
"try",
":",
"self",
".",
"log_file",
".",
"close",
"(",
")",
"except",
"Exception",
":",
"pass",
"if",
"self",
".",
"process",
"is",
"None",
":",
"return",
"try",
":",
"self",
".",
"send_remote_shutdown_command",
"(",
")",
"except",
"TypeError",
":",
"pass",
"try",
":",
"if",
"self",
".",
"process",
":",
"for",
"stream",
"in",
"[",
"self",
".",
"process",
".",
"stdin",
",",
"self",
".",
"process",
".",
"stdout",
",",
"self",
".",
"process",
".",
"stderr",
"]",
":",
"try",
":",
"stream",
".",
"close",
"(",
")",
"except",
"AttributeError",
":",
"pass",
"self",
".",
"process",
".",
"terminate",
"(",
")",
"self",
".",
"process",
".",
"wait",
"(",
")",
"self",
".",
"process",
".",
"kill",
"(",
")",
"self",
".",
"process",
"=",
"None",
"except",
"OSError",
":",
"pass"
] | 27.757576 | 16.181818 |
def afterContext(self):
    """Pop my mod stack and restore sys.modules to the state
    it was in when mod stack was pushed.
    """
    snapshot = self._mod_stack.pop()
    # Anything imported since the snapshot was taken gets evicted.
    extra = [name for name in sys.modules.keys() if name not in snapshot]
    if extra:
        log.debug('removing sys modules entries: %s', extra)
        for name in extra:
            del sys.modules[name]
    sys.modules.update(snapshot)
"def",
"afterContext",
"(",
"self",
")",
":",
"mods",
"=",
"self",
".",
"_mod_stack",
".",
"pop",
"(",
")",
"to_del",
"=",
"[",
"m",
"for",
"m",
"in",
"sys",
".",
"modules",
".",
"keys",
"(",
")",
"if",
"m",
"not",
"in",
"mods",
"]",
"if",
"to_del",
":",
"log",
".",
"debug",
"(",
"'removing sys modules entries: %s'",
",",
"to_del",
")",
"for",
"mod",
"in",
"to_del",
":",
"del",
"sys",
".",
"modules",
"[",
"mod",
"]",
"sys",
".",
"modules",
".",
"update",
"(",
"mods",
")"
] | 38.727273 | 11 |
def block(self, **kwargs):
    """Block the user.

    Args:
        **kwargs: Extra options to send to the server (e.g. sudo)

    Raises:
        GitlabAuthenticationError: If authentication is not correct
        GitlabBlockError: If the user could not be blocked

    Returns:
        bool: Whether the user status has been changed
    """
    server_data = self.manager.gitlab.http_post('/users/%s/block' % self.id, **kwargs)
    if server_data is True:
        # Mirror the new state locally so the cached attributes stay accurate.
        self._attrs['state'] = 'blocked'
    return server_data
"def",
"block",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"path",
"=",
"'/users/%s/block'",
"%",
"self",
".",
"id",
"server_data",
"=",
"self",
".",
"manager",
".",
"gitlab",
".",
"http_post",
"(",
"path",
",",
"*",
"*",
"kwargs",
")",
"if",
"server_data",
"is",
"True",
":",
"self",
".",
"_attrs",
"[",
"'state'",
"]",
"=",
"'blocked'",
"return",
"server_data"
] | 32.055556 | 20.333333 |
def convert_dense(net, node, module, builder):
    """Convert a fully-connected (dense) layer from mxnet to coreml.

    Parameters
    ----------
    net: network
        A mxnet network object.
    node: layer
        Node to convert.
    module: module
        An module for MXNet
    builder: NeuralNetworkBuilder
        A neural network builder object.
    """
    input_name, output_name = _get_input_output_name(net, node)
    has_bias = True
    layer_name = node['name']
    layer_inputs = node['inputs']
    params, _ = module.get_params()
    # inputs[1] is the weight symbol, inputs[2] the bias symbol.
    weights = params[_get_node_name(net, layer_inputs[1][0])].asnumpy()
    bias = params[_get_node_name(net, layer_inputs[2][0])].asnumpy() if has_bias else None
    out_channels, in_channels = weights.shape
    builder.add_inner_product(
        name=layer_name,
        W=weights,
        b=bias,
        input_channels=in_channels,
        output_channels=out_channels,
        has_bias=has_bias,
        input_name=input_name,
        output_name=output_name
    )
"def",
"convert_dense",
"(",
"net",
",",
"node",
",",
"module",
",",
"builder",
")",
":",
"input_name",
",",
"output_name",
"=",
"_get_input_output_name",
"(",
"net",
",",
"node",
")",
"has_bias",
"=",
"True",
"name",
"=",
"node",
"[",
"'name'",
"]",
"inputs",
"=",
"node",
"[",
"'inputs'",
"]",
"args",
",",
"_",
"=",
"module",
".",
"get_params",
"(",
")",
"W",
"=",
"args",
"[",
"_get_node_name",
"(",
"net",
",",
"inputs",
"[",
"1",
"]",
"[",
"0",
"]",
")",
"]",
".",
"asnumpy",
"(",
")",
"if",
"has_bias",
":",
"Wb",
"=",
"args",
"[",
"_get_node_name",
"(",
"net",
",",
"inputs",
"[",
"2",
"]",
"[",
"0",
"]",
")",
"]",
".",
"asnumpy",
"(",
")",
"else",
":",
"Wb",
"=",
"None",
"nC",
",",
"nB",
"=",
"W",
".",
"shape",
"builder",
".",
"add_inner_product",
"(",
"name",
"=",
"name",
",",
"W",
"=",
"W",
",",
"b",
"=",
"Wb",
",",
"input_channels",
"=",
"nB",
",",
"output_channels",
"=",
"nC",
",",
"has_bias",
"=",
"has_bias",
",",
"input_name",
"=",
"input_name",
",",
"output_name",
"=",
"output_name",
")"
] | 22.475 | 20.35 |
def _difference(self, original_keys, updated_keys, name, item_index):
"""Calculate difference between the original and updated sets of keys.
Removed items will be removed from item_index, new items should have
been added by the discovery process. (?help or ?sensor-list)
This method is for use in inspect_requests and inspect_sensors only.
Returns
-------
(added, removed)
added : set of str
Names of the keys that were added
removed : set of str
Names of the keys that were removed
"""
original_keys = set(original_keys)
updated_keys = set(updated_keys)
added_keys = updated_keys.difference(original_keys)
removed_keys = set()
if name is None:
removed_keys = original_keys.difference(updated_keys)
elif name not in updated_keys and name in original_keys:
removed_keys = set([name])
for key in removed_keys:
if key in item_index:
del(item_index[key])
# Check the keys that was not added now or not lined up for removal,
# and see if they changed.
for key in updated_keys.difference(added_keys.union(removed_keys)):
if item_index[key].get('_changed'):
item_index[key]['_changed'] = False
removed_keys.add(key)
added_keys.add(key)
return added_keys, removed_keys | [
"def",
"_difference",
"(",
"self",
",",
"original_keys",
",",
"updated_keys",
",",
"name",
",",
"item_index",
")",
":",
"original_keys",
"=",
"set",
"(",
"original_keys",
")",
"updated_keys",
"=",
"set",
"(",
"updated_keys",
")",
"added_keys",
"=",
"updated_keys",
".",
"difference",
"(",
"original_keys",
")",
"removed_keys",
"=",
"set",
"(",
")",
"if",
"name",
"is",
"None",
":",
"removed_keys",
"=",
"original_keys",
".",
"difference",
"(",
"updated_keys",
")",
"elif",
"name",
"not",
"in",
"updated_keys",
"and",
"name",
"in",
"original_keys",
":",
"removed_keys",
"=",
"set",
"(",
"[",
"name",
"]",
")",
"for",
"key",
"in",
"removed_keys",
":",
"if",
"key",
"in",
"item_index",
":",
"del",
"(",
"item_index",
"[",
"key",
"]",
")",
"# Check the keys that was not added now or not lined up for removal,",
"# and see if they changed.",
"for",
"key",
"in",
"updated_keys",
".",
"difference",
"(",
"added_keys",
".",
"union",
"(",
"removed_keys",
")",
")",
":",
"if",
"item_index",
"[",
"key",
"]",
".",
"get",
"(",
"'_changed'",
")",
":",
"item_index",
"[",
"key",
"]",
"[",
"'_changed'",
"]",
"=",
"False",
"removed_keys",
".",
"add",
"(",
"key",
")",
"added_keys",
".",
"add",
"(",
"key",
")",
"return",
"added_keys",
",",
"removed_keys"
] | 35.825 | 19.4 |
def create_authentication_string(username, password):
    '''
    Build a base64-encoded ``username:password`` credential string, with
    both parts UTF-8 encoded and percent-quoted first.

    :username: Username.
    :password: Password.
    :return: The encoded string.
    '''
    user_quoted = quote(username.encode('utf-8'))
    pw_quoted = quote(password.encode('utf-8'))
    credentials = user_quoted + ':' + pw_quoted
    return base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
"def",
"create_authentication_string",
"(",
"username",
",",
"password",
")",
":",
"username_utf8",
"=",
"username",
".",
"encode",
"(",
"'utf-8'",
")",
"userpw_utf8",
"=",
"password",
".",
"encode",
"(",
"'utf-8'",
")",
"username_perc",
"=",
"quote",
"(",
"username_utf8",
")",
"userpw_perc",
"=",
"quote",
"(",
"userpw_utf8",
")",
"authinfostring",
"=",
"username_perc",
"+",
"':'",
"+",
"userpw_perc",
"authinfostring_base64",
"=",
"base64",
".",
"b64encode",
"(",
"authinfostring",
".",
"encode",
"(",
"'utf-8'",
")",
")",
".",
"decode",
"(",
"'utf-8'",
")",
"return",
"authinfostring_base64"
] | 32.647059 | 20.647059 |
def comment_expression(self):
    """Fetch the comment-expression schema from the Youku open API.

    doc: http://open.youku.com/docs/doc?id=92
    """
    response = requests.get(
        'https://openapi.youku.com/v2/schemas/comment/expression.json')
    check_error(response)
    return response.json()
"def",
"comment_expression",
"(",
"self",
")",
":",
"url",
"=",
"'https://openapi.youku.com/v2/schemas/comment/expression.json'",
"r",
"=",
"requests",
".",
"get",
"(",
"url",
")",
"check_error",
"(",
"r",
")",
"return",
"r",
".",
"json",
"(",
")"
] | 34.571429 | 13.285714 |
def add_neighbor(self, edge: "Edge") -> None:
    """
    Adds a new neighbor to the node.

    Does nothing if the edge is ``None``, does not touch this node, or the
    neighbor is already registered (the graph is undirected, so both key
    orientations are checked). Dispatches a ``NeighborAddedEvent`` on success.

    Arguments:
        edge (Edge): The edge that would connect this node with its neighbor.
    """
    if edge is None or (edge.source != self and edge.target != self):
        return
    if edge.source == self:
        other: "Node" = edge.target
    elif edge.target == self:
        other: "Node" = edge.source
    else:
        raise ValueError("Tried to add a neighbor with an invalid edge.")
    # Fix: the annotation must be a subscription, Tuple[int, int];
    # Tuple(int, int) is a call and is not a valid type expression.
    edge_key: Tuple[int, int] = edge.key
    # The graph is considered undirected, check neighbor existence accordingly.
    if self._neighbors.get(edge_key) or self._neighbors.get((edge_key[1], edge_key[0])):
        return  # The neighbor is already added.
    self._neighbors[edge_key] = edge
    self.dispatch_event(NeighborAddedEvent(other))
"def",
"add_neighbor",
"(",
"self",
",",
"edge",
":",
"\"Edge\"",
")",
"->",
"None",
":",
"if",
"edge",
"is",
"None",
"or",
"(",
"edge",
".",
"source",
"!=",
"self",
"and",
"edge",
".",
"target",
"!=",
"self",
")",
":",
"return",
"if",
"edge",
".",
"source",
"==",
"self",
":",
"other",
":",
"Node",
"=",
"edge",
".",
"target",
"elif",
"edge",
".",
"target",
"==",
"self",
":",
"other",
":",
"Node",
"=",
"edge",
".",
"source",
"else",
":",
"raise",
"ValueError",
"(",
"\"Tried to add a neighbor with an invalid edge.\"",
")",
"edge_key",
":",
"Tuple",
"(",
"int",
",",
"int",
")",
"=",
"edge",
".",
"key",
"# The graph is considered undirected, check neighbor existence accordingly.\r",
"if",
"self",
".",
"_neighbors",
".",
"get",
"(",
"edge_key",
")",
"or",
"self",
".",
"_neighbors",
".",
"get",
"(",
"(",
"edge_key",
"[",
"1",
"]",
",",
"edge_key",
"[",
"0",
"]",
")",
")",
":",
"return",
"# The neighbor is already added.\r",
"self",
".",
"_neighbors",
"[",
"edge_key",
"]",
"=",
"edge",
"self",
".",
"dispatch_event",
"(",
"NeighborAddedEvent",
"(",
"other",
")",
")"
] | 36.56 | 21.28 |
def kmc(forward_in, database_name, min_occurrences=1, reverse_in='NA', k=31, cleanup=True,
        returncmd=False, tmpdir='tmp', **kwargs):
    """
    Runs kmc to count kmers.
    :param forward_in: Forward input reads. Assumed to be fastq.
    :param database_name: Name for output kmc database.
    :param min_occurrences: Minimum number of times kmer must be seen to be included in database.
    :param reverse_in: Reverse input reads. Automatically found from forward_in when possible.
    :param k: Kmer size. Default 31.
    :param cleanup: If true, deletes tmpdir that kmc needs.
    :param tmpdir: Temporary directory to store intermediary kmc files. Default tmp.
    :param returncmd: If true, will return the command used to call KMC as well as out and err.
    :param kwargs: Other kmc arguments in parameter='argument' format.
    :return: Stdout and stderr from kmc (plus the command when returncmd is True).
    """
    # Create the tmpdir kmc needs if it isn't already present.
    if not os.path.isdir(tmpdir):
        os.makedirs(tmpdir)
    options = kwargs_to_string(kwargs)
    # Auto-detect the mate file from the _R1/_R2 naming convention.
    if reverse_in == 'NA' and '_R1' in forward_in and os.path.isfile(forward_in.replace('_R1', '_R2')):
        reverse_in = forward_in.replace('_R1', '_R2')
    if reverse_in == 'NA':
        # Single-end input: hand the fastq straight to kmc.
        cmd = 'kmc -k{} -ci{} {} {} {} {}'.format(k, min_occurrences, options, forward_in, database_name, tmpdir)
    else:
        # Paired-end input: kmc reads both files from an @file-list.
        # (This replaces two previously duplicated branches.)
        filelist = os.path.join(tmpdir, 'filelist.txt')
        with open(filelist, 'w') as f:
            f.write(forward_in + '\n')
            f.write(reverse_in + '\n')
        cmd = 'kmc -k{} -ci{} {} @{} {} {}'.format(k, min_occurrences, options, filelist, database_name, tmpdir)
    out, err = accessoryfunctions.run_subprocess(cmd)
    if cleanup:
        shutil.rmtree(tmpdir)
    if returncmd:
        return out, err, cmd
    else:
        return out, err
"def",
"kmc",
"(",
"forward_in",
",",
"database_name",
",",
"min_occurrences",
"=",
"1",
",",
"reverse_in",
"=",
"'NA'",
",",
"k",
"=",
"31",
",",
"cleanup",
"=",
"True",
",",
"returncmd",
"=",
"False",
",",
"tmpdir",
"=",
"'tmp'",
",",
"*",
"*",
"kwargs",
")",
":",
"# Create the tmpdir kmc needs if it isn't already present.",
"if",
"not",
"os",
".",
"path",
".",
"isdir",
"(",
"tmpdir",
")",
":",
"os",
".",
"makedirs",
"(",
"tmpdir",
")",
"options",
"=",
"kwargs_to_string",
"(",
"kwargs",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"forward_in",
".",
"replace",
"(",
"'_R1'",
",",
"'_R2'",
")",
")",
"and",
"reverse_in",
"==",
"'NA'",
"and",
"'_R1'",
"in",
"forward_in",
":",
"reverse_in",
"=",
"forward_in",
".",
"replace",
"(",
"'_R1'",
",",
"'_R2'",
")",
"filelist",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'filelist.txt'",
")",
"with",
"open",
"(",
"filelist",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"forward_in",
"+",
"'\\n'",
")",
"f",
".",
"write",
"(",
"reverse_in",
"+",
"'\\n'",
")",
"cmd",
"=",
"'kmc -k{} -ci{} {} @{} {} {}'",
".",
"format",
"(",
"k",
",",
"min_occurrences",
",",
"options",
",",
"filelist",
",",
"database_name",
",",
"tmpdir",
")",
"elif",
"reverse_in",
"==",
"'NA'",
":",
"cmd",
"=",
"'kmc -k{} -ci{} {} {} {} {}'",
".",
"format",
"(",
"k",
",",
"min_occurrences",
",",
"options",
",",
"forward_in",
",",
"database_name",
",",
"tmpdir",
")",
"else",
":",
"filelist",
"=",
"os",
".",
"path",
".",
"join",
"(",
"tmpdir",
",",
"'filelist.txt'",
")",
"with",
"open",
"(",
"filelist",
",",
"'w'",
")",
"as",
"f",
":",
"f",
".",
"write",
"(",
"forward_in",
"+",
"'\\n'",
")",
"f",
".",
"write",
"(",
"reverse_in",
"+",
"'\\n'",
")",
"cmd",
"=",
"'kmc -k{} -ci{} {} @{} {} {}'",
".",
"format",
"(",
"k",
",",
"min_occurrences",
",",
"options",
",",
"filelist",
",",
"database_name",
",",
"tmpdir",
")",
"out",
",",
"err",
"=",
"accessoryfunctions",
".",
"run_subprocess",
"(",
"cmd",
")",
"if",
"cleanup",
":",
"shutil",
".",
"rmtree",
"(",
"tmpdir",
")",
"if",
"returncmd",
":",
"return",
"out",
",",
"err",
",",
"cmd",
"else",
":",
"return",
"out",
",",
"err"
] | 49.926829 | 21.878049 |
def read_namespaced_horizontal_pod_autoscaler_status(self, name, namespace, **kwargs):  # noqa: E501
    """read status of the specified HorizontalPodAutoscaler.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.read_namespaced_horizontal_pod_autoscaler_status(name, namespace, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str name: name of the HorizontalPodAutoscaler (required)
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param str pretty: If 'true', then the output is pretty printed.
    :return: V2beta1HorizontalPodAutoscaler
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the response body is wanted, never the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    delegate = self.read_namespaced_horizontal_pod_autoscaler_status_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous call: the delegate returns the request thread.
        return delegate(name, namespace, **kwargs)
    return delegate(name, namespace, **kwargs)
"def",
"read_namespaced_horizontal_pod_autoscaler_status",
"(",
"self",
",",
"name",
",",
"namespace",
",",
"*",
"*",
"kwargs",
")",
":",
"# noqa: E501",
"kwargs",
"[",
"'_return_http_data_only'",
"]",
"=",
"True",
"if",
"kwargs",
".",
"get",
"(",
"'async_req'",
")",
":",
"return",
"self",
".",
"read_namespaced_horizontal_pod_autoscaler_status_with_http_info",
"(",
"name",
",",
"namespace",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"else",
":",
"(",
"data",
")",
"=",
"self",
".",
"read_namespaced_horizontal_pod_autoscaler_status_with_http_info",
"(",
"name",
",",
"namespace",
",",
"*",
"*",
"kwargs",
")",
"# noqa: E501",
"return",
"data"
] | 57.695652 | 30.304348 |
def gen_radio_view(sig_dic):
    '''
    Render the read-only (view) HTML fragment for a radio/checkbox field.
    '''
    # Header row carries the field's Chinese label.
    html = '''
    <div class="col-sm-4"><span class="des">{0}</span></div>
    <div class="col-sm-8">
    '''.format(sig_dic['zh'])
    options = sig_dic['dic']
    for opt_key in options.keys():
        # One template guard per option; only the stored value renders.
        snippet = '''<span class="input_text">
        {{% if '{0}' in postinfo.extinfo and postinfo.extinfo['{0}'] == "{1}" %}}
        {2}
        {{% end %}}
        </span>'''.format(sig_dic['en'], opt_key, options[opt_key])
        html += snippet
    html += '''</div>'''
    return html
"def",
"gen_radio_view",
"(",
"sig_dic",
")",
":",
"view_zuoxiang",
"=",
"'''\n <div class=\"col-sm-4\"><span class=\"des\">{0}</span></div>\n <div class=\"col-sm-8\">\n '''",
".",
"format",
"(",
"sig_dic",
"[",
"'zh'",
"]",
")",
"dic_tmp",
"=",
"sig_dic",
"[",
"'dic'",
"]",
"for",
"key",
"in",
"dic_tmp",
".",
"keys",
"(",
")",
":",
"tmp_str",
"=",
"'''<span class=\"input_text\">\n {{% if '{0}' in postinfo.extinfo and postinfo.extinfo['{0}'] == \"{1}\" %}}\n {2}\n {{% end %}}\n </span>'''",
".",
"format",
"(",
"sig_dic",
"[",
"'en'",
"]",
",",
"key",
",",
"dic_tmp",
"[",
"key",
"]",
")",
"view_zuoxiang",
"+=",
"tmp_str",
"view_zuoxiang",
"+=",
"'''</div>'''",
"return",
"view_zuoxiang"
] | 28.2 | 20.7 |
def generate(converter, input_file, format='xml', encoding='utf8'):
    """
    Convert the contents of *input_file* using a compiled converter.

    :type  converter: compiler.Context
    :param converter: The compiled converter (as returned by compile()).
    :type  input_file: str
    :param input_file: Name of a file to convert.
    :type  format: str
    :param format: Output format: 'xml', 'yaml', 'json', or 'none'.
    :type  encoding: str
    :param encoding: Character encoding of the input file.
    :rtype:  str
    :return: The resulting output.
    """
    with codecs.open(input_file, encoding=encoding) as handle:
        return generate_string(converter, handle.read(), format=format)
"def",
"generate",
"(",
"converter",
",",
"input_file",
",",
"format",
"=",
"'xml'",
",",
"encoding",
"=",
"'utf8'",
")",
":",
"with",
"codecs",
".",
"open",
"(",
"input_file",
",",
"encoding",
"=",
"encoding",
")",
"as",
"thefile",
":",
"return",
"generate_string",
"(",
"converter",
",",
"thefile",
".",
"read",
"(",
")",
",",
"format",
"=",
"format",
")"
] | 38.6 | 18.2 |
def _process_glsl_template(template, colors):
"""Replace $color_i by color #i in the GLSL template."""
for i in range(len(colors) - 1, -1, -1):
color = colors[i]
assert len(color) == 4
vec4_color = 'vec4(%.3f, %.3f, %.3f, %.3f)' % tuple(color)
template = template.replace('$color_%d' % i, vec4_color)
return template | [
"def",
"_process_glsl_template",
"(",
"template",
",",
"colors",
")",
":",
"for",
"i",
"in",
"range",
"(",
"len",
"(",
"colors",
")",
"-",
"1",
",",
"-",
"1",
",",
"-",
"1",
")",
":",
"color",
"=",
"colors",
"[",
"i",
"]",
"assert",
"len",
"(",
"color",
")",
"==",
"4",
"vec4_color",
"=",
"'vec4(%.3f, %.3f, %.3f, %.3f)'",
"%",
"tuple",
"(",
"color",
")",
"template",
"=",
"template",
".",
"replace",
"(",
"'$color_%d'",
"%",
"i",
",",
"vec4_color",
")",
"return",
"template"
] | 44.125 | 13.125 |
def get_language_progress(self, lang):
    """Return a dict describing the user's progression in *lang*."""
    # Make sure the requested language is the active one first.
    if not self._is_current_language(lang):
        self._switch_language(lang)
    progress_fields = ['streak', 'language_string', 'level_progress',
                       'num_skills_learned', 'level_percent', 'level_points',
                       'points_rank', 'next_level', 'level_left', 'language',
                       'points', 'fluency_score', 'level']
    return self._make_dict(progress_fields, self.user_data.language_data[lang])
"def",
"get_language_progress",
"(",
"self",
",",
"lang",
")",
":",
"if",
"not",
"self",
".",
"_is_current_language",
"(",
"lang",
")",
":",
"self",
".",
"_switch_language",
"(",
"lang",
")",
"fields",
"=",
"[",
"'streak'",
",",
"'language_string'",
",",
"'level_progress'",
",",
"'num_skills_learned'",
",",
"'level_percent'",
",",
"'level_points'",
",",
"'points_rank'",
",",
"'next_level'",
",",
"'level_left'",
",",
"'language'",
",",
"'points'",
",",
"'fluency_score'",
",",
"'level'",
"]",
"return",
"self",
".",
"_make_dict",
"(",
"fields",
",",
"self",
".",
"user_data",
".",
"language_data",
"[",
"lang",
"]",
")"
] | 48.090909 | 20.454545 |
def load(input_filename):
    '''Read an image file with Pillow into an RGB numpy array; also return
    the image DPI in x and y as a tuple (defaults to (300, 300)).'''
    try:
        image = Image.open(input_filename)
    except IOError:
        # Unreadable file: warn on stderr and signal failure to the caller.
        sys.stderr.write('warning: error opening {}\n'.format(
            input_filename))
        return None, None
    if image.mode != 'RGB':
        image = image.convert('RGB')
    dpi = image.info.get('dpi', (300, 300))
    return np.array(image), dpi
"def",
"load",
"(",
"input_filename",
")",
":",
"try",
":",
"pil_img",
"=",
"Image",
".",
"open",
"(",
"input_filename",
")",
"except",
"IOError",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'warning: error opening {}\\n'",
".",
"format",
"(",
"input_filename",
")",
")",
"return",
"None",
",",
"None",
"if",
"pil_img",
".",
"mode",
"!=",
"'RGB'",
":",
"pil_img",
"=",
"pil_img",
".",
"convert",
"(",
"'RGB'",
")",
"if",
"'dpi'",
"in",
"pil_img",
".",
"info",
":",
"dpi",
"=",
"pil_img",
".",
"info",
"[",
"'dpi'",
"]",
"else",
":",
"dpi",
"=",
"(",
"300",
",",
"300",
")",
"img",
"=",
"np",
".",
"array",
"(",
"pil_img",
")",
"return",
"img",
",",
"dpi"
] | 23.304348 | 22 |
def move_up(lines=1, file=sys.stdout):
    """ Move the cursor up a number of lines.

        Esc[ValueA:
        Moves the cursor up by the specified number of lines without changing
        columns. If the cursor is already on the top line, ANSI.SYS ignores
        this sequence.
    """
    escape_seq = move.up(lines)
    escape_seq.write(file=file)
"def",
"move_up",
"(",
"lines",
"=",
"1",
",",
"file",
"=",
"sys",
".",
"stdout",
")",
":",
"move",
".",
"up",
"(",
"lines",
")",
".",
"write",
"(",
"file",
"=",
"file",
")"
] | 35.333333 | 17.555556 |
async def createAnswer(self):
    """
    Create an SDP answer to an offer received from a remote peer during
    the offer/answer negotiation of a WebRTC connection.

    :rtype: :class:`RTCSessionDescription`
    :raises InvalidStateError: if the connection is closed or the current
        signaling state does not allow creating an answer.
    """
    # check state is valid
    self.__assertNotClosed()
    # An answer is only legal after a remote offer (or a local pranswer).
    if self.signalingState not in ['have-remote-offer', 'have-local-pranswer']:
        raise InvalidStateError('Cannot create answer in signaling state "%s"' %
                                self.signalingState)
    # create description
    # NTP seconds serve as the SDP session id/version in the o= line.
    ntp_seconds = clock.current_ntp_time() >> 32
    description = sdp.SessionDescription()
    description.origin = '- %d %d IN IP4 0.0.0.0' % (ntp_seconds, ntp_seconds)
    # Advertise WebRTC Media Streams (WMS) semantics for all streams ('*').
    description.msid_semantic.append(sdp.GroupDescription(
        semantic='WMS',
        items=['*']))
    description.type = 'answer'
    # Mirror the remote offer's media sections in order: audio/video map to
    # the matching transceiver (looked up by mid); anything else is treated
    # as the SCTP (data channel) section.
    for remote_m in self.__remoteDescription().media:
        if remote_m.kind in ['audio', 'video']:
            transceiver = self.__getTransceiverByMid(remote_m.rtp.muxId)
            description.media.append(create_media_description_for_transceiver(
                transceiver,
                cname=self.__cname,
                # The answered direction is constrained by what was offered.
                direction=and_direction(transceiver.direction, transceiver._offerDirection),
                mid=transceiver.mid))
        else:
            description.media.append(create_media_description_for_sctp(
                self.__sctp, legacy=self._sctpLegacySdp, mid=self.__sctp.mid))
    # BUNDLE every media section over a single transport.
    bundle = sdp.GroupDescription(semantic='BUNDLE', items=[])
    for media in description.media:
        bundle.items.append(media.rtp.muxId)
    description.group.append(bundle)
    return wrap_session_description(description)
"async",
"def",
"createAnswer",
"(",
"self",
")",
":",
"# check state is valid",
"self",
".",
"__assertNotClosed",
"(",
")",
"if",
"self",
".",
"signalingState",
"not",
"in",
"[",
"'have-remote-offer'",
",",
"'have-local-pranswer'",
"]",
":",
"raise",
"InvalidStateError",
"(",
"'Cannot create answer in signaling state \"%s\"'",
"%",
"self",
".",
"signalingState",
")",
"# create description",
"ntp_seconds",
"=",
"clock",
".",
"current_ntp_time",
"(",
")",
">>",
"32",
"description",
"=",
"sdp",
".",
"SessionDescription",
"(",
")",
"description",
".",
"origin",
"=",
"'- %d %d IN IP4 0.0.0.0'",
"%",
"(",
"ntp_seconds",
",",
"ntp_seconds",
")",
"description",
".",
"msid_semantic",
".",
"append",
"(",
"sdp",
".",
"GroupDescription",
"(",
"semantic",
"=",
"'WMS'",
",",
"items",
"=",
"[",
"'*'",
"]",
")",
")",
"description",
".",
"type",
"=",
"'answer'",
"for",
"remote_m",
"in",
"self",
".",
"__remoteDescription",
"(",
")",
".",
"media",
":",
"if",
"remote_m",
".",
"kind",
"in",
"[",
"'audio'",
",",
"'video'",
"]",
":",
"transceiver",
"=",
"self",
".",
"__getTransceiverByMid",
"(",
"remote_m",
".",
"rtp",
".",
"muxId",
")",
"description",
".",
"media",
".",
"append",
"(",
"create_media_description_for_transceiver",
"(",
"transceiver",
",",
"cname",
"=",
"self",
".",
"__cname",
",",
"direction",
"=",
"and_direction",
"(",
"transceiver",
".",
"direction",
",",
"transceiver",
".",
"_offerDirection",
")",
",",
"mid",
"=",
"transceiver",
".",
"mid",
")",
")",
"else",
":",
"description",
".",
"media",
".",
"append",
"(",
"create_media_description_for_sctp",
"(",
"self",
".",
"__sctp",
",",
"legacy",
"=",
"self",
".",
"_sctpLegacySdp",
",",
"mid",
"=",
"self",
".",
"__sctp",
".",
"mid",
")",
")",
"bundle",
"=",
"sdp",
".",
"GroupDescription",
"(",
"semantic",
"=",
"'BUNDLE'",
",",
"items",
"=",
"[",
"]",
")",
"for",
"media",
"in",
"description",
".",
"media",
":",
"bundle",
".",
"items",
".",
"append",
"(",
"media",
".",
"rtp",
".",
"muxId",
")",
"description",
".",
"group",
".",
"append",
"(",
"bundle",
")",
"return",
"wrap_session_description",
"(",
"description",
")"
] | 44.175 | 20.975 |
def postag(
    X,
    ax=None,
    tagset="penn_treebank",
    colormap=None,
    colors=None,
    frequency=False,
    **kwargs
):
    """
    Quick method: display a barchart with the counts of different parts of
    speech in X, a part-of-speech-tagged corpus given as a list (or
    generator) of documents, each a list of sentences of (token, tag) tuples.

    Parameters
    ----------
    X : list or generator
        Documents containing lists of sentences of (token, tag) tuples.
    ax : matplotlib axes
        The axes to plot the figure on.
    tagset: string
        Tagset used for tagging: "penn_treebank" (default) or "universal"
        (use "universal" if the corpus was tagged with SpaCy).
    colors : list or tuple of colors
        Specify the colors for each individual part-of-speech.
    colormap : string or matplotlib cmap
        Specify a colormap to color the parts-of-speech.
    frequency: bool {True, False}, default: False
        If True, tags are plotted from most to least frequent.
    kwargs : dict
        Additional keyword arguments passed to the PosTagVisualizer.

    Returns
    -------
    ax : matplotlib axes
        Returns the axes on which the PosTagVisualizer was drawn.
    """
    # Build the visualizer, fit it (which draws), and hand it back.
    viz = PosTagVisualizer(
        ax=ax,
        tagset=tagset,
        colors=colors,
        colormap=colormap,
        frequency=frequency,
        **kwargs
    )
    viz.fit(X, **kwargs)
    return viz
"def",
"postag",
"(",
"X",
",",
"ax",
"=",
"None",
",",
"tagset",
"=",
"\"penn_treebank\"",
",",
"colormap",
"=",
"None",
",",
"colors",
"=",
"None",
",",
"frequency",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"# Instantiate the visualizer",
"visualizer",
"=",
"PosTagVisualizer",
"(",
"ax",
"=",
"ax",
",",
"tagset",
"=",
"tagset",
",",
"colors",
"=",
"colors",
",",
"colormap",
"=",
"colormap",
",",
"frequency",
"=",
"frequency",
",",
"*",
"*",
"kwargs",
")",
"# Fit and transform the visualizer (calls draw)",
"visualizer",
".",
"fit",
"(",
"X",
",",
"*",
"*",
"kwargs",
")",
"# Return the axes object on the visualizer",
"return",
"visualizer"
] | 33 | 21.264151 |
def upgrade(refresh=True):
    '''
    Upgrade all of the packages to the latest available version.

    Returns a dict containing the changes::

        {'<package>': {'old': '<old-version>',
                       'new': '<new-version>'}}

    CLI Example:

    .. code-block:: bash

        salt '*' pkgutil.upgrade
    '''
    if salt.utils.data.is_true(refresh):
        refresh_db()
    previous = list_pkgs()
    # pkgutil -yu installs/upgrades everything it manages.
    __salt__['cmd.run_all']('/opt/csw/bin/pkgutil -yu')
    # Drop the cached package list so the next query re-reads it.
    __context__.pop('pkg.list_pkgs', None)
    current = list_pkgs()
    return salt.utils.data.compare_dicts(previous, current)
"def",
"upgrade",
"(",
"refresh",
"=",
"True",
")",
":",
"if",
"salt",
".",
"utils",
".",
"data",
".",
"is_true",
"(",
"refresh",
")",
":",
"refresh_db",
"(",
")",
"old",
"=",
"list_pkgs",
"(",
")",
"# Install or upgrade the package",
"# If package is already installed",
"cmd",
"=",
"'/opt/csw/bin/pkgutil -yu'",
"__salt__",
"[",
"'cmd.run_all'",
"]",
"(",
"cmd",
")",
"__context__",
".",
"pop",
"(",
"'pkg.list_pkgs'",
",",
"None",
")",
"new",
"=",
"list_pkgs",
"(",
")",
"return",
"salt",
".",
"utils",
".",
"data",
".",
"compare_dicts",
"(",
"old",
",",
"new",
")"
] | 23.962963 | 19.888889 |
def arguments_from_optionable(parser, component, prefix=""):
    """ Register every non-hidden option of an :class:`Optionable` component
    as an argparse argument on *parser*.

    The per-component values can later be recovered from the parsed
    namespace with :func:`get_config_for` and passed back to the component.

    :param parser: the :class:`argparse.ArgumentParser` to populate.
    :param component: the :class:`Optionable` whose options are registered.
    :param prefix: optional prefix prepended to each argument name.
    """
    for opt_name in component.options:
        # Hidden options are internal and must not surface on the CLI.
        if component.options[opt_name].hidden:
            continue
        argument_from_option(parser, component, opt_name, prefix=prefix)
"def",
"arguments_from_optionable",
"(",
"parser",
",",
"component",
",",
"prefix",
"=",
"\"\"",
")",
":",
"for",
"option",
"in",
"component",
".",
"options",
":",
"if",
"component",
".",
"options",
"[",
"option",
"]",
".",
"hidden",
":",
"continue",
"argument_from_option",
"(",
"parser",
",",
"component",
",",
"option",
",",
"prefix",
"=",
"prefix",
")"
] | 38.022727 | 19.363636 |
def from_url(url, format=None):
    """
    Returns the crs object from a string interpreted as a specified format,
    located at a given url site.

    Arguments:

    - *url*: The url where the crs string is to be read from.
    - *format* (optional): Which format to parse the crs string as. One of
      "ogc wkt", "esri wkt", or "proj4". If None, tries to autodetect the
      format for you (default).

    Returns:

    - CRS object.
    """
    raw = urllib2.urlopen(url).read()
    if PY3 is True:
        # urlopen returns bytes on Python 3; parsing needs text.
        raw = raw.decode('utf-8')
    if format:
        # User specified the format: look up the matching parser.
        loader = parse.__getattr__("from_%s" % format.lower().replace(" ", "_"))
    else:
        # Unknown format: let the parser autodetect.
        loader = parse.from_unknown_text
    return loader(raw)
"def",
"from_url",
"(",
"url",
",",
"format",
"=",
"None",
")",
":",
"# first get string from url",
"string",
"=",
"urllib2",
".",
"urlopen",
"(",
"url",
")",
".",
"read",
"(",
")",
"if",
"PY3",
"is",
"True",
":",
"# decode str into string",
"string",
"=",
"string",
".",
"decode",
"(",
"'utf-8'",
")",
"# then determine parser",
"if",
"format",
":",
"# user specified format",
"format",
"=",
"format",
".",
"lower",
"(",
")",
".",
"replace",
"(",
"\" \"",
",",
"\"_\"",
")",
"func",
"=",
"parse",
".",
"__getattr__",
"(",
"\"from_%s\"",
"%",
"format",
")",
"else",
":",
"# unknown format",
"func",
"=",
"parse",
".",
"from_unknown_text",
"# then load",
"crs",
"=",
"func",
"(",
"string",
")",
"return",
"crs"
] | 26.939394 | 23.30303 |
def _serialize_function(obj):
"""
Still needing this much try-except stuff. We should find a way to get rid of this.
:param obj:
:return:
"""
try:
obj = inspect.getsource(obj)
except (TypeError, IOError):
try:
obj = marshal.dumps(obj)
except ValueError:
if hasattr(obj, '__dict__'):
obj = _serialize_dict(obj.__dict__)
return obj | [
"def",
"_serialize_function",
"(",
"obj",
")",
":",
"try",
":",
"obj",
"=",
"inspect",
".",
"getsource",
"(",
"obj",
")",
"except",
"(",
"TypeError",
",",
"IOError",
")",
":",
"try",
":",
"obj",
"=",
"marshal",
".",
"dumps",
"(",
"obj",
")",
"except",
"ValueError",
":",
"if",
"hasattr",
"(",
"obj",
",",
"'__dict__'",
")",
":",
"obj",
"=",
"_serialize_dict",
"(",
"obj",
".",
"__dict__",
")",
"return",
"obj"
] | 27.4 | 15.8 |
def derived_contracts(self):
    '''
    list(Contract): Return the list of contracts derived from self
    '''
    # A contract is "derived" when self appears in its inheritance chain.
    return [c for c in self.slither.contracts if self in c.inheritance]
"def",
"derived_contracts",
"(",
"self",
")",
":",
"candidates",
"=",
"self",
".",
"slither",
".",
"contracts",
"return",
"[",
"c",
"for",
"c",
"in",
"candidates",
"if",
"self",
"in",
"c",
".",
"inheritance",
"]"
] | 38.333333 | 21.666667 |
def ssh_interface(vm_):
    '''
    Return the ssh_interface type to connect to. Either 'public_ips' (default)
    or 'private_ips'.
    '''
    interface = config.get_cloud_config_value(
        'ssh_interface', vm_, __opts__, default='public_ips',
        search_global=False
    )
    # Anything outside the two allowed values falls back to the default.
    if interface not in ('public_ips', 'private_ips'):
        log.warning(
            'Invalid ssh_interface: %s. '
            'Allowed options are ("public_ips", "private_ips"). '
            'Defaulting to "public_ips".', interface
        )
        interface = 'public_ips'
    return interface
"def",
"ssh_interface",
"(",
"vm_",
")",
":",
"ret",
"=",
"config",
".",
"get_cloud_config_value",
"(",
"'ssh_interface'",
",",
"vm_",
",",
"__opts__",
",",
"default",
"=",
"'public_ips'",
",",
"search_global",
"=",
"False",
")",
"if",
"ret",
"not",
"in",
"(",
"'public_ips'",
",",
"'private_ips'",
")",
":",
"log",
".",
"warning",
"(",
"'Invalid ssh_interface: %s. '",
"'Allowed options are (\"public_ips\", \"private_ips\"). '",
"'Defaulting to \"public_ips\".'",
",",
"ret",
")",
"ret",
"=",
"'public_ips'",
"return",
"ret"
] | 31.647059 | 20 |
def get_pin_and_cookie_name(app):
"""Given an application object this returns a semi-stable 9 digit pin
code and a random key. The hope is that this is stable between
restarts to not make debugging particularly frustrating. If the pin
was forcefully disabled this returns `None`.
Second item in the resulting tuple is the cookie name for remembering.
"""
pin = os.environ.get("WERKZEUG_DEBUG_PIN")
rv = None
num = None
# Pin was explicitly disabled
if pin == "off":
return None, None
# Pin was provided explicitly
if pin is not None and pin.replace("-", "").isdigit():
# If there are separators in the pin, return it directly
if "-" in pin:
rv = pin
else:
num = pin
modname = getattr(app, "__module__", getattr(app.__class__, "__module__"))
try:
# getuser imports the pwd module, which does not exist in Google
# App Engine. It may also raise a KeyError if the UID does not
# have a username, such as in Docker.
username = getpass.getuser()
except (ImportError, KeyError):
username = None
mod = sys.modules.get(modname)
# This information only exists to make the cookie unique on the
# computer, not as a security feature.
probably_public_bits = [
username,
modname,
getattr(app, "__name__", getattr(app.__class__, "__name__")),
getattr(mod, "__file__", None),
]
# This information is here to make it harder for an attacker to
# guess the cookie name. They are unlikely to be contained anywhere
# within the unauthenticated debug page.
private_bits = [str(uuid.getnode()), get_machine_id()]
h = hashlib.md5()
for bit in chain(probably_public_bits, private_bits):
if not bit:
continue
if isinstance(bit, text_type):
bit = bit.encode("utf-8")
h.update(bit)
h.update(b"cookiesalt")
cookie_name = "__wzd" + h.hexdigest()[:20]
# If we need to generate a pin we salt it a bit more so that we don't
# end up with the same value and generate out 9 digits
if num is None:
h.update(b"pinsalt")
num = ("%09d" % int(h.hexdigest(), 16))[:9]
# Format the pincode in groups of digits for easier remembering if
# we don't have a result yet.
if rv is None:
for group_size in 5, 4, 3:
if len(num) % group_size == 0:
rv = "-".join(
num[x : x + group_size].rjust(group_size, "0")
for x in range(0, len(num), group_size)
)
break
else:
rv = num
return rv, cookie_name | [
"def",
"get_pin_and_cookie_name",
"(",
"app",
")",
":",
"pin",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"\"WERKZEUG_DEBUG_PIN\"",
")",
"rv",
"=",
"None",
"num",
"=",
"None",
"# Pin was explicitly disabled",
"if",
"pin",
"==",
"\"off\"",
":",
"return",
"None",
",",
"None",
"# Pin was provided explicitly",
"if",
"pin",
"is",
"not",
"None",
"and",
"pin",
".",
"replace",
"(",
"\"-\"",
",",
"\"\"",
")",
".",
"isdigit",
"(",
")",
":",
"# If there are separators in the pin, return it directly",
"if",
"\"-\"",
"in",
"pin",
":",
"rv",
"=",
"pin",
"else",
":",
"num",
"=",
"pin",
"modname",
"=",
"getattr",
"(",
"app",
",",
"\"__module__\"",
",",
"getattr",
"(",
"app",
".",
"__class__",
",",
"\"__module__\"",
")",
")",
"try",
":",
"# getuser imports the pwd module, which does not exist in Google",
"# App Engine. It may also raise a KeyError if the UID does not",
"# have a username, such as in Docker.",
"username",
"=",
"getpass",
".",
"getuser",
"(",
")",
"except",
"(",
"ImportError",
",",
"KeyError",
")",
":",
"username",
"=",
"None",
"mod",
"=",
"sys",
".",
"modules",
".",
"get",
"(",
"modname",
")",
"# This information only exists to make the cookie unique on the",
"# computer, not as a security feature.",
"probably_public_bits",
"=",
"[",
"username",
",",
"modname",
",",
"getattr",
"(",
"app",
",",
"\"__name__\"",
",",
"getattr",
"(",
"app",
".",
"__class__",
",",
"\"__name__\"",
")",
")",
",",
"getattr",
"(",
"mod",
",",
"\"__file__\"",
",",
"None",
")",
",",
"]",
"# This information is here to make it harder for an attacker to",
"# guess the cookie name. They are unlikely to be contained anywhere",
"# within the unauthenticated debug page.",
"private_bits",
"=",
"[",
"str",
"(",
"uuid",
".",
"getnode",
"(",
")",
")",
",",
"get_machine_id",
"(",
")",
"]",
"h",
"=",
"hashlib",
".",
"md5",
"(",
")",
"for",
"bit",
"in",
"chain",
"(",
"probably_public_bits",
",",
"private_bits",
")",
":",
"if",
"not",
"bit",
":",
"continue",
"if",
"isinstance",
"(",
"bit",
",",
"text_type",
")",
":",
"bit",
"=",
"bit",
".",
"encode",
"(",
"\"utf-8\"",
")",
"h",
".",
"update",
"(",
"bit",
")",
"h",
".",
"update",
"(",
"b\"cookiesalt\"",
")",
"cookie_name",
"=",
"\"__wzd\"",
"+",
"h",
".",
"hexdigest",
"(",
")",
"[",
":",
"20",
"]",
"# If we need to generate a pin we salt it a bit more so that we don't",
"# end up with the same value and generate out 9 digits",
"if",
"num",
"is",
"None",
":",
"h",
".",
"update",
"(",
"b\"pinsalt\"",
")",
"num",
"=",
"(",
"\"%09d\"",
"%",
"int",
"(",
"h",
".",
"hexdigest",
"(",
")",
",",
"16",
")",
")",
"[",
":",
"9",
"]",
"# Format the pincode in groups of digits for easier remembering if",
"# we don't have a result yet.",
"if",
"rv",
"is",
"None",
":",
"for",
"group_size",
"in",
"5",
",",
"4",
",",
"3",
":",
"if",
"len",
"(",
"num",
")",
"%",
"group_size",
"==",
"0",
":",
"rv",
"=",
"\"-\"",
".",
"join",
"(",
"num",
"[",
"x",
":",
"x",
"+",
"group_size",
"]",
".",
"rjust",
"(",
"group_size",
",",
"\"0\"",
")",
"for",
"x",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"num",
")",
",",
"group_size",
")",
")",
"break",
"else",
":",
"rv",
"=",
"num",
"return",
"rv",
",",
"cookie_name"
] | 32.654321 | 21.049383 |
def simBirth(self,which_agents):
'''
Makes new Markov consumer by drawing initial normalized assets, permanent income levels, and
discrete states. Calls IndShockConsumerType.simBirth, then draws from initial Markov distribution.
Parameters
----------
which_agents : np.array(Bool)
Boolean array of size self.AgentCount indicating which agents should be "born".
Returns
-------
None
'''
IndShockConsumerType.simBirth(self,which_agents) # Get initial assets and permanent income
if not self.global_markov: #Markov state is not changed if it is set at the global level
N = np.sum(which_agents)
base_draws = drawUniform(N,seed=self.RNG.randint(0,2**31-1))
Cutoffs = np.cumsum(np.array(self.MrkvPrbsInit))
self.MrkvNow[which_agents] = np.searchsorted(Cutoffs,base_draws).astype(int) | [
"def",
"simBirth",
"(",
"self",
",",
"which_agents",
")",
":",
"IndShockConsumerType",
".",
"simBirth",
"(",
"self",
",",
"which_agents",
")",
"# Get initial assets and permanent income",
"if",
"not",
"self",
".",
"global_markov",
":",
"#Markov state is not changed if it is set at the global level",
"N",
"=",
"np",
".",
"sum",
"(",
"which_agents",
")",
"base_draws",
"=",
"drawUniform",
"(",
"N",
",",
"seed",
"=",
"self",
".",
"RNG",
".",
"randint",
"(",
"0",
",",
"2",
"**",
"31",
"-",
"1",
")",
")",
"Cutoffs",
"=",
"np",
".",
"cumsum",
"(",
"np",
".",
"array",
"(",
"self",
".",
"MrkvPrbsInit",
")",
")",
"self",
".",
"MrkvNow",
"[",
"which_agents",
"]",
"=",
"np",
".",
"searchsorted",
"(",
"Cutoffs",
",",
"base_draws",
")",
".",
"astype",
"(",
"int",
")"
] | 45.85 | 33.35 |
def execute_command(self, *args, **options):
"""Execute a command and return a parsed response"""
pool = self.connection_pool
command_name = args[0]
for i in _xrange(self.execution_attempts):
connection = pool.get_connection(command_name, **options)
try:
connection.send_command(*args)
res = self.parse_response(connection, command_name, **options)
pool.release(connection)
return res
except ConnectionError:
pool.purge(connection)
if i >= self.execution_attempts - 1:
raise | [
"def",
"execute_command",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"options",
")",
":",
"pool",
"=",
"self",
".",
"connection_pool",
"command_name",
"=",
"args",
"[",
"0",
"]",
"for",
"i",
"in",
"_xrange",
"(",
"self",
".",
"execution_attempts",
")",
":",
"connection",
"=",
"pool",
".",
"get_connection",
"(",
"command_name",
",",
"*",
"*",
"options",
")",
"try",
":",
"connection",
".",
"send_command",
"(",
"*",
"args",
")",
"res",
"=",
"self",
".",
"parse_response",
"(",
"connection",
",",
"command_name",
",",
"*",
"*",
"options",
")",
"pool",
".",
"release",
"(",
"connection",
")",
"return",
"res",
"except",
"ConnectionError",
":",
"pool",
".",
"purge",
"(",
"connection",
")",
"if",
"i",
">=",
"self",
".",
"execution_attempts",
"-",
"1",
":",
"raise"
] | 42.933333 | 11.6 |
def create(
self,
name,
command_to_run,
container_image,
container_type,
description="",
logs_path="",
results_path="",
environment_variables=None,
required_arguments=None,
required_arguments_default_values=None,
extra_data_to_post=None,
):
"""Create a container task type.
Args:
name (str): The name of the task.
command_to_run (str): The command to run to execute the task.
container_image (str): The container name and tag. For
example, ubuntu:14.04 for Docker; and docker://ubuntu:14:04
or shub://vsoch/hello-world for Singularity.
container_type (str): The type of the container.
description (str, optional): The description of the task type.
logs_path (str, optional): The path of the logs directory
inside the container.
results_path (str, optional): The path of the results
directory inside the container.
environment_variables (list, optional): The environment
variables required on the host to execute the task.
required_arguments (list, optional): The argument names for
the task type.
required_arguments_default_values (dict, optional): Default
values for the task's required arguments.
extra_data_to_post (dict, optional): Extra key-value pairs
to add to the request data. This is useful for
subclasses which require extra parameters.
Returns:
:class:`saltant.models.container_task_type.ContainerTaskType`:
A container task type model instance representing the
task type just created.
"""
# Add in extra data specific to container task types
if extra_data_to_post is None:
extra_data_to_post = {}
extra_data_to_post.update(
{
"container_image": container_image,
"container_type": container_type,
"logs_path": logs_path,
"results_path": results_path,
}
)
# Call the parent create function
return super(ContainerTaskTypeManager, self).create(
name=name,
command_to_run=command_to_run,
description=description,
environment_variables=environment_variables,
required_arguments=required_arguments,
required_arguments_default_values=required_arguments_default_values,
extra_data_to_post=extra_data_to_post,
) | [
"def",
"create",
"(",
"self",
",",
"name",
",",
"command_to_run",
",",
"container_image",
",",
"container_type",
",",
"description",
"=",
"\"\"",
",",
"logs_path",
"=",
"\"\"",
",",
"results_path",
"=",
"\"\"",
",",
"environment_variables",
"=",
"None",
",",
"required_arguments",
"=",
"None",
",",
"required_arguments_default_values",
"=",
"None",
",",
"extra_data_to_post",
"=",
"None",
",",
")",
":",
"# Add in extra data specific to container task types",
"if",
"extra_data_to_post",
"is",
"None",
":",
"extra_data_to_post",
"=",
"{",
"}",
"extra_data_to_post",
".",
"update",
"(",
"{",
"\"container_image\"",
":",
"container_image",
",",
"\"container_type\"",
":",
"container_type",
",",
"\"logs_path\"",
":",
"logs_path",
",",
"\"results_path\"",
":",
"results_path",
",",
"}",
")",
"# Call the parent create function",
"return",
"super",
"(",
"ContainerTaskTypeManager",
",",
"self",
")",
".",
"create",
"(",
"name",
"=",
"name",
",",
"command_to_run",
"=",
"command_to_run",
",",
"description",
"=",
"description",
",",
"environment_variables",
"=",
"environment_variables",
",",
"required_arguments",
"=",
"required_arguments",
",",
"required_arguments_default_values",
"=",
"required_arguments_default_values",
",",
"extra_data_to_post",
"=",
"extra_data_to_post",
",",
")"
] | 40.075758 | 19.212121 |
def mcycle(return_X_y=True):
"""motorcyle acceleration dataset
Parameters
----------
return_X_y : bool,
if True, returns a model-ready tuple of data (X, y)
otherwise, returns a Pandas DataFrame
Returns
-------
model-ready tuple of data (X, y)
OR
Pandas DataFrame
Notes
-----
X contains the times after the impact.
y contains the acceleration.
Source:
https://vincentarelbundock.github.io/Rdatasets/doc/MASS/mcycle.html
"""
# y is real
# recommend LinearGAM
motor = pd.read_csv(PATH + '/mcycle.csv', index_col=0)
if return_X_y:
X = motor.times.values
y = motor.accel
return _clean_X_y(X, y)
return motor | [
"def",
"mcycle",
"(",
"return_X_y",
"=",
"True",
")",
":",
"# y is real",
"# recommend LinearGAM",
"motor",
"=",
"pd",
".",
"read_csv",
"(",
"PATH",
"+",
"'/mcycle.csv'",
",",
"index_col",
"=",
"0",
")",
"if",
"return_X_y",
":",
"X",
"=",
"motor",
".",
"times",
".",
"values",
"y",
"=",
"motor",
".",
"accel",
"return",
"_clean_X_y",
"(",
"X",
",",
"y",
")",
"return",
"motor"
] | 22.709677 | 20.967742 |
def close(self):
"""Toggle state to closed switch disconnector"""
self._state = 'closed'
self.grid.graph.add_edge(
self._nodes[0], self._nodes[1], {'line': self._line}) | [
"def",
"close",
"(",
"self",
")",
":",
"self",
".",
"_state",
"=",
"'closed'",
"self",
".",
"grid",
".",
"graph",
".",
"add_edge",
"(",
"self",
".",
"_nodes",
"[",
"0",
"]",
",",
"self",
".",
"_nodes",
"[",
"1",
"]",
",",
"{",
"'line'",
":",
"self",
".",
"_line",
"}",
")"
] | 40 | 13.2 |
def run_checks(collector):
"""Just run the checks for our modules"""
artifact = collector.configuration["dashmat"].artifact
chosen = artifact
if chosen in (None, "", NotSpecified):
chosen = None
dashmat = collector.configuration["dashmat"]
modules = collector.configuration["__active_modules__"]
config_root = collector.configuration["config_root"]
module_options = collector.configuration["modules"]
datastore = JsonDataStore(os.path.join(config_root, "data.json"))
if dashmat.redis_host:
datastore = RedisDataStore(redis.Redis(dashmat.redis_host))
scheduler = Scheduler(datastore)
for name, module in modules.items():
if chosen is None or name == chosen:
server = module.make_server(module_options[name].server_options)
scheduler.register(module, server, name)
scheduler.twitch(force=True) | [
"def",
"run_checks",
"(",
"collector",
")",
":",
"artifact",
"=",
"collector",
".",
"configuration",
"[",
"\"dashmat\"",
"]",
".",
"artifact",
"chosen",
"=",
"artifact",
"if",
"chosen",
"in",
"(",
"None",
",",
"\"\"",
",",
"NotSpecified",
")",
":",
"chosen",
"=",
"None",
"dashmat",
"=",
"collector",
".",
"configuration",
"[",
"\"dashmat\"",
"]",
"modules",
"=",
"collector",
".",
"configuration",
"[",
"\"__active_modules__\"",
"]",
"config_root",
"=",
"collector",
".",
"configuration",
"[",
"\"config_root\"",
"]",
"module_options",
"=",
"collector",
".",
"configuration",
"[",
"\"modules\"",
"]",
"datastore",
"=",
"JsonDataStore",
"(",
"os",
".",
"path",
".",
"join",
"(",
"config_root",
",",
"\"data.json\"",
")",
")",
"if",
"dashmat",
".",
"redis_host",
":",
"datastore",
"=",
"RedisDataStore",
"(",
"redis",
".",
"Redis",
"(",
"dashmat",
".",
"redis_host",
")",
")",
"scheduler",
"=",
"Scheduler",
"(",
"datastore",
")",
"for",
"name",
",",
"module",
"in",
"modules",
".",
"items",
"(",
")",
":",
"if",
"chosen",
"is",
"None",
"or",
"name",
"==",
"chosen",
":",
"server",
"=",
"module",
".",
"make_server",
"(",
"module_options",
"[",
"name",
"]",
".",
"server_options",
")",
"scheduler",
".",
"register",
"(",
"module",
",",
"server",
",",
"name",
")",
"scheduler",
".",
"twitch",
"(",
"force",
"=",
"True",
")"
] | 36.375 | 19.333333 |
def get_cutout(self, resource, resolution, x_range, y_range, z_range, time_range=None, id_list=[], no_cache=None, access_mode=CacheMode.no_cache, **kwargs):
"""Get a cutout from the volume service.
Note that access_mode=no_cache is desirable when reading large amounts of
data at once. In these cases, the data is not first read into the
cache, but instead, is sent directly from the data store to the
requester.
Args:
resource (intern.resource.boss.resource.ChannelResource | str): Channel or layer Resource. If a
string is provided instead, BossRemote.parse_bossURI is called instead on a URI-formatted
string of the form `bossdb://collection/experiment/channel`.
resolution (int): 0 indicates native resolution.
x_range (list[int]): x range such as [10, 20] which means x>=10 and x<20.
y_range (list[int]): y range such as [10, 20] which means y>=10 and y<20.
z_range (list[int]): z range such as [10, 20] which means z>=10 and z<20.
time_range (optional [list[int]]): time range such as [30, 40] which means t>=30 and t<40.
id_list (optional [list[int]]): list of object ids to filter the cutout by.
no_cache (optional [boolean or None]): Deprecated way to specify the use of cache to be True or False.
access_mode should be used instead
access_mode (optional [Enum]): Identifies one of three cache access options:
cache = Will check both cache and for dirty keys
no_cache = Will skip cache check but check for dirty keys
raw = Will skip both the cache and dirty keys check
TODO: Add mode to documentation
Returns:
(numpy.array): A 3D or 4D (time) numpy matrix in (time)ZYX order.
Raises:
requests.HTTPError on error.
"""
if no_cache is not None:
warnings.warn("The no-cache option has been deprecated and will not be used in future versions of intern.")
warnings.warn("Please from intern.service.boss.volume import CacheMode and use access_mode=CacheMode.[cache,no-cache,raw] instead.")
if no_cache and access_mode != CacheMode.no_cache:
warnings.warn("Both no_cache and access_mode were used, please use access_mode only. As no_cache has been deprecated. ")
warnings.warn("Your request will be made using the default mode no_cache.")
access_mode=CacheMode.no_cache
if no_cache:
access_mode=CacheMode.no_cache
elif no_cache == False:
access_mode=CacheMode.cache
return self._volume.get_cutout(resource, resolution, x_range, y_range, z_range, time_range, id_list, access_mode, **kwargs) | [
"def",
"get_cutout",
"(",
"self",
",",
"resource",
",",
"resolution",
",",
"x_range",
",",
"y_range",
",",
"z_range",
",",
"time_range",
"=",
"None",
",",
"id_list",
"=",
"[",
"]",
",",
"no_cache",
"=",
"None",
",",
"access_mode",
"=",
"CacheMode",
".",
"no_cache",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"no_cache",
"is",
"not",
"None",
":",
"warnings",
".",
"warn",
"(",
"\"The no-cache option has been deprecated and will not be used in future versions of intern.\"",
")",
"warnings",
".",
"warn",
"(",
"\"Please from intern.service.boss.volume import CacheMode and use access_mode=CacheMode.[cache,no-cache,raw] instead.\"",
")",
"if",
"no_cache",
"and",
"access_mode",
"!=",
"CacheMode",
".",
"no_cache",
":",
"warnings",
".",
"warn",
"(",
"\"Both no_cache and access_mode were used, please use access_mode only. As no_cache has been deprecated. \"",
")",
"warnings",
".",
"warn",
"(",
"\"Your request will be made using the default mode no_cache.\"",
")",
"access_mode",
"=",
"CacheMode",
".",
"no_cache",
"if",
"no_cache",
":",
"access_mode",
"=",
"CacheMode",
".",
"no_cache",
"elif",
"no_cache",
"==",
"False",
":",
"access_mode",
"=",
"CacheMode",
".",
"cache",
"return",
"self",
".",
"_volume",
".",
"get_cutout",
"(",
"resource",
",",
"resolution",
",",
"x_range",
",",
"y_range",
",",
"z_range",
",",
"time_range",
",",
"id_list",
",",
"access_mode",
",",
"*",
"*",
"kwargs",
")"
] | 64.130435 | 39.5 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.