code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def QA_indicator_WR(DataFrame, N, N1):
'威廉指标'
HIGH = DataFrame['high']
LOW = DataFrame['low']
CLOSE = DataFrame['close']
WR1 = 100 * (HHV(HIGH, N) - CLOSE) / (HHV(HIGH, N) - LLV(LOW, N))
WR2 = 100 * (HHV(HIGH, N1) - CLOSE) / (HHV(HIGH, N1) - LLV(LOW, N1))
DICT = {'WR1': WR1, 'WR2': WR2}
return pd.DataFrame(DICT) | def function[QA_indicator_WR, parameter[DataFrame, N, N1]]:
constant[威廉指标]
variable[HIGH] assign[=] call[name[DataFrame]][constant[high]]
variable[LOW] assign[=] call[name[DataFrame]][constant[low]]
variable[CLOSE] assign[=] call[name[DataFrame]][constant[close]]
variable[WR1] assign[=] binary_operation[binary_operation[constant[100] * binary_operation[call[name[HHV], parameter[name[HIGH], name[N]]] - name[CLOSE]]] / binary_operation[call[name[HHV], parameter[name[HIGH], name[N]]] - call[name[LLV], parameter[name[LOW], name[N]]]]]
variable[WR2] assign[=] binary_operation[binary_operation[constant[100] * binary_operation[call[name[HHV], parameter[name[HIGH], name[N1]]] - name[CLOSE]]] / binary_operation[call[name[HHV], parameter[name[HIGH], name[N1]]] - call[name[LLV], parameter[name[LOW], name[N1]]]]]
variable[DICT] assign[=] dictionary[[<ast.Constant object at 0x7da1b1ff3160>, <ast.Constant object at 0x7da1b1ff36d0>], [<ast.Name object at 0x7da1b1ff3bb0>, <ast.Name object at 0x7da1b1ff1570>]]
return[call[name[pd].DataFrame, parameter[name[DICT]]]] | keyword[def] identifier[QA_indicator_WR] ( identifier[DataFrame] , identifier[N] , identifier[N1] ):
literal[string]
identifier[HIGH] = identifier[DataFrame] [ literal[string] ]
identifier[LOW] = identifier[DataFrame] [ literal[string] ]
identifier[CLOSE] = identifier[DataFrame] [ literal[string] ]
identifier[WR1] = literal[int] *( identifier[HHV] ( identifier[HIGH] , identifier[N] )- identifier[CLOSE] )/( identifier[HHV] ( identifier[HIGH] , identifier[N] )- identifier[LLV] ( identifier[LOW] , identifier[N] ))
identifier[WR2] = literal[int] *( identifier[HHV] ( identifier[HIGH] , identifier[N1] )- identifier[CLOSE] )/( identifier[HHV] ( identifier[HIGH] , identifier[N1] )- identifier[LLV] ( identifier[LOW] , identifier[N1] ))
identifier[DICT] ={ literal[string] : identifier[WR1] , literal[string] : identifier[WR2] }
keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[DICT] ) | def QA_indicator_WR(DataFrame, N, N1):
"""威廉指标"""
HIGH = DataFrame['high']
LOW = DataFrame['low']
CLOSE = DataFrame['close']
WR1 = 100 * (HHV(HIGH, N) - CLOSE) / (HHV(HIGH, N) - LLV(LOW, N))
WR2 = 100 * (HHV(HIGH, N1) - CLOSE) / (HHV(HIGH, N1) - LLV(LOW, N1))
DICT = {'WR1': WR1, 'WR2': WR2}
return pd.DataFrame(DICT) |
def get_instances(name, lifecycle_state="InService", health_status="Healthy",
attribute="private_ip_address", attributes=None, region=None,
key=None, keyid=None, profile=None):
'''
return attribute of all instances in the named autoscale group.
CLI example::
salt-call boto_asg.get_instances my_autoscale_group_name
'''
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
ec2_conn = _get_ec2_conn(region=region, key=key, keyid=keyid, profile=profile)
retries = 30
while True:
try:
asgs = conn.get_all_groups(names=[name])
break
except boto.exception.BotoServerError as e:
if retries and e.code == 'Throttling':
log.debug('Throttled by AWS API, retrying in 5 seconds...')
time.sleep(5)
retries -= 1
continue
log.error(e)
return False
if len(asgs) != 1:
log.debug("name '%s' returns multiple ASGs: %s", name, [asg.name for asg in asgs])
return False
asg = asgs[0]
instance_ids = []
# match lifecycle_state and health_status
for i in asg.instances:
if lifecycle_state is not None and i.lifecycle_state != lifecycle_state:
continue
if health_status is not None and i.health_status != health_status:
continue
instance_ids.append(i.instance_id)
# get full instance info, so that we can return the attribute
instances = ec2_conn.get_only_instances(instance_ids=instance_ids)
if attributes:
return [[_convert_attribute(instance, attr) for attr in attributes] for instance in instances]
else:
# properly handle case when not all instances have the requested attribute
return [_convert_attribute(instance, attribute) for instance in instances if getattr(instance, attribute)] | def function[get_instances, parameter[name, lifecycle_state, health_status, attribute, attributes, region, key, keyid, profile]]:
constant[
return attribute of all instances in the named autoscale group.
CLI example::
salt-call boto_asg.get_instances my_autoscale_group_name
]
variable[conn] assign[=] call[name[_get_conn], parameter[]]
variable[ec2_conn] assign[=] call[name[_get_ec2_conn], parameter[]]
variable[retries] assign[=] constant[30]
while constant[True] begin[:]
<ast.Try object at 0x7da1b2195510>
if compare[call[name[len], parameter[name[asgs]]] not_equal[!=] constant[1]] begin[:]
call[name[log].debug, parameter[constant[name '%s' returns multiple ASGs: %s], name[name], <ast.ListComp object at 0x7da1b21972e0>]]
return[constant[False]]
variable[asg] assign[=] call[name[asgs]][constant[0]]
variable[instance_ids] assign[=] list[[]]
for taget[name[i]] in starred[name[asg].instances] begin[:]
if <ast.BoolOp object at 0x7da1b2195600> begin[:]
continue
if <ast.BoolOp object at 0x7da1b21952a0> begin[:]
continue
call[name[instance_ids].append, parameter[name[i].instance_id]]
variable[instances] assign[=] call[name[ec2_conn].get_only_instances, parameter[]]
if name[attributes] begin[:]
return[<ast.ListComp object at 0x7da1b21940d0>] | keyword[def] identifier[get_instances] ( identifier[name] , identifier[lifecycle_state] = literal[string] , identifier[health_status] = literal[string] ,
identifier[attribute] = literal[string] , identifier[attributes] = keyword[None] , identifier[region] = keyword[None] ,
identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ):
literal[string]
identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[ec2_conn] = identifier[_get_ec2_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] )
identifier[retries] = literal[int]
keyword[while] keyword[True] :
keyword[try] :
identifier[asgs] = identifier[conn] . identifier[get_all_groups] ( identifier[names] =[ identifier[name] ])
keyword[break]
keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] :
keyword[if] identifier[retries] keyword[and] identifier[e] . identifier[code] == literal[string] :
identifier[log] . identifier[debug] ( literal[string] )
identifier[time] . identifier[sleep] ( literal[int] )
identifier[retries] -= literal[int]
keyword[continue]
identifier[log] . identifier[error] ( identifier[e] )
keyword[return] keyword[False]
keyword[if] identifier[len] ( identifier[asgs] )!= literal[int] :
identifier[log] . identifier[debug] ( literal[string] , identifier[name] ,[ identifier[asg] . identifier[name] keyword[for] identifier[asg] keyword[in] identifier[asgs] ])
keyword[return] keyword[False]
identifier[asg] = identifier[asgs] [ literal[int] ]
identifier[instance_ids] =[]
keyword[for] identifier[i] keyword[in] identifier[asg] . identifier[instances] :
keyword[if] identifier[lifecycle_state] keyword[is] keyword[not] keyword[None] keyword[and] identifier[i] . identifier[lifecycle_state] != identifier[lifecycle_state] :
keyword[continue]
keyword[if] identifier[health_status] keyword[is] keyword[not] keyword[None] keyword[and] identifier[i] . identifier[health_status] != identifier[health_status] :
keyword[continue]
identifier[instance_ids] . identifier[append] ( identifier[i] . identifier[instance_id] )
identifier[instances] = identifier[ec2_conn] . identifier[get_only_instances] ( identifier[instance_ids] = identifier[instance_ids] )
keyword[if] identifier[attributes] :
keyword[return] [[ identifier[_convert_attribute] ( identifier[instance] , identifier[attr] ) keyword[for] identifier[attr] keyword[in] identifier[attributes] ] keyword[for] identifier[instance] keyword[in] identifier[instances] ]
keyword[else] :
keyword[return] [ identifier[_convert_attribute] ( identifier[instance] , identifier[attribute] ) keyword[for] identifier[instance] keyword[in] identifier[instances] keyword[if] identifier[getattr] ( identifier[instance] , identifier[attribute] )] | def get_instances(name, lifecycle_state='InService', health_status='Healthy', attribute='private_ip_address', attributes=None, region=None, key=None, keyid=None, profile=None):
"""
return attribute of all instances in the named autoscale group.
CLI example::
salt-call boto_asg.get_instances my_autoscale_group_name
"""
conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
ec2_conn = _get_ec2_conn(region=region, key=key, keyid=keyid, profile=profile)
retries = 30
while True:
try:
asgs = conn.get_all_groups(names=[name])
break # depends on [control=['try'], data=[]]
except boto.exception.BotoServerError as e:
if retries and e.code == 'Throttling':
log.debug('Throttled by AWS API, retrying in 5 seconds...')
time.sleep(5)
retries -= 1
continue # depends on [control=['if'], data=[]]
log.error(e)
return False # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
if len(asgs) != 1:
log.debug("name '%s' returns multiple ASGs: %s", name, [asg.name for asg in asgs])
return False # depends on [control=['if'], data=[]]
asg = asgs[0]
instance_ids = []
# match lifecycle_state and health_status
for i in asg.instances:
if lifecycle_state is not None and i.lifecycle_state != lifecycle_state:
continue # depends on [control=['if'], data=[]]
if health_status is not None and i.health_status != health_status:
continue # depends on [control=['if'], data=[]]
instance_ids.append(i.instance_id) # depends on [control=['for'], data=['i']]
# get full instance info, so that we can return the attribute
instances = ec2_conn.get_only_instances(instance_ids=instance_ids)
if attributes:
return [[_convert_attribute(instance, attr) for attr in attributes] for instance in instances] # depends on [control=['if'], data=[]]
else:
# properly handle case when not all instances have the requested attribute
return [_convert_attribute(instance, attribute) for instance in instances if getattr(instance, attribute)] |
def softmax(x):
"""Can be replaced once scipy 1.3 is released, although numeric stability should be checked."""
e_x = np.exp(x - np.max(x))
return e_x / e_x.sum(axis=1)[:, None] | def function[softmax, parameter[x]]:
constant[Can be replaced once scipy 1.3 is released, although numeric stability should be checked.]
variable[e_x] assign[=] call[name[np].exp, parameter[binary_operation[name[x] - call[name[np].max, parameter[name[x]]]]]]
return[binary_operation[name[e_x] / call[call[name[e_x].sum, parameter[]]][tuple[[<ast.Slice object at 0x7da18ede6bc0>, <ast.Constant object at 0x7da18ede40a0>]]]]] | keyword[def] identifier[softmax] ( identifier[x] ):
literal[string]
identifier[e_x] = identifier[np] . identifier[exp] ( identifier[x] - identifier[np] . identifier[max] ( identifier[x] ))
keyword[return] identifier[e_x] / identifier[e_x] . identifier[sum] ( identifier[axis] = literal[int] )[:, keyword[None] ] | def softmax(x):
"""Can be replaced once scipy 1.3 is released, although numeric stability should be checked."""
e_x = np.exp(x - np.max(x))
return e_x / e_x.sum(axis=1)[:, None] |
def interleave(infile_1, infile_2, outfile, suffix1=None, suffix2=None):
'''Makes interleaved file from two sequence files. If used, will append suffix1 onto end
of every sequence name in infile_1, unless it already ends with suffix1. Similar for sufffix2.'''
seq_reader_1 = sequences.file_reader(infile_1)
seq_reader_2 = sequences.file_reader(infile_2)
f_out = utils.open_file_write(outfile)
for seq_1 in seq_reader_1:
try:
seq_2 = next(seq_reader_2)
except:
utils.close(f_out)
raise Error('Error getting mate for sequence', seq_1.id, ' ... cannot continue')
if suffix1 is not None and not seq_1.id.endswith(suffix1):
seq_1.id += suffix1
if suffix2 is not None and not seq_2.id.endswith(suffix2):
seq_2.id += suffix2
print(seq_1, file=f_out)
print(seq_2, file=f_out)
try:
seq_2 = next(seq_reader_2)
except:
seq_2 = None
if seq_2 is not None:
utils.close(f_out)
raise Error('Error getting mate for sequence', seq_2.id, ' ... cannot continue')
utils.close(f_out) | def function[interleave, parameter[infile_1, infile_2, outfile, suffix1, suffix2]]:
constant[Makes interleaved file from two sequence files. If used, will append suffix1 onto end
of every sequence name in infile_1, unless it already ends with suffix1. Similar for sufffix2.]
variable[seq_reader_1] assign[=] call[name[sequences].file_reader, parameter[name[infile_1]]]
variable[seq_reader_2] assign[=] call[name[sequences].file_reader, parameter[name[infile_2]]]
variable[f_out] assign[=] call[name[utils].open_file_write, parameter[name[outfile]]]
for taget[name[seq_1]] in starred[name[seq_reader_1]] begin[:]
<ast.Try object at 0x7da1affe6320>
if <ast.BoolOp object at 0x7da1affe6d40> begin[:]
<ast.AugAssign object at 0x7da1affe40a0>
if <ast.BoolOp object at 0x7da1affe53f0> begin[:]
<ast.AugAssign object at 0x7da1affe6ef0>
call[name[print], parameter[name[seq_1]]]
call[name[print], parameter[name[seq_2]]]
<ast.Try object at 0x7da1affe6b30>
if compare[name[seq_2] is_not constant[None]] begin[:]
call[name[utils].close, parameter[name[f_out]]]
<ast.Raise object at 0x7da1affe4d60>
call[name[utils].close, parameter[name[f_out]]] | keyword[def] identifier[interleave] ( identifier[infile_1] , identifier[infile_2] , identifier[outfile] , identifier[suffix1] = keyword[None] , identifier[suffix2] = keyword[None] ):
literal[string]
identifier[seq_reader_1] = identifier[sequences] . identifier[file_reader] ( identifier[infile_1] )
identifier[seq_reader_2] = identifier[sequences] . identifier[file_reader] ( identifier[infile_2] )
identifier[f_out] = identifier[utils] . identifier[open_file_write] ( identifier[outfile] )
keyword[for] identifier[seq_1] keyword[in] identifier[seq_reader_1] :
keyword[try] :
identifier[seq_2] = identifier[next] ( identifier[seq_reader_2] )
keyword[except] :
identifier[utils] . identifier[close] ( identifier[f_out] )
keyword[raise] identifier[Error] ( literal[string] , identifier[seq_1] . identifier[id] , literal[string] )
keyword[if] identifier[suffix1] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[seq_1] . identifier[id] . identifier[endswith] ( identifier[suffix1] ):
identifier[seq_1] . identifier[id] += identifier[suffix1]
keyword[if] identifier[suffix2] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[seq_2] . identifier[id] . identifier[endswith] ( identifier[suffix2] ):
identifier[seq_2] . identifier[id] += identifier[suffix2]
identifier[print] ( identifier[seq_1] , identifier[file] = identifier[f_out] )
identifier[print] ( identifier[seq_2] , identifier[file] = identifier[f_out] )
keyword[try] :
identifier[seq_2] = identifier[next] ( identifier[seq_reader_2] )
keyword[except] :
identifier[seq_2] = keyword[None]
keyword[if] identifier[seq_2] keyword[is] keyword[not] keyword[None] :
identifier[utils] . identifier[close] ( identifier[f_out] )
keyword[raise] identifier[Error] ( literal[string] , identifier[seq_2] . identifier[id] , literal[string] )
identifier[utils] . identifier[close] ( identifier[f_out] ) | def interleave(infile_1, infile_2, outfile, suffix1=None, suffix2=None):
"""Makes interleaved file from two sequence files. If used, will append suffix1 onto end
of every sequence name in infile_1, unless it already ends with suffix1. Similar for sufffix2."""
seq_reader_1 = sequences.file_reader(infile_1)
seq_reader_2 = sequences.file_reader(infile_2)
f_out = utils.open_file_write(outfile)
for seq_1 in seq_reader_1:
try:
seq_2 = next(seq_reader_2) # depends on [control=['try'], data=[]]
except:
utils.close(f_out)
raise Error('Error getting mate for sequence', seq_1.id, ' ... cannot continue') # depends on [control=['except'], data=[]]
if suffix1 is not None and (not seq_1.id.endswith(suffix1)):
seq_1.id += suffix1 # depends on [control=['if'], data=[]]
if suffix2 is not None and (not seq_2.id.endswith(suffix2)):
seq_2.id += suffix2 # depends on [control=['if'], data=[]]
print(seq_1, file=f_out)
print(seq_2, file=f_out) # depends on [control=['for'], data=['seq_1']]
try:
seq_2 = next(seq_reader_2) # depends on [control=['try'], data=[]]
except:
seq_2 = None # depends on [control=['except'], data=[]]
if seq_2 is not None:
utils.close(f_out)
raise Error('Error getting mate for sequence', seq_2.id, ' ... cannot continue') # depends on [control=['if'], data=['seq_2']]
utils.close(f_out) |
def threshold_image(image, low_thresh=None, high_thresh=None, inval=1, outval=0, binary=True):
"""
Converts a scalar image into a binary image by thresholding operations
ANTsR function: `thresholdImage`
Arguments
---------
image : ANTsImage
Input image to operate on
low_thresh : scalar (optional)
Lower edge of threshold window
hight_thresh : scalar (optional)
Higher edge of threshold window
inval : scalar
Output value for image voxels in between lothresh and hithresh
outval : scalar
Output value for image voxels lower than lothresh or higher than hithresh
binary : boolean
if true, returns binary thresholded image
if false, return binary thresholded image multiplied by original image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> timage = ants.threshold_image(image, 0.5, 1e15)
"""
if high_thresh is None:
high_thresh = image.max() + 0.01
if low_thresh is None:
low_thresh = image.min() - 0.01
dim = image.dimension
outimage = image.clone()
args = [dim, image, outimage, low_thresh, high_thresh, inval, outval]
processed_args = _int_antsProcessArguments(args)
libfn = utils.get_lib_fn('ThresholdImage')
libfn(processed_args)
if binary:
return outimage
else:
return outimage*image | def function[threshold_image, parameter[image, low_thresh, high_thresh, inval, outval, binary]]:
constant[
Converts a scalar image into a binary image by thresholding operations
ANTsR function: `thresholdImage`
Arguments
---------
image : ANTsImage
Input image to operate on
low_thresh : scalar (optional)
Lower edge of threshold window
hight_thresh : scalar (optional)
Higher edge of threshold window
inval : scalar
Output value for image voxels in between lothresh and hithresh
outval : scalar
Output value for image voxels lower than lothresh or higher than hithresh
binary : boolean
if true, returns binary thresholded image
if false, return binary thresholded image multiplied by original image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> timage = ants.threshold_image(image, 0.5, 1e15)
]
if compare[name[high_thresh] is constant[None]] begin[:]
variable[high_thresh] assign[=] binary_operation[call[name[image].max, parameter[]] + constant[0.01]]
if compare[name[low_thresh] is constant[None]] begin[:]
variable[low_thresh] assign[=] binary_operation[call[name[image].min, parameter[]] - constant[0.01]]
variable[dim] assign[=] name[image].dimension
variable[outimage] assign[=] call[name[image].clone, parameter[]]
variable[args] assign[=] list[[<ast.Name object at 0x7da1b16016c0>, <ast.Name object at 0x7da1b1601420>, <ast.Name object at 0x7da1b16012d0>, <ast.Name object at 0x7da1b1600eb0>, <ast.Name object at 0x7da1b1601000>, <ast.Name object at 0x7da1b1600f70>, <ast.Name object at 0x7da1b1601c60>]]
variable[processed_args] assign[=] call[name[_int_antsProcessArguments], parameter[name[args]]]
variable[libfn] assign[=] call[name[utils].get_lib_fn, parameter[constant[ThresholdImage]]]
call[name[libfn], parameter[name[processed_args]]]
if name[binary] begin[:]
return[name[outimage]] | keyword[def] identifier[threshold_image] ( identifier[image] , identifier[low_thresh] = keyword[None] , identifier[high_thresh] = keyword[None] , identifier[inval] = literal[int] , identifier[outval] = literal[int] , identifier[binary] = keyword[True] ):
literal[string]
keyword[if] identifier[high_thresh] keyword[is] keyword[None] :
identifier[high_thresh] = identifier[image] . identifier[max] ()+ literal[int]
keyword[if] identifier[low_thresh] keyword[is] keyword[None] :
identifier[low_thresh] = identifier[image] . identifier[min] ()- literal[int]
identifier[dim] = identifier[image] . identifier[dimension]
identifier[outimage] = identifier[image] . identifier[clone] ()
identifier[args] =[ identifier[dim] , identifier[image] , identifier[outimage] , identifier[low_thresh] , identifier[high_thresh] , identifier[inval] , identifier[outval] ]
identifier[processed_args] = identifier[_int_antsProcessArguments] ( identifier[args] )
identifier[libfn] = identifier[utils] . identifier[get_lib_fn] ( literal[string] )
identifier[libfn] ( identifier[processed_args] )
keyword[if] identifier[binary] :
keyword[return] identifier[outimage]
keyword[else] :
keyword[return] identifier[outimage] * identifier[image] | def threshold_image(image, low_thresh=None, high_thresh=None, inval=1, outval=0, binary=True):
"""
Converts a scalar image into a binary image by thresholding operations
ANTsR function: `thresholdImage`
Arguments
---------
image : ANTsImage
Input image to operate on
low_thresh : scalar (optional)
Lower edge of threshold window
hight_thresh : scalar (optional)
Higher edge of threshold window
inval : scalar
Output value for image voxels in between lothresh and hithresh
outval : scalar
Output value for image voxels lower than lothresh or higher than hithresh
binary : boolean
if true, returns binary thresholded image
if false, return binary thresholded image multiplied by original image
Returns
-------
ANTsImage
Example
-------
>>> import ants
>>> image = ants.image_read( ants.get_ants_data('r16') )
>>> timage = ants.threshold_image(image, 0.5, 1e15)
"""
if high_thresh is None:
high_thresh = image.max() + 0.01 # depends on [control=['if'], data=['high_thresh']]
if low_thresh is None:
low_thresh = image.min() - 0.01 # depends on [control=['if'], data=['low_thresh']]
dim = image.dimension
outimage = image.clone()
args = [dim, image, outimage, low_thresh, high_thresh, inval, outval]
processed_args = _int_antsProcessArguments(args)
libfn = utils.get_lib_fn('ThresholdImage')
libfn(processed_args)
if binary:
return outimage # depends on [control=['if'], data=[]]
else:
return outimage * image |
def set_index(self, index_name, index):
"""
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
"""
if not index_name:
raise ValueError("index_name must have a value")
if index_name in self.indexes:
raise ValueError("index_name {} has already been defined on {}".format(
index_name, str(self.indexes[index_name].fields)
))
if not isinstance(index, Index): raise ValueError("{} is not an Index instance".format(type(index)))
index.name = index_name
self.indexes[index_name] = index
return self | def function[set_index, parameter[self, index_name, index]]:
constant[
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
]
if <ast.UnaryOp object at 0x7da18ede56f0> begin[:]
<ast.Raise object at 0x7da18ede43d0>
if compare[name[index_name] in name[self].indexes] begin[:]
<ast.Raise object at 0x7da18ede4820>
if <ast.UnaryOp object at 0x7da18ede4ee0> begin[:]
<ast.Raise object at 0x7da18ede6140>
name[index].name assign[=] name[index_name]
call[name[self].indexes][name[index_name]] assign[=] name[index]
return[name[self]] | keyword[def] identifier[set_index] ( identifier[self] , identifier[index_name] , identifier[index] ):
literal[string]
keyword[if] keyword[not] identifier[index_name] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[index_name] keyword[in] identifier[self] . identifier[indexes] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[index_name] , identifier[str] ( identifier[self] . identifier[indexes] [ identifier[index_name] ]. identifier[fields] )
))
keyword[if] keyword[not] identifier[isinstance] ( identifier[index] , identifier[Index] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[index] )))
identifier[index] . identifier[name] = identifier[index_name]
identifier[self] . identifier[indexes] [ identifier[index_name] ]= identifier[index]
keyword[return] identifier[self] | def set_index(self, index_name, index):
"""
add an index to the schema
for the most part, you will use the __getattr__ method of adding indexes for a more fluid interface,
but you can use this if you want to get closer to the bare metal
index_name -- string -- the name of the index
index -- Index() -- an Index instance
"""
if not index_name:
raise ValueError('index_name must have a value') # depends on [control=['if'], data=[]]
if index_name in self.indexes:
raise ValueError('index_name {} has already been defined on {}'.format(index_name, str(self.indexes[index_name].fields))) # depends on [control=['if'], data=['index_name']]
if not isinstance(index, Index):
raise ValueError('{} is not an Index instance'.format(type(index))) # depends on [control=['if'], data=[]]
index.name = index_name
self.indexes[index_name] = index
return self |
def ticker(ctx, market):
""" Show ticker of a market
"""
market = Market(market, bitshares_instance=ctx.bitshares)
ticker = market.ticker()
t = [["key", "value"]]
for key in ticker:
t.append([key, str(ticker[key])])
print_table(t) | def function[ticker, parameter[ctx, market]]:
constant[ Show ticker of a market
]
variable[market] assign[=] call[name[Market], parameter[name[market]]]
variable[ticker] assign[=] call[name[market].ticker, parameter[]]
variable[t] assign[=] list[[<ast.List object at 0x7da20c6c7490>]]
for taget[name[key]] in starred[name[ticker]] begin[:]
call[name[t].append, parameter[list[[<ast.Name object at 0x7da1b054ae30>, <ast.Call object at 0x7da1b054bdf0>]]]]
call[name[print_table], parameter[name[t]]] | keyword[def] identifier[ticker] ( identifier[ctx] , identifier[market] ):
literal[string]
identifier[market] = identifier[Market] ( identifier[market] , identifier[bitshares_instance] = identifier[ctx] . identifier[bitshares] )
identifier[ticker] = identifier[market] . identifier[ticker] ()
identifier[t] =[[ literal[string] , literal[string] ]]
keyword[for] identifier[key] keyword[in] identifier[ticker] :
identifier[t] . identifier[append] ([ identifier[key] , identifier[str] ( identifier[ticker] [ identifier[key] ])])
identifier[print_table] ( identifier[t] ) | def ticker(ctx, market):
""" Show ticker of a market
"""
market = Market(market, bitshares_instance=ctx.bitshares)
ticker = market.ticker()
t = [['key', 'value']]
for key in ticker:
t.append([key, str(ticker[key])]) # depends on [control=['for'], data=['key']]
print_table(t) |
def read_input_registers(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 04: Read Input Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadInputRegisters()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu) | def function[read_input_registers, parameter[slave_id, starting_address, quantity]]:
constant[ Return ADU for Modbus function code 04: Read Input Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
]
variable[function] assign[=] call[name[ReadInputRegisters], parameter[]]
name[function].starting_address assign[=] name[starting_address]
name[function].quantity assign[=] name[quantity]
return[call[name[_create_request_adu], parameter[name[slave_id], name[function].request_pdu]]] | keyword[def] identifier[read_input_registers] ( identifier[slave_id] , identifier[starting_address] , identifier[quantity] ):
literal[string]
identifier[function] = identifier[ReadInputRegisters] ()
identifier[function] . identifier[starting_address] = identifier[starting_address]
identifier[function] . identifier[quantity] = identifier[quantity]
keyword[return] identifier[_create_request_adu] ( identifier[slave_id] , identifier[function] . identifier[request_pdu] ) | def read_input_registers(slave_id, starting_address, quantity):
""" Return ADU for Modbus function code 04: Read Input Registers.
:param slave_id: Number of slave.
:return: Byte array with ADU.
"""
function = ReadInputRegisters()
function.starting_address = starting_address
function.quantity = quantity
return _create_request_adu(slave_id, function.request_pdu) |
def action_size(self) -> Sequence[Shape]:
'''Returns the MDP action size.'''
return self._sizes(self._compiler.rddl.action_size) | def function[action_size, parameter[self]]:
constant[Returns the MDP action size.]
return[call[name[self]._sizes, parameter[name[self]._compiler.rddl.action_size]]] | keyword[def] identifier[action_size] ( identifier[self] )-> identifier[Sequence] [ identifier[Shape] ]:
literal[string]
keyword[return] identifier[self] . identifier[_sizes] ( identifier[self] . identifier[_compiler] . identifier[rddl] . identifier[action_size] ) | def action_size(self) -> Sequence[Shape]:
"""Returns the MDP action size."""
return self._sizes(self._compiler.rddl.action_size) |
def get_series_number_by_guess_for_liver(dcmreader, counts, bins, qt_app=None):
"""
Select the venous series from CT with around 200 images
:param dcmreader:
:param counts:
:param bins:
:param qt_app:
:return:
"""
series_info = dcmreader.dicomdirectory.get_stats_of_series_in_dir()
print(dcmreader.print_series_info(series_info))
import pandas as pd
df = pd.DataFrame(list(series_info.values()))
#select CT
df = df[df["Modality"].str.lower().str.contains("ct") == True]
# select just venous
df = df[df["SeriesDescription"].str.lower().str.contains("ven") == True]
# remove saggittal
df = df[df["SeriesDescription"].str.lower().str.contains("sag") == False]
# remove cor
df = df[df["SeriesDescription"].str.lower().str.contains("cor") == False]
df["dst_to_200"] = np.abs(200 - df.Count)
dfs = df.sort_values(by="dst_to_200", ascending=True)
sn = list(dfs.SeriesNumber)[0]
return sn | def function[get_series_number_by_guess_for_liver, parameter[dcmreader, counts, bins, qt_app]]:
constant[
Select the venous series from CT with around 200 images
:param dcmreader:
:param counts:
:param bins:
:param qt_app:
:return:
]
variable[series_info] assign[=] call[name[dcmreader].dicomdirectory.get_stats_of_series_in_dir, parameter[]]
call[name[print], parameter[call[name[dcmreader].print_series_info, parameter[name[series_info]]]]]
import module[pandas] as alias[pd]
variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[list], parameter[call[name[series_info].values, parameter[]]]]]]
variable[df] assign[=] call[name[df]][compare[call[call[call[name[df]][constant[Modality]].str.lower, parameter[]].str.contains, parameter[constant[ct]]] equal[==] constant[True]]]
variable[df] assign[=] call[name[df]][compare[call[call[call[name[df]][constant[SeriesDescription]].str.lower, parameter[]].str.contains, parameter[constant[ven]]] equal[==] constant[True]]]
variable[df] assign[=] call[name[df]][compare[call[call[call[name[df]][constant[SeriesDescription]].str.lower, parameter[]].str.contains, parameter[constant[sag]]] equal[==] constant[False]]]
variable[df] assign[=] call[name[df]][compare[call[call[call[name[df]][constant[SeriesDescription]].str.lower, parameter[]].str.contains, parameter[constant[cor]]] equal[==] constant[False]]]
call[name[df]][constant[dst_to_200]] assign[=] call[name[np].abs, parameter[binary_operation[constant[200] - name[df].Count]]]
variable[dfs] assign[=] call[name[df].sort_values, parameter[]]
variable[sn] assign[=] call[call[name[list], parameter[name[dfs].SeriesNumber]]][constant[0]]
return[name[sn]] | keyword[def] identifier[get_series_number_by_guess_for_liver] ( identifier[dcmreader] , identifier[counts] , identifier[bins] , identifier[qt_app] = keyword[None] ):
literal[string]
identifier[series_info] = identifier[dcmreader] . identifier[dicomdirectory] . identifier[get_stats_of_series_in_dir] ()
identifier[print] ( identifier[dcmreader] . identifier[print_series_info] ( identifier[series_info] ))
keyword[import] identifier[pandas] keyword[as] identifier[pd]
identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[list] ( identifier[series_info] . identifier[values] ()))
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]. identifier[str] . identifier[lower] (). identifier[str] . identifier[contains] ( literal[string] )== keyword[True] ]
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]. identifier[str] . identifier[lower] (). identifier[str] . identifier[contains] ( literal[string] )== keyword[True] ]
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]. identifier[str] . identifier[lower] (). identifier[str] . identifier[contains] ( literal[string] )== keyword[False] ]
identifier[df] = identifier[df] [ identifier[df] [ literal[string] ]. identifier[str] . identifier[lower] (). identifier[str] . identifier[contains] ( literal[string] )== keyword[False] ]
identifier[df] [ literal[string] ]= identifier[np] . identifier[abs] ( literal[int] - identifier[df] . identifier[Count] )
identifier[dfs] = identifier[df] . identifier[sort_values] ( identifier[by] = literal[string] , identifier[ascending] = keyword[True] )
identifier[sn] = identifier[list] ( identifier[dfs] . identifier[SeriesNumber] )[ literal[int] ]
keyword[return] identifier[sn] | def get_series_number_by_guess_for_liver(dcmreader, counts, bins, qt_app=None):
"""
Select the venous series from CT with around 200 images
:param dcmreader:
:param counts:
:param bins:
:param qt_app:
:return:
"""
series_info = dcmreader.dicomdirectory.get_stats_of_series_in_dir()
print(dcmreader.print_series_info(series_info))
import pandas as pd
df = pd.DataFrame(list(series_info.values()))
#select CT
df = df[df['Modality'].str.lower().str.contains('ct') == True]
# select just venous
df = df[df['SeriesDescription'].str.lower().str.contains('ven') == True]
# remove saggittal
df = df[df['SeriesDescription'].str.lower().str.contains('sag') == False]
# remove cor
df = df[df['SeriesDescription'].str.lower().str.contains('cor') == False]
df['dst_to_200'] = np.abs(200 - df.Count)
dfs = df.sort_values(by='dst_to_200', ascending=True)
sn = list(dfs.SeriesNumber)[0]
return sn |
def parse_primers(self, primers=None, mismatches=0, revcompl=False):
"""This functions starts with self because it's
meant as an extension to the FASTA class."""
# Default primers #
if primers is None: primers = self.primers
# Case straight #
if not revcompl:
fwd_regex = regex.compile("(%s){s<=%i}" % (primers.fwd_pattern, mismatches))
rev_regex = regex.compile("(%s){s<=%i}" % (primers.rev_pattern, mismatches))
generator = (ReadWithPrimers(r, fwd_regex, rev_regex) for r in self.parse())
# Case revcompl #
if revcompl:
fwd_regex = regex.compile("(%s){s<=%i}" % (primers.fwd_pattern, mismatches))
rev_regex = regex.compile("(%s){s<=%i}" % (primers.rev_pattern_revcompl, mismatches))
generator = (ReadWithPrimersRevCompl(r, fwd_regex, rev_regex) for r in self.parse())
# Return #
return GenWithLength(generator, len(self)) | def function[parse_primers, parameter[self, primers, mismatches, revcompl]]:
constant[This functions starts with self because it's
meant as an extension to the FASTA class.]
if compare[name[primers] is constant[None]] begin[:]
variable[primers] assign[=] name[self].primers
if <ast.UnaryOp object at 0x7da18bccb640> begin[:]
variable[fwd_regex] assign[=] call[name[regex].compile, parameter[binary_operation[constant[(%s){s<=%i}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18bcc8580>, <ast.Name object at 0x7da18bcc8490>]]]]]
variable[rev_regex] assign[=] call[name[regex].compile, parameter[binary_operation[constant[(%s){s<=%i}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b095ff70>, <ast.Name object at 0x7da1b095dea0>]]]]]
variable[generator] assign[=] <ast.GeneratorExp object at 0x7da1b095d6f0>
if name[revcompl] begin[:]
variable[fwd_regex] assign[=] call[name[regex].compile, parameter[binary_operation[constant[(%s){s<=%i}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b095f490>, <ast.Name object at 0x7da1b095cc10>]]]]]
variable[rev_regex] assign[=] call[name[regex].compile, parameter[binary_operation[constant[(%s){s<=%i}] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b095d7e0>, <ast.Name object at 0x7da1b095ece0>]]]]]
variable[generator] assign[=] <ast.GeneratorExp object at 0x7da1b095f0a0>
return[call[name[GenWithLength], parameter[name[generator], call[name[len], parameter[name[self]]]]]] | keyword[def] identifier[parse_primers] ( identifier[self] , identifier[primers] = keyword[None] , identifier[mismatches] = literal[int] , identifier[revcompl] = keyword[False] ):
literal[string]
keyword[if] identifier[primers] keyword[is] keyword[None] : identifier[primers] = identifier[self] . identifier[primers]
keyword[if] keyword[not] identifier[revcompl] :
identifier[fwd_regex] = identifier[regex] . identifier[compile] ( literal[string] %( identifier[primers] . identifier[fwd_pattern] , identifier[mismatches] ))
identifier[rev_regex] = identifier[regex] . identifier[compile] ( literal[string] %( identifier[primers] . identifier[rev_pattern] , identifier[mismatches] ))
identifier[generator] =( identifier[ReadWithPrimers] ( identifier[r] , identifier[fwd_regex] , identifier[rev_regex] ) keyword[for] identifier[r] keyword[in] identifier[self] . identifier[parse] ())
keyword[if] identifier[revcompl] :
identifier[fwd_regex] = identifier[regex] . identifier[compile] ( literal[string] %( identifier[primers] . identifier[fwd_pattern] , identifier[mismatches] ))
identifier[rev_regex] = identifier[regex] . identifier[compile] ( literal[string] %( identifier[primers] . identifier[rev_pattern_revcompl] , identifier[mismatches] ))
identifier[generator] =( identifier[ReadWithPrimersRevCompl] ( identifier[r] , identifier[fwd_regex] , identifier[rev_regex] ) keyword[for] identifier[r] keyword[in] identifier[self] . identifier[parse] ())
keyword[return] identifier[GenWithLength] ( identifier[generator] , identifier[len] ( identifier[self] )) | def parse_primers(self, primers=None, mismatches=0, revcompl=False):
"""This functions starts with self because it's
meant as an extension to the FASTA class."""
# Default primers #
if primers is None:
primers = self.primers # depends on [control=['if'], data=['primers']]
# Case straight #
if not revcompl:
fwd_regex = regex.compile('(%s){s<=%i}' % (primers.fwd_pattern, mismatches))
rev_regex = regex.compile('(%s){s<=%i}' % (primers.rev_pattern, mismatches))
generator = (ReadWithPrimers(r, fwd_regex, rev_regex) for r in self.parse()) # depends on [control=['if'], data=[]]
# Case revcompl #
if revcompl:
fwd_regex = regex.compile('(%s){s<=%i}' % (primers.fwd_pattern, mismatches))
rev_regex = regex.compile('(%s){s<=%i}' % (primers.rev_pattern_revcompl, mismatches))
generator = (ReadWithPrimersRevCompl(r, fwd_regex, rev_regex) for r in self.parse()) # depends on [control=['if'], data=[]]
# Return #
return GenWithLength(generator, len(self)) |
def Max(a, axis, keep_dims):
"""
Max reduction op.
"""
return np.amax(a, axis=axis if not isinstance(axis, np.ndarray) else tuple(axis),
keepdims=keep_dims), | def function[Max, parameter[a, axis, keep_dims]]:
constant[
Max reduction op.
]
return[tuple[[<ast.Call object at 0x7da1b053b220>]]] | keyword[def] identifier[Max] ( identifier[a] , identifier[axis] , identifier[keep_dims] ):
literal[string]
keyword[return] identifier[np] . identifier[amax] ( identifier[a] , identifier[axis] = identifier[axis] keyword[if] keyword[not] identifier[isinstance] ( identifier[axis] , identifier[np] . identifier[ndarray] ) keyword[else] identifier[tuple] ( identifier[axis] ),
identifier[keepdims] = identifier[keep_dims] ), | def Max(a, axis, keep_dims):
"""
Max reduction op.
"""
return (np.amax(a, axis=axis if not isinstance(axis, np.ndarray) else tuple(axis), keepdims=keep_dims),) |
def variables(self):
'''A list of Theano variables for loss computations.'''
result = self.inputs
seen = set(i.name for i in result)
for loss in self.losses:
for v in loss.variables:
if v.name not in seen:
result.append(v)
seen.add(v.name)
return result | def function[variables, parameter[self]]:
constant[A list of Theano variables for loss computations.]
variable[result] assign[=] name[self].inputs
variable[seen] assign[=] call[name[set], parameter[<ast.GeneratorExp object at 0x7da1b026c850>]]
for taget[name[loss]] in starred[name[self].losses] begin[:]
for taget[name[v]] in starred[name[loss].variables] begin[:]
if compare[name[v].name <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:]
call[name[result].append, parameter[name[v]]]
call[name[seen].add, parameter[name[v].name]]
return[name[result]] | keyword[def] identifier[variables] ( identifier[self] ):
literal[string]
identifier[result] = identifier[self] . identifier[inputs]
identifier[seen] = identifier[set] ( identifier[i] . identifier[name] keyword[for] identifier[i] keyword[in] identifier[result] )
keyword[for] identifier[loss] keyword[in] identifier[self] . identifier[losses] :
keyword[for] identifier[v] keyword[in] identifier[loss] . identifier[variables] :
keyword[if] identifier[v] . identifier[name] keyword[not] keyword[in] identifier[seen] :
identifier[result] . identifier[append] ( identifier[v] )
identifier[seen] . identifier[add] ( identifier[v] . identifier[name] )
keyword[return] identifier[result] | def variables(self):
"""A list of Theano variables for loss computations."""
result = self.inputs
seen = set((i.name for i in result))
for loss in self.losses:
for v in loss.variables:
if v.name not in seen:
result.append(v)
seen.add(v.name) # depends on [control=['if'], data=['seen']] # depends on [control=['for'], data=['v']] # depends on [control=['for'], data=['loss']]
return result |
def answers(self):
"""获取用户的所有答案.
:return: 用户所有答案,返回生成器.
:rtype: Answer.Iterable
"""
from .question import Question
from .answer import Answer
if self.url is None or self.answer_num == 0:
return
for page_index in range(1, (self.answer_num - 1) // 20 + 2):
html = self._session.get(
self.url + 'answers?page=' + str(page_index)).text
soup = BeautifulSoup(html)
questions = soup.find_all('a', class_='question_link')
upvotes = soup.find_all('a', class_='zm-item-vote-count')
for q, upvote in zip(questions, upvotes):
answer_url = Zhihu_URL + q['href']
question_url = Zhihu_URL + re_a2q.match(q['href']).group(1)
question_title = q.text
upvote_num = upvote.text
if upvote_num.isdigit():
upvote_num = int(upvote_num)
else:
upvote_num = None
question = Question(question_url, question_title,
session=self._session)
yield Answer(answer_url, question, self, upvote_num,
session=self._session) | def function[answers, parameter[self]]:
constant[获取用户的所有答案.
:return: 用户所有答案,返回生成器.
:rtype: Answer.Iterable
]
from relative_module[question] import module[Question]
from relative_module[answer] import module[Answer]
if <ast.BoolOp object at 0x7da20e9b3820> begin[:]
return[None]
for taget[name[page_index]] in starred[call[name[range], parameter[constant[1], binary_operation[binary_operation[binary_operation[name[self].answer_num - constant[1]] <ast.FloorDiv object at 0x7da2590d6bc0> constant[20]] + constant[2]]]]] begin[:]
variable[html] assign[=] call[name[self]._session.get, parameter[binary_operation[binary_operation[name[self].url + constant[answers?page=]] + call[name[str], parameter[name[page_index]]]]]].text
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[html]]]
variable[questions] assign[=] call[name[soup].find_all, parameter[constant[a]]]
variable[upvotes] assign[=] call[name[soup].find_all, parameter[constant[a]]]
for taget[tuple[[<ast.Name object at 0x7da20e9b0f70>, <ast.Name object at 0x7da20e9b1f90>]]] in starred[call[name[zip], parameter[name[questions], name[upvotes]]]] begin[:]
variable[answer_url] assign[=] binary_operation[name[Zhihu_URL] + call[name[q]][constant[href]]]
variable[question_url] assign[=] binary_operation[name[Zhihu_URL] + call[call[name[re_a2q].match, parameter[call[name[q]][constant[href]]]].group, parameter[constant[1]]]]
variable[question_title] assign[=] name[q].text
variable[upvote_num] assign[=] name[upvote].text
if call[name[upvote_num].isdigit, parameter[]] begin[:]
variable[upvote_num] assign[=] call[name[int], parameter[name[upvote_num]]]
variable[question] assign[=] call[name[Question], parameter[name[question_url], name[question_title]]]
<ast.Yield object at 0x7da20e9b0e20> | keyword[def] identifier[answers] ( identifier[self] ):
literal[string]
keyword[from] . identifier[question] keyword[import] identifier[Question]
keyword[from] . identifier[answer] keyword[import] identifier[Answer]
keyword[if] identifier[self] . identifier[url] keyword[is] keyword[None] keyword[or] identifier[self] . identifier[answer_num] == literal[int] :
keyword[return]
keyword[for] identifier[page_index] keyword[in] identifier[range] ( literal[int] ,( identifier[self] . identifier[answer_num] - literal[int] )// literal[int] + literal[int] ):
identifier[html] = identifier[self] . identifier[_session] . identifier[get] (
identifier[self] . identifier[url] + literal[string] + identifier[str] ( identifier[page_index] )). identifier[text]
identifier[soup] = identifier[BeautifulSoup] ( identifier[html] )
identifier[questions] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] )
identifier[upvotes] = identifier[soup] . identifier[find_all] ( literal[string] , identifier[class_] = literal[string] )
keyword[for] identifier[q] , identifier[upvote] keyword[in] identifier[zip] ( identifier[questions] , identifier[upvotes] ):
identifier[answer_url] = identifier[Zhihu_URL] + identifier[q] [ literal[string] ]
identifier[question_url] = identifier[Zhihu_URL] + identifier[re_a2q] . identifier[match] ( identifier[q] [ literal[string] ]). identifier[group] ( literal[int] )
identifier[question_title] = identifier[q] . identifier[text]
identifier[upvote_num] = identifier[upvote] . identifier[text]
keyword[if] identifier[upvote_num] . identifier[isdigit] ():
identifier[upvote_num] = identifier[int] ( identifier[upvote_num] )
keyword[else] :
identifier[upvote_num] = keyword[None]
identifier[question] = identifier[Question] ( identifier[question_url] , identifier[question_title] ,
identifier[session] = identifier[self] . identifier[_session] )
keyword[yield] identifier[Answer] ( identifier[answer_url] , identifier[question] , identifier[self] , identifier[upvote_num] ,
identifier[session] = identifier[self] . identifier[_session] ) | def answers(self):
"""获取用户的所有答案.
:return: 用户所有答案,返回生成器.
:rtype: Answer.Iterable
"""
from .question import Question
from .answer import Answer
if self.url is None or self.answer_num == 0:
return # depends on [control=['if'], data=[]]
for page_index in range(1, (self.answer_num - 1) // 20 + 2):
html = self._session.get(self.url + 'answers?page=' + str(page_index)).text
soup = BeautifulSoup(html)
questions = soup.find_all('a', class_='question_link')
upvotes = soup.find_all('a', class_='zm-item-vote-count')
for (q, upvote) in zip(questions, upvotes):
answer_url = Zhihu_URL + q['href']
question_url = Zhihu_URL + re_a2q.match(q['href']).group(1)
question_title = q.text
upvote_num = upvote.text
if upvote_num.isdigit():
upvote_num = int(upvote_num) # depends on [control=['if'], data=[]]
else:
upvote_num = None
question = Question(question_url, question_title, session=self._session)
yield Answer(answer_url, question, self, upvote_num, session=self._session) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['page_index']] |
def calc_cost(y, yhat, cost_matrix):
"""Calculate the cost with given cost matrix
y : ground truth
yhat : estimation
cost_matrix : array-like, shape=(n_classes, n_classes)
The ith row, jth column represents the cost of the ground truth being
ith class and prediction as jth class.
"""
return np.mean(cost_matrix[list(y), list(yhat)]) | def function[calc_cost, parameter[y, yhat, cost_matrix]]:
constant[Calculate the cost with given cost matrix
y : ground truth
yhat : estimation
cost_matrix : array-like, shape=(n_classes, n_classes)
The ith row, jth column represents the cost of the ground truth being
ith class and prediction as jth class.
]
return[call[name[np].mean, parameter[call[name[cost_matrix]][tuple[[<ast.Call object at 0x7da18eb57700>, <ast.Call object at 0x7da18eb56e90>]]]]]] | keyword[def] identifier[calc_cost] ( identifier[y] , identifier[yhat] , identifier[cost_matrix] ):
literal[string]
keyword[return] identifier[np] . identifier[mean] ( identifier[cost_matrix] [ identifier[list] ( identifier[y] ), identifier[list] ( identifier[yhat] )]) | def calc_cost(y, yhat, cost_matrix):
"""Calculate the cost with given cost matrix
y : ground truth
yhat : estimation
cost_matrix : array-like, shape=(n_classes, n_classes)
The ith row, jth column represents the cost of the ground truth being
ith class and prediction as jth class.
"""
return np.mean(cost_matrix[list(y), list(yhat)]) |
def crate_attribute(self, crate_id, attribute):
"""Get crate attribute"""
path = urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id, attribute)
raw_attribute_data = self.fetch(path)
return raw_attribute_data | def function[crate_attribute, parameter[self, crate_id, attribute]]:
constant[Get crate attribute]
variable[path] assign[=] call[name[urijoin], parameter[name[CRATES_API_URL], name[CATEGORY_CRATES], name[crate_id], name[attribute]]]
variable[raw_attribute_data] assign[=] call[name[self].fetch, parameter[name[path]]]
return[name[raw_attribute_data]] | keyword[def] identifier[crate_attribute] ( identifier[self] , identifier[crate_id] , identifier[attribute] ):
literal[string]
identifier[path] = identifier[urijoin] ( identifier[CRATES_API_URL] , identifier[CATEGORY_CRATES] , identifier[crate_id] , identifier[attribute] )
identifier[raw_attribute_data] = identifier[self] . identifier[fetch] ( identifier[path] )
keyword[return] identifier[raw_attribute_data] | def crate_attribute(self, crate_id, attribute):
"""Get crate attribute"""
path = urijoin(CRATES_API_URL, CATEGORY_CRATES, crate_id, attribute)
raw_attribute_data = self.fetch(path)
return raw_attribute_data |
def _put_request(self, url, headers, data=None):
"""
Issue a PUT request to the specified endpoint with the data provided.
:param url: str
:pararm headers: dict
:param data: dict
"""
return self._session.put(url, headers=headers, data=data) | def function[_put_request, parameter[self, url, headers, data]]:
constant[
Issue a PUT request to the specified endpoint with the data provided.
:param url: str
:pararm headers: dict
:param data: dict
]
return[call[name[self]._session.put, parameter[name[url]]]] | keyword[def] identifier[_put_request] ( identifier[self] , identifier[url] , identifier[headers] , identifier[data] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_session] . identifier[put] ( identifier[url] , identifier[headers] = identifier[headers] , identifier[data] = identifier[data] ) | def _put_request(self, url, headers, data=None):
"""
Issue a PUT request to the specified endpoint with the data provided.
:param url: str
:pararm headers: dict
:param data: dict
"""
return self._session.put(url, headers=headers, data=data) |
def constant_propagation(block, silence_unexpected_net_warnings=False):
""" Removes excess constants in the block.
Note on resulting block:
The output of the block can have wirevectors that are driven but not
listened to. This is to be expected. These are to be removed by the
_remove_unlistened_nets function
"""
net_count = _NetCount(block)
while net_count.shrinking():
_constant_prop_pass(block, silence_unexpected_net_warnings) | def function[constant_propagation, parameter[block, silence_unexpected_net_warnings]]:
constant[ Removes excess constants in the block.
Note on resulting block:
The output of the block can have wirevectors that are driven but not
listened to. This is to be expected. These are to be removed by the
_remove_unlistened_nets function
]
variable[net_count] assign[=] call[name[_NetCount], parameter[name[block]]]
while call[name[net_count].shrinking, parameter[]] begin[:]
call[name[_constant_prop_pass], parameter[name[block], name[silence_unexpected_net_warnings]]] | keyword[def] identifier[constant_propagation] ( identifier[block] , identifier[silence_unexpected_net_warnings] = keyword[False] ):
literal[string]
identifier[net_count] = identifier[_NetCount] ( identifier[block] )
keyword[while] identifier[net_count] . identifier[shrinking] ():
identifier[_constant_prop_pass] ( identifier[block] , identifier[silence_unexpected_net_warnings] ) | def constant_propagation(block, silence_unexpected_net_warnings=False):
""" Removes excess constants in the block.
Note on resulting block:
The output of the block can have wirevectors that are driven but not
listened to. This is to be expected. These are to be removed by the
_remove_unlistened_nets function
"""
net_count = _NetCount(block)
while net_count.shrinking():
_constant_prop_pass(block, silence_unexpected_net_warnings) # depends on [control=['while'], data=[]] |
def checkpat(self, pattern):
"""
check for errors in a regex pattern
"""
if pattern is None:
return
try:
re.match(pattern, "")
except re.error:
print3("\nBad user-defined singular pattern:\n\t%s\n" % pattern)
raise BadUserDefinedPatternError | def function[checkpat, parameter[self, pattern]]:
constant[
check for errors in a regex pattern
]
if compare[name[pattern] is constant[None]] begin[:]
return[None]
<ast.Try object at 0x7da20c796aa0> | keyword[def] identifier[checkpat] ( identifier[self] , identifier[pattern] ):
literal[string]
keyword[if] identifier[pattern] keyword[is] keyword[None] :
keyword[return]
keyword[try] :
identifier[re] . identifier[match] ( identifier[pattern] , literal[string] )
keyword[except] identifier[re] . identifier[error] :
identifier[print3] ( literal[string] % identifier[pattern] )
keyword[raise] identifier[BadUserDefinedPatternError] | def checkpat(self, pattern):
"""
check for errors in a regex pattern
"""
if pattern is None:
return # depends on [control=['if'], data=[]]
try:
re.match(pattern, '') # depends on [control=['try'], data=[]]
except re.error:
print3('\nBad user-defined singular pattern:\n\t%s\n' % pattern)
raise BadUserDefinedPatternError # depends on [control=['except'], data=[]] |
def meta(self):
"""Returns a dictionary with arrays of addresses in CIDR format
specifying theaddresses that the incoming service hooks will originate
from.
.. versionadded:: 0.5
"""
url = self._build_url('meta')
return self._json(self._get(url), 200) | def function[meta, parameter[self]]:
constant[Returns a dictionary with arrays of addresses in CIDR format
specifying theaddresses that the incoming service hooks will originate
from.
.. versionadded:: 0.5
]
variable[url] assign[=] call[name[self]._build_url, parameter[constant[meta]]]
return[call[name[self]._json, parameter[call[name[self]._get, parameter[name[url]]], constant[200]]]] | keyword[def] identifier[meta] ( identifier[self] ):
literal[string]
identifier[url] = identifier[self] . identifier[_build_url] ( literal[string] )
keyword[return] identifier[self] . identifier[_json] ( identifier[self] . identifier[_get] ( identifier[url] ), literal[int] ) | def meta(self):
"""Returns a dictionary with arrays of addresses in CIDR format
specifying theaddresses that the incoming service hooks will originate
from.
.. versionadded:: 0.5
"""
url = self._build_url('meta')
return self._json(self._get(url), 200) |
def execute(self, sql):
"""Execute arbitary SQL against the database."""
cursor = self.connection.cursor()
try:
cursor.execute(sql)
finally:
cursor.close() | def function[execute, parameter[self, sql]]:
constant[Execute arbitary SQL against the database.]
variable[cursor] assign[=] call[name[self].connection.cursor, parameter[]]
<ast.Try object at 0x7da2054a58a0> | keyword[def] identifier[execute] ( identifier[self] , identifier[sql] ):
literal[string]
identifier[cursor] = identifier[self] . identifier[connection] . identifier[cursor] ()
keyword[try] :
identifier[cursor] . identifier[execute] ( identifier[sql] )
keyword[finally] :
identifier[cursor] . identifier[close] () | def execute(self, sql):
"""Execute arbitary SQL against the database."""
cursor = self.connection.cursor()
try:
cursor.execute(sql) # depends on [control=['try'], data=[]]
finally:
cursor.close() |
def generate_unique_name(self, basename):
"""Generates a unique name for a child given a base name."""
counts = self.__counts
try:
count = counts[basename]
counts[basename] += 1
except KeyError:
count = 0
counts[basename] = 1
prefix = self.Naming_prefix
if count == 0:
name = prefix + basename
else:
name = prefix + basename + "_" + str(count)
if prefix != "" or count != 0:
try:
count = counts[name]
return self.generate_unique_name(name)
except KeyError:
# wasn't already used so return it
counts[name] = 1
return name | def function[generate_unique_name, parameter[self, basename]]:
constant[Generates a unique name for a child given a base name.]
variable[counts] assign[=] name[self].__counts
<ast.Try object at 0x7da20e9b30a0>
variable[prefix] assign[=] name[self].Naming_prefix
if compare[name[count] equal[==] constant[0]] begin[:]
variable[name] assign[=] binary_operation[name[prefix] + name[basename]]
if <ast.BoolOp object at 0x7da20e9b0730> begin[:]
<ast.Try object at 0x7da20e9b2500>
return[name[name]] | keyword[def] identifier[generate_unique_name] ( identifier[self] , identifier[basename] ):
literal[string]
identifier[counts] = identifier[self] . identifier[__counts]
keyword[try] :
identifier[count] = identifier[counts] [ identifier[basename] ]
identifier[counts] [ identifier[basename] ]+= literal[int]
keyword[except] identifier[KeyError] :
identifier[count] = literal[int]
identifier[counts] [ identifier[basename] ]= literal[int]
identifier[prefix] = identifier[self] . identifier[Naming_prefix]
keyword[if] identifier[count] == literal[int] :
identifier[name] = identifier[prefix] + identifier[basename]
keyword[else] :
identifier[name] = identifier[prefix] + identifier[basename] + literal[string] + identifier[str] ( identifier[count] )
keyword[if] identifier[prefix] != literal[string] keyword[or] identifier[count] != literal[int] :
keyword[try] :
identifier[count] = identifier[counts] [ identifier[name] ]
keyword[return] identifier[self] . identifier[generate_unique_name] ( identifier[name] )
keyword[except] identifier[KeyError] :
identifier[counts] [ identifier[name] ]= literal[int]
keyword[return] identifier[name] | def generate_unique_name(self, basename):
"""Generates a unique name for a child given a base name."""
counts = self.__counts
try:
count = counts[basename]
counts[basename] += 1 # depends on [control=['try'], data=[]]
except KeyError:
count = 0
counts[basename] = 1 # depends on [control=['except'], data=[]]
prefix = self.Naming_prefix
if count == 0:
name = prefix + basename # depends on [control=['if'], data=[]]
else:
name = prefix + basename + '_' + str(count)
if prefix != '' or count != 0:
try:
count = counts[name]
return self.generate_unique_name(name) # depends on [control=['try'], data=[]]
except KeyError:
# wasn't already used so return it
counts[name] = 1 # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return name |
def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
if self.uri:
return rfc2425encode(self.name,self.uri,{"value":"uri"})
elif self.sound:
return rfc2425encode(self.name,self.sound) | def function[rfc2426, parameter[self]]:
constant[RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`]
if name[self].uri begin[:]
return[call[name[rfc2425encode], parameter[name[self].name, name[self].uri, dictionary[[<ast.Constant object at 0x7da1b004fb20>], [<ast.Constant object at 0x7da1b004d5a0>]]]]] | keyword[def] identifier[rfc2426] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[uri] :
keyword[return] identifier[rfc2425encode] ( identifier[self] . identifier[name] , identifier[self] . identifier[uri] ,{ literal[string] : literal[string] })
keyword[elif] identifier[self] . identifier[sound] :
keyword[return] identifier[rfc2425encode] ( identifier[self] . identifier[name] , identifier[self] . identifier[sound] ) | def rfc2426(self):
"""RFC2426-encode the field content.
:return: the field in the RFC 2426 format.
:returntype: `str`"""
if self.uri:
return rfc2425encode(self.name, self.uri, {'value': 'uri'}) # depends on [control=['if'], data=[]]
elif self.sound:
return rfc2425encode(self.name, self.sound) # depends on [control=['if'], data=[]] |
def conjugate(self, tense = 'present'):
''' Tries to conjugate a given verb using verbix.com.'''
if self.tenses.has_key(tense):
return self._extract(self.tenses[tense])
elif self.tenses.has_key(tense.title()):
return self._extract(self.tenses[tense.title()])
return [None] | def function[conjugate, parameter[self, tense]]:
constant[ Tries to conjugate a given verb using verbix.com.]
if call[name[self].tenses.has_key, parameter[name[tense]]] begin[:]
return[call[name[self]._extract, parameter[call[name[self].tenses][name[tense]]]]]
return[list[[<ast.Constant object at 0x7da1b101a5f0>]]] | keyword[def] identifier[conjugate] ( identifier[self] , identifier[tense] = literal[string] ):
literal[string]
keyword[if] identifier[self] . identifier[tenses] . identifier[has_key] ( identifier[tense] ):
keyword[return] identifier[self] . identifier[_extract] ( identifier[self] . identifier[tenses] [ identifier[tense] ])
keyword[elif] identifier[self] . identifier[tenses] . identifier[has_key] ( identifier[tense] . identifier[title] ()):
keyword[return] identifier[self] . identifier[_extract] ( identifier[self] . identifier[tenses] [ identifier[tense] . identifier[title] ()])
keyword[return] [ keyword[None] ] | def conjugate(self, tense='present'):
""" Tries to conjugate a given verb using verbix.com."""
if self.tenses.has_key(tense):
return self._extract(self.tenses[tense]) # depends on [control=['if'], data=[]]
elif self.tenses.has_key(tense.title()):
return self._extract(self.tenses[tense.title()]) # depends on [control=['if'], data=[]]
return [None] |
def key_percent (self, char):
'''find matching <([{}])>'''
self.motion = self.motion_matching
self.delete_right = 1
self.state = _VI_MOTION
self.apply () | def function[key_percent, parameter[self, char]]:
constant[find matching <([{}])>]
name[self].motion assign[=] name[self].motion_matching
name[self].delete_right assign[=] constant[1]
name[self].state assign[=] name[_VI_MOTION]
call[name[self].apply, parameter[]] | keyword[def] identifier[key_percent] ( identifier[self] , identifier[char] ):
literal[string]
identifier[self] . identifier[motion] = identifier[self] . identifier[motion_matching]
identifier[self] . identifier[delete_right] = literal[int]
identifier[self] . identifier[state] = identifier[_VI_MOTION]
identifier[self] . identifier[apply] () | def key_percent(self, char):
"""find matching <([{}])>"""
self.motion = self.motion_matching
self.delete_right = 1
self.state = _VI_MOTION
self.apply() |
def clone(self):
"""
Create a complete copy of self.
:returns: A MaterialPackage that is identical to self.
"""
result = copy.copy(self)
result.compound_masses = copy.deepcopy(self.compound_masses)
return result | def function[clone, parameter[self]]:
constant[
Create a complete copy of self.
:returns: A MaterialPackage that is identical to self.
]
variable[result] assign[=] call[name[copy].copy, parameter[name[self]]]
name[result].compound_masses assign[=] call[name[copy].deepcopy, parameter[name[self].compound_masses]]
return[name[result]] | keyword[def] identifier[clone] ( identifier[self] ):
literal[string]
identifier[result] = identifier[copy] . identifier[copy] ( identifier[self] )
identifier[result] . identifier[compound_masses] = identifier[copy] . identifier[deepcopy] ( identifier[self] . identifier[compound_masses] )
keyword[return] identifier[result] | def clone(self):
"""
Create a complete copy of self.
:returns: A MaterialPackage that is identical to self.
"""
result = copy.copy(self)
result.compound_masses = copy.deepcopy(self.compound_masses)
return result |
def calculate_Y(sub_network,skip_pre=False):
"""Calculate bus admittance matrices for AC sub-networks."""
if not skip_pre:
calculate_dependent_values(sub_network.network)
if sub_network.network.sub_networks.at[sub_network.name,"carrier"] != "AC":
logger.warning("Non-AC networks not supported for Y!")
return
branches = sub_network.branches()
buses_o = sub_network.buses_o
network = sub_network.network
#following leans heavily on pypower.makeYbus
#Copyright Richard Lincoln, Ray Zimmerman, BSD-style licence
num_branches = len(branches)
num_buses = len(buses_o)
y_se = 1/(branches["r_pu"] + 1.j*branches["x_pu"])
y_sh = branches["g_pu"]+ 1.j*branches["b_pu"]
tau = branches["tap_ratio"].fillna(1.)
#catch some transformers falsely set with tau = 0 by pypower
tau[tau==0] = 1.
#define the HV tap ratios
tau_hv = pd.Series(1.,branches.index)
tau_hv[branches.tap_side==0] = tau[branches.tap_side==0]
#define the LV tap ratios
tau_lv = pd.Series(1.,branches.index)
tau_lv[branches.tap_side==1] = tau[branches.tap_side==1]
phase_shift = np.exp(1.j*branches["phase_shift"].fillna(0.)*np.pi/180.)
#build the admittance matrix elements for each branch
Y11 = (y_se + 0.5*y_sh)/tau_lv**2
Y10 = -y_se/tau_lv/tau_hv/phase_shift
Y01 = -y_se/tau_lv/tau_hv/np.conj(phase_shift)
Y00 = (y_se + 0.5*y_sh)/tau_hv**2
#bus shunt impedances
b_sh = network.shunt_impedances.b_pu.groupby(network.shunt_impedances.bus).sum().reindex(buses_o, fill_value = 0.)
g_sh = network.shunt_impedances.g_pu.groupby(network.shunt_impedances.bus).sum().reindex(buses_o, fill_value = 0.)
Y_sh = g_sh + 1.j*b_sh
#get bus indices
bus0 = buses_o.get_indexer(branches.bus0)
bus1 = buses_o.get_indexer(branches.bus1)
#connection matrices
C0 = csr_matrix((ones(num_branches), (np.arange(num_branches), bus0)), (num_branches, num_buses))
C1 = csr_matrix((ones(num_branches), (np.arange(num_branches), bus1)), (num_branches, num_buses))
#build Y{0,1} such that Y{0,1} * V is the vector complex branch currents
i = r_[np.arange(num_branches), np.arange(num_branches)]
sub_network.Y0 = csr_matrix((r_[Y00,Y01],(i,r_[bus0,bus1])), (num_branches,num_buses))
sub_network.Y1 = csr_matrix((r_[Y10,Y11],(i,r_[bus0,bus1])), (num_branches,num_buses))
#now build bus admittance matrix
sub_network.Y = C0.T * sub_network.Y0 + C1.T * sub_network.Y1 + \
csr_matrix((Y_sh, (np.arange(num_buses), np.arange(num_buses)))) | def function[calculate_Y, parameter[sub_network, skip_pre]]:
constant[Calculate bus admittance matrices for AC sub-networks.]
if <ast.UnaryOp object at 0x7da18c4cf760> begin[:]
call[name[calculate_dependent_values], parameter[name[sub_network].network]]
if compare[call[name[sub_network].network.sub_networks.at][tuple[[<ast.Attribute object at 0x7da18c4ce890>, <ast.Constant object at 0x7da18c4cf430>]]] not_equal[!=] constant[AC]] begin[:]
call[name[logger].warning, parameter[constant[Non-AC networks not supported for Y!]]]
return[None]
variable[branches] assign[=] call[name[sub_network].branches, parameter[]]
variable[buses_o] assign[=] name[sub_network].buses_o
variable[network] assign[=] name[sub_network].network
variable[num_branches] assign[=] call[name[len], parameter[name[branches]]]
variable[num_buses] assign[=] call[name[len], parameter[name[buses_o]]]
variable[y_se] assign[=] binary_operation[constant[1] / binary_operation[call[name[branches]][constant[r_pu]] + binary_operation[constant[1j] * call[name[branches]][constant[x_pu]]]]]
variable[y_sh] assign[=] binary_operation[call[name[branches]][constant[g_pu]] + binary_operation[constant[1j] * call[name[branches]][constant[b_pu]]]]
variable[tau] assign[=] call[call[name[branches]][constant[tap_ratio]].fillna, parameter[constant[1.0]]]
call[name[tau]][compare[name[tau] equal[==] constant[0]]] assign[=] constant[1.0]
variable[tau_hv] assign[=] call[name[pd].Series, parameter[constant[1.0], name[branches].index]]
call[name[tau_hv]][compare[name[branches].tap_side equal[==] constant[0]]] assign[=] call[name[tau]][compare[name[branches].tap_side equal[==] constant[0]]]
variable[tau_lv] assign[=] call[name[pd].Series, parameter[constant[1.0], name[branches].index]]
call[name[tau_lv]][compare[name[branches].tap_side equal[==] constant[1]]] assign[=] call[name[tau]][compare[name[branches].tap_side equal[==] constant[1]]]
variable[phase_shift] assign[=] call[name[np].exp, parameter[binary_operation[binary_operation[binary_operation[constant[1j] * call[call[name[branches]][constant[phase_shift]].fillna, parameter[constant[0.0]]]] * name[np].pi] / constant[180.0]]]]
variable[Y11] assign[=] binary_operation[binary_operation[name[y_se] + binary_operation[constant[0.5] * name[y_sh]]] / binary_operation[name[tau_lv] ** constant[2]]]
variable[Y10] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18c4cfd90> / name[tau_lv]] / name[tau_hv]] / name[phase_shift]]
variable[Y01] assign[=] binary_operation[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18c4ce2f0> / name[tau_lv]] / name[tau_hv]] / call[name[np].conj, parameter[name[phase_shift]]]]
variable[Y00] assign[=] binary_operation[binary_operation[name[y_se] + binary_operation[constant[0.5] * name[y_sh]]] / binary_operation[name[tau_hv] ** constant[2]]]
variable[b_sh] assign[=] call[call[call[name[network].shunt_impedances.b_pu.groupby, parameter[name[network].shunt_impedances.bus]].sum, parameter[]].reindex, parameter[name[buses_o]]]
variable[g_sh] assign[=] call[call[call[name[network].shunt_impedances.g_pu.groupby, parameter[name[network].shunt_impedances.bus]].sum, parameter[]].reindex, parameter[name[buses_o]]]
variable[Y_sh] assign[=] binary_operation[name[g_sh] + binary_operation[constant[1j] * name[b_sh]]]
variable[bus0] assign[=] call[name[buses_o].get_indexer, parameter[name[branches].bus0]]
variable[bus1] assign[=] call[name[buses_o].get_indexer, parameter[name[branches].bus1]]
variable[C0] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Call object at 0x7da20c7c91e0>, <ast.Tuple object at 0x7da20c7c9540>]], tuple[[<ast.Name object at 0x7da20c7c85e0>, <ast.Name object at 0x7da20c7c9e40>]]]]
variable[C1] assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Call object at 0x7da20c7ca440>, <ast.Tuple object at 0x7da20c7ca740>]], tuple[[<ast.Name object at 0x7da20c7ca4a0>, <ast.Name object at 0x7da20c7cbe20>]]]]
variable[i] assign[=] call[name[r_]][tuple[[<ast.Call object at 0x7da20c7c9c00>, <ast.Call object at 0x7da20c7c87c0>]]]
name[sub_network].Y0 assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Subscript object at 0x7da20c7c9900>, <ast.Tuple object at 0x7da20c7cac80>]], tuple[[<ast.Name object at 0x7da20c7c9600>, <ast.Name object at 0x7da20c7c9e10>]]]]
name[sub_network].Y1 assign[=] call[name[csr_matrix], parameter[tuple[[<ast.Subscript object at 0x7da20c7ca380>, <ast.Tuple object at 0x7da20c7caf50>]], tuple[[<ast.Name object at 0x7da20c7cb730>, <ast.Name object at 0x7da20c7c9780>]]]]
name[sub_network].Y assign[=] binary_operation[binary_operation[binary_operation[name[C0].T * name[sub_network].Y0] + binary_operation[name[C1].T * name[sub_network].Y1]] + call[name[csr_matrix], parameter[tuple[[<ast.Name object at 0x7da20c7cae90>, <ast.Tuple object at 0x7da20c7cb3d0>]]]]] | keyword[def] identifier[calculate_Y] ( identifier[sub_network] , identifier[skip_pre] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[skip_pre] :
identifier[calculate_dependent_values] ( identifier[sub_network] . identifier[network] )
keyword[if] identifier[sub_network] . identifier[network] . identifier[sub_networks] . identifier[at] [ identifier[sub_network] . identifier[name] , literal[string] ]!= literal[string] :
identifier[logger] . identifier[warning] ( literal[string] )
keyword[return]
identifier[branches] = identifier[sub_network] . identifier[branches] ()
identifier[buses_o] = identifier[sub_network] . identifier[buses_o]
identifier[network] = identifier[sub_network] . identifier[network]
identifier[num_branches] = identifier[len] ( identifier[branches] )
identifier[num_buses] = identifier[len] ( identifier[buses_o] )
identifier[y_se] = literal[int] /( identifier[branches] [ literal[string] ]+ literal[int] * identifier[branches] [ literal[string] ])
identifier[y_sh] = identifier[branches] [ literal[string] ]+ literal[int] * identifier[branches] [ literal[string] ]
identifier[tau] = identifier[branches] [ literal[string] ]. identifier[fillna] ( literal[int] )
identifier[tau] [ identifier[tau] == literal[int] ]= literal[int]
identifier[tau_hv] = identifier[pd] . identifier[Series] ( literal[int] , identifier[branches] . identifier[index] )
identifier[tau_hv] [ identifier[branches] . identifier[tap_side] == literal[int] ]= identifier[tau] [ identifier[branches] . identifier[tap_side] == literal[int] ]
identifier[tau_lv] = identifier[pd] . identifier[Series] ( literal[int] , identifier[branches] . identifier[index] )
identifier[tau_lv] [ identifier[branches] . identifier[tap_side] == literal[int] ]= identifier[tau] [ identifier[branches] . identifier[tap_side] == literal[int] ]
identifier[phase_shift] = identifier[np] . identifier[exp] ( literal[int] * identifier[branches] [ literal[string] ]. identifier[fillna] ( literal[int] )* identifier[np] . identifier[pi] / literal[int] )
identifier[Y11] =( identifier[y_se] + literal[int] * identifier[y_sh] )/ identifier[tau_lv] ** literal[int]
identifier[Y10] =- identifier[y_se] / identifier[tau_lv] / identifier[tau_hv] / identifier[phase_shift]
identifier[Y01] =- identifier[y_se] / identifier[tau_lv] / identifier[tau_hv] / identifier[np] . identifier[conj] ( identifier[phase_shift] )
identifier[Y00] =( identifier[y_se] + literal[int] * identifier[y_sh] )/ identifier[tau_hv] ** literal[int]
identifier[b_sh] = identifier[network] . identifier[shunt_impedances] . identifier[b_pu] . identifier[groupby] ( identifier[network] . identifier[shunt_impedances] . identifier[bus] ). identifier[sum] (). identifier[reindex] ( identifier[buses_o] , identifier[fill_value] = literal[int] )
identifier[g_sh] = identifier[network] . identifier[shunt_impedances] . identifier[g_pu] . identifier[groupby] ( identifier[network] . identifier[shunt_impedances] . identifier[bus] ). identifier[sum] (). identifier[reindex] ( identifier[buses_o] , identifier[fill_value] = literal[int] )
identifier[Y_sh] = identifier[g_sh] + literal[int] * identifier[b_sh]
identifier[bus0] = identifier[buses_o] . identifier[get_indexer] ( identifier[branches] . identifier[bus0] )
identifier[bus1] = identifier[buses_o] . identifier[get_indexer] ( identifier[branches] . identifier[bus1] )
identifier[C0] = identifier[csr_matrix] (( identifier[ones] ( identifier[num_branches] ),( identifier[np] . identifier[arange] ( identifier[num_branches] ), identifier[bus0] )),( identifier[num_branches] , identifier[num_buses] ))
identifier[C1] = identifier[csr_matrix] (( identifier[ones] ( identifier[num_branches] ),( identifier[np] . identifier[arange] ( identifier[num_branches] ), identifier[bus1] )),( identifier[num_branches] , identifier[num_buses] ))
identifier[i] = identifier[r_] [ identifier[np] . identifier[arange] ( identifier[num_branches] ), identifier[np] . identifier[arange] ( identifier[num_branches] )]
identifier[sub_network] . identifier[Y0] = identifier[csr_matrix] (( identifier[r_] [ identifier[Y00] , identifier[Y01] ],( identifier[i] , identifier[r_] [ identifier[bus0] , identifier[bus1] ])),( identifier[num_branches] , identifier[num_buses] ))
identifier[sub_network] . identifier[Y1] = identifier[csr_matrix] (( identifier[r_] [ identifier[Y10] , identifier[Y11] ],( identifier[i] , identifier[r_] [ identifier[bus0] , identifier[bus1] ])),( identifier[num_branches] , identifier[num_buses] ))
identifier[sub_network] . identifier[Y] = identifier[C0] . identifier[T] * identifier[sub_network] . identifier[Y0] + identifier[C1] . identifier[T] * identifier[sub_network] . identifier[Y1] + identifier[csr_matrix] (( identifier[Y_sh] ,( identifier[np] . identifier[arange] ( identifier[num_buses] ), identifier[np] . identifier[arange] ( identifier[num_buses] )))) | def calculate_Y(sub_network, skip_pre=False):
"""Calculate bus admittance matrices for AC sub-networks."""
if not skip_pre:
calculate_dependent_values(sub_network.network) # depends on [control=['if'], data=[]]
if sub_network.network.sub_networks.at[sub_network.name, 'carrier'] != 'AC':
logger.warning('Non-AC networks not supported for Y!')
return # depends on [control=['if'], data=[]]
branches = sub_network.branches()
buses_o = sub_network.buses_o
network = sub_network.network
#following leans heavily on pypower.makeYbus
#Copyright Richard Lincoln, Ray Zimmerman, BSD-style licence
num_branches = len(branches)
num_buses = len(buses_o)
y_se = 1 / (branches['r_pu'] + 1j * branches['x_pu'])
y_sh = branches['g_pu'] + 1j * branches['b_pu']
tau = branches['tap_ratio'].fillna(1.0)
#catch some transformers falsely set with tau = 0 by pypower
tau[tau == 0] = 1.0
#define the HV tap ratios
tau_hv = pd.Series(1.0, branches.index)
tau_hv[branches.tap_side == 0] = tau[branches.tap_side == 0]
#define the LV tap ratios
tau_lv = pd.Series(1.0, branches.index)
tau_lv[branches.tap_side == 1] = tau[branches.tap_side == 1]
phase_shift = np.exp(1j * branches['phase_shift'].fillna(0.0) * np.pi / 180.0)
#build the admittance matrix elements for each branch
Y11 = (y_se + 0.5 * y_sh) / tau_lv ** 2
Y10 = -y_se / tau_lv / tau_hv / phase_shift
Y01 = -y_se / tau_lv / tau_hv / np.conj(phase_shift)
Y00 = (y_se + 0.5 * y_sh) / tau_hv ** 2
#bus shunt impedances
b_sh = network.shunt_impedances.b_pu.groupby(network.shunt_impedances.bus).sum().reindex(buses_o, fill_value=0.0)
g_sh = network.shunt_impedances.g_pu.groupby(network.shunt_impedances.bus).sum().reindex(buses_o, fill_value=0.0)
Y_sh = g_sh + 1j * b_sh
#get bus indices
bus0 = buses_o.get_indexer(branches.bus0)
bus1 = buses_o.get_indexer(branches.bus1)
#connection matrices
C0 = csr_matrix((ones(num_branches), (np.arange(num_branches), bus0)), (num_branches, num_buses))
C1 = csr_matrix((ones(num_branches), (np.arange(num_branches), bus1)), (num_branches, num_buses))
#build Y{0,1} such that Y{0,1} * V is the vector complex branch currents
i = r_[np.arange(num_branches), np.arange(num_branches)]
sub_network.Y0 = csr_matrix((r_[Y00, Y01], (i, r_[bus0, bus1])), (num_branches, num_buses))
sub_network.Y1 = csr_matrix((r_[Y10, Y11], (i, r_[bus0, bus1])), (num_branches, num_buses))
#now build bus admittance matrix
sub_network.Y = C0.T * sub_network.Y0 + C1.T * sub_network.Y1 + csr_matrix((Y_sh, (np.arange(num_buses), np.arange(num_buses)))) |
def run_executable(repo, args, includes):
"""
Run the executable and capture the input and output...
"""
# Get platform information
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
platform_metadata = repomgr.get_metadata()
print("Obtaining Commit Information")
(executable, commiturl) = \
find_executable_commitpath(repo, args)
# Create a local directory
tmpdir = tempfile.mkdtemp()
# Construct the strace command
print("Running the command")
strace_filename = os.path.join(tmpdir,'strace.out.txt')
cmd = ["strace.py", "-f", "-o", strace_filename,
"-s", "1024", "-q", "--"] + args
# Run the command
p = subprocess.Popen(cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate()
# Capture the stdout/stderr
stdout = os.path.join(tmpdir, 'stdout.log.txt')
with open(stdout, 'w') as fd:
fd.write(out.decode('utf-8'))
stderr = os.path.join(tmpdir, 'stderr.log.txt')
with open(stderr, 'w') as fd:
fd.write(err.decode('utf-8'))
# Check the strace output
files = extract_files(strace_filename, includes)
# Now insert the execution metadata
execution_metadata = {
'likelyexecutable': executable,
'commitpath': commiturl,
'args': args,
}
execution_metadata.update(platform_metadata)
for i in range(len(files)):
files[i]['execution_metadata'] = execution_metadata
return files | def function[run_executable, parameter[repo, args, includes]]:
constant[
Run the executable and capture the input and output...
]
variable[mgr] assign[=] call[name[plugins_get_mgr], parameter[]]
variable[repomgr] assign[=] call[name[mgr].get, parameter[]]
variable[platform_metadata] assign[=] call[name[repomgr].get_metadata, parameter[]]
call[name[print], parameter[constant[Obtaining Commit Information]]]
<ast.Tuple object at 0x7da1afea62c0> assign[=] call[name[find_executable_commitpath], parameter[name[repo], name[args]]]
variable[tmpdir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
call[name[print], parameter[constant[Running the command]]]
variable[strace_filename] assign[=] call[name[os].path.join, parameter[name[tmpdir], constant[strace.out.txt]]]
variable[cmd] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1afea63b0>, <ast.Constant object at 0x7da1afea6170>, <ast.Constant object at 0x7da1afea57e0>, <ast.Name object at 0x7da1afea4fd0>, <ast.Constant object at 0x7da1afea69b0>, <ast.Constant object at 0x7da1afea5e70>, <ast.Constant object at 0x7da1afea5810>, <ast.Constant object at 0x7da1afea6890>]] + name[args]]
variable[p] assign[=] call[name[subprocess].Popen, parameter[name[cmd]]]
<ast.Tuple object at 0x7da1afea5870> assign[=] call[name[p].communicate, parameter[]]
variable[stdout] assign[=] call[name[os].path.join, parameter[name[tmpdir], constant[stdout.log.txt]]]
with call[name[open], parameter[name[stdout], constant[w]]] begin[:]
call[name[fd].write, parameter[call[name[out].decode, parameter[constant[utf-8]]]]]
variable[stderr] assign[=] call[name[os].path.join, parameter[name[tmpdir], constant[stderr.log.txt]]]
with call[name[open], parameter[name[stderr], constant[w]]] begin[:]
call[name[fd].write, parameter[call[name[err].decode, parameter[constant[utf-8]]]]]
variable[files] assign[=] call[name[extract_files], parameter[name[strace_filename], name[includes]]]
variable[execution_metadata] assign[=] dictionary[[<ast.Constant object at 0x7da1afe39a80>, <ast.Constant object at 0x7da1afe39ea0>, <ast.Constant object at 0x7da1afe3a260>], [<ast.Name object at 0x7da1afe39ab0>, <ast.Name object at 0x7da1afe3ab90>, <ast.Name object at 0x7da1afe3b5e0>]]
call[name[execution_metadata].update, parameter[name[platform_metadata]]]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[files]]]]]] begin[:]
call[call[name[files]][name[i]]][constant[execution_metadata]] assign[=] name[execution_metadata]
return[name[files]] | keyword[def] identifier[run_executable] ( identifier[repo] , identifier[args] , identifier[includes] ):
literal[string]
identifier[mgr] = identifier[plugins_get_mgr] ()
identifier[repomgr] = identifier[mgr] . identifier[get] ( identifier[what] = literal[string] , identifier[name] = literal[string] )
identifier[platform_metadata] = identifier[repomgr] . identifier[get_metadata] ()
identifier[print] ( literal[string] )
( identifier[executable] , identifier[commiturl] )= identifier[find_executable_commitpath] ( identifier[repo] , identifier[args] )
identifier[tmpdir] = identifier[tempfile] . identifier[mkdtemp] ()
identifier[print] ( literal[string] )
identifier[strace_filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmpdir] , literal[string] )
identifier[cmd] =[ literal[string] , literal[string] , literal[string] , identifier[strace_filename] ,
literal[string] , literal[string] , literal[string] , literal[string] ]+ identifier[args]
identifier[p] = identifier[subprocess] . identifier[Popen] ( identifier[cmd] ,
identifier[stdout] = identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[subprocess] . identifier[PIPE] )
identifier[out] , identifier[err] = identifier[p] . identifier[communicate] ()
identifier[stdout] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmpdir] , literal[string] )
keyword[with] identifier[open] ( identifier[stdout] , literal[string] ) keyword[as] identifier[fd] :
identifier[fd] . identifier[write] ( identifier[out] . identifier[decode] ( literal[string] ))
identifier[stderr] = identifier[os] . identifier[path] . identifier[join] ( identifier[tmpdir] , literal[string] )
keyword[with] identifier[open] ( identifier[stderr] , literal[string] ) keyword[as] identifier[fd] :
identifier[fd] . identifier[write] ( identifier[err] . identifier[decode] ( literal[string] ))
identifier[files] = identifier[extract_files] ( identifier[strace_filename] , identifier[includes] )
identifier[execution_metadata] ={
literal[string] : identifier[executable] ,
literal[string] : identifier[commiturl] ,
literal[string] : identifier[args] ,
}
identifier[execution_metadata] . identifier[update] ( identifier[platform_metadata] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[files] )):
identifier[files] [ identifier[i] ][ literal[string] ]= identifier[execution_metadata]
keyword[return] identifier[files] | def run_executable(repo, args, includes):
"""
Run the executable and capture the input and output...
"""
# Get platform information
mgr = plugins_get_mgr()
repomgr = mgr.get(what='instrumentation', name='platform')
platform_metadata = repomgr.get_metadata()
print('Obtaining Commit Information')
(executable, commiturl) = find_executable_commitpath(repo, args)
# Create a local directory
tmpdir = tempfile.mkdtemp()
# Construct the strace command
print('Running the command')
strace_filename = os.path.join(tmpdir, 'strace.out.txt')
cmd = ['strace.py', '-f', '-o', strace_filename, '-s', '1024', '-q', '--'] + args
# Run the command
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = p.communicate()
# Capture the stdout/stderr
stdout = os.path.join(tmpdir, 'stdout.log.txt')
with open(stdout, 'w') as fd:
fd.write(out.decode('utf-8')) # depends on [control=['with'], data=['fd']]
stderr = os.path.join(tmpdir, 'stderr.log.txt')
with open(stderr, 'w') as fd:
fd.write(err.decode('utf-8')) # depends on [control=['with'], data=['fd']]
# Check the strace output
files = extract_files(strace_filename, includes)
# Now insert the execution metadata
execution_metadata = {'likelyexecutable': executable, 'commitpath': commiturl, 'args': args}
execution_metadata.update(platform_metadata)
for i in range(len(files)):
files[i]['execution_metadata'] = execution_metadata # depends on [control=['for'], data=['i']]
return files |
def read_writer_config(config_files, loader=UnsafeLoader):
"""Read the writer `config_files` and return the info extracted."""
conf = {}
LOG.debug('Reading %s', str(config_files))
for config_file in config_files:
with open(config_file) as fd:
conf.update(yaml.load(fd.read(), Loader=loader))
try:
writer_info = conf['writer']
except KeyError:
raise KeyError(
"Malformed config file {}: missing writer 'writer'".format(
config_files))
writer_info['config_files'] = config_files
return writer_info | def function[read_writer_config, parameter[config_files, loader]]:
constant[Read the writer `config_files` and return the info extracted.]
variable[conf] assign[=] dictionary[[], []]
call[name[LOG].debug, parameter[constant[Reading %s], call[name[str], parameter[name[config_files]]]]]
for taget[name[config_file]] in starred[name[config_files]] begin[:]
with call[name[open], parameter[name[config_file]]] begin[:]
call[name[conf].update, parameter[call[name[yaml].load, parameter[call[name[fd].read, parameter[]]]]]]
<ast.Try object at 0x7da1b1d5f9d0>
call[name[writer_info]][constant[config_files]] assign[=] name[config_files]
return[name[writer_info]] | keyword[def] identifier[read_writer_config] ( identifier[config_files] , identifier[loader] = identifier[UnsafeLoader] ):
literal[string]
identifier[conf] ={}
identifier[LOG] . identifier[debug] ( literal[string] , identifier[str] ( identifier[config_files] ))
keyword[for] identifier[config_file] keyword[in] identifier[config_files] :
keyword[with] identifier[open] ( identifier[config_file] ) keyword[as] identifier[fd] :
identifier[conf] . identifier[update] ( identifier[yaml] . identifier[load] ( identifier[fd] . identifier[read] (), identifier[Loader] = identifier[loader] ))
keyword[try] :
identifier[writer_info] = identifier[conf] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[KeyError] (
literal[string] . identifier[format] (
identifier[config_files] ))
identifier[writer_info] [ literal[string] ]= identifier[config_files]
keyword[return] identifier[writer_info] | def read_writer_config(config_files, loader=UnsafeLoader):
"""Read the writer `config_files` and return the info extracted."""
conf = {}
LOG.debug('Reading %s', str(config_files))
for config_file in config_files:
with open(config_file) as fd:
conf.update(yaml.load(fd.read(), Loader=loader)) # depends on [control=['with'], data=['fd']] # depends on [control=['for'], data=['config_file']]
try:
writer_info = conf['writer'] # depends on [control=['try'], data=[]]
except KeyError:
raise KeyError("Malformed config file {}: missing writer 'writer'".format(config_files)) # depends on [control=['except'], data=[]]
writer_info['config_files'] = config_files
return writer_info |
def last_location_of_minimum(x):
"""
Returns the last location of the minimal value of x.
The position is calculated relatively to the length of x.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
"""
x = np.asarray(x)
return 1.0 - np.argmin(x[::-1]) / len(x) if len(x) > 0 else np.NaN | def function[last_location_of_minimum, parameter[x]]:
constant[
Returns the last location of the minimal value of x.
The position is calculated relatively to the length of x.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
]
variable[x] assign[=] call[name[np].asarray, parameter[name[x]]]
return[<ast.IfExp object at 0x7da2047e9180>] | keyword[def] identifier[last_location_of_minimum] ( identifier[x] ):
literal[string]
identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] )
keyword[return] literal[int] - identifier[np] . identifier[argmin] ( identifier[x] [::- literal[int] ])/ identifier[len] ( identifier[x] ) keyword[if] identifier[len] ( identifier[x] )> literal[int] keyword[else] identifier[np] . identifier[NaN] | def last_location_of_minimum(x):
"""
Returns the last location of the minimal value of x.
The position is calculated relatively to the length of x.
:param x: the time series to calculate the feature of
:type x: numpy.ndarray
:return: the value of this feature
:return type: float
"""
x = np.asarray(x)
return 1.0 - np.argmin(x[::-1]) / len(x) if len(x) > 0 else np.NaN |
def cycle_windows(tree, direction):
"""
Cycle through windows of the current workspace
"""
wanted = {
"orientation": ("vertical" if direction in ("up", "down")
else "horizontal"),
"direction": (1 if direction in ("down", "right")
else -1),
}
split = find_parent_split(tree.focused.parent, wanted["orientation"])
if split:
# Get the next child given the direction
child_ids = [child.id for child in split.children]
focus_idx = child_ids.index(split.focused_child.id)
next_idx = (focus_idx + wanted['direction']) % len(child_ids)
next_node = split.children[next_idx]
return find_focusable(next_node)
return None | def function[cycle_windows, parameter[tree, direction]]:
constant[
Cycle through windows of the current workspace
]
variable[wanted] assign[=] dictionary[[<ast.Constant object at 0x7da20c795ea0>, <ast.Constant object at 0x7da20c795f90>], [<ast.IfExp object at 0x7da20c794a60>, <ast.IfExp object at 0x7da20c795870>]]
variable[split] assign[=] call[name[find_parent_split], parameter[name[tree].focused.parent, call[name[wanted]][constant[orientation]]]]
if name[split] begin[:]
variable[child_ids] assign[=] <ast.ListComp object at 0x7da20c796140>
variable[focus_idx] assign[=] call[name[child_ids].index, parameter[name[split].focused_child.id]]
variable[next_idx] assign[=] binary_operation[binary_operation[name[focus_idx] + call[name[wanted]][constant[direction]]] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[child_ids]]]]
variable[next_node] assign[=] call[name[split].children][name[next_idx]]
return[call[name[find_focusable], parameter[name[next_node]]]]
return[constant[None]] | keyword[def] identifier[cycle_windows] ( identifier[tree] , identifier[direction] ):
literal[string]
identifier[wanted] ={
literal[string] :( literal[string] keyword[if] identifier[direction] keyword[in] ( literal[string] , literal[string] )
keyword[else] literal[string] ),
literal[string] :( literal[int] keyword[if] identifier[direction] keyword[in] ( literal[string] , literal[string] )
keyword[else] - literal[int] ),
}
identifier[split] = identifier[find_parent_split] ( identifier[tree] . identifier[focused] . identifier[parent] , identifier[wanted] [ literal[string] ])
keyword[if] identifier[split] :
identifier[child_ids] =[ identifier[child] . identifier[id] keyword[for] identifier[child] keyword[in] identifier[split] . identifier[children] ]
identifier[focus_idx] = identifier[child_ids] . identifier[index] ( identifier[split] . identifier[focused_child] . identifier[id] )
identifier[next_idx] =( identifier[focus_idx] + identifier[wanted] [ literal[string] ])% identifier[len] ( identifier[child_ids] )
identifier[next_node] = identifier[split] . identifier[children] [ identifier[next_idx] ]
keyword[return] identifier[find_focusable] ( identifier[next_node] )
keyword[return] keyword[None] | def cycle_windows(tree, direction):
"""
Cycle through windows of the current workspace
"""
wanted = {'orientation': 'vertical' if direction in ('up', 'down') else 'horizontal', 'direction': 1 if direction in ('down', 'right') else -1}
split = find_parent_split(tree.focused.parent, wanted['orientation'])
if split:
# Get the next child given the direction
child_ids = [child.id for child in split.children]
focus_idx = child_ids.index(split.focused_child.id)
next_idx = (focus_idx + wanted['direction']) % len(child_ids)
next_node = split.children[next_idx]
return find_focusable(next_node) # depends on [control=['if'], data=[]]
return None |
def has_attr(cls, attr_name):
    """Check to see if an attribute is defined for the model.

    An attribute counts as defined when it is one of the declared
    ``attrs``, matches the primary key (single name or composite tuple),
    or is one of the timestamp fields.
    """
    if attr_name in cls.attrs:
        return True
    pk = cls.primary_key_name
    if isinstance(pk, str) and pk == attr_name:
        return True
    if isinstance(pk, tuple) and attr_name in pk:
        return True
    # Timestamps are optional; only consult them when configured.
    return cls.timestamps is not None and attr_name in cls.timestamps
constant[Check to see if an attribute is defined for the model.]
if compare[name[attr_name] in name[cls].attrs] begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da20c76c280> begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da20c76de10> begin[:]
return[constant[True]]
if <ast.BoolOp object at 0x7da20c76c190> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[has_attr] ( identifier[cls] , identifier[attr_name] ):
literal[string]
keyword[if] identifier[attr_name] keyword[in] identifier[cls] . identifier[attrs] :
keyword[return] keyword[True]
keyword[if] identifier[isinstance] ( identifier[cls] . identifier[primary_key_name] , identifier[str] ) keyword[and] identifier[cls] . identifier[primary_key_name] == identifier[attr_name] :
keyword[return] keyword[True]
keyword[if] identifier[isinstance] ( identifier[cls] . identifier[primary_key_name] , identifier[tuple] ) keyword[and] identifier[attr_name] keyword[in] identifier[cls] . identifier[primary_key_name] :
keyword[return] keyword[True]
keyword[if] identifier[cls] . identifier[timestamps] keyword[is] keyword[not] keyword[None] keyword[and] identifier[attr_name] keyword[in] identifier[cls] . identifier[timestamps] :
keyword[return] keyword[True]
keyword[return] keyword[False] | def has_attr(cls, attr_name):
"""Check to see if an attribute is defined for the model."""
if attr_name in cls.attrs:
return True # depends on [control=['if'], data=[]]
if isinstance(cls.primary_key_name, str) and cls.primary_key_name == attr_name:
return True # depends on [control=['if'], data=[]]
if isinstance(cls.primary_key_name, tuple) and attr_name in cls.primary_key_name:
return True # depends on [control=['if'], data=[]]
if cls.timestamps is not None and attr_name in cls.timestamps:
return True # depends on [control=['if'], data=[]]
return False |
def _compute_soil_amplification(self, C, vs30, pga_bc, mean):
    """
    Compute soil amplification, that is S term in equation (5), p. 2191,
    and add to mean values for non hard rock sites.
    """
    # The site terms (as defined in BA2008) are in natural log while
    # AB2006 works in log10, hence the log10(exp(...)) conversion of
    # both the linear and non-linear amplification terms.
    lin_amp = np.log10(np.exp(self._get_site_amplification_linear(vs30, C)))
    nonlin_amp = np.log10(
        np.exp(self._get_site_amplification_non_linear(vs30, pga_bc, C)))
    # Hard-rock sites (vs30 >= 2000 m/s) receive no soil amplification.
    soft_sites = vs30 < 2000.0
    mean[soft_sites] += lin_amp[soft_sites] + nonlin_amp[soft_sites]
constant[
Compute soil amplification, that is S term in equation (5), p. 2191,
and add to mean values for non hard rock sites.
]
variable[sal] assign[=] call[name[np].log10, parameter[call[name[np].exp, parameter[call[name[self]._get_site_amplification_linear, parameter[name[vs30], name[C]]]]]]]
variable[sanl] assign[=] call[name[np].log10, parameter[call[name[np].exp, parameter[call[name[self]._get_site_amplification_non_linear, parameter[name[vs30], name[pga_bc], name[C]]]]]]]
variable[idxs] assign[=] compare[name[vs30] less[<] constant[2000.0]]
call[name[mean]][name[idxs]] assign[=] binary_operation[binary_operation[call[name[mean]][name[idxs]] + call[name[sal]][name[idxs]]] + call[name[sanl]][name[idxs]]] | keyword[def] identifier[_compute_soil_amplification] ( identifier[self] , identifier[C] , identifier[vs30] , identifier[pga_bc] , identifier[mean] ):
literal[string]
identifier[sal] = identifier[np] . identifier[log10] ( identifier[np] . identifier[exp] ( identifier[self] . identifier[_get_site_amplification_linear] ( identifier[vs30] , identifier[C] )))
identifier[sanl] = identifier[np] . identifier[log10] ( identifier[np] . identifier[exp] (
identifier[self] . identifier[_get_site_amplification_non_linear] ( identifier[vs30] , identifier[pga_bc] , identifier[C] )))
identifier[idxs] = identifier[vs30] < literal[int]
identifier[mean] [ identifier[idxs] ]= identifier[mean] [ identifier[idxs] ]+ identifier[sal] [ identifier[idxs] ]+ identifier[sanl] [ identifier[idxs] ] | def _compute_soil_amplification(self, C, vs30, pga_bc, mean):
"""
Compute soil amplification, that is S term in equation (5), p. 2191,
and add to mean values for non hard rock sites.
"""
# convert from base e (as defined in BA2008) to base 10 (as used in
# AB2006)
sal = np.log10(np.exp(self._get_site_amplification_linear(vs30, C)))
sanl = np.log10(np.exp(self._get_site_amplification_non_linear(vs30, pga_bc, C)))
idxs = vs30 < 2000.0
mean[idxs] = mean[idxs] + sal[idxs] + sanl[idxs] |
def f0Morph(fromWavFN, pitchPath, stepList,
            outputName, doPlotPitchSteps, fromPitchData, toPitchData,
            outputMinPitch, outputMaxPitch, praatEXE, keepPitchRange=False,
            keepAveragePitch=False, sourcePitchDataList=None,
            minIntervalLength=0.3):
    '''
    Resynthesizes the pitch track from a source to a target wav file.

    fromPitchData and toPitchData should be segmented according to the
    portions that you want to morph.  The two lists must have the same
    number of sublists; each sublist holds (time, pitchValue) pairs
    (see the use of ``zip(*...)`` and ``row[0][0]`` below).

    Occurs over a three-step process.  This function can act as a
    template for how to use morph_sequence.morphChunkedDataLists to
    morph pitch contours or other data.

    By default, everything is morphed, but it is possible to maintain
    elements of the original speaker's pitch (average pitch and pitch
    range) by setting the appropriate flag.

    sourcePitchDataList: if passed in, any regions unspecified by
                         fromPitchData will be sampled from this list.
                         In essence, this allows one to leave segments
                         of the original pitch contour untouched by the
                         morph process.  Only gaps longer than
                         minIntervalLength (seconds, presumably --
                         TODO confirm units) are filled.

    Raises MissingPitchDataException when the chunked morph fails with
    an IndexError (e.g. mismatched sublist counts).
    '''
    fromDuration = audio_scripts.getSoundFileDuration(fromWavFN)
    # Find source pitch samples that will be mixed in with the target
    # pitch samples later
    nonMorphPitchData = []
    if sourcePitchDataList is not None:
        # Invert the morphed regions to obtain the untouched gaps, then
        # collect the original samples that fall inside each gap.
        timeList = sorted(fromPitchData)
        timeList = [(row[0][0], row[-1][0]) for row in timeList]
        endTime = sourcePitchDataList[-1][0]
        invertedTimeList = praatio_utils.invertIntervalList(timeList, endTime)
        invertedTimeList = [(start, stop) for start, stop in invertedTimeList
                            if stop - start > minIntervalLength]
        for start, stop in invertedTimeList:
            pitchList = praatio_utils.getValuesInInterval(sourcePitchDataList,
                                                          start,
                                                          stop)
            nonMorphPitchData.extend(pitchList)
    # Iterative pitch tier data path
    pitchTierPath = join(pitchPath, "pitchTiers")
    resynthesizedPath = join(pitchPath, "f0_resynthesized_wavs")
    for tmpPath in [pitchTierPath, resynthesizedPath]:
        utils.makeDir(tmpPath)
    # 1. Prepare the data for morphing - acquire the segments to merge
    # (Done elsewhere, with the input fed into this function)
    # 2. Morph the fromData to the toData
    try:
        finalOutputList = morph_sequence.morphChunkedDataLists(fromPitchData,
                                                               toPitchData,
                                                               stepList)
    except IndexError:
        raise MissingPitchDataException()
    # Flatten the chunked input for use in range/average adjustment and
    # plotting below.
    fromPitchData = [row for subList in fromPitchData for row in subList]
    toPitchData = [row for subList in toPitchData for row in subList]
    # 3. Save the pitch data and resynthesize the pitch
    mergedDataList = []
    for i in range(0, len(finalOutputList)):
        outputDataList = finalOutputList[i]
        # Optionally re-impose the source speaker's pitch range and/or
        # average pitch on the morphed contour.
        if keepPitchRange is True:
            outputDataList = morph_sequence.morphRange(outputDataList,
                                                       fromPitchData)
        if keepAveragePitch is True:
            outputDataList = morph_sequence.morphAveragePitch(outputDataList,
                                                              fromPitchData)
        if sourcePitchDataList is not None:
            # Merge back the untouched source samples, keeping the
            # contour in time order.
            outputDataList.extend(nonMorphPitchData)
            outputDataList.sort()
        stepOutputName = "%s_%0.3g" % (outputName, stepList[i])
        pitchFNFullPath = join(pitchTierPath, "%s.PitchTier" % stepOutputName)
        outputFN = join(resynthesizedPath, "%s.wav" % stepOutputName)
        pointObj = dataio.PointObject2D(outputDataList, dataio.PITCH,
                                        0, fromDuration)
        pointObj.save(pitchFNFullPath)
        outputTime, outputVals = zip(*outputDataList)
        mergedDataList.append((outputTime, outputVals))
        praat_scripts.resynthesizePitch(praatEXE, fromWavFN, pitchFNFullPath,
                                        outputFN, outputMinPitch,
                                        outputMaxPitch)
    # 4. (Optional) Plot the generated contours
    if doPlotPitchSteps:
        fromTime, fromVals = zip(*fromPitchData)
        toTime, toVals = zip(*toPitchData)
        plot_morphed_data.plotF0((fromTime, fromVals),
                                 (toTime, toVals),
                                 mergedDataList,
                                 join(pitchTierPath,
                                      "%s.png" % outputName))
constant[
Resynthesizes the pitch track from a source to a target wav file
fromPitchData and toPitchData should be segmented according to the
portions that you want to morph. The two lists must have the same
number of sublists.
Occurs over a three-step process.
This function can act as a template for how to use the function
morph_sequence.morphChunkedDataLists to morph pitch contours or
other data.
By default, everything is morphed, but it is possible to maintain elements
of the original speaker's pitch (average pitch and pitch range) by setting
the appropriate flag)
sourcePitchDataList: if passed in, any regions unspecified by
fromPitchData will be sampled from this list. In
essence, this allows one to leave segments of
the original pitch contour untouched by the
morph process.
]
variable[fromDuration] assign[=] call[name[audio_scripts].getSoundFileDuration, parameter[name[fromWavFN]]]
variable[nonMorphPitchData] assign[=] list[[]]
if compare[name[sourcePitchDataList] is_not constant[None]] begin[:]
variable[timeList] assign[=] call[name[sorted], parameter[name[fromPitchData]]]
variable[timeList] assign[=] <ast.ListComp object at 0x7da1b11dbc10>
variable[endTime] assign[=] call[call[name[sourcePitchDataList]][<ast.UnaryOp object at 0x7da1b11db2b0>]][constant[0]]
variable[invertedTimeList] assign[=] call[name[praatio_utils].invertIntervalList, parameter[name[timeList], name[endTime]]]
variable[invertedTimeList] assign[=] <ast.ListComp object at 0x7da1b11d9c30>
for taget[tuple[[<ast.Name object at 0x7da1b11d8070>, <ast.Name object at 0x7da1b11daad0>]]] in starred[name[invertedTimeList]] begin[:]
variable[pitchList] assign[=] call[name[praatio_utils].getValuesInInterval, parameter[name[sourcePitchDataList], name[start], name[stop]]]
call[name[nonMorphPitchData].extend, parameter[name[pitchList]]]
variable[pitchTierPath] assign[=] call[name[join], parameter[name[pitchPath], constant[pitchTiers]]]
variable[resynthesizedPath] assign[=] call[name[join], parameter[name[pitchPath], constant[f0_resynthesized_wavs]]]
for taget[name[tmpPath]] in starred[list[[<ast.Name object at 0x7da1b11d8610>, <ast.Name object at 0x7da1b11dbeb0>]]] begin[:]
call[name[utils].makeDir, parameter[name[tmpPath]]]
<ast.Try object at 0x7da1b11d83a0>
variable[fromPitchData] assign[=] <ast.ListComp object at 0x7da1b1107f70>
variable[toPitchData] assign[=] <ast.ListComp object at 0x7da1b1106950>
variable[mergedDataList] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[finalOutputList]]]]]] begin[:]
variable[outputDataList] assign[=] call[name[finalOutputList]][name[i]]
if compare[name[keepPitchRange] is constant[True]] begin[:]
variable[outputDataList] assign[=] call[name[morph_sequence].morphRange, parameter[name[outputDataList], name[fromPitchData]]]
if compare[name[keepAveragePitch] is constant[True]] begin[:]
variable[outputDataList] assign[=] call[name[morph_sequence].morphAveragePitch, parameter[name[outputDataList], name[fromPitchData]]]
if compare[name[sourcePitchDataList] is_not constant[None]] begin[:]
call[name[outputDataList].extend, parameter[name[nonMorphPitchData]]]
call[name[outputDataList].sort, parameter[]]
variable[stepOutputName] assign[=] binary_operation[constant[%s_%0.3g] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1104d60>, <ast.Subscript object at 0x7da1b11044c0>]]]
variable[pitchFNFullPath] assign[=] call[name[join], parameter[name[pitchTierPath], binary_operation[constant[%s.PitchTier] <ast.Mod object at 0x7da2590d6920> name[stepOutputName]]]]
variable[outputFN] assign[=] call[name[join], parameter[name[resynthesizedPath], binary_operation[constant[%s.wav] <ast.Mod object at 0x7da2590d6920> name[stepOutputName]]]]
variable[pointObj] assign[=] call[name[dataio].PointObject2D, parameter[name[outputDataList], name[dataio].PITCH, constant[0], name[fromDuration]]]
call[name[pointObj].save, parameter[name[pitchFNFullPath]]]
<ast.Tuple object at 0x7da1b1016b00> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b1017580>]]
call[name[mergedDataList].append, parameter[tuple[[<ast.Name object at 0x7da1b1016e00>, <ast.Name object at 0x7da1b10175e0>]]]]
call[name[praat_scripts].resynthesizePitch, parameter[name[praatEXE], name[fromWavFN], name[pitchFNFullPath], name[outputFN], name[outputMinPitch], name[outputMaxPitch]]]
if name[doPlotPitchSteps] begin[:]
<ast.Tuple object at 0x7da1b1017610> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b10175b0>]]
<ast.Tuple object at 0x7da1b11ef640> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b11eeb90>]]
call[name[plot_morphed_data].plotF0, parameter[tuple[[<ast.Name object at 0x7da1b11edc90>, <ast.Name object at 0x7da1b11ef850>]], tuple[[<ast.Name object at 0x7da1b11ed240>, <ast.Name object at 0x7da1b11ecd00>]], name[mergedDataList], call[name[join], parameter[name[pitchTierPath], binary_operation[constant[%s.png] <ast.Mod object at 0x7da2590d6920> name[outputName]]]]]] | keyword[def] identifier[f0Morph] ( identifier[fromWavFN] , identifier[pitchPath] , identifier[stepList] ,
identifier[outputName] , identifier[doPlotPitchSteps] , identifier[fromPitchData] , identifier[toPitchData] ,
identifier[outputMinPitch] , identifier[outputMaxPitch] , identifier[praatEXE] , identifier[keepPitchRange] = keyword[False] ,
identifier[keepAveragePitch] = keyword[False] , identifier[sourcePitchDataList] = keyword[None] ,
identifier[minIntervalLength] = literal[int] ):
literal[string]
identifier[fromDuration] = identifier[audio_scripts] . identifier[getSoundFileDuration] ( identifier[fromWavFN] )
identifier[nonMorphPitchData] =[]
keyword[if] identifier[sourcePitchDataList] keyword[is] keyword[not] keyword[None] :
identifier[timeList] = identifier[sorted] ( identifier[fromPitchData] )
identifier[timeList] =[( identifier[row] [ literal[int] ][ literal[int] ], identifier[row] [- literal[int] ][ literal[int] ]) keyword[for] identifier[row] keyword[in] identifier[timeList] ]
identifier[endTime] = identifier[sourcePitchDataList] [- literal[int] ][ literal[int] ]
identifier[invertedTimeList] = identifier[praatio_utils] . identifier[invertIntervalList] ( identifier[timeList] , identifier[endTime] )
identifier[invertedTimeList] =[( identifier[start] , identifier[stop] ) keyword[for] identifier[start] , identifier[stop] keyword[in] identifier[invertedTimeList]
keyword[if] identifier[stop] - identifier[start] > identifier[minIntervalLength] ]
keyword[for] identifier[start] , identifier[stop] keyword[in] identifier[invertedTimeList] :
identifier[pitchList] = identifier[praatio_utils] . identifier[getValuesInInterval] ( identifier[sourcePitchDataList] ,
identifier[start] ,
identifier[stop] )
identifier[nonMorphPitchData] . identifier[extend] ( identifier[pitchList] )
identifier[pitchTierPath] = identifier[join] ( identifier[pitchPath] , literal[string] )
identifier[resynthesizedPath] = identifier[join] ( identifier[pitchPath] , literal[string] )
keyword[for] identifier[tmpPath] keyword[in] [ identifier[pitchTierPath] , identifier[resynthesizedPath] ]:
identifier[utils] . identifier[makeDir] ( identifier[tmpPath] )
keyword[try] :
identifier[finalOutputList] = identifier[morph_sequence] . identifier[morphChunkedDataLists] ( identifier[fromPitchData] ,
identifier[toPitchData] ,
identifier[stepList] )
keyword[except] identifier[IndexError] :
keyword[raise] identifier[MissingPitchDataException] ()
identifier[fromPitchData] =[ identifier[row] keyword[for] identifier[subList] keyword[in] identifier[fromPitchData] keyword[for] identifier[row] keyword[in] identifier[subList] ]
identifier[toPitchData] =[ identifier[row] keyword[for] identifier[subList] keyword[in] identifier[toPitchData] keyword[for] identifier[row] keyword[in] identifier[subList] ]
identifier[mergedDataList] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[finalOutputList] )):
identifier[outputDataList] = identifier[finalOutputList] [ identifier[i] ]
keyword[if] identifier[keepPitchRange] keyword[is] keyword[True] :
identifier[outputDataList] = identifier[morph_sequence] . identifier[morphRange] ( identifier[outputDataList] ,
identifier[fromPitchData] )
keyword[if] identifier[keepAveragePitch] keyword[is] keyword[True] :
identifier[outputDataList] = identifier[morph_sequence] . identifier[morphAveragePitch] ( identifier[outputDataList] ,
identifier[fromPitchData] )
keyword[if] identifier[sourcePitchDataList] keyword[is] keyword[not] keyword[None] :
identifier[outputDataList] . identifier[extend] ( identifier[nonMorphPitchData] )
identifier[outputDataList] . identifier[sort] ()
identifier[stepOutputName] = literal[string] %( identifier[outputName] , identifier[stepList] [ identifier[i] ])
identifier[pitchFNFullPath] = identifier[join] ( identifier[pitchTierPath] , literal[string] % identifier[stepOutputName] )
identifier[outputFN] = identifier[join] ( identifier[resynthesizedPath] , literal[string] % identifier[stepOutputName] )
identifier[pointObj] = identifier[dataio] . identifier[PointObject2D] ( identifier[outputDataList] , identifier[dataio] . identifier[PITCH] ,
literal[int] , identifier[fromDuration] )
identifier[pointObj] . identifier[save] ( identifier[pitchFNFullPath] )
identifier[outputTime] , identifier[outputVals] = identifier[zip] (* identifier[outputDataList] )
identifier[mergedDataList] . identifier[append] (( identifier[outputTime] , identifier[outputVals] ))
identifier[praat_scripts] . identifier[resynthesizePitch] ( identifier[praatEXE] , identifier[fromWavFN] , identifier[pitchFNFullPath] ,
identifier[outputFN] , identifier[outputMinPitch] ,
identifier[outputMaxPitch] )
keyword[if] identifier[doPlotPitchSteps] :
identifier[fromTime] , identifier[fromVals] = identifier[zip] (* identifier[fromPitchData] )
identifier[toTime] , identifier[toVals] = identifier[zip] (* identifier[toPitchData] )
identifier[plot_morphed_data] . identifier[plotF0] (( identifier[fromTime] , identifier[fromVals] ),
( identifier[toTime] , identifier[toVals] ),
identifier[mergedDataList] ,
identifier[join] ( identifier[pitchTierPath] ,
literal[string] % identifier[outputName] )) | def f0Morph(fromWavFN, pitchPath, stepList, outputName, doPlotPitchSteps, fromPitchData, toPitchData, outputMinPitch, outputMaxPitch, praatEXE, keepPitchRange=False, keepAveragePitch=False, sourcePitchDataList=None, minIntervalLength=0.3):
"""
Resynthesizes the pitch track from a source to a target wav file
fromPitchData and toPitchData should be segmented according to the
portions that you want to morph. The two lists must have the same
number of sublists.
Occurs over a three-step process.
This function can act as a template for how to use the function
morph_sequence.morphChunkedDataLists to morph pitch contours or
other data.
By default, everything is morphed, but it is possible to maintain elements
of the original speaker's pitch (average pitch and pitch range) by setting
the appropriate flag)
sourcePitchDataList: if passed in, any regions unspecified by
fromPitchData will be sampled from this list. In
essence, this allows one to leave segments of
the original pitch contour untouched by the
morph process.
"""
fromDuration = audio_scripts.getSoundFileDuration(fromWavFN)
# Find source pitch samples that will be mixed in with the target
# pitch samples later
nonMorphPitchData = []
if sourcePitchDataList is not None:
timeList = sorted(fromPitchData)
timeList = [(row[0][0], row[-1][0]) for row in timeList]
endTime = sourcePitchDataList[-1][0]
invertedTimeList = praatio_utils.invertIntervalList(timeList, endTime)
invertedTimeList = [(start, stop) for (start, stop) in invertedTimeList if stop - start > minIntervalLength]
for (start, stop) in invertedTimeList:
pitchList = praatio_utils.getValuesInInterval(sourcePitchDataList, start, stop)
nonMorphPitchData.extend(pitchList) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['sourcePitchDataList']]
# Iterative pitch tier data path
pitchTierPath = join(pitchPath, 'pitchTiers')
resynthesizedPath = join(pitchPath, 'f0_resynthesized_wavs')
for tmpPath in [pitchTierPath, resynthesizedPath]:
utils.makeDir(tmpPath) # depends on [control=['for'], data=['tmpPath']]
# 1. Prepare the data for morphing - acquire the segments to merge
# (Done elsewhere, with the input fed into this function)
# 2. Morph the fromData to the toData
try:
finalOutputList = morph_sequence.morphChunkedDataLists(fromPitchData, toPitchData, stepList) # depends on [control=['try'], data=[]]
except IndexError:
raise MissingPitchDataException() # depends on [control=['except'], data=[]]
fromPitchData = [row for subList in fromPitchData for row in subList]
toPitchData = [row for subList in toPitchData for row in subList]
# 3. Save the pitch data and resynthesize the pitch
mergedDataList = []
for i in range(0, len(finalOutputList)):
outputDataList = finalOutputList[i]
if keepPitchRange is True:
outputDataList = morph_sequence.morphRange(outputDataList, fromPitchData) # depends on [control=['if'], data=[]]
if keepAveragePitch is True:
outputDataList = morph_sequence.morphAveragePitch(outputDataList, fromPitchData) # depends on [control=['if'], data=[]]
if sourcePitchDataList is not None:
outputDataList.extend(nonMorphPitchData)
outputDataList.sort() # depends on [control=['if'], data=[]]
stepOutputName = '%s_%0.3g' % (outputName, stepList[i])
pitchFNFullPath = join(pitchTierPath, '%s.PitchTier' % stepOutputName)
outputFN = join(resynthesizedPath, '%s.wav' % stepOutputName)
pointObj = dataio.PointObject2D(outputDataList, dataio.PITCH, 0, fromDuration)
pointObj.save(pitchFNFullPath)
(outputTime, outputVals) = zip(*outputDataList)
mergedDataList.append((outputTime, outputVals))
praat_scripts.resynthesizePitch(praatEXE, fromWavFN, pitchFNFullPath, outputFN, outputMinPitch, outputMaxPitch) # depends on [control=['for'], data=['i']]
# 4. (Optional) Plot the generated contours
if doPlotPitchSteps:
(fromTime, fromVals) = zip(*fromPitchData)
(toTime, toVals) = zip(*toPitchData)
plot_morphed_data.plotF0((fromTime, fromVals), (toTime, toVals), mergedDataList, join(pitchTierPath, '%s.png' % outputName)) # depends on [control=['if'], data=[]] |
def delete(self, obj, force=False):
    """Deletes all of the fields at the specified locations.
    args:
        ``obj=``\ *OBJECT*
            the object to remove the fields from
        ``force=``\ *BOOL*
            if True, missing attributes do not raise errors. Otherwise,
            the first failure raises an exception without making any
            changes to ``obj``.
    """
    # TODO: this could be a whole lot more efficient!
    if not force:
        # Pre-flight pass: resolve every selector first so a missing
        # field raises *before* any deletion mutates ``obj``.  The
        # FieldSelectorException from fs.get() propagates naturally;
        # the original try/except that only re-raised was redundant.
        for fs in self:
            fs.get(obj)
    for fs in self:
        try:
            fs.delete(obj)
        except FieldSelectorException:
            # Best-effort removal: with force=True (or after a passing
            # pre-flight check) individual failures are ignored.
            pass
constant[Deletes all of the fields at the specified locations.
args:
``obj=``\ *OBJECT*
the object to remove the fields from
``force=``\ *BOOL*
if True, missing attributes do not raise errors. Otherwise,
the first failure raises an exception without making any
changes to ``obj``.
]
if <ast.UnaryOp object at 0x7da20c6c4220> begin[:]
for taget[name[fs]] in starred[name[self]] begin[:]
<ast.Try object at 0x7da20c6c69e0>
for taget[name[fs]] in starred[name[self]] begin[:]
<ast.Try object at 0x7da20c6c55d0> | keyword[def] identifier[delete] ( identifier[self] , identifier[obj] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[force] :
keyword[for] identifier[fs] keyword[in] identifier[self] :
keyword[try] :
identifier[fs] . identifier[get] ( identifier[obj] )
keyword[except] identifier[FieldSelectorException] :
keyword[raise]
keyword[for] identifier[fs] keyword[in] identifier[self] :
keyword[try] :
identifier[fs] . identifier[delete] ( identifier[obj] )
keyword[except] identifier[FieldSelectorException] :
keyword[pass] | def delete(self, obj, force=False):
"""Deletes all of the fields at the specified locations.
args:
``obj=``\\ *OBJECT*
the object to remove the fields from
``force=``\\ *BOOL*
if True, missing attributes do not raise errors. Otherwise,
the first failure raises an exception without making any
changes to ``obj``.
"""
# TODO: this could be a whole lot more efficient!
if not force:
for fs in self:
try:
fs.get(obj) # depends on [control=['try'], data=[]]
except FieldSelectorException:
raise # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fs']] # depends on [control=['if'], data=[]]
for fs in self:
try:
fs.delete(obj) # depends on [control=['try'], data=[]]
except FieldSelectorException:
pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['fs']] |
def _save_or_delete_workflow(self):
    """
    Calls the real save method if we pass the beginning of the wf.

    Does nothing while the current task is still a ``Start`` task.  When
    the top-level ``End`` task is reached (i.e. not inside a subprocess),
    the workflow state is marked finished and stamped with a finish date;
    for non-ephemeral, non-external workflows the pending task
    invitations for this instance/role are deleted and the cache removal
    is logged (the cache entry itself is presumably dropped via
    save_workflow_to_cache once the state is finished -- TODO confirm).
    In every non-start step the serialized workflow is written to cache.
    """
    if not self.current.task_type.startswith('Start'):
        if self.current.task_name.startswith('End') and not self.are_we_in_subprocess():
            # Reached the real end of the workflow: flag completion and
            # record the finish time in the configured default format.
            self.wf_state['finished'] = True
            self.wf_state['finish_date'] = datetime.now().strftime(
                settings.DATETIME_DEFAULT_FORMAT)
            if self.current.workflow_name not in settings.EPHEMERAL_WORKFLOWS and not \
                    self.wf_state['in_external']:
                # Finished persistent workflow: remove its outstanding
                # task invitations for this role.
                wfi = WFCache(self.current).get_instance()
                TaskInvitation.objects.filter(instance=wfi, role=self.current.role,
                                              wf_name=wfi.wf.name).delete()
                self.current.log.info("Delete WFCache: %s %s" % (self.current.workflow_name,
                                                                 self.current.token))
        self.save_workflow_to_cache(self.serialize_workflow())
constant[
Calls the real save method if we pass the beggining of the wf
]
if <ast.UnaryOp object at 0x7da20c6aa1a0> begin[:]
if <ast.BoolOp object at 0x7da20c6a9480> begin[:]
call[name[self].wf_state][constant[finished]] assign[=] constant[True]
call[name[self].wf_state][constant[finish_date]] assign[=] call[call[name[datetime].now, parameter[]].strftime, parameter[name[settings].DATETIME_DEFAULT_FORMAT]]
if <ast.BoolOp object at 0x7da20c6aa410> begin[:]
variable[wfi] assign[=] call[call[name[WFCache], parameter[name[self].current]].get_instance, parameter[]]
call[call[name[TaskInvitation].objects.filter, parameter[]].delete, parameter[]]
call[name[self].current.log.info, parameter[binary_operation[constant[Delete WFCache: %s %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6aa620>, <ast.Attribute object at 0x7da2041dabf0>]]]]]
call[name[self].save_workflow_to_cache, parameter[call[name[self].serialize_workflow, parameter[]]]] | keyword[def] identifier[_save_or_delete_workflow] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[current] . identifier[task_type] . identifier[startswith] ( literal[string] ):
keyword[if] identifier[self] . identifier[current] . identifier[task_name] . identifier[startswith] ( literal[string] ) keyword[and] keyword[not] identifier[self] . identifier[are_we_in_subprocess] ():
identifier[self] . identifier[wf_state] [ literal[string] ]= keyword[True]
identifier[self] . identifier[wf_state] [ literal[string] ]= identifier[datetime] . identifier[now] (). identifier[strftime] (
identifier[settings] . identifier[DATETIME_DEFAULT_FORMAT] )
keyword[if] identifier[self] . identifier[current] . identifier[workflow_name] keyword[not] keyword[in] identifier[settings] . identifier[EPHEMERAL_WORKFLOWS] keyword[and] keyword[not] identifier[self] . identifier[wf_state] [ literal[string] ]:
identifier[wfi] = identifier[WFCache] ( identifier[self] . identifier[current] ). identifier[get_instance] ()
identifier[TaskInvitation] . identifier[objects] . identifier[filter] ( identifier[instance] = identifier[wfi] , identifier[role] = identifier[self] . identifier[current] . identifier[role] ,
identifier[wf_name] = identifier[wfi] . identifier[wf] . identifier[name] ). identifier[delete] ()
identifier[self] . identifier[current] . identifier[log] . identifier[info] ( literal[string] %( identifier[self] . identifier[current] . identifier[workflow_name] ,
identifier[self] . identifier[current] . identifier[token] ))
identifier[self] . identifier[save_workflow_to_cache] ( identifier[self] . identifier[serialize_workflow] ()) | def _save_or_delete_workflow(self):
"""
Calls the real save method if we pass the beggining of the wf
"""
if not self.current.task_type.startswith('Start'):
if self.current.task_name.startswith('End') and (not self.are_we_in_subprocess()):
self.wf_state['finished'] = True
self.wf_state['finish_date'] = datetime.now().strftime(settings.DATETIME_DEFAULT_FORMAT)
if self.current.workflow_name not in settings.EPHEMERAL_WORKFLOWS and (not self.wf_state['in_external']):
wfi = WFCache(self.current).get_instance()
TaskInvitation.objects.filter(instance=wfi, role=self.current.role, wf_name=wfi.wf.name).delete() # depends on [control=['if'], data=[]]
self.current.log.info('Delete WFCache: %s %s' % (self.current.workflow_name, self.current.token)) # depends on [control=['if'], data=[]]
self.save_workflow_to_cache(self.serialize_workflow()) # depends on [control=['if'], data=[]] |
def insert(queue, items, backend='sqlite'):
    '''
    Add an item or items to a queue
    CLI Example:
    .. code-block:: bash
        salt-run queue.insert myqueue myitem
        salt-run queue.insert myqueue "['item1', 'item2', 'item3']"
        salt-run queue.insert myqueue myitem backend=sqlite
        salt-run queue.insert myqueue "['item1', 'item2', 'item3']" backend=sqlite
    '''
    # Resolve the backend-specific insert function via the queue loader.
    backend_funcs = salt.loader.queues(__opts__)
    fun = '{0}.insert'.format(backend)
    if fun not in backend_funcs:
        raise SaltInvocationError('Function "{0}" is not available'.format(fun))
    return backend_funcs[fun](items=items, queue=queue)
constant[
Add an item or items to a queue
CLI Example:
.. code-block:: bash
salt-run queue.insert myqueue myitem
salt-run queue.insert myqueue "['item1', 'item2', 'item3']"
salt-run queue.insert myqueue myitem backend=sqlite
salt-run queue.insert myqueue "['item1', 'item2', 'item3']" backend=sqlite
]
variable[queue_funcs] assign[=] call[name[salt].loader.queues, parameter[name[__opts__]]]
variable[cmd] assign[=] call[constant[{0}.insert].format, parameter[name[backend]]]
if compare[name[cmd] <ast.NotIn object at 0x7da2590d7190> name[queue_funcs]] begin[:]
<ast.Raise object at 0x7da207f01090>
variable[ret] assign[=] call[call[name[queue_funcs]][name[cmd]], parameter[]]
return[name[ret]] | keyword[def] identifier[insert] ( identifier[queue] , identifier[items] , identifier[backend] = literal[string] ):
literal[string]
identifier[queue_funcs] = identifier[salt] . identifier[loader] . identifier[queues] ( identifier[__opts__] )
identifier[cmd] = literal[string] . identifier[format] ( identifier[backend] )
keyword[if] identifier[cmd] keyword[not] keyword[in] identifier[queue_funcs] :
keyword[raise] identifier[SaltInvocationError] ( literal[string] . identifier[format] ( identifier[cmd] ))
identifier[ret] = identifier[queue_funcs] [ identifier[cmd] ]( identifier[items] = identifier[items] , identifier[queue] = identifier[queue] )
keyword[return] identifier[ret] | def insert(queue, items, backend='sqlite'):
"""
Add an item or items to a queue
CLI Example:
.. code-block:: bash
salt-run queue.insert myqueue myitem
salt-run queue.insert myqueue "['item1', 'item2', 'item3']"
salt-run queue.insert myqueue myitem backend=sqlite
salt-run queue.insert myqueue "['item1', 'item2', 'item3']" backend=sqlite
"""
queue_funcs = salt.loader.queues(__opts__)
cmd = '{0}.insert'.format(backend)
if cmd not in queue_funcs:
raise SaltInvocationError('Function "{0}" is not available'.format(cmd)) # depends on [control=['if'], data=['cmd']]
ret = queue_funcs[cmd](items=items, queue=queue)
return ret |
def write_wave(path, audio, sample_rate):
    """Write mono 16-bit PCM audio to a .wav file.

    :param path: destination path (or a file-like object opened for writing)
    :param audio: raw PCM sample data as bytes (2 bytes per sample)
    :param sample_rate: sampling rate in Hz
    """
    writer = wave.open(path, 'wb')
    with contextlib.closing(writer) as wf:
        # Mono, 2-byte (16-bit) samples, uncompressed; the frame count is
        # filled in by writeframes() from the length of the data.
        wf.setparams((1, 2, sample_rate, 0, 'NONE', 'not compressed'))
        wf.writeframes(audio)
constant[Writes a .wav file.
Takes path, PCM audio data, and sample rate.
]
with call[name[contextlib].closing, parameter[call[name[wave].open, parameter[name[path], constant[wb]]]]] begin[:]
call[name[wf].setnchannels, parameter[constant[1]]]
call[name[wf].setsampwidth, parameter[constant[2]]]
call[name[wf].setframerate, parameter[name[sample_rate]]]
call[name[wf].writeframes, parameter[name[audio]]] | keyword[def] identifier[write_wave] ( identifier[path] , identifier[audio] , identifier[sample_rate] ):
literal[string]
keyword[with] identifier[contextlib] . identifier[closing] ( identifier[wave] . identifier[open] ( identifier[path] , literal[string] )) keyword[as] identifier[wf] :
identifier[wf] . identifier[setnchannels] ( literal[int] )
identifier[wf] . identifier[setsampwidth] ( literal[int] )
identifier[wf] . identifier[setframerate] ( identifier[sample_rate] )
identifier[wf] . identifier[writeframes] ( identifier[audio] ) | def write_wave(path, audio, sample_rate):
"""Writes a .wav file.
Takes path, PCM audio data, and sample rate.
"""
with contextlib.closing(wave.open(path, 'wb')) as wf:
wf.setnchannels(1)
wf.setsampwidth(2)
wf.setframerate(sample_rate)
wf.writeframes(audio) # depends on [control=['with'], data=['wf']] |
def predict(self, document_path: str, model_name: str, consent_id: str = None) -> Prediction:
    """Run inference and create prediction on document.

    Creates and uploads the document found at document_path, then runs
    inference with the model named model_name to produce a prediction
    for that document.

    >>> from las import ApiClient
    >>> api_client = ApiClient(endpoint='<api endpoint>')
    >>> api_client.predict(document_path='document.jpeg', model_name='invoice')

    :param document_path: Path to document to run inference on
    :type document_path: str
    :param model_name: The name of the model to use for inference
    :type model_name: str
    :param consent_id: An identifier to mark the owner of the document handle
    :type consent_id: str
    :return: Prediction on document
    :rtype: Prediction
    :raises InvalidCredentialsException: If the credentials are invalid
    :raises TooManyRequestsException: If limit of requests per second is reached
    :raises LimitExceededException: If limit of total requests per month is reached
    :raises requests.exception.RequestException: If error was raised by requests
    """
    if not consent_id:
        # No owner marker supplied; generate a fresh one for this document.
        consent_id = str(uuid4())
    content_type = self._get_content_type(document_path)
    document_id = self._upload_document(document_path, content_type, consent_id)
    response = self.post_predictions(document_id, model_name)
    return Prediction(document_id, consent_id, model_name, response)
constant[Run inference and create prediction on document.
This method takes care of creating and uploading a document specified by document_path.
as well as running inference using model specified by model_name to create prediction on the document.
>>> from las import ApiClient
>>> api_client = ApiClient(endpoint='<api endpoint>')
>>> api_client.predict(document_path='document.jpeg', model_name='invoice')
:param document_path: Path to document to run inference on
:type document_path: str
:param model_name: The name of the model to use for inference
:type model_name: str
:param consent_id: An identifier to mark the owner of the document handle
:type consent_id: str
:return: Prediction on document
:rtype: Prediction
:raises InvalidCredentialsException: If the credentials are invalid
:raises TooManyRequestsException: If limit of requests per second is reached
:raises LimitExceededException: If limit of total requests per month is reached
:raises requests.exception.RequestException: If error was raised by requests
]
variable[content_type] assign[=] call[name[self]._get_content_type, parameter[name[document_path]]]
variable[consent_id] assign[=] <ast.BoolOp object at 0x7da18eb550f0>
variable[document_id] assign[=] call[name[self]._upload_document, parameter[name[document_path], name[content_type], name[consent_id]]]
variable[prediction_response] assign[=] call[name[self].post_predictions, parameter[name[document_id], name[model_name]]]
return[call[name[Prediction], parameter[name[document_id], name[consent_id], name[model_name], name[prediction_response]]]] | keyword[def] identifier[predict] ( identifier[self] , identifier[document_path] : identifier[str] , identifier[model_name] : identifier[str] , identifier[consent_id] : identifier[str] = keyword[None] )-> identifier[Prediction] :
literal[string]
identifier[content_type] = identifier[self] . identifier[_get_content_type] ( identifier[document_path] )
identifier[consent_id] = identifier[consent_id] keyword[or] identifier[str] ( identifier[uuid4] ())
identifier[document_id] = identifier[self] . identifier[_upload_document] ( identifier[document_path] , identifier[content_type] , identifier[consent_id] )
identifier[prediction_response] = identifier[self] . identifier[post_predictions] ( identifier[document_id] , identifier[model_name] )
keyword[return] identifier[Prediction] ( identifier[document_id] , identifier[consent_id] , identifier[model_name] , identifier[prediction_response] ) | def predict(self, document_path: str, model_name: str, consent_id: str=None) -> Prediction:
"""Run inference and create prediction on document.
This method takes care of creating and uploading a document specified by document_path.
as well as running inference using model specified by model_name to create prediction on the document.
>>> from las import ApiClient
>>> api_client = ApiClient(endpoint='<api endpoint>')
>>> api_client.predict(document_path='document.jpeg', model_name='invoice')
:param document_path: Path to document to run inference on
:type document_path: str
:param model_name: The name of the model to use for inference
:type model_name: str
:param consent_id: An identifier to mark the owner of the document handle
:type consent_id: str
:return: Prediction on document
:rtype: Prediction
:raises InvalidCredentialsException: If the credentials are invalid
:raises TooManyRequestsException: If limit of requests per second is reached
:raises LimitExceededException: If limit of total requests per month is reached
:raises requests.exception.RequestException: If error was raised by requests
"""
content_type = self._get_content_type(document_path)
consent_id = consent_id or str(uuid4())
document_id = self._upload_document(document_path, content_type, consent_id)
prediction_response = self.post_predictions(document_id, model_name)
return Prediction(document_id, consent_id, model_name, prediction_response) |
def compile_change(self, blueprint, command, connection):
    """
    Compile a change column command into a series of SQL statements.

    :param blueprint: The blueprint
    :type blueprint: Blueprint

    :param command: The command
    :type command: Fluent

    :param connection: The connection
    :type connection: orator.connections.Connection

    :rtype: list
    """
    manager = connection.get_schema_manager()
    diff = self._get_changed_diff(blueprint, manager)
    if not diff:
        # No column actually changed; nothing to emit.
        return []

    statements = manager.get_database_platform().get_alter_table_sql(diff)
    # The platform may hand back a single statement or a list of them;
    # normalize to a list either way.
    return statements if isinstance(statements, list) else [statements]
constant[
Compile a change column command into a series of SQL statement.
:param blueprint: The blueprint
:type blueprint: Blueprint
:param command: The command
:type command: Fluent
:param connection: The connection
:type connection: orator.connections.Connection
:rtype: list
]
variable[schema] assign[=] call[name[connection].get_schema_manager, parameter[]]
variable[table_diff] assign[=] call[name[self]._get_changed_diff, parameter[name[blueprint], name[schema]]]
if name[table_diff] begin[:]
variable[sql] assign[=] call[call[name[schema].get_database_platform, parameter[]].get_alter_table_sql, parameter[name[table_diff]]]
if call[name[isinstance], parameter[name[sql], name[list]]] begin[:]
return[name[sql]]
return[list[[<ast.Name object at 0x7da18f58e800>]]]
return[list[[]]] | keyword[def] identifier[compile_change] ( identifier[self] , identifier[blueprint] , identifier[command] , identifier[connection] ):
literal[string]
identifier[schema] = identifier[connection] . identifier[get_schema_manager] ()
identifier[table_diff] = identifier[self] . identifier[_get_changed_diff] ( identifier[blueprint] , identifier[schema] )
keyword[if] identifier[table_diff] :
identifier[sql] = identifier[schema] . identifier[get_database_platform] (). identifier[get_alter_table_sql] ( identifier[table_diff] )
keyword[if] identifier[isinstance] ( identifier[sql] , identifier[list] ):
keyword[return] identifier[sql]
keyword[return] [ identifier[sql] ]
keyword[return] [] | def compile_change(self, blueprint, command, connection):
"""
Compile a change column command into a series of SQL statement.
:param blueprint: The blueprint
:type blueprint: Blueprint
:param command: The command
:type command: Fluent
:param connection: The connection
:type connection: orator.connections.Connection
:rtype: list
"""
schema = connection.get_schema_manager()
table_diff = self._get_changed_diff(blueprint, schema)
if table_diff:
sql = schema.get_database_platform().get_alter_table_sql(table_diff)
if isinstance(sql, list):
return sql # depends on [control=['if'], data=[]]
return [sql] # depends on [control=['if'], data=[]]
return [] |
def _write_jwks_to_file(path, jwks):
    """Serialize *jwks* (a dict) as JSON to *path*, creating parent dirs.

    :param path: File path to write to
    :param jwks: JWKS dictionary to serialize
    """
    head, tail = os.path.split(path)
    if head and not os.path.isdir(head):
        os.makedirs(head)
    fp = open(path, 'w')
    fp.write(json.dumps(jwks))
    fp.close()


def init_key_jar(public_path='', private_path='', key_defs='', owner='',
                 read_only=True):
    """
    A number of cases here:

    1. A private path is given

       a. The file exists and a JWKS is found there.
          From that JWKS a KeyJar instance is built.
       b.
          If the private path file doesn't exist the key definitions are
          used to build a KeyJar instance. A JWKS with the private keys are
          written to the file named in private_path.

       If a public path is also provided a JWKS with public keys are written
       to that file.

    2. A public path is given but no private path.

       a. If the public path file exists then the JWKS in that file is used to
          construct a KeyJar.
       b. If no such file exists then a KeyJar will be built
          based on the key_defs specification and a JWKS with the public keys
          will be written to the public path file.

    3. If neither a public path nor a private path is given then a KeyJar is
       built based on the key_defs specification and no JWKS will be written
       to file.

    In all cases a KeyJar instance is returned

    The keys stored in the KeyJar will be stored under the '' identifier.

    :param public_path: A file path to a file that contains a JWKS with public
        keys
    :param private_path: A file path to a file that contains a JWKS with
        private keys.
    :param key_defs: A definition of what keys should be created if they are
        not already available
    :param owner: The owner of the keys
    :param read_only: This function should not attempt to write anything
        to a file system.
    :return: An instantiated :py:class;`oidcmsg.key_jar.KeyJar` instance
    """
    if private_path:
        if os.path.isfile(private_path):
            # Case 1a: load the existing private JWKS.
            _jwks = open(private_path, 'r').read()
            _kj = KeyJar()
            _kj.import_jwks(json.loads(_jwks), owner)
            if key_defs:
                # Reconcile the stored keys with the wanted key definitions.
                _kb = _kj.issuer_keys[owner][0]
                _diff = key_diff(_kb, key_defs)
                if _diff:
                    if read_only:
                        logger.error('Not allowed to write to disc!')
                    else:
                        update_key_bundle(_kb, _diff)
                        _kj.issuer_keys[owner] = [_kb]
                        jwks = _kj.export_jwks(private=True, issuer=owner)
                        _write_jwks_to_file(private_path, jwks)
        else:
            # Case 1b: no file yet; build from definitions and persist.
            _kj = build_keyjar(key_defs, owner=owner)
            if not read_only:
                jwks = _kj.export_jwks(private=True, issuer=owner)
                _write_jwks_to_file(private_path, jwks)

        if public_path and not read_only:
            jwks = _kj.export_jwks(issuer=owner)  # public part
            _write_jwks_to_file(public_path, jwks)
    elif public_path:
        if os.path.isfile(public_path):
            # Case 2a: load the existing public JWKS.
            _jwks = open(public_path, 'r').read()
            _kj = KeyJar()
            _kj.import_jwks(json.loads(_jwks), owner)
            if key_defs:
                _kb = _kj.issuer_keys[owner][0]
                _diff = key_diff(_kb, key_defs)
                if _diff:
                    if read_only:
                        logger.error('Not allowed to write to disc!')
                    else:
                        update_key_bundle(_kb, _diff)
                        _kj.issuer_keys[owner] = [_kb]
                        jwks = _kj.export_jwks(issuer=owner)
                        # Bug fix: previously this wrote to private_path,
                        # which is '' in this branch; the refreshed public
                        # JWKS belongs at public_path.
                        _write_jwks_to_file(public_path, jwks)
        else:
            # Case 2b: no file yet; build from definitions and persist.
            _kj = build_keyjar(key_defs, owner=owner)
            if not read_only:
                _jwks = _kj.export_jwks(issuer=owner)
                _write_jwks_to_file(public_path, _jwks)
    else:
        # Case 3: in-memory only.
        _kj = build_keyjar(key_defs, owner=owner)

    return _kj
constant[
A number of cases here:
1. A private path is given
a. The file exists and a JWKS is found there.
From that JWKS a KeyJar instance is built.
b.
If the private path file doesn't exit the key definitions are
used to build a KeyJar instance. A JWKS with the private keys are
written to the file named in private_path.
If a public path is also provided a JWKS with public keys are written
to that file.
2. A public path is given but no private path.
a. If the public path file exists then the JWKS in that file is used to
construct a KeyJar.
b. If no such file exists then a KeyJar will be built
based on the key_defs specification and a JWKS with the public keys
will be written to the public path file.
3. If neither a public path nor a private path is given then a KeyJar is
built based on the key_defs specification and no JWKS will be written
to file.
In all cases a KeyJar instance is returned
The keys stored in the KeyJar will be stored under the '' identifier.
:param public_path: A file path to a file that contains a JWKS with public
keys
:param private_path: A file path to a file that contains a JWKS with
private keys.
:param key_defs: A definition of what keys should be created if they are
not already available
:param owner: The owner of the keys
:param read_only: This function should not attempt to write anything
to a file system.
:return: An instantiated :py:class;`oidcmsg.key_jar.KeyJar` instance
]
if name[private_path] begin[:]
if call[name[os].path.isfile, parameter[name[private_path]]] begin[:]
variable[_jwks] assign[=] call[call[name[open], parameter[name[private_path], constant[r]]].read, parameter[]]
variable[_kj] assign[=] call[name[KeyJar], parameter[]]
call[name[_kj].import_jwks, parameter[call[name[json].loads, parameter[name[_jwks]]], name[owner]]]
if name[key_defs] begin[:]
variable[_kb] assign[=] call[call[name[_kj].issuer_keys][name[owner]]][constant[0]]
variable[_diff] assign[=] call[name[key_diff], parameter[name[_kb], name[key_defs]]]
if name[_diff] begin[:]
if name[read_only] begin[:]
call[name[logger].error, parameter[constant[Not allowed to write to disc!]]]
if <ast.BoolOp object at 0x7da1b05c4400> begin[:]
variable[jwks] assign[=] call[name[_kj].export_jwks, parameter[]]
<ast.Tuple object at 0x7da1b0534b80> assign[=] call[name[os].path.split, parameter[name[public_path]]]
if <ast.BoolOp object at 0x7da1b0537760> begin[:]
call[name[os].makedirs, parameter[name[head]]]
variable[fp] assign[=] call[name[open], parameter[name[public_path], constant[w]]]
call[name[fp].write, parameter[call[name[json].dumps, parameter[name[jwks]]]]]
call[name[fp].close, parameter[]]
return[name[_kj]] | keyword[def] identifier[init_key_jar] ( identifier[public_path] = literal[string] , identifier[private_path] = literal[string] , identifier[key_defs] = literal[string] , identifier[owner] = literal[string] ,
identifier[read_only] = keyword[True] ):
literal[string]
keyword[if] identifier[private_path] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[private_path] ):
identifier[_jwks] = identifier[open] ( identifier[private_path] , literal[string] ). identifier[read] ()
identifier[_kj] = identifier[KeyJar] ()
identifier[_kj] . identifier[import_jwks] ( identifier[json] . identifier[loads] ( identifier[_jwks] ), identifier[owner] )
keyword[if] identifier[key_defs] :
identifier[_kb] = identifier[_kj] . identifier[issuer_keys] [ identifier[owner] ][ literal[int] ]
identifier[_diff] = identifier[key_diff] ( identifier[_kb] , identifier[key_defs] )
keyword[if] identifier[_diff] :
keyword[if] identifier[read_only] :
identifier[logger] . identifier[error] ( literal[string] )
keyword[else] :
identifier[update_key_bundle] ( identifier[_kb] , identifier[_diff] )
identifier[_kj] . identifier[issuer_keys] [ identifier[owner] ]=[ identifier[_kb] ]
identifier[jwks] = identifier[_kj] . identifier[export_jwks] ( identifier[private] = keyword[True] , identifier[issuer] = identifier[owner] )
identifier[fp] = identifier[open] ( identifier[private_path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[jwks] ))
identifier[fp] . identifier[close] ()
keyword[else] :
identifier[_kj] = identifier[build_keyjar] ( identifier[key_defs] , identifier[owner] = identifier[owner] )
keyword[if] keyword[not] identifier[read_only] :
identifier[jwks] = identifier[_kj] . identifier[export_jwks] ( identifier[private] = keyword[True] , identifier[issuer] = identifier[owner] )
identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[private_path] )
keyword[if] identifier[head] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[head] ):
identifier[os] . identifier[makedirs] ( identifier[head] )
identifier[fp] = identifier[open] ( identifier[private_path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[jwks] ))
identifier[fp] . identifier[close] ()
keyword[if] identifier[public_path] keyword[and] keyword[not] identifier[read_only] :
identifier[jwks] = identifier[_kj] . identifier[export_jwks] ( identifier[issuer] = identifier[owner] )
identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[public_path] )
keyword[if] identifier[head] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[head] ):
identifier[os] . identifier[makedirs] ( identifier[head] )
identifier[fp] = identifier[open] ( identifier[public_path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[jwks] ))
identifier[fp] . identifier[close] ()
keyword[elif] identifier[public_path] :
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[public_path] ):
identifier[_jwks] = identifier[open] ( identifier[public_path] , literal[string] ). identifier[read] ()
identifier[_kj] = identifier[KeyJar] ()
identifier[_kj] . identifier[import_jwks] ( identifier[json] . identifier[loads] ( identifier[_jwks] ), identifier[owner] )
keyword[if] identifier[key_defs] :
identifier[_kb] = identifier[_kj] . identifier[issuer_keys] [ identifier[owner] ][ literal[int] ]
identifier[_diff] = identifier[key_diff] ( identifier[_kb] , identifier[key_defs] )
keyword[if] identifier[_diff] :
keyword[if] identifier[read_only] :
identifier[logger] . identifier[error] ( literal[string] )
keyword[else] :
identifier[update_key_bundle] ( identifier[_kb] , identifier[_diff] )
identifier[_kj] . identifier[issuer_keys] [ identifier[owner] ]=[ identifier[_kb] ]
identifier[jwks] = identifier[_kj] . identifier[export_jwks] ( identifier[issuer] = identifier[owner] )
identifier[fp] = identifier[open] ( identifier[private_path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[jwks] ))
identifier[fp] . identifier[close] ()
keyword[else] :
identifier[_kj] = identifier[build_keyjar] ( identifier[key_defs] , identifier[owner] = identifier[owner] )
keyword[if] keyword[not] identifier[read_only] :
identifier[_jwks] = identifier[_kj] . identifier[export_jwks] ( identifier[issuer] = identifier[owner] )
identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[public_path] )
keyword[if] identifier[head] keyword[and] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[head] ):
identifier[os] . identifier[makedirs] ( identifier[head] )
identifier[fp] = identifier[open] ( identifier[public_path] , literal[string] )
identifier[fp] . identifier[write] ( identifier[json] . identifier[dumps] ( identifier[_jwks] ))
identifier[fp] . identifier[close] ()
keyword[else] :
identifier[_kj] = identifier[build_keyjar] ( identifier[key_defs] , identifier[owner] = identifier[owner] )
keyword[return] identifier[_kj] | def init_key_jar(public_path='', private_path='', key_defs='', owner='', read_only=True):
"""
A number of cases here:
1. A private path is given
a. The file exists and a JWKS is found there.
From that JWKS a KeyJar instance is built.
b.
If the private path file doesn't exit the key definitions are
used to build a KeyJar instance. A JWKS with the private keys are
written to the file named in private_path.
If a public path is also provided a JWKS with public keys are written
to that file.
2. A public path is given but no private path.
a. If the public path file exists then the JWKS in that file is used to
construct a KeyJar.
b. If no such file exists then a KeyJar will be built
based on the key_defs specification and a JWKS with the public keys
will be written to the public path file.
3. If neither a public path nor a private path is given then a KeyJar is
built based on the key_defs specification and no JWKS will be written
to file.
In all cases a KeyJar instance is returned
The keys stored in the KeyJar will be stored under the '' identifier.
:param public_path: A file path to a file that contains a JWKS with public
keys
:param private_path: A file path to a file that contains a JWKS with
private keys.
:param key_defs: A definition of what keys should be created if they are
not already available
:param owner: The owner of the keys
:param read_only: This function should not attempt to write anything
to a file system.
:return: An instantiated :py:class;`oidcmsg.key_jar.KeyJar` instance
"""
if private_path:
if os.path.isfile(private_path):
_jwks = open(private_path, 'r').read()
_kj = KeyJar()
_kj.import_jwks(json.loads(_jwks), owner)
if key_defs:
_kb = _kj.issuer_keys[owner][0]
_diff = key_diff(_kb, key_defs)
if _diff:
if read_only:
logger.error('Not allowed to write to disc!') # depends on [control=['if'], data=[]]
else:
update_key_bundle(_kb, _diff)
_kj.issuer_keys[owner] = [_kb]
jwks = _kj.export_jwks(private=True, issuer=owner)
fp = open(private_path, 'w')
fp.write(json.dumps(jwks))
fp.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
_kj = build_keyjar(key_defs, owner=owner)
if not read_only:
jwks = _kj.export_jwks(private=True, issuer=owner)
(head, tail) = os.path.split(private_path)
if head and (not os.path.isdir(head)):
os.makedirs(head) # depends on [control=['if'], data=[]]
fp = open(private_path, 'w')
fp.write(json.dumps(jwks))
fp.close() # depends on [control=['if'], data=[]]
if public_path and (not read_only):
jwks = _kj.export_jwks(issuer=owner) # public part
(head, tail) = os.path.split(public_path)
if head and (not os.path.isdir(head)):
os.makedirs(head) # depends on [control=['if'], data=[]]
fp = open(public_path, 'w')
fp.write(json.dumps(jwks))
fp.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif public_path:
if os.path.isfile(public_path):
_jwks = open(public_path, 'r').read()
_kj = KeyJar()
_kj.import_jwks(json.loads(_jwks), owner)
if key_defs:
_kb = _kj.issuer_keys[owner][0]
_diff = key_diff(_kb, key_defs)
if _diff:
if read_only:
logger.error('Not allowed to write to disc!') # depends on [control=['if'], data=[]]
else:
update_key_bundle(_kb, _diff)
_kj.issuer_keys[owner] = [_kb]
jwks = _kj.export_jwks(issuer=owner)
fp = open(private_path, 'w')
fp.write(json.dumps(jwks))
fp.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
_kj = build_keyjar(key_defs, owner=owner)
if not read_only:
_jwks = _kj.export_jwks(issuer=owner)
(head, tail) = os.path.split(public_path)
if head and (not os.path.isdir(head)):
os.makedirs(head) # depends on [control=['if'], data=[]]
fp = open(public_path, 'w')
fp.write(json.dumps(_jwks))
fp.close() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
_kj = build_keyjar(key_defs, owner=owner)
return _kj |
def intersection(l1, l2):
    '''Returns intersection of two lists. Assumes the lists are sorted by start positions'''
    if not l1 or not l2:
        return []

    result = []
    j = 0
    n = len(l2)

    for interval in l1:
        # Skip l2 entries that end before this interval even starts.
        while j < n and l2[j].end < interval.start:
            j += 1

        if j == n:
            break

        # Collect every overlap this interval has with the remaining l2 entries.
        while j < n and interval.intersects(l2[j]):
            result.append(interval.intersection(l2[j]))
            j += 1

        # Back up one position: the last overlapping entry of l2 may also
        # overlap the next interval in l1.
        j = max(0, j - 1)

    return result
constant[Returns intersection of two lists. Assumes the lists are sorted by start positions]
if <ast.BoolOp object at 0x7da1aff748e0> begin[:]
return[list[[]]]
variable[out] assign[=] list[[]]
variable[l2_pos] assign[=] constant[0]
for taget[name[l]] in starred[name[l1]] begin[:]
while <ast.BoolOp object at 0x7da1aff749d0> begin[:]
<ast.AugAssign object at 0x7da1aff74d60>
if compare[name[l2_pos] equal[==] call[name[len], parameter[name[l2]]]] begin[:]
break
while <ast.BoolOp object at 0x7da1aff76e00> begin[:]
call[name[out].append, parameter[call[name[l].intersection, parameter[call[name[l2]][name[l2_pos]]]]]]
<ast.AugAssign object at 0x7da1aff74f10>
variable[l2_pos] assign[=] call[name[max], parameter[constant[0], binary_operation[name[l2_pos] - constant[1]]]]
return[name[out]] | keyword[def] identifier[intersection] ( identifier[l1] , identifier[l2] ):
literal[string]
keyword[if] identifier[len] ( identifier[l1] )== literal[int] keyword[or] identifier[len] ( identifier[l2] )== literal[int] :
keyword[return] []
identifier[out] =[]
identifier[l2_pos] = literal[int]
keyword[for] identifier[l] keyword[in] identifier[l1] :
keyword[while] identifier[l2_pos] < identifier[len] ( identifier[l2] ) keyword[and] identifier[l2] [ identifier[l2_pos] ]. identifier[end] < identifier[l] . identifier[start] :
identifier[l2_pos] += literal[int]
keyword[if] identifier[l2_pos] == identifier[len] ( identifier[l2] ):
keyword[break]
keyword[while] identifier[l2_pos] < identifier[len] ( identifier[l2] ) keyword[and] identifier[l] . identifier[intersects] ( identifier[l2] [ identifier[l2_pos] ]):
identifier[out] . identifier[append] ( identifier[l] . identifier[intersection] ( identifier[l2] [ identifier[l2_pos] ]))
identifier[l2_pos] += literal[int]
identifier[l2_pos] = identifier[max] ( literal[int] , identifier[l2_pos] - literal[int] )
keyword[return] identifier[out] | def intersection(l1, l2):
"""Returns intersection of two lists. Assumes the lists are sorted by start positions"""
if len(l1) == 0 or len(l2) == 0:
return [] # depends on [control=['if'], data=[]]
out = []
l2_pos = 0
for l in l1:
while l2_pos < len(l2) and l2[l2_pos].end < l.start:
l2_pos += 1 # depends on [control=['while'], data=[]]
if l2_pos == len(l2):
break # depends on [control=['if'], data=[]]
while l2_pos < len(l2) and l.intersects(l2[l2_pos]):
out.append(l.intersection(l2[l2_pos]))
l2_pos += 1 # depends on [control=['while'], data=[]]
l2_pos = max(0, l2_pos - 1) # depends on [control=['for'], data=['l']]
return out |
def sin(x):
    """
    Sine
    """
    if not isinstance(x, UncertainFunction):
        # Plain numeric input: defer straight to numpy.
        return np.sin(x)
    # Apply the sine element-wise to the Monte Carlo sample points and wrap
    # the result back into an UncertainFunction.
    return UncertainFunction(np.sin(x._mcpts))
constant[
Sine
]
if call[name[isinstance], parameter[name[x], name[UncertainFunction]]] begin[:]
variable[mcpts] assign[=] call[name[np].sin, parameter[name[x]._mcpts]]
return[call[name[UncertainFunction], parameter[name[mcpts]]]] | keyword[def] identifier[sin] ( identifier[x] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[x] , identifier[UncertainFunction] ):
identifier[mcpts] = identifier[np] . identifier[sin] ( identifier[x] . identifier[_mcpts] )
keyword[return] identifier[UncertainFunction] ( identifier[mcpts] )
keyword[else] :
keyword[return] identifier[np] . identifier[sin] ( identifier[x] ) | def sin(x):
"""
Sine
"""
if isinstance(x, UncertainFunction):
mcpts = np.sin(x._mcpts)
return UncertainFunction(mcpts) # depends on [control=['if'], data=[]]
else:
return np.sin(x) |
def add(self, effect):
    """
    Add an LV2 plugin encapsulated as a jack client

    :param Lv2Effect effect: Effect that will be loaded as LV2 plugin encapsulated
    """
    assigned = self.instance_index
    self.instance_index = assigned + 1

    effect.instance = assigned
    self.connection.send(ProtocolParser.add(effect))
constant[
Add an LV2 plugin encapsulated as a jack client
:param Lv2Effect effect: Effect that will be loaded as LV2 plugin encapsulated
]
name[effect].instance assign[=] name[self].instance_index
<ast.AugAssign object at 0x7da18dc05de0>
call[name[self].connection.send, parameter[call[name[ProtocolParser].add, parameter[name[effect]]]]] | keyword[def] identifier[add] ( identifier[self] , identifier[effect] ):
literal[string]
identifier[effect] . identifier[instance] = identifier[self] . identifier[instance_index]
identifier[self] . identifier[instance_index] += literal[int]
identifier[self] . identifier[connection] . identifier[send] ( identifier[ProtocolParser] . identifier[add] ( identifier[effect] )) | def add(self, effect):
"""
Add an LV2 plugin encapsulated as a jack client
:param Lv2Effect effect: Effect that will be loaded as LV2 plugin encapsulated
"""
effect.instance = self.instance_index
self.instance_index += 1
self.connection.send(ProtocolParser.add(effect)) |
def fastp_read_gc_plot(self):
""" Make the read GC plot for Fastp """
data_labels, pdata = self.filter_pconfig_pdata_subplots(self.fastp_gc_content_data, 'Base Content Percent')
pconfig = {
'id': 'fastp-seq-content-gc-plot',
'title': 'Fastp: Read GC Content',
'xlab': 'Read Position',
'ylab': 'R1 Before filtering: Base Content Percent',
'ymax': 100,
'ymin': 0,
'xDecimals': False,
'yLabelFormat': '{value}%',
'tt_label': '{point.x}: {point.y:.2f}%',
'data_labels': data_labels
}
return linegraph.plot(pdata, pconfig) | def function[fastp_read_gc_plot, parameter[self]]:
constant[ Make the read GC plot for Fastp ]
<ast.Tuple object at 0x7da18eb56ef0> assign[=] call[name[self].filter_pconfig_pdata_subplots, parameter[name[self].fastp_gc_content_data, constant[Base Content Percent]]]
variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da18eb57550>, <ast.Constant object at 0x7da18eb56770>, <ast.Constant object at 0x7da18eb54b20>, <ast.Constant object at 0x7da18eb56bf0>, <ast.Constant object at 0x7da18eb56740>, <ast.Constant object at 0x7da18eb56ce0>, <ast.Constant object at 0x7da18eb54ee0>, <ast.Constant object at 0x7da18eb56b30>, <ast.Constant object at 0x7da18eb57f10>, <ast.Constant object at 0x7da18eb54c70>], [<ast.Constant object at 0x7da18eb56170>, <ast.Constant object at 0x7da18eb57d90>, <ast.Constant object at 0x7da18eb54df0>, <ast.Constant object at 0x7da18eb57340>, <ast.Constant object at 0x7da18eb56320>, <ast.Constant object at 0x7da18eb55030>, <ast.Constant object at 0x7da18eb55060>, <ast.Constant object at 0x7da18eb56080>, <ast.Constant object at 0x7da18eb56140>, <ast.Name object at 0x7da18eb57400>]]
return[call[name[linegraph].plot, parameter[name[pdata], name[pconfig]]]] | keyword[def] identifier[fastp_read_gc_plot] ( identifier[self] ):
literal[string]
identifier[data_labels] , identifier[pdata] = identifier[self] . identifier[filter_pconfig_pdata_subplots] ( identifier[self] . identifier[fastp_gc_content_data] , literal[string] )
identifier[pconfig] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : keyword[False] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : identifier[data_labels]
}
keyword[return] identifier[linegraph] . identifier[plot] ( identifier[pdata] , identifier[pconfig] ) | def fastp_read_gc_plot(self):
""" Make the read GC plot for Fastp """
(data_labels, pdata) = self.filter_pconfig_pdata_subplots(self.fastp_gc_content_data, 'Base Content Percent')
pconfig = {'id': 'fastp-seq-content-gc-plot', 'title': 'Fastp: Read GC Content', 'xlab': 'Read Position', 'ylab': 'R1 Before filtering: Base Content Percent', 'ymax': 100, 'ymin': 0, 'xDecimals': False, 'yLabelFormat': '{value}%', 'tt_label': '{point.x}: {point.y:.2f}%', 'data_labels': data_labels}
return linegraph.plot(pdata, pconfig) |
def hexblock_qword(cls, data, address = None,
bits = None,
separator = ' ',
width = 2):
"""
Dump a block of hexadecimal QWORDs from binary data.
@type data: str
@param data: Binary data.
@type address: str
@param address: Memory address where the data was read from.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@type separator: str
@param separator:
Separator between the hexadecimal representation of each QWORD.
@type width: int
@param width:
(Optional) Maximum number of QWORDs to convert per text line.
@rtype: str
@return: Multiline output text.
"""
return cls.hexblock_cb(cls.hexa_qword, data,
address, bits, width * 8,
cb_kwargs = {'separator': separator}) | def function[hexblock_qword, parameter[cls, data, address, bits, separator, width]]:
constant[
Dump a block of hexadecimal QWORDs from binary data.
@type data: str
@param data: Binary data.
@type address: str
@param address: Memory address where the data was read from.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@type separator: str
@param separator:
Separator between the hexadecimal representation of each QWORD.
@type width: int
@param width:
(Optional) Maximum number of QWORDs to convert per text line.
@rtype: str
@return: Multiline output text.
]
return[call[name[cls].hexblock_cb, parameter[name[cls].hexa_qword, name[data], name[address], name[bits], binary_operation[name[width] * constant[8]]]]] | keyword[def] identifier[hexblock_qword] ( identifier[cls] , identifier[data] , identifier[address] = keyword[None] ,
identifier[bits] = keyword[None] ,
identifier[separator] = literal[string] ,
identifier[width] = literal[int] ):
literal[string]
keyword[return] identifier[cls] . identifier[hexblock_cb] ( identifier[cls] . identifier[hexa_qword] , identifier[data] ,
identifier[address] , identifier[bits] , identifier[width] * literal[int] ,
identifier[cb_kwargs] ={ literal[string] : identifier[separator] }) | def hexblock_qword(cls, data, address=None, bits=None, separator=' ', width=2):
"""
Dump a block of hexadecimal QWORDs from binary data.
@type data: str
@param data: Binary data.
@type address: str
@param address: Memory address where the data was read from.
@type bits: int
@param bits:
(Optional) Number of bits of the target architecture.
The default is platform dependent. See: L{HexDump.address_size}
@type separator: str
@param separator:
Separator between the hexadecimal representation of each QWORD.
@type width: int
@param width:
(Optional) Maximum number of QWORDs to convert per text line.
@rtype: str
@return: Multiline output text.
"""
return cls.hexblock_cb(cls.hexa_qword, data, address, bits, width * 8, cb_kwargs={'separator': separator}) |
def autoparse(
func=None, *,
description=None,
epilog=None,
add_nos=False,
parser=None):
'''
This decorator converts a function that takes normal arguments into a
function which takes a single optional argument, argv, parses it using an
argparse.ArgumentParser, and calls the underlying function with the parsed
arguments. If it is not given, sys.argv[1:] is used. This is so that the
function can be used as a setuptools entry point, as well as a normal main
function. sys.argv[1:] is not evaluated until the function is called, to
allow injecting different arguments for testing.
It uses the argument signature of the function to create an
ArgumentParser. Parameters without defaults become positional parameters,
while parameters *with* defaults become --options. Use annotations to set
the type of the parameter.
The `desctiption` and `epilog` parameters corrospond to the same respective
argparse parameters. If no description is given, it defaults to the
decorated functions's docstring, if present.
If add_nos is True, every boolean option (that is, every parameter with a
default of True/False or a type of bool) will have a --no- version created
as well, which inverts the option. For instance, the --verbose option will
have a --no-verbose counterpart. These are not mutually exclusive-
whichever one appears last in the argument list will have precedence.
If a parser is given, it is used instead of one generated from the function
signature. In this case, no parser is created; instead, the given parser is
used to parse the argv argument. The parser's results' argument names must
match up with the parameter names of the decorated function.
The decorated function is attached to the result as the `func` attribute,
and the parser is attached as the `parser` attribute.
'''
# If @autoparse(...) is used instead of @autoparse
if func is None:
return lambda f: autoparse(
f, description=description,
epilog=epilog,
add_nos=add_nos,
parser=parser)
func_sig = signature(func)
docstr_description, docstr_epilog = parse_docstring(getdoc(func))
if parser is None:
parser = make_parser(
func_sig,
description or docstr_description,
epilog or docstr_epilog,
add_nos)
@wraps(func)
def autoparse_wrapper(argv=None):
if argv is None:
argv = sys.argv[1:]
# Get empty argument binding, to fill with parsed arguments. This
# object does all the heavy lifting of turning named arguments into
# into correctly bound *args and **kwargs.
parsed_args = func_sig.bind_partial()
parsed_args.arguments.update(vars(parser.parse_args(argv)))
return func(*parsed_args.args, **parsed_args.kwargs)
# TODO: attach an updated __signature__ to autoparse_wrapper, just in case.
# Attach the wrapped function and parser, and return the wrapper.
autoparse_wrapper.func = func
autoparse_wrapper.parser = parser
return autoparse_wrapper | def function[autoparse, parameter[func]]:
constant[
This decorator converts a function that takes normal arguments into a
function which takes a single optional argument, argv, parses it using an
argparse.ArgumentParser, and calls the underlying function with the parsed
arguments. If it is not given, sys.argv[1:] is used. This is so that the
function can be used as a setuptools entry point, as well as a normal main
function. sys.argv[1:] is not evaluated until the function is called, to
allow injecting different arguments for testing.
It uses the argument signature of the function to create an
ArgumentParser. Parameters without defaults become positional parameters,
while parameters *with* defaults become --options. Use annotations to set
the type of the parameter.
The `desctiption` and `epilog` parameters corrospond to the same respective
argparse parameters. If no description is given, it defaults to the
decorated functions's docstring, if present.
If add_nos is True, every boolean option (that is, every parameter with a
default of True/False or a type of bool) will have a --no- version created
as well, which inverts the option. For instance, the --verbose option will
have a --no-verbose counterpart. These are not mutually exclusive-
whichever one appears last in the argument list will have precedence.
If a parser is given, it is used instead of one generated from the function
signature. In this case, no parser is created; instead, the given parser is
used to parse the argv argument. The parser's results' argument names must
match up with the parameter names of the decorated function.
The decorated function is attached to the result as the `func` attribute,
and the parser is attached as the `parser` attribute.
]
if compare[name[func] is constant[None]] begin[:]
return[<ast.Lambda object at 0x7da18fe904f0>]
variable[func_sig] assign[=] call[name[signature], parameter[name[func]]]
<ast.Tuple object at 0x7da18fe91ae0> assign[=] call[name[parse_docstring], parameter[call[name[getdoc], parameter[name[func]]]]]
if compare[name[parser] is constant[None]] begin[:]
variable[parser] assign[=] call[name[make_parser], parameter[name[func_sig], <ast.BoolOp object at 0x7da18fe93ac0>, <ast.BoolOp object at 0x7da18fe900a0>, name[add_nos]]]
def function[autoparse_wrapper, parameter[argv]]:
if compare[name[argv] is constant[None]] begin[:]
variable[argv] assign[=] call[name[sys].argv][<ast.Slice object at 0x7da18fe93dc0>]
variable[parsed_args] assign[=] call[name[func_sig].bind_partial, parameter[]]
call[name[parsed_args].arguments.update, parameter[call[name[vars], parameter[call[name[parser].parse_args, parameter[name[argv]]]]]]]
return[call[name[func], parameter[<ast.Starred object at 0x7da18fe93580>]]]
name[autoparse_wrapper].func assign[=] name[func]
name[autoparse_wrapper].parser assign[=] name[parser]
return[name[autoparse_wrapper]] | keyword[def] identifier[autoparse] (
identifier[func] = keyword[None] ,*,
identifier[description] = keyword[None] ,
identifier[epilog] = keyword[None] ,
identifier[add_nos] = keyword[False] ,
identifier[parser] = keyword[None] ):
literal[string]
keyword[if] identifier[func] keyword[is] keyword[None] :
keyword[return] keyword[lambda] identifier[f] : identifier[autoparse] (
identifier[f] , identifier[description] = identifier[description] ,
identifier[epilog] = identifier[epilog] ,
identifier[add_nos] = identifier[add_nos] ,
identifier[parser] = identifier[parser] )
identifier[func_sig] = identifier[signature] ( identifier[func] )
identifier[docstr_description] , identifier[docstr_epilog] = identifier[parse_docstring] ( identifier[getdoc] ( identifier[func] ))
keyword[if] identifier[parser] keyword[is] keyword[None] :
identifier[parser] = identifier[make_parser] (
identifier[func_sig] ,
identifier[description] keyword[or] identifier[docstr_description] ,
identifier[epilog] keyword[or] identifier[docstr_epilog] ,
identifier[add_nos] )
@ identifier[wraps] ( identifier[func] )
keyword[def] identifier[autoparse_wrapper] ( identifier[argv] = keyword[None] ):
keyword[if] identifier[argv] keyword[is] keyword[None] :
identifier[argv] = identifier[sys] . identifier[argv] [ literal[int] :]
identifier[parsed_args] = identifier[func_sig] . identifier[bind_partial] ()
identifier[parsed_args] . identifier[arguments] . identifier[update] ( identifier[vars] ( identifier[parser] . identifier[parse_args] ( identifier[argv] )))
keyword[return] identifier[func] (* identifier[parsed_args] . identifier[args] ,** identifier[parsed_args] . identifier[kwargs] )
identifier[autoparse_wrapper] . identifier[func] = identifier[func]
identifier[autoparse_wrapper] . identifier[parser] = identifier[parser]
keyword[return] identifier[autoparse_wrapper] | def autoparse(func=None, *, description=None, epilog=None, add_nos=False, parser=None):
"""
This decorator converts a function that takes normal arguments into a
function which takes a single optional argument, argv, parses it using an
argparse.ArgumentParser, and calls the underlying function with the parsed
arguments. If it is not given, sys.argv[1:] is used. This is so that the
function can be used as a setuptools entry point, as well as a normal main
function. sys.argv[1:] is not evaluated until the function is called, to
allow injecting different arguments for testing.
It uses the argument signature of the function to create an
ArgumentParser. Parameters without defaults become positional parameters,
while parameters *with* defaults become --options. Use annotations to set
the type of the parameter.
The `desctiption` and `epilog` parameters corrospond to the same respective
argparse parameters. If no description is given, it defaults to the
decorated functions's docstring, if present.
If add_nos is True, every boolean option (that is, every parameter with a
default of True/False or a type of bool) will have a --no- version created
as well, which inverts the option. For instance, the --verbose option will
have a --no-verbose counterpart. These are not mutually exclusive-
whichever one appears last in the argument list will have precedence.
If a parser is given, it is used instead of one generated from the function
signature. In this case, no parser is created; instead, the given parser is
used to parse the argv argument. The parser's results' argument names must
match up with the parameter names of the decorated function.
The decorated function is attached to the result as the `func` attribute,
and the parser is attached as the `parser` attribute.
"""
# If @autoparse(...) is used instead of @autoparse
if func is None:
return lambda f: autoparse(f, description=description, epilog=epilog, add_nos=add_nos, parser=parser) # depends on [control=['if'], data=[]]
func_sig = signature(func)
(docstr_description, docstr_epilog) = parse_docstring(getdoc(func))
if parser is None:
parser = make_parser(func_sig, description or docstr_description, epilog or docstr_epilog, add_nos) # depends on [control=['if'], data=['parser']]
@wraps(func)
def autoparse_wrapper(argv=None):
if argv is None:
argv = sys.argv[1:] # depends on [control=['if'], data=['argv']]
# Get empty argument binding, to fill with parsed arguments. This
# object does all the heavy lifting of turning named arguments into
# into correctly bound *args and **kwargs.
parsed_args = func_sig.bind_partial()
parsed_args.arguments.update(vars(parser.parse_args(argv)))
return func(*parsed_args.args, **parsed_args.kwargs)
# TODO: attach an updated __signature__ to autoparse_wrapper, just in case.
# Attach the wrapped function and parser, and return the wrapper.
autoparse_wrapper.func = func
autoparse_wrapper.parser = parser
return autoparse_wrapper |
def _add_sample_measure(self, measure_params, num_samples):
"""Generate memory samples from current statevector.
Args:
measure_params (list): List of (qubit, cmembit) values for
measure instructions to sample.
num_samples (int): The number of memory samples to generate.
Returns:
list: A list of memory values in hex format.
"""
# Get unique qubits that are actually measured
measured_qubits = list({qubit for qubit, cmembit in measure_params})
num_measured = len(measured_qubits)
# Axis for numpy.sum to compute probabilities
axis = list(range(self._number_of_qubits))
for qubit in reversed(measured_qubits):
# Remove from largest qubit to smallest so list position is correct
# with respect to position from end of the list
axis.remove(self._number_of_qubits - 1 - qubit)
probabilities = np.reshape(np.sum(np.abs(self._statevector) ** 2,
axis=tuple(axis)),
2 ** num_measured)
# Generate samples on measured qubits
samples = self._local_random.choice(range(2 ** num_measured),
num_samples, p=probabilities)
# Convert to bit-strings
memory = []
for sample in samples:
classical_memory = self._classical_memory
for count, (qubit, cmembit) in enumerate(sorted(measure_params)):
qubit_outcome = int((sample & (1 << count)) >> count)
membit = 1 << cmembit
classical_memory = (classical_memory & (~membit)) | (qubit_outcome << cmembit)
value = bin(classical_memory)[2:]
memory.append(hex(int(value, 2)))
return memory | def function[_add_sample_measure, parameter[self, measure_params, num_samples]]:
constant[Generate memory samples from current statevector.
Args:
measure_params (list): List of (qubit, cmembit) values for
measure instructions to sample.
num_samples (int): The number of memory samples to generate.
Returns:
list: A list of memory values in hex format.
]
variable[measured_qubits] assign[=] call[name[list], parameter[<ast.SetComp object at 0x7da1b059db10>]]
variable[num_measured] assign[=] call[name[len], parameter[name[measured_qubits]]]
variable[axis] assign[=] call[name[list], parameter[call[name[range], parameter[name[self]._number_of_qubits]]]]
for taget[name[qubit]] in starred[call[name[reversed], parameter[name[measured_qubits]]]] begin[:]
call[name[axis].remove, parameter[binary_operation[binary_operation[name[self]._number_of_qubits - constant[1]] - name[qubit]]]]
variable[probabilities] assign[=] call[name[np].reshape, parameter[call[name[np].sum, parameter[binary_operation[call[name[np].abs, parameter[name[self]._statevector]] ** constant[2]]]], binary_operation[constant[2] ** name[num_measured]]]]
variable[samples] assign[=] call[name[self]._local_random.choice, parameter[call[name[range], parameter[binary_operation[constant[2] ** name[num_measured]]]], name[num_samples]]]
variable[memory] assign[=] list[[]]
for taget[name[sample]] in starred[name[samples]] begin[:]
variable[classical_memory] assign[=] name[self]._classical_memory
for taget[tuple[[<ast.Name object at 0x7da1b059fc40>, <ast.Tuple object at 0x7da1b059cc10>]]] in starred[call[name[enumerate], parameter[call[name[sorted], parameter[name[measure_params]]]]]] begin[:]
variable[qubit_outcome] assign[=] call[name[int], parameter[binary_operation[binary_operation[name[sample] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[count]]] <ast.RShift object at 0x7da2590d6a40> name[count]]]]
variable[membit] assign[=] binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> name[cmembit]]
variable[classical_memory] assign[=] binary_operation[binary_operation[name[classical_memory] <ast.BitAnd object at 0x7da2590d6b60> <ast.UnaryOp object at 0x7da1b0535ab0>] <ast.BitOr object at 0x7da2590d6aa0> binary_operation[name[qubit_outcome] <ast.LShift object at 0x7da2590d69e0> name[cmembit]]]
variable[value] assign[=] call[call[name[bin], parameter[name[classical_memory]]]][<ast.Slice object at 0x7da1b0537fd0>]
call[name[memory].append, parameter[call[name[hex], parameter[call[name[int], parameter[name[value], constant[2]]]]]]]
return[name[memory]] | keyword[def] identifier[_add_sample_measure] ( identifier[self] , identifier[measure_params] , identifier[num_samples] ):
literal[string]
identifier[measured_qubits] = identifier[list] ({ identifier[qubit] keyword[for] identifier[qubit] , identifier[cmembit] keyword[in] identifier[measure_params] })
identifier[num_measured] = identifier[len] ( identifier[measured_qubits] )
identifier[axis] = identifier[list] ( identifier[range] ( identifier[self] . identifier[_number_of_qubits] ))
keyword[for] identifier[qubit] keyword[in] identifier[reversed] ( identifier[measured_qubits] ):
identifier[axis] . identifier[remove] ( identifier[self] . identifier[_number_of_qubits] - literal[int] - identifier[qubit] )
identifier[probabilities] = identifier[np] . identifier[reshape] ( identifier[np] . identifier[sum] ( identifier[np] . identifier[abs] ( identifier[self] . identifier[_statevector] )** literal[int] ,
identifier[axis] = identifier[tuple] ( identifier[axis] )),
literal[int] ** identifier[num_measured] )
identifier[samples] = identifier[self] . identifier[_local_random] . identifier[choice] ( identifier[range] ( literal[int] ** identifier[num_measured] ),
identifier[num_samples] , identifier[p] = identifier[probabilities] )
identifier[memory] =[]
keyword[for] identifier[sample] keyword[in] identifier[samples] :
identifier[classical_memory] = identifier[self] . identifier[_classical_memory]
keyword[for] identifier[count] ,( identifier[qubit] , identifier[cmembit] ) keyword[in] identifier[enumerate] ( identifier[sorted] ( identifier[measure_params] )):
identifier[qubit_outcome] = identifier[int] (( identifier[sample] &( literal[int] << identifier[count] ))>> identifier[count] )
identifier[membit] = literal[int] << identifier[cmembit]
identifier[classical_memory] =( identifier[classical_memory] &(~ identifier[membit] ))|( identifier[qubit_outcome] << identifier[cmembit] )
identifier[value] = identifier[bin] ( identifier[classical_memory] )[ literal[int] :]
identifier[memory] . identifier[append] ( identifier[hex] ( identifier[int] ( identifier[value] , literal[int] )))
keyword[return] identifier[memory] | def _add_sample_measure(self, measure_params, num_samples):
"""Generate memory samples from current statevector.
Args:
measure_params (list): List of (qubit, cmembit) values for
measure instructions to sample.
num_samples (int): The number of memory samples to generate.
Returns:
list: A list of memory values in hex format.
"""
# Get unique qubits that are actually measured
measured_qubits = list({qubit for (qubit, cmembit) in measure_params})
num_measured = len(measured_qubits)
# Axis for numpy.sum to compute probabilities
axis = list(range(self._number_of_qubits))
for qubit in reversed(measured_qubits):
# Remove from largest qubit to smallest so list position is correct
# with respect to position from end of the list
axis.remove(self._number_of_qubits - 1 - qubit) # depends on [control=['for'], data=['qubit']]
probabilities = np.reshape(np.sum(np.abs(self._statevector) ** 2, axis=tuple(axis)), 2 ** num_measured)
# Generate samples on measured qubits
samples = self._local_random.choice(range(2 ** num_measured), num_samples, p=probabilities)
# Convert to bit-strings
memory = []
for sample in samples:
classical_memory = self._classical_memory
for (count, (qubit, cmembit)) in enumerate(sorted(measure_params)):
qubit_outcome = int((sample & 1 << count) >> count)
membit = 1 << cmembit
classical_memory = classical_memory & ~membit | qubit_outcome << cmembit # depends on [control=['for'], data=[]]
value = bin(classical_memory)[2:]
memory.append(hex(int(value, 2))) # depends on [control=['for'], data=['sample']]
return memory |
def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw']
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers)
yield (
name + '.exe', get_win_launcher(launcher_type),
'b' # write in binary mode
)
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't') | def function[_get_script_args, parameter[cls, type_, name, header, script_text]]:
constant[
For Windows, add a .py extension and an .exe launcher
]
if compare[name[type_] equal[==] constant[gui]] begin[:]
variable[launcher_type] assign[=] constant[gui]
variable[ext] assign[=] constant[-script.pyw]
variable[old] assign[=] list[[<ast.Constant object at 0x7da1b1cd6710>]]
variable[hdr] assign[=] call[name[cls]._adjust_header, parameter[name[type_], name[header]]]
variable[blockers] assign[=] <ast.ListComp object at 0x7da1b1cd6740>
<ast.Yield object at 0x7da1b1cd6980>
<ast.Yield object at 0x7da1b1cd4d60>
if <ast.UnaryOp object at 0x7da1b1b15fc0> begin[:]
variable[m_name] assign[=] binary_operation[name[name] + constant[.exe.manifest]]
<ast.Yield object at 0x7da1b1b16500> | keyword[def] identifier[_get_script_args] ( identifier[cls] , identifier[type_] , identifier[name] , identifier[header] , identifier[script_text] ):
literal[string]
keyword[if] identifier[type_] == literal[string] :
identifier[launcher_type] = literal[string]
identifier[ext] = literal[string]
identifier[old] =[ literal[string] ]
keyword[else] :
identifier[launcher_type] = literal[string]
identifier[ext] = literal[string]
identifier[old] =[ literal[string] , literal[string] , literal[string] ]
identifier[hdr] = identifier[cls] . identifier[_adjust_header] ( identifier[type_] , identifier[header] )
identifier[blockers] =[ identifier[name] + identifier[x] keyword[for] identifier[x] keyword[in] identifier[old] ]
keyword[yield] ( identifier[name] + identifier[ext] , identifier[hdr] + identifier[script_text] , literal[string] , identifier[blockers] )
keyword[yield] (
identifier[name] + literal[string] , identifier[get_win_launcher] ( identifier[launcher_type] ),
literal[string]
)
keyword[if] keyword[not] identifier[is_64bit] ():
identifier[m_name] = identifier[name] + literal[string]
keyword[yield] ( identifier[m_name] , identifier[load_launcher_manifest] ( identifier[name] ), literal[string] ) | def _get_script_args(cls, type_, name, header, script_text):
"""
For Windows, add a .py extension and an .exe launcher
"""
if type_ == 'gui':
launcher_type = 'gui'
ext = '-script.pyw'
old = ['.pyw'] # depends on [control=['if'], data=[]]
else:
launcher_type = 'cli'
ext = '-script.py'
old = ['.py', '.pyc', '.pyo']
hdr = cls._adjust_header(type_, header)
blockers = [name + x for x in old]
yield (name + ext, hdr + script_text, 't', blockers) # write in binary mode
yield (name + '.exe', get_win_launcher(launcher_type), 'b')
if not is_64bit():
# install a manifest for the launcher to prevent Windows
# from detecting it as an installer (which it will for
# launchers like easy_install.exe). Consider only
# adding a manifest for launchers detected as installers.
# See Distribute #143 for details.
m_name = name + '.exe.manifest'
yield (m_name, load_launcher_manifest(name), 't') # depends on [control=['if'], data=[]] |
def ceiling(value, mod=1):
"""
RETURN SMALLEST INTEGER GREATER THAN value
"""
if value == None:
return None
mod = int(mod)
v = int(math_floor(value + mod))
return v - (v % mod) | def function[ceiling, parameter[value, mod]]:
constant[
RETURN SMALLEST INTEGER GREATER THAN value
]
if compare[name[value] equal[==] constant[None]] begin[:]
return[constant[None]]
variable[mod] assign[=] call[name[int], parameter[name[mod]]]
variable[v] assign[=] call[name[int], parameter[call[name[math_floor], parameter[binary_operation[name[value] + name[mod]]]]]]
return[binary_operation[name[v] - binary_operation[name[v] <ast.Mod object at 0x7da2590d6920> name[mod]]]] | keyword[def] identifier[ceiling] ( identifier[value] , identifier[mod] = literal[int] ):
literal[string]
keyword[if] identifier[value] == keyword[None] :
keyword[return] keyword[None]
identifier[mod] = identifier[int] ( identifier[mod] )
identifier[v] = identifier[int] ( identifier[math_floor] ( identifier[value] + identifier[mod] ))
keyword[return] identifier[v] -( identifier[v] % identifier[mod] ) | def ceiling(value, mod=1):
"""
RETURN SMALLEST INTEGER GREATER THAN value
"""
if value == None:
return None # depends on [control=['if'], data=[]]
mod = int(mod)
v = int(math_floor(value + mod))
return v - v % mod |
def check_auth(email, password):
"""Check if a username/password combination is valid.
"""
try:
user = User.get(User.email == email)
except User.DoesNotExist:
return False
return password == user.password | def function[check_auth, parameter[email, password]]:
constant[Check if a username/password combination is valid.
]
<ast.Try object at 0x7da18bcca620>
return[compare[name[password] equal[==] name[user].password]] | keyword[def] identifier[check_auth] ( identifier[email] , identifier[password] ):
literal[string]
keyword[try] :
identifier[user] = identifier[User] . identifier[get] ( identifier[User] . identifier[email] == identifier[email] )
keyword[except] identifier[User] . identifier[DoesNotExist] :
keyword[return] keyword[False]
keyword[return] identifier[password] == identifier[user] . identifier[password] | def check_auth(email, password):
"""Check if a username/password combination is valid.
"""
try:
user = User.get(User.email == email) # depends on [control=['try'], data=[]]
except User.DoesNotExist:
return False # depends on [control=['except'], data=[]]
return password == user.password |
def wait_for_event(
name,
id_list,
event_id='id',
timeout=300,
node='master'):
'''
Watch Salt's event bus and block until a condition is met
.. versionadded:: 2014.7.0
name
An event tag to watch for; supports Reactor-style globbing.
id_list
A list of event identifiers to watch for -- usually the minion ID. Each
time an event tag is matched the event data is inspected for
``event_id``, if found it is removed from ``id_list``. When ``id_list``
is empty this function returns success.
event_id : id
The name of a key in the event data. Default is ``id`` for the minion
ID, another common value is ``name`` for use with orchestrating
salt-cloud events.
timeout : 300
The maximum time in seconds to wait before failing.
The following example blocks until all the listed minions complete a
restart and reconnect to the Salt master:
.. code-block:: yaml
reboot_all_minions:
salt.function:
- name: system.reboot
- tgt: '*'
wait_for_reboots:
salt.wait_for_event:
- name: salt/minion/*/start
- id_list:
- jerry
- stuart
- dave
- phil
- kevin
- mike
- require:
- salt: reboot_all_minions
'''
ret = {'name': name, 'changes': {}, 'comment': '', 'result': False}
if __opts__.get('test'):
ret['comment'] = \
'Orchestration would wait for event \'{0}\''.format(name)
ret['result'] = None
return ret
sevent = salt.utils.event.get_event(
node,
__opts__['sock_dir'],
__opts__['transport'],
opts=__opts__,
listen=True)
del_counter = 0
starttime = time.time()
timelimit = starttime + timeout
while True:
event = sevent.get_event(full=True)
is_timedout = time.time() > timelimit
if event is None and not is_timedout:
log.trace("wait_for_event: No event data; waiting.")
continue
elif event is None and is_timedout:
ret['comment'] = 'Timeout value reached.'
return ret
if fnmatch.fnmatch(event['tag'], name):
val = event['data'].get(event_id)
if val is None and 'data' in event['data']:
val = event['data']['data'].get(event_id)
if val is not None:
try:
val_idx = id_list.index(val)
except ValueError:
log.trace("wait_for_event: Event identifier '%s' not in "
"id_list; skipping.", event_id)
else:
del id_list[val_idx]
del_counter += 1
minions_seen = ret['changes'].setdefault('minions_seen', [])
minions_seen.append(val)
log.debug("wait_for_event: Event identifier '%s' removed "
"from id_list; %s items remaining.",
val, len(id_list))
else:
log.trace("wait_for_event: Event identifier '%s' not in event "
"'%s'; skipping.", event_id, event['tag'])
else:
log.debug("wait_for_event: Skipping unmatched event '%s'",
event['tag'])
if not id_list:
ret['result'] = True
ret['comment'] = 'All events seen in {0} seconds.'.format(
time.time() - starttime)
return ret
if is_timedout:
ret['comment'] = 'Timeout value reached.'
return ret | def function[wait_for_event, parameter[name, id_list, event_id, timeout, node]]:
constant[
Watch Salt's event bus and block until a condition is met
.. versionadded:: 2014.7.0
name
An event tag to watch for; supports Reactor-style globbing.
id_list
A list of event identifiers to watch for -- usually the minion ID. Each
time an event tag is matched the event data is inspected for
``event_id``, if found it is removed from ``id_list``. When ``id_list``
is empty this function returns success.
event_id : id
The name of a key in the event data. Default is ``id`` for the minion
ID, another common value is ``name`` for use with orchestrating
salt-cloud events.
timeout : 300
The maximum time in seconds to wait before failing.
The following example blocks until all the listed minions complete a
restart and reconnect to the Salt master:
.. code-block:: yaml
reboot_all_minions:
salt.function:
- name: system.reboot
- tgt: '*'
wait_for_reboots:
salt.wait_for_event:
- name: salt/minion/*/start
- id_list:
- jerry
- stuart
- dave
- phil
- kevin
- mike
- require:
- salt: reboot_all_minions
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c9480>, <ast.Constant object at 0x7da20c7c85b0>, <ast.Constant object at 0x7da20c7c9fc0>, <ast.Constant object at 0x7da20c7c8b80>], [<ast.Name object at 0x7da20c7c89d0>, <ast.Dict object at 0x7da20c7c8370>, <ast.Constant object at 0x7da20c7c8c10>, <ast.Constant object at 0x7da20c7cb0a0>]]
if call[name[__opts__].get, parameter[constant[test]]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Orchestration would wait for event '{0}'].format, parameter[name[name]]]
call[name[ret]][constant[result]] assign[=] constant[None]
return[name[ret]]
variable[sevent] assign[=] call[name[salt].utils.event.get_event, parameter[name[node], call[name[__opts__]][constant[sock_dir]], call[name[__opts__]][constant[transport]]]]
variable[del_counter] assign[=] constant[0]
variable[starttime] assign[=] call[name[time].time, parameter[]]
variable[timelimit] assign[=] binary_operation[name[starttime] + name[timeout]]
while constant[True] begin[:]
variable[event] assign[=] call[name[sevent].get_event, parameter[]]
variable[is_timedout] assign[=] compare[call[name[time].time, parameter[]] greater[>] name[timelimit]]
if <ast.BoolOp object at 0x7da20c7ca2c0> begin[:]
call[name[log].trace, parameter[constant[wait_for_event: No event data; waiting.]]]
continue
if call[name[fnmatch].fnmatch, parameter[call[name[event]][constant[tag]], name[name]]] begin[:]
variable[val] assign[=] call[call[name[event]][constant[data]].get, parameter[name[event_id]]]
if <ast.BoolOp object at 0x7da20c7ca620> begin[:]
variable[val] assign[=] call[call[call[name[event]][constant[data]]][constant[data]].get, parameter[name[event_id]]]
if compare[name[val] is_not constant[None]] begin[:]
<ast.Try object at 0x7da20c7cb400>
if <ast.UnaryOp object at 0x7da1b26ae9e0> begin[:]
call[name[ret]][constant[result]] assign[=] constant[True]
call[name[ret]][constant[comment]] assign[=] call[constant[All events seen in {0} seconds.].format, parameter[binary_operation[call[name[time].time, parameter[]] - name[starttime]]]]
return[name[ret]]
if name[is_timedout] begin[:]
call[name[ret]][constant[comment]] assign[=] constant[Timeout value reached.]
return[name[ret]] | keyword[def] identifier[wait_for_event] (
identifier[name] ,
identifier[id_list] ,
identifier[event_id] = literal[string] ,
identifier[timeout] = literal[int] ,
identifier[node] = literal[string] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : literal[string] , literal[string] : keyword[False] }
keyword[if] identifier[__opts__] . identifier[get] ( literal[string] ):
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]= keyword[None]
keyword[return] identifier[ret]
identifier[sevent] = identifier[salt] . identifier[utils] . identifier[event] . identifier[get_event] (
identifier[node] ,
identifier[__opts__] [ literal[string] ],
identifier[__opts__] [ literal[string] ],
identifier[opts] = identifier[__opts__] ,
identifier[listen] = keyword[True] )
identifier[del_counter] = literal[int]
identifier[starttime] = identifier[time] . identifier[time] ()
identifier[timelimit] = identifier[starttime] + identifier[timeout]
keyword[while] keyword[True] :
identifier[event] = identifier[sevent] . identifier[get_event] ( identifier[full] = keyword[True] )
identifier[is_timedout] = identifier[time] . identifier[time] ()> identifier[timelimit]
keyword[if] identifier[event] keyword[is] keyword[None] keyword[and] keyword[not] identifier[is_timedout] :
identifier[log] . identifier[trace] ( literal[string] )
keyword[continue]
keyword[elif] identifier[event] keyword[is] keyword[None] keyword[and] identifier[is_timedout] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret]
keyword[if] identifier[fnmatch] . identifier[fnmatch] ( identifier[event] [ literal[string] ], identifier[name] ):
identifier[val] = identifier[event] [ literal[string] ]. identifier[get] ( identifier[event_id] )
keyword[if] identifier[val] keyword[is] keyword[None] keyword[and] literal[string] keyword[in] identifier[event] [ literal[string] ]:
identifier[val] = identifier[event] [ literal[string] ][ literal[string] ]. identifier[get] ( identifier[event_id] )
keyword[if] identifier[val] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[val_idx] = identifier[id_list] . identifier[index] ( identifier[val] )
keyword[except] identifier[ValueError] :
identifier[log] . identifier[trace] ( literal[string]
literal[string] , identifier[event_id] )
keyword[else] :
keyword[del] identifier[id_list] [ identifier[val_idx] ]
identifier[del_counter] += literal[int]
identifier[minions_seen] = identifier[ret] [ literal[string] ]. identifier[setdefault] ( literal[string] ,[])
identifier[minions_seen] . identifier[append] ( identifier[val] )
identifier[log] . identifier[debug] ( literal[string]
literal[string] ,
identifier[val] , identifier[len] ( identifier[id_list] ))
keyword[else] :
identifier[log] . identifier[trace] ( literal[string]
literal[string] , identifier[event_id] , identifier[event] [ literal[string] ])
keyword[else] :
identifier[log] . identifier[debug] ( literal[string] ,
identifier[event] [ literal[string] ])
keyword[if] keyword[not] identifier[id_list] :
identifier[ret] [ literal[string] ]= keyword[True]
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] (
identifier[time] . identifier[time] ()- identifier[starttime] )
keyword[return] identifier[ret]
keyword[if] identifier[is_timedout] :
identifier[ret] [ literal[string] ]= literal[string]
keyword[return] identifier[ret] | def wait_for_event(name, id_list, event_id='id', timeout=300, node='master'):
"""
Watch Salt's event bus and block until a condition is met
.. versionadded:: 2014.7.0
name
An event tag to watch for; supports Reactor-style globbing.
id_list
A list of event identifiers to watch for -- usually the minion ID. Each
time an event tag is matched the event data is inspected for
``event_id``, if found it is removed from ``id_list``. When ``id_list``
is empty this function returns success.
event_id : id
The name of a key in the event data. Default is ``id`` for the minion
ID, another common value is ``name`` for use with orchestrating
salt-cloud events.
timeout : 300
The maximum time in seconds to wait before failing.
The following example blocks until all the listed minions complete a
restart and reconnect to the Salt master:
.. code-block:: yaml
reboot_all_minions:
salt.function:
- name: system.reboot
- tgt: '*'
wait_for_reboots:
salt.wait_for_event:
- name: salt/minion/*/start
- id_list:
- jerry
- stuart
- dave
- phil
- kevin
- mike
- require:
- salt: reboot_all_minions
"""
ret = {'name': name, 'changes': {}, 'comment': '', 'result': False}
if __opts__.get('test'):
ret['comment'] = "Orchestration would wait for event '{0}'".format(name)
ret['result'] = None
return ret # depends on [control=['if'], data=[]]
sevent = salt.utils.event.get_event(node, __opts__['sock_dir'], __opts__['transport'], opts=__opts__, listen=True)
del_counter = 0
starttime = time.time()
timelimit = starttime + timeout
while True:
event = sevent.get_event(full=True)
is_timedout = time.time() > timelimit
if event is None and (not is_timedout):
log.trace('wait_for_event: No event data; waiting.')
continue # depends on [control=['if'], data=[]]
elif event is None and is_timedout:
ret['comment'] = 'Timeout value reached.'
return ret # depends on [control=['if'], data=[]]
if fnmatch.fnmatch(event['tag'], name):
val = event['data'].get(event_id)
if val is None and 'data' in event['data']:
val = event['data']['data'].get(event_id) # depends on [control=['if'], data=[]]
if val is not None:
try:
val_idx = id_list.index(val) # depends on [control=['try'], data=[]]
except ValueError:
log.trace("wait_for_event: Event identifier '%s' not in id_list; skipping.", event_id) # depends on [control=['except'], data=[]]
else:
del id_list[val_idx]
del_counter += 1
minions_seen = ret['changes'].setdefault('minions_seen', [])
minions_seen.append(val)
log.debug("wait_for_event: Event identifier '%s' removed from id_list; %s items remaining.", val, len(id_list)) # depends on [control=['if'], data=['val']]
else:
log.trace("wait_for_event: Event identifier '%s' not in event '%s'; skipping.", event_id, event['tag']) # depends on [control=['if'], data=[]]
else:
log.debug("wait_for_event: Skipping unmatched event '%s'", event['tag'])
if not id_list:
ret['result'] = True
ret['comment'] = 'All events seen in {0} seconds.'.format(time.time() - starttime)
return ret # depends on [control=['if'], data=[]]
if is_timedout:
ret['comment'] = 'Timeout value reached.'
return ret # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] |
def readf(prompt, default=None, minval=None, maxval=None,
allowed_single_chars=None, question_mark=True):
"""Return integer value read from keyboard
Parameters
----------
prompt : str
Prompt string.
default : float or None
Default value.
minval : float or None
Mininum allowed value.
maxval : float or None
Maximum allowed value.
allowed_single_chars : str
String containing allowed valid characters.
question_mark : bool
If True, display question mark after prompt.
Returns
-------
result : float
Read value.
"""
return read_value(ftype=float,
prompt=prompt,
default=default,
minval=minval,
maxval=maxval,
allowed_single_chars=allowed_single_chars,
question_mark=question_mark) | def function[readf, parameter[prompt, default, minval, maxval, allowed_single_chars, question_mark]]:
constant[Return integer value read from keyboard
Parameters
----------
prompt : str
Prompt string.
default : float or None
Default value.
minval : float or None
Mininum allowed value.
maxval : float or None
Maximum allowed value.
allowed_single_chars : str
String containing allowed valid characters.
question_mark : bool
If True, display question mark after prompt.
Returns
-------
result : float
Read value.
]
return[call[name[read_value], parameter[]]] | keyword[def] identifier[readf] ( identifier[prompt] , identifier[default] = keyword[None] , identifier[minval] = keyword[None] , identifier[maxval] = keyword[None] ,
identifier[allowed_single_chars] = keyword[None] , identifier[question_mark] = keyword[True] ):
literal[string]
keyword[return] identifier[read_value] ( identifier[ftype] = identifier[float] ,
identifier[prompt] = identifier[prompt] ,
identifier[default] = identifier[default] ,
identifier[minval] = identifier[minval] ,
identifier[maxval] = identifier[maxval] ,
identifier[allowed_single_chars] = identifier[allowed_single_chars] ,
identifier[question_mark] = identifier[question_mark] ) | def readf(prompt, default=None, minval=None, maxval=None, allowed_single_chars=None, question_mark=True):
"""Return integer value read from keyboard
Parameters
----------
prompt : str
Prompt string.
default : float or None
Default value.
minval : float or None
Mininum allowed value.
maxval : float or None
Maximum allowed value.
allowed_single_chars : str
String containing allowed valid characters.
question_mark : bool
If True, display question mark after prompt.
Returns
-------
result : float
Read value.
"""
return read_value(ftype=float, prompt=prompt, default=default, minval=minval, maxval=maxval, allowed_single_chars=allowed_single_chars, question_mark=question_mark) |
def _set_action_profile_association(self, v, load=False):
"""
Setter method for action_profile_association, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep/action_profile_association (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_action_profile_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_action_profile_association() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..32']}), is_leaf=True, yang_name="action-profile-association", rest_name="action-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Associate action profile with Remote-MEP', u'alt-name': u'action-profile'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """action_profile_association must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..32']}), is_leaf=True, yang_name="action-profile-association", rest_name="action-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Associate action profile with Remote-MEP', u'alt-name': u'action-profile'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='string', is_config=True)""",
})
self.__action_profile_association = t
if hasattr(self, '_set'):
self._set() | def function[_set_action_profile_association, parameter[self, v, load]]:
constant[
Setter method for action_profile_association, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep/action_profile_association (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_action_profile_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_action_profile_association() directly.
]
if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:]
variable[v] assign[=] call[name[v]._utype, parameter[name[v]]]
<ast.Try object at 0x7da1b256f2b0>
name[self].__action_profile_association assign[=] name[t]
if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:]
call[name[self]._set, parameter[]] | keyword[def] identifier[_set_action_profile_association] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ):
identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] )
keyword[try] :
identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[RestrictedClassType] ( identifier[base_type] = identifier[unicode] , identifier[restriction_dict] ={ literal[string] :[ literal[string] ]}), identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] ):
keyword[raise] identifier[ValueError] ({
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
})
identifier[self] . identifier[__action_profile_association] = identifier[t]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[self] . identifier[_set] () | def _set_action_profile_association(self, v, load=False):
"""
Setter method for action_profile_association, mapped from YANG variable /protocol/cfm/domain_name/ma_name/cfm_ma_sub_commands/mep/cfm_mep_sub_commands/remote_mep/action_profile_association (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_action_profile_association is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_action_profile_association() directly.
"""
if hasattr(v, '_utype'):
v = v._utype(v) # depends on [control=['if'], data=[]]
try:
t = YANGDynClass(v, base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..32']}), is_leaf=True, yang_name='action-profile-association', rest_name='action-profile', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Associate action profile with Remote-MEP', u'alt-name': u'action-profile'}}, namespace='urn:brocade.com:mgmt:brocade-dot1ag', defining_module='brocade-dot1ag', yang_type='string', is_config=True) # depends on [control=['try'], data=[]]
except (TypeError, ValueError):
raise ValueError({'error-string': 'action_profile_association must be of a type compatible with string', 'defined-type': 'string', 'generated-type': 'YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={\'length\': [u\'1..32\']}), is_leaf=True, yang_name="action-profile-association", rest_name="action-profile", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'cli-full-command\': None, u\'info\': u\'Associate action profile with Remote-MEP\', u\'alt-name\': u\'action-profile\'}}, namespace=\'urn:brocade.com:mgmt:brocade-dot1ag\', defining_module=\'brocade-dot1ag\', yang_type=\'string\', is_config=True)'}) # depends on [control=['except'], data=[]]
self.__action_profile_association = t
if hasattr(self, '_set'):
self._set() # depends on [control=['if'], data=[]] |
def parse_vars(self, args):
"""
Given variables like ``['a=b', 'c=d']`` turns it into ``{'a':
'b', 'c': 'd'}``
"""
result = {}
for arg in args:
if '=' not in arg:
raise ValueError(
'Variable assignment %r invalid (no "=")'
% arg)
name, value = arg.split('=', 1)
result[name] = value
return result | def function[parse_vars, parameter[self, args]]:
constant[
Given variables like ``['a=b', 'c=d']`` turns it into ``{'a':
'b', 'c': 'd'}``
]
variable[result] assign[=] dictionary[[], []]
for taget[name[arg]] in starred[name[args]] begin[:]
if compare[constant[=] <ast.NotIn object at 0x7da2590d7190> name[arg]] begin[:]
<ast.Raise object at 0x7da18c4ceaa0>
<ast.Tuple object at 0x7da18c4cde70> assign[=] call[name[arg].split, parameter[constant[=], constant[1]]]
call[name[result]][name[name]] assign[=] name[value]
return[name[result]] | keyword[def] identifier[parse_vars] ( identifier[self] , identifier[args] ):
literal[string]
identifier[result] ={}
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[arg] :
keyword[raise] identifier[ValueError] (
literal[string]
% identifier[arg] )
identifier[name] , identifier[value] = identifier[arg] . identifier[split] ( literal[string] , literal[int] )
identifier[result] [ identifier[name] ]= identifier[value]
keyword[return] identifier[result] | def parse_vars(self, args):
"""
Given variables like ``['a=b', 'c=d']`` turns it into ``{'a':
'b', 'c': 'd'}``
"""
result = {}
for arg in args:
if '=' not in arg:
raise ValueError('Variable assignment %r invalid (no "=")' % arg) # depends on [control=['if'], data=['arg']]
(name, value) = arg.split('=', 1)
result[name] = value # depends on [control=['for'], data=['arg']]
return result |
def convert_path(path):
"""
Convert path to a normalized format
"""
if os.path.isabs(path):
raise Exception("Cannot include file with absolute path {}. Please use relative path instead".format((path)))
path = os.path.normpath(path)
return path | def function[convert_path, parameter[path]]:
constant[
Convert path to a normalized format
]
if call[name[os].path.isabs, parameter[name[path]]] begin[:]
<ast.Raise object at 0x7da1b09e9270>
variable[path] assign[=] call[name[os].path.normpath, parameter[name[path]]]
return[name[path]] | keyword[def] identifier[convert_path] ( identifier[path] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[isabs] ( identifier[path] ):
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] (( identifier[path] )))
identifier[path] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[path] )
keyword[return] identifier[path] | def convert_path(path):
"""
Convert path to a normalized format
"""
if os.path.isabs(path):
raise Exception('Cannot include file with absolute path {}. Please use relative path instead'.format(path)) # depends on [control=['if'], data=[]]
path = os.path.normpath(path)
return path |
def _make_diffusion_matrix(K, weight1=None, weight2=None):
"""Builds the general diffusion matrix with dimension nxn.
.. note::
:math:`n` = number of points of diffusion axis
:math:`n+1` = number of bounts of diffusion axis
**Function-all argument** \n
:param array K: dimensionless diffusivities at cell boundaries
*(size: 1xn+1)*
:param array weight1: weight_1 *(size: 1xn+1)*
:param array weight2: weight_2 *(size: 1xn)*
:returns: completely listed tridiagonal diffusion matrix *(size: nxn)*
:rtype: array
.. note::
The elements of array K are acutally dimensionless:
.. math::
K[i] = K_{\\textrm{physical}} \\frac{\\Delta t}{(\\Delta y)^2}
where :math:`K_{\\textrm{physical}}` is in unit :math:`\\frac{\\textrm{length}^2}{\\textrm{time}}`
The diffusion matrix is build like the following
.. math::
\\textrm{diffTriDiag}=
\\left[ \\begin{array}{cccccc}
1+\\frac{s_1 }{w_{2,0}} & -\\frac{s_1}{w_{2,0}} & 0 & & ... & 0 \\\\
-\\frac{s_1}{w_{2,1}} & 1+\\frac{s_1 + s_2}{w_{2,1}} & -\\frac{s_2}{w_{2,1}} & 0 & ... & 0 \\\\
0 & -\\frac{s_2}{w_{2,2}} & 1+\\frac{s_2 + s_3}{w_{2,2}} & -\\frac{s_3}{w_{2,2}} &... & 0 \\\\
& & \\ddots & \\ddots & \\ddots & \\\\
0 & 0 & ... & -\\frac{s_{n-2}}{w_{2,n-2}} & 1+\\frac{s_{n-2} + s_{n-1}}{w_{2,{n-2}}} & -\\frac{s_{n-1}}{w_{2,{n-2}}} \\\\
0 & 0 & ... & 0 & -\\frac{s_{n-1}}{w_{2,n-1}} & 1+\\frac{s_{n-1}}{w_{2,n-1}} \\\\
\\end{array} \\right]
where
.. math::
\\begin{array}{lllllll}
K &= [K_0, &K_1, &K_2, &...,&K_{n-1}, &K_{n}] \\\\
w_1 &= [w_{1,0}, &w_{1,1},&w_{1,2},&...,&w_{1,n-1},&w_{1,n}] \\\\
w_2 &= [w_{2,0}, &w_{2,1},&w_{2,2},&...,&w_{2,n-1}]
\\end{array}
and following subsitute:
.. math::
s_i = w_{1,i} K_i
"""
# \\begin{eqnarray}
# y & = & ax^2 + bx + c \\\\
# f(x) & = & x^2 + 2xy + y^2
# \\end{eqnarray}
# .. math::
#
# K &= [K_0, &K_1, &K_2, &... , &K_{n-1}, &K_{n}] \\\\
# w_1 &= [w_{1,0}, &w_{1,1}, &w_{1,2}, &... , &w_{1,n-1}, \\ &w_{1,n}] \\\\
# w_2 &= [w_{2,0}, \\ &w_{2,1}, \\ &w_{2,2}, \\ &... \\ , \\ &w_{2,n-1}] &o \\\\
#
# """
J = K.size - 1
if weight1 is None:
weight1 = np.ones_like(K)
if weight2 is None:
weight2 = np.ones(J)
weightedK = weight1 * K
Ka1 = weightedK[0:J] / weight2
Ka3 = weightedK[1:J+1] / weight2
Ka2 = np.insert(Ka1[1:J], 0, 0) + np.append(Ka3[0:J-1], 0)
# Atmosphere tridiagonal matrix
# this code makes a 3xN matrix, suitable for use with solve_banded
#diag = np.empty((3, J))
#diag[0, 1:] = -Ka3[0:J-1]
#diag[1, :] = 1 + Ka2
#diag[2, 0:J-1] = -Ka1[1:J]
# Build the full banded matrix instead
A = (np.diag(1 + Ka2, k=0) +
np.diag(-Ka3[0:J-1], k=1) +
np.diag(-Ka1[1:J], k=-1))
return A | def function[_make_diffusion_matrix, parameter[K, weight1, weight2]]:
constant[Builds the general diffusion matrix with dimension nxn.
.. note::
:math:`n` = number of points of diffusion axis
:math:`n+1` = number of bounts of diffusion axis
**Function-all argument**
:param array K: dimensionless diffusivities at cell boundaries
*(size: 1xn+1)*
:param array weight1: weight_1 *(size: 1xn+1)*
:param array weight2: weight_2 *(size: 1xn)*
:returns: completely listed tridiagonal diffusion matrix *(size: nxn)*
:rtype: array
.. note::
The elements of array K are acutally dimensionless:
.. math::
K[i] = K_{\textrm{physical}} \frac{\Delta t}{(\Delta y)^2}
where :math:`K_{\textrm{physical}}` is in unit :math:`\frac{\textrm{length}^2}{\textrm{time}}`
The diffusion matrix is build like the following
.. math::
\textrm{diffTriDiag}=
\left[ \begin{array}{cccccc}
1+\frac{s_1 }{w_{2,0}} & -\frac{s_1}{w_{2,0}} & 0 & & ... & 0 \\
-\frac{s_1}{w_{2,1}} & 1+\frac{s_1 + s_2}{w_{2,1}} & -\frac{s_2}{w_{2,1}} & 0 & ... & 0 \\
0 & -\frac{s_2}{w_{2,2}} & 1+\frac{s_2 + s_3}{w_{2,2}} & -\frac{s_3}{w_{2,2}} &... & 0 \\
& & \ddots & \ddots & \ddots & \\
0 & 0 & ... & -\frac{s_{n-2}}{w_{2,n-2}} & 1+\frac{s_{n-2} + s_{n-1}}{w_{2,{n-2}}} & -\frac{s_{n-1}}{w_{2,{n-2}}} \\
0 & 0 & ... & 0 & -\frac{s_{n-1}}{w_{2,n-1}} & 1+\frac{s_{n-1}}{w_{2,n-1}} \\
\end{array} \right]
where
.. math::
\begin{array}{lllllll}
K &= [K_0, &K_1, &K_2, &...,&K_{n-1}, &K_{n}] \\
w_1 &= [w_{1,0}, &w_{1,1},&w_{1,2},&...,&w_{1,n-1},&w_{1,n}] \\
w_2 &= [w_{2,0}, &w_{2,1},&w_{2,2},&...,&w_{2,n-1}]
\end{array}
and following subsitute:
.. math::
s_i = w_{1,i} K_i
]
variable[J] assign[=] binary_operation[name[K].size - constant[1]]
if compare[name[weight1] is constant[None]] begin[:]
variable[weight1] assign[=] call[name[np].ones_like, parameter[name[K]]]
if compare[name[weight2] is constant[None]] begin[:]
variable[weight2] assign[=] call[name[np].ones, parameter[name[J]]]
variable[weightedK] assign[=] binary_operation[name[weight1] * name[K]]
variable[Ka1] assign[=] binary_operation[call[name[weightedK]][<ast.Slice object at 0x7da1b13a50c0>] / name[weight2]]
variable[Ka3] assign[=] binary_operation[call[name[weightedK]][<ast.Slice object at 0x7da1b13a4be0>] / name[weight2]]
variable[Ka2] assign[=] binary_operation[call[name[np].insert, parameter[call[name[Ka1]][<ast.Slice object at 0x7da1b13a7dc0>], constant[0], constant[0]]] + call[name[np].append, parameter[call[name[Ka3]][<ast.Slice object at 0x7da1b13a49d0>], constant[0]]]]
variable[A] assign[=] binary_operation[binary_operation[call[name[np].diag, parameter[binary_operation[constant[1] + name[Ka2]]]] + call[name[np].diag, parameter[<ast.UnaryOp object at 0x7da1b13a4d00>]]] + call[name[np].diag, parameter[<ast.UnaryOp object at 0x7da1b13a45b0>]]]
return[name[A]] | keyword[def] identifier[_make_diffusion_matrix] ( identifier[K] , identifier[weight1] = keyword[None] , identifier[weight2] = keyword[None] ):
literal[string]
identifier[J] = identifier[K] . identifier[size] - literal[int]
keyword[if] identifier[weight1] keyword[is] keyword[None] :
identifier[weight1] = identifier[np] . identifier[ones_like] ( identifier[K] )
keyword[if] identifier[weight2] keyword[is] keyword[None] :
identifier[weight2] = identifier[np] . identifier[ones] ( identifier[J] )
identifier[weightedK] = identifier[weight1] * identifier[K]
identifier[Ka1] = identifier[weightedK] [ literal[int] : identifier[J] ]/ identifier[weight2]
identifier[Ka3] = identifier[weightedK] [ literal[int] : identifier[J] + literal[int] ]/ identifier[weight2]
identifier[Ka2] = identifier[np] . identifier[insert] ( identifier[Ka1] [ literal[int] : identifier[J] ], literal[int] , literal[int] )+ identifier[np] . identifier[append] ( identifier[Ka3] [ literal[int] : identifier[J] - literal[int] ], literal[int] )
identifier[A] =( identifier[np] . identifier[diag] ( literal[int] + identifier[Ka2] , identifier[k] = literal[int] )+
identifier[np] . identifier[diag] (- identifier[Ka3] [ literal[int] : identifier[J] - literal[int] ], identifier[k] = literal[int] )+
identifier[np] . identifier[diag] (- identifier[Ka1] [ literal[int] : identifier[J] ], identifier[k] =- literal[int] ))
keyword[return] identifier[A] | def _make_diffusion_matrix(K, weight1=None, weight2=None):
"""Builds the general diffusion matrix with dimension nxn.
.. note::
:math:`n` = number of points of diffusion axis
:math:`n+1` = number of bounts of diffusion axis
**Function-all argument**
:param array K: dimensionless diffusivities at cell boundaries
*(size: 1xn+1)*
:param array weight1: weight_1 *(size: 1xn+1)*
:param array weight2: weight_2 *(size: 1xn)*
:returns: completely listed tridiagonal diffusion matrix *(size: nxn)*
:rtype: array
.. note::
The elements of array K are acutally dimensionless:
.. math::
K[i] = K_{\\textrm{physical}} \\frac{\\Delta t}{(\\Delta y)^2}
where :math:`K_{\\textrm{physical}}` is in unit :math:`\\frac{\\textrm{length}^2}{\\textrm{time}}`
The diffusion matrix is build like the following
.. math::
\\textrm{diffTriDiag}=
\\left[ \\begin{array}{cccccc}
1+\\frac{s_1 }{w_{2,0}} & -\\frac{s_1}{w_{2,0}} & 0 & & ... & 0 \\\\
-\\frac{s_1}{w_{2,1}} & 1+\\frac{s_1 + s_2}{w_{2,1}} & -\\frac{s_2}{w_{2,1}} & 0 & ... & 0 \\\\
0 & -\\frac{s_2}{w_{2,2}} & 1+\\frac{s_2 + s_3}{w_{2,2}} & -\\frac{s_3}{w_{2,2}} &... & 0 \\\\
& & \\ddots & \\ddots & \\ddots & \\\\
0 & 0 & ... & -\\frac{s_{n-2}}{w_{2,n-2}} & 1+\\frac{s_{n-2} + s_{n-1}}{w_{2,{n-2}}} & -\\frac{s_{n-1}}{w_{2,{n-2}}} \\\\
0 & 0 & ... & 0 & -\\frac{s_{n-1}}{w_{2,n-1}} & 1+\\frac{s_{n-1}}{w_{2,n-1}} \\\\
\\end{array} \\right]
where
.. math::
\\begin{array}{lllllll}
K &= [K_0, &K_1, &K_2, &...,&K_{n-1}, &K_{n}] \\\\
w_1 &= [w_{1,0}, &w_{1,1},&w_{1,2},&...,&w_{1,n-1},&w_{1,n}] \\\\
w_2 &= [w_{2,0}, &w_{2,1},&w_{2,2},&...,&w_{2,n-1}]
\\end{array}
and following subsitute:
.. math::
s_i = w_{1,i} K_i
"""
# \\begin{eqnarray}
# y & = & ax^2 + bx + c \\\\
# f(x) & = & x^2 + 2xy + y^2
# \\end{eqnarray}
# .. math::
#
# K &= [K_0, &K_1, &K_2, &... , &K_{n-1}, &K_{n}] \\\\
# w_1 &= [w_{1,0}, &w_{1,1}, &w_{1,2}, &... , &w_{1,n-1}, \\ &w_{1,n}] \\\\
# w_2 &= [w_{2,0}, \\ &w_{2,1}, \\ &w_{2,2}, \\ &... \\ , \\ &w_{2,n-1}] &o \\\\
#
# """
J = K.size - 1
if weight1 is None:
weight1 = np.ones_like(K) # depends on [control=['if'], data=['weight1']]
if weight2 is None:
weight2 = np.ones(J) # depends on [control=['if'], data=['weight2']]
weightedK = weight1 * K
Ka1 = weightedK[0:J] / weight2
Ka3 = weightedK[1:J + 1] / weight2
Ka2 = np.insert(Ka1[1:J], 0, 0) + np.append(Ka3[0:J - 1], 0)
# Atmosphere tridiagonal matrix
# this code makes a 3xN matrix, suitable for use with solve_banded
#diag = np.empty((3, J))
#diag[0, 1:] = -Ka3[0:J-1]
#diag[1, :] = 1 + Ka2
#diag[2, 0:J-1] = -Ka1[1:J]
# Build the full banded matrix instead
A = np.diag(1 + Ka2, k=0) + np.diag(-Ka3[0:J - 1], k=1) + np.diag(-Ka1[1:J], k=-1)
return A |
def delete_alias(i):
    """
    Input:  {
              path         - path to the entry
              data_uid     - data UID
              (data_alias) - data alias
              (repo_dict)  - repo cfg if available to check sync
              (share)      - if 'yes', try to rm via GIT
            }

    Output: {
              return  - return code = 0, if successful
                                     > 0, if error
              (error) - error text if return > 0
            }
    """

    rd = i.get('repo_dict', {})
    rshared = rd.get('shared', '')
    if i.get('share', '') == 'yes':
        rshared = 'git'

    p = i['path']
    alias = i.get('data_alias', '')

    uid = ''
    if alias != '' and os.path.isdir(p):
        # Path to the alias -> uid mapping file inside the .cm subdirectory.
        p0 = os.path.join(p, cfg['subdir_ck_ext'])
        p9 = cfg['file_alias_a'] + alias
        p1 = os.path.join(p0, p9)

        if rshared != '':
            # The repo tool (e.g. 'git rm') must run from inside the entry's
            # extension directory; remember the cwd to restore it at the end.
            ppp = os.getcwd()
            os.chdir(p0)

        if os.path.isfile(p1):
            # Best effort: recover the UID recorded in the alias file so the
            # reverse (uid -> alias) mapping can be removed below as well.
            # 'with' guarantees the file is closed even if readline() fails
            # (the original leaked the handle on exception).
            try:
                with open(p1) as f:
                    uid = f.readline().strip()
            except Exception:
                pass

        if rshared != '':
            # Remove via the repo tool first so the deletion is tracked.
            ss = cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
            os.system(ss)

        if os.path.isfile(p1):
            os.remove(p1)

        if uid == '':
            uid = i['data_uid']

        if uid != '':
            # Remove the uid -> alias mapping file.
            p9 = cfg['file_alias_u'] + uid
            p1 = os.path.join(p0, p9)
            if os.path.isfile(p1):
                if rshared != '':
                    ss = cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
                    os.system(ss)
                if os.path.isfile(p1):
                    os.remove(p1)

        if rshared != '':
            os.chdir(ppp)

    return {'return': 0}
constant[
Input: {
path - path to the entry
data_uid - data UID
(data_alias) - data alias
(repo_dict) - repo cfg if available to check sync
(share) - if 'yes', try to rm via GIT
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
]
variable[rd] assign[=] call[name[i].get, parameter[constant[repo_dict], dictionary[[], []]]]
variable[rshared] assign[=] call[name[rd].get, parameter[constant[shared], constant[]]]
variable[rsync] assign[=] call[name[rd].get, parameter[constant[sync], constant[]]]
if compare[call[name[i].get, parameter[constant[share], constant[]]] equal[==] constant[yes]] begin[:]
variable[rshared] assign[=] constant[git]
variable[p] assign[=] call[name[i]][constant[path]]
variable[alias] assign[=] call[name[i].get, parameter[constant[data_alias], constant[]]]
variable[uid] assign[=] constant[]
if <ast.BoolOp object at 0x7da1b22dc250> begin[:]
variable[p0] assign[=] call[name[os].path.join, parameter[name[p], call[name[cfg]][constant[subdir_ck_ext]]]]
variable[p9] assign[=] binary_operation[call[name[cfg]][constant[file_alias_a]] + name[alias]]
variable[p1] assign[=] call[name[os].path.join, parameter[name[p0], name[p9]]]
if compare[name[rshared] not_equal[!=] constant[]] begin[:]
variable[ppp] assign[=] call[name[os].getcwd, parameter[]]
call[name[os].chdir, parameter[name[p0]]]
if call[name[os].path.isfile, parameter[name[p1]]] begin[:]
<ast.Try object at 0x7da1b2273040>
if compare[name[rshared] not_equal[!=] constant[]] begin[:]
variable[ss] assign[=] call[call[call[call[name[cfg]][constant[repo_types]]][name[rshared]]][constant[rm]].replace, parameter[constant[$#files#$], name[p9]]]
variable[rx] assign[=] call[name[os].system, parameter[name[ss]]]
if call[name[os].path.isfile, parameter[name[p1]]] begin[:]
call[name[os].remove, parameter[name[p1]]]
if compare[name[uid] equal[==] constant[]] begin[:]
variable[uid] assign[=] call[name[i]][constant[data_uid]]
if compare[name[uid] not_equal[!=] constant[]] begin[:]
variable[p9] assign[=] binary_operation[call[name[cfg]][constant[file_alias_u]] + name[uid]]
variable[p1] assign[=] call[name[os].path.join, parameter[name[p0], name[p9]]]
if call[name[os].path.isfile, parameter[name[p1]]] begin[:]
if compare[name[rshared] not_equal[!=] constant[]] begin[:]
variable[ss] assign[=] call[call[call[call[name[cfg]][constant[repo_types]]][name[rshared]]][constant[rm]].replace, parameter[constant[$#files#$], name[p9]]]
variable[rx] assign[=] call[name[os].system, parameter[name[ss]]]
if call[name[os].path.isfile, parameter[name[p1]]] begin[:]
call[name[os].remove, parameter[name[p1]]]
if compare[name[rshared] not_equal[!=] constant[]] begin[:]
call[name[os].chdir, parameter[name[ppp]]]
return[dictionary[[<ast.Constant object at 0x7da1b22713c0>], [<ast.Constant object at 0x7da1b2270250>]]] | keyword[def] identifier[delete_alias] ( identifier[i] ):
literal[string]
identifier[rd] = identifier[i] . identifier[get] ( literal[string] ,{})
identifier[rshared] = identifier[rd] . identifier[get] ( literal[string] , literal[string] )
identifier[rsync] = identifier[rd] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[i] . identifier[get] ( literal[string] , literal[string] )== literal[string] : identifier[rshared] = literal[string]
identifier[p] = identifier[i] [ literal[string] ]
identifier[alias] = identifier[i] . identifier[get] ( literal[string] , literal[string] )
identifier[uid] = literal[string]
keyword[if] identifier[alias] != literal[string] keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[p] ):
identifier[p0] = identifier[os] . identifier[path] . identifier[join] ( identifier[p] , identifier[cfg] [ literal[string] ])
identifier[p9] = identifier[cfg] [ literal[string] ]+ identifier[alias]
identifier[p1] = identifier[os] . identifier[path] . identifier[join] ( identifier[p0] , identifier[p9] )
keyword[if] identifier[rshared] != literal[string] :
identifier[ppp] = identifier[os] . identifier[getcwd] ()
identifier[os] . identifier[chdir] ( identifier[p0] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[p1] ):
keyword[try] :
identifier[f] = identifier[open] ( identifier[p1] )
identifier[uid] = identifier[f] . identifier[readline] (). identifier[strip] ()
identifier[f] . identifier[close] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[None]
keyword[if] identifier[rshared] != literal[string] :
identifier[ss] = identifier[cfg] [ literal[string] ][ identifier[rshared] ][ literal[string] ]. identifier[replace] ( literal[string] , identifier[p9] )
identifier[rx] = identifier[os] . identifier[system] ( identifier[ss] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[p1] ): identifier[os] . identifier[remove] ( identifier[p1] )
keyword[if] identifier[uid] == literal[string] : identifier[uid] = identifier[i] [ literal[string] ]
keyword[if] identifier[uid] != literal[string] :
identifier[p9] = identifier[cfg] [ literal[string] ]+ identifier[uid]
identifier[p1] = identifier[os] . identifier[path] . identifier[join] ( identifier[p0] , identifier[p9] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[p1] ):
keyword[if] identifier[rshared] != literal[string] :
identifier[ss] = identifier[cfg] [ literal[string] ][ identifier[rshared] ][ literal[string] ]. identifier[replace] ( literal[string] , identifier[p9] )
identifier[rx] = identifier[os] . identifier[system] ( identifier[ss] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[p1] ): identifier[os] . identifier[remove] ( identifier[p1] )
keyword[if] identifier[rshared] != literal[string] :
identifier[os] . identifier[chdir] ( identifier[ppp] )
keyword[return] { literal[string] : literal[int] } | def delete_alias(i):
"""
Input: {
path - path to the entry
data_uid - data UID
(data_alias) - data alias
(repo_dict) - repo cfg if available to check sync
(share) - if 'yes', try to rm via GIT
}
Output: {
return - return code = 0, if successful
> 0, if error
(error) - error text if return > 0
}
"""
rd = i.get('repo_dict', {})
rshared = rd.get('shared', '')
rsync = rd.get('sync', '')
if i.get('share', '') == 'yes':
rshared = 'git' # depends on [control=['if'], data=[]]
p = i['path']
alias = i.get('data_alias', '')
uid = ''
if alias != '' and os.path.isdir(p):
p0 = os.path.join(p, cfg['subdir_ck_ext'])
p9 = cfg['file_alias_a'] + alias
p1 = os.path.join(p0, p9)
if rshared != '':
ppp = os.getcwd()
os.chdir(p0) # depends on [control=['if'], data=[]]
if os.path.isfile(p1):
try:
f = open(p1)
uid = f.readline().strip()
f.close() # depends on [control=['try'], data=[]]
except Exception as e:
None # depends on [control=['except'], data=[]]
if rshared != '':
ss = cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
rx = os.system(ss) # depends on [control=['if'], data=['rshared']]
if os.path.isfile(p1):
os.remove(p1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if uid == '':
uid = i['data_uid'] # depends on [control=['if'], data=['uid']]
if uid != '':
p9 = cfg['file_alias_u'] + uid
p1 = os.path.join(p0, p9)
if os.path.isfile(p1):
if rshared != '':
ss = cfg['repo_types'][rshared]['rm'].replace('$#files#$', p9)
rx = os.system(ss) # depends on [control=['if'], data=['rshared']]
if os.path.isfile(p1):
os.remove(p1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['uid']]
if rshared != '':
os.chdir(ppp) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return {'return': 0} |
def velocity_confidence(data, vkey='velocity', copy=False):
    """Computes confidences of velocities.

    Arguments
    ---------
    data: :class:`~anndata.AnnData`
        Annotated data matrix.
    vkey: `str` (default: `'velocity'`)
        Name of velocity estimates to be used.
    copy: `bool` (default: `False`)
        Return a copy instead of writing to adata.

    Returns
    -------
    Returns or updates `adata` with the attributes
    velocity_length: `.obs`
        Length of the velocity vectors for each individual cell
    velocity_confidence: `.obs`
        Confidence for each cell
    """
    adata = data.copy() if copy else data

    if vkey not in adata.layers.keys():
        raise ValueError(
            'You need to run `tl.velocity` first.')

    # Restrict to the genes for which a velocity was actually fitted.
    gene_mask = np.array(adata.var[vkey + '_genes'].values, dtype=bool)
    X = adata.layers['Ms'][:, gene_mask].copy()
    V = adata.layers[vkey][:, gene_mask].copy()

    neigh_indices = get_indices(dist=adata.uns['neighbors']['distances'])[0]

    # Center each cell's velocity vector before computing correlations.
    V = V - V.mean(1)[:, None]
    V_norm = norm(V)
    R = np.zeros(adata.n_obs)

    for cell in range(adata.n_obs):
        Vi_neighs = V[neigh_indices[cell]]
        Vi_neighs = Vi_neighs - Vi_neighs.mean(1)[:, None]
        # Mean cosine similarity between the cell's velocity and those of
        # its neighbors.
        R[cell] = np.mean(np.einsum('ij, j', Vi_neighs, V[cell]) / (norm(Vi_neighs) * V_norm[cell])[None, :])

    adata.obs[vkey + '_length'] = V_norm.round(2)
    adata.obs[vkey + '_confidence'] = R

    logg.hint(f"added '{vkey}_confidence' (adata.obs)")

    if vkey + '_confidence_transition' not in adata.obs.keys():
        velocity_confidence_transition(adata, vkey)

    return adata if copy else None
constant[Computes confidences of velocities.
Arguments
---------
data: :class:`~anndata.AnnData`
Annotated data matrix.
vkey: `str` (default: `'velocity'`)
Name of velocity estimates to be used.
copy: `bool` (default: `False`)
Return a copy instead of writing to adata.
Returns
-------
Returns or updates `adata` with the attributes
velocity_length: `.obs`
Length of the velocity vectors for each individual cell
velocity_confidence: `.obs`
Confidence for each cell
]
variable[adata] assign[=] <ast.IfExp object at 0x7da20e957eb0>
if compare[name[vkey] <ast.NotIn object at 0x7da2590d7190> call[name[adata].layers.keys, parameter[]]] begin[:]
<ast.Raise object at 0x7da20e957910>
variable[idx] assign[=] call[name[np].array, parameter[call[name[adata].var][binary_operation[name[vkey] + constant[_genes]]].values]]
<ast.Tuple object at 0x7da1b0d526e0> assign[=] tuple[[<ast.Call object at 0x7da2054a5d20>, <ast.Call object at 0x7da2054a6aa0>]]
variable[indices] assign[=] call[call[name[get_indices], parameter[]]][constant[0]]
<ast.AugAssign object at 0x7da2054a4ca0>
variable[V_norm] assign[=] call[name[norm], parameter[name[V]]]
variable[R] assign[=] call[name[np].zeros, parameter[name[adata].n_obs]]
for taget[name[i]] in starred[call[name[range], parameter[name[adata].n_obs]]] begin[:]
variable[Vi_neighs] assign[=] call[name[V]][call[name[indices]][name[i]]]
<ast.AugAssign object at 0x7da2054a7ca0>
call[name[R]][name[i]] assign[=] call[name[np].mean, parameter[binary_operation[call[name[np].einsum, parameter[constant[ij, j], name[Vi_neighs], call[name[V]][name[i]]]] / call[binary_operation[call[name[norm], parameter[name[Vi_neighs]]] * call[name[V_norm]][name[i]]]][tuple[[<ast.Constant object at 0x7da18fe92200>, <ast.Slice object at 0x7da18fe93fd0>]]]]]]
call[name[adata].obs][binary_operation[name[vkey] + constant[_length]]] assign[=] call[name[V_norm].round, parameter[constant[2]]]
call[name[adata].obs][binary_operation[name[vkey] + constant[_confidence]]] assign[=] name[R]
call[name[logg].hint, parameter[binary_operation[binary_operation[constant[added '] + name[vkey]] + constant[_confidence' (adata.obs)]]]]
if compare[binary_operation[name[vkey] + constant[_confidence_transition]] <ast.NotIn object at 0x7da2590d7190> call[name[adata].obs.keys, parameter[]]] begin[:]
call[name[velocity_confidence_transition], parameter[name[adata], name[vkey]]]
return[<ast.IfExp object at 0x7da18fe91bd0>] | keyword[def] identifier[velocity_confidence] ( identifier[data] , identifier[vkey] = literal[string] , identifier[copy] = keyword[False] ):
literal[string]
identifier[adata] = identifier[data] . identifier[copy] () keyword[if] identifier[copy] keyword[else] identifier[data]
keyword[if] identifier[vkey] keyword[not] keyword[in] identifier[adata] . identifier[layers] . identifier[keys] ():
keyword[raise] identifier[ValueError] (
literal[string] )
identifier[idx] = identifier[np] . identifier[array] ( identifier[adata] . identifier[var] [ identifier[vkey] + literal[string] ]. identifier[values] , identifier[dtype] = identifier[bool] )
identifier[X] , identifier[V] = identifier[adata] . identifier[layers] [ literal[string] ][:, identifier[idx] ]. identifier[copy] (), identifier[adata] . identifier[layers] [ identifier[vkey] ][:, identifier[idx] ]. identifier[copy] ()
identifier[indices] = identifier[get_indices] ( identifier[dist] = identifier[adata] . identifier[uns] [ literal[string] ][ literal[string] ])[ literal[int] ]
identifier[V] -= identifier[V] . identifier[mean] ( literal[int] )[:, keyword[None] ]
identifier[V_norm] = identifier[norm] ( identifier[V] )
identifier[R] = identifier[np] . identifier[zeros] ( identifier[adata] . identifier[n_obs] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[adata] . identifier[n_obs] ):
identifier[Vi_neighs] = identifier[V] [ identifier[indices] [ identifier[i] ]]
identifier[Vi_neighs] -= identifier[Vi_neighs] . identifier[mean] ( literal[int] )[:, keyword[None] ]
identifier[R] [ identifier[i] ]= identifier[np] . identifier[mean] ( identifier[np] . identifier[einsum] ( literal[string] , identifier[Vi_neighs] , identifier[V] [ identifier[i] ])/( identifier[norm] ( identifier[Vi_neighs] )* identifier[V_norm] [ identifier[i] ])[ keyword[None] ,:])
identifier[adata] . identifier[obs] [ identifier[vkey] + literal[string] ]= identifier[V_norm] . identifier[round] ( literal[int] )
identifier[adata] . identifier[obs] [ identifier[vkey] + literal[string] ]= identifier[R]
identifier[logg] . identifier[hint] ( literal[string] + identifier[vkey] + literal[string] )
keyword[if] identifier[vkey] + literal[string] keyword[not] keyword[in] identifier[adata] . identifier[obs] . identifier[keys] ():
identifier[velocity_confidence_transition] ( identifier[adata] , identifier[vkey] )
keyword[return] identifier[adata] keyword[if] identifier[copy] keyword[else] keyword[None] | def velocity_confidence(data, vkey='velocity', copy=False):
"""Computes confidences of velocities.
Arguments
---------
data: :class:`~anndata.AnnData`
Annotated data matrix.
vkey: `str` (default: `'velocity'`)
Name of velocity estimates to be used.
copy: `bool` (default: `False`)
Return a copy instead of writing to adata.
Returns
-------
Returns or updates `adata` with the attributes
velocity_length: `.obs`
Length of the velocity vectors for each individual cell
velocity_confidence: `.obs`
Confidence for each cell
"""
adata = data.copy() if copy else data
if vkey not in adata.layers.keys():
raise ValueError('You need to run `tl.velocity` first.') # depends on [control=['if'], data=[]]
idx = np.array(adata.var[vkey + '_genes'].values, dtype=bool)
(X, V) = (adata.layers['Ms'][:, idx].copy(), adata.layers[vkey][:, idx].copy())
indices = get_indices(dist=adata.uns['neighbors']['distances'])[0]
V -= V.mean(1)[:, None]
V_norm = norm(V)
R = np.zeros(adata.n_obs)
for i in range(adata.n_obs):
Vi_neighs = V[indices[i]]
Vi_neighs -= Vi_neighs.mean(1)[:, None]
R[i] = np.mean(np.einsum('ij, j', Vi_neighs, V[i]) / (norm(Vi_neighs) * V_norm[i])[None, :]) # depends on [control=['for'], data=['i']]
adata.obs[vkey + '_length'] = V_norm.round(2)
adata.obs[vkey + '_confidence'] = R
logg.hint("added '" + vkey + "_confidence' (adata.obs)")
if vkey + '_confidence_transition' not in adata.obs.keys():
velocity_confidence_transition(adata, vkey) # depends on [control=['if'], data=[]]
return adata if copy else None |
def get_zone(self, zone_id, records=True):
    """Get a zone and its records.

    :param zone: the zone name
    :returns: A dictionary containing a large amount of information about
              the specified zone.
    """
    # Only request the (potentially large) record list when asked for.
    object_mask = 'resourceRecords' if records else None
    return self.service.getObject(id=zone_id, mask=object_mask)
constant[Get a zone and its records.
:param zone: the zone name
:returns: A dictionary containing a large amount of information about
the specified zone.
]
variable[mask] assign[=] constant[None]
if name[records] begin[:]
variable[mask] assign[=] constant[resourceRecords]
return[call[name[self].service.getObject, parameter[]]] | keyword[def] identifier[get_zone] ( identifier[self] , identifier[zone_id] , identifier[records] = keyword[True] ):
literal[string]
identifier[mask] = keyword[None]
keyword[if] identifier[records] :
identifier[mask] = literal[string]
keyword[return] identifier[self] . identifier[service] . identifier[getObject] ( identifier[id] = identifier[zone_id] , identifier[mask] = identifier[mask] ) | def get_zone(self, zone_id, records=True):
"""Get a zone and its records.
:param zone: the zone name
:returns: A dictionary containing a large amount of information about
the specified zone.
"""
mask = None
if records:
mask = 'resourceRecords' # depends on [control=['if'], data=[]]
return self.service.getObject(id=zone_id, mask=mask) |
def ip(args):
    """
    %prog describe

    Show current IP address from JSON settings.
    """
    if args:
        # Command takes no arguments; show usage and exit.
        sys.exit(not p.print_help())

    skeleton = InstanceSkeleton()
    print("IP address:", skeleton.private_ip_address, file=sys.stderr)
    print("Instance type:", skeleton.instance_type, file=sys.stderr)
print("Instance type:", s.instance_type, file=sys.stderr) | def function[ip, parameter[args]]:
constant[
%prog describe
Show current IP address from JSON settings.
]
if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[0]] begin[:]
call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b09030a0>]]
variable[s] assign[=] call[name[InstanceSkeleton], parameter[]]
call[name[print], parameter[constant[IP address:], name[s].private_ip_address]]
call[name[print], parameter[constant[Instance type:], name[s].instance_type]] | keyword[def] identifier[ip] ( identifier[args] ):
literal[string]
keyword[if] identifier[len] ( identifier[args] )!= literal[int] :
identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ())
identifier[s] = identifier[InstanceSkeleton] ()
identifier[print] ( literal[string] , identifier[s] . identifier[private_ip_address] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[print] ( literal[string] , identifier[s] . identifier[instance_type] , identifier[file] = identifier[sys] . identifier[stderr] ) | def ip(args):
"""
%prog describe
Show current IP address from JSON settings.
"""
if len(args) != 0:
sys.exit(not p.print_help()) # depends on [control=['if'], data=[]]
s = InstanceSkeleton()
print('IP address:', s.private_ip_address, file=sys.stderr)
print('Instance type:', s.instance_type, file=sys.stderr) |
def upload_progress(request):
    """
    Used by Ajax calls

    Return the upload progress and total length values

    Looks up the progress data cached under "<remote-addr>_<progress-id>"
    and returns it JSON-encoded. Implicitly returns None when no
    X-Progress-ID is supplied (or it is empty).
    """
    # Initialize so a request carrying the header in neither GET nor META
    # does not raise UnboundLocalError at the check below.
    progress_id = None
    if 'X-Progress-ID' in request.GET:
        progress_id = request.GET['X-Progress-ID']
    elif 'X-Progress-ID' in request.META:
        progress_id = request.META['X-Progress-ID']
    if progress_id:
        cache_key = "%s_%s" % (request.META['REMOTE_ADDR'], progress_id)
        data = cache.get(cache_key)
        return HttpResponse(simplejson.dumps(data))
return HttpResponse(simplejson.dumps(data)) | def function[upload_progress, parameter[request]]:
constant[
Used by Ajax calls
Return the upload progress and total length values
]
if compare[constant[X-Progress-ID] in name[request].GET] begin[:]
variable[progress_id] assign[=] call[name[request].GET][constant[X-Progress-ID]]
if name[progress_id] begin[:]
variable[cache_key] assign[=] binary_operation[constant[%s_%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Subscript object at 0x7da1b28c5900>, <ast.Name object at 0x7da1b28c4f10>]]]
variable[data] assign[=] call[name[cache].get, parameter[name[cache_key]]]
return[call[name[HttpResponse], parameter[call[name[simplejson].dumps, parameter[name[data]]]]]] | keyword[def] identifier[upload_progress] ( identifier[request] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[request] . identifier[GET] :
identifier[progress_id] = identifier[request] . identifier[GET] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[request] . identifier[META] :
identifier[progress_id] = identifier[request] . identifier[META] [ literal[string] ]
keyword[if] identifier[progress_id] :
identifier[cache_key] = literal[string] %( identifier[request] . identifier[META] [ literal[string] ], identifier[progress_id] )
identifier[data] = identifier[cache] . identifier[get] ( identifier[cache_key] )
keyword[return] identifier[HttpResponse] ( identifier[simplejson] . identifier[dumps] ( identifier[data] )) | def upload_progress(request):
"""
Used by Ajax calls
Return the upload progress and total length values
"""
if 'X-Progress-ID' in request.GET:
progress_id = request.GET['X-Progress-ID'] # depends on [control=['if'], data=[]]
elif 'X-Progress-ID' in request.META:
progress_id = request.META['X-Progress-ID'] # depends on [control=['if'], data=[]]
if progress_id:
cache_key = '%s_%s' % (request.META['REMOTE_ADDR'], progress_id)
data = cache.get(cache_key)
return HttpResponse(simplejson.dumps(data)) # depends on [control=['if'], data=[]] |
def GetFileEntryByPathSpec(self, path_spec):
    """Retrieves a file entry for a path specification.

    Args:
      path_spec (PathSpec): a path specification.

    Returns:
      CPIOFileEntry: a file entry or None if not available.
    """
    location = getattr(path_spec, 'location', None)
    if location is None:
        return None
    if not location.startswith(self.LOCATION_ROOT):
        return None

    # A location consisting of just the root marker maps onto the
    # virtual root file entry.
    if len(location) == 1:
        return cpio_file_entry.CPIOFileEntry(
            self._resolver_context, self, path_spec, is_root=True,
            is_virtual=True)

    archive_entry = self._cpio_archive_file.GetFileEntryByPath(location[1:])
    if archive_entry is None:
        return None

    return cpio_file_entry.CPIOFileEntry(
        self._resolver_context, self, path_spec,
        cpio_archive_file_entry=archive_entry)
constant[Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
CPIOFileEntry: a file entry or None if not available.
]
variable[location] assign[=] call[name[getattr], parameter[name[path_spec], constant[location], constant[None]]]
if <ast.BoolOp object at 0x7da1b07aeec0> begin[:]
return[constant[None]]
if compare[call[name[len], parameter[name[location]]] equal[==] constant[1]] begin[:]
return[call[name[cpio_file_entry].CPIOFileEntry, parameter[name[self]._resolver_context, name[self], name[path_spec]]]]
variable[cpio_archive_file_entry] assign[=] call[name[self]._cpio_archive_file.GetFileEntryByPath, parameter[call[name[location]][<ast.Slice object at 0x7da1b07ad3c0>]]]
if compare[name[cpio_archive_file_entry] is constant[None]] begin[:]
return[constant[None]]
return[call[name[cpio_file_entry].CPIOFileEntry, parameter[name[self]._resolver_context, name[self], name[path_spec]]]] | keyword[def] identifier[GetFileEntryByPathSpec] ( identifier[self] , identifier[path_spec] ):
literal[string]
identifier[location] = identifier[getattr] ( identifier[path_spec] , literal[string] , keyword[None] )
keyword[if] ( identifier[location] keyword[is] keyword[None] keyword[or]
keyword[not] identifier[location] . identifier[startswith] ( identifier[self] . identifier[LOCATION_ROOT] )):
keyword[return] keyword[None]
keyword[if] identifier[len] ( identifier[location] )== literal[int] :
keyword[return] identifier[cpio_file_entry] . identifier[CPIOFileEntry] (
identifier[self] . identifier[_resolver_context] , identifier[self] , identifier[path_spec] , identifier[is_root] = keyword[True] ,
identifier[is_virtual] = keyword[True] )
identifier[cpio_archive_file_entry] = identifier[self] . identifier[_cpio_archive_file] . identifier[GetFileEntryByPath] (
identifier[location] [ literal[int] :])
keyword[if] identifier[cpio_archive_file_entry] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[cpio_file_entry] . identifier[CPIOFileEntry] (
identifier[self] . identifier[_resolver_context] , identifier[self] , identifier[path_spec] ,
identifier[cpio_archive_file_entry] = identifier[cpio_archive_file_entry] ) | def GetFileEntryByPathSpec(self, path_spec):
"""Retrieves a file entry for a path specification.
Args:
path_spec (PathSpec): a path specification.
Returns:
CPIOFileEntry: a file entry or None if not available.
"""
location = getattr(path_spec, 'location', None)
if location is None or not location.startswith(self.LOCATION_ROOT):
return None # depends on [control=['if'], data=[]]
if len(location) == 1:
return cpio_file_entry.CPIOFileEntry(self._resolver_context, self, path_spec, is_root=True, is_virtual=True) # depends on [control=['if'], data=[]]
cpio_archive_file_entry = self._cpio_archive_file.GetFileEntryByPath(location[1:])
if cpio_archive_file_entry is None:
return None # depends on [control=['if'], data=[]]
return cpio_file_entry.CPIOFileEntry(self._resolver_context, self, path_spec, cpio_archive_file_entry=cpio_archive_file_entry) |
def canonicalize_version(version):
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment.
    """
    try:
        parsed = Version(version)
    except InvalidVersion:
        # Legacy versions cannot be normalized
        return version

    pieces = []

    # Epoch
    if parsed.epoch != 0:
        pieces.append("{0}!".format(parsed.epoch))

    # Release segment
    # NB: This strips trailing '.0's to normalize
    release = ".".join(str(segment) for segment in parsed.release)
    pieces.append(re.sub(r"(\.0)+$", "", release))

    # Pre-release
    if parsed.pre is not None:
        pieces.append("".join(str(segment) for segment in parsed.pre))

    # Post-release
    if parsed.post is not None:
        pieces.append(".post{0}".format(parsed.post))

    # Development release
    if parsed.dev is not None:
        pieces.append(".dev{0}".format(parsed.dev))

    # Local version segment
    if parsed.local is not None:
        pieces.append("+{0}".format(parsed.local))

    return "".join(pieces)
constant[
This is very similar to Version.__str__, but has one subtle differences
with the way it handles the release segment.
]
<ast.Try object at 0x7da1b1ea35e0>
variable[parts] assign[=] list[[]]
if compare[name[version].epoch not_equal[!=] constant[0]] begin[:]
call[name[parts].append, parameter[call[constant[{0}!].format, parameter[name[version].epoch]]]]
call[name[parts].append, parameter[call[name[re].sub, parameter[constant[(\.0)+$], constant[], call[constant[.].join, parameter[<ast.GeneratorExp object at 0x7da1b1ea0790>]]]]]]
if compare[name[version].pre is_not constant[None]] begin[:]
call[name[parts].append, parameter[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b1ea16f0>]]]]
if compare[name[version].post is_not constant[None]] begin[:]
call[name[parts].append, parameter[call[constant[.post{0}].format, parameter[name[version].post]]]]
if compare[name[version].dev is_not constant[None]] begin[:]
call[name[parts].append, parameter[call[constant[.dev{0}].format, parameter[name[version].dev]]]]
if compare[name[version].local is_not constant[None]] begin[:]
call[name[parts].append, parameter[call[constant[+{0}].format, parameter[name[version].local]]]]
return[call[constant[].join, parameter[name[parts]]]] | keyword[def] identifier[canonicalize_version] ( identifier[version] ):
literal[string]
keyword[try] :
identifier[version] = identifier[Version] ( identifier[version] )
keyword[except] identifier[InvalidVersion] :
keyword[return] identifier[version]
identifier[parts] =[]
keyword[if] identifier[version] . identifier[epoch] != literal[int] :
identifier[parts] . identifier[append] ( literal[string] . identifier[format] ( identifier[version] . identifier[epoch] ))
identifier[parts] . identifier[append] ( identifier[re] . identifier[sub] ( literal[string] , literal[string] , literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version] . identifier[release] )))
keyword[if] identifier[version] . identifier[pre] keyword[is] keyword[not] keyword[None] :
identifier[parts] . identifier[append] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[version] . identifier[pre] ))
keyword[if] identifier[version] . identifier[post] keyword[is] keyword[not] keyword[None] :
identifier[parts] . identifier[append] ( literal[string] . identifier[format] ( identifier[version] . identifier[post] ))
keyword[if] identifier[version] . identifier[dev] keyword[is] keyword[not] keyword[None] :
identifier[parts] . identifier[append] ( literal[string] . identifier[format] ( identifier[version] . identifier[dev] ))
keyword[if] identifier[version] . identifier[local] keyword[is] keyword[not] keyword[None] :
identifier[parts] . identifier[append] ( literal[string] . identifier[format] ( identifier[version] . identifier[local] ))
keyword[return] literal[string] . identifier[join] ( identifier[parts] ) | def canonicalize_version(version):
"""
This is very similar to Version.__str__, but has one subtle differences
with the way it handles the release segment.
"""
try:
version = Version(version) # depends on [control=['try'], data=[]]
except InvalidVersion:
# Legacy versions cannot be normalized
return version # depends on [control=['except'], data=[]]
parts = []
# Epoch
if version.epoch != 0:
parts.append('{0}!'.format(version.epoch)) # depends on [control=['if'], data=[]]
# Release segment
# NB: This strips trailing '.0's to normalize
parts.append(re.sub('(\\.0)+$', '', '.'.join((str(x) for x in version.release))))
# Pre-release
if version.pre is not None:
parts.append(''.join((str(x) for x in version.pre))) # depends on [control=['if'], data=[]]
# Post-release
if version.post is not None:
parts.append('.post{0}'.format(version.post)) # depends on [control=['if'], data=[]]
# Development release
if version.dev is not None:
parts.append('.dev{0}'.format(version.dev)) # depends on [control=['if'], data=[]]
# Local version segment
if version.local is not None:
parts.append('+{0}'.format(version.local)) # depends on [control=['if'], data=[]]
return ''.join(parts) |
def set_params(self, **params):
    """Sets new values to the specified parameters.

    Parameters:
    -----------
    params : variable sized dictionary, n key-word arguments

    Example:
    ```
    scaler.set_params(std=0.30)
    ```

    Returns:
    --------
    void : void, returns nothing
    """
    for name, value in params.items():
        # setattr is the idiomatic spelling; unlike writing into
        # vars(self), it also works for classes using __slots__ and
        # honors properties/descriptors.
        setattr(self, name, value)
constant[Sets new values to the specified parameters.
Parameters:
-----------
params : variable sized dictionary, n key-word arguments
Example:
```
scaler.set_params(std=0.30)
```
Returns:
--------
void : void, returns nothing
]
for taget[tuple[[<ast.Name object at 0x7da1b22989d0>, <ast.Name object at 0x7da1b22984c0>]]] in starred[call[name[params].items, parameter[]]] begin[:]
call[call[name[vars], parameter[name[self]]]][name[k]] assign[=] name[v] | keyword[def] identifier[set_params] ( identifier[self] ,** identifier[params] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] ():
identifier[vars] ( identifier[self] )[ identifier[k] ]= identifier[v] | def set_params(self, **params):
"""Sets new values to the specified parameters.
Parameters:
-----------
params : variable sized dictionary, n key-word arguments
Example:
```
scaler.set_params(std=0.30)
```
Returns:
--------
void : void, returns nothing
"""
for (k, v) in params.items():
vars(self)[k] = v # depends on [control=['for'], data=[]] |
def _build_A(self, force=False):
r"""
Builds the coefficient matrix based on conductances between pores.
The conductance to use is specified in the algorithm's ``settings``
under ``conductance``. In subclasses (e.g. ``FickianDiffusion``)
this is set by default, though it can be overwritten.
Parameters
----------
force : Boolean (default is ``False``)
If set to ``True`` then the A matrix is built from new. If
``False`` (the default), a cached version of A is returned. The
cached version is *clean* in the sense that no boundary conditions
or sources terms have been added to it.
"""
if force:
self._pure_A = None
if self._pure_A is None:
network = self.project.network
phase = self.project.phases()[self.settings['phase']]
g = phase[self.settings['conductance']]
am = network.create_adjacency_matrix(weights=g, fmt='coo')
self._pure_A = spgr.laplacian(am)
self.A = self._pure_A.copy() | def function[_build_A, parameter[self, force]]:
constant[
Builds the coefficient matrix based on conductances between pores.
The conductance to use is specified in the algorithm's ``settings``
under ``conductance``. In subclasses (e.g. ``FickianDiffusion``)
this is set by default, though it can be overwritten.
Parameters
----------
force : Boolean (default is ``False``)
If set to ``True`` then the A matrix is built from new. If
``False`` (the default), a cached version of A is returned. The
cached version is *clean* in the sense that no boundary conditions
or sources terms have been added to it.
]
if name[force] begin[:]
name[self]._pure_A assign[=] constant[None]
if compare[name[self]._pure_A is constant[None]] begin[:]
variable[network] assign[=] name[self].project.network
variable[phase] assign[=] call[call[name[self].project.phases, parameter[]]][call[name[self].settings][constant[phase]]]
variable[g] assign[=] call[name[phase]][call[name[self].settings][constant[conductance]]]
variable[am] assign[=] call[name[network].create_adjacency_matrix, parameter[]]
name[self]._pure_A assign[=] call[name[spgr].laplacian, parameter[name[am]]]
name[self].A assign[=] call[name[self]._pure_A.copy, parameter[]] | keyword[def] identifier[_build_A] ( identifier[self] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] identifier[force] :
identifier[self] . identifier[_pure_A] = keyword[None]
keyword[if] identifier[self] . identifier[_pure_A] keyword[is] keyword[None] :
identifier[network] = identifier[self] . identifier[project] . identifier[network]
identifier[phase] = identifier[self] . identifier[project] . identifier[phases] ()[ identifier[self] . identifier[settings] [ literal[string] ]]
identifier[g] = identifier[phase] [ identifier[self] . identifier[settings] [ literal[string] ]]
identifier[am] = identifier[network] . identifier[create_adjacency_matrix] ( identifier[weights] = identifier[g] , identifier[fmt] = literal[string] )
identifier[self] . identifier[_pure_A] = identifier[spgr] . identifier[laplacian] ( identifier[am] )
identifier[self] . identifier[A] = identifier[self] . identifier[_pure_A] . identifier[copy] () | def _build_A(self, force=False):
"""
Builds the coefficient matrix based on conductances between pores.
The conductance to use is specified in the algorithm's ``settings``
under ``conductance``. In subclasses (e.g. ``FickianDiffusion``)
this is set by default, though it can be overwritten.
Parameters
----------
force : Boolean (default is ``False``)
If set to ``True`` then the A matrix is built from new. If
``False`` (the default), a cached version of A is returned. The
cached version is *clean* in the sense that no boundary conditions
or sources terms have been added to it.
"""
if force:
self._pure_A = None # depends on [control=['if'], data=[]]
if self._pure_A is None:
network = self.project.network
phase = self.project.phases()[self.settings['phase']]
g = phase[self.settings['conductance']]
am = network.create_adjacency_matrix(weights=g, fmt='coo')
self._pure_A = spgr.laplacian(am) # depends on [control=['if'], data=[]]
self.A = self._pure_A.copy() |
def _get_url(self, resource, item, sys_id=None):
"""Takes table and sys_id (if present), and returns a URL
:param resource: API resource
:param item: API resource item
:param sys_id: Record sys_id
:return:
- url string
"""
url_str = '%(base_url)s/%(base_path)s/%(resource)s/%(item)s' % (
{
'base_url': self.base_url,
'base_path': self.base_path,
'resource': resource,
'item': item
}
)
if sys_id:
return "%s/%s" % (url_str, sys_id)
return url_str | def function[_get_url, parameter[self, resource, item, sys_id]]:
constant[Takes table and sys_id (if present), and returns a URL
:param resource: API resource
:param item: API resource item
:param sys_id: Record sys_id
:return:
- url string
]
variable[url_str] assign[=] binary_operation[constant[%(base_url)s/%(base_path)s/%(resource)s/%(item)s] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b06584c0>, <ast.Constant object at 0x7da1b06580a0>, <ast.Constant object at 0x7da1b065a5c0>, <ast.Constant object at 0x7da1b06582e0>], [<ast.Attribute object at 0x7da1b065ae30>, <ast.Attribute object at 0x7da1b065b1c0>, <ast.Name object at 0x7da1b0659540>, <ast.Name object at 0x7da1b0658df0>]]]
if name[sys_id] begin[:]
return[binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b065b250>, <ast.Name object at 0x7da1b0658dc0>]]]]
return[name[url_str]] | keyword[def] identifier[_get_url] ( identifier[self] , identifier[resource] , identifier[item] , identifier[sys_id] = keyword[None] ):
literal[string]
identifier[url_str] = literal[string] %(
{
literal[string] : identifier[self] . identifier[base_url] ,
literal[string] : identifier[self] . identifier[base_path] ,
literal[string] : identifier[resource] ,
literal[string] : identifier[item]
}
)
keyword[if] identifier[sys_id] :
keyword[return] literal[string] %( identifier[url_str] , identifier[sys_id] )
keyword[return] identifier[url_str] | def _get_url(self, resource, item, sys_id=None):
"""Takes table and sys_id (if present), and returns a URL
:param resource: API resource
:param item: API resource item
:param sys_id: Record sys_id
:return:
- url string
"""
url_str = '%(base_url)s/%(base_path)s/%(resource)s/%(item)s' % {'base_url': self.base_url, 'base_path': self.base_path, 'resource': resource, 'item': item}
if sys_id:
return '%s/%s' % (url_str, sys_id) # depends on [control=['if'], data=[]]
return url_str |
def execute(self, eopatch):
""" Add cloud binary mask and (optionally) cloud probability map to input eopatch
:param eopatch: Input `EOPatch` instance
:return: `EOPatch` with additional cloud maps
"""
# Downsample or make request
if not eopatch.data:
raise ValueError('EOPatch must contain some data feature')
if self.data_feature in eopatch.data:
new_data, rescale = self._downscaling(eopatch.data[self.data_feature], eopatch.meta_info)
reference_shape = eopatch.data[self.data_feature].shape[:3]
else:
new_data, new_dates = self._make_request(eopatch.bbox, eopatch.meta_info, eopatch.timestamp)
removed_frames = eopatch.consolidate_timestamps(new_dates)
for rm_frame in removed_frames:
LOGGER.warning('Removed data for frame %s from '
'eopatch due to unavailability of %s!', str(rm_frame), self.data_feature)
# Get reference shape from first item in data dictionary
reference_shape = next(iter(eopatch.data.values())).shape[:3]
rescale = self._get_rescale_factors(reference_shape[1:3], eopatch.meta_info)
clf_probs_lr = self.classifier.get_cloud_probability_maps(new_data)
clf_mask_lr = self.classifier.get_mask_from_prob(clf_probs_lr)
# Add cloud mask as a feature to EOPatch
clf_mask_hr = self._upsampling(clf_mask_lr, rescale, reference_shape, interp='nearest')
eopatch.mask[self.cm_feature] = clf_mask_hr.astype(np.bool)
# If the feature name for cloud probability maps is specified, add as feature
if self.cprobs_feature is not None:
clf_probs_hr = self._upsampling(clf_probs_lr, rescale, reference_shape, interp='linear')
eopatch.data[self.cprobs_feature] = clf_probs_hr.astype(np.float32)
return eopatch | def function[execute, parameter[self, eopatch]]:
constant[ Add cloud binary mask and (optionally) cloud probability map to input eopatch
:param eopatch: Input `EOPatch` instance
:return: `EOPatch` with additional cloud maps
]
if <ast.UnaryOp object at 0x7da2047ebfd0> begin[:]
<ast.Raise object at 0x7da2047eb040>
if compare[name[self].data_feature in name[eopatch].data] begin[:]
<ast.Tuple object at 0x7da2047eb520> assign[=] call[name[self]._downscaling, parameter[call[name[eopatch].data][name[self].data_feature], name[eopatch].meta_info]]
variable[reference_shape] assign[=] call[call[name[eopatch].data][name[self].data_feature].shape][<ast.Slice object at 0x7da2047e8ac0>]
variable[clf_probs_lr] assign[=] call[name[self].classifier.get_cloud_probability_maps, parameter[name[new_data]]]
variable[clf_mask_lr] assign[=] call[name[self].classifier.get_mask_from_prob, parameter[name[clf_probs_lr]]]
variable[clf_mask_hr] assign[=] call[name[self]._upsampling, parameter[name[clf_mask_lr], name[rescale], name[reference_shape]]]
call[name[eopatch].mask][name[self].cm_feature] assign[=] call[name[clf_mask_hr].astype, parameter[name[np].bool]]
if compare[name[self].cprobs_feature is_not constant[None]] begin[:]
variable[clf_probs_hr] assign[=] call[name[self]._upsampling, parameter[name[clf_probs_lr], name[rescale], name[reference_shape]]]
call[name[eopatch].data][name[self].cprobs_feature] assign[=] call[name[clf_probs_hr].astype, parameter[name[np].float32]]
return[name[eopatch]] | keyword[def] identifier[execute] ( identifier[self] , identifier[eopatch] ):
literal[string]
keyword[if] keyword[not] identifier[eopatch] . identifier[data] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[self] . identifier[data_feature] keyword[in] identifier[eopatch] . identifier[data] :
identifier[new_data] , identifier[rescale] = identifier[self] . identifier[_downscaling] ( identifier[eopatch] . identifier[data] [ identifier[self] . identifier[data_feature] ], identifier[eopatch] . identifier[meta_info] )
identifier[reference_shape] = identifier[eopatch] . identifier[data] [ identifier[self] . identifier[data_feature] ]. identifier[shape] [: literal[int] ]
keyword[else] :
identifier[new_data] , identifier[new_dates] = identifier[self] . identifier[_make_request] ( identifier[eopatch] . identifier[bbox] , identifier[eopatch] . identifier[meta_info] , identifier[eopatch] . identifier[timestamp] )
identifier[removed_frames] = identifier[eopatch] . identifier[consolidate_timestamps] ( identifier[new_dates] )
keyword[for] identifier[rm_frame] keyword[in] identifier[removed_frames] :
identifier[LOGGER] . identifier[warning] ( literal[string]
literal[string] , identifier[str] ( identifier[rm_frame] ), identifier[self] . identifier[data_feature] )
identifier[reference_shape] = identifier[next] ( identifier[iter] ( identifier[eopatch] . identifier[data] . identifier[values] ())). identifier[shape] [: literal[int] ]
identifier[rescale] = identifier[self] . identifier[_get_rescale_factors] ( identifier[reference_shape] [ literal[int] : literal[int] ], identifier[eopatch] . identifier[meta_info] )
identifier[clf_probs_lr] = identifier[self] . identifier[classifier] . identifier[get_cloud_probability_maps] ( identifier[new_data] )
identifier[clf_mask_lr] = identifier[self] . identifier[classifier] . identifier[get_mask_from_prob] ( identifier[clf_probs_lr] )
identifier[clf_mask_hr] = identifier[self] . identifier[_upsampling] ( identifier[clf_mask_lr] , identifier[rescale] , identifier[reference_shape] , identifier[interp] = literal[string] )
identifier[eopatch] . identifier[mask] [ identifier[self] . identifier[cm_feature] ]= identifier[clf_mask_hr] . identifier[astype] ( identifier[np] . identifier[bool] )
keyword[if] identifier[self] . identifier[cprobs_feature] keyword[is] keyword[not] keyword[None] :
identifier[clf_probs_hr] = identifier[self] . identifier[_upsampling] ( identifier[clf_probs_lr] , identifier[rescale] , identifier[reference_shape] , identifier[interp] = literal[string] )
identifier[eopatch] . identifier[data] [ identifier[self] . identifier[cprobs_feature] ]= identifier[clf_probs_hr] . identifier[astype] ( identifier[np] . identifier[float32] )
keyword[return] identifier[eopatch] | def execute(self, eopatch):
""" Add cloud binary mask and (optionally) cloud probability map to input eopatch
:param eopatch: Input `EOPatch` instance
:return: `EOPatch` with additional cloud maps
"""
# Downsample or make request
if not eopatch.data:
raise ValueError('EOPatch must contain some data feature') # depends on [control=['if'], data=[]]
if self.data_feature in eopatch.data:
(new_data, rescale) = self._downscaling(eopatch.data[self.data_feature], eopatch.meta_info)
reference_shape = eopatch.data[self.data_feature].shape[:3] # depends on [control=['if'], data=[]]
else:
(new_data, new_dates) = self._make_request(eopatch.bbox, eopatch.meta_info, eopatch.timestamp)
removed_frames = eopatch.consolidate_timestamps(new_dates)
for rm_frame in removed_frames:
LOGGER.warning('Removed data for frame %s from eopatch due to unavailability of %s!', str(rm_frame), self.data_feature) # depends on [control=['for'], data=['rm_frame']]
# Get reference shape from first item in data dictionary
reference_shape = next(iter(eopatch.data.values())).shape[:3]
rescale = self._get_rescale_factors(reference_shape[1:3], eopatch.meta_info)
clf_probs_lr = self.classifier.get_cloud_probability_maps(new_data)
clf_mask_lr = self.classifier.get_mask_from_prob(clf_probs_lr)
# Add cloud mask as a feature to EOPatch
clf_mask_hr = self._upsampling(clf_mask_lr, rescale, reference_shape, interp='nearest')
eopatch.mask[self.cm_feature] = clf_mask_hr.astype(np.bool)
# If the feature name for cloud probability maps is specified, add as feature
if self.cprobs_feature is not None:
clf_probs_hr = self._upsampling(clf_probs_lr, rescale, reference_shape, interp='linear')
eopatch.data[self.cprobs_feature] = clf_probs_hr.astype(np.float32) # depends on [control=['if'], data=[]]
return eopatch |
def monkey_patch_override_method(klass):
"""
Override a class method with a new version of the same name. The original
method implementation is made available within the override method as
`_original_<METHOD_NAME>`.
"""
def perform_override(override_fn):
fn_name = override_fn.__name__
original_fn_name = '_original_' + fn_name
# Override class method, if it hasn't already been done
if not hasattr(klass, original_fn_name):
original_fn = getattr(klass, fn_name)
setattr(klass, original_fn_name, original_fn)
setattr(klass, fn_name, override_fn)
return perform_override | def function[monkey_patch_override_method, parameter[klass]]:
constant[
Override a class method with a new version of the same name. The original
method implementation is made available within the override method as
`_original_<METHOD_NAME>`.
]
def function[perform_override, parameter[override_fn]]:
variable[fn_name] assign[=] name[override_fn].__name__
variable[original_fn_name] assign[=] binary_operation[constant[_original_] + name[fn_name]]
if <ast.UnaryOp object at 0x7da1b0ebecb0> begin[:]
variable[original_fn] assign[=] call[name[getattr], parameter[name[klass], name[fn_name]]]
call[name[setattr], parameter[name[klass], name[original_fn_name], name[original_fn]]]
call[name[setattr], parameter[name[klass], name[fn_name], name[override_fn]]]
return[name[perform_override]] | keyword[def] identifier[monkey_patch_override_method] ( identifier[klass] ):
literal[string]
keyword[def] identifier[perform_override] ( identifier[override_fn] ):
identifier[fn_name] = identifier[override_fn] . identifier[__name__]
identifier[original_fn_name] = literal[string] + identifier[fn_name]
keyword[if] keyword[not] identifier[hasattr] ( identifier[klass] , identifier[original_fn_name] ):
identifier[original_fn] = identifier[getattr] ( identifier[klass] , identifier[fn_name] )
identifier[setattr] ( identifier[klass] , identifier[original_fn_name] , identifier[original_fn] )
identifier[setattr] ( identifier[klass] , identifier[fn_name] , identifier[override_fn] )
keyword[return] identifier[perform_override] | def monkey_patch_override_method(klass):
"""
Override a class method with a new version of the same name. The original
method implementation is made available within the override method as
`_original_<METHOD_NAME>`.
"""
def perform_override(override_fn):
fn_name = override_fn.__name__
original_fn_name = '_original_' + fn_name
# Override class method, if it hasn't already been done
if not hasattr(klass, original_fn_name):
original_fn = getattr(klass, fn_name)
setattr(klass, original_fn_name, original_fn)
setattr(klass, fn_name, override_fn) # depends on [control=['if'], data=[]]
return perform_override |
def universal_extract_paragraphs(xml):
"""Extract paragraphs from xml that could be from different sources
First try to parse the xml as if it came from elsevier. if we do not
have valid elsevier xml this will throw an exception. the text extraction
function in the pmc client may not throw an exception when parsing elsevier
xml, silently processing the xml incorrectly
Parameters
----------
xml : str
Either an NLM xml, Elsevier xml or plaintext
Returns
-------
paragraphs : str
Extracted plaintext paragraphs from NLM or Elsevier XML
"""
try:
paragraphs = elsevier_client.extract_paragraphs(xml)
except Exception:
paragraphs = None
if paragraphs is None:
try:
paragraphs = pmc_client.extract_paragraphs(xml)
except Exception:
paragraphs = [xml]
return paragraphs | def function[universal_extract_paragraphs, parameter[xml]]:
constant[Extract paragraphs from xml that could be from different sources
First try to parse the xml as if it came from elsevier. if we do not
have valid elsevier xml this will throw an exception. the text extraction
function in the pmc client may not throw an exception when parsing elsevier
xml, silently processing the xml incorrectly
Parameters
----------
xml : str
Either an NLM xml, Elsevier xml or plaintext
Returns
-------
paragraphs : str
Extracted plaintext paragraphs from NLM or Elsevier XML
]
<ast.Try object at 0x7da2041d9060>
if compare[name[paragraphs] is constant[None]] begin[:]
<ast.Try object at 0x7da2041dad40>
return[name[paragraphs]] | keyword[def] identifier[universal_extract_paragraphs] ( identifier[xml] ):
literal[string]
keyword[try] :
identifier[paragraphs] = identifier[elsevier_client] . identifier[extract_paragraphs] ( identifier[xml] )
keyword[except] identifier[Exception] :
identifier[paragraphs] = keyword[None]
keyword[if] identifier[paragraphs] keyword[is] keyword[None] :
keyword[try] :
identifier[paragraphs] = identifier[pmc_client] . identifier[extract_paragraphs] ( identifier[xml] )
keyword[except] identifier[Exception] :
identifier[paragraphs] =[ identifier[xml] ]
keyword[return] identifier[paragraphs] | def universal_extract_paragraphs(xml):
"""Extract paragraphs from xml that could be from different sources
First try to parse the xml as if it came from elsevier. if we do not
have valid elsevier xml this will throw an exception. the text extraction
function in the pmc client may not throw an exception when parsing elsevier
xml, silently processing the xml incorrectly
Parameters
----------
xml : str
Either an NLM xml, Elsevier xml or plaintext
Returns
-------
paragraphs : str
Extracted plaintext paragraphs from NLM or Elsevier XML
"""
try:
paragraphs = elsevier_client.extract_paragraphs(xml) # depends on [control=['try'], data=[]]
except Exception:
paragraphs = None # depends on [control=['except'], data=[]]
if paragraphs is None:
try:
paragraphs = pmc_client.extract_paragraphs(xml) # depends on [control=['try'], data=[]]
except Exception:
paragraphs = [xml] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['paragraphs']]
return paragraphs |
def qubits_count_tuple(qubits: Union[int, Qubits]) -> Tuple[int, Qubits]:
"""Utility method for unraveling 'qubits: Union[int, Qubits]' arguments"""
if isinstance(qubits, int):
return qubits, tuple(range(qubits))
return len(qubits), qubits | def function[qubits_count_tuple, parameter[qubits]]:
constant[Utility method for unraveling 'qubits: Union[int, Qubits]' arguments]
if call[name[isinstance], parameter[name[qubits], name[int]]] begin[:]
return[tuple[[<ast.Name object at 0x7da18bc70220>, <ast.Call object at 0x7da18bc727a0>]]]
return[tuple[[<ast.Call object at 0x7da18bc70bb0>, <ast.Name object at 0x7da18bc715a0>]]] | keyword[def] identifier[qubits_count_tuple] ( identifier[qubits] : identifier[Union] [ identifier[int] , identifier[Qubits] ])-> identifier[Tuple] [ identifier[int] , identifier[Qubits] ]:
literal[string]
keyword[if] identifier[isinstance] ( identifier[qubits] , identifier[int] ):
keyword[return] identifier[qubits] , identifier[tuple] ( identifier[range] ( identifier[qubits] ))
keyword[return] identifier[len] ( identifier[qubits] ), identifier[qubits] | def qubits_count_tuple(qubits: Union[int, Qubits]) -> Tuple[int, Qubits]:
"""Utility method for unraveling 'qubits: Union[int, Qubits]' arguments"""
if isinstance(qubits, int):
return (qubits, tuple(range(qubits))) # depends on [control=['if'], data=[]]
return (len(qubits), qubits) |
def setIsolateHidden(self, state):
"""
Sets whether or not this item is hidden due to isolation.
:param state | <bool>
"""
self._isolatedHidden = state
super(XNode, self).setVisible(self.isVisible()) | def function[setIsolateHidden, parameter[self, state]]:
constant[
Sets whether or not this item is hidden due to isolation.
:param state | <bool>
]
name[self]._isolatedHidden assign[=] name[state]
call[call[name[super], parameter[name[XNode], name[self]]].setVisible, parameter[call[name[self].isVisible, parameter[]]]] | keyword[def] identifier[setIsolateHidden] ( identifier[self] , identifier[state] ):
literal[string]
identifier[self] . identifier[_isolatedHidden] = identifier[state]
identifier[super] ( identifier[XNode] , identifier[self] ). identifier[setVisible] ( identifier[self] . identifier[isVisible] ()) | def setIsolateHidden(self, state):
"""
Sets whether or not this item is hidden due to isolation.
:param state | <bool>
"""
self._isolatedHidden = state
super(XNode, self).setVisible(self.isVisible()) |
def countok(self):
"""
Boolean array showing which stars pass all count constraints.
A "count constraint" is a constraint that affects the number of stars.
"""
ok = np.ones(len(self.stars)).astype(bool)
for name in self.constraints:
c = self.constraints[name]
if c.name not in self.selectfrac_skip:
ok &= c.ok
return ok | def function[countok, parameter[self]]:
constant[
Boolean array showing which stars pass all count constraints.
A "count constraint" is a constraint that affects the number of stars.
]
variable[ok] assign[=] call[call[name[np].ones, parameter[call[name[len], parameter[name[self].stars]]]].astype, parameter[name[bool]]]
for taget[name[name]] in starred[name[self].constraints] begin[:]
variable[c] assign[=] call[name[self].constraints][name[name]]
if compare[name[c].name <ast.NotIn object at 0x7da2590d7190> name[self].selectfrac_skip] begin[:]
<ast.AugAssign object at 0x7da1b26d6800>
return[name[ok]] | keyword[def] identifier[countok] ( identifier[self] ):
literal[string]
identifier[ok] = identifier[np] . identifier[ones] ( identifier[len] ( identifier[self] . identifier[stars] )). identifier[astype] ( identifier[bool] )
keyword[for] identifier[name] keyword[in] identifier[self] . identifier[constraints] :
identifier[c] = identifier[self] . identifier[constraints] [ identifier[name] ]
keyword[if] identifier[c] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[selectfrac_skip] :
identifier[ok] &= identifier[c] . identifier[ok]
keyword[return] identifier[ok] | def countok(self):
"""
Boolean array showing which stars pass all count constraints.
A "count constraint" is a constraint that affects the number of stars.
"""
ok = np.ones(len(self.stars)).astype(bool)
for name in self.constraints:
c = self.constraints[name]
if c.name not in self.selectfrac_skip:
ok &= c.ok # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['name']]
return ok |
def action_delete(self, ids):
"""Delete selected sessions."""
is_current = any(SessionActivity.is_current(sid_s=id_) for id_ in ids)
if is_current:
flash('You could not remove your current session', 'error')
return
for id_ in ids:
delete_session(sid_s=id_)
db.session.commit() | def function[action_delete, parameter[self, ids]]:
constant[Delete selected sessions.]
variable[is_current] assign[=] call[name[any], parameter[<ast.GeneratorExp object at 0x7da2044c1d80>]]
if name[is_current] begin[:]
call[name[flash], parameter[constant[You could not remove your current session], constant[error]]]
return[None]
for taget[name[id_]] in starred[name[ids]] begin[:]
call[name[delete_session], parameter[]]
call[name[db].session.commit, parameter[]] | keyword[def] identifier[action_delete] ( identifier[self] , identifier[ids] ):
literal[string]
identifier[is_current] = identifier[any] ( identifier[SessionActivity] . identifier[is_current] ( identifier[sid_s] = identifier[id_] ) keyword[for] identifier[id_] keyword[in] identifier[ids] )
keyword[if] identifier[is_current] :
identifier[flash] ( literal[string] , literal[string] )
keyword[return]
keyword[for] identifier[id_] keyword[in] identifier[ids] :
identifier[delete_session] ( identifier[sid_s] = identifier[id_] )
identifier[db] . identifier[session] . identifier[commit] () | def action_delete(self, ids):
"""Delete selected sessions."""
is_current = any((SessionActivity.is_current(sid_s=id_) for id_ in ids))
if is_current:
flash('You could not remove your current session', 'error')
return # depends on [control=['if'], data=[]]
for id_ in ids:
delete_session(sid_s=id_) # depends on [control=['for'], data=['id_']]
db.session.commit() |
def from_yaml(self, node):
'''
Implementes a !from_yaml constructor with the following syntax:
!from_yaml filename key
Arguments:
filename: Filename of external YAML document from which to load,
relative to the current YAML file.
key: Key from external YAML document to return,
using a dot-separated syntax for nested keys.
Examples:
!from_yaml external.yml pop
!from_yaml external.yml foo.bar.pop
!from_yaml "another file.yml" "foo bar.snap crackle.pop"
'''
# Load the content from the node, as a scalar
content = self.construct_scalar(node)
# Split on unquoted spaces
try:
parts = shlex.split(content)
except UnicodeEncodeError:
raise yaml.YAMLError('Non-ASCII arguments to !from_yaml are unsupported')
if len(parts) != 2:
raise yaml.YAMLError('Two arguments expected to !from_yaml')
filename, key = parts
# path is relative to the current YAML document
path = os.path.join(self._root, filename)
# Load the other YAML document
with open(path, 'r') as f:
doc = yaml.load(f, self.__class__)
# Retrieve the key
try:
cur = doc
for k in key.split('.'):
cur = cur[k]
except KeyError:
raise yaml.YAMLError('Key "{}" not found in {}'.format(key, filename))
return cur | def function[from_yaml, parameter[self, node]]:
constant[
Implementes a !from_yaml constructor with the following syntax:
!from_yaml filename key
Arguments:
filename: Filename of external YAML document from which to load,
relative to the current YAML file.
key: Key from external YAML document to return,
using a dot-separated syntax for nested keys.
Examples:
!from_yaml external.yml pop
!from_yaml external.yml foo.bar.pop
!from_yaml "another file.yml" "foo bar.snap crackle.pop"
]
variable[content] assign[=] call[name[self].construct_scalar, parameter[name[node]]]
<ast.Try object at 0x7da1b0d22020>
if compare[call[name[len], parameter[name[parts]]] not_equal[!=] constant[2]] begin[:]
<ast.Raise object at 0x7da1b0d201c0>
<ast.Tuple object at 0x7da1b0d20b20> assign[=] name[parts]
variable[path] assign[=] call[name[os].path.join, parameter[name[self]._root, name[filename]]]
with call[name[open], parameter[name[path], constant[r]]] begin[:]
variable[doc] assign[=] call[name[yaml].load, parameter[name[f], name[self].__class__]]
<ast.Try object at 0x7da1b0d203d0>
return[name[cur]] | keyword[def] identifier[from_yaml] ( identifier[self] , identifier[node] ):
literal[string]
identifier[content] = identifier[self] . identifier[construct_scalar] ( identifier[node] )
keyword[try] :
identifier[parts] = identifier[shlex] . identifier[split] ( identifier[content] )
keyword[except] identifier[UnicodeEncodeError] :
keyword[raise] identifier[yaml] . identifier[YAMLError] ( literal[string] )
keyword[if] identifier[len] ( identifier[parts] )!= literal[int] :
keyword[raise] identifier[yaml] . identifier[YAMLError] ( literal[string] )
identifier[filename] , identifier[key] = identifier[parts]
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[_root] , identifier[filename] )
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
identifier[doc] = identifier[yaml] . identifier[load] ( identifier[f] , identifier[self] . identifier[__class__] )
keyword[try] :
identifier[cur] = identifier[doc]
keyword[for] identifier[k] keyword[in] identifier[key] . identifier[split] ( literal[string] ):
identifier[cur] = identifier[cur] [ identifier[k] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[yaml] . identifier[YAMLError] ( literal[string] . identifier[format] ( identifier[key] , identifier[filename] ))
keyword[return] identifier[cur] | def from_yaml(self, node):
"""
Implementes a !from_yaml constructor with the following syntax:
!from_yaml filename key
Arguments:
filename: Filename of external YAML document from which to load,
relative to the current YAML file.
key: Key from external YAML document to return,
using a dot-separated syntax for nested keys.
Examples:
!from_yaml external.yml pop
!from_yaml external.yml foo.bar.pop
!from_yaml "another file.yml" "foo bar.snap crackle.pop"
"""
# Load the content from the node, as a scalar
content = self.construct_scalar(node)
# Split on unquoted spaces
try:
parts = shlex.split(content) # depends on [control=['try'], data=[]]
except UnicodeEncodeError:
raise yaml.YAMLError('Non-ASCII arguments to !from_yaml are unsupported') # depends on [control=['except'], data=[]]
if len(parts) != 2:
raise yaml.YAMLError('Two arguments expected to !from_yaml') # depends on [control=['if'], data=[]]
(filename, key) = parts
# path is relative to the current YAML document
path = os.path.join(self._root, filename)
# Load the other YAML document
with open(path, 'r') as f:
doc = yaml.load(f, self.__class__) # depends on [control=['with'], data=['f']]
# Retrieve the key
try:
cur = doc
for k in key.split('.'):
cur = cur[k] # depends on [control=['for'], data=['k']] # depends on [control=['try'], data=[]]
except KeyError:
raise yaml.YAMLError('Key "{}" not found in {}'.format(key, filename)) # depends on [control=['except'], data=[]]
return cur |
def apply(self):
    """Apply changes to the plots.

    Re-reads the group info, toggles the action buttons depending on
    whether any group tab exists, and refreshes (or resets) the plots
    accordingly.
    """
    self.read_group_info()
    has_tabs = self.tabs.count() != 0
    # The color/delete/apply buttons are only meaningful when at least
    # one group tab is present.
    self.button_color.setEnabled(has_tabs)
    self.button_del.setEnabled(has_tabs)
    self.button_apply.setEnabled(has_tabs)
    if has_tabs:
        if self.groups:
            # groups exist: refresh position and spectrum
            self.parent.overview.update_position()
            self.parent.spectrum.update()
        else:
            # no groups left: clear traces and spectrum
            self.parent.traces.reset()
            self.parent.spectrum.reset()
        # either way, (re-)enable the event markers
        self.parent.notes.enable_events()
constant[Apply changes to the plots.]
call[name[self].read_group_info, parameter[]]
if compare[call[name[self].tabs.count, parameter[]] equal[==] constant[0]] begin[:]
call[name[self].button_color.setEnabled, parameter[constant[False]]]
call[name[self].button_del.setEnabled, parameter[constant[False]]]
call[name[self].button_apply.setEnabled, parameter[constant[False]]]
if name[self].groups begin[:]
call[name[self].parent.overview.update_position, parameter[]]
call[name[self].parent.spectrum.update, parameter[]]
call[name[self].parent.notes.enable_events, parameter[]] | keyword[def] identifier[apply] ( identifier[self] ):
literal[string]
identifier[self] . identifier[read_group_info] ()
keyword[if] identifier[self] . identifier[tabs] . identifier[count] ()== literal[int] :
identifier[self] . identifier[button_color] . identifier[setEnabled] ( keyword[False] )
identifier[self] . identifier[button_del] . identifier[setEnabled] ( keyword[False] )
identifier[self] . identifier[button_apply] . identifier[setEnabled] ( keyword[False] )
keyword[else] :
identifier[self] . identifier[button_color] . identifier[setEnabled] ( keyword[True] )
identifier[self] . identifier[button_del] . identifier[setEnabled] ( keyword[True] )
identifier[self] . identifier[button_apply] . identifier[setEnabled] ( keyword[True] )
keyword[if] identifier[self] . identifier[groups] :
identifier[self] . identifier[parent] . identifier[overview] . identifier[update_position] ()
identifier[self] . identifier[parent] . identifier[spectrum] . identifier[update] ()
identifier[self] . identifier[parent] . identifier[notes] . identifier[enable_events] ()
keyword[else] :
identifier[self] . identifier[parent] . identifier[traces] . identifier[reset] ()
identifier[self] . identifier[parent] . identifier[spectrum] . identifier[reset] ()
identifier[self] . identifier[parent] . identifier[notes] . identifier[enable_events] () | def apply(self):
"""Apply changes to the plots."""
self.read_group_info()
if self.tabs.count() == 0:
# disactivate buttons
self.button_color.setEnabled(False)
self.button_del.setEnabled(False)
self.button_apply.setEnabled(False) # depends on [control=['if'], data=[]]
else:
# activate buttons
self.button_color.setEnabled(True)
self.button_del.setEnabled(True)
self.button_apply.setEnabled(True)
if self.groups:
self.parent.overview.update_position()
self.parent.spectrum.update()
self.parent.notes.enable_events() # depends on [control=['if'], data=[]]
else:
self.parent.traces.reset()
self.parent.spectrum.reset()
self.parent.notes.enable_events() |
def start_with(self, request):
    """Start the crawler using the given request.

    The seed request is patched with the crawler-wide options before
    being queued, then the (private) crawl loop is started.

    Args:
        request (:class:`nyawc.http.Request`): The startpoint for the crawler.
    """
    # Apply crawler-wide options (stored in the name-mangled __options) to the seed request.
    HTTPRequestHelper.patch_with_options(request, self.__options)
    # Queue it as the first request to be processed.
    self.queue.add_request(request)
    # Kick off the private crawl loop.
    self.__crawler_start()
constant[Start the crawler using the given request.
Args:
request (:class:`nyawc.http.Request`): The startpoint for the crawler.
]
call[name[HTTPRequestHelper].patch_with_options, parameter[name[request], name[self].__options]]
call[name[self].queue.add_request, parameter[name[request]]]
call[name[self].__crawler_start, parameter[]] | keyword[def] identifier[start_with] ( identifier[self] , identifier[request] ):
literal[string]
identifier[HTTPRequestHelper] . identifier[patch_with_options] ( identifier[request] , identifier[self] . identifier[__options] )
identifier[self] . identifier[queue] . identifier[add_request] ( identifier[request] )
identifier[self] . identifier[__crawler_start] () | def start_with(self, request):
"""Start the crawler using the given request.
Args:
request (:class:`nyawc.http.Request`): The startpoint for the crawler.
"""
HTTPRequestHelper.patch_with_options(request, self.__options)
self.queue.add_request(request)
self.__crawler_start() |
def flux_consumers(F, rtol=1e-05, atol=1e-12):
r"""Return indexes of states that are net flux producers.
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
rtol : float
relative tolerance. fulfilled if max(outflux-influx, 0) / max(outflux,influx) < rtol
atol : float
absolute tolerance. fulfilled if max(outflux-influx, 0) < atol
Returns
-------
producers : (n) ndarray of int
indexes of states that are net flux producers. May include "dirty" producers, i.e.
states that have influx but still produce more outflux and thereby violate flux
conservation.
"""
# can be used with sparse or dense
n = np.shape(F)[0]
influxes = np.array(np.sum(F, axis=0)).flatten() # all that flows in
outfluxes = np.array(np.sum(F, axis=1)).flatten() # all that flows out
# net in flux absolute
con_abs = np.maximum(influxes - outfluxes, np.zeros(n))
# net in flux relative
con_rel = con_abs / (np.maximum(outfluxes, influxes))
# return all indexes that are produces in terms of absolute and relative tolerance
return list(np.where((con_abs > atol) * (con_rel > rtol))[0]) | def function[flux_consumers, parameter[F, rtol, atol]]:
constant[Return indexes of states that are net flux producers.
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
rtol : float
relative tolerance. fulfilled if max(outflux-influx, 0) / max(outflux,influx) < rtol
atol : float
absolute tolerance. fulfilled if max(outflux-influx, 0) < atol
Returns
-------
producers : (n) ndarray of int
indexes of states that are net flux producers. May include "dirty" producers, i.e.
states that have influx but still produce more outflux and thereby violate flux
conservation.
]
variable[n] assign[=] call[call[name[np].shape, parameter[name[F]]]][constant[0]]
variable[influxes] assign[=] call[call[name[np].array, parameter[call[name[np].sum, parameter[name[F]]]]].flatten, parameter[]]
variable[outfluxes] assign[=] call[call[name[np].array, parameter[call[name[np].sum, parameter[name[F]]]]].flatten, parameter[]]
variable[con_abs] assign[=] call[name[np].maximum, parameter[binary_operation[name[influxes] - name[outfluxes]], call[name[np].zeros, parameter[name[n]]]]]
variable[con_rel] assign[=] binary_operation[name[con_abs] / call[name[np].maximum, parameter[name[outfluxes], name[influxes]]]]
return[call[name[list], parameter[call[call[name[np].where, parameter[binary_operation[compare[name[con_abs] greater[>] name[atol]] * compare[name[con_rel] greater[>] name[rtol]]]]]][constant[0]]]]] | keyword[def] identifier[flux_consumers] ( identifier[F] , identifier[rtol] = literal[int] , identifier[atol] = literal[int] ):
literal[string]
identifier[n] = identifier[np] . identifier[shape] ( identifier[F] )[ literal[int] ]
identifier[influxes] = identifier[np] . identifier[array] ( identifier[np] . identifier[sum] ( identifier[F] , identifier[axis] = literal[int] )). identifier[flatten] ()
identifier[outfluxes] = identifier[np] . identifier[array] ( identifier[np] . identifier[sum] ( identifier[F] , identifier[axis] = literal[int] )). identifier[flatten] ()
identifier[con_abs] = identifier[np] . identifier[maximum] ( identifier[influxes] - identifier[outfluxes] , identifier[np] . identifier[zeros] ( identifier[n] ))
identifier[con_rel] = identifier[con_abs] /( identifier[np] . identifier[maximum] ( identifier[outfluxes] , identifier[influxes] ))
keyword[return] identifier[list] ( identifier[np] . identifier[where] (( identifier[con_abs] > identifier[atol] )*( identifier[con_rel] > identifier[rtol] ))[ literal[int] ]) | def flux_consumers(F, rtol=1e-05, atol=1e-12):
"""Return indexes of states that are net flux producers.
Parameters
----------
F : (n, n) ndarray
Matrix of flux values between pairs of states.
rtol : float
relative tolerance. fulfilled if max(outflux-influx, 0) / max(outflux,influx) < rtol
atol : float
absolute tolerance. fulfilled if max(outflux-influx, 0) < atol
Returns
-------
producers : (n) ndarray of int
indexes of states that are net flux producers. May include "dirty" producers, i.e.
states that have influx but still produce more outflux and thereby violate flux
conservation.
"""
# can be used with sparse or dense
n = np.shape(F)[0]
influxes = np.array(np.sum(F, axis=0)).flatten() # all that flows in
outfluxes = np.array(np.sum(F, axis=1)).flatten() # all that flows out
# net in flux absolute
con_abs = np.maximum(influxes - outfluxes, np.zeros(n))
# net in flux relative
con_rel = con_abs / np.maximum(outfluxes, influxes)
# return all indexes that are produces in terms of absolute and relative tolerance
return list(np.where((con_abs > atol) * (con_rel > rtol))[0]) |
def configure_hit(self, hit_config):
    ''' Configure HIT

    Builds the qualification requirements, registers a HIT type with
    MTurk via ``self.mtc``, optionally wires up REST notifications, and
    stores the final HIT-creation parameters in ``self.param_dict``.
    No HIT is actually created here.

    :param hit_config: dict with keys ``approve_requirement``,
        ``number_hits_approved``, ``require_master_workers``, ``us_only``,
        ``title``, ``description``, ``reward``, ``duration`` (timedelta),
        ``keywords``, ``lifetime`` (timedelta), ``max_assignments`` and
        ``ad_location``.
    '''
    # Qualification: minimum approval percentage and approved-HIT count.
    quals = []
    quals.append(dict(
        QualificationTypeId=PERCENT_ASSIGNMENTS_APPROVED_QUAL_ID,
        Comparator='GreaterThanOrEqualTo',
        IntegerValues=[int(hit_config['approve_requirement'])]
    ))
    quals.append(dict(
        QualificationTypeId=NUMBER_HITS_APPROVED_QUAL_ID,
        Comparator='GreaterThanOrEqualTo',
        IntegerValues=[int(hit_config['number_hits_approved'])]
    ))
    if hit_config['require_master_workers']:
        # Masters qualification IDs differ between sandbox and live.
        master_qualId = MASTERS_SANDBOX_QUAL_ID if self.is_sandbox else MASTERS_QUAL_ID
        quals.append(dict(
            QualificationTypeId=master_qualId,
            Comparator='Exists'
        ))
    if hit_config['us_only']:
        # Restrict the HIT to workers located in the US.
        quals.append(dict(
            QualificationTypeId=LOCALE_QUAL_ID,
            Comparator='EqualTo',
            LocaleValues=[{'Country': 'US'}]
        ))
    # Create a HIT type for this HIT.
    hit_type = self.mtc.create_hit_type(
        Title=hit_config['title'],
        Description=hit_config['description'],
        Reward=str(hit_config['reward']),
        AssignmentDurationInSeconds=int(hit_config['duration'].total_seconds()),
        Keywords=hit_config['keywords'],
        QualificationRequirements=quals)
    # Check the config file to see if notifications are wanted.
    config = PsiturkConfig()
    config.load_config()
    try:
        url = config.get('Server Parameters', 'notification_url')
        all_event_types = [
            "AssignmentAccepted",
            "AssignmentAbandoned",
            "AssignmentReturned",
            "AssignmentSubmitted",
            "HITReviewable",
            "HITExpired",
        ]
        # TODO: not sure if this works. Can't find documentation in PsiTurk or MTurk
        self.mtc.update_notification_settings(
            HitTypeId=hit_type['HITTypeId'],
            Notification=dict(
                Destination=url,
                Transport='REST',
                Version=NOTIFICATION_VERSION,
                EventTypes=all_event_types,
            ),
        )
    except Exception as e:
        # Notifications are optional: a missing config option or API
        # failure is deliberately ignored.
        # NOTE(review): `e` is unused and every error is silently
        # dropped — consider logging it.
        pass
    # Build the ExternalQuestion XML payload; the %% placeholders survive
    # the first substitution (schema_url) and are filled in the second.
    schema_url = "http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd"
    template = '<ExternalQuestion xmlns="%(schema_url)s"><ExternalURL>%%(external_url)s</ExternalURL><FrameHeight>%%(frame_height)s</FrameHeight></ExternalQuestion>' % vars()
    question = template % dict(
        external_url=hit_config['ad_location'],
        frame_height=600,
    )
    # Specify all the HIT parameters
    self.param_dict = dict(
        HITTypeId=hit_type['HITTypeId'],
        Question=question,
        LifetimeInSeconds=int(hit_config['lifetime'].total_seconds()),
        MaxAssignments=hit_config['max_assignments'],
        # TODO
        # ResponseGroups=[
        #   'Minimal',
        #   'HITDetail',
        #   'HITQuestion',
        #   'HITAssignmentSummary'
        # ]
    )
constant[ Configure HIT ]
variable[quals] assign[=] list[[]]
call[name[quals].append, parameter[call[name[dict], parameter[]]]]
call[name[quals].append, parameter[call[name[dict], parameter[]]]]
if call[name[hit_config]][constant[require_master_workers]] begin[:]
variable[master_qualId] assign[=] <ast.IfExp object at 0x7da1b077b130>
call[name[quals].append, parameter[call[name[dict], parameter[]]]]
if call[name[hit_config]][constant[us_only]] begin[:]
call[name[quals].append, parameter[call[name[dict], parameter[]]]]
variable[hit_type] assign[=] call[name[self].mtc.create_hit_type, parameter[]]
variable[config] assign[=] call[name[PsiturkConfig], parameter[]]
call[name[config].load_config, parameter[]]
<ast.Try object at 0x7da1b08c8e20>
variable[schema_url] assign[=] constant[http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd]
variable[template] assign[=] binary_operation[constant[<ExternalQuestion xmlns="%(schema_url)s"><ExternalURL>%%(external_url)s</ExternalURL><FrameHeight>%%(frame_height)s</FrameHeight></ExternalQuestion>] <ast.Mod object at 0x7da2590d6920> call[name[vars], parameter[]]]
variable[question] assign[=] binary_operation[name[template] <ast.Mod object at 0x7da2590d6920> call[name[dict], parameter[]]]
name[self].param_dict assign[=] call[name[dict], parameter[]] | keyword[def] identifier[configure_hit] ( identifier[self] , identifier[hit_config] ):
literal[string]
identifier[quals] =[]
identifier[quals] . identifier[append] ( identifier[dict] (
identifier[QualificationTypeId] = identifier[PERCENT_ASSIGNMENTS_APPROVED_QUAL_ID] ,
identifier[Comparator] = literal[string] ,
identifier[IntegerValues] =[ identifier[int] ( identifier[hit_config] [ literal[string] ])]
))
identifier[quals] . identifier[append] ( identifier[dict] (
identifier[QualificationTypeId] = identifier[NUMBER_HITS_APPROVED_QUAL_ID] ,
identifier[Comparator] = literal[string] ,
identifier[IntegerValues] =[ identifier[int] ( identifier[hit_config] [ literal[string] ])]
))
keyword[if] identifier[hit_config] [ literal[string] ]:
identifier[master_qualId] = identifier[MASTERS_SANDBOX_QUAL_ID] keyword[if] identifier[self] . identifier[is_sandbox] keyword[else] identifier[MASTERS_QUAL_ID]
identifier[quals] . identifier[append] ( identifier[dict] (
identifier[QualificationTypeId] = identifier[master_qualId] ,
identifier[Comparator] = literal[string]
))
keyword[if] identifier[hit_config] [ literal[string] ]:
identifier[quals] . identifier[append] ( identifier[dict] (
identifier[QualificationTypeId] = identifier[LOCALE_QUAL_ID] ,
identifier[Comparator] = literal[string] ,
identifier[LocaleValues] =[{ literal[string] : literal[string] }]
))
identifier[hit_type] = identifier[self] . identifier[mtc] . identifier[create_hit_type] (
identifier[Title] = identifier[hit_config] [ literal[string] ],
identifier[Description] = identifier[hit_config] [ literal[string] ],
identifier[Reward] = identifier[str] ( identifier[hit_config] [ literal[string] ]),
identifier[AssignmentDurationInSeconds] = identifier[int] ( identifier[hit_config] [ literal[string] ]. identifier[total_seconds] ()),
identifier[Keywords] = identifier[hit_config] [ literal[string] ],
identifier[QualificationRequirements] = identifier[quals] )
identifier[config] = identifier[PsiturkConfig] ()
identifier[config] . identifier[load_config] ()
keyword[try] :
identifier[url] = identifier[config] . identifier[get] ( literal[string] , literal[string] )
identifier[all_event_types] =[
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]
identifier[self] . identifier[mtc] . identifier[update_notification_settings] (
identifier[HitTypeId] = identifier[hit_type] [ literal[string] ],
identifier[Notification] = identifier[dict] (
identifier[Destination] = identifier[url] ,
identifier[Transport] = literal[string] ,
identifier[Version] = identifier[NOTIFICATION_VERSION] ,
identifier[EventTypes] = identifier[all_event_types] ,
),
)
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[pass]
identifier[schema_url] = literal[string]
identifier[template] = literal[string] % identifier[vars] ()
identifier[question] = identifier[template] % identifier[dict] (
identifier[external_url] = identifier[hit_config] [ literal[string] ],
identifier[frame_height] = literal[int] ,
)
identifier[self] . identifier[param_dict] = identifier[dict] (
identifier[HITTypeId] = identifier[hit_type] [ literal[string] ],
identifier[Question] = identifier[question] ,
identifier[LifetimeInSeconds] = identifier[int] ( identifier[hit_config] [ literal[string] ]. identifier[total_seconds] ()),
identifier[MaxAssignments] = identifier[hit_config] [ literal[string] ],
) | def configure_hit(self, hit_config):
""" Configure HIT """
# Qualification:
quals = []
quals.append(dict(QualificationTypeId=PERCENT_ASSIGNMENTS_APPROVED_QUAL_ID, Comparator='GreaterThanOrEqualTo', IntegerValues=[int(hit_config['approve_requirement'])]))
quals.append(dict(QualificationTypeId=NUMBER_HITS_APPROVED_QUAL_ID, Comparator='GreaterThanOrEqualTo', IntegerValues=[int(hit_config['number_hits_approved'])]))
if hit_config['require_master_workers']:
master_qualId = MASTERS_SANDBOX_QUAL_ID if self.is_sandbox else MASTERS_QUAL_ID
quals.append(dict(QualificationTypeId=master_qualId, Comparator='Exists')) # depends on [control=['if'], data=[]]
if hit_config['us_only']:
quals.append(dict(QualificationTypeId=LOCALE_QUAL_ID, Comparator='EqualTo', LocaleValues=[{'Country': 'US'}])) # depends on [control=['if'], data=[]]
# Create a HIT type for this HIT.
hit_type = self.mtc.create_hit_type(Title=hit_config['title'], Description=hit_config['description'], Reward=str(hit_config['reward']), AssignmentDurationInSeconds=int(hit_config['duration'].total_seconds()), Keywords=hit_config['keywords'], QualificationRequirements=quals)
# Check the config file to see if notifications are wanted.
config = PsiturkConfig()
config.load_config()
try:
url = config.get('Server Parameters', 'notification_url')
all_event_types = ['AssignmentAccepted', 'AssignmentAbandoned', 'AssignmentReturned', 'AssignmentSubmitted', 'HITReviewable', 'HITExpired']
# TODO: not sure if this works. Can't find documentation in PsiTurk or MTurk
self.mtc.update_notification_settings(HitTypeId=hit_type['HITTypeId'], Notification=dict(Destination=url, Transport='REST', Version=NOTIFICATION_VERSION, EventTypes=all_event_types)) # depends on [control=['try'], data=[]]
except Exception as e:
pass # depends on [control=['except'], data=[]]
schema_url = 'http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd'
template = '<ExternalQuestion xmlns="%(schema_url)s"><ExternalURL>%%(external_url)s</ExternalURL><FrameHeight>%%(frame_height)s</FrameHeight></ExternalQuestion>' % vars()
question = template % dict(external_url=hit_config['ad_location'], frame_height=600)
# Specify all the HIT parameters
# TODO
# ResponseGroups=[
# 'Minimal',
# 'HITDetail',
# 'HITQuestion',
# 'HITAssignmentSummary'
# ]
self.param_dict = dict(HITTypeId=hit_type['HITTypeId'], Question=question, LifetimeInSeconds=int(hit_config['lifetime'].total_seconds()), MaxAssignments=hit_config['max_assignments']) |
def format(self, formatter, *args, **kwargs):
    """
    This is a secure way to make a fake from another Provider.
    """
    # Look the formatter up by name, then invoke it with the caller's
    # positional and keyword arguments.
    fake_fn = self.get_formatter(formatter)
    return fake_fn(*args, **kwargs)
constant[
This is a secure way to make a fake from another Provider.
]
return[call[call[name[self].get_formatter, parameter[name[formatter]]], parameter[<ast.Starred object at 0x7da18dc98040>]]] | keyword[def] identifier[format] ( identifier[self] , identifier[formatter] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[self] . identifier[get_formatter] ( identifier[formatter] )(* identifier[args] ,** identifier[kwargs] ) | def format(self, formatter, *args, **kwargs):
"""
This is a secure way to make a fake from another Provider.
"""
# TODO: data export?
return self.get_formatter(formatter)(*args, **kwargs) |
def add_signal_receiver(self, callback_fn, signal, user_arg):
    """
    Register a signal receiver callback together with a user argument.

    See also :py:meth:`remove_signal_receiver`,
    :py:exc:`.ConnSignalNameNotRecognisedException`

    :param func callback_fn:
        User-defined callback function to call when signal triggers
    :param str signal:
        Signal name e.g., :py:attr:`.ConnInterface.SIGNAL_PROPERTY_CHANGED`
    :param user_arg:
        User-defined callback argument to be passed with callback function
    :return:
    :raises ConnSignalNameNotRecognisedException:
        if the signal name is not registered
    """
    # Guard clause: reject signal names that were never registered.
    if signal not in self._signal_names:
        raise ConnSignalNameNotRecognisedException
    receiver = Signal(signal, callback_fn, user_arg)
    self._signals[signal] = receiver
    self._bus.add_signal_receiver(receiver.signal_handler,
                                  signal,
                                  dbus_interface=self._dbus_addr)
constant[
Add a signal receiver callback with user argument
See also :py:meth:`remove_signal_receiver`,
:py:exc:`.ConnSignalNameNotRecognisedException`
:param func callback_fn:
User-defined callback function to call when signal triggers
:param str signal:
Signal name e.g., :py:attr:`.ConnInterface.SIGNAL_PROPERTY_CHANGED`
:param user_arg:
User-defined callback argument to be passed with callback function
:return:
:raises ConnSignalNameNotRecognisedException:
if the signal name is not registered
]
if compare[name[signal] in name[self]._signal_names] begin[:]
variable[s] assign[=] call[name[Signal], parameter[name[signal], name[callback_fn], name[user_arg]]]
call[name[self]._signals][name[signal]] assign[=] name[s]
call[name[self]._bus.add_signal_receiver, parameter[name[s].signal_handler, name[signal]]] | keyword[def] identifier[add_signal_receiver] ( identifier[self] , identifier[callback_fn] , identifier[signal] , identifier[user_arg] ):
literal[string]
keyword[if] ( identifier[signal] keyword[in] identifier[self] . identifier[_signal_names] ):
identifier[s] = identifier[Signal] ( identifier[signal] , identifier[callback_fn] , identifier[user_arg] )
identifier[self] . identifier[_signals] [ identifier[signal] ]= identifier[s]
identifier[self] . identifier[_bus] . identifier[add_signal_receiver] ( identifier[s] . identifier[signal_handler] ,
identifier[signal] ,
identifier[dbus_interface] = identifier[self] . identifier[_dbus_addr] )
keyword[else] :
keyword[raise] identifier[ConnSignalNameNotRecognisedException] | def add_signal_receiver(self, callback_fn, signal, user_arg):
"""
Add a signal receiver callback with user argument
See also :py:meth:`remove_signal_receiver`,
:py:exc:`.ConnSignalNameNotRecognisedException`
:param func callback_fn:
User-defined callback function to call when signal triggers
:param str signal:
Signal name e.g., :py:attr:`.ConnInterface.SIGNAL_PROPERTY_CHANGED`
:param user_arg:
User-defined callback argument to be passed with callback function
:return:
:raises ConnSignalNameNotRecognisedException:
if the signal name is not registered
"""
if signal in self._signal_names:
s = Signal(signal, callback_fn, user_arg)
self._signals[signal] = s
self._bus.add_signal_receiver(s.signal_handler, signal, dbus_interface=self._dbus_addr) # depends on [control=['if'], data=['signal']]
else:
raise ConnSignalNameNotRecognisedException |
def _parse_bare_key(self):  # type: () -> Key
    """
    Parse a bare (unquoted) key, including dotted keys.
    """
    key_type = None
    dotted = False
    # Remember where the key starts so extract() can slice it out.
    self.mark()
    while True:
        # Stop at the first character not legal in a bare key, or when
        # inc() reports the end of the input.
        if not self._current.is_bare_key_char():
            break
        if not self.inc():
            break
    key = self.extract()
    if self._current == ".":
        # Dotted key: consume the dot and recursively parse the rest.
        self.inc()
        dotted = True
        key = key + "." + self._parse_key().as_string()
        key_type = KeyType.Bare
    return Key(key, key_type, "", dotted)
constant[
Parses a bare key.
]
variable[key_type] assign[=] constant[None]
variable[dotted] assign[=] constant[False]
call[name[self].mark, parameter[]]
while <ast.BoolOp object at 0x7da1b2065000> begin[:]
pass
variable[key] assign[=] call[name[self].extract, parameter[]]
if compare[name[self]._current equal[==] constant[.]] begin[:]
call[name[self].inc, parameter[]]
variable[dotted] assign[=] constant[True]
<ast.AugAssign object at 0x7da1b2064ee0>
variable[key_type] assign[=] name[KeyType].Bare
return[call[name[Key], parameter[name[key], name[key_type], constant[], name[dotted]]]] | keyword[def] identifier[_parse_bare_key] ( identifier[self] ):
literal[string]
identifier[key_type] = keyword[None]
identifier[dotted] = keyword[False]
identifier[self] . identifier[mark] ()
keyword[while] identifier[self] . identifier[_current] . identifier[is_bare_key_char] () keyword[and] identifier[self] . identifier[inc] ():
keyword[pass]
identifier[key] = identifier[self] . identifier[extract] ()
keyword[if] identifier[self] . identifier[_current] == literal[string] :
identifier[self] . identifier[inc] ()
identifier[dotted] = keyword[True]
identifier[key] += literal[string] + identifier[self] . identifier[_parse_key] (). identifier[as_string] ()
identifier[key_type] = identifier[KeyType] . identifier[Bare]
keyword[return] identifier[Key] ( identifier[key] , identifier[key_type] , literal[string] , identifier[dotted] ) | def _parse_bare_key(self): # type: () -> Key
'\n Parses a bare key.\n '
key_type = None
dotted = False
self.mark()
while self._current.is_bare_key_char() and self.inc():
pass # depends on [control=['while'], data=[]]
key = self.extract()
if self._current == '.':
self.inc()
dotted = True
key += '.' + self._parse_key().as_string()
key_type = KeyType.Bare # depends on [control=['if'], data=[]]
return Key(key, key_type, '', dotted) |
def constrains(self):
    """
    returns a list of parameters that are constrained by this parameter
    """
    constrained = []
    for constraint in self.in_constraints:
        for var in constraint._vars:
            candidate = var.get_parameter()
            # only keep parameters matching the constraint's target
            if candidate.component != constraint.component:
                continue
            if candidate.qualifier != constraint.qualifier:
                continue
            # skip duplicates and this parameter itself
            if candidate in constrained or candidate.uniqueid == self.uniqueid:
                continue
            constrained.append(candidate)
    return constrained
constant[
returns a list of parameters that are constrained by this parameter
]
variable[params] assign[=] list[[]]
for taget[name[constraint]] in starred[name[self].in_constraints] begin[:]
for taget[name[var]] in starred[name[constraint]._vars] begin[:]
variable[param] assign[=] call[name[var].get_parameter, parameter[]]
if <ast.BoolOp object at 0x7da18f58fcd0> begin[:]
if <ast.BoolOp object at 0x7da18f58d480> begin[:]
call[name[params].append, parameter[name[param]]]
return[name[params]] | keyword[def] identifier[constrains] ( identifier[self] ):
literal[string]
identifier[params] =[]
keyword[for] identifier[constraint] keyword[in] identifier[self] . identifier[in_constraints] :
keyword[for] identifier[var] keyword[in] identifier[constraint] . identifier[_vars] :
identifier[param] = identifier[var] . identifier[get_parameter] ()
keyword[if] identifier[param] . identifier[component] == identifier[constraint] . identifier[component] keyword[and] identifier[param] . identifier[qualifier] == identifier[constraint] . identifier[qualifier] :
keyword[if] identifier[param] keyword[not] keyword[in] identifier[params] keyword[and] identifier[param] . identifier[uniqueid] != identifier[self] . identifier[uniqueid] :
identifier[params] . identifier[append] ( identifier[param] )
keyword[return] identifier[params] | def constrains(self):
"""
returns a list of parameters that are constrained by this parameter
"""
params = []
for constraint in self.in_constraints:
for var in constraint._vars:
param = var.get_parameter()
if param.component == constraint.component and param.qualifier == constraint.qualifier:
if param not in params and param.uniqueid != self.uniqueid:
params.append(param) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['var']] # depends on [control=['for'], data=['constraint']]
return params |
def new(name, bucket, timeout, memory, description, subnet_ids, security_group_ids):
    """ Create a new lambda project """
    # Collect only the options the user actually supplied (truthy values);
    # insertion order mirrors the original explicit assignments.
    candidates = {
        'timeout': timeout,
        'memory': memory,
        'description': description,
        'subnet_ids': subnet_ids,
        'security_group_ids': security_group_ids,
    }
    config = {key: value for key, value in candidates.items() if value}
    lambder.create_project(name, bucket, config)
constant[ Create a new lambda project ]
variable[config] assign[=] dictionary[[], []]
if name[timeout] begin[:]
call[name[config]][constant[timeout]] assign[=] name[timeout]
if name[memory] begin[:]
call[name[config]][constant[memory]] assign[=] name[memory]
if name[description] begin[:]
call[name[config]][constant[description]] assign[=] name[description]
if name[subnet_ids] begin[:]
call[name[config]][constant[subnet_ids]] assign[=] name[subnet_ids]
if name[security_group_ids] begin[:]
call[name[config]][constant[security_group_ids]] assign[=] name[security_group_ids]
call[name[lambder].create_project, parameter[name[name], name[bucket], name[config]]] | keyword[def] identifier[new] (
identifier[name] ,
identifier[bucket] ,
identifier[timeout] ,
identifier[memory] ,
identifier[description] ,
identifier[subnet_ids] ,
identifier[security_group_ids]
):
literal[string]
identifier[config] ={}
keyword[if] identifier[timeout] :
identifier[config] [ literal[string] ]= identifier[timeout]
keyword[if] identifier[memory] :
identifier[config] [ literal[string] ]= identifier[memory]
keyword[if] identifier[description] :
identifier[config] [ literal[string] ]= identifier[description]
keyword[if] identifier[subnet_ids] :
identifier[config] [ literal[string] ]= identifier[subnet_ids]
keyword[if] identifier[security_group_ids] :
identifier[config] [ literal[string] ]= identifier[security_group_ids]
identifier[lambder] . identifier[create_project] ( identifier[name] , identifier[bucket] , identifier[config] ) | def new(name, bucket, timeout, memory, description, subnet_ids, security_group_ids):
""" Create a new lambda project """
config = {}
if timeout:
config['timeout'] = timeout # depends on [control=['if'], data=[]]
if memory:
config['memory'] = memory # depends on [control=['if'], data=[]]
if description:
config['description'] = description # depends on [control=['if'], data=[]]
if subnet_ids:
config['subnet_ids'] = subnet_ids # depends on [control=['if'], data=[]]
if security_group_ids:
config['security_group_ids'] = security_group_ids # depends on [control=['if'], data=[]]
lambder.create_project(name, bucket, config) |
def cartesian_to_homogeneous_vectors(cartesian_vector, matrix_type="numpy"):
    """Converts a cartesian vector to an homogenous vector.

    Appends a trailing homogeneous coordinate of 1, so an input of
    shape (n,) becomes a float vector of shape (n+1,).

    :param cartesian_vector: 1-D array-like with a ``shape`` attribute.
    :param matrix_type: backend selector; only ``"numpy"`` is supported.
    :raises ValueError: for an unsupported ``matrix_type`` (the previous
        implementation fell through and raised ``UnboundLocalError``).
    """
    if matrix_type != "numpy":
        raise ValueError("unsupported matrix_type: {!r}".format(matrix_type))
    dimension_x = cartesian_vector.shape[0]
    # np.zeros yields float64, matching the original output dtype.
    homogeneous_vector = np.zeros(dimension_x + 1)
    homogeneous_vector[:-1] = cartesian_vector
    # Last item is the homogeneous coordinate, always 1.
    homogeneous_vector[-1] = 1
    return homogeneous_vector
constant[Converts a cartesian vector to an homogenous vector]
variable[dimension_x] assign[=] call[name[cartesian_vector].shape][constant[0]]
if compare[name[matrix_type] equal[==] constant[numpy]] begin[:]
variable[homogeneous_vector] assign[=] call[name[np].zeros, parameter[binary_operation[name[dimension_x] + constant[1]]]]
call[name[homogeneous_vector]][<ast.UnaryOp object at 0x7da18fe913c0>] assign[=] constant[1]
call[name[homogeneous_vector]][<ast.Slice object at 0x7da18fe932e0>] assign[=] name[cartesian_vector]
return[name[homogeneous_vector]] | keyword[def] identifier[cartesian_to_homogeneous_vectors] ( identifier[cartesian_vector] , identifier[matrix_type] = literal[string] ):
literal[string]
identifier[dimension_x] = identifier[cartesian_vector] . identifier[shape] [ literal[int] ]
keyword[if] identifier[matrix_type] == literal[string] :
identifier[homogeneous_vector] = identifier[np] . identifier[zeros] ( identifier[dimension_x] + literal[int] )
identifier[homogeneous_vector] [- literal[int] ]= literal[int]
identifier[homogeneous_vector] [:- literal[int] ]= identifier[cartesian_vector]
keyword[return] identifier[homogeneous_vector] | def cartesian_to_homogeneous_vectors(cartesian_vector, matrix_type='numpy'):
"""Converts a cartesian vector to an homogenous vector"""
dimension_x = cartesian_vector.shape[0]
# Vector
if matrix_type == 'numpy':
homogeneous_vector = np.zeros(dimension_x + 1)
# Last item is a 1
homogeneous_vector[-1] = 1
homogeneous_vector[:-1] = cartesian_vector # depends on [control=['if'], data=[]]
return homogeneous_vector |
def add_trits(left, right):
# type: (Sequence[int], Sequence[int]) -> List[int]
"""
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
"""
target_len = max(len(left), len(right))
res = [0] * target_len
left += [0] * (target_len - len(left))
right += [0] * (target_len - len(right))
carry = 0
for i in range(len(res)):
res[i], carry = _full_add_trits(left[i], right[i], carry)
return res | def function[add_trits, parameter[left, right]]:
constant[
Adds two sequences of trits together.
The result is a list of trits equal in length to the longer of the
two sequences.
.. note::
Overflow is possible.
For example, ``add_trits([1], [1])`` returns ``[-1]``.
]
variable[target_len] assign[=] call[name[max], parameter[call[name[len], parameter[name[left]]], call[name[len], parameter[name[right]]]]]
variable[res] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0295d80>]] * name[target_len]]
<ast.AugAssign object at 0x7da2046200d0>
<ast.AugAssign object at 0x7da2046224a0>
variable[carry] assign[=] constant[0]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[res]]]]]] begin[:]
<ast.Tuple object at 0x7da1b024ebf0> assign[=] call[name[_full_add_trits], parameter[call[name[left]][name[i]], call[name[right]][name[i]], name[carry]]]
return[name[res]] | keyword[def] identifier[add_trits] ( identifier[left] , identifier[right] ):
literal[string]
identifier[target_len] = identifier[max] ( identifier[len] ( identifier[left] ), identifier[len] ( identifier[right] ))
identifier[res] =[ literal[int] ]* identifier[target_len]
identifier[left] +=[ literal[int] ]*( identifier[target_len] - identifier[len] ( identifier[left] ))
identifier[right] +=[ literal[int] ]*( identifier[target_len] - identifier[len] ( identifier[right] ))
identifier[carry] = literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[res] )):
identifier[res] [ identifier[i] ], identifier[carry] = identifier[_full_add_trits] ( identifier[left] [ identifier[i] ], identifier[right] [ identifier[i] ], identifier[carry] )
keyword[return] identifier[res] | def add_trits(left, right):
# type: (Sequence[int], Sequence[int]) -> List[int]
'\n Adds two sequences of trits together.\n\n The result is a list of trits equal in length to the longer of the\n two sequences.\n\n .. note::\n Overflow is possible.\n\n For example, ``add_trits([1], [1])`` returns ``[-1]``.\n '
target_len = max(len(left), len(right))
res = [0] * target_len
left += [0] * (target_len - len(left))
right += [0] * (target_len - len(right))
carry = 0
for i in range(len(res)):
(res[i], carry) = _full_add_trits(left[i], right[i], carry) # depends on [control=['for'], data=['i']]
return res |
def convert_xml_to_text(filename: str = None,
                        blob: bytes = None,
                        config: TextProcessingConfig = _DEFAULT_CONFIG) -> str:
    """
    Converts XML to text.
    """
    # NOTE(review): ``config`` is accepted for interface uniformity with the
    # other converters but is presumably unused here — confirm upstream.
    # Parse whichever source was supplied (file path or in-memory blob) and
    # flatten the parsed tree down to its text content.
    with get_filelikeobject(filename, blob) as fp:
        return bs4.BeautifulStoneSoup(fp).get_text()
constant[
Converts XML to text.
]
with call[name[get_filelikeobject], parameter[name[filename], name[blob]]] begin[:]
variable[soup] assign[=] call[name[bs4].BeautifulStoneSoup, parameter[name[fp]]]
return[call[name[soup].get_text, parameter[]]] | keyword[def] identifier[convert_xml_to_text] ( identifier[filename] : identifier[str] = keyword[None] ,
identifier[blob] : identifier[bytes] = keyword[None] ,
identifier[config] : identifier[TextProcessingConfig] = identifier[_DEFAULT_CONFIG] )-> identifier[str] :
literal[string]
keyword[with] identifier[get_filelikeobject] ( identifier[filename] , identifier[blob] ) keyword[as] identifier[fp] :
identifier[soup] = identifier[bs4] . identifier[BeautifulStoneSoup] ( identifier[fp] )
keyword[return] identifier[soup] . identifier[get_text] () | def convert_xml_to_text(filename: str=None, blob: bytes=None, config: TextProcessingConfig=_DEFAULT_CONFIG) -> str:
"""
Converts XML to text.
"""
with get_filelikeobject(filename, blob) as fp:
soup = bs4.BeautifulStoneSoup(fp)
return soup.get_text() # depends on [control=['with'], data=['fp']] |
def get_new_broks(self):
    """Iter over all hosts and services to add new broks in internal lists

    :return: None
    """
    def _drain(item):
        # Forward every pending brok to our own queue, then reset the
        # item's list (rebind to a fresh list, matching prior behaviour).
        for brok in item.broks:
            self.add(brok)
        item.broks = []

    # Hosts and services first, then contacts (e.g. for contactdowntime).
    for element in self.all_my_hosts_and_services():
        _drain(element)
    for contact in self.contacts:
        _drain(contact)
constant[Iter over all hosts and services to add new broks in internal lists
:return: None
]
for taget[name[elt]] in starred[call[name[self].all_my_hosts_and_services, parameter[]]] begin[:]
for taget[name[brok]] in starred[name[elt].broks] begin[:]
call[name[self].add, parameter[name[brok]]]
name[elt].broks assign[=] list[[]]
for taget[name[contact]] in starred[name[self].contacts] begin[:]
for taget[name[brok]] in starred[name[contact].broks] begin[:]
call[name[self].add, parameter[name[brok]]]
name[contact].broks assign[=] list[[]] | keyword[def] identifier[get_new_broks] ( identifier[self] ):
literal[string]
keyword[for] identifier[elt] keyword[in] identifier[self] . identifier[all_my_hosts_and_services] ():
keyword[for] identifier[brok] keyword[in] identifier[elt] . identifier[broks] :
identifier[self] . identifier[add] ( identifier[brok] )
identifier[elt] . identifier[broks] =[]
keyword[for] identifier[contact] keyword[in] identifier[self] . identifier[contacts] :
keyword[for] identifier[brok] keyword[in] identifier[contact] . identifier[broks] :
identifier[self] . identifier[add] ( identifier[brok] )
identifier[contact] . identifier[broks] =[] | def get_new_broks(self):
"""Iter over all hosts and services to add new broks in internal lists
:return: None
"""
# ask for service and hosts their broks waiting
# be eaten
for elt in self.all_my_hosts_and_services():
for brok in elt.broks:
self.add(brok) # depends on [control=['for'], data=['brok']]
# We got all, clear item broks list
elt.broks = [] # depends on [control=['for'], data=['elt']]
# Also fetch broks from contact (like contactdowntime)
for contact in self.contacts:
for brok in contact.broks:
self.add(brok) # depends on [control=['for'], data=['brok']]
# We got all, clear contact broks list
contact.broks = [] # depends on [control=['for'], data=['contact']] |
def _construct_schema(elements, nsmap):
"""Consruct fiona schema based on given elements
:param list Element: list of elements
:param dict nsmap: namespace map
:return dict: schema
"""
schema = {
'properties': {},
'geometry': None
}
schema_key = None
gml_key = None
# if nsmap is defined, use it
if nsmap:
for key in nsmap:
if nsmap[key] == XS_NAMESPACE:
schema_key = key
if nsmap[key] in GML_NAMESPACES:
gml_key = key
# if no nsmap is defined, we have to guess
else:
gml_key = 'gml'
schema_key = 'xsd'
mappings = {
'PointPropertyType': 'Point',
'PolygonPropertyType': 'Polygon',
'LineStringPropertyType': 'LineString',
'MultiPointPropertyType': 'MultiPoint',
'MultiLineStringPropertyType': 'MultiLineString',
'MultiPolygonPropertyType': 'MultiPolygon',
'MultiGeometryPropertyType': 'MultiGeometry',
'GeometryPropertyType': 'GeometryCollection',
'SurfacePropertyType': '3D Polygon',
'MultiSurfacePropertyType': '3D MultiPolygon'
}
for element in elements:
data_type = element.attrib['type'].replace(gml_key + ':', '')
name = element.attrib['name']
if data_type in mappings:
schema['geometry'] = mappings[data_type]
schema['geometry_column'] = name
else:
schema['properties'][name] = data_type.replace(schema_key+':', '')
if schema['properties'] or schema['geometry']:
return schema
else:
return None | def function[_construct_schema, parameter[elements, nsmap]]:
constant[Consruct fiona schema based on given elements
:param list Element: list of elements
:param dict nsmap: namespace map
:return dict: schema
]
variable[schema] assign[=] dictionary[[<ast.Constant object at 0x7da1b02f1630>, <ast.Constant object at 0x7da1b02f3130>], [<ast.Dict object at 0x7da1b02f0850>, <ast.Constant object at 0x7da1b02f3fd0>]]
variable[schema_key] assign[=] constant[None]
variable[gml_key] assign[=] constant[None]
if name[nsmap] begin[:]
for taget[name[key]] in starred[name[nsmap]] begin[:]
if compare[call[name[nsmap]][name[key]] equal[==] name[XS_NAMESPACE]] begin[:]
variable[schema_key] assign[=] name[key]
if compare[call[name[nsmap]][name[key]] in name[GML_NAMESPACES]] begin[:]
variable[gml_key] assign[=] name[key]
variable[mappings] assign[=] dictionary[[<ast.Constant object at 0x7da1b02f0490>, <ast.Constant object at 0x7da1b02f3430>, <ast.Constant object at 0x7da1b02f2f50>, <ast.Constant object at 0x7da1b02f3490>, <ast.Constant object at 0x7da1b02f2e30>, <ast.Constant object at 0x7da1b02f0c40>, <ast.Constant object at 0x7da1b02f0280>, <ast.Constant object at 0x7da1b02f1780>, <ast.Constant object at 0x7da1b02f32b0>, <ast.Constant object at 0x7da1b02f0760>], [<ast.Constant object at 0x7da1b02f1030>, <ast.Constant object at 0x7da1b02f11b0>, <ast.Constant object at 0x7da1b02f13c0>, <ast.Constant object at 0x7da1b02f2830>, <ast.Constant object at 0x7da1b02f0f10>, <ast.Constant object at 0x7da1b02f1930>, <ast.Constant object at 0x7da1b02f3100>, <ast.Constant object at 0x7da1b02f3c10>, <ast.Constant object at 0x7da1b02f18d0>, <ast.Constant object at 0x7da1b02f3280>]]
for taget[name[element]] in starred[name[elements]] begin[:]
variable[data_type] assign[=] call[call[name[element].attrib][constant[type]].replace, parameter[binary_operation[name[gml_key] + constant[:]], constant[]]]
variable[name] assign[=] call[name[element].attrib][constant[name]]
if compare[name[data_type] in name[mappings]] begin[:]
call[name[schema]][constant[geometry]] assign[=] call[name[mappings]][name[data_type]]
call[name[schema]][constant[geometry_column]] assign[=] name[name]
if <ast.BoolOp object at 0x7da1b02f08e0> begin[:]
return[name[schema]] | keyword[def] identifier[_construct_schema] ( identifier[elements] , identifier[nsmap] ):
literal[string]
identifier[schema] ={
literal[string] :{},
literal[string] : keyword[None]
}
identifier[schema_key] = keyword[None]
identifier[gml_key] = keyword[None]
keyword[if] identifier[nsmap] :
keyword[for] identifier[key] keyword[in] identifier[nsmap] :
keyword[if] identifier[nsmap] [ identifier[key] ]== identifier[XS_NAMESPACE] :
identifier[schema_key] = identifier[key]
keyword[if] identifier[nsmap] [ identifier[key] ] keyword[in] identifier[GML_NAMESPACES] :
identifier[gml_key] = identifier[key]
keyword[else] :
identifier[gml_key] = literal[string]
identifier[schema_key] = literal[string]
identifier[mappings] ={
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string] ,
literal[string] : literal[string]
}
keyword[for] identifier[element] keyword[in] identifier[elements] :
identifier[data_type] = identifier[element] . identifier[attrib] [ literal[string] ]. identifier[replace] ( identifier[gml_key] + literal[string] , literal[string] )
identifier[name] = identifier[element] . identifier[attrib] [ literal[string] ]
keyword[if] identifier[data_type] keyword[in] identifier[mappings] :
identifier[schema] [ literal[string] ]= identifier[mappings] [ identifier[data_type] ]
identifier[schema] [ literal[string] ]= identifier[name]
keyword[else] :
identifier[schema] [ literal[string] ][ identifier[name] ]= identifier[data_type] . identifier[replace] ( identifier[schema_key] + literal[string] , literal[string] )
keyword[if] identifier[schema] [ literal[string] ] keyword[or] identifier[schema] [ literal[string] ]:
keyword[return] identifier[schema]
keyword[else] :
keyword[return] keyword[None] | def _construct_schema(elements, nsmap):
"""Consruct fiona schema based on given elements
:param list Element: list of elements
:param dict nsmap: namespace map
:return dict: schema
"""
schema = {'properties': {}, 'geometry': None}
schema_key = None
gml_key = None
# if nsmap is defined, use it
if nsmap:
for key in nsmap:
if nsmap[key] == XS_NAMESPACE:
schema_key = key # depends on [control=['if'], data=[]]
if nsmap[key] in GML_NAMESPACES:
gml_key = key # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]]
else:
# if no nsmap is defined, we have to guess
gml_key = 'gml'
schema_key = 'xsd'
mappings = {'PointPropertyType': 'Point', 'PolygonPropertyType': 'Polygon', 'LineStringPropertyType': 'LineString', 'MultiPointPropertyType': 'MultiPoint', 'MultiLineStringPropertyType': 'MultiLineString', 'MultiPolygonPropertyType': 'MultiPolygon', 'MultiGeometryPropertyType': 'MultiGeometry', 'GeometryPropertyType': 'GeometryCollection', 'SurfacePropertyType': '3D Polygon', 'MultiSurfacePropertyType': '3D MultiPolygon'}
for element in elements:
data_type = element.attrib['type'].replace(gml_key + ':', '')
name = element.attrib['name']
if data_type in mappings:
schema['geometry'] = mappings[data_type]
schema['geometry_column'] = name # depends on [control=['if'], data=['data_type', 'mappings']]
else:
schema['properties'][name] = data_type.replace(schema_key + ':', '') # depends on [control=['for'], data=['element']]
if schema['properties'] or schema['geometry']:
return schema # depends on [control=['if'], data=[]]
else:
return None |
def compare_checkpoints(self, attr_mean):
    """Compares two checkpoints based on the attribute attr_mean param.

    Greater than is used by default. If command-line parameter
    checkpoint_score_attr starts with "min-" less than is used.

    Arguments:
        attr_mean: mean of attribute value for the current checkpoint

    Returns:
        True: when attr_mean is greater than previous checkpoint attr_mean
            and greater than function is selected
            when attr_mean is less than previous checkpoint attr_mean and
            less than function is selected
        False: when attr_mean is not in alignment with selected cmp fn
    """
    best = self.best_checkpoint_attr_value
    # The comparison direction (_cmp_greater) is fixed at construction time.
    if self._cmp_greater:
        return bool(attr_mean > best)
    return bool(attr_mean < best)
constant[Compares two checkpoints based on the attribute attr_mean param.
Greater than is used by default. If command-line parameter
checkpoint_score_attr starts with "min-" less than is used.
Arguments:
attr_mean: mean of attribute value for the current checkpoint
Returns:
True: when attr_mean is greater than previous checkpoint attr_mean
and greater than function is selected
when attr_mean is less than previous checkpoint attr_mean and
less than function is selected
False: when attr_mean is not in alignment with selected cmp fn
]
if <ast.BoolOp object at 0x7da18f09f250> begin[:]
return[constant[True]]
return[constant[False]] | keyword[def] identifier[compare_checkpoints] ( identifier[self] , identifier[attr_mean] ):
literal[string]
keyword[if] identifier[self] . identifier[_cmp_greater] keyword[and] identifier[attr_mean] > identifier[self] . identifier[best_checkpoint_attr_value] :
keyword[return] keyword[True]
keyword[elif] ( keyword[not] identifier[self] . identifier[_cmp_greater]
keyword[and] identifier[attr_mean] < identifier[self] . identifier[best_checkpoint_attr_value] ):
keyword[return] keyword[True]
keyword[return] keyword[False] | def compare_checkpoints(self, attr_mean):
"""Compares two checkpoints based on the attribute attr_mean param.
Greater than is used by default. If command-line parameter
checkpoint_score_attr starts with "min-" less than is used.
Arguments:
attr_mean: mean of attribute value for the current checkpoint
Returns:
True: when attr_mean is greater than previous checkpoint attr_mean
and greater than function is selected
when attr_mean is less than previous checkpoint attr_mean and
less than function is selected
False: when attr_mean is not in alignment with selected cmp fn
"""
if self._cmp_greater and attr_mean > self.best_checkpoint_attr_value:
return True # depends on [control=['if'], data=[]]
elif not self._cmp_greater and attr_mean < self.best_checkpoint_attr_value:
return True # depends on [control=['if'], data=[]]
return False |
def get_cached_zone_variable(self, zone_id, variable, default=None):
    """ Retrieve the current value of a zone variable from the cache or
    return the default value if the variable is not present. """
    # EAFP: attempt the cache lookup and substitute the fallback value
    # when the variable has not been cached yet.
    try:
        value = self._retrieve_cached_zone_variable(zone_id, variable)
    except UncachedVariable:
        value = default
    return value
constant[ Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. ]
<ast.Try object at 0x7da1b0aa7940> | keyword[def] identifier[get_cached_zone_variable] ( identifier[self] , identifier[zone_id] , identifier[variable] , identifier[default] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[self] . identifier[_retrieve_cached_zone_variable] ( identifier[zone_id] , identifier[variable] )
keyword[except] identifier[UncachedVariable] :
keyword[return] identifier[default] | def get_cached_zone_variable(self, zone_id, variable, default=None):
""" Retrieve the current value of a zone variable from the cache or
return the default value if the variable is not present. """
try:
return self._retrieve_cached_zone_variable(zone_id, variable) # depends on [control=['try'], data=[]]
except UncachedVariable:
return default # depends on [control=['except'], data=[]] |
def check_structure_chain_quality(self, seqprop, structprop, chain_id,
                                  seq_ident_cutoff=0.5, allow_missing_on_termini=0.2,
                                  allow_mutants=True, allow_deletions=False,
                                  allow_insertions=False, allow_unresolved=True):
    """Report if a structure's chain meets the defined cutoffs for sequence quality."""
    # Align the reference sequence to the chain's structural sequence, then
    # score that alignment against the supplied tolerance settings.
    alignment = self._get_seqprop_to_structprop_alignment(seqprop=seqprop,
                                                          structprop=structprop,
                                                          chain_id=chain_id)
    reference_aln = alignment[0]
    structure_aln = alignment[1]
    return ssbio.protein.structure.properties.quality.sequence_checker(
        reference_seq_aln=reference_aln,
        structure_seq_aln=structure_aln,
        seq_ident_cutoff=seq_ident_cutoff,
        allow_missing_on_termini=allow_missing_on_termini,
        allow_mutants=allow_mutants,
        allow_deletions=allow_deletions,
        allow_insertions=allow_insertions,
        allow_unresolved=allow_unresolved)
constant[Report if a structure's chain meets the defined cutoffs for sequence quality.]
variable[alignment] assign[=] call[name[self]._get_seqprop_to_structprop_alignment, parameter[]]
variable[chain_passes_quality_check] assign[=] call[name[ssbio].protein.structure.properties.quality.sequence_checker, parameter[]]
return[name[chain_passes_quality_check]] | keyword[def] identifier[check_structure_chain_quality] ( identifier[self] , identifier[seqprop] , identifier[structprop] , identifier[chain_id] ,
identifier[seq_ident_cutoff] = literal[int] , identifier[allow_missing_on_termini] = literal[int] ,
identifier[allow_mutants] = keyword[True] , identifier[allow_deletions] = keyword[False] ,
identifier[allow_insertions] = keyword[False] , identifier[allow_unresolved] = keyword[True] ):
literal[string]
identifier[alignment] = identifier[self] . identifier[_get_seqprop_to_structprop_alignment] ( identifier[seqprop] = identifier[seqprop] , identifier[structprop] = identifier[structprop] , identifier[chain_id] = identifier[chain_id] )
identifier[chain_passes_quality_check] = identifier[ssbio] . identifier[protein] . identifier[structure] . identifier[properties] . identifier[quality] . identifier[sequence_checker] ( identifier[reference_seq_aln] = identifier[alignment] [ literal[int] ],
identifier[structure_seq_aln] = identifier[alignment] [ literal[int] ],
identifier[seq_ident_cutoff] = identifier[seq_ident_cutoff] ,
identifier[allow_missing_on_termini] = identifier[allow_missing_on_termini] ,
identifier[allow_mutants] = identifier[allow_mutants] ,
identifier[allow_deletions] = identifier[allow_deletions] ,
identifier[allow_insertions] = identifier[allow_insertions] ,
identifier[allow_unresolved] = identifier[allow_unresolved] )
keyword[return] identifier[chain_passes_quality_check] | def check_structure_chain_quality(self, seqprop, structprop, chain_id, seq_ident_cutoff=0.5, allow_missing_on_termini=0.2, allow_mutants=True, allow_deletions=False, allow_insertions=False, allow_unresolved=True):
"""Report if a structure's chain meets the defined cutoffs for sequence quality."""
alignment = self._get_seqprop_to_structprop_alignment(seqprop=seqprop, structprop=structprop, chain_id=chain_id)
# Compare sequence to structure's sequence using the alignment
chain_passes_quality_check = ssbio.protein.structure.properties.quality.sequence_checker(reference_seq_aln=alignment[0], structure_seq_aln=alignment[1], seq_ident_cutoff=seq_ident_cutoff, allow_missing_on_termini=allow_missing_on_termini, allow_mutants=allow_mutants, allow_deletions=allow_deletions, allow_insertions=allow_insertions, allow_unresolved=allow_unresolved)
return chain_passes_quality_check |
def run_once(name, cmd, env, shutdown, loop=None, utc=False):
    """Starts a child process and waits for its completion.

    .. note:: This function is a coroutine.

    Standard output and error streams are captured and forwarded to the parent
    process' standard output.  Each line is prefixed with the current time (as
    measured by the parent process) and the child process ``name``.

    :param name: Label for the child process.  Will be used as a prefix to all
     lines captured by this child process.
    :param cmd: Command-line that will be used to invoke the child process.
     Can be a string or sequence of strings.  When a string is passed,
     ``shlex.split()`` will be used to break it into a sequence of strings
     with smart quoting analysis.  If this does not give the intended
     results, break it down as you see fit and pass a sequence of strings.
    :param env: Environment variables that should be injected in the child
     process.  If ``None``, the parent's environment will be inherited as it.
     If a ``dict`` is provided, this will overwrite the entire environment;
     it is the caller's responsibility to merge this with the parent's
     environment if they see fit.
    :param shutdown: Future that the caller will fulfill to indicate that the
     process should be killed early.  When this is set, the process is killed
     via ``process.kill()`` (SIGKILL on POSIX) and the coroutine then waits
     until the process has actually exited.
    :param loop: Event loop to use.  When ``None``, the default event loop is
     used.
    :param utc: When ``True``, the timestamps are logged using the current time
     in UTC.
    :return: A future that will be completed when the process has completed.
     Upon completion, the future's result will contain the process' exit
     status.
    """

    # Get the default event loop if necessary.
    loop = loop or asyncio.get_event_loop()

    # Launch the command into a child process.
    if isinstance(cmd, str):
        cmd = shlex.split(cmd)
    # stderr is merged into stdout so one readline() stream captures both.
    process = yield from asyncio.create_subprocess_exec(
        *cmd,
        env=env,
        stdin=asyncio.subprocess.PIPE,
        stdout=asyncio.subprocess.PIPE,
        stderr=asyncio.subprocess.STDOUT,
    )
    # NOTE(review): ``now()`` is a module-level helper defined elsewhere in
    # this file — presumably returns a datetime, local or UTC per ``utc``.
    print('%s [strawboss] %s(%d) spawned.' % (
        now(utc).isoformat(), name, process.pid
    ))

    # Exhaust the child's standard output stream.
    #
    # TODO: close stdin for new process.
    # TODO: terminate the process after the grace period.
    ready = asyncio.ensure_future(process.wait())
    # Multiplex three event sources: the caller's shutdown request, the
    # child's termination, and the next line of the child's output.
    pending = {
        shutdown,
        ready,
        asyncio.ensure_future(process.stdout.readline()),
    }
    while not ready.done():
        done, pending = yield from asyncio.wait(
            pending,
            return_when=asyncio.FIRST_COMPLETED,
        )
        for future in done:
            # React to a request to shutdown the process.
            #
            # NOTE: shutdown is asynchronous unless the process completion
            #       notification is "in flight".  We forward the request to
            #       shutdown and then wait until the child process completes.
            if future is shutdown:
                try:
                    process.kill()
                except ProcessLookupError:
                    # Process already gone; nothing to kill.
                    pass
                else:
                    print('%s [strawboss] %s(%d) killed.' % (
                        now(utc).isoformat(), name, process.pid
                    ))
                continue
            # React to process death (natural, killed or terminated).
            if future is ready:
                # ``ready`` is already done here, so this yield resolves
                # immediately with the exit status.
                exit_code = yield from future
                print('%s [strawboss] %s(%d) completed with exit status %d.' % (
                    now(utc).isoformat(), name, process.pid, exit_code
                ))
                continue
            # React to stdout having a full line of text.
            data = yield from future
            if not data:
                # Empty read means EOF: do not schedule another readline.
                print('%s [strawboss] EOF from %s(%d).' % (
                    now(utc).isoformat(), name, process.pid,
                ))
                continue
            data = data.decode('utf-8').strip()
            print('%s [%s] %s' % (
                now(utc).isoformat(), name, data
            ))
            # Re-arm the readline only after consuming a full line.
            pending.add(asyncio.ensure_future(process.stdout.readline()))

    # Cancel any remaining tasks (e.g. readline).  The caller's shutdown
    # future is deliberately left alone — it is not ours to cancel.
    for future in pending:
        if future is shutdown:
            continue
        future.cancel()

    # Pass the exit code back to the caller.  ``exit_code`` is guaranteed to
    # be bound: the loop only exits after ``ready`` completed, and that
    # branch assigns it.
    return exit_code
constant[Starts a child process and waits for its completion.
.. note:: This function is a coroutine.
Standard output and error streams are captured and forwarded to the parent
process' standard output. Each line is prefixed with the current time (as
measured by the parent process) and the child process ``name``.
:param name: Label for the child process. Will be used as a prefix to all
lines captured by this child process.
:param cmd: Command-line that will be used to invoke the child process.
Can be a string or sequence of strings. When a string is passed,
``shlex.split()`` will be used to break it into a sequence of strings
with smart quoting analysis. If this does not give the intended
results, break it down as you see fit and pass a sequence of strings.
:param env: Environment variables that should be injected in the child
process. If ``None``, the parent's environment will be inherited as it.
If a ``dict`` is provided, this will overwrite the entire environment;
it is the caller's responsibility to merge this with the parent's
environment if they see fit.
:param shutdown: Future that the caller will fulfill to indicate that the
process should be killed early. When this is set, the process is sent
SIGINT and then is let complete naturally.
:param loop: Event loop to use. When ``None``, the default event loop is
used.
:param utc: When ``True``, the timestamps are logged using the current time
in UTC.
:return: A future that will be completed when the process has completed.
Upon completion, the future's result will contain the process' exit
status.
]
variable[loop] assign[=] <ast.BoolOp object at 0x7da1b14a81f0>
if call[name[isinstance], parameter[name[cmd], name[str]]] begin[:]
variable[cmd] assign[=] call[name[shlex].split, parameter[name[cmd]]]
variable[process] assign[=] <ast.YieldFrom object at 0x7da1b14abdf0>
call[name[print], parameter[binary_operation[constant[%s [strawboss] %s(%d) spawned.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b14a98a0>, <ast.Name object at 0x7da1b14aac50>, <ast.Attribute object at 0x7da1b14a84c0>]]]]]
variable[ready] assign[=] call[name[asyncio].ensure_future, parameter[call[name[process].wait, parameter[]]]]
variable[pending] assign[=] <ast.Set object at 0x7da1b14a8610>
while <ast.UnaryOp object at 0x7da1b1436e60> begin[:]
<ast.Tuple object at 0x7da1b14366b0> assign[=] <ast.YieldFrom object at 0x7da1b14342e0>
for taget[name[future]] in starred[name[done]] begin[:]
if compare[name[future] is name[shutdown]] begin[:]
<ast.Try object at 0x7da1b1434cd0>
continue
if compare[name[future] is name[ready]] begin[:]
variable[exit_code] assign[=] <ast.YieldFrom object at 0x7da1b1437460>
call[name[print], parameter[binary_operation[constant[%s [strawboss] %s(%d) completed with exit status %d.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1436b60>, <ast.Name object at 0x7da1b14375e0>, <ast.Attribute object at 0x7da1b1435540>, <ast.Name object at 0x7da1b1435510>]]]]]
continue
variable[data] assign[=] <ast.YieldFrom object at 0x7da1b1435750>
if <ast.UnaryOp object at 0x7da1b1436890> begin[:]
call[name[print], parameter[binary_operation[constant[%s [strawboss] EOF from %s(%d).] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b14364d0>, <ast.Name object at 0x7da1b14368f0>, <ast.Attribute object at 0x7da1b1434340>]]]]]
continue
variable[data] assign[=] call[call[name[data].decode, parameter[constant[utf-8]]].strip, parameter[]]
call[name[print], parameter[binary_operation[constant[%s [%s] %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b1437f40>, <ast.Name object at 0x7da1b1436fb0>, <ast.Name object at 0x7da1b1436440>]]]]]
call[name[pending].add, parameter[call[name[asyncio].ensure_future, parameter[call[name[process].stdout.readline, parameter[]]]]]]
for taget[name[future]] in starred[name[pending]] begin[:]
if compare[name[future] is name[shutdown]] begin[:]
continue
call[name[future].cancel, parameter[]]
return[name[exit_code]] | keyword[def] identifier[run_once] ( identifier[name] , identifier[cmd] , identifier[env] , identifier[shutdown] , identifier[loop] = keyword[None] , identifier[utc] = keyword[False] ):
literal[string]
identifier[loop] = identifier[loop] keyword[or] identifier[asyncio] . identifier[get_event_loop] ()
keyword[if] identifier[isinstance] ( identifier[cmd] , identifier[str] ):
identifier[cmd] = identifier[shlex] . identifier[split] ( identifier[cmd] )
identifier[process] = keyword[yield] keyword[from] identifier[asyncio] . identifier[create_subprocess_exec] (
* identifier[cmd] ,
identifier[env] = identifier[env] ,
identifier[stdin] = identifier[asyncio] . identifier[subprocess] . identifier[PIPE] ,
identifier[stdout] = identifier[asyncio] . identifier[subprocess] . identifier[PIPE] ,
identifier[stderr] = identifier[asyncio] . identifier[subprocess] . identifier[STDOUT] ,
)
identifier[print] ( literal[string] %(
identifier[now] ( identifier[utc] ). identifier[isoformat] (), identifier[name] , identifier[process] . identifier[pid]
))
identifier[ready] = identifier[asyncio] . identifier[ensure_future] ( identifier[process] . identifier[wait] ())
identifier[pending] ={
identifier[shutdown] ,
identifier[ready] ,
identifier[asyncio] . identifier[ensure_future] ( identifier[process] . identifier[stdout] . identifier[readline] ()),
}
keyword[while] keyword[not] identifier[ready] . identifier[done] ():
identifier[done] , identifier[pending] = keyword[yield] keyword[from] identifier[asyncio] . identifier[wait] (
identifier[pending] ,
identifier[return_when] = identifier[asyncio] . identifier[FIRST_COMPLETED] ,
)
keyword[for] identifier[future] keyword[in] identifier[done] :
keyword[if] identifier[future] keyword[is] identifier[shutdown] :
keyword[try] :
identifier[process] . identifier[kill] ()
keyword[except] identifier[ProcessLookupError] :
keyword[pass]
keyword[else] :
identifier[print] ( literal[string] %(
identifier[now] ( identifier[utc] ). identifier[isoformat] (), identifier[name] , identifier[process] . identifier[pid]
))
keyword[continue]
keyword[if] identifier[future] keyword[is] identifier[ready] :
identifier[exit_code] = keyword[yield] keyword[from] identifier[future]
identifier[print] ( literal[string] %(
identifier[now] ( identifier[utc] ). identifier[isoformat] (), identifier[name] , identifier[process] . identifier[pid] , identifier[exit_code]
))
keyword[continue]
identifier[data] = keyword[yield] keyword[from] identifier[future]
keyword[if] keyword[not] identifier[data] :
identifier[print] ( literal[string] %(
identifier[now] ( identifier[utc] ). identifier[isoformat] (), identifier[name] , identifier[process] . identifier[pid] ,
))
keyword[continue]
identifier[data] = identifier[data] . identifier[decode] ( literal[string] ). identifier[strip] ()
identifier[print] ( literal[string] %(
identifier[now] ( identifier[utc] ). identifier[isoformat] (), identifier[name] , identifier[data]
))
identifier[pending] . identifier[add] ( identifier[asyncio] . identifier[ensure_future] ( identifier[process] . identifier[stdout] . identifier[readline] ()))
keyword[for] identifier[future] keyword[in] identifier[pending] :
keyword[if] identifier[future] keyword[is] identifier[shutdown] :
keyword[continue]
identifier[future] . identifier[cancel] ()
keyword[return] identifier[exit_code] | def run_once(name, cmd, env, shutdown, loop=None, utc=False):
"""Starts a child process and waits for its completion.
.. note:: This function is a coroutine.
Standard output and error streams are captured and forwarded to the parent
process' standard output. Each line is prefixed with the current time (as
measured by the parent process) and the child process ``name``.
:param name: Label for the child process. Will be used as a prefix to all
lines captured by this child process.
:param cmd: Command-line that will be used to invoke the child process.
Can be a string or sequence of strings. When a string is passed,
``shlex.split()`` will be used to break it into a sequence of strings
with smart quoting analysis. If this does not give the intended
results, break it down as you see fit and pass a sequence of strings.
:param env: Environment variables that should be injected in the child
process. If ``None``, the parent's environment will be inherited as it.
If a ``dict`` is provided, this will overwrite the entire environment;
it is the caller's responsibility to merge this with the parent's
environment if they see fit.
:param shutdown: Future that the caller will fulfill to indicate that the
process should be killed early. When this is set, the process is sent
SIGINT and then is let complete naturally.
:param loop: Event loop to use. When ``None``, the default event loop is
used.
:param utc: When ``True``, the timestamps are logged using the current time
in UTC.
:return: A future that will be completed when the process has completed.
Upon completion, the future's result will contain the process' exit
status.
"""
# Get the default event loop if necessary.
loop = loop or asyncio.get_event_loop()
# Launch the command into a child process.
if isinstance(cmd, str):
cmd = shlex.split(cmd) # depends on [control=['if'], data=[]]
process = (yield from asyncio.create_subprocess_exec(*cmd, env=env, stdin=asyncio.subprocess.PIPE, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.STDOUT))
print('%s [strawboss] %s(%d) spawned.' % (now(utc).isoformat(), name, process.pid))
# Exhaust the child's standard output stream.
#
# TODO: close stdin for new process.
# TODO: terminate the process after the grace period.
ready = asyncio.ensure_future(process.wait())
pending = {shutdown, ready, asyncio.ensure_future(process.stdout.readline())}
while not ready.done():
(done, pending) = (yield from asyncio.wait(pending, return_when=asyncio.FIRST_COMPLETED))
for future in done:
# React to a request to shutdown the process.
#
# NOTE: shutdown is asynchronous unless the process completion
# notification is "in flight". We forward the request to
# shutdown and then wait until the child process completes.
if future is shutdown:
try:
process.kill() # depends on [control=['try'], data=[]]
except ProcessLookupError:
pass # depends on [control=['except'], data=[]]
else:
print('%s [strawboss] %s(%d) killed.' % (now(utc).isoformat(), name, process.pid))
continue # depends on [control=['if'], data=[]]
# React to process death (natural, killed or terminated).
if future is ready:
exit_code = (yield from future)
print('%s [strawboss] %s(%d) completed with exit status %d.' % (now(utc).isoformat(), name, process.pid, exit_code))
continue # depends on [control=['if'], data=['future']]
# React to stdout having a full line of text.
data = (yield from future)
if not data:
print('%s [strawboss] EOF from %s(%d).' % (now(utc).isoformat(), name, process.pid))
continue # depends on [control=['if'], data=[]]
data = data.decode('utf-8').strip()
print('%s [%s] %s' % (now(utc).isoformat(), name, data))
pending.add(asyncio.ensure_future(process.stdout.readline())) # depends on [control=['for'], data=['future']] # depends on [control=['while'], data=[]]
# Cancel any remaining tasks (e.g. readline).
for future in pending:
if future is shutdown:
continue # depends on [control=['if'], data=[]]
future.cancel() # depends on [control=['for'], data=['future']]
# Pass the exit code back to the caller.
return exit_code |
def update_stage(self, stage, executable=None, force=False,
name=None, unset_name=False, folder=None, unset_folder=False, stage_input=None,
instance_type=None, edit_version=None, **kwargs):
'''
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string stage index, name, or ID
:type stage: int or string
:param executable: string or a handler for an app or applet
:type executable: string, DXApplet, or DXApp
:param force: whether to use *executable* even if it is incompatible with the previous executable's spec
:type force: boolean
:param name: new name for the stage; cannot be provided with *unset_name* set to True
:type name: string
:param unset_name: whether to unset the stage name; cannot be True with string value for *name*
:type unset_name: boolean
:param folder: new default output folder for the stage; either a relative or absolute path (optional)
:type folder: string
:param unset_folder: whether to unset the stage folder; cannot be True with string value for *folder*
:type unset_folder: boolean
:param stage_input: input fields to bind as default inputs for the executable (optional)
:type stage_input: dict
:param instance_type: Default instance type on which all jobs will be run for this stage, or a dict mapping function names to instance type requests
:type instance_type: string or dict
:param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional)
:type edit_version: int
Removes the specified stage from the workflow
'''
stage_id = self._get_stage_id(stage)
if name is not None and unset_name:
raise DXError('dxpy.DXWorkflow.update_stage: cannot provide both "name" and set "unset_name"')
if folder is not None and unset_folder:
raise DXError('dxpy.DXWorkflow.update_stage: cannot provide both "folder" and set "unset_folder"')
if executable is not None:
if isinstance(executable, basestring):
exec_id = executable
elif isinstance(executable, DXExecutable):
exec_id = executable.get_id()
else:
raise DXError("dxpy.DXWorkflow.update_stage: executable (if provided) must be a string or an instance of DXApplet or DXApp")
update_stage_exec_input = {"stage": stage_id,
"executable": exec_id,
"force": force}
self._add_edit_version_to_request(update_stage_exec_input, edit_version)
try:
dxpy.api.workflow_update_stage_executable(self._dxid, update_stage_exec_input, **kwargs)
finally:
self.describe() # update cached describe
# Construct hash and update the workflow's stage if necessary
update_stage_input = {}
if name is not None:
update_stage_input["name"] = name
elif unset_name:
update_stage_input["name"] = None
if folder:
update_stage_input["folder"] = folder
elif unset_folder:
update_stage_input["folder"] = None
if stage_input:
update_stage_input["input"] = stage_input
if instance_type is not None:
update_stage_input["systemRequirements"] = SystemRequirementsDict.from_instance_type(instance_type).as_dict()
if update_stage_input:
update_input = {"stages": {stage_id: update_stage_input}}
self._add_edit_version_to_request(update_input, edit_version)
try:
dxpy.api.workflow_update(self._dxid, update_input, **kwargs)
finally:
self.describe() | def function[update_stage, parameter[self, stage, executable, force, name, unset_name, folder, unset_folder, stage_input, instance_type, edit_version]]:
constant[
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string stage index, name, or ID
:type stage: int or string
:param executable: string or a handler for an app or applet
:type executable: string, DXApplet, or DXApp
:param force: whether to use *executable* even if it is incompatible with the previous executable's spec
:type force: boolean
:param name: new name for the stage; cannot be provided with *unset_name* set to True
:type name: string
:param unset_name: whether to unset the stage name; cannot be True with string value for *name*
:type unset_name: boolean
:param folder: new default output folder for the stage; either a relative or absolute path (optional)
:type folder: string
:param unset_folder: whether to unset the stage folder; cannot be True with string value for *folder*
:type unset_folder: boolean
:param stage_input: input fields to bind as default inputs for the executable (optional)
:type stage_input: dict
:param instance_type: Default instance type on which all jobs will be run for this stage, or a dict mapping function names to instance type requests
:type instance_type: string or dict
:param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional)
:type edit_version: int
Removes the specified stage from the workflow
]
variable[stage_id] assign[=] call[name[self]._get_stage_id, parameter[name[stage]]]
if <ast.BoolOp object at 0x7da20c6e41c0> begin[:]
<ast.Raise object at 0x7da20c6e77f0>
if <ast.BoolOp object at 0x7da20c6e4a00> begin[:]
<ast.Raise object at 0x7da20c6e6b60>
if compare[name[executable] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[executable], name[basestring]]] begin[:]
variable[exec_id] assign[=] name[executable]
variable[update_stage_exec_input] assign[=] dictionary[[<ast.Constant object at 0x7da20c6e5b70>, <ast.Constant object at 0x7da20c6e6740>, <ast.Constant object at 0x7da20c6e4760>], [<ast.Name object at 0x7da20c6e5d20>, <ast.Name object at 0x7da20c6e5420>, <ast.Name object at 0x7da20c6e63b0>]]
call[name[self]._add_edit_version_to_request, parameter[name[update_stage_exec_input], name[edit_version]]]
<ast.Try object at 0x7da20c6e7ee0>
variable[update_stage_input] assign[=] dictionary[[], []]
if compare[name[name] is_not constant[None]] begin[:]
call[name[update_stage_input]][constant[name]] assign[=] name[name]
if name[folder] begin[:]
call[name[update_stage_input]][constant[folder]] assign[=] name[folder]
if name[stage_input] begin[:]
call[name[update_stage_input]][constant[input]] assign[=] name[stage_input]
if compare[name[instance_type] is_not constant[None]] begin[:]
call[name[update_stage_input]][constant[systemRequirements]] assign[=] call[call[name[SystemRequirementsDict].from_instance_type, parameter[name[instance_type]]].as_dict, parameter[]]
if name[update_stage_input] begin[:]
variable[update_input] assign[=] dictionary[[<ast.Constant object at 0x7da18dc9a710>], [<ast.Dict object at 0x7da18dc99a20>]]
call[name[self]._add_edit_version_to_request, parameter[name[update_input], name[edit_version]]]
<ast.Try object at 0x7da18dc9a5f0> | keyword[def] identifier[update_stage] ( identifier[self] , identifier[stage] , identifier[executable] = keyword[None] , identifier[force] = keyword[False] ,
identifier[name] = keyword[None] , identifier[unset_name] = keyword[False] , identifier[folder] = keyword[None] , identifier[unset_folder] = keyword[False] , identifier[stage_input] = keyword[None] ,
identifier[instance_type] = keyword[None] , identifier[edit_version] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[stage_id] = identifier[self] . identifier[_get_stage_id] ( identifier[stage] )
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] keyword[and] identifier[unset_name] :
keyword[raise] identifier[DXError] ( literal[string] )
keyword[if] identifier[folder] keyword[is] keyword[not] keyword[None] keyword[and] identifier[unset_folder] :
keyword[raise] identifier[DXError] ( literal[string] )
keyword[if] identifier[executable] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[executable] , identifier[basestring] ):
identifier[exec_id] = identifier[executable]
keyword[elif] identifier[isinstance] ( identifier[executable] , identifier[DXExecutable] ):
identifier[exec_id] = identifier[executable] . identifier[get_id] ()
keyword[else] :
keyword[raise] identifier[DXError] ( literal[string] )
identifier[update_stage_exec_input] ={ literal[string] : identifier[stage_id] ,
literal[string] : identifier[exec_id] ,
literal[string] : identifier[force] }
identifier[self] . identifier[_add_edit_version_to_request] ( identifier[update_stage_exec_input] , identifier[edit_version] )
keyword[try] :
identifier[dxpy] . identifier[api] . identifier[workflow_update_stage_executable] ( identifier[self] . identifier[_dxid] , identifier[update_stage_exec_input] ,** identifier[kwargs] )
keyword[finally] :
identifier[self] . identifier[describe] ()
identifier[update_stage_input] ={}
keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] :
identifier[update_stage_input] [ literal[string] ]= identifier[name]
keyword[elif] identifier[unset_name] :
identifier[update_stage_input] [ literal[string] ]= keyword[None]
keyword[if] identifier[folder] :
identifier[update_stage_input] [ literal[string] ]= identifier[folder]
keyword[elif] identifier[unset_folder] :
identifier[update_stage_input] [ literal[string] ]= keyword[None]
keyword[if] identifier[stage_input] :
identifier[update_stage_input] [ literal[string] ]= identifier[stage_input]
keyword[if] identifier[instance_type] keyword[is] keyword[not] keyword[None] :
identifier[update_stage_input] [ literal[string] ]= identifier[SystemRequirementsDict] . identifier[from_instance_type] ( identifier[instance_type] ). identifier[as_dict] ()
keyword[if] identifier[update_stage_input] :
identifier[update_input] ={ literal[string] :{ identifier[stage_id] : identifier[update_stage_input] }}
identifier[self] . identifier[_add_edit_version_to_request] ( identifier[update_input] , identifier[edit_version] )
keyword[try] :
identifier[dxpy] . identifier[api] . identifier[workflow_update] ( identifier[self] . identifier[_dxid] , identifier[update_input] ,** identifier[kwargs] )
keyword[finally] :
identifier[self] . identifier[describe] () | def update_stage(self, stage, executable=None, force=False, name=None, unset_name=False, folder=None, unset_folder=False, stage_input=None, instance_type=None, edit_version=None, **kwargs):
"""
:param stage: A number for the stage index (for the nth stage, starting from 0), or a string stage index, name, or ID
:type stage: int or string
:param executable: string or a handler for an app or applet
:type executable: string, DXApplet, or DXApp
:param force: whether to use *executable* even if it is incompatible with the previous executable's spec
:type force: boolean
:param name: new name for the stage; cannot be provided with *unset_name* set to True
:type name: string
:param unset_name: whether to unset the stage name; cannot be True with string value for *name*
:type unset_name: boolean
:param folder: new default output folder for the stage; either a relative or absolute path (optional)
:type folder: string
:param unset_folder: whether to unset the stage folder; cannot be True with string value for *folder*
:type unset_folder: boolean
:param stage_input: input fields to bind as default inputs for the executable (optional)
:type stage_input: dict
:param instance_type: Default instance type on which all jobs will be run for this stage, or a dict mapping function names to instance type requests
:type instance_type: string or dict
:param edit_version: if provided, the edit version of the workflow that should be modified; if not provided, the current edit version will be used (optional)
:type edit_version: int
Removes the specified stage from the workflow
"""
stage_id = self._get_stage_id(stage)
if name is not None and unset_name:
raise DXError('dxpy.DXWorkflow.update_stage: cannot provide both "name" and set "unset_name"') # depends on [control=['if'], data=[]]
if folder is not None and unset_folder:
raise DXError('dxpy.DXWorkflow.update_stage: cannot provide both "folder" and set "unset_folder"') # depends on [control=['if'], data=[]]
if executable is not None:
if isinstance(executable, basestring):
exec_id = executable # depends on [control=['if'], data=[]]
elif isinstance(executable, DXExecutable):
exec_id = executable.get_id() # depends on [control=['if'], data=[]]
else:
raise DXError('dxpy.DXWorkflow.update_stage: executable (if provided) must be a string or an instance of DXApplet or DXApp')
update_stage_exec_input = {'stage': stage_id, 'executable': exec_id, 'force': force}
self._add_edit_version_to_request(update_stage_exec_input, edit_version)
try:
dxpy.api.workflow_update_stage_executable(self._dxid, update_stage_exec_input, **kwargs) # depends on [control=['try'], data=[]]
finally:
self.describe() # update cached describe # depends on [control=['if'], data=['executable']]
# Construct hash and update the workflow's stage if necessary
update_stage_input = {}
if name is not None:
update_stage_input['name'] = name # depends on [control=['if'], data=['name']]
elif unset_name:
update_stage_input['name'] = None # depends on [control=['if'], data=[]]
if folder:
update_stage_input['folder'] = folder # depends on [control=['if'], data=[]]
elif unset_folder:
update_stage_input['folder'] = None # depends on [control=['if'], data=[]]
if stage_input:
update_stage_input['input'] = stage_input # depends on [control=['if'], data=[]]
if instance_type is not None:
update_stage_input['systemRequirements'] = SystemRequirementsDict.from_instance_type(instance_type).as_dict() # depends on [control=['if'], data=['instance_type']]
if update_stage_input:
update_input = {'stages': {stage_id: update_stage_input}}
self._add_edit_version_to_request(update_input, edit_version)
try:
dxpy.api.workflow_update(self._dxid, update_input, **kwargs) # depends on [control=['try'], data=[]]
finally:
self.describe() # depends on [control=['if'], data=[]] |
def _scene_centroid(self):
""" Compute image center coordinates
:return: Tuple of image center in lat, lon
"""
ul_lat = self.corner_ul_lat_product
ll_lat = self.corner_ll_lat_product
ul_lon = self.corner_ul_lon_product
ur_lon = self.corner_ur_lon_product
lat = (ul_lat + ll_lat) / 2.
lon = (ul_lon + ur_lon) / 2.
return lat, lon | def function[_scene_centroid, parameter[self]]:
constant[ Compute image center coordinates
:return: Tuple of image center in lat, lon
]
variable[ul_lat] assign[=] name[self].corner_ul_lat_product
variable[ll_lat] assign[=] name[self].corner_ll_lat_product
variable[ul_lon] assign[=] name[self].corner_ul_lon_product
variable[ur_lon] assign[=] name[self].corner_ur_lon_product
variable[lat] assign[=] binary_operation[binary_operation[name[ul_lat] + name[ll_lat]] / constant[2.0]]
variable[lon] assign[=] binary_operation[binary_operation[name[ul_lon] + name[ur_lon]] / constant[2.0]]
return[tuple[[<ast.Name object at 0x7da18dc9a500>, <ast.Name object at 0x7da18dc9b730>]]] | keyword[def] identifier[_scene_centroid] ( identifier[self] ):
literal[string]
identifier[ul_lat] = identifier[self] . identifier[corner_ul_lat_product]
identifier[ll_lat] = identifier[self] . identifier[corner_ll_lat_product]
identifier[ul_lon] = identifier[self] . identifier[corner_ul_lon_product]
identifier[ur_lon] = identifier[self] . identifier[corner_ur_lon_product]
identifier[lat] =( identifier[ul_lat] + identifier[ll_lat] )/ literal[int]
identifier[lon] =( identifier[ul_lon] + identifier[ur_lon] )/ literal[int]
keyword[return] identifier[lat] , identifier[lon] | def _scene_centroid(self):
""" Compute image center coordinates
:return: Tuple of image center in lat, lon
"""
ul_lat = self.corner_ul_lat_product
ll_lat = self.corner_ll_lat_product
ul_lon = self.corner_ul_lon_product
ur_lon = self.corner_ur_lon_product
lat = (ul_lat + ll_lat) / 2.0
lon = (ul_lon + ur_lon) / 2.0
return (lat, lon) |
def __write_docker_compose(path, docker_compose, already_existed):
'''
Write docker-compose to a path
in order to use it with docker-compose ( config check )
:param path:
docker_compose
contains the docker-compose file
:return:
'''
if path.lower().endswith(('.yml', '.yaml')):
file_path = path
dir_name = os.path.dirname(path)
else:
dir_name = path
file_path = os.path.join(dir_name, DEFAULT_DC_FILENAMES[0])
if os.path.isdir(dir_name) is False:
os.mkdir(dir_name)
try:
with salt.utils.files.fopen(file_path, 'w') as fl:
fl.write(salt.utils.stringutils.to_str(docker_compose))
except EnvironmentError:
return __standardize_result(False,
'Could not write {0}'.format(file_path),
None, None)
project = __load_project_from_file_path(file_path)
if isinstance(project, dict):
if not already_existed:
os.remove(file_path)
return project
return file_path | def function[__write_docker_compose, parameter[path, docker_compose, already_existed]]:
constant[
Write docker-compose to a path
in order to use it with docker-compose ( config check )
:param path:
docker_compose
contains the docker-compose file
:return:
]
if call[call[name[path].lower, parameter[]].endswith, parameter[tuple[[<ast.Constant object at 0x7da1b212fc40>, <ast.Constant object at 0x7da1b212f130>]]]] begin[:]
variable[file_path] assign[=] name[path]
variable[dir_name] assign[=] call[name[os].path.dirname, parameter[name[path]]]
if compare[call[name[os].path.isdir, parameter[name[dir_name]]] is constant[False]] begin[:]
call[name[os].mkdir, parameter[name[dir_name]]]
<ast.Try object at 0x7da1b212e800>
variable[project] assign[=] call[name[__load_project_from_file_path], parameter[name[file_path]]]
if call[name[isinstance], parameter[name[project], name[dict]]] begin[:]
if <ast.UnaryOp object at 0x7da1b212ecb0> begin[:]
call[name[os].remove, parameter[name[file_path]]]
return[name[project]]
return[name[file_path]] | keyword[def] identifier[__write_docker_compose] ( identifier[path] , identifier[docker_compose] , identifier[already_existed] ):
literal[string]
keyword[if] identifier[path] . identifier[lower] (). identifier[endswith] (( literal[string] , literal[string] )):
identifier[file_path] = identifier[path]
identifier[dir_name] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[path] )
keyword[else] :
identifier[dir_name] = identifier[path]
identifier[file_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_name] , identifier[DEFAULT_DC_FILENAMES] [ literal[int] ])
keyword[if] identifier[os] . identifier[path] . identifier[isdir] ( identifier[dir_name] ) keyword[is] keyword[False] :
identifier[os] . identifier[mkdir] ( identifier[dir_name] )
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[file_path] , literal[string] ) keyword[as] identifier[fl] :
identifier[fl] . identifier[write] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str] ( identifier[docker_compose] ))
keyword[except] identifier[EnvironmentError] :
keyword[return] identifier[__standardize_result] ( keyword[False] ,
literal[string] . identifier[format] ( identifier[file_path] ),
keyword[None] , keyword[None] )
identifier[project] = identifier[__load_project_from_file_path] ( identifier[file_path] )
keyword[if] identifier[isinstance] ( identifier[project] , identifier[dict] ):
keyword[if] keyword[not] identifier[already_existed] :
identifier[os] . identifier[remove] ( identifier[file_path] )
keyword[return] identifier[project]
keyword[return] identifier[file_path] | def __write_docker_compose(path, docker_compose, already_existed):
"""
Write docker-compose to a path
in order to use it with docker-compose ( config check )
:param path:
docker_compose
contains the docker-compose file
:return:
"""
if path.lower().endswith(('.yml', '.yaml')):
file_path = path
dir_name = os.path.dirname(path) # depends on [control=['if'], data=[]]
else:
dir_name = path
file_path = os.path.join(dir_name, DEFAULT_DC_FILENAMES[0])
if os.path.isdir(dir_name) is False:
os.mkdir(dir_name) # depends on [control=['if'], data=[]]
try:
with salt.utils.files.fopen(file_path, 'w') as fl:
fl.write(salt.utils.stringutils.to_str(docker_compose)) # depends on [control=['with'], data=['fl']] # depends on [control=['try'], data=[]]
except EnvironmentError:
return __standardize_result(False, 'Could not write {0}'.format(file_path), None, None) # depends on [control=['except'], data=[]]
project = __load_project_from_file_path(file_path)
if isinstance(project, dict):
if not already_existed:
os.remove(file_path) # depends on [control=['if'], data=[]]
return project # depends on [control=['if'], data=[]]
return file_path |
def oscltx(state, et, mu):
"""
Determine the set of osculating conic orbital elements that
corresponds to the state (position, velocity) of a body at some
epoch. In additional to the classical elements, return the true
anomaly, semi-major axis, and period, if applicable.
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/oscltx_c.html
:param state: State of body at epoch of elements.
:type state: 6-Element Array of floats
:param et: Epoch of elements.
:type et: float
:param mu: Gravitational parameter (GM) of primary body.
:type mu: float
:return: Extended set of classical conic elements.
"""
state = stypes.toDoubleVector(state)
et = ctypes.c_double(et)
mu = ctypes.c_double(mu)
elts = stypes.emptyDoubleVector(20)
libspice.oscltx_c(state, et, mu, elts)
return stypes.cVectorToPython(elts)[0:11] | def function[oscltx, parameter[state, et, mu]]:
constant[
Determine the set of osculating conic orbital elements that
corresponds to the state (position, velocity) of a body at some
epoch. In additional to the classical elements, return the true
anomaly, semi-major axis, and period, if applicable.
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/oscltx_c.html
:param state: State of body at epoch of elements.
:type state: 6-Element Array of floats
:param et: Epoch of elements.
:type et: float
:param mu: Gravitational parameter (GM) of primary body.
:type mu: float
:return: Extended set of classical conic elements.
]
variable[state] assign[=] call[name[stypes].toDoubleVector, parameter[name[state]]]
variable[et] assign[=] call[name[ctypes].c_double, parameter[name[et]]]
variable[mu] assign[=] call[name[ctypes].c_double, parameter[name[mu]]]
variable[elts] assign[=] call[name[stypes].emptyDoubleVector, parameter[constant[20]]]
call[name[libspice].oscltx_c, parameter[name[state], name[et], name[mu], name[elts]]]
return[call[call[name[stypes].cVectorToPython, parameter[name[elts]]]][<ast.Slice object at 0x7da2054a6d40>]] | keyword[def] identifier[oscltx] ( identifier[state] , identifier[et] , identifier[mu] ):
literal[string]
identifier[state] = identifier[stypes] . identifier[toDoubleVector] ( identifier[state] )
identifier[et] = identifier[ctypes] . identifier[c_double] ( identifier[et] )
identifier[mu] = identifier[ctypes] . identifier[c_double] ( identifier[mu] )
identifier[elts] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] )
identifier[libspice] . identifier[oscltx_c] ( identifier[state] , identifier[et] , identifier[mu] , identifier[elts] )
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[elts] )[ literal[int] : literal[int] ] | def oscltx(state, et, mu):
"""
Determine the set of osculating conic orbital elements that
corresponds to the state (position, velocity) of a body at some
epoch. In additional to the classical elements, return the true
anomaly, semi-major axis, and period, if applicable.
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/oscltx_c.html
:param state: State of body at epoch of elements.
:type state: 6-Element Array of floats
:param et: Epoch of elements.
:type et: float
:param mu: Gravitational parameter (GM) of primary body.
:type mu: float
:return: Extended set of classical conic elements.
"""
state = stypes.toDoubleVector(state)
et = ctypes.c_double(et)
mu = ctypes.c_double(mu)
elts = stypes.emptyDoubleVector(20)
libspice.oscltx_c(state, et, mu, elts)
return stypes.cVectorToPython(elts)[0:11] |
def is_valid_program(self,p):
"""checks whether program p makes a syntactically valid tree.
checks that the accumulated program length is always greater than the
accumulated arities, indicating that the appropriate number of arguments is
alway present for functions. It then checks that the sum of arties +1
exactly equals the length of the stack, indicating that there are no
missing arguments.
"""
# print("p:",p)
arities = list(a.arity[a.in_type] for a in p)
accu_arities = list(accumulate(arities))
accu_len = list(np.arange(len(p))+1)
check = list(a < b for a,b in zip(accu_arities,accu_len))
# print("accu_arities:",accu_arities)
# print("accu_len:",accu_len)
# print("accu_arities < accu_len:",accu_arities<accu_len)
return all(check) and sum(a.arity[a.in_type] for a in p) +1 == len(p) and len(p)>0 | def function[is_valid_program, parameter[self, p]]:
constant[checks whether program p makes a syntactically valid tree.
checks that the accumulated program length is always greater than the
accumulated arities, indicating that the appropriate number of arguments is
alway present for functions. It then checks that the sum of arties +1
exactly equals the length of the stack, indicating that there are no
missing arguments.
]
variable[arities] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da2041d97e0>]]
variable[accu_arities] assign[=] call[name[list], parameter[call[name[accumulate], parameter[name[arities]]]]]
variable[accu_len] assign[=] call[name[list], parameter[binary_operation[call[name[np].arange, parameter[call[name[len], parameter[name[p]]]]] + constant[1]]]]
variable[check] assign[=] call[name[list], parameter[<ast.GeneratorExp object at 0x7da20c991b70>]]
return[<ast.BoolOp object at 0x7da1b1972e90>] | keyword[def] identifier[is_valid_program] ( identifier[self] , identifier[p] ):
literal[string]
identifier[arities] = identifier[list] ( identifier[a] . identifier[arity] [ identifier[a] . identifier[in_type] ] keyword[for] identifier[a] keyword[in] identifier[p] )
identifier[accu_arities] = identifier[list] ( identifier[accumulate] ( identifier[arities] ))
identifier[accu_len] = identifier[list] ( identifier[np] . identifier[arange] ( identifier[len] ( identifier[p] ))+ literal[int] )
identifier[check] = identifier[list] ( identifier[a] < identifier[b] keyword[for] identifier[a] , identifier[b] keyword[in] identifier[zip] ( identifier[accu_arities] , identifier[accu_len] ))
keyword[return] identifier[all] ( identifier[check] ) keyword[and] identifier[sum] ( identifier[a] . identifier[arity] [ identifier[a] . identifier[in_type] ] keyword[for] identifier[a] keyword[in] identifier[p] )+ literal[int] == identifier[len] ( identifier[p] ) keyword[and] identifier[len] ( identifier[p] )> literal[int] | def is_valid_program(self, p):
"""checks whether program p makes a syntactically valid tree.
checks that the accumulated program length is always greater than the
accumulated arities, indicating that the appropriate number of arguments is
alway present for functions. It then checks that the sum of arties +1
exactly equals the length of the stack, indicating that there are no
missing arguments.
"""
# print("p:",p)
arities = list((a.arity[a.in_type] for a in p))
accu_arities = list(accumulate(arities))
accu_len = list(np.arange(len(p)) + 1)
check = list((a < b for (a, b) in zip(accu_arities, accu_len)))
# print("accu_arities:",accu_arities)
# print("accu_len:",accu_len)
# print("accu_arities < accu_len:",accu_arities<accu_len)
return all(check) and sum((a.arity[a.in_type] for a in p)) + 1 == len(p) and (len(p) > 0) |
def parsers(self):
    """Metadata item name to parser function mapping."""
    list_parser = self._parse_list
    file_parser = self._parse_file
    # Build the mapping incrementally; keys are inserted in the same
    # order as before, in case callers rely on iteration order.
    mapping = {}
    mapping['platforms'] = list_parser
    mapping['keywords'] = list_parser
    mapping['provides'] = list_parser
    mapping['requires'] = self._deprecated_config_handler(
        list_parser,
        "The requires parameter is deprecated, please use "
        "install_requires for runtime dependencies.",
        DeprecationWarning)
    mapping['obsoletes'] = list_parser
    mapping['classifiers'] = self._get_parser_compound(
        file_parser, list_parser)
    mapping['license'] = self._exclude_files_parser('license')
    mapping['description'] = file_parser
    mapping['long_description'] = file_parser
    mapping['version'] = self._parse_version
    mapping['project_urls'] = self._parse_dict
    return mapping
constant[Metadata item name to parser function mapping.]
variable[parse_list] assign[=] name[self]._parse_list
variable[parse_file] assign[=] name[self]._parse_file
variable[parse_dict] assign[=] name[self]._parse_dict
variable[exclude_files_parser] assign[=] name[self]._exclude_files_parser
return[dictionary[[<ast.Constant object at 0x7da1b1b36c50>, <ast.Constant object at 0x7da1b1b35a80>, <ast.Constant object at 0x7da1b1b35420>, <ast.Constant object at 0x7da1b1b34550>, <ast.Constant object at 0x7da1b1b35ab0>, <ast.Constant object at 0x7da1b1b359f0>, <ast.Constant object at 0x7da1b1b344c0>, <ast.Constant object at 0x7da1b1b37430>, <ast.Constant object at 0x7da1b1b34850>, <ast.Constant object at 0x7da1b1b35fc0>, <ast.Constant object at 0x7da1b1b372e0>], [<ast.Name object at 0x7da1b1b351b0>, <ast.Name object at 0x7da1b1b36c80>, <ast.Name object at 0x7da1b1b35630>, <ast.Call object at 0x7da1b1b34a00>, <ast.Name object at 0x7da1b1b36500>, <ast.Call object at 0x7da1b1b34190>, <ast.Call object at 0x7da1b1b369b0>, <ast.Name object at 0x7da1b1b35960>, <ast.Name object at 0x7da1b1b35510>, <ast.Attribute object at 0x7da1b1b361a0>, <ast.Name object at 0x7da1b1b35e10>]]] | keyword[def] identifier[parsers] ( identifier[self] ):
literal[string]
identifier[parse_list] = identifier[self] . identifier[_parse_list]
identifier[parse_file] = identifier[self] . identifier[_parse_file]
identifier[parse_dict] = identifier[self] . identifier[_parse_dict]
identifier[exclude_files_parser] = identifier[self] . identifier[_exclude_files_parser]
keyword[return] {
literal[string] : identifier[parse_list] ,
literal[string] : identifier[parse_list] ,
literal[string] : identifier[parse_list] ,
literal[string] : identifier[self] . identifier[_deprecated_config_handler] (
identifier[parse_list] ,
literal[string]
literal[string] ,
identifier[DeprecationWarning] ),
literal[string] : identifier[parse_list] ,
literal[string] : identifier[self] . identifier[_get_parser_compound] ( identifier[parse_file] , identifier[parse_list] ),
literal[string] : identifier[exclude_files_parser] ( literal[string] ),
literal[string] : identifier[parse_file] ,
literal[string] : identifier[parse_file] ,
literal[string] : identifier[self] . identifier[_parse_version] ,
literal[string] : identifier[parse_dict] ,
} | def parsers(self):
"""Metadata item name to parser function mapping."""
parse_list = self._parse_list
parse_file = self._parse_file
parse_dict = self._parse_dict
exclude_files_parser = self._exclude_files_parser
return {'platforms': parse_list, 'keywords': parse_list, 'provides': parse_list, 'requires': self._deprecated_config_handler(parse_list, 'The requires parameter is deprecated, please use install_requires for runtime dependencies.', DeprecationWarning), 'obsoletes': parse_list, 'classifiers': self._get_parser_compound(parse_file, parse_list), 'license': exclude_files_parser('license'), 'description': parse_file, 'long_description': parse_file, 'version': self._parse_version, 'project_urls': parse_dict} |
def get_email_template(name, language=''):
    """
    Function that returns an email template instance, from cache or DB.
    """
    # Caching is used only when both settings allow it (both default on).
    caching_enabled = getattr(settings, 'POST_OFFICE_CACHE', True)
    if caching_enabled:
        caching_enabled = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True)

    if not caching_enabled:
        return EmailTemplate.objects.get(name=name, language=language)

    cache_key = '%s:%s' % (name, language)
    template = cache.get(cache_key)
    if template is None:
        # Cache miss: load from the database and remember the result.
        template = EmailTemplate.objects.get(name=name, language=language)
        cache.set(cache_key, template)
    return template
constant[
Function that returns an email template instance, from cache or DB.
]
variable[use_cache] assign[=] call[name[getattr], parameter[name[settings], constant[POST_OFFICE_CACHE], constant[True]]]
if name[use_cache] begin[:]
variable[use_cache] assign[=] call[name[getattr], parameter[name[settings], constant[POST_OFFICE_TEMPLATE_CACHE], constant[True]]]
if <ast.UnaryOp object at 0x7da2041d8280> begin[:]
return[call[name[EmailTemplate].objects.get, parameter[]]] | keyword[def] identifier[get_email_template] ( identifier[name] , identifier[language] = literal[string] ):
literal[string]
identifier[use_cache] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[True] )
keyword[if] identifier[use_cache] :
identifier[use_cache] = identifier[getattr] ( identifier[settings] , literal[string] , keyword[True] )
keyword[if] keyword[not] identifier[use_cache] :
keyword[return] identifier[EmailTemplate] . identifier[objects] . identifier[get] ( identifier[name] = identifier[name] , identifier[language] = identifier[language] )
keyword[else] :
identifier[composite_name] = literal[string] %( identifier[name] , identifier[language] )
identifier[email_template] = identifier[cache] . identifier[get] ( identifier[composite_name] )
keyword[if] identifier[email_template] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[email_template]
keyword[else] :
identifier[email_template] = identifier[EmailTemplate] . identifier[objects] . identifier[get] ( identifier[name] = identifier[name] ,
identifier[language] = identifier[language] )
identifier[cache] . identifier[set] ( identifier[composite_name] , identifier[email_template] )
keyword[return] identifier[email_template] | def get_email_template(name, language=''):
"""
Function that returns an email template instance, from cache or DB.
"""
use_cache = getattr(settings, 'POST_OFFICE_CACHE', True)
if use_cache:
use_cache = getattr(settings, 'POST_OFFICE_TEMPLATE_CACHE', True) # depends on [control=['if'], data=[]]
if not use_cache:
return EmailTemplate.objects.get(name=name, language=language) # depends on [control=['if'], data=[]]
else:
composite_name = '%s:%s' % (name, language)
email_template = cache.get(composite_name)
if email_template is not None:
return email_template # depends on [control=['if'], data=['email_template']]
else:
email_template = EmailTemplate.objects.get(name=name, language=language)
cache.set(composite_name, email_template)
return email_template |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.