code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def delete(self, story, params={}, **options):
"""Deletes a story. A user can only delete stories they have created. Returns an empty data record.
Parameters
----------
story : {Id} Globally unique identifier for the story.
"""
path = "/stories/%s" % (story)
return self.client.delete(path, params, **options) | def function[delete, parameter[self, story, params]]:
constant[Deletes a story. A user can only delete stories they have created. Returns an empty data record.
Parameters
----------
story : {Id} Globally unique identifier for the story.
]
variable[path] assign[=] binary_operation[constant[/stories/%s] <ast.Mod object at 0x7da2590d6920> name[story]]
return[call[name[self].client.delete, parameter[name[path], name[params]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[story] , identifier[params] ={},** identifier[options] ):
literal[string]
identifier[path] = literal[string] %( identifier[story] )
keyword[return] identifier[self] . identifier[client] . identifier[delete] ( identifier[path] , identifier[params] ,** identifier[options] ) | def delete(self, story, params={}, **options):
"""Deletes a story. A user can only delete stories they have created. Returns an empty data record.
Parameters
----------
story : {Id} Globally unique identifier for the story.
"""
path = '/stories/%s' % story
return self.client.delete(path, params, **options) |
def b_pathOK(self, al_path):
"""
Checks if the absolute path specified in the al_path
is valid for current tree
"""
b_OK = True
try: self.l_allPaths.index(al_path)
except: b_OK = False
return b_OK | def function[b_pathOK, parameter[self, al_path]]:
constant[
Checks if the absolute path specified in the al_path
is valid for current tree
]
variable[b_OK] assign[=] constant[True]
<ast.Try object at 0x7da1b0a80be0>
return[name[b_OK]] | keyword[def] identifier[b_pathOK] ( identifier[self] , identifier[al_path] ):
literal[string]
identifier[b_OK] = keyword[True]
keyword[try] : identifier[self] . identifier[l_allPaths] . identifier[index] ( identifier[al_path] )
keyword[except] : identifier[b_OK] = keyword[False]
keyword[return] identifier[b_OK] | def b_pathOK(self, al_path):
"""
Checks if the absolute path specified in the al_path
is valid for current tree
"""
b_OK = True
try:
self.l_allPaths.index(al_path) # depends on [control=['try'], data=[]]
except:
b_OK = False # depends on [control=['except'], data=[]]
return b_OK |
def Tag(env, target, source, *more_tags, **kw_tags):
""" Tag a file with the given arguments, just sets the accordingly named
attribute on the file object.
TODO: FIXME
"""
if not target:
target=source
first_tag=None
else:
first_tag=source
if first_tag:
kw_tags[first_tag[0]] = ''
if len(kw_tags) == 0 and len(more_tags) == 0:
raise UserError("No tags given.")
# XXX: sanity checks
for x in more_tags:
kw_tags[x] = ''
if not SCons.Util.is_List(target):
target=[target]
else:
# hmm, sometimes the target list, is a list of a list
# make sure it is flattened prior to processing.
# TODO: perhaps some bug ?!?
target=env.Flatten(target)
for t in target:
for (k,v) in kw_tags.items():
# all file tags have to start with PACKAGING_, so we can later
# differentiate between "normal" object attributes and the
# packaging attributes. As the user should not be bothered with
# that, the prefix will be added here if missing.
if k[:10] != 'PACKAGING_':
k='PACKAGING_'+k
t.Tag(k, v) | def function[Tag, parameter[env, target, source]]:
constant[ Tag a file with the given arguments, just sets the accordingly named
attribute on the file object.
TODO: FIXME
]
if <ast.UnaryOp object at 0x7da18f813040> begin[:]
variable[target] assign[=] name[source]
variable[first_tag] assign[=] constant[None]
if name[first_tag] begin[:]
call[name[kw_tags]][call[name[first_tag]][constant[0]]] assign[=] constant[]
if <ast.BoolOp object at 0x7da18f812680> begin[:]
<ast.Raise object at 0x7da20e954340>
for taget[name[x]] in starred[name[more_tags]] begin[:]
call[name[kw_tags]][name[x]] assign[=] constant[]
if <ast.UnaryOp object at 0x7da20e956c20> begin[:]
variable[target] assign[=] list[[<ast.Name object at 0x7da20e9551b0>]]
for taget[name[t]] in starred[name[target]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da20e957c40>, <ast.Name object at 0x7da20e9573a0>]]] in starred[call[name[kw_tags].items, parameter[]]] begin[:]
if compare[call[name[k]][<ast.Slice object at 0x7da20e957df0>] not_equal[!=] constant[PACKAGING_]] begin[:]
variable[k] assign[=] binary_operation[constant[PACKAGING_] + name[k]]
call[name[t].Tag, parameter[name[k], name[v]]] | keyword[def] identifier[Tag] ( identifier[env] , identifier[target] , identifier[source] ,* identifier[more_tags] ,** identifier[kw_tags] ):
literal[string]
keyword[if] keyword[not] identifier[target] :
identifier[target] = identifier[source]
identifier[first_tag] = keyword[None]
keyword[else] :
identifier[first_tag] = identifier[source]
keyword[if] identifier[first_tag] :
identifier[kw_tags] [ identifier[first_tag] [ literal[int] ]]= literal[string]
keyword[if] identifier[len] ( identifier[kw_tags] )== literal[int] keyword[and] identifier[len] ( identifier[more_tags] )== literal[int] :
keyword[raise] identifier[UserError] ( literal[string] )
keyword[for] identifier[x] keyword[in] identifier[more_tags] :
identifier[kw_tags] [ identifier[x] ]= literal[string]
keyword[if] keyword[not] identifier[SCons] . identifier[Util] . identifier[is_List] ( identifier[target] ):
identifier[target] =[ identifier[target] ]
keyword[else] :
identifier[target] = identifier[env] . identifier[Flatten] ( identifier[target] )
keyword[for] identifier[t] keyword[in] identifier[target] :
keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[kw_tags] . identifier[items] ():
keyword[if] identifier[k] [: literal[int] ]!= literal[string] :
identifier[k] = literal[string] + identifier[k]
identifier[t] . identifier[Tag] ( identifier[k] , identifier[v] ) | def Tag(env, target, source, *more_tags, **kw_tags):
""" Tag a file with the given arguments, just sets the accordingly named
attribute on the file object.
TODO: FIXME
"""
if not target:
target = source
first_tag = None # depends on [control=['if'], data=[]]
else:
first_tag = source
if first_tag:
kw_tags[first_tag[0]] = '' # depends on [control=['if'], data=[]]
if len(kw_tags) == 0 and len(more_tags) == 0:
raise UserError('No tags given.') # depends on [control=['if'], data=[]]
# XXX: sanity checks
for x in more_tags:
kw_tags[x] = '' # depends on [control=['for'], data=['x']]
if not SCons.Util.is_List(target):
target = [target] # depends on [control=['if'], data=[]]
else:
# hmm, sometimes the target list, is a list of a list
# make sure it is flattened prior to processing.
# TODO: perhaps some bug ?!?
target = env.Flatten(target)
for t in target:
for (k, v) in kw_tags.items():
# all file tags have to start with PACKAGING_, so we can later
# differentiate between "normal" object attributes and the
# packaging attributes. As the user should not be bothered with
# that, the prefix will be added here if missing.
if k[:10] != 'PACKAGING_':
k = 'PACKAGING_' + k # depends on [control=['if'], data=[]]
t.Tag(k, v) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['t']] |
def likelihood(self, samples):
"""likelihood
Parameters
----------
sample: list
sample is a (1 * NUM_OF_FUNCTIONS) matrix, representing{w1, w2, ... wk}
Returns
-------
float
likelihood
"""
ret = np.ones(NUM_OF_INSTANCE)
for i in range(NUM_OF_INSTANCE):
for j in range(1, self.point_num + 1):
ret[i] *= self.normal_distribution(j, samples[i])
return ret | def function[likelihood, parameter[self, samples]]:
constant[likelihood
Parameters
----------
sample: list
sample is a (1 * NUM_OF_FUNCTIONS) matrix, representing{w1, w2, ... wk}
Returns
-------
float
likelihood
]
variable[ret] assign[=] call[name[np].ones, parameter[name[NUM_OF_INSTANCE]]]
for taget[name[i]] in starred[call[name[range], parameter[name[NUM_OF_INSTANCE]]]] begin[:]
for taget[name[j]] in starred[call[name[range], parameter[constant[1], binary_operation[name[self].point_num + constant[1]]]]] begin[:]
<ast.AugAssign object at 0x7da18bccb0d0>
return[name[ret]] | keyword[def] identifier[likelihood] ( identifier[self] , identifier[samples] ):
literal[string]
identifier[ret] = identifier[np] . identifier[ones] ( identifier[NUM_OF_INSTANCE] )
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[NUM_OF_INSTANCE] ):
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[point_num] + literal[int] ):
identifier[ret] [ identifier[i] ]*= identifier[self] . identifier[normal_distribution] ( identifier[j] , identifier[samples] [ identifier[i] ])
keyword[return] identifier[ret] | def likelihood(self, samples):
"""likelihood
Parameters
----------
sample: list
sample is a (1 * NUM_OF_FUNCTIONS) matrix, representing{w1, w2, ... wk}
Returns
-------
float
likelihood
"""
ret = np.ones(NUM_OF_INSTANCE)
for i in range(NUM_OF_INSTANCE):
for j in range(1, self.point_num + 1):
ret[i] *= self.normal_distribution(j, samples[i]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']]
return ret |
def receive(self, x, mesh_axis, source_pcoord):
"""Collective receive in groups.
Each group contains the processors that differ only in mesh_axis.
```python
group_size = self.shape[mesh_axis].size
```
Args:
x: a LaidOutTensor
mesh_axis: an integer
source_pcoord: a list of optional integers. Each element is either None
or an integer in [0, group_size). If source_pcoord[k] is None, then the
output for the k-th processor in each group is a zero tensor. If
source_pcoord[k] is not None, then the output for the k-th processor in
each group is equal to the input for the source_pcoord[k]-th processor
in that group.
Returns:
a LaidOutTensor
"""
x = x.to_laid_out_tensor()
shape = x.tensor_list[0].shape
dtype = x.tensor_list[0].dtype
def _collective_receive(tensor_list, device_list):
ret = []
for pcoord, device in enumerate(device_list):
with tf.device(device):
if source_pcoord[pcoord] is None:
ret.append(tf.zeros(shape, dtype))
else:
ret.append(tf.identity(tensor_list[source_pcoord[pcoord]]))
return ret
return self._collective_with_groups(
x, [mesh_axis], _collective_receive) | def function[receive, parameter[self, x, mesh_axis, source_pcoord]]:
constant[Collective receive in groups.
Each group contains the processors that differ only in mesh_axis.
```python
group_size = self.shape[mesh_axis].size
```
Args:
x: a LaidOutTensor
mesh_axis: an integer
source_pcoord: a list of optional integers. Each element is either None
or an integer in [0, group_size). If source_pcoord[k] is None, then the
output for the k-th processor in each group is a zero tensor. If
source_pcoord[k] is not None, then the output for the k-th processor in
each group is equal to the input for the source_pcoord[k]-th processor
in that group.
Returns:
a LaidOutTensor
]
variable[x] assign[=] call[name[x].to_laid_out_tensor, parameter[]]
variable[shape] assign[=] call[name[x].tensor_list][constant[0]].shape
variable[dtype] assign[=] call[name[x].tensor_list][constant[0]].dtype
def function[_collective_receive, parameter[tensor_list, device_list]]:
variable[ret] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c9904c0>, <ast.Name object at 0x7da20c9912a0>]]] in starred[call[name[enumerate], parameter[name[device_list]]]] begin[:]
with call[name[tf].device, parameter[name[device]]] begin[:]
if compare[call[name[source_pcoord]][name[pcoord]] is constant[None]] begin[:]
call[name[ret].append, parameter[call[name[tf].zeros, parameter[name[shape], name[dtype]]]]]
return[name[ret]]
return[call[name[self]._collective_with_groups, parameter[name[x], list[[<ast.Name object at 0x7da20c990430>]], name[_collective_receive]]]] | keyword[def] identifier[receive] ( identifier[self] , identifier[x] , identifier[mesh_axis] , identifier[source_pcoord] ):
literal[string]
identifier[x] = identifier[x] . identifier[to_laid_out_tensor] ()
identifier[shape] = identifier[x] . identifier[tensor_list] [ literal[int] ]. identifier[shape]
identifier[dtype] = identifier[x] . identifier[tensor_list] [ literal[int] ]. identifier[dtype]
keyword[def] identifier[_collective_receive] ( identifier[tensor_list] , identifier[device_list] ):
identifier[ret] =[]
keyword[for] identifier[pcoord] , identifier[device] keyword[in] identifier[enumerate] ( identifier[device_list] ):
keyword[with] identifier[tf] . identifier[device] ( identifier[device] ):
keyword[if] identifier[source_pcoord] [ identifier[pcoord] ] keyword[is] keyword[None] :
identifier[ret] . identifier[append] ( identifier[tf] . identifier[zeros] ( identifier[shape] , identifier[dtype] ))
keyword[else] :
identifier[ret] . identifier[append] ( identifier[tf] . identifier[identity] ( identifier[tensor_list] [ identifier[source_pcoord] [ identifier[pcoord] ]]))
keyword[return] identifier[ret]
keyword[return] identifier[self] . identifier[_collective_with_groups] (
identifier[x] ,[ identifier[mesh_axis] ], identifier[_collective_receive] ) | def receive(self, x, mesh_axis, source_pcoord):
"""Collective receive in groups.
Each group contains the processors that differ only in mesh_axis.
```python
group_size = self.shape[mesh_axis].size
```
Args:
x: a LaidOutTensor
mesh_axis: an integer
source_pcoord: a list of optional integers. Each element is either None
or an integer in [0, group_size). If source_pcoord[k] is None, then the
output for the k-th processor in each group is a zero tensor. If
source_pcoord[k] is not None, then the output for the k-th processor in
each group is equal to the input for the source_pcoord[k]-th processor
in that group.
Returns:
a LaidOutTensor
"""
x = x.to_laid_out_tensor()
shape = x.tensor_list[0].shape
dtype = x.tensor_list[0].dtype
def _collective_receive(tensor_list, device_list):
ret = []
for (pcoord, device) in enumerate(device_list):
with tf.device(device):
if source_pcoord[pcoord] is None:
ret.append(tf.zeros(shape, dtype)) # depends on [control=['if'], data=[]]
else:
ret.append(tf.identity(tensor_list[source_pcoord[pcoord]])) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=[]]
return ret
return self._collective_with_groups(x, [mesh_axis], _collective_receive) |
def reset_from_xml_string(self, xml_string):
"""Reloads the environment from an XML description of the environment."""
# if there is an active viewer window, destroy it
self.close()
# load model from xml
self.mjpy_model = load_model_from_xml(xml_string)
self.sim = MjSim(self.mjpy_model)
self.initialize_time(self.control_freq)
if self.has_renderer and self.viewer is None:
self.viewer = MujocoPyRenderer(self.sim)
self.viewer.viewer.vopt.geomgroup[0] = (
1 if self.render_collision_mesh else 0
)
self.viewer.viewer.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
# hiding the overlay speeds up rendering significantly
self.viewer.viewer._hide_overlay = True
elif self.has_offscreen_renderer:
render_context = MjRenderContextOffscreen(self.sim)
render_context.vopt.geomgroup[0] = 1 if self.render_collision_mesh else 0
render_context.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
self.sim.add_render_context(render_context)
self.sim_state_initial = self.sim.get_state()
self._get_reference()
self.cur_time = 0
self.timestep = 0
self.done = False
# necessary to refresh MjData
self.sim.forward() | def function[reset_from_xml_string, parameter[self, xml_string]]:
constant[Reloads the environment from an XML description of the environment.]
call[name[self].close, parameter[]]
name[self].mjpy_model assign[=] call[name[load_model_from_xml], parameter[name[xml_string]]]
name[self].sim assign[=] call[name[MjSim], parameter[name[self].mjpy_model]]
call[name[self].initialize_time, parameter[name[self].control_freq]]
if <ast.BoolOp object at 0x7da18dc9bb20> begin[:]
name[self].viewer assign[=] call[name[MujocoPyRenderer], parameter[name[self].sim]]
call[name[self].viewer.viewer.vopt.geomgroup][constant[0]] assign[=] <ast.IfExp object at 0x7da18dc99a50>
call[name[self].viewer.viewer.vopt.geomgroup][constant[1]] assign[=] <ast.IfExp object at 0x7da18dc995d0>
name[self].viewer.viewer._hide_overlay assign[=] constant[True]
name[self].sim_state_initial assign[=] call[name[self].sim.get_state, parameter[]]
call[name[self]._get_reference, parameter[]]
name[self].cur_time assign[=] constant[0]
name[self].timestep assign[=] constant[0]
name[self].done assign[=] constant[False]
call[name[self].sim.forward, parameter[]] | keyword[def] identifier[reset_from_xml_string] ( identifier[self] , identifier[xml_string] ):
literal[string]
identifier[self] . identifier[close] ()
identifier[self] . identifier[mjpy_model] = identifier[load_model_from_xml] ( identifier[xml_string] )
identifier[self] . identifier[sim] = identifier[MjSim] ( identifier[self] . identifier[mjpy_model] )
identifier[self] . identifier[initialize_time] ( identifier[self] . identifier[control_freq] )
keyword[if] identifier[self] . identifier[has_renderer] keyword[and] identifier[self] . identifier[viewer] keyword[is] keyword[None] :
identifier[self] . identifier[viewer] = identifier[MujocoPyRenderer] ( identifier[self] . identifier[sim] )
identifier[self] . identifier[viewer] . identifier[viewer] . identifier[vopt] . identifier[geomgroup] [ literal[int] ]=(
literal[int] keyword[if] identifier[self] . identifier[render_collision_mesh] keyword[else] literal[int]
)
identifier[self] . identifier[viewer] . identifier[viewer] . identifier[vopt] . identifier[geomgroup] [ literal[int] ]= literal[int] keyword[if] identifier[self] . identifier[render_visual_mesh] keyword[else] literal[int]
identifier[self] . identifier[viewer] . identifier[viewer] . identifier[_hide_overlay] = keyword[True]
keyword[elif] identifier[self] . identifier[has_offscreen_renderer] :
identifier[render_context] = identifier[MjRenderContextOffscreen] ( identifier[self] . identifier[sim] )
identifier[render_context] . identifier[vopt] . identifier[geomgroup] [ literal[int] ]= literal[int] keyword[if] identifier[self] . identifier[render_collision_mesh] keyword[else] literal[int]
identifier[render_context] . identifier[vopt] . identifier[geomgroup] [ literal[int] ]= literal[int] keyword[if] identifier[self] . identifier[render_visual_mesh] keyword[else] literal[int]
identifier[self] . identifier[sim] . identifier[add_render_context] ( identifier[render_context] )
identifier[self] . identifier[sim_state_initial] = identifier[self] . identifier[sim] . identifier[get_state] ()
identifier[self] . identifier[_get_reference] ()
identifier[self] . identifier[cur_time] = literal[int]
identifier[self] . identifier[timestep] = literal[int]
identifier[self] . identifier[done] = keyword[False]
identifier[self] . identifier[sim] . identifier[forward] () | def reset_from_xml_string(self, xml_string):
"""Reloads the environment from an XML description of the environment."""
# if there is an active viewer window, destroy it
self.close()
# load model from xml
self.mjpy_model = load_model_from_xml(xml_string)
self.sim = MjSim(self.mjpy_model)
self.initialize_time(self.control_freq)
if self.has_renderer and self.viewer is None:
self.viewer = MujocoPyRenderer(self.sim)
self.viewer.viewer.vopt.geomgroup[0] = 1 if self.render_collision_mesh else 0
self.viewer.viewer.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
# hiding the overlay speeds up rendering significantly
self.viewer.viewer._hide_overlay = True # depends on [control=['if'], data=[]]
elif self.has_offscreen_renderer:
render_context = MjRenderContextOffscreen(self.sim)
render_context.vopt.geomgroup[0] = 1 if self.render_collision_mesh else 0
render_context.vopt.geomgroup[1] = 1 if self.render_visual_mesh else 0
self.sim.add_render_context(render_context) # depends on [control=['if'], data=[]]
self.sim_state_initial = self.sim.get_state()
self._get_reference()
self.cur_time = 0
self.timestep = 0
self.done = False
# necessary to refresh MjData
self.sim.forward() |
def append(self, event, force = False):
'''
Append an event to queue. The events are classified and appended to sub-queues
:param event: input event
:param force: if True, the event is appended even if the queue is full
:returns: None if appended successfully, or a matcher to match a QueueCanWriteEvent otherwise
'''
if self.tree is None:
if self.parent is None:
raise IndexError('The queue is removed')
else:
return self.parent.parent.append(event, force)
q = self.tree.matchfirst(event)
return q.append(event, force) | def function[append, parameter[self, event, force]]:
constant[
Append an event to queue. The events are classified and appended to sub-queues
:param event: input event
:param force: if True, the event is appended even if the queue is full
:returns: None if appended successfully, or a matcher to match a QueueCanWriteEvent otherwise
]
if compare[name[self].tree is constant[None]] begin[:]
if compare[name[self].parent is constant[None]] begin[:]
<ast.Raise object at 0x7da2041d9300>
variable[q] assign[=] call[name[self].tree.matchfirst, parameter[name[event]]]
return[call[name[q].append, parameter[name[event], name[force]]]] | keyword[def] identifier[append] ( identifier[self] , identifier[event] , identifier[force] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[tree] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[parent] keyword[is] keyword[None] :
keyword[raise] identifier[IndexError] ( literal[string] )
keyword[else] :
keyword[return] identifier[self] . identifier[parent] . identifier[parent] . identifier[append] ( identifier[event] , identifier[force] )
identifier[q] = identifier[self] . identifier[tree] . identifier[matchfirst] ( identifier[event] )
keyword[return] identifier[q] . identifier[append] ( identifier[event] , identifier[force] ) | def append(self, event, force=False):
"""
Append an event to queue. The events are classified and appended to sub-queues
:param event: input event
:param force: if True, the event is appended even if the queue is full
:returns: None if appended successfully, or a matcher to match a QueueCanWriteEvent otherwise
"""
if self.tree is None:
if self.parent is None:
raise IndexError('The queue is removed') # depends on [control=['if'], data=[]]
else:
return self.parent.parent.append(event, force) # depends on [control=['if'], data=[]]
q = self.tree.matchfirst(event)
return q.append(event, force) |
def set(self, key, value):
"""Set a value in the cache."""
value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL)
redis_conn.set(key, value) | def function[set, parameter[self, key, value]]:
constant[Set a value in the cache.]
variable[value] assign[=] call[name[pickle].dumps, parameter[name[value]]]
call[name[redis_conn].set, parameter[name[key], name[value]]] | keyword[def] identifier[set] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
identifier[value] = identifier[pickle] . identifier[dumps] ( identifier[value] , identifier[protocol] = identifier[constants] . identifier[PICKLE_PROTOCOL] )
identifier[redis_conn] . identifier[set] ( identifier[key] , identifier[value] ) | def set(self, key, value):
"""Set a value in the cache."""
value = pickle.dumps(value, protocol=constants.PICKLE_PROTOCOL)
redis_conn.set(key, value) |
def makefile(self):
"""Generate the documentation Makefile.
Returns:
(str): the contents of the `Makefile`.
"""
return self.env.get_template('Makefile.j2').render(
metadata=self.metadata,
package=self.package) | def function[makefile, parameter[self]]:
constant[Generate the documentation Makefile.
Returns:
(str): the contents of the `Makefile`.
]
return[call[call[name[self].env.get_template, parameter[constant[Makefile.j2]]].render, parameter[]]] | keyword[def] identifier[makefile] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[env] . identifier[get_template] ( literal[string] ). identifier[render] (
identifier[metadata] = identifier[self] . identifier[metadata] ,
identifier[package] = identifier[self] . identifier[package] ) | def makefile(self):
"""Generate the documentation Makefile.
Returns:
(str): the contents of the `Makefile`.
"""
return self.env.get_template('Makefile.j2').render(metadata=self.metadata, package=self.package) |
def encode_eternal_jwt_token(self, user, **custom_claims):
"""
This utility function encodes a jwt token that never expires
.. note:: This should be used sparingly since the token could become
a security concern if it is ever lost. If you use this
method, you should be sure that your application also
implements a blacklist so that a given token can be blocked
should it be lost or become a security concern
"""
return self.encode_jwt_token(
user,
override_access_lifespan=VITAM_AETERNUM,
override_refresh_lifespan=VITAM_AETERNUM,
**custom_claims
) | def function[encode_eternal_jwt_token, parameter[self, user]]:
constant[
This utility function encodes a jwt token that never expires
.. note:: This should be used sparingly since the token could become
a security concern if it is ever lost. If you use this
method, you should be sure that your application also
implements a blacklist so that a given token can be blocked
should it be lost or become a security concern
]
return[call[name[self].encode_jwt_token, parameter[name[user]]]] | keyword[def] identifier[encode_eternal_jwt_token] ( identifier[self] , identifier[user] ,** identifier[custom_claims] ):
literal[string]
keyword[return] identifier[self] . identifier[encode_jwt_token] (
identifier[user] ,
identifier[override_access_lifespan] = identifier[VITAM_AETERNUM] ,
identifier[override_refresh_lifespan] = identifier[VITAM_AETERNUM] ,
** identifier[custom_claims]
) | def encode_eternal_jwt_token(self, user, **custom_claims):
"""
This utility function encodes a jwt token that never expires
.. note:: This should be used sparingly since the token could become
a security concern if it is ever lost. If you use this
method, you should be sure that your application also
implements a blacklist so that a given token can be blocked
should it be lost or become a security concern
"""
return self.encode_jwt_token(user, override_access_lifespan=VITAM_AETERNUM, override_refresh_lifespan=VITAM_AETERNUM, **custom_claims) |
def get_slotname(slot, host=None, admin_username=None, admin_password=None):
'''
Get the name of a slot number in the chassis.
slot
The number of the slot for which to obtain the name.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
CLI Example:
.. code-block:: bash
salt-call --local dracr.get_slotname 0 host=111.222.333.444
admin_username=root admin_password=secret
'''
slots = list_slotnames(host=host, admin_username=admin_username,
admin_password=admin_password)
# The keys for this dictionary are strings, not integers, so convert the
# argument to a string
slot = six.text_type(slot)
return slots[slot]['slotname'] | def function[get_slotname, parameter[slot, host, admin_username, admin_password]]:
constant[
Get the name of a slot number in the chassis.
slot
The number of the slot for which to obtain the name.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
CLI Example:
.. code-block:: bash
salt-call --local dracr.get_slotname 0 host=111.222.333.444
admin_username=root admin_password=secret
]
variable[slots] assign[=] call[name[list_slotnames], parameter[]]
variable[slot] assign[=] call[name[six].text_type, parameter[name[slot]]]
return[call[call[name[slots]][name[slot]]][constant[slotname]]] | keyword[def] identifier[get_slotname] ( identifier[slot] , identifier[host] = keyword[None] , identifier[admin_username] = keyword[None] , identifier[admin_password] = keyword[None] ):
literal[string]
identifier[slots] = identifier[list_slotnames] ( identifier[host] = identifier[host] , identifier[admin_username] = identifier[admin_username] ,
identifier[admin_password] = identifier[admin_password] )
identifier[slot] = identifier[six] . identifier[text_type] ( identifier[slot] )
keyword[return] identifier[slots] [ identifier[slot] ][ literal[string] ] | def get_slotname(slot, host=None, admin_username=None, admin_password=None):
"""
Get the name of a slot number in the chassis.
slot
The number of the slot for which to obtain the name.
host
The chassis host.
admin_username
The username used to access the chassis.
admin_password
The password used to access the chassis.
CLI Example:
.. code-block:: bash
salt-call --local dracr.get_slotname 0 host=111.222.333.444
admin_username=root admin_password=secret
"""
slots = list_slotnames(host=host, admin_username=admin_username, admin_password=admin_password)
# The keys for this dictionary are strings, not integers, so convert the
# argument to a string
slot = six.text_type(slot)
return slots[slot]['slotname'] |
def update_port_precommit(self, context):
    """Adds port profile and vlan information to the DB.

    Assign a port profile to this port. To do that:
    1. Get the vlan_id associated with the bound segment
    2. Check if a port profile already exists for this vlan_id
    3. If yes, associate that port profile with this port.
    4. If no, create a new port profile with this vlan_id and
    associate with this port

    :param context: ML2 PortContext carrying the current port dict and
        its binding details.
    """
    # VNIC type defaults to "normal" (virtio) when the port dict does
    # not carry an explicit binding:vnic_type.
    vnic_type = context.current.get(bc.portbindings.VNIC_TYPE,
                                    bc.portbindings.VNIC_NORMAL)
    profile = context.current.get(bc.portbindings.PROFILE, {})
    host_id = self._get_host_id(
        context.current.get(bc.portbindings.HOST_ID))
    # Guard clauses: without a host, a VLAN and a managing UCSM there is
    # nothing to record for this port.
    if not host_id:
        LOG.warning('Host id from port context is None. '
                    'Ignoring this port')
        return
    vlan_id = self._get_vlanid(context)
    if not vlan_id:
        LOG.warning('Vlan_id is None. Ignoring this port')
        return
    ucsm_ip = self.driver.get_ucsm_ip_for_host(host_id)
    if not ucsm_ip:
        LOG.info('Host %s is not controlled by any known '
                 'UCS Manager.', host_id)
        return
    if not self.driver.check_vnic_type_and_vendor_info(vnic_type,
                                                       profile):
        # This is a neutron virtio port.
        # If VNIC templates are configured, that config would
        # take precedence and the VLAN is added to the VNIC template.
        physnet = self._get_physnet(context)
        if not physnet:
            LOG.debug('physnet is None. Not modifying VNIC '
                      'Template config')
        else:
            # Check if VNIC template is configured for this physnet
            ucsm = CONF.ml2_cisco_ucsm.ucsms[ucsm_ip]
            vnic_template = ucsm.vnic_template_list.get(physnet)
            if vnic_template:
                LOG.debug('vnic_template %s', vnic_template)
                self.ucsm_db.add_vnic_template(vlan_id, ucsm_ip,
                                               vnic_template.name, physnet)
                return
            else:
                LOG.debug('VNIC Template not configured for '
                          'physnet %s', physnet)
        # In the absence of VNIC Templates, VLAN is directly added
        # to vNIC(s) on the SP Template.
        # Check if SP Template config has been provided. If so, find
        # the UCSM that controls this host and the Service Profile
        # Template for this host.
        sp_template_info = (CONF.ml2_cisco_ucsm.ucsms[
            ucsm_ip].sp_template_list.get(host_id))
        if sp_template_info:
            LOG.debug('SP Template: %s, VLAN_id: %d',
                      sp_template_info.name, vlan_id)
            self.ucsm_db.add_service_profile_template(
                vlan_id, sp_template_info.name, ucsm_ip)
            return
    # NOTE(review): a virtio port with neither VNIC-template nor
    # SP-template config falls through to the VM-FEX checks below —
    # confirm this fall-through is intentional.
    # If this is an Intel SR-IOV vnic, then no need to create port
    # profile on the UCS manager. So no need to update the DB.
    if not self.driver.is_vmfex_port(profile):
        LOG.debug('This is a SR-IOV port and hence not updating DB.')
        return
    # This is a Cisco VM-FEX port
    p_profile_name = self.make_profile_name(vlan_id)
    LOG.debug('Port Profile: %s for VLAN_id: %d', p_profile_name, vlan_id)
    # Create a new port profile entry in the db
    self.ucsm_db.add_port_profile(p_profile_name, vlan_id, ucsm_ip)
constant[Adds port profile and vlan information to the DB.
Assign a port profile to this port. To do that:
1. Get the vlan_id associated with the bound segment
2. Check if a port profile already exists for this vlan_id
3. If yes, associate that port profile with this port.
4. If no, create a new port profile with this vlan_id and
associate with this port
]
variable[vnic_type] assign[=] call[name[context].current.get, parameter[name[bc].portbindings.VNIC_TYPE, name[bc].portbindings.VNIC_NORMAL]]
variable[profile] assign[=] call[name[context].current.get, parameter[name[bc].portbindings.PROFILE, dictionary[[], []]]]
variable[host_id] assign[=] call[name[self]._get_host_id, parameter[call[name[context].current.get, parameter[name[bc].portbindings.HOST_ID]]]]
if <ast.UnaryOp object at 0x7da1b1be6530> begin[:]
call[name[LOG].warning, parameter[constant[Host id from port context is None. Ignoring this port]]]
return[None]
variable[vlan_id] assign[=] call[name[self]._get_vlanid, parameter[name[context]]]
if <ast.UnaryOp object at 0x7da1b1be5ff0> begin[:]
call[name[LOG].warning, parameter[constant[Vlan_id is None. Ignoring this port]]]
return[None]
variable[ucsm_ip] assign[=] call[name[self].driver.get_ucsm_ip_for_host, parameter[name[host_id]]]
if <ast.UnaryOp object at 0x7da1b1be5840> begin[:]
call[name[LOG].info, parameter[constant[Host %s is not controlled by any known UCS Manager.], name[host_id]]]
return[None]
if <ast.UnaryOp object at 0x7da1b1be5f00> begin[:]
variable[physnet] assign[=] call[name[self]._get_physnet, parameter[name[context]]]
if <ast.UnaryOp object at 0x7da1b1be6260> begin[:]
call[name[LOG].debug, parameter[constant[physnet is None. Not modifying VNIC Template config]]]
variable[sp_template_info] assign[=] call[call[name[CONF].ml2_cisco_ucsm.ucsms][name[ucsm_ip]].sp_template_list.get, parameter[name[host_id]]]
if name[sp_template_info] begin[:]
call[name[LOG].debug, parameter[constant[SP Template: %s, VLAN_id: %d], name[sp_template_info].name, name[vlan_id]]]
call[name[self].ucsm_db.add_service_profile_template, parameter[name[vlan_id], name[sp_template_info].name, name[ucsm_ip]]]
return[None]
if <ast.UnaryOp object at 0x7da1b1b7c7f0> begin[:]
call[name[LOG].debug, parameter[constant[This is a SR-IOV port and hence not updating DB.]]]
return[None]
variable[p_profile_name] assign[=] call[name[self].make_profile_name, parameter[name[vlan_id]]]
call[name[LOG].debug, parameter[constant[Port Profile: %s for VLAN_id: %d], name[p_profile_name], name[vlan_id]]]
call[name[self].ucsm_db.add_port_profile, parameter[name[p_profile_name], name[vlan_id], name[ucsm_ip]]] | keyword[def] identifier[update_port_precommit] ( identifier[self] , identifier[context] ):
literal[string]
identifier[vnic_type] = identifier[context] . identifier[current] . identifier[get] ( identifier[bc] . identifier[portbindings] . identifier[VNIC_TYPE] ,
identifier[bc] . identifier[portbindings] . identifier[VNIC_NORMAL] )
identifier[profile] = identifier[context] . identifier[current] . identifier[get] ( identifier[bc] . identifier[portbindings] . identifier[PROFILE] ,{})
identifier[host_id] = identifier[self] . identifier[_get_host_id] (
identifier[context] . identifier[current] . identifier[get] ( identifier[bc] . identifier[portbindings] . identifier[HOST_ID] ))
keyword[if] keyword[not] identifier[host_id] :
identifier[LOG] . identifier[warning] ( literal[string]
literal[string] )
keyword[return]
identifier[vlan_id] = identifier[self] . identifier[_get_vlanid] ( identifier[context] )
keyword[if] keyword[not] identifier[vlan_id] :
identifier[LOG] . identifier[warning] ( literal[string] )
keyword[return]
identifier[ucsm_ip] = identifier[self] . identifier[driver] . identifier[get_ucsm_ip_for_host] ( identifier[host_id] )
keyword[if] keyword[not] identifier[ucsm_ip] :
identifier[LOG] . identifier[info] ( literal[string]
literal[string] , identifier[host_id] )
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[driver] . identifier[check_vnic_type_and_vendor_info] ( identifier[vnic_type] ,
identifier[profile] ):
identifier[physnet] = identifier[self] . identifier[_get_physnet] ( identifier[context] )
keyword[if] keyword[not] identifier[physnet] :
identifier[LOG] . identifier[debug] ( literal[string]
literal[string] )
keyword[else] :
identifier[ucsm] = identifier[CONF] . identifier[ml2_cisco_ucsm] . identifier[ucsms] [ identifier[ucsm_ip] ]
identifier[vnic_template] = identifier[ucsm] . identifier[vnic_template_list] . identifier[get] ( identifier[physnet] )
keyword[if] identifier[vnic_template] :
identifier[LOG] . identifier[debug] ( literal[string] , identifier[vnic_template] )
identifier[self] . identifier[ucsm_db] . identifier[add_vnic_template] ( identifier[vlan_id] , identifier[ucsm_ip] ,
identifier[vnic_template] . identifier[name] , identifier[physnet] )
keyword[return]
keyword[else] :
identifier[LOG] . identifier[debug] ( literal[string]
literal[string] , identifier[physnet] )
identifier[sp_template_info] =( identifier[CONF] . identifier[ml2_cisco_ucsm] . identifier[ucsms] [
identifier[ucsm_ip] ]. identifier[sp_template_list] . identifier[get] ( identifier[host_id] ))
keyword[if] identifier[sp_template_info] :
identifier[LOG] . identifier[debug] ( literal[string] ,
identifier[sp_template_info] . identifier[name] , identifier[vlan_id] )
identifier[self] . identifier[ucsm_db] . identifier[add_service_profile_template] (
identifier[vlan_id] , identifier[sp_template_info] . identifier[name] , identifier[ucsm_ip] )
keyword[return]
keyword[if] keyword[not] identifier[self] . identifier[driver] . identifier[is_vmfex_port] ( identifier[profile] ):
identifier[LOG] . identifier[debug] ( literal[string] )
keyword[return]
identifier[p_profile_name] = identifier[self] . identifier[make_profile_name] ( identifier[vlan_id] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[p_profile_name] , identifier[vlan_id] )
identifier[self] . identifier[ucsm_db] . identifier[add_port_profile] ( identifier[p_profile_name] , identifier[vlan_id] , identifier[ucsm_ip] ) | def update_port_precommit(self, context):
"""Adds port profile and vlan information to the DB.
Assign a port profile to this port. To do that:
1. Get the vlan_id associated with the bound segment
2. Check if a port profile already exists for this vlan_id
3. If yes, associate that port profile with this port.
4. If no, create a new port profile with this vlan_id and
associate with this port
"""
vnic_type = context.current.get(bc.portbindings.VNIC_TYPE, bc.portbindings.VNIC_NORMAL)
profile = context.current.get(bc.portbindings.PROFILE, {})
host_id = self._get_host_id(context.current.get(bc.portbindings.HOST_ID))
if not host_id:
LOG.warning('Host id from port context is None. Ignoring this port')
return # depends on [control=['if'], data=[]]
vlan_id = self._get_vlanid(context)
if not vlan_id:
LOG.warning('Vlan_id is None. Ignoring this port')
return # depends on [control=['if'], data=[]]
ucsm_ip = self.driver.get_ucsm_ip_for_host(host_id)
if not ucsm_ip:
LOG.info('Host %s is not controlled by any known UCS Manager.', host_id)
return # depends on [control=['if'], data=[]]
if not self.driver.check_vnic_type_and_vendor_info(vnic_type, profile):
# This is a neutron virtio port.
# If VNIC templates are configured, that config would
# take precedence and the VLAN is added to the VNIC template.
physnet = self._get_physnet(context)
if not physnet:
LOG.debug('physnet is None. Not modifying VNIC Template config') # depends on [control=['if'], data=[]]
else:
# Check if VNIC template is configured for this physnet
ucsm = CONF.ml2_cisco_ucsm.ucsms[ucsm_ip]
vnic_template = ucsm.vnic_template_list.get(physnet)
if vnic_template:
LOG.debug('vnic_template %s', vnic_template)
self.ucsm_db.add_vnic_template(vlan_id, ucsm_ip, vnic_template.name, physnet)
return # depends on [control=['if'], data=[]]
else:
LOG.debug('VNIC Template not configured for physnet %s', physnet)
# In the absence of VNIC Templates, VLAN is directly added
# to vNIC(s) on the SP Template.
# Check if SP Template config has been provided. If so, find
# the UCSM that controls this host and the Service Profile
# Template for this host.
sp_template_info = CONF.ml2_cisco_ucsm.ucsms[ucsm_ip].sp_template_list.get(host_id)
if sp_template_info:
LOG.debug('SP Template: %s, VLAN_id: %d', sp_template_info.name, vlan_id)
self.ucsm_db.add_service_profile_template(vlan_id, sp_template_info.name, ucsm_ip)
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# If this is an Intel SR-IOV vnic, then no need to create port
# profile on the UCS manager. So no need to update the DB.
if not self.driver.is_vmfex_port(profile):
LOG.debug('This is a SR-IOV port and hence not updating DB.')
return # depends on [control=['if'], data=[]]
# This is a Cisco VM-FEX port
p_profile_name = self.make_profile_name(vlan_id)
LOG.debug('Port Profile: %s for VLAN_id: %d', p_profile_name, vlan_id)
# Create a new port profile entry in the db
self.ucsm_db.add_port_profile(p_profile_name, vlan_id, ucsm_ip) |
async def AddCharm(self, channel, url):
    '''
    channel : str
    url : str
    Returns -> the RPC reply (the facade schema declares no result)
    '''
    # Build the request for version 1 of the Client facade and send it
    # over the connection's RPC channel.
    request = dict(
        type='Client',
        request='AddCharm',
        version=1,
        params={'channel': channel, 'url': url},
    )
    return await self.rpc(request)
literal[string]
identifier[_params] = identifier[dict] ()
identifier[msg] = identifier[dict] ( identifier[type] = literal[string] ,
identifier[request] = literal[string] ,
identifier[version] = literal[int] ,
identifier[params] = identifier[_params] )
identifier[_params] [ literal[string] ]= identifier[channel]
identifier[_params] [ literal[string] ]= identifier[url]
identifier[reply] = keyword[await] identifier[self] . identifier[rpc] ( identifier[msg] )
keyword[return] identifier[reply] | async def AddCharm(self, channel, url):
"""
channel : str
url : str
Returns -> None
"""
# map input types to rpc msg
_params = dict()
msg = dict(type='Client', request='AddCharm', version=1, params=_params)
_params['channel'] = channel
_params['url'] = url
reply = await self.rpc(msg)
return reply |
def _split_str(s, n):
"""
split string into list of strings by specified number.
"""
length = len(s)
return [s[i:i + n] for i in range(0, length, n)] | def function[_split_str, parameter[s, n]]:
constant[
split string into list of strings by specified number.
]
variable[length] assign[=] call[name[len], parameter[name[s]]]
return[<ast.ListComp object at 0x7da1b1b79210>] | keyword[def] identifier[_split_str] ( identifier[s] , identifier[n] ):
literal[string]
identifier[length] = identifier[len] ( identifier[s] )
keyword[return] [ identifier[s] [ identifier[i] : identifier[i] + identifier[n] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] , identifier[n] )] | def _split_str(s, n):
"""
split string into list of strings by specified number.
"""
length = len(s)
return [s[i:i + n] for i in range(0, length, n)] |
def output_scores(self, name=None):
    """ Returns: N x #class scores, summed to one for each box."""
    logits = self.label_logits
    # Softmax over the class dimension turns raw logits into a
    # probability distribution per box.
    return tf.nn.softmax(logits, name=name)
constant[ Returns: N x #class scores, summed to one for each box.]
return[call[name[tf].nn.softmax, parameter[name[self].label_logits]]] | keyword[def] identifier[output_scores] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
keyword[return] identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[self] . identifier[label_logits] , identifier[name] = identifier[name] ) | def output_scores(self, name=None):
""" Returns: N x #class scores, summed to one for each box."""
return tf.nn.softmax(self.label_logits, name=name) |
def inline(width=900):
    """display the map inline in ipython

    :param width: image width for the browser
    """
    from IPython.display import HTML, display
    import base64
    import os
    import shutil
    import tempfile
    import urllib.parse

    # Render into a private temporary directory instead of probing for an
    # unused random filename in the CWD (the old scheme was race-prone:
    # another process could claim the name between the check and savefig).
    tmpdir = tempfile.mkdtemp()
    fname = os.path.join(tmpdir, 'inline_map')
    try:
        # savefig is expected to write '<fname>.png'
        # (module-level helper -- TODO confirm it always appends '.png').
        savefig(fname)
        png = fname + '.png'
        if os.path.isfile(png):
            with open(png, 'rb') as fin:
                encoded = base64.b64encode(fin.read())
            # Percent-encode the base64 payload so it is safe inside the
            # data: URI.
            b64 = urllib.parse.quote(encoded)
            image_html = "<img style='width: %dpx; margin: 0px; float: left; border: 1px solid black;' src='data:image/png;base64,%s' />" % (width, b64)
            display(HTML(image_html))
    finally:
        # Clean up the temp artifacts even if rendering or display raised;
        # the original left the PNG behind on any exception.
        shutil.rmtree(tmpdir, ignore_errors=True)
constant[display the map inline in ipython
:param width: image width for the browser
]
from relative_module[IPython.display] import module[Image], module[HTML], module[display], module[clear_output]
import module[random]
import module[string]
import module[urllib]
import module[os]
while constant[True] begin[:]
variable[fname] assign[=] call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da1b16fadd0>]]
if <ast.UnaryOp object at 0x7da1b16f9c90> begin[:]
break
call[name[savefig], parameter[name[fname]]]
if call[name[os].path.isfile, parameter[binary_operation[name[fname] + constant[.png]]]] begin[:]
with call[name[open], parameter[binary_operation[name[fname] + constant[.png]], constant[rb]]] begin[:]
variable[encoded] assign[=] call[name[base64].b64encode, parameter[call[name[fin].read, parameter[]]]]
variable[b64] assign[=] call[name[urllib].parse.quote, parameter[name[encoded]]]
variable[image_html] assign[=] binary_operation[constant[<img style='width: %dpx; margin: 0px; float: left; border: 1px solid black;' src='data:image/png;base64,%s' />] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b16f98a0>, <ast.Name object at 0x7da1b16fbb80>]]]
call[name[display], parameter[call[name[HTML], parameter[name[image_html]]]]]
call[name[os].remove, parameter[binary_operation[name[fname] + constant[.png]]]] | keyword[def] identifier[inline] ( identifier[width] = literal[int] ):
literal[string]
keyword[from] identifier[IPython] . identifier[display] keyword[import] identifier[Image] , identifier[HTML] , identifier[display] , identifier[clear_output]
keyword[import] identifier[random]
keyword[import] identifier[string]
keyword[import] identifier[urllib]
keyword[import] identifier[os]
keyword[while] keyword[True] :
identifier[fname] = literal[string] . identifier[join] ( identifier[random] . identifier[choice] ( identifier[string] . identifier[ascii_uppercase] + identifier[string] . identifier[digits] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fname] + literal[string] ):
keyword[break]
identifier[savefig] ( identifier[fname] )
keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fname] + literal[string] ):
keyword[with] identifier[open] ( identifier[fname] + literal[string] , literal[string] ) keyword[as] identifier[fin] :
identifier[encoded] = identifier[base64] . identifier[b64encode] ( identifier[fin] . identifier[read] ())
identifier[b64] = identifier[urllib] . identifier[parse] . identifier[quote] ( identifier[encoded] )
identifier[image_html] = literal[string] %( identifier[width] , identifier[b64] )
identifier[display] ( identifier[HTML] ( identifier[image_html] ))
identifier[os] . identifier[remove] ( identifier[fname] + literal[string] ) | def inline(width=900):
"""display the map inline in ipython
:param width: image width for the browser
"""
from IPython.display import Image, HTML, display, clear_output
import random
import string
import urllib
import os
while True:
fname = ''.join((random.choice(string.ascii_uppercase + string.digits) for _ in range(32)))
if not os.path.isfile(fname + '.png'):
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
savefig(fname)
if os.path.isfile(fname + '.png'):
with open(fname + '.png', 'rb') as fin:
encoded = base64.b64encode(fin.read())
b64 = urllib.parse.quote(encoded) # depends on [control=['with'], data=['fin']]
image_html = "<img style='width: %dpx; margin: 0px; float: left; border: 1px solid black;' src='data:image/png;base64,%s' />" % (width, b64)
display(HTML(image_html))
os.remove(fname + '.png') # depends on [control=['if'], data=[]] |
def get_siblings_treepos(self, treepos):
    """Given a treeposition, return the treepositions of its siblings."""
    parent_pos = self.get_parent_treepos(treepos)
    if parent_pos is None:
        # A root position has no parent, hence no siblings.
        return []
    return [pos
            for pos in self.get_children_treepos(parent_pos)
            if pos != treepos]
return siblings_treepos | def function[get_siblings_treepos, parameter[self, treepos]]:
constant[Given a treeposition, return the treepositions of its siblings.]
variable[parent_pos] assign[=] call[name[self].get_parent_treepos, parameter[name[treepos]]]
variable[siblings_treepos] assign[=] list[[]]
if compare[name[parent_pos] is_not constant[None]] begin[:]
for taget[name[child_treepos]] in starred[call[name[self].get_children_treepos, parameter[name[parent_pos]]]] begin[:]
if compare[name[child_treepos] not_equal[!=] name[treepos]] begin[:]
call[name[siblings_treepos].append, parameter[name[child_treepos]]]
return[name[siblings_treepos]] | keyword[def] identifier[get_siblings_treepos] ( identifier[self] , identifier[treepos] ):
literal[string]
identifier[parent_pos] = identifier[self] . identifier[get_parent_treepos] ( identifier[treepos] )
identifier[siblings_treepos] =[]
keyword[if] identifier[parent_pos] keyword[is] keyword[not] keyword[None] :
keyword[for] identifier[child_treepos] keyword[in] identifier[self] . identifier[get_children_treepos] ( identifier[parent_pos] ):
keyword[if] identifier[child_treepos] != identifier[treepos] :
identifier[siblings_treepos] . identifier[append] ( identifier[child_treepos] )
keyword[return] identifier[siblings_treepos] | def get_siblings_treepos(self, treepos):
"""Given a treeposition, return the treepositions of its siblings."""
parent_pos = self.get_parent_treepos(treepos)
siblings_treepos = []
if parent_pos is not None:
for child_treepos in self.get_children_treepos(parent_pos):
if child_treepos != treepos:
siblings_treepos.append(child_treepos) # depends on [control=['if'], data=['child_treepos']] # depends on [control=['for'], data=['child_treepos']] # depends on [control=['if'], data=['parent_pos']]
return siblings_treepos |
def _get_min_mag_and_num_bins(self):
    """
    Estimate the number of bins in the histogram and return it along with
    the first bin center value.

    ``min_mag`` and ``max_mag`` are rounded with respect to ``bin_width``
    so that the distance between them spans an integer number of bins.

    :returns:
        A tuple of 2 items: first bin center, and total number of bins.
    """
    first_center = round(self.min_mag / self.bin_width) * self.bin_width
    last_center = (round((self.char_mag + DELTA_CHAR / 2) /
                         self.bin_width) * self.bin_width)
    # Shift from bin edges to bin centers.
    first_center = first_center + self.bin_width / 2.0
    last_center = last_center - self.bin_width / 2.0
    # round() (rather than a plain int cast) guards against IEEE 754
    # artifacts: a value like 7.999999999999 would otherwise truncate
    # to 7 and silently drop the last bin.
    num_bins = 1 + int(round((last_center - first_center) / self.bin_width))
    return first_center, num_bins
constant[
Estimate the number of bins in the histogram and return it along with
the first bin center value.
Rounds ``min_mag`` and ``max_mag`` with respect to ``bin_width`` to
make the distance between them include integer number of bins.
:returns:
A tuple of 2 items: first bin center, and total number of bins.
]
variable[min_mag] assign[=] binary_operation[call[name[round], parameter[binary_operation[name[self].min_mag / name[self].bin_width]]] * name[self].bin_width]
variable[max_mag] assign[=] binary_operation[call[name[round], parameter[binary_operation[binary_operation[name[self].char_mag + binary_operation[name[DELTA_CHAR] / constant[2]]] / name[self].bin_width]]] * name[self].bin_width]
<ast.AugAssign object at 0x7da18bcca4d0>
<ast.AugAssign object at 0x7da18bcca8f0>
variable[num_bins] assign[=] binary_operation[call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[name[max_mag] - name[min_mag]] / name[self].bin_width]]]]] + constant[1]]
return[tuple[[<ast.Name object at 0x7da18bcca0e0>, <ast.Name object at 0x7da18bccab90>]]] | keyword[def] identifier[_get_min_mag_and_num_bins] ( identifier[self] ):
literal[string]
identifier[min_mag] = identifier[round] ( identifier[self] . identifier[min_mag] / identifier[self] . identifier[bin_width] )* identifier[self] . identifier[bin_width]
identifier[max_mag] =( identifier[round] (( identifier[self] . identifier[char_mag] + identifier[DELTA_CHAR] / literal[int] )/
identifier[self] . identifier[bin_width] )* identifier[self] . identifier[bin_width] )
identifier[min_mag] += identifier[self] . identifier[bin_width] / literal[int]
identifier[max_mag] -= identifier[self] . identifier[bin_width] / literal[int]
identifier[num_bins] = identifier[int] ( identifier[round] (( identifier[max_mag] - identifier[min_mag] )/ identifier[self] . identifier[bin_width] ))+ literal[int]
keyword[return] identifier[min_mag] , identifier[num_bins] | def _get_min_mag_and_num_bins(self):
"""
Estimate the number of bins in the histogram and return it along with
the first bin center value.
Rounds ``min_mag`` and ``max_mag`` with respect to ``bin_width`` to
make the distance between them include integer number of bins.
:returns:
A tuple of 2 items: first bin center, and total number of bins.
"""
min_mag = round(self.min_mag / self.bin_width) * self.bin_width
max_mag = round((self.char_mag + DELTA_CHAR / 2) / self.bin_width) * self.bin_width
min_mag += self.bin_width / 2.0
max_mag -= self.bin_width / 2.0
# here we use math round on the result of division and not just
# cast it to integer because for some magnitude values that can't
# be represented as an IEEE 754 double precisely the result can
# look like 7.999999999999 which would become 7 instead of 8
# being naively casted to int so we would lose the last bin.
num_bins = int(round((max_mag - min_mag) / self.bin_width)) + 1
return (min_mag, num_bins) |
def parse_request(self):
    """Parse the request line, then route by the first URL segment.

    `BaseHTTPRequestHandler` only dispatches to plain do_GET/do_POST/...
    implementations.  This override adds a simple routing layer so that,
    for example, GET /uri1/part2 invokes `do_GET_uri1()` and POST /other
    invokes `do_POST_other()`.  When no matching
    `do_<METHOD>_<segment>` handler exists, the stock behavior applies.
    """
    ok = BaseHTTPRequestHandler.parse_request(self)
    if not ok:
        return ok
    segment = self.path.lstrip('/').split('/')[0]
    if segment:
        candidate = '{}_{}'.format(self.command, segment)
    else:
        candidate = self.command
    # Only reroute when a handler for the candidate name actually exists.
    if hasattr(self, 'do_' + candidate):
        self.command = candidate
    return ok
constant[Override parse_request method to enrich basic functionality of `BaseHTTPRequestHandler` class
Original class can only invoke do_GET, do_POST, do_PUT, etc method implementations if they are defined.
But we would like to have at least some simple routing mechanism, i.e.:
GET /uri1/part2 request should invoke `do_GET_uri1()`
POST /other should invoke `do_POST_other()`
If the `do_<REQUEST_METHOD>_<first_part_url>` method does not exists we'll fallback to original behavior.]
variable[ret] assign[=] call[name[BaseHTTPRequestHandler].parse_request, parameter[name[self]]]
if name[ret] begin[:]
variable[mname] assign[=] call[call[call[name[self].path.lstrip, parameter[constant[/]]].split, parameter[constant[/]]]][constant[0]]
variable[mname] assign[=] binary_operation[name[self].command + <ast.IfExp object at 0x7da1b2184070>]
if call[name[hasattr], parameter[name[self], binary_operation[constant[do_] + name[mname]]]] begin[:]
name[self].command assign[=] name[mname]
return[name[ret]] | keyword[def] identifier[parse_request] ( identifier[self] ):
literal[string]
identifier[ret] = identifier[BaseHTTPRequestHandler] . identifier[parse_request] ( identifier[self] )
keyword[if] identifier[ret] :
identifier[mname] = identifier[self] . identifier[path] . identifier[lstrip] ( literal[string] ). identifier[split] ( literal[string] )[ literal[int] ]
identifier[mname] = identifier[self] . identifier[command] +( literal[string] + identifier[mname] keyword[if] identifier[mname] keyword[else] literal[string] )
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] + identifier[mname] ):
identifier[self] . identifier[command] = identifier[mname]
keyword[return] identifier[ret] | def parse_request(self):
"""Override parse_request method to enrich basic functionality of `BaseHTTPRequestHandler` class
Original class can only invoke do_GET, do_POST, do_PUT, etc method implementations if they are defined.
But we would like to have at least some simple routing mechanism, i.e.:
GET /uri1/part2 request should invoke `do_GET_uri1()`
POST /other should invoke `do_POST_other()`
If the `do_<REQUEST_METHOD>_<first_part_url>` method does not exists we'll fallback to original behavior."""
ret = BaseHTTPRequestHandler.parse_request(self)
if ret:
mname = self.path.lstrip('/').split('/')[0]
mname = self.command + ('_' + mname if mname else '')
if hasattr(self, 'do_' + mname):
self.command = mname # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return ret |
def delete_table_rate_rule_by_id(cls, table_rate_rule_id, **kwargs):
    """Delete TableRateRule

    Delete an instance of TableRateRule by its ID.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async=True
    >>> thread = api.delete_table_rate_rule_by_id(table_rate_rule_id, async=True)
    >>> result = thread.get()
    :param async bool
    :param str table_rate_rule_id: ID of tableRateRule to delete. (required)
    :return: None
        If the method is called asynchronously,
        returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # Both flavours resolve to the same underlying call: with async=True
    # it returns the request thread, otherwise the response data --
    # the distinction is handled inside the _with_http_info helper.
    return cls._delete_table_rate_rule_by_id_with_http_info(
        table_rate_rule_id, **kwargs)
constant[Delete TableRateRule
Delete an instance of TableRateRule by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_table_rate_rule_by_id(table_rate_rule_id, async=True)
>>> result = thread.get()
:param async bool
:param str table_rate_rule_id: ID of tableRateRule to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._delete_table_rate_rule_by_id_with_http_info, parameter[name[table_rate_rule_id]]]] | keyword[def] identifier[delete_table_rate_rule_by_id] ( identifier[cls] , identifier[table_rate_rule_id] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_delete_table_rate_rule_by_id_with_http_info] ( identifier[table_rate_rule_id] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_delete_table_rate_rule_by_id_with_http_info] ( identifier[table_rate_rule_id] ,** identifier[kwargs] )
keyword[return] identifier[data] | def delete_table_rate_rule_by_id(cls, table_rate_rule_id, **kwargs):
"""Delete TableRateRule
Delete an instance of TableRateRule by its ID.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.delete_table_rate_rule_by_id(table_rate_rule_id, async=True)
>>> result = thread.get()
:param async bool
:param str table_rate_rule_id: ID of tableRateRule to delete. (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._delete_table_rate_rule_by_id_with_http_info(table_rate_rule_id, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._delete_table_rate_rule_by_id_with_http_info(table_rate_rule_id, **kwargs)
return data |
def _get_portfolio_info(self, portfolio_code):
"""
获取组合信息
"""
url = self.PORTFOLIO_URL + portfolio_code
portfolio_page = self.s.get(url)
match_info = re.search(r'(?<=SNB.cubeInfo = ).*(?=;\n)',
portfolio_page.text)
if match_info is None:
raise Exception(
'cant get portfolio info, portfolio url : {}'.format(url))
try:
portfolio_info = json.loads(match_info.group())
except Exception as e:
raise Exception('get portfolio info error: {}'.format(e))
return portfolio_info | def function[_get_portfolio_info, parameter[self, portfolio_code]]:
constant[
获取组合信息
]
variable[url] assign[=] binary_operation[name[self].PORTFOLIO_URL + name[portfolio_code]]
variable[portfolio_page] assign[=] call[name[self].s.get, parameter[name[url]]]
variable[match_info] assign[=] call[name[re].search, parameter[constant[(?<=SNB.cubeInfo = ).*(?=;\n)], name[portfolio_page].text]]
if compare[name[match_info] is constant[None]] begin[:]
<ast.Raise object at 0x7da18dc04df0>
<ast.Try object at 0x7da18dc05cf0>
return[name[portfolio_info]] | keyword[def] identifier[_get_portfolio_info] ( identifier[self] , identifier[portfolio_code] ):
literal[string]
identifier[url] = identifier[self] . identifier[PORTFOLIO_URL] + identifier[portfolio_code]
identifier[portfolio_page] = identifier[self] . identifier[s] . identifier[get] ( identifier[url] )
identifier[match_info] = identifier[re] . identifier[search] ( literal[string] ,
identifier[portfolio_page] . identifier[text] )
keyword[if] identifier[match_info] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] (
literal[string] . identifier[format] ( identifier[url] ))
keyword[try] :
identifier[portfolio_info] = identifier[json] . identifier[loads] ( identifier[match_info] . identifier[group] ())
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[e] ))
keyword[return] identifier[portfolio_info] | def _get_portfolio_info(self, portfolio_code):
"""
获取组合信息
"""
url = self.PORTFOLIO_URL + portfolio_code
portfolio_page = self.s.get(url)
match_info = re.search('(?<=SNB.cubeInfo = ).*(?=;\\n)', portfolio_page.text)
if match_info is None:
raise Exception('cant get portfolio info, portfolio url : {}'.format(url)) # depends on [control=['if'], data=[]]
try:
portfolio_info = json.loads(match_info.group()) # depends on [control=['try'], data=[]]
except Exception as e:
raise Exception('get portfolio info error: {}'.format(e)) # depends on [control=['except'], data=['e']]
return portfolio_info |
def sendMessage(self, msg_dict):
    """Assign fresh message IDs, serialize *msg_dict* as an LLRP message
    and write it to the transport.

    Each top-level entry in ``msg_dict`` receives a new, monotonically
    increasing ``ID`` written into it in place — callers hold a reference
    to the same dict, which is intentional.

    :param msg_dict: mapping of message name -> parameter dict to send
    :return: list of ``(name, id)`` tuples for the IDs just assigned
    """
    sent_ids = []
    for msg_name in msg_dict:
        next_id = self.last_msg_id + 1
        self.last_msg_id = next_id
        msg_dict[msg_name]['ID'] = next_id
        sent_ids.append((msg_name, next_id))
    encoded = LLRPMessage(msgdict=msg_dict).msgbytes
    assert encoded, "LLRPMessage is empty"
    self.transport.write(encoded)
    return sent_ids
constant[Serialize and send a dict LLRP Message
Note: IDs should be modified in original msg_dict as it is a reference.
That should be ok.
]
variable[sent_ids] assign[=] list[[]]
for taget[name[name]] in starred[name[msg_dict]] begin[:]
<ast.AugAssign object at 0x7da2049607c0>
call[call[name[msg_dict]][name[name]]][constant[ID]] assign[=] name[self].last_msg_id
call[name[sent_ids].append, parameter[tuple[[<ast.Name object at 0x7da18f58fd00>, <ast.Attribute object at 0x7da18f58f700>]]]]
variable[llrp_msg] assign[=] call[name[LLRPMessage], parameter[]]
assert[name[llrp_msg].msgbytes]
call[name[self].transport.write, parameter[name[llrp_msg].msgbytes]]
return[name[sent_ids]] | keyword[def] identifier[sendMessage] ( identifier[self] , identifier[msg_dict] ):
literal[string]
identifier[sent_ids] =[]
keyword[for] identifier[name] keyword[in] identifier[msg_dict] :
identifier[self] . identifier[last_msg_id] += literal[int]
identifier[msg_dict] [ identifier[name] ][ literal[string] ]= identifier[self] . identifier[last_msg_id]
identifier[sent_ids] . identifier[append] (( identifier[name] , identifier[self] . identifier[last_msg_id] ))
identifier[llrp_msg] = identifier[LLRPMessage] ( identifier[msgdict] = identifier[msg_dict] )
keyword[assert] identifier[llrp_msg] . identifier[msgbytes] , literal[string]
identifier[self] . identifier[transport] . identifier[write] ( identifier[llrp_msg] . identifier[msgbytes] )
keyword[return] identifier[sent_ids] | def sendMessage(self, msg_dict):
"""Serialize and send a dict LLRP Message
Note: IDs should be modified in original msg_dict as it is a reference.
That should be ok.
"""
sent_ids = []
for name in msg_dict:
self.last_msg_id += 1
msg_dict[name]['ID'] = self.last_msg_id
sent_ids.append((name, self.last_msg_id)) # depends on [control=['for'], data=['name']]
llrp_msg = LLRPMessage(msgdict=msg_dict)
assert llrp_msg.msgbytes, 'LLRPMessage is empty'
self.transport.write(llrp_msg.msgbytes)
return sent_ids |
def calculate_progress(self, scan_id):
    """Calculate the total scan progress from the partial target progress.

    :param scan_id: identifier of the scan whose progress is aggregated
    :return: mean progress over all of the scan's targets, or 0 when the
        scan has no targets (previously this raised ZeroDivisionError)
    """
    t_prog = {
        target: self.get_scan_target_progress(scan_id, target)
        for target in self.get_scan_target(scan_id)
    }
    if not t_prog:
        # No targets registered yet: report no progress instead of
        # dividing by zero.
        return 0
    return sum(t_prog.values()) / len(t_prog)
constant[ Calculate the total scan progress from the
partial target progress. ]
variable[t_prog] assign[=] call[name[dict], parameter[]]
for taget[name[target]] in starred[call[name[self].get_scan_target, parameter[name[scan_id]]]] begin[:]
call[name[t_prog]][name[target]] assign[=] call[name[self].get_scan_target_progress, parameter[name[scan_id], name[target]]]
return[binary_operation[call[name[sum], parameter[call[name[t_prog].values, parameter[]]]] / call[name[len], parameter[name[t_prog]]]]] | keyword[def] identifier[calculate_progress] ( identifier[self] , identifier[scan_id] ):
literal[string]
identifier[t_prog] = identifier[dict] ()
keyword[for] identifier[target] keyword[in] identifier[self] . identifier[get_scan_target] ( identifier[scan_id] ):
identifier[t_prog] [ identifier[target] ]= identifier[self] . identifier[get_scan_target_progress] ( identifier[scan_id] , identifier[target] )
keyword[return] identifier[sum] ( identifier[t_prog] . identifier[values] ())/ identifier[len] ( identifier[t_prog] ) | def calculate_progress(self, scan_id):
""" Calculate the total scan progress from the
partial target progress. """
t_prog = dict()
for target in self.get_scan_target(scan_id):
t_prog[target] = self.get_scan_target_progress(scan_id, target) # depends on [control=['for'], data=['target']]
return sum(t_prog.values()) / len(t_prog) |
async def send_mail(self, mail):
    """Pipe *mail* to the configured sendmail command.

    :param mail: the mail to send out
    :type mail: :class:`email.message.Message` or string
    :raises SendingMailFailed: if the command cannot be executed or
        exits with a non-zero status
    """
    cmdlist = split_commandstring(self.cmd)
    try:
        # the command reads the mail (coerced to str) from stdin
        out, err, code = await call_cmd_async(cmdlist, stdin=str(mail))
        if code != 0:
            detail = ':\n' + err.strip() if err else '.'
            raise Exception(
                'The sendmail command {} returned with code {}{}'.format(
                    self.cmd, code, detail))
    except Exception as e:
        # Any failure (spawn error or non-zero exit) is logged and
        # surfaced uniformly as SendingMailFailed.
        logging.error(str(e))
        raise SendingMailFailed(str(e))
    logging.info('sent mail successfully')
    logging.info(out)
literal[string]
identifier[cmdlist] = identifier[split_commandstring] ( identifier[self] . identifier[cmd] )
keyword[try] :
identifier[out] , identifier[err] , identifier[code] = keyword[await] identifier[call_cmd_async] ( identifier[cmdlist] , identifier[stdin] = identifier[str] ( identifier[mail] ))
keyword[if] identifier[code] != literal[int] :
identifier[msg] = literal[string] . identifier[format] (
identifier[self] . identifier[cmd] , identifier[code] , literal[string] + identifier[err] . identifier[strip] () keyword[if] identifier[err] keyword[else] literal[string] )
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logging] . identifier[error] ( identifier[str] ( identifier[e] ))
keyword[raise] identifier[SendingMailFailed] ( identifier[str] ( identifier[e] ))
identifier[logging] . identifier[info] ( literal[string] )
identifier[logging] . identifier[info] ( identifier[out] ) | async def send_mail(self, mail):
"""Pipe the given mail to the configured sendmail command. Display a
short message on success or a notification on error.
:param mail: the mail to send out
:type mail: :class:`email.message.Message` or string
:raises: class:`SendingMailFailed` if sending failes
"""
cmdlist = split_commandstring(self.cmd)
try:
# make sure self.mail is a string
(out, err, code) = await call_cmd_async(cmdlist, stdin=str(mail))
if code != 0:
msg = 'The sendmail command {} returned with code {}{}'.format(self.cmd, code, ':\n' + err.strip() if err else '.')
raise Exception(msg) # depends on [control=['if'], data=['code']] # depends on [control=['try'], data=[]]
except Exception as e:
logging.error(str(e))
raise SendingMailFailed(str(e)) # depends on [control=['except'], data=['e']]
logging.info('sent mail successfully')
logging.info(out) |
def spanning_1d_chain(length):
    """
    Build a linear chain of ``length`` nodes plus two auxiliary end
    nodes used for spanning cluster detection.

    Node ``0`` and node ``length + 1`` are the auxiliary nodes; each of
    them and its incident edge is tagged with a ``'span'`` attribute
    (``0`` for the left side, ``1`` for the right side).

    Parameters
    ----------
    length : int
        Number of nodes in the chain, excluding the auxiliary nodes.

    Returns
    -------
    networkx.Graph
        A linear chain graph with auxiliary nodes for spanning cluster
        detection

    See Also
    --------
    sample_states : spanning cluster detection
    """
    chain = nx.grid_graph(dim=[int(length + 2)])
    left, right = 0, length + 1
    chain.node[left]['span'] = 0
    chain[left][left + 1]['span'] = 0
    chain.node[right]['span'] = 1
    chain[right - 1][right]['span'] = 1
    return chain
constant[
Generate a linear chain with auxiliary nodes for spanning cluster detection
Parameters
----------
length : int
Number of nodes in the chain, excluding the auxiliary nodes.
Returns
-------
networkx.Graph
A linear chain graph with auxiliary nodes for spanning cluster detection
See Also
--------
sample_states : spanning cluster detection
]
variable[ret] assign[=] call[name[nx].grid_graph, parameter[]]
call[call[name[ret].node][constant[0]]][constant[span]] assign[=] constant[0]
call[call[call[name[ret]][constant[0]]][constant[1]]][constant[span]] assign[=] constant[0]
call[call[name[ret].node][binary_operation[name[length] + constant[1]]]][constant[span]] assign[=] constant[1]
call[call[call[name[ret]][name[length]]][binary_operation[name[length] + constant[1]]]][constant[span]] assign[=] constant[1]
return[name[ret]] | keyword[def] identifier[spanning_1d_chain] ( identifier[length] ):
literal[string]
identifier[ret] = identifier[nx] . identifier[grid_graph] ( identifier[dim] =[ identifier[int] ( identifier[length] + literal[int] )])
identifier[ret] . identifier[node] [ literal[int] ][ literal[string] ]= literal[int]
identifier[ret] [ literal[int] ][ literal[int] ][ literal[string] ]= literal[int]
identifier[ret] . identifier[node] [ identifier[length] + literal[int] ][ literal[string] ]= literal[int]
identifier[ret] [ identifier[length] ][ identifier[length] + literal[int] ][ literal[string] ]= literal[int]
keyword[return] identifier[ret] | def spanning_1d_chain(length):
"""
Generate a linear chain with auxiliary nodes for spanning cluster detection
Parameters
----------
length : int
Number of nodes in the chain, excluding the auxiliary nodes.
Returns
-------
networkx.Graph
A linear chain graph with auxiliary nodes for spanning cluster detection
See Also
--------
sample_states : spanning cluster detection
"""
ret = nx.grid_graph(dim=[int(length + 2)])
ret.node[0]['span'] = 0
ret[0][1]['span'] = 0
ret.node[length + 1]['span'] = 1
ret[length][length + 1]['span'] = 1
return ret |
def _contextualise_connection(self, connection):
    """Remember *connection* on the current application context.

    Connections tracked here can be freed/unbound at a later time if an
    exception occurred before they were released explicitly.

    Args:
        connection (ldap3.Connection): Connection to add to the appcontext
    """
    ctx = stack.top
    if ctx is None:
        # No active application context: nothing to track against.
        return
    if hasattr(ctx, 'ldap3_manager_connections'):
        ctx.ldap3_manager_connections.append(connection)
    else:
        ctx.ldap3_manager_connections = [connection]
constant[
Add a connection to the appcontext so it can be freed/unbound at
a later time if an exception occured and it was not freed.
Args:
connection (ldap3.Connection): Connection to add to the appcontext
]
variable[ctx] assign[=] name[stack].top
if compare[name[ctx] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b02bbbb0> begin[:]
name[ctx].ldap3_manager_connections assign[=] list[[<ast.Name object at 0x7da1b02bb520>]] | keyword[def] identifier[_contextualise_connection] ( identifier[self] , identifier[connection] ):
literal[string]
identifier[ctx] = identifier[stack] . identifier[top]
keyword[if] identifier[ctx] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[ctx] , literal[string] ):
identifier[ctx] . identifier[ldap3_manager_connections] =[ identifier[connection] ]
keyword[else] :
identifier[ctx] . identifier[ldap3_manager_connections] . identifier[append] ( identifier[connection] ) | def _contextualise_connection(self, connection):
"""
Add a connection to the appcontext so it can be freed/unbound at
a later time if an exception occured and it was not freed.
Args:
connection (ldap3.Connection): Connection to add to the appcontext
"""
ctx = stack.top
if ctx is not None:
if not hasattr(ctx, 'ldap3_manager_connections'):
ctx.ldap3_manager_connections = [connection] # depends on [control=['if'], data=[]]
else:
ctx.ldap3_manager_connections.append(connection) # depends on [control=['if'], data=['ctx']] |
def terminate(self, nodes=None):
    """Destroy one or many nodes.

    :param nodes: Nodes to be destroyed. Falls back to ``self.nodes``
        when empty or omitted.
    :type nodes: ``list``
    :return: List of nodes which failed to terminate, or ``None`` when
        there is no active connection.
    :rtype: ``list``
    """
    if not self.is_connected():
        return None
    targets = nodes or self.nodes
    outcome = self.gce.ex_destroy_multiple_nodes(
        targets, poll_interval=1, ignore_errors=False)
    # Verify whether each instance has actually been terminated.
    failed_kill = []
    for node, destroyed in zip(targets, outcome):
        if destroyed:
            logging.info('Successfully destroyed: %s', node.name)
        else:
            logging.error('Failed to destroy: %s', node.name)
            failed_kill.append(node)
    return failed_kill
constant[Destroy one or many nodes.
:param nodes: Nodes to be destroyed.
:type nodes: ``list``
:return: List of nodes which failed to terminate.
:rtype: ``list``
]
if <ast.UnaryOp object at 0x7da1b13b46a0> begin[:]
return[constant[None]]
variable[nodes] assign[=] <ast.BoolOp object at 0x7da1b13b7520>
variable[failed_kill] assign[=] list[[]]
variable[result] assign[=] call[name[self].gce.ex_destroy_multiple_nodes, parameter[name[nodes]]]
for taget[tuple[[<ast.Name object at 0x7da1b13b5180>, <ast.Name object at 0x7da1b13b6530>]]] in starred[call[name[enumerate], parameter[name[result]]]] begin[:]
if name[success] begin[:]
call[name[logging].info, parameter[constant[Successfully destroyed: %s], call[name[nodes]][name[i]].name]]
return[name[failed_kill]] | keyword[def] identifier[terminate] ( identifier[self] , identifier[nodes] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_connected] ():
keyword[return] keyword[None]
identifier[nodes] = identifier[nodes] keyword[or] identifier[self] . identifier[nodes]
identifier[failed_kill] =[]
identifier[result] = identifier[self] . identifier[gce] . identifier[ex_destroy_multiple_nodes] ( identifier[nodes] , identifier[poll_interval] = literal[int] , identifier[ignore_errors] = keyword[False] )
keyword[for] identifier[i] , identifier[success] keyword[in] identifier[enumerate] ( identifier[result] ):
keyword[if] identifier[success] :
identifier[logging] . identifier[info] ( literal[string] , identifier[nodes] [ identifier[i] ]. identifier[name] )
keyword[else] :
identifier[logging] . identifier[error] ( literal[string] , identifier[nodes] [ identifier[i] ]. identifier[name] )
identifier[failed_kill] . identifier[append] ( identifier[nodes] [ identifier[i] ])
keyword[return] identifier[failed_kill] | def terminate(self, nodes=None):
"""Destroy one or many nodes.
:param nodes: Nodes to be destroyed.
:type nodes: ``list``
:return: List of nodes which failed to terminate.
:rtype: ``list``
"""
if not self.is_connected():
return None # depends on [control=['if'], data=[]]
nodes = nodes or self.nodes
failed_kill = []
result = self.gce.ex_destroy_multiple_nodes(nodes, poll_interval=1, ignore_errors=False)
# Verify whether all instances have been terminated.
for (i, success) in enumerate(result):
if success:
logging.info('Successfully destroyed: %s', nodes[i].name) # depends on [control=['if'], data=[]]
else:
logging.error('Failed to destroy: %s', nodes[i].name)
failed_kill.append(nodes[i]) # depends on [control=['for'], data=[]]
return failed_kill |
def determine_optimum_fraction_correct_cutoffs(self, analysis_set, dataframe, stability_classication_x_cutoff):
'''Determines the value of stability_classication_y_cutoff which approximately maximizes the fraction correct
measurement w.r.t. a fixed stability_classication_x_cutoff. This function uses discrete sampling and so it
may miss the actual maximum. We use two rounds of sampling: i) a coarse-grained sampling (0.1 energy unit
intervals); and ii) finer sampling (0.01 unit intervals).
In both rounds, we choose the one corresponding to a lower value for the cutoff in cases of multiple maxima.'''
# Determine the value for the fraction correct y-value (predicted) cutoff which will approximately yield the
# maximum fraction-correct value
fraction_correct_range = []
experimental_field = BenchmarkRun.get_analysis_set_fieldname('Experimental', analysis_set)
# Round 1 : Coarse sampling. Test 0.5 -> 8.0 in 0.1 increments
for z in range(5, 80):
w = float(z) / 10.0
fraction_correct_range.append((w, fraction_correct_pandas(dataframe, experimental_field, 'Predicted', x_cutoff = stability_classication_x_cutoff, y_cutoff = w, ignore_null_values = True)))
max_value_cutoff, max_value = fraction_correct_range[0][0], fraction_correct_range[0][1]
for p in fraction_correct_range:
if p[1] > max_value:
max_value_cutoff, max_value = p[0], p[1]
# Round 2 : Finer sampling. Test max_value_cutoff - 0.1 -> max_value_cutoff + 0.1 in 0.01 increments
for z in range(int((max_value_cutoff - 0.1) * 100), int((max_value_cutoff + 0.1) * 100)):
w = float(z) / 100.0
fraction_correct_range.append((w, fraction_correct_pandas(dataframe, experimental_field, 'Predicted', x_cutoff = stability_classication_x_cutoff, y_cutoff = w, ignore_null_values = True)))
fraction_correct_range = sorted(set(fraction_correct_range)) # sort so that we find the lowest cutoff value in case of duplicate fraction correct values
max_value_cutoff, max_value = fraction_correct_range[0][0], fraction_correct_range[0][1]
for p in fraction_correct_range:
if p[1] > max_value:
max_value_cutoff, max_value = p[0], p[1]
return max_value_cutoff, max_value, fraction_correct_range | def function[determine_optimum_fraction_correct_cutoffs, parameter[self, analysis_set, dataframe, stability_classication_x_cutoff]]:
constant[Determines the value of stability_classication_y_cutoff which approximately maximizes the fraction correct
measurement w.r.t. a fixed stability_classication_x_cutoff. This function uses discrete sampling and so it
may miss the actual maximum. We use two rounds of sampling: i) a coarse-grained sampling (0.1 energy unit
intervals); and ii) finer sampling (0.01 unit intervals).
In both rounds, we choose the one corresponding to a lower value for the cutoff in cases of multiple maxima.]
variable[fraction_correct_range] assign[=] list[[]]
variable[experimental_field] assign[=] call[name[BenchmarkRun].get_analysis_set_fieldname, parameter[constant[Experimental], name[analysis_set]]]
for taget[name[z]] in starred[call[name[range], parameter[constant[5], constant[80]]]] begin[:]
variable[w] assign[=] binary_operation[call[name[float], parameter[name[z]]] / constant[10.0]]
call[name[fraction_correct_range].append, parameter[tuple[[<ast.Name object at 0x7da20c794370>, <ast.Call object at 0x7da20c794700>]]]]
<ast.Tuple object at 0x7da20c795c00> assign[=] tuple[[<ast.Subscript object at 0x7da20c794b50>, <ast.Subscript object at 0x7da20c795270>]]
for taget[name[p]] in starred[name[fraction_correct_range]] begin[:]
if compare[call[name[p]][constant[1]] greater[>] name[max_value]] begin[:]
<ast.Tuple object at 0x7da20c796740> assign[=] tuple[[<ast.Subscript object at 0x7da20c796350>, <ast.Subscript object at 0x7da20c7966e0>]]
for taget[name[z]] in starred[call[name[range], parameter[call[name[int], parameter[binary_operation[binary_operation[name[max_value_cutoff] - constant[0.1]] * constant[100]]]], call[name[int], parameter[binary_operation[binary_operation[name[max_value_cutoff] + constant[0.1]] * constant[100]]]]]]] begin[:]
variable[w] assign[=] binary_operation[call[name[float], parameter[name[z]]] / constant[100.0]]
call[name[fraction_correct_range].append, parameter[tuple[[<ast.Name object at 0x7da207f03b50>, <ast.Call object at 0x7da207f02200>]]]]
variable[fraction_correct_range] assign[=] call[name[sorted], parameter[call[name[set], parameter[name[fraction_correct_range]]]]]
<ast.Tuple object at 0x7da207f020b0> assign[=] tuple[[<ast.Subscript object at 0x7da207f02770>, <ast.Subscript object at 0x7da207f03610>]]
for taget[name[p]] in starred[name[fraction_correct_range]] begin[:]
if compare[call[name[p]][constant[1]] greater[>] name[max_value]] begin[:]
<ast.Tuple object at 0x7da207f01810> assign[=] tuple[[<ast.Subscript object at 0x7da207f03940>, <ast.Subscript object at 0x7da207f02e00>]]
return[tuple[[<ast.Name object at 0x7da207f03a60>, <ast.Name object at 0x7da207f01330>, <ast.Name object at 0x7da207f00ee0>]]] | keyword[def] identifier[determine_optimum_fraction_correct_cutoffs] ( identifier[self] , identifier[analysis_set] , identifier[dataframe] , identifier[stability_classication_x_cutoff] ):
literal[string]
identifier[fraction_correct_range] =[]
identifier[experimental_field] = identifier[BenchmarkRun] . identifier[get_analysis_set_fieldname] ( literal[string] , identifier[analysis_set] )
keyword[for] identifier[z] keyword[in] identifier[range] ( literal[int] , literal[int] ):
identifier[w] = identifier[float] ( identifier[z] )/ literal[int]
identifier[fraction_correct_range] . identifier[append] (( identifier[w] , identifier[fraction_correct_pandas] ( identifier[dataframe] , identifier[experimental_field] , literal[string] , identifier[x_cutoff] = identifier[stability_classication_x_cutoff] , identifier[y_cutoff] = identifier[w] , identifier[ignore_null_values] = keyword[True] )))
identifier[max_value_cutoff] , identifier[max_value] = identifier[fraction_correct_range] [ literal[int] ][ literal[int] ], identifier[fraction_correct_range] [ literal[int] ][ literal[int] ]
keyword[for] identifier[p] keyword[in] identifier[fraction_correct_range] :
keyword[if] identifier[p] [ literal[int] ]> identifier[max_value] :
identifier[max_value_cutoff] , identifier[max_value] = identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]
keyword[for] identifier[z] keyword[in] identifier[range] ( identifier[int] (( identifier[max_value_cutoff] - literal[int] )* literal[int] ), identifier[int] (( identifier[max_value_cutoff] + literal[int] )* literal[int] )):
identifier[w] = identifier[float] ( identifier[z] )/ literal[int]
identifier[fraction_correct_range] . identifier[append] (( identifier[w] , identifier[fraction_correct_pandas] ( identifier[dataframe] , identifier[experimental_field] , literal[string] , identifier[x_cutoff] = identifier[stability_classication_x_cutoff] , identifier[y_cutoff] = identifier[w] , identifier[ignore_null_values] = keyword[True] )))
identifier[fraction_correct_range] = identifier[sorted] ( identifier[set] ( identifier[fraction_correct_range] ))
identifier[max_value_cutoff] , identifier[max_value] = identifier[fraction_correct_range] [ literal[int] ][ literal[int] ], identifier[fraction_correct_range] [ literal[int] ][ literal[int] ]
keyword[for] identifier[p] keyword[in] identifier[fraction_correct_range] :
keyword[if] identifier[p] [ literal[int] ]> identifier[max_value] :
identifier[max_value_cutoff] , identifier[max_value] = identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]
keyword[return] identifier[max_value_cutoff] , identifier[max_value] , identifier[fraction_correct_range] | def determine_optimum_fraction_correct_cutoffs(self, analysis_set, dataframe, stability_classication_x_cutoff):
"""Determines the value of stability_classication_y_cutoff which approximately maximizes the fraction correct
measurement w.r.t. a fixed stability_classication_x_cutoff. This function uses discrete sampling and so it
may miss the actual maximum. We use two rounds of sampling: i) a coarse-grained sampling (0.1 energy unit
intervals); and ii) finer sampling (0.01 unit intervals).
In both rounds, we choose the one corresponding to a lower value for the cutoff in cases of multiple maxima."""
# Determine the value for the fraction correct y-value (predicted) cutoff which will approximately yield the
# maximum fraction-correct value
fraction_correct_range = []
experimental_field = BenchmarkRun.get_analysis_set_fieldname('Experimental', analysis_set)
# Round 1 : Coarse sampling. Test 0.5 -> 8.0 in 0.1 increments
for z in range(5, 80):
w = float(z) / 10.0
fraction_correct_range.append((w, fraction_correct_pandas(dataframe, experimental_field, 'Predicted', x_cutoff=stability_classication_x_cutoff, y_cutoff=w, ignore_null_values=True))) # depends on [control=['for'], data=['z']]
(max_value_cutoff, max_value) = (fraction_correct_range[0][0], fraction_correct_range[0][1])
for p in fraction_correct_range:
if p[1] > max_value:
(max_value_cutoff, max_value) = (p[0], p[1]) # depends on [control=['if'], data=['max_value']] # depends on [control=['for'], data=['p']]
# Round 2 : Finer sampling. Test max_value_cutoff - 0.1 -> max_value_cutoff + 0.1 in 0.01 increments
for z in range(int((max_value_cutoff - 0.1) * 100), int((max_value_cutoff + 0.1) * 100)):
w = float(z) / 100.0
fraction_correct_range.append((w, fraction_correct_pandas(dataframe, experimental_field, 'Predicted', x_cutoff=stability_classication_x_cutoff, y_cutoff=w, ignore_null_values=True))) # depends on [control=['for'], data=['z']]
fraction_correct_range = sorted(set(fraction_correct_range)) # sort so that we find the lowest cutoff value in case of duplicate fraction correct values
(max_value_cutoff, max_value) = (fraction_correct_range[0][0], fraction_correct_range[0][1])
for p in fraction_correct_range:
if p[1] > max_value:
(max_value_cutoff, max_value) = (p[0], p[1]) # depends on [control=['if'], data=['max_value']] # depends on [control=['for'], data=['p']]
return (max_value_cutoff, max_value, fraction_correct_range) |
def constant_jump_targets(self):
    """
    Collect the statically known jump targets of this basic block.

    Combines the destination value of every recorded exit statement with
    the default (fall-through) exit target, when one exists.

    :return: set of static jump targets
    """
    targets = {stmt.dst.value for _, _, stmt in (self.exit_statements or ())}
    if self.default_exit_target is not None:
        targets.add(self.default_exit_target)
    return targets
constant[
A set of the static jump targets of the basic block.
]
variable[exits] assign[=] call[name[set], parameter[]]
if name[self].exit_statements begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b1600ac0>, <ast.Name object at 0x7da1b1600460>, <ast.Name object at 0x7da1b1600910>]]] in starred[name[self].exit_statements] begin[:]
call[name[exits].add, parameter[name[stmt_].dst.value]]
variable[default_target] assign[=] name[self].default_exit_target
if compare[name[default_target] is_not constant[None]] begin[:]
call[name[exits].add, parameter[name[default_target]]]
return[name[exits]] | keyword[def] identifier[constant_jump_targets] ( identifier[self] ):
literal[string]
identifier[exits] = identifier[set] ()
keyword[if] identifier[self] . identifier[exit_statements] :
keyword[for] identifier[_] , identifier[_] , identifier[stmt_] keyword[in] identifier[self] . identifier[exit_statements] :
identifier[exits] . identifier[add] ( identifier[stmt_] . identifier[dst] . identifier[value] )
identifier[default_target] = identifier[self] . identifier[default_exit_target]
keyword[if] identifier[default_target] keyword[is] keyword[not] keyword[None] :
identifier[exits] . identifier[add] ( identifier[default_target] )
keyword[return] identifier[exits] | def constant_jump_targets(self):
"""
A set of the static jump targets of the basic block.
"""
exits = set()
if self.exit_statements:
for (_, _, stmt_) in self.exit_statements:
exits.add(stmt_.dst.value) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
default_target = self.default_exit_target
if default_target is not None:
exits.add(default_target) # depends on [control=['if'], data=['default_target']]
return exits |
def request(self, method, api_url, params={}, **kwargs):
    """Generate the API call to the device.

    Builds the full URL from ``self.url_base`` + *api_url*, issues the
    request through a fresh ``requests`` session and returns the decoded
    body.

    :param method: HTTP verb; matched case-insensitively to the session
        method (``get``, ``post``, ...).
    :param api_url: path portion appended to ``self.url_base``.
    :param params: body parameters; JSON-encoded (after merging in any
        ``axapi_args`` kwarg) when non-empty.
        NOTE(review): mutable default argument -- safe only while nothing
        mutates it in place (merge_dicts is assumed to return a new dict);
        TODO confirm.
    :param kwargs: optional ``axapi_args``, ``payload``, ``headers``,
        ``max_retries``, ``timeout``.
    :returns: parsed JSON (dict) on success, or the raw response object
        when the body is not valid JSON.
    """
    LOG.debug("axapi_http: full url = %s", self.url_base + api_url)
    LOG.debug("axapi_http: %s url = %s", method, api_url)
    LOG.debug("axapi_http: params = %s", json.dumps(logutils.clean(params), indent=4))
    # Set "data" variable for the request: explicit params take priority;
    # otherwise fall back to a raw 'payload' kwarg (may be None).
    if params:
        extra_params = kwargs.get('axapi_args', {})
        params_copy = merge_dicts(params, extra_params)
        LOG.debug("axapi_http: params_all = %s", logutils.clean(params_copy))
        payload = json.dumps(params_copy)
    else:
        try:
            payload = kwargs.pop('payload', None)
            # NOTE(review): the merged per-call headers are stored on
            # self.headers, but the actual request below sends self.HEADERS
            # -- confirm whether the custom headers are meant to be used.
            self.headers = dict(self.HEADERS, **kwargs.pop('headers', {}))
            LOG.debug("axapi_http: headers_all = %s", logutils.clean(self.headers))
        except KeyError:
            # NOTE(review): kwargs.pop(key, default) never raises KeyError,
            # so this handler appears unreachable -- confirm intent.
            payload = None
    max_retries = kwargs.get('max_retries', self.max_retries)
    timeout = kwargs.get('timeout', self.timeout)
    # Create session to set HTTPAdapter or SSLAdapter
    session = Session()
    if self.port == 443:
        # Add adapter for any https session to force TLS1_0 connection for v21 of AXAPI
        session.mount('https://', SSLAdapter(max_retries=max_retries))
    else:
        session.mount('http://', HTTPAdapter(max_retries=max_retries))
    # Resolve e.g. 'GET' -> session.get.
    session_request = getattr(session, method.lower())
    # Make actual request and handle any errors; the session is always
    # closed, and transport errors are logged then re-raised.
    try:
        # verify=False disables TLS certificate verification for the
        # device connection.
        device_response = session_request(
            self.url_base + api_url, verify=False, data=payload, headers=self.HEADERS, timeout=timeout
        )
    except (Exception) as e:
        LOG.error("acos_client failing with error %s after %s retries", e.__class__.__name__, max_retries)
        raise e
    finally:
        session.close()
    # Log if the response is one of the known broken responses.
    # NOTE(review): this membership test uses the response object itself as
    # the lookup key -- confirm the keys of broken_replies match that.
    if device_response in broken_replies:
        device_response = broken_replies[device_response]
        LOG.debug("axapi_http: broken reply, new response: %s", logutils.clean(device_response))
    # Validate json response
    try:
        json_response = device_response.json()
        LOG.debug("axapi_http: data = %s", json.dumps(logutils.clean(json_response), indent=4))
    except ValueError as e:
        # The response is not JSON but it still succeeded.
        LOG.debug("axapi_http: json = %s", e)
        return device_response
    # Handle "fail" responses returned by AXAPI by raising the matching
    # acos exception.
    if 'response' in json_response and 'status' in json_response['response']:
        if json_response['response']['status'] == 'fail':
            acos_responses.raise_axapi_ex(json_response, action=extract_method(api_url))
    # Return json portion of response
    return json_response
constant[Generate the API call to the device.]
call[name[LOG].debug, parameter[constant[axapi_http: full url = %s], binary_operation[name[self].url_base + name[api_url]]]]
call[name[LOG].debug, parameter[constant[axapi_http: %s url = %s], name[method], name[api_url]]]
call[name[LOG].debug, parameter[constant[axapi_http: params = %s], call[name[json].dumps, parameter[call[name[logutils].clean, parameter[name[params]]]]]]]
if name[params] begin[:]
variable[extra_params] assign[=] call[name[kwargs].get, parameter[constant[axapi_args], dictionary[[], []]]]
variable[params_copy] assign[=] call[name[merge_dicts], parameter[name[params], name[extra_params]]]
call[name[LOG].debug, parameter[constant[axapi_http: params_all = %s], call[name[logutils].clean, parameter[name[params_copy]]]]]
variable[payload] assign[=] call[name[json].dumps, parameter[name[params_copy]]]
variable[max_retries] assign[=] call[name[kwargs].get, parameter[constant[max_retries], name[self].max_retries]]
variable[timeout] assign[=] call[name[kwargs].get, parameter[constant[timeout], name[self].timeout]]
variable[session] assign[=] call[name[Session], parameter[]]
if compare[name[self].port equal[==] constant[443]] begin[:]
call[name[session].mount, parameter[constant[https://], call[name[SSLAdapter], parameter[]]]]
variable[session_request] assign[=] call[name[getattr], parameter[name[session], call[name[method].lower, parameter[]]]]
<ast.Try object at 0x7da18f8120e0>
if compare[name[device_response] in name[broken_replies]] begin[:]
variable[device_response] assign[=] call[name[broken_replies]][name[device_response]]
call[name[LOG].debug, parameter[constant[axapi_http: broken reply, new response: %s], call[name[logutils].clean, parameter[name[device_response]]]]]
<ast.Try object at 0x7da1b2345300>
if <ast.BoolOp object at 0x7da1b2347a60> begin[:]
if compare[call[call[name[json_response]][constant[response]]][constant[status]] equal[==] constant[fail]] begin[:]
call[name[acos_responses].raise_axapi_ex, parameter[name[json_response]]]
return[name[json_response]] | keyword[def] identifier[request] ( identifier[self] , identifier[method] , identifier[api_url] , identifier[params] ={},** identifier[kwargs] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[self] . identifier[url_base] + identifier[api_url] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[method] , identifier[api_url] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[logutils] . identifier[clean] ( identifier[params] ), identifier[indent] = literal[int] ))
keyword[if] identifier[params] :
identifier[extra_params] = identifier[kwargs] . identifier[get] ( literal[string] ,{})
identifier[params_copy] = identifier[merge_dicts] ( identifier[params] , identifier[extra_params] )
identifier[LOG] . identifier[debug] ( literal[string] , identifier[logutils] . identifier[clean] ( identifier[params_copy] ))
identifier[payload] = identifier[json] . identifier[dumps] ( identifier[params_copy] )
keyword[else] :
keyword[try] :
identifier[payload] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[self] . identifier[headers] = identifier[dict] ( identifier[self] . identifier[HEADERS] ,** identifier[kwargs] . identifier[pop] ( literal[string] ,{}))
identifier[LOG] . identifier[debug] ( literal[string] , identifier[logutils] . identifier[clean] ( identifier[self] . identifier[headers] ))
keyword[except] identifier[KeyError] :
identifier[payload] = keyword[None]
identifier[max_retries] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[max_retries] )
identifier[timeout] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[self] . identifier[timeout] )
identifier[session] = identifier[Session] ()
keyword[if] identifier[self] . identifier[port] == literal[int] :
identifier[session] . identifier[mount] ( literal[string] , identifier[SSLAdapter] ( identifier[max_retries] = identifier[max_retries] ))
keyword[else] :
identifier[session] . identifier[mount] ( literal[string] , identifier[HTTPAdapter] ( identifier[max_retries] = identifier[max_retries] ))
identifier[session_request] = identifier[getattr] ( identifier[session] , identifier[method] . identifier[lower] ())
keyword[try] :
identifier[device_response] = identifier[session_request] (
identifier[self] . identifier[url_base] + identifier[api_url] , identifier[verify] = keyword[False] , identifier[data] = identifier[payload] , identifier[headers] = identifier[self] . identifier[HEADERS] , identifier[timeout] = identifier[timeout]
)
keyword[except] ( identifier[Exception] ) keyword[as] identifier[e] :
identifier[LOG] . identifier[error] ( literal[string] , identifier[e] . identifier[__class__] . identifier[__name__] , identifier[max_retries] )
keyword[raise] identifier[e]
keyword[finally] :
identifier[session] . identifier[close] ()
keyword[if] identifier[device_response] keyword[in] identifier[broken_replies] :
identifier[device_response] = identifier[broken_replies] [ identifier[device_response] ]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[logutils] . identifier[clean] ( identifier[device_response] ))
keyword[try] :
identifier[json_response] = identifier[device_response] . identifier[json] ()
identifier[LOG] . identifier[debug] ( literal[string] , identifier[json] . identifier[dumps] ( identifier[logutils] . identifier[clean] ( identifier[json_response] ), identifier[indent] = literal[int] ))
keyword[except] identifier[ValueError] keyword[as] identifier[e] :
identifier[LOG] . identifier[debug] ( literal[string] , identifier[e] )
keyword[return] identifier[device_response]
keyword[if] literal[string] keyword[in] identifier[json_response] keyword[and] literal[string] keyword[in] identifier[json_response] [ literal[string] ]:
keyword[if] identifier[json_response] [ literal[string] ][ literal[string] ]== literal[string] :
identifier[acos_responses] . identifier[raise_axapi_ex] ( identifier[json_response] , identifier[action] = identifier[extract_method] ( identifier[api_url] ))
keyword[return] identifier[json_response] | def request(self, method, api_url, params={}, **kwargs):
"""Generate the API call to the device."""
LOG.debug('axapi_http: full url = %s', self.url_base + api_url)
LOG.debug('axapi_http: %s url = %s', method, api_url)
LOG.debug('axapi_http: params = %s', json.dumps(logutils.clean(params), indent=4))
# Set "data" variable for the request
if params:
extra_params = kwargs.get('axapi_args', {})
params_copy = merge_dicts(params, extra_params)
LOG.debug('axapi_http: params_all = %s', logutils.clean(params_copy))
payload = json.dumps(params_copy) # depends on [control=['if'], data=[]]
else:
try:
payload = kwargs.pop('payload', None)
self.headers = dict(self.HEADERS, **kwargs.pop('headers', {}))
LOG.debug('axapi_http: headers_all = %s', logutils.clean(self.headers)) # depends on [control=['try'], data=[]]
except KeyError:
payload = None # depends on [control=['except'], data=[]]
max_retries = kwargs.get('max_retries', self.max_retries)
timeout = kwargs.get('timeout', self.timeout)
# Create session to set HTTPAdapter or SSLAdapter
session = Session()
if self.port == 443:
# Add adapter for any https session to force TLS1_0 connection for v21 of AXAPI
session.mount('https://', SSLAdapter(max_retries=max_retries)) # depends on [control=['if'], data=[]]
else:
session.mount('http://', HTTPAdapter(max_retries=max_retries))
session_request = getattr(session, method.lower())
# Make actual request and handle any errors
try:
device_response = session_request(self.url_base + api_url, verify=False, data=payload, headers=self.HEADERS, timeout=timeout) # depends on [control=['try'], data=[]]
except Exception as e:
LOG.error('acos_client failing with error %s after %s retries', e.__class__.__name__, max_retries)
raise e # depends on [control=['except'], data=['e']]
finally:
session.close()
# Log if the reponse is one of the known broken response
if device_response in broken_replies:
device_response = broken_replies[device_response]
LOG.debug('axapi_http: broken reply, new response: %s', logutils.clean(device_response)) # depends on [control=['if'], data=['device_response', 'broken_replies']]
# Validate json response
try:
json_response = device_response.json()
LOG.debug('axapi_http: data = %s', json.dumps(logutils.clean(json_response), indent=4)) # depends on [control=['try'], data=[]]
except ValueError as e:
# The response is not JSON but it still succeeded.
LOG.debug('axapi_http: json = %s', e)
return device_response # depends on [control=['except'], data=['e']]
# Handle "fail" responses returned by AXAPI
if 'response' in json_response and 'status' in json_response['response']:
if json_response['response']['status'] == 'fail':
acos_responses.raise_axapi_ex(json_response, action=extract_method(api_url)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Return json portion of response
return json_response |
def categorize_functional_groups(self, groups):
    """
    Determine classes of functional groups present in a set.

    Groups are keyed by their canonical SMILES string; occurrences that
    share a SMILES are counted together.

    :param groups: Set of functional groups (each an iterable of atom
        indices into ``self.molecule``).
    :return: dict mapping canonical SMILES -> ``{"groups": [...],
        "count": int}``: the index groups where the functional group occurs
        in the MoleculeGraph, and how many of each type of group there is.
    """
    categories = {}
    # Isomorphism matchers: edges must agree on bond "weight" (default 1),
    # nodes on atomic "specie" (default "C").
    em = iso.numerical_edge_match("weight", 1)
    nm = iso.categorical_node_match("specie", "C")
    for group in groups:
        # Materialize the group as a standalone Molecule so openbabel can
        # derive a canonical SMILES representation for it.
        atoms = [self.molecule[a] for a in group]
        species = [a.specie for a in atoms]
        coords = [a.coords for a in atoms]
        adaptor = BabelMolAdaptor(Molecule(species, coords))
        # Use Canonical SMILES to ensure uniqueness
        smiles = adaptor.pybel_mol.write("can").strip()
        if smiles in categories:
            this_subgraph = self.molgraph.graph.subgraph(list(group)).to_undirected()
            for other in categories[smiles]["groups"]:
                other_subgraph = self.molgraph.graph.subgraph(list(other)).to_undirected()
                # NOTE(review): the isomorphism result is discarded -- the
                # break only stops scanning, and the group is still counted
                # below regardless; confirm whether non-isomorphic groups
                # were meant to be categorized separately.
                if not nx.is_isomorphic(this_subgraph, other_subgraph,
                                        edge_match=em, node_match=nm):
                    break
            if group not in categories[smiles]["groups"]:
                categories[smiles]["groups"].append(group)
            categories[smiles]["count"] += 1
        else:
            # First occurrence of this SMILES: start a new category.
            categories[smiles] = {"groups": [group],
                                  "count": 1}
    return categories
constant[
Determine classes of functional groups present in a set.
:param groups: Set of functional groups.
:return: dict containing representations of the groups, the indices of
where the group occurs in the MoleculeGraph, and how many of each
type of group there is.
]
variable[categories] assign[=] dictionary[[], []]
variable[em] assign[=] call[name[iso].numerical_edge_match, parameter[constant[weight], constant[1]]]
variable[nm] assign[=] call[name[iso].categorical_node_match, parameter[constant[specie], constant[C]]]
for taget[name[group]] in starred[name[groups]] begin[:]
variable[atoms] assign[=] <ast.ListComp object at 0x7da18f09f5e0>
variable[species] assign[=] <ast.ListComp object at 0x7da18f09f970>
variable[coords] assign[=] <ast.ListComp object at 0x7da18f09caf0>
variable[adaptor] assign[=] call[name[BabelMolAdaptor], parameter[call[name[Molecule], parameter[name[species], name[coords]]]]]
variable[smiles] assign[=] call[call[name[adaptor].pybel_mol.write, parameter[constant[can]]].strip, parameter[]]
if compare[name[smiles] in name[categories]] begin[:]
variable[this_subgraph] assign[=] call[call[name[self].molgraph.graph.subgraph, parameter[call[name[list], parameter[name[group]]]]].to_undirected, parameter[]]
for taget[name[other]] in starred[call[call[name[categories]][name[smiles]]][constant[groups]]] begin[:]
variable[other_subgraph] assign[=] call[call[name[self].molgraph.graph.subgraph, parameter[call[name[list], parameter[name[other]]]]].to_undirected, parameter[]]
if <ast.UnaryOp object at 0x7da18f09e620> begin[:]
break
if compare[name[group] <ast.NotIn object at 0x7da2590d7190> call[call[name[categories]][name[smiles]]][constant[groups]]] begin[:]
call[call[call[name[categories]][name[smiles]]][constant[groups]].append, parameter[name[group]]]
<ast.AugAssign object at 0x7da18f09fbe0>
return[name[categories]] | keyword[def] identifier[categorize_functional_groups] ( identifier[self] , identifier[groups] ):
literal[string]
identifier[categories] ={}
identifier[em] = identifier[iso] . identifier[numerical_edge_match] ( literal[string] , literal[int] )
identifier[nm] = identifier[iso] . identifier[categorical_node_match] ( literal[string] , literal[string] )
keyword[for] identifier[group] keyword[in] identifier[groups] :
identifier[atoms] =[ identifier[self] . identifier[molecule] [ identifier[a] ] keyword[for] identifier[a] keyword[in] identifier[group] ]
identifier[species] =[ identifier[a] . identifier[specie] keyword[for] identifier[a] keyword[in] identifier[atoms] ]
identifier[coords] =[ identifier[a] . identifier[coords] keyword[for] identifier[a] keyword[in] identifier[atoms] ]
identifier[adaptor] = identifier[BabelMolAdaptor] ( identifier[Molecule] ( identifier[species] , identifier[coords] ))
identifier[smiles] = identifier[adaptor] . identifier[pybel_mol] . identifier[write] ( literal[string] ). identifier[strip] ()
keyword[if] identifier[smiles] keyword[in] identifier[categories] :
identifier[this_subgraph] = identifier[self] . identifier[molgraph] . identifier[graph] . identifier[subgraph] ( identifier[list] ( identifier[group] )). identifier[to_undirected] ()
keyword[for] identifier[other] keyword[in] identifier[categories] [ identifier[smiles] ][ literal[string] ]:
identifier[other_subgraph] = identifier[self] . identifier[molgraph] . identifier[graph] . identifier[subgraph] ( identifier[list] ( identifier[other] )). identifier[to_undirected] ()
keyword[if] keyword[not] identifier[nx] . identifier[is_isomorphic] ( identifier[this_subgraph] , identifier[other_subgraph] ,
identifier[edge_match] = identifier[em] , identifier[node_match] = identifier[nm] ):
keyword[break]
keyword[if] identifier[group] keyword[not] keyword[in] identifier[categories] [ identifier[smiles] ][ literal[string] ]:
identifier[categories] [ identifier[smiles] ][ literal[string] ]. identifier[append] ( identifier[group] )
identifier[categories] [ identifier[smiles] ][ literal[string] ]+= literal[int]
keyword[else] :
identifier[categories] [ identifier[smiles] ]={ literal[string] :[ identifier[group] ],
literal[string] : literal[int] }
keyword[return] identifier[categories] | def categorize_functional_groups(self, groups):
"""
Determine classes of functional groups present in a set.
:param groups: Set of functional groups.
:return: dict containing representations of the groups, the indices of
where the group occurs in the MoleculeGraph, and how many of each
type of group there is.
"""
categories = {}
em = iso.numerical_edge_match('weight', 1)
nm = iso.categorical_node_match('specie', 'C')
for group in groups:
atoms = [self.molecule[a] for a in group]
species = [a.specie for a in atoms]
coords = [a.coords for a in atoms]
adaptor = BabelMolAdaptor(Molecule(species, coords))
# Use Canonical SMILES to ensure uniqueness
smiles = adaptor.pybel_mol.write('can').strip()
if smiles in categories:
this_subgraph = self.molgraph.graph.subgraph(list(group)).to_undirected()
for other in categories[smiles]['groups']:
other_subgraph = self.molgraph.graph.subgraph(list(other)).to_undirected()
if not nx.is_isomorphic(this_subgraph, other_subgraph, edge_match=em, node_match=nm):
break # depends on [control=['if'], data=[]]
if group not in categories[smiles]['groups']:
categories[smiles]['groups'].append(group)
categories[smiles]['count'] += 1 # depends on [control=['if'], data=['group']] # depends on [control=['for'], data=['other']] # depends on [control=['if'], data=['smiles', 'categories']]
else:
categories[smiles] = {'groups': [group], 'count': 1} # depends on [control=['for'], data=['group']]
return categories |
def order_limit_buy(self, timeInForce=TIME_IN_FORCE_GTC, **params):
    """Place a new limit BUY order.

    Thin wrapper around :meth:`order_limit` that pins ``side`` to BUY.
    Any order with an icebergQty MUST have timeInForce set to GTC.

    :param symbol: required
    :type symbol: str
    :param quantity: required
    :type quantity: decimal
    :param price: required
    :type price: str
    :param timeInForce: default Good till cancelled
    :type timeInForce: str
    :param newClientOrderId: A unique id for the order. Automatically generated if not sent.
    :type newClientOrderId: str
    :param stopPrice: Used with stop orders
    :type stopPrice: decimal
    :param icebergQty: Used with iceberg orders
    :type icebergQty: decimal
    :param newOrderRespType: Set the response JSON. ACK, RESULT, or FULL; default: RESULT.
    :type newOrderRespType: str
    :param recvWindow: the number of milliseconds the request is valid for
    :type recvWindow: int
    :returns: API response
    See order endpoint for full response options
    :raises: BinanceRequestException, BinanceAPIException, BinanceOrderException, BinanceOrderMinAmountException, BinanceOrderMinPriceException, BinanceOrderMinTotalException, BinanceOrderUnknownSymbolException, BinanceOrderInactiveSymbolException
    """
    # Force the BUY side, then delegate everything else to order_limit.
    params['side'] = self.SIDE_BUY
    return self.order_limit(timeInForce=timeInForce, **params)
constant[Send in a new limit buy order
Any order with an icebergQty MUST have timeInForce set to GTC.
:param symbol: required
:type symbol: str
:param quantity: required
:type quantity: decimal
:param price: required
:type price: str
:param timeInForce: default Good till cancelled
:type timeInForce: str
:param newClientOrderId: A unique id for the order. Automatically generated if not sent.
:type newClientOrderId: str
:param stopPrice: Used with stop orders
:type stopPrice: decimal
:param icebergQty: Used with iceberg orders
:type icebergQty: decimal
:param newOrderRespType: Set the response JSON. ACK, RESULT, or FULL; default: RESULT.
:type newOrderRespType: str
:param recvWindow: the number of milliseconds the request is valid for
:type recvWindow: int
:returns: API response
See order endpoint for full response options
:raises: BinanceRequestException, BinanceAPIException, BinanceOrderException, BinanceOrderMinAmountException, BinanceOrderMinPriceException, BinanceOrderMinTotalException, BinanceOrderUnknownSymbolException, BinanceOrderInactiveSymbolException
]
call[name[params].update, parameter[dictionary[[<ast.Constant object at 0x7da18c4cfe50>], [<ast.Attribute object at 0x7da18c4cf970>]]]]
return[call[name[self].order_limit, parameter[]]] | keyword[def] identifier[order_limit_buy] ( identifier[self] , identifier[timeInForce] = identifier[TIME_IN_FORCE_GTC] ,** identifier[params] ):
literal[string]
identifier[params] . identifier[update] ({
literal[string] : identifier[self] . identifier[SIDE_BUY] ,
})
keyword[return] identifier[self] . identifier[order_limit] ( identifier[timeInForce] = identifier[timeInForce] ,** identifier[params] ) | def order_limit_buy(self, timeInForce=TIME_IN_FORCE_GTC, **params):
"""Send in a new limit buy order
Any order with an icebergQty MUST have timeInForce set to GTC.
:param symbol: required
:type symbol: str
:param quantity: required
:type quantity: decimal
:param price: required
:type price: str
:param timeInForce: default Good till cancelled
:type timeInForce: str
:param newClientOrderId: A unique id for the order. Automatically generated if not sent.
:type newClientOrderId: str
:param stopPrice: Used with stop orders
:type stopPrice: decimal
:param icebergQty: Used with iceberg orders
:type icebergQty: decimal
:param newOrderRespType: Set the response JSON. ACK, RESULT, or FULL; default: RESULT.
:type newOrderRespType: str
:param recvWindow: the number of milliseconds the request is valid for
:type recvWindow: int
:returns: API response
See order endpoint for full response options
:raises: BinanceRequestException, BinanceAPIException, BinanceOrderException, BinanceOrderMinAmountException, BinanceOrderMinPriceException, BinanceOrderMinTotalException, BinanceOrderUnknownSymbolException, BinanceOrderInactiveSymbolException
"""
params.update({'side': self.SIDE_BUY})
return self.order_limit(timeInForce=timeInForce, **params) |
def emit_event(project_slug, action_slug, payload, sender_name, sender_secret,
               event_uuid=None):
    """Emit an event for a project action.

    :param project_slug: slug of the target project
    :param action_slug: slug of the action being emitted
    :param payload: payload delivered with the action
    :param sender_name: name that identifies the sender
    :param sender_secret: secret string authenticating the sender
    :param event_uuid: optional event uuid; generated when absent
    :return: dict with the async task id and the event uuid
    raise MissingSender if sender does not exist
    raise WrongSenderSecret if sender_secret is wrong
    raise NotAllowed if sender is not allowed to emit action to project
    """
    # Resolve the project graph and authenticate the sender up front;
    # verify_sender raises before any webhook can run.
    project_graph = graph.get_project_graph(project_slug)
    project_graph.verify_sender(sender_name, sender_secret)
    action = project_graph.get_action(action_slug)
    project = project_graph.project
    # Fall back to a freshly generated uuid when the caller supplied none.
    if not event_uuid:
        event_uuid = uuid4()
    event = {
        'uuid': event_uuid,
        'project': project['slug'],
        'action': action['slug'],
    }
    # Dispatch the event to the action's webhooks (asynchronous task).
    res = exec_event(event, action['webhooks'], payload)
    logger.info('EMIT %s "%s" "%s" %s',
                event_uuid, project_slug, action_slug, json.dumps(payload))
    return {
        'task': {'id': res.id},
        'event': {'uuid': event_uuid},
    }
constant[Emit Event.
:param project_slug: the slug of the project
:param action_slug: the slug of the action
:param payload: the payload that emit with action
:param sender_name: name that identified the sender
:parma sender_secret: secret string
:return: dict with task_id and event_uuid
raise MissingSender if sender does not exist
raise WrongSenderSecret if sender_secret is wrong
raise NotAllowed if sender is not allowed to emit action to project
]
variable[project_graph] assign[=] call[name[graph].get_project_graph, parameter[name[project_slug]]]
call[name[project_graph].verify_sender, parameter[name[sender_name], name[sender_secret]]]
variable[action] assign[=] call[name[project_graph].get_action, parameter[name[action_slug]]]
variable[project] assign[=] name[project_graph].project
variable[event_uuid] assign[=] <ast.BoolOp object at 0x7da1b0a802b0>
variable[event] assign[=] dictionary[[<ast.Constant object at 0x7da1b0a83280>, <ast.Constant object at 0x7da1b0a83460>, <ast.Constant object at 0x7da1b0a83100>], [<ast.Name object at 0x7da1b0a83040>, <ast.Subscript object at 0x7da1b0a812a0>, <ast.Subscript object at 0x7da1b0a81870>]]
variable[res] assign[=] call[name[exec_event], parameter[name[event], call[name[action]][constant[webhooks]], name[payload]]]
call[name[logger].info, parameter[constant[EMIT %s "%s" "%s" %s], name[event_uuid], name[project_slug], name[action_slug], call[name[json].dumps, parameter[name[payload]]]]]
return[call[name[dict], parameter[]]] | keyword[def] identifier[emit_event] ( identifier[project_slug] , identifier[action_slug] , identifier[payload] , identifier[sender_name] , identifier[sender_secret] ,
identifier[event_uuid] = keyword[None] ):
literal[string]
identifier[project_graph] = identifier[graph] . identifier[get_project_graph] ( identifier[project_slug] )
identifier[project_graph] . identifier[verify_sender] ( identifier[sender_name] , identifier[sender_secret] )
identifier[action] = identifier[project_graph] . identifier[get_action] ( identifier[action_slug] )
identifier[project] = identifier[project_graph] . identifier[project]
identifier[event_uuid] = identifier[event_uuid] keyword[or] identifier[uuid4] ()
identifier[event] ={ literal[string] : identifier[event_uuid] , literal[string] : identifier[project] [ literal[string] ], literal[string] : identifier[action] [ literal[string] ]}
identifier[res] = identifier[exec_event] ( identifier[event] , identifier[action] [ literal[string] ], identifier[payload] )
identifier[logger] . identifier[info] ( literal[string] ,
identifier[event_uuid] , identifier[project_slug] , identifier[action_slug] , identifier[json] . identifier[dumps] ( identifier[payload] ))
keyword[return] identifier[dict] (
identifier[task] = identifier[dict] (
identifier[id] = identifier[res] . identifier[id] ,
),
identifier[event] = identifier[dict] (
identifier[uuid] = identifier[event_uuid] ,
),
) | def emit_event(project_slug, action_slug, payload, sender_name, sender_secret, event_uuid=None):
"""Emit Event.
:param project_slug: the slug of the project
:param action_slug: the slug of the action
:param payload: the payload that emit with action
:param sender_name: name that identified the sender
:parma sender_secret: secret string
:return: dict with task_id and event_uuid
raise MissingSender if sender does not exist
raise WrongSenderSecret if sender_secret is wrong
raise NotAllowed if sender is not allowed to emit action to project
"""
project_graph = graph.get_project_graph(project_slug)
project_graph.verify_sender(sender_name, sender_secret)
action = project_graph.get_action(action_slug)
project = project_graph.project
# execute event
event_uuid = event_uuid or uuid4()
event = {'uuid': event_uuid, 'project': project['slug'], 'action': action['slug']}
res = exec_event(event, action['webhooks'], payload)
logger.info('EMIT %s "%s" "%s" %s', event_uuid, project_slug, action_slug, json.dumps(payload))
return dict(task=dict(id=res.id), event=dict(uuid=event_uuid)) |
def continuous_future(self,
                      root_symbol_str,
                      offset=0,
                      roll='volume',
                      adjustment='mul'):
    """Build a continuous-contract specifier for a futures chain.

    Parameters
    ----------
    root_symbol_str : str
        Root symbol identifying the future chain.
    offset : int, optional
        Distance from the primary contract. Default is 0.
    roll : str, optional
        How rolls are determined. Default is 'volume'.
    adjustment : str, optional
        Method for adjusting lookback prices between rolls: 'mul',
        'add', or None. Default is 'mul'.

    Returns
    -------
    continuous_future : ContinuousFuture
        The continuous future specifier.
    """
    # Construction is delegated entirely to the asset finder.
    finder = self.asset_finder
    return finder.create_continuous_future(
        root_symbol_str, offset, roll, adjustment)
constant[Create a specifier for a continuous contract.
Parameters
----------
root_symbol_str : str
The root symbol for the future chain.
offset : int, optional
The distance from the primary contract. Default is 0.
roll_style : str, optional
How rolls are determined. Default is 'volume'.
adjustment : str, optional
Method for adjusting lookback prices between rolls. Options are
'mul', 'add', and None. Default is 'mul'.
Returns
-------
continuous_future : ContinuousFuture
The continuous future specifier.
]
return[call[name[self].asset_finder.create_continuous_future, parameter[name[root_symbol_str], name[offset], name[roll], name[adjustment]]]] | keyword[def] identifier[continuous_future] ( identifier[self] ,
identifier[root_symbol_str] ,
identifier[offset] = literal[int] ,
identifier[roll] = literal[string] ,
identifier[adjustment] = literal[string] ):
literal[string]
keyword[return] identifier[self] . identifier[asset_finder] . identifier[create_continuous_future] (
identifier[root_symbol_str] ,
identifier[offset] ,
identifier[roll] ,
identifier[adjustment] ,
) | def continuous_future(self, root_symbol_str, offset=0, roll='volume', adjustment='mul'):
"""Create a specifier for a continuous contract.
Parameters
----------
root_symbol_str : str
The root symbol for the future chain.
offset : int, optional
The distance from the primary contract. Default is 0.
roll_style : str, optional
How rolls are determined. Default is 'volume'.
adjustment : str, optional
Method for adjusting lookback prices between rolls. Options are
'mul', 'add', and None. Default is 'mul'.
Returns
-------
continuous_future : ContinuousFuture
The continuous future specifier.
"""
return self.asset_finder.create_continuous_future(root_symbol_str, offset, roll, adjustment) |
def increment(self, gold_set, test_set):
    """Tally one gold/test example pair into the running counts.

    Adds the sizes of *gold_set* and *test_set* to ``self.gold`` and
    ``self.test``, and the size of their intersection to ``self.correct``.
    """
    overlap = gold_set & test_set
    self.gold += len(gold_set)
    self.test += len(test_set)
    self.correct += len(overlap)
constant[Add examples from sets.]
<ast.AugAssign object at 0x7da18dc984c0>
<ast.AugAssign object at 0x7da18dc9b460>
<ast.AugAssign object at 0x7da18dc99db0> | keyword[def] identifier[increment] ( identifier[self] , identifier[gold_set] , identifier[test_set] ):
literal[string]
identifier[self] . identifier[gold] += identifier[len] ( identifier[gold_set] )
identifier[self] . identifier[test] += identifier[len] ( identifier[test_set] )
identifier[self] . identifier[correct] += identifier[len] ( identifier[gold_set] & identifier[test_set] ) | def increment(self, gold_set, test_set):
"""Add examples from sets."""
self.gold += len(gold_set)
self.test += len(test_set)
self.correct += len(gold_set & test_set) |
def reply_topic(self, topic_id, content):
    """Reply to a group forum topic.

    :param topic_id: identifier of the thread being replied to
    :param content: body text of the reply
    :return: the new post's id, or ``None`` if the API reports failure
    """
    data = {
        'body': content,
        # Echo the csrftoken cookie back as the form field -- the
        # Django-style CSRF handshake the site expects.
        'csrfmiddlewaretoken': self._request.cookies.get('csrftoken')
    }
    url = 'http://www.shanbay.com/api/v1/forum/thread/%s/post/' % topic_id
    r = self.request(url, 'post', data=data)
    j = r.json()
    # status_code == 0 is the API's success flag; any other value falls
    # through and returns None implicitly.
    if j['status_code'] == 0:
        return j['data']['thread']['id'] | def function[reply_topic, parameter[self, topic_id, content]]:
constant[小组回帖
:return: 帖子 id 或 ``None``
]
variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b257cf10>, <ast.Constant object at 0x7da1b257d510>], [<ast.Name object at 0x7da1b257c370>, <ast.Call object at 0x7da1b257fa90>]]
variable[url] assign[=] binary_operation[constant[http://www.shanbay.com/api/v1/forum/thread/%s/post/] <ast.Mod object at 0x7da2590d6920> name[topic_id]]
variable[r] assign[=] call[name[self].request, parameter[name[url], constant[post]]]
variable[j] assign[=] call[name[r].json, parameter[]]
if compare[call[name[j]][constant[status_code]] equal[==] constant[0]] begin[:]
return[call[call[call[name[j]][constant[data]]][constant[thread]]][constant[id]]] | keyword[def] identifier[reply_topic] ( identifier[self] , identifier[topic_id] , identifier[content] ):
literal[string]
identifier[data] ={
literal[string] : identifier[content] ,
literal[string] : identifier[self] . identifier[_request] . identifier[cookies] . identifier[get] ( literal[string] )
}
identifier[url] = literal[string] % identifier[topic_id]
identifier[r] = identifier[self] . identifier[request] ( identifier[url] , literal[string] , identifier[data] = identifier[data] )
identifier[j] = identifier[r] . identifier[json] ()
keyword[if] identifier[j] [ literal[string] ]== literal[int] :
keyword[return] identifier[j] [ literal[string] ][ literal[string] ][ literal[string] ] | def reply_topic(self, topic_id, content):
"""小组回帖
:return: 帖子 id 或 ``None``
"""
data = {'body': content, 'csrfmiddlewaretoken': self._request.cookies.get('csrftoken')}
url = 'http://www.shanbay.com/api/v1/forum/thread/%s/post/' % topic_id
r = self.request(url, 'post', data=data)
j = r.json()
if j['status_code'] == 0:
return j['data']['thread']['id'] # depends on [control=['if'], data=[]] |
def StatusActionFactory(version, build=None, base_class=BaseStatusAction): # noqa
    """
    A factory for creating a new status action class specific to a service.

    :param version: The service version
    :type version: union[str, unicode]
    :param build: The optional service build identifier
    :type build: union[str, unicode]
    :param base_class: The optional base class, to override `BaseStatusAction` as the base class
    :type base_class: BaseStatusAction

    :return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters
    :rtype: class
    """
    # Build the subclass dynamically via type(); the str() wrappers
    # presumably force native-str class/attribute names under Python 2's
    # unicode_literals -- TODO confirm.
    return type(
        str('StatusAction'),
        (base_class, ),
        {str('_version'): version, str('_build'): build},
    ) | def function[StatusActionFactory, parameter[version, build, base_class]]:
constant[
A factory for creating a new status action class specific to a service.
:param version: The service version
:type version: union[str, unicode]
:param build: The optional service build identifier
:type build: union[str, unicode]
:param base_class: The optional base class, to override `BaseStatusAction` as the base class
:type base_class: BaseStatusAction
:return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters
:rtype: class
]
return[call[name[type], parameter[call[name[str], parameter[constant[StatusAction]]], tuple[[<ast.Name object at 0x7da18ede5cc0>]], dictionary[[<ast.Call object at 0x7da18ede7c10>, <ast.Call object at 0x7da18ede5fc0>], [<ast.Name object at 0x7da18ede4820>, <ast.Name object at 0x7da18ede7220>]]]]] | keyword[def] identifier[StatusActionFactory] ( identifier[version] , identifier[build] = keyword[None] , identifier[base_class] = identifier[BaseStatusAction] ):
literal[string]
keyword[return] identifier[type] (
identifier[str] ( literal[string] ),
( identifier[base_class] ,),
{ identifier[str] ( literal[string] ): identifier[version] , identifier[str] ( literal[string] ): identifier[build] },
) | def StatusActionFactory(version, build=None, base_class=BaseStatusAction): # noqa
'\n A factory for creating a new status action class specific to a service.\n\n :param version: The service version\n :type version: union[str, unicode]\n :param build: The optional service build identifier\n :type build: union[str, unicode]\n :param base_class: The optional base class, to override `BaseStatusAction` as the base class\n :type base_class: BaseStatusAction\n\n :return: A class named `StatusAction`, extending `base_class`, with version and build matching the input parameters\n :rtype: class\n '
return type(str('StatusAction'), (base_class,), {str('_version'): version, str('_build'): build}) |
def claim_messages(self, ttl, grace, count=None):
    """
    Claims up to `count` unclaimed messages from this queue. If count is
    not specified, the default is to claim 10 messages.

    The `ttl` parameter specifies how long the server should wait before
    releasing the claim. The ttl value MUST be between 60 and 43200 seconds.

    The `grace` parameter is the message grace period in seconds. The value
    of grace MUST be between 60 and 43200 seconds. The server extends the
    lifetime of claimed messages to be at least as long as the lifetime of
    the claim itself, plus a specified grace period to deal with crashed
    workers (up to 1209600 or 14 days including claim lifetime). If a
    claimed message would normally live longer than the grace period, its
    expiration will not be adjusted.

    Returns a QueueClaim object, whose 'messages' attribute contains the
    list of QueueMessage objects representing the claimed messages.
    """
    # No client-side range validation: the ttl/grace bounds documented
    # above are enforced by the claim manager / server.
    return self._claim_manager.claim(ttl, grace, count=count) | def function[claim_messages, parameter[self, ttl, grace, count]]:
constant[
Claims up to `count` unclaimed messages from this queue. If count is
not specified, the default is to claim 10 messages.
The `ttl` parameter specifies how long the server should wait before
releasing the claim. The ttl value MUST be between 60 and 43200 seconds.
The `grace` parameter is the message grace period in seconds. The value
of grace MUST be between 60 and 43200 seconds. The server extends the
lifetime of claimed messages to be at least as long as the lifetime of
the claim itself, plus a specified grace period to deal with crashed
workers (up to 1209600 or 14 days including claim lifetime). If a
claimed message would normally live longer than the grace period, its
expiration will not be adjusted.
Returns a QueueClaim object, whose 'messages' attribute contains the
list of QueueMessage objects representing the claimed messages.
]
return[call[name[self]._claim_manager.claim, parameter[name[ttl], name[grace]]]] | keyword[def] identifier[claim_messages] ( identifier[self] , identifier[ttl] , identifier[grace] , identifier[count] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[_claim_manager] . identifier[claim] ( identifier[ttl] , identifier[grace] , identifier[count] = identifier[count] ) | def claim_messages(self, ttl, grace, count=None):
"""
Claims up to `count` unclaimed messages from this queue. If count is
not specified, the default is to claim 10 messages.
The `ttl` parameter specifies how long the server should wait before
releasing the claim. The ttl value MUST be between 60 and 43200 seconds.
The `grace` parameter is the message grace period in seconds. The value
of grace MUST be between 60 and 43200 seconds. The server extends the
lifetime of claimed messages to be at least as long as the lifetime of
the claim itself, plus a specified grace period to deal with crashed
workers (up to 1209600 or 14 days including claim lifetime). If a
claimed message would normally live longer than the grace period, its
expiration will not be adjusted.
Returns a QueueClaim object, whose 'messages' attribute contains the
list of QueueMessage objects representing the claimed messages.
"""
return self._claim_manager.claim(ttl, grace, count=count) |
def head_object_async(self, path, **kwds):
    """HEAD an object.

    Depending on request headers, HEAD returns various object properties,
    e.g. Content-Length, Last-Modified, and ETag.

    Note: No payload argument is supported.
    """
    # Extra keyword arguments (e.g. request headers) are forwarded to the
    # async request helper untouched.
    return self.do_request_async(self.api_url + path, 'HEAD', **kwds) | def function[head_object_async, parameter[self, path]]:
constant[HEAD an object.
Depending on request headers, HEAD returns various object properties,
e.g. Content-Length, Last-Modified, and ETag.
Note: No payload argument is supported.
]
return[call[name[self].do_request_async, parameter[binary_operation[name[self].api_url + name[path]], constant[HEAD]]]] | keyword[def] identifier[head_object_async] ( identifier[self] , identifier[path] ,** identifier[kwds] ):
literal[string]
keyword[return] identifier[self] . identifier[do_request_async] ( identifier[self] . identifier[api_url] + identifier[path] , literal[string] ,** identifier[kwds] ) | def head_object_async(self, path, **kwds):
"""HEAD an object.
Depending on request headers, HEAD returns various object properties,
e.g. Content-Length, Last-Modified, and ETag.
Note: No payload argument is supported.
"""
return self.do_request_async(self.api_url + path, 'HEAD', **kwds) |
def np2model_tensor(a):
    """Transform numpy array `a` to a tensor of the same type."""
    # model_type maps the numpy dtype onto the model's tensor dtype; it is
    # apparently allowed to return a falsy value when no cast is needed --
    # TODO confirm its contract.
    dtype = model_type(a.dtype)
    res = as_tensor(a)
    if not dtype: return res
    return res.type(dtype) | def function[np2model_tensor, parameter[a]]:
constant[Tranform numpy array `a` to a tensor of the same type.]
variable[dtype] assign[=] call[name[model_type], parameter[name[a].dtype]]
variable[res] assign[=] call[name[as_tensor], parameter[name[a]]]
if <ast.UnaryOp object at 0x7da1b1e9a050> begin[:]
return[name[res]]
return[call[name[res].type, parameter[name[dtype]]]] | keyword[def] identifier[np2model_tensor] ( identifier[a] ):
literal[string]
identifier[dtype] = identifier[model_type] ( identifier[a] . identifier[dtype] )
identifier[res] = identifier[as_tensor] ( identifier[a] )
keyword[if] keyword[not] identifier[dtype] : keyword[return] identifier[res]
keyword[return] identifier[res] . identifier[type] ( identifier[dtype] ) | def np2model_tensor(a):
"""Tranform numpy array `a` to a tensor of the same type."""
dtype = model_type(a.dtype)
res = as_tensor(a)
if not dtype:
return res # depends on [control=['if'], data=[]]
return res.type(dtype) |
def resource_url(self):
    """str: Root URL for IBM Streams REST API"""
    # Resolve lazily and memoize in _resource_url; IAM credentials and
    # classic credentials use different discovery helpers.
    if self._iam:
        self._resource_url = self._resource_url or _get_iam_rest_api_url_from_creds(self.rest_client, self.credentials)
    else:
        self._resource_url = self._resource_url or _get_rest_api_url_from_creds(self.session, self.credentials)
    return self._resource_url | def function[resource_url, parameter[self]]:
constant[str: Root URL for IBM Streams REST API]
if name[self]._iam begin[:]
name[self]._resource_url assign[=] <ast.BoolOp object at 0x7da18f00e890>
return[name[self]._resource_url] | keyword[def] identifier[resource_url] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_iam] :
identifier[self] . identifier[_resource_url] = identifier[self] . identifier[_resource_url] keyword[or] identifier[_get_iam_rest_api_url_from_creds] ( identifier[self] . identifier[rest_client] , identifier[self] . identifier[credentials] )
keyword[else] :
identifier[self] . identifier[_resource_url] = identifier[self] . identifier[_resource_url] keyword[or] identifier[_get_rest_api_url_from_creds] ( identifier[self] . identifier[session] , identifier[self] . identifier[credentials] )
keyword[return] identifier[self] . identifier[_resource_url] | def resource_url(self):
"""str: Root URL for IBM Streams REST API"""
if self._iam:
self._resource_url = self._resource_url or _get_iam_rest_api_url_from_creds(self.rest_client, self.credentials) # depends on [control=['if'], data=[]]
else:
self._resource_url = self._resource_url or _get_rest_api_url_from_creds(self.session, self.credentials)
return self._resource_url |
def calculate(self):
    """
    Calculate the estimated happiness of a person living in a world.

    Earlier per-preference form kept for reference:
    self._update_pref(self.person.prefs['tax_min'], self.person.prefs['tax_max'], self.world.tax_rate)
    self._update_pref(self.person.prefs['tradition'], self.person.prefs['tradition'], self.world.tradition)
    self._update_pref(self.person.prefs['equity'], self.person.prefs['equity'], self.world.equity)
    """
    # Reset the score, then fold every factor's preference band into it.
    self.rating = 0
    for f in self.factors:
        # NOTE(review): every factor is scored against world.tax_rate; the
        # legacy calls above suggest tradition/equity should use their own
        # world fields -- confirm before changing.
        self._update_pref(f.min, f.max, self.world.tax_rate) | def function[calculate, parameter[self]]:
constant[
calculates the estimated happiness of a person
living in a world
self._update_pref(self.person.prefs['tax_min'], self.person.prefs['tax_max'], self.world.tax_rate)
self._update_pref(self.person.prefs['tradition'], self.person.prefs['tradition'], self.world.tradition)
self._update_pref(self.person.prefs['equity'], self.person.prefs['equity'], self.world.equity)
]
name[self].rating assign[=] constant[0]
for taget[name[f]] in starred[name[self].factors] begin[:]
call[name[self]._update_pref, parameter[name[f].min, name[f].max, name[self].world.tax_rate]] | keyword[def] identifier[calculate] ( identifier[self] ):
literal[string]
identifier[self] . identifier[rating] = literal[int]
keyword[for] identifier[f] keyword[in] identifier[self] . identifier[factors] :
identifier[self] . identifier[_update_pref] ( identifier[f] . identifier[min] , identifier[f] . identifier[max] , identifier[self] . identifier[world] . identifier[tax_rate] ) | def calculate(self):
"""
calculates the estimated happiness of a person
living in a world
self._update_pref(self.person.prefs['tax_min'], self.person.prefs['tax_max'], self.world.tax_rate)
self._update_pref(self.person.prefs['tradition'], self.person.prefs['tradition'], self.world.tradition)
self._update_pref(self.person.prefs['equity'], self.person.prefs['equity'], self.world.equity)
"""
self.rating = 0
for f in self.factors:
self._update_pref(f.min, f.max, self.world.tax_rate) # depends on [control=['for'], data=['f']] |
def EQ106(T, Tc, A, B, C=0, D=0, E=0):
    r'''DIPPR Equation #106. Often used in calculating liquid surface tension,
    and heat of vaporization.

    Only parameters A and B are required; many fits include no
    further parameters. Critical temperature is also required.

    .. math::
        Y = A(1-T_r)^{B + C T_r + D T_r^2 + E T_r^3}

        Tr = \frac{T}{Tc}

    Parameters
    ----------
    T : float
        Temperature, [K]
    Tc : float
        Critical temperature, [K]
    A-E : float
        Parameter for the equation; chemical and property specific [-]

    Returns
    -------
    Y : float
        Property [constant-specific]

    Notes
    -----
    The integral could not be found, but the integral over T actually could,
    again in terms of hypergeometric functions.

    Examples
    --------
    Water surface tension; DIPPR coefficients normally in N/m.

    >>> EQ106(300, 647.096, 0.17766, 2.567, -3.3377, 1.9699)
    0.07231499373541

    References
    ----------
    .. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
       DIPPR/AIChE
    '''
    Tr = T/Tc
    # Exponent evaluated in Horner form: B + C*Tr + D*Tr**2 + E*Tr**3.
    return A*(1. - Tr)**(B + Tr*(C + Tr*(D + E*Tr))) | def function[EQ106, parameter[T, Tc, A, B, C, D, E]]:
constant[DIPPR Equation #106. Often used in calculating liquid surface tension,
and heat of vaporization.
Only parameters A and B parameters are required; many fits include no
further parameters. Critical temperature is also required.
.. math::
Y = A(1-T_r)^{B + C T_r + D T_r^2 + E T_r^3}
Tr = \frac{T}{Tc}
Parameters
----------
T : float
Temperature, [K]
Tc : float
Critical temperature, [K]
A-D : float
Parameter for the equation; chemical and property specific [-]
Returns
-------
Y : float
Property [constant-specific]
Notes
-----
The integral could not be found, but the integral over T actually could,
again in terms of hypergeometric functions.
Examples
--------
Water surface tension; DIPPR coefficients normally in Pa*s.
>>> EQ106(300, 647.096, 0.17766, 2.567, -3.3377, 1.9699)
0.07231499373541
References
----------
.. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
DIPPR/AIChE
]
variable[Tr] assign[=] binary_operation[name[T] / name[Tc]]
return[binary_operation[name[A] * binary_operation[binary_operation[constant[1.0] - name[Tr]] ** binary_operation[name[B] + binary_operation[name[Tr] * binary_operation[name[C] + binary_operation[name[Tr] * binary_operation[name[D] + binary_operation[name[E] * name[Tr]]]]]]]]]] | keyword[def] identifier[EQ106] ( identifier[T] , identifier[Tc] , identifier[A] , identifier[B] , identifier[C] = literal[int] , identifier[D] = literal[int] , identifier[E] = literal[int] ):
literal[string]
identifier[Tr] = identifier[T] / identifier[Tc]
keyword[return] identifier[A] *( literal[int] - identifier[Tr] )**( identifier[B] + identifier[Tr] *( identifier[C] + identifier[Tr] *( identifier[D] + identifier[E] * identifier[Tr] ))) | def EQ106(T, Tc, A, B, C=0, D=0, E=0):
"""DIPPR Equation #106. Often used in calculating liquid surface tension,
and heat of vaporization.
Only parameters A and B parameters are required; many fits include no
further parameters. Critical temperature is also required.
.. math::
Y = A(1-T_r)^{B + C T_r + D T_r^2 + E T_r^3}
Tr = \\frac{T}{Tc}
Parameters
----------
T : float
Temperature, [K]
Tc : float
Critical temperature, [K]
A-D : float
Parameter for the equation; chemical and property specific [-]
Returns
-------
Y : float
Property [constant-specific]
Notes
-----
The integral could not be found, but the integral over T actually could,
again in terms of hypergeometric functions.
Examples
--------
Water surface tension; DIPPR coefficients normally in Pa*s.
>>> EQ106(300, 647.096, 0.17766, 2.567, -3.3377, 1.9699)
0.07231499373541
References
----------
.. [1] Design Institute for Physical Properties, 1996. DIPPR Project 801
DIPPR/AIChE
"""
Tr = T / Tc
return A * (1.0 - Tr) ** (B + Tr * (C + Tr * (D + E * Tr))) |
def get_es_action_item(data_item, action_settings, es_type, id_field=None):
    ''' Build a single Elasticsearch bulk-action item.

    Starts from a copy of ``action_settings`` (so the shared template is
    never mutated), selects a document ``_id``, attaches the payload as
    ``_source`` and stamps the document ``_type``.

    :param data_item: source record; may carry 'id' and/or 'data' keys
    :param action_settings: template dict; must contain '_index'
    :param es_type: Elasticsearch document type for '_type'
    :param id_field: optional key used to look the id up in data_item
    :return: dict ready to append to a bulk action list
    '''
    action_item = dict.copy(action_settings)
    if id_field is not None:
        # An explicit id field wins; get_dict_key presumably yields the
        # matches for a (possibly nested) key -- TODO confirm its contract.
        id_val = first(list(get_dict_key(data_item, id_field)))
        if id_val is not None:
            action_item['_id'] = id_val
    elif data_item.get('id'):
        if data_item['id'].startswith("%s/" % action_settings['_index']):
            # Drop the first two path segments (the index and, presumably,
            # the type) from a full URI-style id.
            action_item['_id'] = "/".join(data_item['id'].split("/")[2:])
        else:
            action_item['_id'] = data_item['id']
    if data_item.get('data'):
        action_item['_source'] = data_item['data']
    else:
        # No 'data' wrapper: index the whole record as the document body.
        action_item['_source'] = data_item
    action_item['_type'] = es_type
    return action_item | def function[get_es_action_item, parameter[data_item, action_settings, es_type, id_field]]:
constant[ This method will return an item formated and ready to append
to the action list ]
variable[action_item] assign[=] call[name[dict].copy, parameter[name[action_settings]]]
if compare[name[id_field] is_not constant[None]] begin[:]
variable[id_val] assign[=] call[name[first], parameter[call[name[list], parameter[call[name[get_dict_key], parameter[name[data_item], name[id_field]]]]]]]
if compare[name[id_val] is_not constant[None]] begin[:]
call[name[action_item]][constant[_id]] assign[=] name[id_val]
if call[name[data_item].get, parameter[constant[data]]] begin[:]
call[name[action_item]][constant[_source]] assign[=] call[name[data_item]][constant[data]]
call[name[action_item]][constant[_type]] assign[=] name[es_type]
return[name[action_item]] | keyword[def] identifier[get_es_action_item] ( identifier[data_item] , identifier[action_settings] , identifier[es_type] , identifier[id_field] = keyword[None] ):
literal[string]
identifier[action_item] = identifier[dict] . identifier[copy] ( identifier[action_settings] )
keyword[if] identifier[id_field] keyword[is] keyword[not] keyword[None] :
identifier[id_val] = identifier[first] ( identifier[list] ( identifier[get_dict_key] ( identifier[data_item] , identifier[id_field] )))
keyword[if] identifier[id_val] keyword[is] keyword[not] keyword[None] :
identifier[action_item] [ literal[string] ]= identifier[id_val]
keyword[elif] identifier[data_item] . identifier[get] ( literal[string] ):
keyword[if] identifier[data_item] [ literal[string] ]. identifier[startswith] ( literal[string] % identifier[action_settings] [ literal[string] ]):
identifier[action_item] [ literal[string] ]= literal[string] . identifier[join] ( identifier[data_item] [ literal[string] ]. identifier[split] ( literal[string] )[ literal[int] :])
keyword[else] :
identifier[action_item] [ literal[string] ]= identifier[data_item] [ literal[string] ]
keyword[if] identifier[data_item] . identifier[get] ( literal[string] ):
identifier[action_item] [ literal[string] ]= identifier[data_item] [ literal[string] ]
keyword[else] :
identifier[action_item] [ literal[string] ]= identifier[data_item]
identifier[action_item] [ literal[string] ]= identifier[es_type]
keyword[return] identifier[action_item] | def get_es_action_item(data_item, action_settings, es_type, id_field=None):
""" This method will return an item formated and ready to append
to the action list """
action_item = dict.copy(action_settings)
if id_field is not None:
id_val = first(list(get_dict_key(data_item, id_field)))
if id_val is not None:
action_item['_id'] = id_val # depends on [control=['if'], data=['id_val']] # depends on [control=['if'], data=['id_field']]
elif data_item.get('id'):
if data_item['id'].startswith('%s/' % action_settings['_index']):
action_item['_id'] = '/'.join(data_item['id'].split('/')[2:]) # depends on [control=['if'], data=[]]
else:
action_item['_id'] = data_item['id'] # depends on [control=['if'], data=[]]
if data_item.get('data'):
action_item['_source'] = data_item['data'] # depends on [control=['if'], data=[]]
else:
action_item['_source'] = data_item
action_item['_type'] = es_type
return action_item |
def _create_ca_file(anchor_list, filename):
    """
    Concatenate all the certificates (PEM format for the export) in
    'anchor_list' and write the result to file 'filename'. On success
    'filename' is returned, None otherwise.

    If you are used to OpenSSL tools, this function builds a CAfile
    that can be used for certificate and CRL check.
    """
    try:
        with open(filename, "w") as f:
            for a in anchor_list:
                s = a.output(fmt="PEM")
                f.write(s)
    # Only filesystem errors count as failure; anything raised by
    # a.output() propagates to the caller.
    except IOError:
        return None
    return filename | def function[_create_ca_file, parameter[anchor_list, filename]]:
constant[
Concatenate all the certificates (PEM format for the export) in
'anchor_list' and write the result to file 'filename'. On success
'filename' is returned, None otherwise.
If you are used to OpenSSL tools, this function builds a CAfile
that can be used for certificate and CRL check.
]
<ast.Try object at 0x7da1b21e2e90>
return[name[filename]] | keyword[def] identifier[_create_ca_file] ( identifier[anchor_list] , identifier[filename] ):
literal[string]
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] :
keyword[for] identifier[a] keyword[in] identifier[anchor_list] :
identifier[s] = identifier[a] . identifier[output] ( identifier[fmt] = literal[string] )
identifier[f] . identifier[write] ( identifier[s] )
keyword[except] identifier[IOError] :
keyword[return] keyword[None]
keyword[return] identifier[filename] | def _create_ca_file(anchor_list, filename):
"""
Concatenate all the certificates (PEM format for the export) in
'anchor_list' and write the result to file 'filename'. On success
'filename' is returned, None otherwise.
If you are used to OpenSSL tools, this function builds a CAfile
that can be used for certificate and CRL check.
"""
try:
with open(filename, 'w') as f:
for a in anchor_list:
s = a.output(fmt='PEM')
f.write(s) # depends on [control=['for'], data=['a']] # depends on [control=['with'], data=['f']] # depends on [control=['try'], data=[]]
except IOError:
return None # depends on [control=['except'], data=[]]
return filename |
def find_lexer_class_for_filename(_fn, code=None):
    """Get a lexer for a filename.

    If multiple lexers match the filename pattern, use ``analyse_text()`` to
    figure out which one is more appropriate.

    Returns None if not found.
    """
    matches = []
    fn = basename(_fn)
    # Built-in lexers are registered lazily: load a lexer module only when
    # one of its filename patterns actually matches.
    for modname, name, _, filenames, _ in itervalues(LEXERS):
        for filename in filenames:
            if _fn_matches(fn, filename):
                if name not in _lexer_cache:
                    _load_lexers(modname)
                matches.append((_lexer_cache[name], filename))
    # Plugin lexers expose their classes directly; no lazy loading needed.
    for cls in find_plugin_lexers():
        for filename in cls.filenames:
            if _fn_matches(fn, filename):
                matches.append((cls, filename))
    if sys.version_info > (3,) and isinstance(code, bytes):
        # decode it, since all analyse_text functions expect unicode
        code = guess_decode(code)
    def get_rating(info):
        cls, filename = info
        # explicit patterns get a bonus
        bonus = '*' not in filename and 0.5 or 0
        # The class _always_ defines analyse_text because it's included in
        # the Lexer class. The default implementation returns None which
        # gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
        # to find lexers which need it overridden.
        if code:
            return cls.analyse_text(code) + bonus, cls.__name__
        return cls.priority + bonus, cls.__name__
    if matches:
        # Highest rating last; ties broken by class name via the rating tuple.
        matches.sort(key=get_rating)
        # print "Possible lexers, after sort:", matches
        return matches[-1][0] | def function[find_lexer_class_for_filename, parameter[_fn, code]]:
constant[Get a lexer for a filename.
If multiple lexers match the filename pattern, use ``analyse_text()`` to
figure out which one is more appropriate.
Returns None if not found.
]
variable[matches] assign[=] list[[]]
variable[fn] assign[=] call[name[basename], parameter[name[_fn]]]
for taget[tuple[[<ast.Name object at 0x7da2047eb460>, <ast.Name object at 0x7da2047e8550>, <ast.Name object at 0x7da2047e8a60>, <ast.Name object at 0x7da2047eb0a0>, <ast.Name object at 0x7da2047ea8f0>]]] in starred[call[name[itervalues], parameter[name[LEXERS]]]] begin[:]
for taget[name[filename]] in starred[name[filenames]] begin[:]
if call[name[_fn_matches], parameter[name[fn], name[filename]]] begin[:]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[_lexer_cache]] begin[:]
call[name[_load_lexers], parameter[name[modname]]]
call[name[matches].append, parameter[tuple[[<ast.Subscript object at 0x7da2047e9f60>, <ast.Name object at 0x7da2047e8370>]]]]
for taget[name[cls]] in starred[call[name[find_plugin_lexers], parameter[]]] begin[:]
for taget[name[filename]] in starred[name[cls].filenames] begin[:]
if call[name[_fn_matches], parameter[name[fn], name[filename]]] begin[:]
call[name[matches].append, parameter[tuple[[<ast.Name object at 0x7da2047ea350>, <ast.Name object at 0x7da2047e9ea0>]]]]
if <ast.BoolOp object at 0x7da2047e8c70> begin[:]
variable[code] assign[=] call[name[guess_decode], parameter[name[code]]]
def function[get_rating, parameter[info]]:
<ast.Tuple object at 0x7da1b11f4ac0> assign[=] name[info]
variable[bonus] assign[=] <ast.BoolOp object at 0x7da1b11f6500>
if name[code] begin[:]
return[tuple[[<ast.BinOp object at 0x7da1b11f4940>, <ast.Attribute object at 0x7da1b11f4c10>]]]
return[tuple[[<ast.BinOp object at 0x7da1b11f4220>, <ast.Attribute object at 0x7da1b11f6740>]]]
if name[matches] begin[:]
call[name[matches].sort, parameter[]]
return[call[call[name[matches]][<ast.UnaryOp object at 0x7da1b11f59c0>]][constant[0]]] | keyword[def] identifier[find_lexer_class_for_filename] ( identifier[_fn] , identifier[code] = keyword[None] ):
literal[string]
identifier[matches] =[]
identifier[fn] = identifier[basename] ( identifier[_fn] )
keyword[for] identifier[modname] , identifier[name] , identifier[_] , identifier[filenames] , identifier[_] keyword[in] identifier[itervalues] ( identifier[LEXERS] ):
keyword[for] identifier[filename] keyword[in] identifier[filenames] :
keyword[if] identifier[_fn_matches] ( identifier[fn] , identifier[filename] ):
keyword[if] identifier[name] keyword[not] keyword[in] identifier[_lexer_cache] :
identifier[_load_lexers] ( identifier[modname] )
identifier[matches] . identifier[append] (( identifier[_lexer_cache] [ identifier[name] ], identifier[filename] ))
keyword[for] identifier[cls] keyword[in] identifier[find_plugin_lexers] ():
keyword[for] identifier[filename] keyword[in] identifier[cls] . identifier[filenames] :
keyword[if] identifier[_fn_matches] ( identifier[fn] , identifier[filename] ):
identifier[matches] . identifier[append] (( identifier[cls] , identifier[filename] ))
keyword[if] identifier[sys] . identifier[version_info] >( literal[int] ,) keyword[and] identifier[isinstance] ( identifier[code] , identifier[bytes] ):
identifier[code] = identifier[guess_decode] ( identifier[code] )
keyword[def] identifier[get_rating] ( identifier[info] ):
identifier[cls] , identifier[filename] = identifier[info]
identifier[bonus] = literal[string] keyword[not] keyword[in] identifier[filename] keyword[and] literal[int] keyword[or] literal[int]
keyword[if] identifier[code] :
keyword[return] identifier[cls] . identifier[analyse_text] ( identifier[code] )+ identifier[bonus] , identifier[cls] . identifier[__name__]
keyword[return] identifier[cls] . identifier[priority] + identifier[bonus] , identifier[cls] . identifier[__name__]
keyword[if] identifier[matches] :
identifier[matches] . identifier[sort] ( identifier[key] = identifier[get_rating] )
keyword[return] identifier[matches] [- literal[int] ][ literal[int] ] | def find_lexer_class_for_filename(_fn, code=None):
"""Get a lexer for a filename.
If multiple lexers match the filename pattern, use ``analyse_text()`` to
figure out which one is more appropriate.
Returns None if not found.
"""
matches = []
fn = basename(_fn)
for (modname, name, _, filenames, _) in itervalues(LEXERS):
for filename in filenames:
if _fn_matches(fn, filename):
if name not in _lexer_cache:
_load_lexers(modname) # depends on [control=['if'], data=[]]
matches.append((_lexer_cache[name], filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=[]]
for cls in find_plugin_lexers():
for filename in cls.filenames:
if _fn_matches(fn, filename):
matches.append((cls, filename)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['filename']] # depends on [control=['for'], data=['cls']]
if sys.version_info > (3,) and isinstance(code, bytes):
# decode it, since all analyse_text functions expect unicode
code = guess_decode(code) # depends on [control=['if'], data=[]]
def get_rating(info):
(cls, filename) = info
# explicit patterns get a bonus
bonus = '*' not in filename and 0.5 or 0
# The class _always_ defines analyse_text because it's included in
# the Lexer class. The default implementation returns None which
# gets turned into 0.0. Run scripts/detect_missing_analyse_text.py
# to find lexers which need it overridden.
if code:
return (cls.analyse_text(code) + bonus, cls.__name__) # depends on [control=['if'], data=[]]
return (cls.priority + bonus, cls.__name__)
if matches:
matches.sort(key=get_rating)
# print "Possible lexers, after sort:", matches
return matches[-1][0] # depends on [control=['if'], data=[]] |
def verify_signature(self, signedtext, cert_file=None, cert_type='pem', node_name=NODE_NAME, node_id=None, id_attr=''):
    """Check the XML signature on *signedtext*.

    :param signedtext: the signed XML document as a string
    :param cert_file: public key used for verification; falls back to this
        instance's configured certificate when omitted (testing convenience)
    :param cert_type: file type of the certificate (default 'pem')
    :param node_name: name of the class that was signed
    :param node_id: identifier of the signed node
    :param id_attr: attribute name of the identifier, normally one of
        'id', 'Id' or 'ID'; defaults to the instance's configured attribute
    :return: Boolean True if the signature was correct otherwise False.
    """
    # No certificate supplied: verify against our own key material.
    # Only sensible for testing -- real peers sign with their own keys.
    if not cert_file:
        cert_file = self.cert_file
        cert_type = self.cert_type
    id_attr = id_attr or self.id_attr
    return self.crypto.validate_signature(
        signedtext,
        cert_file=cert_file,
        cert_type=cert_type,
        node_name=node_name,
        node_id=node_id,
        id_attr=id_attr)
constant[ Verifies the signature of a XML document.
:param signedtext: The XML document as a string
:param cert_file: The public key that was used to sign the document
:param cert_type: The file type of the certificate
:param node_name: The name of the class that is signed
:param node_id: The identifier of the node
:param id_attr: The attribute name for the identifier, normally one of
'id','Id' or 'ID'
:return: Boolean True if the signature was correct otherwise False.
]
if <ast.UnaryOp object at 0x7da1b20b6b30> begin[:]
variable[cert_file] assign[=] name[self].cert_file
variable[cert_type] assign[=] name[self].cert_type
if <ast.UnaryOp object at 0x7da1b20b46d0> begin[:]
variable[id_attr] assign[=] name[self].id_attr
return[call[name[self].crypto.validate_signature, parameter[name[signedtext]]]] | keyword[def] identifier[verify_signature] ( identifier[self] , identifier[signedtext] , identifier[cert_file] = keyword[None] , identifier[cert_type] = literal[string] , identifier[node_name] = identifier[NODE_NAME] , identifier[node_id] = keyword[None] , identifier[id_attr] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[cert_file] :
identifier[cert_file] = identifier[self] . identifier[cert_file]
identifier[cert_type] = identifier[self] . identifier[cert_type]
keyword[if] keyword[not] identifier[id_attr] :
identifier[id_attr] = identifier[self] . identifier[id_attr]
keyword[return] identifier[self] . identifier[crypto] . identifier[validate_signature] (
identifier[signedtext] ,
identifier[cert_file] = identifier[cert_file] ,
identifier[cert_type] = identifier[cert_type] ,
identifier[node_name] = identifier[node_name] ,
identifier[node_id] = identifier[node_id] ,
identifier[id_attr] = identifier[id_attr] ) | def verify_signature(self, signedtext, cert_file=None, cert_type='pem', node_name=NODE_NAME, node_id=None, id_attr=''):
""" Verifies the signature of a XML document.
:param signedtext: The XML document as a string
:param cert_file: The public key that was used to sign the document
:param cert_type: The file type of the certificate
:param node_name: The name of the class that is signed
:param node_id: The identifier of the node
:param id_attr: The attribute name for the identifier, normally one of
'id','Id' or 'ID'
:return: Boolean True if the signature was correct otherwise False.
"""
# This is only for testing purposes, otherwise when would you receive
# stuff that is signed with your key !?
if not cert_file:
cert_file = self.cert_file
cert_type = self.cert_type # depends on [control=['if'], data=[]]
if not id_attr:
id_attr = self.id_attr # depends on [control=['if'], data=[]]
return self.crypto.validate_signature(signedtext, cert_file=cert_file, cert_type=cert_type, node_name=node_name, node_id=node_id, id_attr=id_attr) |
def flash_block(self, addr, data, smart_flash=True, chip_erase=None, progress_cb=None, fast_verify=False):
    """!
    @brief Flash a block of data.

    The range [addr, addr + len(data)) must lie entirely inside this
    object's flash region.  Programming is delegated to a FlashBuilder
    that holds just this single data range.
    """
    assert self.region is not None
    assert self.region.contains_range(start=addr, length=len(data))

    builder = FlashBuilder(self)
    builder.add_data(addr, data)
    return builder.program(chip_erase, progress_cb, smart_flash, fast_verify)
constant[!
@brief Flash a block of data.
]
assert[compare[name[self].region is_not constant[None]]]
assert[call[name[self].region.contains_range, parameter[]]]
variable[fb] assign[=] call[name[FlashBuilder], parameter[name[self]]]
call[name[fb].add_data, parameter[name[addr], name[data]]]
variable[info] assign[=] call[name[fb].program, parameter[name[chip_erase], name[progress_cb], name[smart_flash], name[fast_verify]]]
return[name[info]] | keyword[def] identifier[flash_block] ( identifier[self] , identifier[addr] , identifier[data] , identifier[smart_flash] = keyword[True] , identifier[chip_erase] = keyword[None] , identifier[progress_cb] = keyword[None] , identifier[fast_verify] = keyword[False] ):
literal[string]
keyword[assert] identifier[self] . identifier[region] keyword[is] keyword[not] keyword[None]
keyword[assert] identifier[self] . identifier[region] . identifier[contains_range] ( identifier[start] = identifier[addr] , identifier[length] = identifier[len] ( identifier[data] ))
identifier[fb] = identifier[FlashBuilder] ( identifier[self] )
identifier[fb] . identifier[add_data] ( identifier[addr] , identifier[data] )
identifier[info] = identifier[fb] . identifier[program] ( identifier[chip_erase] , identifier[progress_cb] , identifier[smart_flash] , identifier[fast_verify] )
keyword[return] identifier[info] | def flash_block(self, addr, data, smart_flash=True, chip_erase=None, progress_cb=None, fast_verify=False):
"""!
@brief Flash a block of data.
"""
assert self.region is not None
assert self.region.contains_range(start=addr, length=len(data))
fb = FlashBuilder(self)
fb.add_data(addr, data)
info = fb.program(chip_erase, progress_cb, smart_flash, fast_verify)
return info |
def gaussian_polygons(points, n=10):
    """
    Returns an array of approximately `n` `shapely.geometry.Polygon` objects for an array of `shapely.geometry.Point`
    objects.
    """
    # Label every point with its cluster, then take the convex hull of each
    # cluster's point set.
    clusters = classify_clusters(points, n=n)
    gdf = gpd.GeoDataFrame(data={'cluster_number': clusters}, geometry=points)
    hulls = []
    for label in range(n):
        members = gdf[gdf['cluster_number'] == label].geometry
        hull = shapely.geometry.MultiPoint([(pt.x, pt.y) for pt in members]).convex_hull
        hulls.append(hull)
    # Degenerate hulls (one- or two-point clusters collapse to a Point or
    # LineString) are dropped so only true polygons survive.
    polygons = [
        h for h in hulls
        if not isinstance(h, (shapely.geometry.Point, shapely.geometry.LineString))
    ]
    return gpd.GeoSeries(polygons)
constant[
Returns an array of approximately `n` `shapely.geometry.Polygon` objects for an array of `shapely.geometry.Point`
objects.
]
variable[gdf] assign[=] call[name[gpd].GeoDataFrame, parameter[]]
variable[polygons] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:]
variable[sel_points] assign[=] call[name[gdf]][compare[call[name[gdf]][constant[cluster_number]] equal[==] name[i]]].geometry
call[name[polygons].append, parameter[call[name[shapely].geometry.MultiPoint, parameter[<ast.ListComp object at 0x7da20c794c10>]].convex_hull]]
variable[polygons] assign[=] <ast.ListComp object at 0x7da20c7954e0>
return[call[name[gpd].GeoSeries, parameter[name[polygons]]]] | keyword[def] identifier[gaussian_polygons] ( identifier[points] , identifier[n] = literal[int] ):
literal[string]
identifier[gdf] = identifier[gpd] . identifier[GeoDataFrame] ( identifier[data] ={ literal[string] : identifier[classify_clusters] ( identifier[points] , identifier[n] = identifier[n] )}, identifier[geometry] = identifier[points] )
identifier[polygons] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ):
identifier[sel_points] = identifier[gdf] [ identifier[gdf] [ literal[string] ]== identifier[i] ]. identifier[geometry]
identifier[polygons] . identifier[append] ( identifier[shapely] . identifier[geometry] . identifier[MultiPoint] ([( identifier[p] . identifier[x] , identifier[p] . identifier[y] ) keyword[for] identifier[p] keyword[in] identifier[sel_points] ]). identifier[convex_hull] )
identifier[polygons] =[ identifier[p] keyword[for] identifier[p] keyword[in] identifier[polygons] keyword[if]
( keyword[not] identifier[isinstance] ( identifier[p] , identifier[shapely] . identifier[geometry] . identifier[Point] )) keyword[and] ( keyword[not] identifier[isinstance] ( identifier[p] , identifier[shapely] . identifier[geometry] . identifier[LineString] ))]
keyword[return] identifier[gpd] . identifier[GeoSeries] ( identifier[polygons] ) | def gaussian_polygons(points, n=10):
"""
Returns an array of approximately `n` `shapely.geometry.Polygon` objects for an array of `shapely.geometry.Point`
objects.
"""
gdf = gpd.GeoDataFrame(data={'cluster_number': classify_clusters(points, n=n)}, geometry=points)
polygons = []
for i in range(n):
sel_points = gdf[gdf['cluster_number'] == i].geometry
polygons.append(shapely.geometry.MultiPoint([(p.x, p.y) for p in sel_points]).convex_hull) # depends on [control=['for'], data=['i']]
polygons = [p for p in polygons if not isinstance(p, shapely.geometry.Point) and (not isinstance(p, shapely.geometry.LineString))]
return gpd.GeoSeries(polygons) |
def add_blackbox_or_builtin_call(self, node, blackbox): # noqa: C901
    """Processes a blackbox or builtin function when it is called.
    Nothing gets assigned to ret_func_foo in the builtin/blackbox case.
    Increments self.function_call_index each time it is called, we can refer to it as N in the comments.
    Create e.g. ~call_1 = ret_func_foo RestoreNode.
    Create e.g. temp_N_def_arg1 = call_arg1_label_visitor.result for each argument.
    Visit the arguments if they're calls. (save_def_args_in_temp)
    I do not think I care about this one actually -- Create e.g. def_arg1 = temp_N_def_arg1 for each argument.
    (create_local_scope_from_def_args)
    Add RestoreNode to the end of the Nodes.
    Args:
        node(ast.Call) : The node that calls the definition.
        blackbox(bool): Whether or not it is a builtin or blackbox call.
    Returns:
        call_node(BBorBInode): The call node.
    """
    # Bump N and remember it: nested visits below may bump the counter again.
    self.function_call_index += 1
    saved_function_call_index = self.function_call_index
    self.undecided = False
    call_label_visitor = LabelVisitor()
    call_label_visitor.visit(node)
    # Everything before the first '(' is the callable's dotted name.
    call_function_label = call_label_visitor.result[:call_label_visitor.result.find('(')]
    # Check if function call matches a blackbox/built-in alias and if so, resolve it
    # This resolves aliases like "from os import system as mysys" as: mysys -> os.system
    local_definitions = self.module_definitions_stack[-1]
    call_function_label = fully_qualify_alias_labels(call_function_label, local_definitions.import_alias_mapping)
    # Create e.g. ~call_1 = ret_func_foo
    LHS = CALL_IDENTIFIER + 'call_' + str(saved_function_call_index)
    RHS = 'ret_' + call_function_label + '('
    # Label is filled in below, once the visual argument list is known.
    call_node = BBorBInode(
        label='',
        left_hand_side=LHS,
        ast_node=node,
        right_hand_side_variables=[],
        line_number=node.lineno,
        path=self.filenames[-1],
        func_name=call_function_label
    )
    # visual_args: printable argument labels for the node label;
    # rhs_vars: variable names flowing into the call (taint sources).
    visual_args = list()
    rhs_vars = list()
    last_return_value_of_nested_call = None
    # Positional args and keyword args are handled uniformly.
    for arg_node in itertools.chain(node.args, node.keywords):
        arg = arg_node.value if isinstance(arg_node, ast.keyword) else arg_node
        if isinstance(arg, ast.Call):
            return_value_of_nested_call = self.visit(arg)
            if last_return_value_of_nested_call:
                # connect inner to other_inner in e.g.
                # `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
                # I should probably loop to the inner most call of other_inner here.
                try:
                    # visit() may return a ControlFlowNode-like wrapper
                    # exposing first_node, or a plain node (AttributeError).
                    last_return_value_of_nested_call.connect(return_value_of_nested_call.first_node)
                except AttributeError:
                    last_return_value_of_nested_call.connect(return_value_of_nested_call)
            else:
                # I should only set this once per loop, inner in e.g.
                # `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
                # (inner_most_call is used when predecessor is a ControlFlowNode in connect_control_flow_node)
                call_node.inner_most_call = return_value_of_nested_call
            last_return_value_of_nested_call = return_value_of_nested_call
            if isinstance(arg_node, ast.keyword) and arg_node.arg is not None:
                visual_args.append(arg_node.arg + '=' + return_value_of_nested_call.left_hand_side)
            else:
                visual_args.append(return_value_of_nested_call.left_hand_side)
            rhs_vars.append(return_value_of_nested_call.left_hand_side)
        else:
            # Plain (non-call) argument: record its label and variables as-is.
            label = LabelVisitor()
            label.visit(arg_node)
            visual_args.append(label.result)
            vv = VarsVisitor()
            vv.visit(arg_node)
            rhs_vars.extend(vv.result)
    if last_return_value_of_nested_call:
        # connect other_inner to outer in e.g.
        # `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
        last_return_value_of_nested_call.connect(call_node)
    call_names = list(get_call_names(node.func))
    if len(call_names) > 1:
        # taint is a RHS variable (self) of taint.lower()
        rhs_vars.append(call_names[0])
    if len(visual_args) > 0:
        for arg in visual_args:
            RHS = RHS + arg + ", "
        # Replace the last ", " with a )
        RHS = RHS[:len(RHS) - 2] + ')'
    else:
        RHS = RHS + ')'
    call_node.label = LHS + " = " + RHS
    call_node.right_hand_side_variables = rhs_vars
    # Used in get_sink_args
    rhs_visitor = RHSVisitor()
    rhs_visitor.visit(node)
    call_node.args = rhs_visitor.result
    if blackbox:
        self.blackbox_assignments.add(call_node)
    # Wire the new node into the CFG after the current last node, if allowed.
    self.connect_if_allowed(self.nodes[-1], call_node)
    self.nodes.append(call_node)
    return call_node
constant[Processes a blackbox or builtin function when it is called.
Nothing gets assigned to ret_func_foo in the builtin/blackbox case.
Increments self.function_call_index each time it is called, we can refer to it as N in the comments.
Create e.g. ~call_1 = ret_func_foo RestoreNode.
Create e.g. temp_N_def_arg1 = call_arg1_label_visitor.result for each argument.
Visit the arguments if they're calls. (save_def_args_in_temp)
I do not think I care about this one actually -- Create e.g. def_arg1 = temp_N_def_arg1 for each argument.
(create_local_scope_from_def_args)
Add RestoreNode to the end of the Nodes.
Args:
node(ast.Call) : The node that calls the definition.
blackbox(bool): Whether or not it is a builtin or blackbox call.
Returns:
call_node(BBorBInode): The call node.
]
<ast.AugAssign object at 0x7da1b1e2be20>
variable[saved_function_call_index] assign[=] name[self].function_call_index
name[self].undecided assign[=] constant[False]
variable[call_label_visitor] assign[=] call[name[LabelVisitor], parameter[]]
call[name[call_label_visitor].visit, parameter[name[node]]]
variable[call_function_label] assign[=] call[name[call_label_visitor].result][<ast.Slice object at 0x7da1b1e2b910>]
variable[local_definitions] assign[=] call[name[self].module_definitions_stack][<ast.UnaryOp object at 0x7da1b1e2b700>]
variable[call_function_label] assign[=] call[name[fully_qualify_alias_labels], parameter[name[call_function_label], name[local_definitions].import_alias_mapping]]
variable[LHS] assign[=] binary_operation[binary_operation[name[CALL_IDENTIFIER] + constant[call_]] + call[name[str], parameter[name[saved_function_call_index]]]]
variable[RHS] assign[=] binary_operation[binary_operation[constant[ret_] + name[call_function_label]] + constant[(]]
variable[call_node] assign[=] call[name[BBorBInode], parameter[]]
variable[visual_args] assign[=] call[name[list], parameter[]]
variable[rhs_vars] assign[=] call[name[list], parameter[]]
variable[last_return_value_of_nested_call] assign[=] constant[None]
for taget[name[arg_node]] in starred[call[name[itertools].chain, parameter[name[node].args, name[node].keywords]]] begin[:]
variable[arg] assign[=] <ast.IfExp object at 0x7da1b1e2a9e0>
if call[name[isinstance], parameter[name[arg], name[ast].Call]] begin[:]
variable[return_value_of_nested_call] assign[=] call[name[self].visit, parameter[name[arg]]]
if name[last_return_value_of_nested_call] begin[:]
<ast.Try object at 0x7da1b1e2a560>
variable[last_return_value_of_nested_call] assign[=] name[return_value_of_nested_call]
if <ast.BoolOp object at 0x7da1b1e2a0e0> begin[:]
call[name[visual_args].append, parameter[binary_operation[binary_operation[name[arg_node].arg + constant[=]] + name[return_value_of_nested_call].left_hand_side]]]
call[name[rhs_vars].append, parameter[name[return_value_of_nested_call].left_hand_side]]
if name[last_return_value_of_nested_call] begin[:]
call[name[last_return_value_of_nested_call].connect, parameter[name[call_node]]]
variable[call_names] assign[=] call[name[list], parameter[call[name[get_call_names], parameter[name[node].func]]]]
if compare[call[name[len], parameter[name[call_names]]] greater[>] constant[1]] begin[:]
call[name[rhs_vars].append, parameter[call[name[call_names]][constant[0]]]]
if compare[call[name[len], parameter[name[visual_args]]] greater[>] constant[0]] begin[:]
for taget[name[arg]] in starred[name[visual_args]] begin[:]
variable[RHS] assign[=] binary_operation[binary_operation[name[RHS] + name[arg]] + constant[, ]]
variable[RHS] assign[=] binary_operation[call[name[RHS]][<ast.Slice object at 0x7da1b1e28be0>] + constant[)]]
name[call_node].label assign[=] binary_operation[binary_operation[name[LHS] + constant[ = ]] + name[RHS]]
name[call_node].right_hand_side_variables assign[=] name[rhs_vars]
variable[rhs_visitor] assign[=] call[name[RHSVisitor], parameter[]]
call[name[rhs_visitor].visit, parameter[name[node]]]
name[call_node].args assign[=] name[rhs_visitor].result
if name[blackbox] begin[:]
call[name[self].blackbox_assignments.add, parameter[name[call_node]]]
call[name[self].connect_if_allowed, parameter[call[name[self].nodes][<ast.UnaryOp object at 0x7da1b1e22f80>], name[call_node]]]
call[name[self].nodes.append, parameter[name[call_node]]]
return[name[call_node]] | keyword[def] identifier[add_blackbox_or_builtin_call] ( identifier[self] , identifier[node] , identifier[blackbox] ):
literal[string]
identifier[self] . identifier[function_call_index] += literal[int]
identifier[saved_function_call_index] = identifier[self] . identifier[function_call_index]
identifier[self] . identifier[undecided] = keyword[False]
identifier[call_label_visitor] = identifier[LabelVisitor] ()
identifier[call_label_visitor] . identifier[visit] ( identifier[node] )
identifier[call_function_label] = identifier[call_label_visitor] . identifier[result] [: identifier[call_label_visitor] . identifier[result] . identifier[find] ( literal[string] )]
identifier[local_definitions] = identifier[self] . identifier[module_definitions_stack] [- literal[int] ]
identifier[call_function_label] = identifier[fully_qualify_alias_labels] ( identifier[call_function_label] , identifier[local_definitions] . identifier[import_alias_mapping] )
identifier[LHS] = identifier[CALL_IDENTIFIER] + literal[string] + identifier[str] ( identifier[saved_function_call_index] )
identifier[RHS] = literal[string] + identifier[call_function_label] + literal[string]
identifier[call_node] = identifier[BBorBInode] (
identifier[label] = literal[string] ,
identifier[left_hand_side] = identifier[LHS] ,
identifier[ast_node] = identifier[node] ,
identifier[right_hand_side_variables] =[],
identifier[line_number] = identifier[node] . identifier[lineno] ,
identifier[path] = identifier[self] . identifier[filenames] [- literal[int] ],
identifier[func_name] = identifier[call_function_label]
)
identifier[visual_args] = identifier[list] ()
identifier[rhs_vars] = identifier[list] ()
identifier[last_return_value_of_nested_call] = keyword[None]
keyword[for] identifier[arg_node] keyword[in] identifier[itertools] . identifier[chain] ( identifier[node] . identifier[args] , identifier[node] . identifier[keywords] ):
identifier[arg] = identifier[arg_node] . identifier[value] keyword[if] identifier[isinstance] ( identifier[arg_node] , identifier[ast] . identifier[keyword] ) keyword[else] identifier[arg_node]
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[ast] . identifier[Call] ):
identifier[return_value_of_nested_call] = identifier[self] . identifier[visit] ( identifier[arg] )
keyword[if] identifier[last_return_value_of_nested_call] :
keyword[try] :
identifier[last_return_value_of_nested_call] . identifier[connect] ( identifier[return_value_of_nested_call] . identifier[first_node] )
keyword[except] identifier[AttributeError] :
identifier[last_return_value_of_nested_call] . identifier[connect] ( identifier[return_value_of_nested_call] )
keyword[else] :
identifier[call_node] . identifier[inner_most_call] = identifier[return_value_of_nested_call]
identifier[last_return_value_of_nested_call] = identifier[return_value_of_nested_call]
keyword[if] identifier[isinstance] ( identifier[arg_node] , identifier[ast] . identifier[keyword] ) keyword[and] identifier[arg_node] . identifier[arg] keyword[is] keyword[not] keyword[None] :
identifier[visual_args] . identifier[append] ( identifier[arg_node] . identifier[arg] + literal[string] + identifier[return_value_of_nested_call] . identifier[left_hand_side] )
keyword[else] :
identifier[visual_args] . identifier[append] ( identifier[return_value_of_nested_call] . identifier[left_hand_side] )
identifier[rhs_vars] . identifier[append] ( identifier[return_value_of_nested_call] . identifier[left_hand_side] )
keyword[else] :
identifier[label] = identifier[LabelVisitor] ()
identifier[label] . identifier[visit] ( identifier[arg_node] )
identifier[visual_args] . identifier[append] ( identifier[label] . identifier[result] )
identifier[vv] = identifier[VarsVisitor] ()
identifier[vv] . identifier[visit] ( identifier[arg_node] )
identifier[rhs_vars] . identifier[extend] ( identifier[vv] . identifier[result] )
keyword[if] identifier[last_return_value_of_nested_call] :
identifier[last_return_value_of_nested_call] . identifier[connect] ( identifier[call_node] )
identifier[call_names] = identifier[list] ( identifier[get_call_names] ( identifier[node] . identifier[func] ))
keyword[if] identifier[len] ( identifier[call_names] )> literal[int] :
identifier[rhs_vars] . identifier[append] ( identifier[call_names] [ literal[int] ])
keyword[if] identifier[len] ( identifier[visual_args] )> literal[int] :
keyword[for] identifier[arg] keyword[in] identifier[visual_args] :
identifier[RHS] = identifier[RHS] + identifier[arg] + literal[string]
identifier[RHS] = identifier[RHS] [: identifier[len] ( identifier[RHS] )- literal[int] ]+ literal[string]
keyword[else] :
identifier[RHS] = identifier[RHS] + literal[string]
identifier[call_node] . identifier[label] = identifier[LHS] + literal[string] + identifier[RHS]
identifier[call_node] . identifier[right_hand_side_variables] = identifier[rhs_vars]
identifier[rhs_visitor] = identifier[RHSVisitor] ()
identifier[rhs_visitor] . identifier[visit] ( identifier[node] )
identifier[call_node] . identifier[args] = identifier[rhs_visitor] . identifier[result]
keyword[if] identifier[blackbox] :
identifier[self] . identifier[blackbox_assignments] . identifier[add] ( identifier[call_node] )
identifier[self] . identifier[connect_if_allowed] ( identifier[self] . identifier[nodes] [- literal[int] ], identifier[call_node] )
identifier[self] . identifier[nodes] . identifier[append] ( identifier[call_node] )
keyword[return] identifier[call_node] | def add_blackbox_or_builtin_call(self, node, blackbox): # noqa: C901
"Processes a blackbox or builtin function when it is called.\n Nothing gets assigned to ret_func_foo in the builtin/blackbox case.\n\n Increments self.function_call_index each time it is called, we can refer to it as N in the comments.\n Create e.g. ~call_1 = ret_func_foo RestoreNode.\n\n Create e.g. temp_N_def_arg1 = call_arg1_label_visitor.result for each argument.\n Visit the arguments if they're calls. (save_def_args_in_temp)\n\n I do not think I care about this one actually -- Create e.g. def_arg1 = temp_N_def_arg1 for each argument.\n (create_local_scope_from_def_args)\n\n Add RestoreNode to the end of the Nodes.\n\n Args:\n node(ast.Call) : The node that calls the definition.\n blackbox(bool): Whether or not it is a builtin or blackbox call.\n Returns:\n call_node(BBorBInode): The call node.\n "
self.function_call_index += 1
saved_function_call_index = self.function_call_index
self.undecided = False
call_label_visitor = LabelVisitor()
call_label_visitor.visit(node)
call_function_label = call_label_visitor.result[:call_label_visitor.result.find('(')]
# Check if function call matches a blackbox/built-in alias and if so, resolve it
# This resolves aliases like "from os import system as mysys" as: mysys -> os.system
local_definitions = self.module_definitions_stack[-1]
call_function_label = fully_qualify_alias_labels(call_function_label, local_definitions.import_alias_mapping)
# Create e.g. ~call_1 = ret_func_foo
LHS = CALL_IDENTIFIER + 'call_' + str(saved_function_call_index)
RHS = 'ret_' + call_function_label + '('
call_node = BBorBInode(label='', left_hand_side=LHS, ast_node=node, right_hand_side_variables=[], line_number=node.lineno, path=self.filenames[-1], func_name=call_function_label)
visual_args = list()
rhs_vars = list()
last_return_value_of_nested_call = None
for arg_node in itertools.chain(node.args, node.keywords):
arg = arg_node.value if isinstance(arg_node, ast.keyword) else arg_node
if isinstance(arg, ast.Call):
return_value_of_nested_call = self.visit(arg)
if last_return_value_of_nested_call:
# connect inner to other_inner in e.g.
# `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
# I should probably loop to the inner most call of other_inner here.
try:
last_return_value_of_nested_call.connect(return_value_of_nested_call.first_node) # depends on [control=['try'], data=[]]
except AttributeError:
last_return_value_of_nested_call.connect(return_value_of_nested_call) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
# I should only set this once per loop, inner in e.g.
# `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
# (inner_most_call is used when predecessor is a ControlFlowNode in connect_control_flow_node)
call_node.inner_most_call = return_value_of_nested_call
last_return_value_of_nested_call = return_value_of_nested_call
if isinstance(arg_node, ast.keyword) and arg_node.arg is not None:
visual_args.append(arg_node.arg + '=' + return_value_of_nested_call.left_hand_side) # depends on [control=['if'], data=[]]
else:
visual_args.append(return_value_of_nested_call.left_hand_side)
rhs_vars.append(return_value_of_nested_call.left_hand_side) # depends on [control=['if'], data=[]]
else:
label = LabelVisitor()
label.visit(arg_node)
visual_args.append(label.result)
vv = VarsVisitor()
vv.visit(arg_node)
rhs_vars.extend(vv.result) # depends on [control=['for'], data=['arg_node']]
if last_return_value_of_nested_call:
# connect other_inner to outer in e.g.
# `scrypt.outer(scrypt.inner(image_name), scrypt.other_inner(image_name))`
last_return_value_of_nested_call.connect(call_node) # depends on [control=['if'], data=[]]
call_names = list(get_call_names(node.func))
if len(call_names) > 1:
# taint is a RHS variable (self) of taint.lower()
rhs_vars.append(call_names[0]) # depends on [control=['if'], data=[]]
if len(visual_args) > 0:
for arg in visual_args:
RHS = RHS + arg + ', ' # depends on [control=['for'], data=['arg']]
# Replace the last ", " with a )
RHS = RHS[:len(RHS) - 2] + ')' # depends on [control=['if'], data=[]]
else:
RHS = RHS + ')'
call_node.label = LHS + ' = ' + RHS
call_node.right_hand_side_variables = rhs_vars
# Used in get_sink_args
rhs_visitor = RHSVisitor()
rhs_visitor.visit(node)
call_node.args = rhs_visitor.result
if blackbox:
self.blackbox_assignments.add(call_node) # depends on [control=['if'], data=[]]
self.connect_if_allowed(self.nodes[-1], call_node)
self.nodes.append(call_node)
return call_node |
def plan_tr(p, *args, **kwargs):
    '''
    plan_tr(p, ...) yields a copy of plan p whose functions' afferent and
    efferent values have been translated.  The translation is the merge of
    zero or more dictionary positional arguments (left to right) followed by
    the keyword arguments.  A non-plan argument is first coerced with plan().
    '''
    target = p if is_plan(p) else plan(p)
    return target.tr(*args, **kwargs)
constant[
plan_tr(p, ...) yields a copy of plan p in which the afferent and efferent values of its
functions have been translated. The translation is found from merging the list of 0 or more
dictionary arguments given left-to-right followed by the keyword arguments. If the plan that
is given is not a plan object explicitly, calc_tr will attempt to coerce it to one.
]
if <ast.UnaryOp object at 0x7da20c795600> begin[:]
variable[p] assign[=] call[name[plan], parameter[name[p]]]
return[call[name[p].tr, parameter[<ast.Starred object at 0x7da20c794220>]]] | keyword[def] identifier[plan_tr] ( identifier[p] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[is_plan] ( identifier[p] ):
identifier[p] = identifier[plan] ( identifier[p] )
keyword[return] identifier[p] . identifier[tr] (* identifier[args] ,** identifier[kwargs] ) | def plan_tr(p, *args, **kwargs):
"""
plan_tr(p, ...) yields a copy of plan p in which the afferent and efferent values of its
functions have been translated. The translation is found from merging the list of 0 or more
dictionary arguments given left-to-right followed by the keyword arguments. If the plan that
is given is not a plan object explicitly, calc_tr will attempt to coerce it to one.
"""
if not is_plan(p):
p = plan(p) # depends on [control=['if'], data=[]]
return p.tr(*args, **kwargs) |
def write_image(self, stream, image_format="svg", **kwargs):
    """Render the phase diagram and save it to *stream* as an image.

    Args:
        stream:
            target to write to; may be a file stream or a StringIO stream.
        image_format
            any matplotlib-supported format.  Defaults to svg, which gives
            the best results for vector graphics.
        \\*\\*kwargs: forwarded to the get_plot function.
    """
    plot = self.get_plot(**kwargs)
    figure = plot.gcf()
    # Fixed 12x10-inch canvas for consistent output size.
    figure.set_size_inches((12, 10))
    plot.savefig(stream, format=image_format)
constant[
Writes the phase diagram to an image in a stream.
Args:
stream:
stream to write to. Can be a file stream or a StringIO stream.
image_format
format for image. Can be any of matplotlib supported formats.
Defaults to svg for best results for vector graphics.
\*\*kwargs: Pass through to get_plot functino.
]
variable[plt] assign[=] call[name[self].get_plot, parameter[]]
variable[f] assign[=] call[name[plt].gcf, parameter[]]
call[name[f].set_size_inches, parameter[tuple[[<ast.Constant object at 0x7da18f8111b0>, <ast.Constant object at 0x7da18f810730>]]]]
call[name[plt].savefig, parameter[name[stream]]] | keyword[def] identifier[write_image] ( identifier[self] , identifier[stream] , identifier[image_format] = literal[string] ,** identifier[kwargs] ):
literal[string]
identifier[plt] = identifier[self] . identifier[get_plot] (** identifier[kwargs] )
identifier[f] = identifier[plt] . identifier[gcf] ()
identifier[f] . identifier[set_size_inches] (( literal[int] , literal[int] ))
identifier[plt] . identifier[savefig] ( identifier[stream] , identifier[format] = identifier[image_format] ) | def write_image(self, stream, image_format='svg', **kwargs):
"""
Writes the phase diagram to an image in a stream.
Args:
stream:
stream to write to. Can be a file stream or a StringIO stream.
image_format
format for image. Can be any of matplotlib supported formats.
Defaults to svg for best results for vector graphics.
\\*\\*kwargs: Pass through to get_plot functino.
"""
plt = self.get_plot(**kwargs)
f = plt.gcf()
f.set_size_inches((12, 10))
plt.savefig(stream, format=image_format) |
def size(self):
    """Return the total size of all files under the cache directory, in bytes."""
    return sum(
        os.path.getsize(os.path.join(root, filename))
        for root, _subdirs, filenames in os.walk(self.dir)
        for filename in filenames
    )
constant[Returns the size of the cache in bytes.]
variable[total_size] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da18bcca200>, <ast.Name object at 0x7da18bcc8490>, <ast.Name object at 0x7da18bcc8f40>]]] in starred[call[name[os].walk, parameter[name[self].dir]]] begin[:]
for taget[name[f]] in starred[name[filenames]] begin[:]
variable[fp] assign[=] call[name[os].path.join, parameter[name[dir_path], name[f]]]
<ast.AugAssign object at 0x7da18bcc8970>
return[name[total_size]] | keyword[def] identifier[size] ( identifier[self] ):
literal[string]
identifier[total_size] = literal[int]
keyword[for] identifier[dir_path] , identifier[dir_names] , identifier[filenames] keyword[in] identifier[os] . identifier[walk] ( identifier[self] . identifier[dir] ):
keyword[for] identifier[f] keyword[in] identifier[filenames] :
identifier[fp] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[f] )
identifier[total_size] += identifier[os] . identifier[path] . identifier[getsize] ( identifier[fp] )
keyword[return] identifier[total_size] | def size(self):
"""Returns the size of the cache in bytes."""
total_size = 0
for (dir_path, dir_names, filenames) in os.walk(self.dir):
for f in filenames:
fp = os.path.join(dir_path, f)
total_size += os.path.getsize(fp) # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=[]]
return total_size |
def DEFINE_float(  # pylint: disable=invalid-name,redefined-builtin
    name, default, help, lower_bound=None, upper_bound=None,
    flag_values=_flagvalues.FLAGS, **args):  # pylint: disable=invalid-name
  """Registers a flag whose value must be a float.

  When lower_bound and/or upper_bound are provided, the flag value is
  additionally constrained to lie within that range.

  Args:
    name: str, the flag name.
    default: float|str|None, the default value of the flag.
    help: str, the help message.
    lower_bound: float, min value of the flag.
    upper_bound: float, max value of the flag.
    flag_values: FlagValues, the FlagValues instance with which the flag will
      be registered. This should almost never need to be overridden.
    **args: dict, the extra keyword args that are passed to DEFINE.
  """
  bounded_parser = _argument_parser.FloatParser(lower_bound, upper_bound)
  DEFINE(bounded_parser, name, default, help, flag_values,
         _argument_parser.ArgumentSerializer(), **args)
  _register_bounds_validator_if_needed(
      bounded_parser, name, flag_values=flag_values)
constant[Registers a flag whose value must be a float.
If lower_bound or upper_bound are set, then this flag must be
within the given range.
Args:
name: str, the flag name.
default: float|str|None, the default value of the flag.
help: str, the help message.
lower_bound: float, min value of the flag.
upper_bound: float, max value of the flag.
flag_values: FlagValues, the FlagValues instance with which the flag will
be registered. This should almost never need to be overridden.
**args: dict, the extra keyword args that are passed to DEFINE.
]
variable[parser] assign[=] call[name[_argument_parser].FloatParser, parameter[name[lower_bound], name[upper_bound]]]
variable[serializer] assign[=] call[name[_argument_parser].ArgumentSerializer, parameter[]]
call[name[DEFINE], parameter[name[parser], name[name], name[default], name[help], name[flag_values], name[serializer]]]
call[name[_register_bounds_validator_if_needed], parameter[name[parser], name[name]]] | keyword[def] identifier[DEFINE_float] (
identifier[name] , identifier[default] , identifier[help] , identifier[lower_bound] = keyword[None] , identifier[upper_bound] = keyword[None] ,
identifier[flag_values] = identifier[_flagvalues] . identifier[FLAGS] ,** identifier[args] ):
literal[string]
identifier[parser] = identifier[_argument_parser] . identifier[FloatParser] ( identifier[lower_bound] , identifier[upper_bound] )
identifier[serializer] = identifier[_argument_parser] . identifier[ArgumentSerializer] ()
identifier[DEFINE] ( identifier[parser] , identifier[name] , identifier[default] , identifier[help] , identifier[flag_values] , identifier[serializer] ,** identifier[args] )
identifier[_register_bounds_validator_if_needed] ( identifier[parser] , identifier[name] , identifier[flag_values] = identifier[flag_values] ) | def DEFINE_float(name, default, help, lower_bound=None, upper_bound=None, flag_values=_flagvalues.FLAGS, **args): # pylint: disable=invalid-name,redefined-builtin
# pylint: disable=invalid-name
'Registers a flag whose value must be a float.\n\n If lower_bound or upper_bound are set, then this flag must be\n within the given range.\n\n Args:\n name: str, the flag name.\n default: float|str|None, the default value of the flag.\n help: str, the help message.\n lower_bound: float, min value of the flag.\n upper_bound: float, max value of the flag.\n flag_values: FlagValues, the FlagValues instance with which the flag will\n be registered. This should almost never need to be overridden.\n **args: dict, the extra keyword args that are passed to DEFINE.\n '
parser = _argument_parser.FloatParser(lower_bound, upper_bound)
serializer = _argument_parser.ArgumentSerializer()
DEFINE(parser, name, default, help, flag_values, serializer, **args)
_register_bounds_validator_if_needed(parser, name, flag_values=flag_values) |
def file_hash(load, fnd):
    '''
    Return a file hash, the hash type is set in the master config file

    load
        Payload from the minion; must contain ``saltenv``.
    fnd
        File lookup result with ``path`` (absolute path) and ``rel``
        (path relative to the fileserver root) keys.
    '''
    path = fnd['path']
    ret = {}
    if 'env' in load:
        # "env" is not supported; Use "saltenv".
        load.pop('env')
    if load['saltenv'] not in envs():
        return {}
    # if the file doesn't exist, we can't get a hash
    if not path or not os.path.isfile(path):
        return ret
    # set the hash_type as it is determined by config-- so mechanism won't change that
    ret['hash_type'] = __opts__['hash_type']
    # check if the hash is cached
    # cache file's contents should be "hash:mtime"
    cache_path = os.path.join(
        __opts__['cachedir'],
        'minionfs',
        'hash',
        load['saltenv'],
        '{0}.hash.{1}'.format(fnd['rel'], __opts__['hash_type'])
    )
    # if we have a cache, serve that if the mtime hasn't changed
    if os.path.exists(cache_path):
        try:
            with salt.utils.files.fopen(cache_path, 'rb') as fp_:
                try:
                    hsum, mtime = salt.utils.stringutils.to_unicode(fp_.read()).split(':')
                except ValueError:
                    log.debug(
                        'Fileserver attempted to read incomplete cache file. '
                        'Retrying.'
                    )
                    # Bug fix: return the freshly computed hash from the
                    # retry instead of the partially populated ``ret``.
                    return file_hash(load, fnd)
                # Bug fix: ``mtime`` was read back from the cache file as a
                # string, so compare string-to-string. The previous
                # ``os.path.getmtime(path) == mtime`` compared a float to a
                # str, which is never True, so the cache was never hit.
                if str(os.path.getmtime(path)) == mtime:
                    # mtime unchanged -> the cached hash is still valid
                    ret['hsum'] = hsum
                    return ret
        # ``os.error`` is an alias of OSError (lock/permission race on the
        # cache file).
        except os.error:
            log.debug(
                'Fileserver encountered lock when reading cache file. '
                'Retrying.'
            )
            # Bug fix: return the retry result here as well.
            return file_hash(load, fnd)
    # if we don't have a cache entry-- lets make one
    ret['hsum'] = salt.utils.hashutils.get_hash(path, __opts__['hash_type'])
    cache_dir = os.path.dirname(cache_path)
    # make cache directory if it doesn't exist
    if not os.path.exists(cache_dir):
        os.makedirs(cache_dir)
    # save the cache object "hash:mtime"
    cache_object = '{0}:{1}'.format(ret['hsum'], os.path.getmtime(path))
    with salt.utils.files.flopen(cache_path, 'w') as fp_:
        fp_.write(cache_object)
    return ret
constant[
Return a file hash, the hash type is set in the master config file
]
variable[path] assign[=] call[name[fnd]][constant[path]]
variable[ret] assign[=] dictionary[[], []]
if compare[constant[env] in name[load]] begin[:]
call[name[load].pop, parameter[constant[env]]]
if compare[call[name[load]][constant[saltenv]] <ast.NotIn object at 0x7da2590d7190> call[name[envs], parameter[]]] begin[:]
return[dictionary[[], []]]
if <ast.BoolOp object at 0x7da1b208b880> begin[:]
return[name[ret]]
call[name[ret]][constant[hash_type]] assign[=] call[name[__opts__]][constant[hash_type]]
variable[cache_path] assign[=] call[name[os].path.join, parameter[call[name[__opts__]][constant[cachedir]], constant[minionfs], constant[hash], call[name[load]][constant[saltenv]], call[constant[{0}.hash.{1}].format, parameter[call[name[fnd]][constant[rel]], call[name[__opts__]][constant[hash_type]]]]]]
if call[name[os].path.exists, parameter[name[cache_path]]] begin[:]
<ast.Try object at 0x7da1b20880d0>
call[name[ret]][constant[hsum]] assign[=] call[name[salt].utils.hashutils.get_hash, parameter[name[path], call[name[__opts__]][constant[hash_type]]]]
variable[cache_dir] assign[=] call[name[os].path.dirname, parameter[name[cache_path]]]
if <ast.UnaryOp object at 0x7da1b2346920> begin[:]
call[name[os].makedirs, parameter[name[cache_dir]]]
variable[cache_object] assign[=] call[constant[{0}:{1}].format, parameter[call[name[ret]][constant[hsum]], call[name[os].path.getmtime, parameter[name[path]]]]]
with call[name[salt].utils.files.flopen, parameter[name[cache_path], constant[w]]] begin[:]
call[name[fp_].write, parameter[name[cache_object]]]
return[name[ret]] | keyword[def] identifier[file_hash] ( identifier[load] , identifier[fnd] ):
literal[string]
identifier[path] = identifier[fnd] [ literal[string] ]
identifier[ret] ={}
keyword[if] literal[string] keyword[in] identifier[load] :
identifier[load] . identifier[pop] ( literal[string] )
keyword[if] identifier[load] [ literal[string] ] keyword[not] keyword[in] identifier[envs] ():
keyword[return] {}
keyword[if] keyword[not] identifier[path] keyword[or] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ):
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= identifier[__opts__] [ literal[string] ]
identifier[cache_path] = identifier[os] . identifier[path] . identifier[join] (
identifier[__opts__] [ literal[string] ],
literal[string] ,
literal[string] ,
identifier[load] [ literal[string] ],
literal[string] . identifier[format] ( identifier[fnd] [ literal[string] ], identifier[__opts__] [ literal[string] ])
)
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[cache_path] ):
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[cache_path] , literal[string] ) keyword[as] identifier[fp_] :
keyword[try] :
identifier[hsum] , identifier[mtime] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[fp_] . identifier[read] ()). identifier[split] ( literal[string] )
keyword[except] identifier[ValueError] :
identifier[log] . identifier[debug] (
literal[string]
literal[string]
)
identifier[file_hash] ( identifier[load] , identifier[fnd] )
keyword[return] identifier[ret]
keyword[if] identifier[os] . identifier[path] . identifier[getmtime] ( identifier[path] )== identifier[mtime] :
identifier[ret] [ literal[string] ]= identifier[hsum]
keyword[return] identifier[ret]
keyword[except] identifier[os] . identifier[error] :
identifier[log] . identifier[debug] (
literal[string]
literal[string]
)
identifier[file_hash] ( identifier[load] , identifier[fnd] )
keyword[return] identifier[ret]
identifier[ret] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[hashutils] . identifier[get_hash] ( identifier[path] , identifier[__opts__] [ literal[string] ])
identifier[cache_dir] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[cache_path] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[cache_dir] ):
identifier[os] . identifier[makedirs] ( identifier[cache_dir] )
identifier[cache_object] = literal[string] . identifier[format] ( identifier[ret] [ literal[string] ], identifier[os] . identifier[path] . identifier[getmtime] ( identifier[path] ))
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[flopen] ( identifier[cache_path] , literal[string] ) keyword[as] identifier[fp_] :
identifier[fp_] . identifier[write] ( identifier[cache_object] )
keyword[return] identifier[ret] | def file_hash(load, fnd):
"""
Return a file hash, the hash type is set in the master config file
"""
path = fnd['path']
ret = {}
if 'env' in load:
# "env" is not supported; Use "saltenv".
load.pop('env') # depends on [control=['if'], data=['load']]
if load['saltenv'] not in envs():
return {} # depends on [control=['if'], data=[]]
# if the file doesn't exist, we can't get a hash
if not path or not os.path.isfile(path):
return ret # depends on [control=['if'], data=[]]
# set the hash_type as it is determined by config-- so mechanism won't change that
ret['hash_type'] = __opts__['hash_type']
# check if the hash is cached
# cache file's contents should be "hash:mtime"
cache_path = os.path.join(__opts__['cachedir'], 'minionfs', 'hash', load['saltenv'], '{0}.hash.{1}'.format(fnd['rel'], __opts__['hash_type']))
# if we have a cache, serve that if the mtime hasn't changed
if os.path.exists(cache_path):
try:
with salt.utils.files.fopen(cache_path, 'rb') as fp_:
try:
(hsum, mtime) = salt.utils.stringutils.to_unicode(fp_.read()).split(':') # depends on [control=['try'], data=[]]
except ValueError:
log.debug('Fileserver attempted to read incomplete cache file. Retrying.')
file_hash(load, fnd)
return ret # depends on [control=['except'], data=[]]
if os.path.getmtime(path) == mtime:
# check if mtime changed
ret['hsum'] = hsum
return ret # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fp_']] # depends on [control=['try'], data=[]]
# Can't use Python select() because we need Windows support
except os.error:
log.debug('Fileserver encountered lock when reading cache file. Retrying.')
file_hash(load, fnd)
return ret # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
# if we don't have a cache entry-- lets make one
ret['hsum'] = salt.utils.hashutils.get_hash(path, __opts__['hash_type'])
cache_dir = os.path.dirname(cache_path)
# make cache directory if it doesn't exist
if not os.path.exists(cache_dir):
os.makedirs(cache_dir) # depends on [control=['if'], data=[]]
# save the cache object "hash:mtime"
cache_object = '{0}:{1}'.format(ret['hsum'], os.path.getmtime(path))
with salt.utils.files.flopen(cache_path, 'w') as fp_:
fp_.write(cache_object) # depends on [control=['with'], data=['fp_']]
return ret |
def _search(self, category, name, recurse=True):
"""Search the scope stack for the name in the specified
category (types/locals/vars).
:category: the category to search in (locals/types/vars)
:name: name to search for
:returns: None if not found, the result of the found local/type/id
"""
idx = len(self._scope_stack) - 1
curr = self._curr_scope
for scope in reversed(self._scope_stack):
res = scope[category].get(name, None)
if res is not None:
return res
if recurse and self._parent is not None:
return self._parent._search(category, name, recurse)
return None | def function[_search, parameter[self, category, name, recurse]]:
constant[Search the scope stack for the name in the specified
category (types/locals/vars).
:category: the category to search in (locals/types/vars)
:name: name to search for
:returns: None if not found, the result of the found local/type/id
]
variable[idx] assign[=] binary_operation[call[name[len], parameter[name[self]._scope_stack]] - constant[1]]
variable[curr] assign[=] name[self]._curr_scope
for taget[name[scope]] in starred[call[name[reversed], parameter[name[self]._scope_stack]]] begin[:]
variable[res] assign[=] call[call[name[scope]][name[category]].get, parameter[name[name], constant[None]]]
if compare[name[res] is_not constant[None]] begin[:]
return[name[res]]
if <ast.BoolOp object at 0x7da1b10d7820> begin[:]
return[call[name[self]._parent._search, parameter[name[category], name[name], name[recurse]]]]
return[constant[None]] | keyword[def] identifier[_search] ( identifier[self] , identifier[category] , identifier[name] , identifier[recurse] = keyword[True] ):
literal[string]
identifier[idx] = identifier[len] ( identifier[self] . identifier[_scope_stack] )- literal[int]
identifier[curr] = identifier[self] . identifier[_curr_scope]
keyword[for] identifier[scope] keyword[in] identifier[reversed] ( identifier[self] . identifier[_scope_stack] ):
identifier[res] = identifier[scope] [ identifier[category] ]. identifier[get] ( identifier[name] , keyword[None] )
keyword[if] identifier[res] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[res]
keyword[if] identifier[recurse] keyword[and] identifier[self] . identifier[_parent] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_parent] . identifier[_search] ( identifier[category] , identifier[name] , identifier[recurse] )
keyword[return] keyword[None] | def _search(self, category, name, recurse=True):
"""Search the scope stack for the name in the specified
category (types/locals/vars).
:category: the category to search in (locals/types/vars)
:name: name to search for
:returns: None if not found, the result of the found local/type/id
"""
idx = len(self._scope_stack) - 1
curr = self._curr_scope
for scope in reversed(self._scope_stack):
res = scope[category].get(name, None)
if res is not None:
return res # depends on [control=['if'], data=['res']] # depends on [control=['for'], data=['scope']]
if recurse and self._parent is not None:
return self._parent._search(category, name, recurse) # depends on [control=['if'], data=[]]
return None |
def rescale_taps(taps):
    """
    Rescale taps in that way that their sum equals 1

    Parameters
    ----------
    taps : sequence of numbers
        Filter taps to normalize.

    Returns
    -------
    list
        The taps divided by their sum, so the result sums to 1.
    """
    # Bug fix: force a float array. With the original ``np.array(taps)``,
    # integer input produced an int dtype and the in-place assignment
    # ``taps[i] = x / cs`` truncated every entry (e.g. [1, 1] -> [0, 0]).
    taps = np.asarray(taps, dtype=float)
    cs = taps.sum()
    # fixme: not sure here, abs seems right as it avoids overflows in core,
    # then again it reduces the fir gain
    # cs = sum(abs(taps))
    return (taps / cs).tolist()
constant[
Rescale taps in that way that their sum equals 1
]
variable[taps] assign[=] call[name[np].array, parameter[name[taps]]]
variable[cs] assign[=] call[name[sum], parameter[name[taps]]]
for taget[tuple[[<ast.Name object at 0x7da1b0a82500>, <ast.Name object at 0x7da1b0a80490>]]] in starred[call[name[enumerate], parameter[name[taps]]]] begin[:]
call[name[taps]][name[i]] assign[=] binary_operation[name[x] / name[cs]]
return[call[name[taps].tolist, parameter[]]] | keyword[def] identifier[rescale_taps] ( identifier[taps] ):
literal[string]
identifier[taps] = identifier[np] . identifier[array] ( identifier[taps] )
identifier[cs] = identifier[sum] ( identifier[taps] )
keyword[for] ( identifier[i] , identifier[x] ) keyword[in] identifier[enumerate] ( identifier[taps] ):
identifier[taps] [ identifier[i] ]= identifier[x] / identifier[cs]
keyword[return] identifier[taps] . identifier[tolist] () | def rescale_taps(taps):
"""
Rescale taps in that way that their sum equals 1
"""
taps = np.array(taps)
cs = sum(taps)
# fixme: not sure here, abs seems right as it avoids overflows in core,
# then again it reduces the fir gain
# cs = sum(abs(taps))
for (i, x) in enumerate(taps):
taps[i] = x / cs # depends on [control=['for'], data=[]]
return taps.tolist() |
def _pathology_iterator(graph):
    """Yield every pathology node that appears as the source or target of an edge.

    :param pybel.BELGraph graph: A BEL graph
    :rtype: iter
    """
    endpoints = itt.chain.from_iterable(graph.edges())
    yield from (item for item in endpoints if isinstance(item, Pathology))
constant[Iterate over edges in which either the source or target is a pathology node.
:param pybel.BELGraph graph: A BEL graph
:rtype: iter
]
for taget[name[node]] in starred[call[name[itt].chain.from_iterable, parameter[call[name[graph].edges, parameter[]]]]] begin[:]
if call[name[isinstance], parameter[name[node], name[Pathology]]] begin[:]
<ast.Yield object at 0x7da1b0e2cc70> | keyword[def] identifier[_pathology_iterator] ( identifier[graph] ):
literal[string]
keyword[for] identifier[node] keyword[in] identifier[itt] . identifier[chain] . identifier[from_iterable] ( identifier[graph] . identifier[edges] ()):
keyword[if] identifier[isinstance] ( identifier[node] , identifier[Pathology] ):
keyword[yield] identifier[node] | def _pathology_iterator(graph):
"""Iterate over edges in which either the source or target is a pathology node.
:param pybel.BELGraph graph: A BEL graph
:rtype: iter
"""
for node in itt.chain.from_iterable(graph.edges()):
if isinstance(node, Pathology):
yield node # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] |
def validate(self, name, value):
    """Validate and return a value."""
    if self.valid_set and value not in self.valid_set:
        choices = ", ".join("\"%s\"" % option for option in self.valid_set)
        raise ImproperlyConfigured(
            "%s: \"%s\" is not a valid setting (choose between %s)." %
            (name, value, choices))
    return value
constant[Validate and return a value.]
if <ast.BoolOp object at 0x7da18f00eda0> begin[:]
<ast.Raise object at 0x7da18f00e1a0>
return[name[value]] | keyword[def] identifier[validate] ( identifier[self] , identifier[name] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[valid_set] keyword[and] identifier[value] keyword[not] keyword[in] identifier[self] . identifier[valid_set] :
keyword[raise] identifier[ImproperlyConfigured] (
literal[string] %
( identifier[name] , identifier[value] , literal[string] . identifier[join] ( literal[string] % identifier[x] keyword[for] identifier[x] keyword[in] identifier[self] . identifier[valid_set] )))
keyword[return] identifier[value] | def validate(self, name, value):
"""Validate and return a value."""
if self.valid_set and value not in self.valid_set:
raise ImproperlyConfigured('%s: "%s" is not a valid setting (choose between %s).' % (name, value, ', '.join(('"%s"' % x for x in self.valid_set)))) # depends on [control=['if'], data=[]]
return value |
def getPhysicalMaximum(self, chn=None):
    """
    Return the maximum physical value of signal ``chn``.

    Parameters
    ----------
    chn : int, optional
        Channel number. When omitted, an array holding the physical
        maximum of every signal in the file is returned. An out-of-range
        channel number yields 0.

    Examples
    --------
    >>> import pyedflib
    >>> f = pyedflib.data.test_generator()
    >>> f.getPhysicalMaximum(0)==1000.0
    True
    >>> f._close()
    >>> del f
    """
    if chn is None:
        # Collect the maximum of every signal in one pass.
        return np.array([self.physical_max(ch)
                         for ch in np.arange(self.signals_in_file)])
    return self.physical_max(chn) if 0 <= chn < self.signals_in_file else 0
constant[
Returns the maximum physical value of signal edfsignal.
Parameters
----------
chn : int
channel number
Examples
--------
>>> import pyedflib
>>> f = pyedflib.data.test_generator()
>>> f.getPhysicalMaximum(0)==1000.0
True
>>> f._close()
>>> del f
]
if compare[name[chn] is_not constant[None]] begin[:]
if compare[constant[0] less_or_equal[<=] name[chn]] begin[:]
return[call[name[self].physical_max, parameter[name[chn]]]] | keyword[def] identifier[getPhysicalMaximum] ( identifier[self] , identifier[chn] = keyword[None] ):
literal[string]
keyword[if] identifier[chn] keyword[is] keyword[not] keyword[None] :
keyword[if] literal[int] <= identifier[chn] < identifier[self] . identifier[signals_in_file] :
keyword[return] identifier[self] . identifier[physical_max] ( identifier[chn] )
keyword[else] :
keyword[return] literal[int]
keyword[else] :
identifier[physMax] = identifier[np] . identifier[zeros] ( identifier[self] . identifier[signals_in_file] )
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[self] . identifier[signals_in_file] ):
identifier[physMax] [ identifier[i] ]= identifier[self] . identifier[physical_max] ( identifier[i] )
keyword[return] identifier[physMax] | def getPhysicalMaximum(self, chn=None):
"""
Returns the maximum physical value of signal edfsignal.
Parameters
----------
chn : int
channel number
Examples
--------
>>> import pyedflib
>>> f = pyedflib.data.test_generator()
>>> f.getPhysicalMaximum(0)==1000.0
True
>>> f._close()
>>> del f
"""
if chn is not None:
if 0 <= chn < self.signals_in_file:
return self.physical_max(chn) # depends on [control=['if'], data=['chn']]
else:
return 0 # depends on [control=['if'], data=['chn']]
else:
physMax = np.zeros(self.signals_in_file)
for i in np.arange(self.signals_in_file):
physMax[i] = self.physical_max(i) # depends on [control=['for'], data=['i']]
return physMax |
def _group_perm_cache(self):
"""
cached_permissions will generate the cache in a lazy fashion.
"""
# Check to see if the cache has been primed.
if not self.group:
return {}
cache_filled = getattr(
self.group,
'_authority_perm_cache_filled',
False,
)
if cache_filled:
# Don't really like the name for this, but this matches how Django
# does it.
return self.group._authority_perm_cache
# Prime the cache.
self._prime_group_perm_caches()
return self.group._authority_perm_cache | def function[_group_perm_cache, parameter[self]]:
constant[
cached_permissions will generate the cache in a lazy fashion.
]
if <ast.UnaryOp object at 0x7da1b0539360> begin[:]
return[dictionary[[], []]]
variable[cache_filled] assign[=] call[name[getattr], parameter[name[self].group, constant[_authority_perm_cache_filled], constant[False]]]
if name[cache_filled] begin[:]
return[name[self].group._authority_perm_cache]
call[name[self]._prime_group_perm_caches, parameter[]]
return[name[self].group._authority_perm_cache] | keyword[def] identifier[_group_perm_cache] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[group] :
keyword[return] {}
identifier[cache_filled] = identifier[getattr] (
identifier[self] . identifier[group] ,
literal[string] ,
keyword[False] ,
)
keyword[if] identifier[cache_filled] :
keyword[return] identifier[self] . identifier[group] . identifier[_authority_perm_cache]
identifier[self] . identifier[_prime_group_perm_caches] ()
keyword[return] identifier[self] . identifier[group] . identifier[_authority_perm_cache] | def _group_perm_cache(self):
"""
cached_permissions will generate the cache in a lazy fashion.
"""
# Check to see if the cache has been primed.
if not self.group:
return {} # depends on [control=['if'], data=[]]
cache_filled = getattr(self.group, '_authority_perm_cache_filled', False)
if cache_filled:
# Don't really like the name for this, but this matches how Django
# does it.
return self.group._authority_perm_cache # depends on [control=['if'], data=[]]
# Prime the cache.
self._prime_group_perm_caches()
return self.group._authority_perm_cache |
def aggregated_records(all_records, key_fields=KEY_FIELDS):
    """
    Yield one dict per aggregate of the FlowRecords in `all_records`,
    grouped over `key_fields`. Records missing any key field are skipped.

    Consumes the `all_records` iterator entirely, so enough memory to hold
    the aggregation table is required.

    `key_fields` defaults to the typical flow 5-tuple.
    """
    flow_table = defaultdict(_FlowStats)
    for record in all_records:
        key = tuple(getattr(record, field) for field in key_fields)
        # Only aggregate complete records (identity check, not equality).
        if all(value is not None for value in key):
            flow_table[key].update(record)
    for key, stats in flow_table.items():
        result = dict(zip(key_fields, key))
        result.update(stats.to_dict())
        yield result
constant[
Yield dicts that correspond to aggregates of the flow records given by
the sequence of FlowRecords in `all_records`. Skips incomplete records.
This will consume the `all_records` iterator, and requires enough memory to
be able to read it entirely.
`key_fields` optionally contains the fields over which to aggregate. By
default it's the typical flow 5-tuple.
]
variable[flow_table] assign[=] call[name[defaultdict], parameter[name[_FlowStats]]]
for taget[name[flow_record]] in starred[name[all_records]] begin[:]
variable[key] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da2046223e0>]]
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da204621f90>]] begin[:]
continue
call[call[name[flow_table]][name[key]].update, parameter[name[flow_record]]]
for taget[name[key]] in starred[name[flow_table]] begin[:]
variable[item] assign[=] <ast.DictComp object at 0x7da2046207c0>
call[name[item].update, parameter[call[call[name[flow_table]][name[key]].to_dict, parameter[]]]]
<ast.Yield object at 0x7da204621090> | keyword[def] identifier[aggregated_records] ( identifier[all_records] , identifier[key_fields] = identifier[KEY_FIELDS] ):
literal[string]
identifier[flow_table] = identifier[defaultdict] ( identifier[_FlowStats] )
keyword[for] identifier[flow_record] keyword[in] identifier[all_records] :
identifier[key] = identifier[tuple] ( identifier[getattr] ( identifier[flow_record] , identifier[attr] ) keyword[for] identifier[attr] keyword[in] identifier[key_fields] )
keyword[if] identifier[any] ( identifier[x] keyword[is] keyword[None] keyword[for] identifier[x] keyword[in] identifier[key] ):
keyword[continue]
identifier[flow_table] [ identifier[key] ]. identifier[update] ( identifier[flow_record] )
keyword[for] identifier[key] keyword[in] identifier[flow_table] :
identifier[item] ={ identifier[k] : identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[zip] ( identifier[key_fields] , identifier[key] )}
identifier[item] . identifier[update] ( identifier[flow_table] [ identifier[key] ]. identifier[to_dict] ())
keyword[yield] identifier[item] | def aggregated_records(all_records, key_fields=KEY_FIELDS):
"""
Yield dicts that correspond to aggregates of the flow records given by
the sequence of FlowRecords in `all_records`. Skips incomplete records.
This will consume the `all_records` iterator, and requires enough memory to
be able to read it entirely.
`key_fields` optionally contains the fields over which to aggregate. By
default it's the typical flow 5-tuple.
"""
flow_table = defaultdict(_FlowStats)
for flow_record in all_records:
key = tuple((getattr(flow_record, attr) for attr in key_fields))
if any((x is None for x in key)):
continue # depends on [control=['if'], data=[]]
flow_table[key].update(flow_record) # depends on [control=['for'], data=['flow_record']]
for key in flow_table:
item = {k: v for (k, v) in zip(key_fields, key)}
item.update(flow_table[key].to_dict())
yield item # depends on [control=['for'], data=['key']] |
def delete(self, subscription_id, data=None):
    """Cancel a subscription and return the updated subscription object.

    Cancelling moves the subscription's status to 'canceled'; the object
    returned reflects that new state. Raises IdentifierError when the ID
    does not carry the expected resource prefix.
    """
    looks_valid = subscription_id and subscription_id.startswith(self.RESOURCE_ID_PREFIX)
    if not looks_valid:
        raise IdentifierError(
            "Invalid subscription ID: '{id}'. A subscription ID should start with '{prefix}'.".format(
                id=subscription_id, prefix=self.RESOURCE_ID_PREFIX)
        )
    deleted = super(CustomerSubscriptions, self).delete(subscription_id, data)
    return self.get_resource_object(deleted)
constant[Cancel subscription and return the subscription object.
Deleting a subscription causes the subscription status to changed to 'canceled'.
The updated subscription object is returned.
]
if <ast.BoolOp object at 0x7da204620160> begin[:]
<ast.Raise object at 0x7da204621720>
variable[result] assign[=] call[call[name[super], parameter[name[CustomerSubscriptions], name[self]]].delete, parameter[name[subscription_id], name[data]]]
return[call[name[self].get_resource_object, parameter[name[result]]]] | keyword[def] identifier[delete] ( identifier[self] , identifier[subscription_id] , identifier[data] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[subscription_id] keyword[or] keyword[not] identifier[subscription_id] . identifier[startswith] ( identifier[self] . identifier[RESOURCE_ID_PREFIX] ):
keyword[raise] identifier[IdentifierError] (
literal[string] . identifier[format] (
identifier[id] = identifier[subscription_id] , identifier[prefix] = identifier[self] . identifier[RESOURCE_ID_PREFIX] )
)
identifier[result] = identifier[super] ( identifier[CustomerSubscriptions] , identifier[self] ). identifier[delete] ( identifier[subscription_id] , identifier[data] )
keyword[return] identifier[self] . identifier[get_resource_object] ( identifier[result] ) | def delete(self, subscription_id, data=None):
"""Cancel subscription and return the subscription object.
Deleting a subscription causes the subscription status to changed to 'canceled'.
The updated subscription object is returned.
"""
if not subscription_id or not subscription_id.startswith(self.RESOURCE_ID_PREFIX):
raise IdentifierError("Invalid subscription ID: '{id}'. A subscription ID should start with '{prefix}'.".format(id=subscription_id, prefix=self.RESOURCE_ID_PREFIX)) # depends on [control=['if'], data=[]]
result = super(CustomerSubscriptions, self).delete(subscription_id, data)
return self.get_resource_object(result) |
def chunk(self, chunks=None, name_prefix='xarray-', token=None,
              lock=False):
        """Coerce all arrays in this dataset into dask arrays with the given
        chunks.

        Non-dask arrays in this dataset will be converted to dask arrays.
        Dask arrays will be rechunked to the given chunk sizes.

        If chunks are not provided for one or more dimensions, chunk sizes
        along those dimensions will not be updated; non-dask arrays will be
        converted into dask arrays with a single block.

        Parameters
        ----------
        chunks : int or dict, optional
            Chunk sizes along each dimension, e.g., ``5`` or
            ``{'x': 5, 'y': 5}``. A single int is applied to every
            dimension of the dataset.
        name_prefix : str, optional
            Prefix for the name of any new dask arrays.
        token : str, optional
            Token uniquely identifying this dataset.
        lock : optional
            Passed on to :py:func:`dask.array.from_array`, if the array is not
            already as dask array.

        Returns
        -------
        chunked : xarray.Dataset
        """
        try:
            from dask.base import tokenize
        except ImportError:
            # raise the usual error if dask is entirely missing
            import dask  # noqa
            # dask imported fine but is too old to provide dask.base.tokenize
            raise ImportError('xarray requires dask version 0.9 or newer')
        # A bare number means "use this chunk size along every dimension".
        if isinstance(chunks, Number):
            chunks = dict.fromkeys(self.dims, chunks)
        if chunks is not None:
            # Reject chunk keys that do not name a dimension of this dataset.
            bad_dims = [d for d in chunks if d not in self.dims]
            if bad_dims:
                raise ValueError('some chunks keys are not dimensions on this '
                                 'object: %s' % bad_dims)

        def selkeys(dict_, keys):
            # Restrict ``dict_`` to the given keys; ``None`` passes through.
            if dict_ is None:
                return None
            return dict((d, dict_[d]) for d in keys if d in dict_)

        def maybe_chunk(name, var, chunks):
            # Chunk a single variable, using only the chunk sizes that apply
            # to its own dimensions; 0-d (scalar) variables are left as-is.
            chunks = selkeys(chunks, var.dims)
            if not chunks:
                chunks = None
            if var.ndim > 0:
                # Derive a deterministic dask graph name from the variable
                # name plus either the caller-supplied token or the data.
                token2 = tokenize(name, token if token else var._data)
                name2 = '%s%s-%s' % (name_prefix, name, token2)
                return var.chunk(chunks, name=name2, lock=lock)
            else:
                return var

        variables = OrderedDict([(k, maybe_chunk(k, v, chunks))
                                 for k, v in self.variables.items()])
        return self._replace(variables)
constant[Coerce all arrays in this dataset into dask arrays with the given
chunks.
Non-dask arrays in this dataset will be converted to dask arrays. Dask
arrays will be rechunked to the given chunk sizes.
If neither chunks is not provided for one or more dimensions, chunk
sizes along that dimension will not be updated; non-dask arrays will be
converted into dask arrays with a single block.
Parameters
----------
chunks : int or dict, optional
Chunk sizes along each dimension, e.g., ``5`` or
``{'x': 5, 'y': 5}``.
name_prefix : str, optional
Prefix for the name of any new dask arrays.
token : str, optional
Token uniquely identifying this dataset.
lock : optional
Passed on to :py:func:`dask.array.from_array`, if the array is not
already as dask array.
Returns
-------
chunked : xarray.Dataset
]
<ast.Try object at 0x7da1b1f95cc0>
if call[name[isinstance], parameter[name[chunks], name[Number]]] begin[:]
variable[chunks] assign[=] call[name[dict].fromkeys, parameter[name[self].dims, name[chunks]]]
if compare[name[chunks] is_not constant[None]] begin[:]
variable[bad_dims] assign[=] <ast.ListComp object at 0x7da1b1f95db0>
if name[bad_dims] begin[:]
<ast.Raise object at 0x7da1b1f97d90>
def function[selkeys, parameter[dict_, keys]]:
if compare[name[dict_] is constant[None]] begin[:]
return[constant[None]]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b1f951e0>]]]
def function[maybe_chunk, parameter[name, var, chunks]]:
variable[chunks] assign[=] call[name[selkeys], parameter[name[chunks], name[var].dims]]
if <ast.UnaryOp object at 0x7da1b1f96260> begin[:]
variable[chunks] assign[=] constant[None]
if compare[name[var].ndim greater[>] constant[0]] begin[:]
variable[token2] assign[=] call[name[tokenize], parameter[name[name], <ast.IfExp object at 0x7da1b1f969e0>]]
variable[name2] assign[=] binary_operation[constant[%s%s-%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1f96230>, <ast.Name object at 0x7da1b1f96fe0>, <ast.Name object at 0x7da1b1f97760>]]]
return[call[name[var].chunk, parameter[name[chunks]]]]
variable[variables] assign[=] call[name[OrderedDict], parameter[<ast.ListComp object at 0x7da1b1f95300>]]
return[call[name[self]._replace, parameter[name[variables]]]] | keyword[def] identifier[chunk] ( identifier[self] , identifier[chunks] = keyword[None] , identifier[name_prefix] = literal[string] , identifier[token] = keyword[None] ,
identifier[lock] = keyword[False] ):
literal[string]
keyword[try] :
keyword[from] identifier[dask] . identifier[base] keyword[import] identifier[tokenize]
keyword[except] identifier[ImportError] :
keyword[import] identifier[dask]
keyword[raise] identifier[ImportError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[chunks] , identifier[Number] ):
identifier[chunks] = identifier[dict] . identifier[fromkeys] ( identifier[self] . identifier[dims] , identifier[chunks] )
keyword[if] identifier[chunks] keyword[is] keyword[not] keyword[None] :
identifier[bad_dims] =[ identifier[d] keyword[for] identifier[d] keyword[in] identifier[chunks] keyword[if] identifier[d] keyword[not] keyword[in] identifier[self] . identifier[dims] ]
keyword[if] identifier[bad_dims] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] % identifier[bad_dims] )
keyword[def] identifier[selkeys] ( identifier[dict_] , identifier[keys] ):
keyword[if] identifier[dict_] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[dict] (( identifier[d] , identifier[dict_] [ identifier[d] ]) keyword[for] identifier[d] keyword[in] identifier[keys] keyword[if] identifier[d] keyword[in] identifier[dict_] )
keyword[def] identifier[maybe_chunk] ( identifier[name] , identifier[var] , identifier[chunks] ):
identifier[chunks] = identifier[selkeys] ( identifier[chunks] , identifier[var] . identifier[dims] )
keyword[if] keyword[not] identifier[chunks] :
identifier[chunks] = keyword[None]
keyword[if] identifier[var] . identifier[ndim] > literal[int] :
identifier[token2] = identifier[tokenize] ( identifier[name] , identifier[token] keyword[if] identifier[token] keyword[else] identifier[var] . identifier[_data] )
identifier[name2] = literal[string] %( identifier[name_prefix] , identifier[name] , identifier[token2] )
keyword[return] identifier[var] . identifier[chunk] ( identifier[chunks] , identifier[name] = identifier[name2] , identifier[lock] = identifier[lock] )
keyword[else] :
keyword[return] identifier[var]
identifier[variables] = identifier[OrderedDict] ([( identifier[k] , identifier[maybe_chunk] ( identifier[k] , identifier[v] , identifier[chunks] ))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[variables] . identifier[items] ()])
keyword[return] identifier[self] . identifier[_replace] ( identifier[variables] ) | def chunk(self, chunks=None, name_prefix='xarray-', token=None, lock=False):
"""Coerce all arrays in this dataset into dask arrays with the given
chunks.
Non-dask arrays in this dataset will be converted to dask arrays. Dask
arrays will be rechunked to the given chunk sizes.
If neither chunks is not provided for one or more dimensions, chunk
sizes along that dimension will not be updated; non-dask arrays will be
converted into dask arrays with a single block.
Parameters
----------
chunks : int or dict, optional
Chunk sizes along each dimension, e.g., ``5`` or
``{'x': 5, 'y': 5}``.
name_prefix : str, optional
Prefix for the name of any new dask arrays.
token : str, optional
Token uniquely identifying this dataset.
lock : optional
Passed on to :py:func:`dask.array.from_array`, if the array is not
already as dask array.
Returns
-------
chunked : xarray.Dataset
"""
try:
from dask.base import tokenize # depends on [control=['try'], data=[]]
except ImportError:
# raise the usual error if dask is entirely missing
import dask # noqa
raise ImportError('xarray requires dask version 0.9 or newer') # depends on [control=['except'], data=[]]
if isinstance(chunks, Number):
chunks = dict.fromkeys(self.dims, chunks) # depends on [control=['if'], data=[]]
if chunks is not None:
bad_dims = [d for d in chunks if d not in self.dims]
if bad_dims:
raise ValueError('some chunks keys are not dimensions on this object: %s' % bad_dims) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['chunks']]
def selkeys(dict_, keys):
if dict_ is None:
return None # depends on [control=['if'], data=[]]
return dict(((d, dict_[d]) for d in keys if d in dict_))
def maybe_chunk(name, var, chunks):
chunks = selkeys(chunks, var.dims)
if not chunks:
chunks = None # depends on [control=['if'], data=[]]
if var.ndim > 0:
token2 = tokenize(name, token if token else var._data)
name2 = '%s%s-%s' % (name_prefix, name, token2)
return var.chunk(chunks, name=name2, lock=lock) # depends on [control=['if'], data=[]]
else:
return var
variables = OrderedDict([(k, maybe_chunk(k, v, chunks)) for (k, v) in self.variables.items()])
return self._replace(variables) |
def createuser(ctx, email, password, superuser, no_password, prompt):
    'Create a new user.'
    # Interactive mode: ask for any values that were not passed as options.
    if prompt:
        if not email:
            email = click.prompt('Email')
        if not (password or no_password):
            password = click.prompt('Password')
        if superuser is None:
            superuser = click.confirm('Should this user be a superuser?', default=False)
    # An unanswered/omitted superuser flag defaults to a regular user.
    if superuser is None:
        superuser = False
    # Validate collected inputs; ClickException aborts with a non-zero exit.
    if not email:
        raise click.ClickException('Invalid or missing email address.')
    if not no_password and not password:
        raise click.ClickException('No password set and --no-password not passed.')
    # Configure Django before touching the ORM; the model import must come
    # after django.setup().
    import django
    django.setup()
    from bitcaster.models import User
    user = User.objects.filter(email=email).first()
    if user:
        # Address already taken: update the existing account, but only with
        # explicit confirmation when running interactively.
        if prompt:
            change = click.confirm(f'User {email} already exists. Proceed updating it?', default=False)
            if not change:
                ctx.exit()
            # NOTE(review): with --no-password this calls set_password(None);
            # presumably that marks the password unusable — confirm intended.
            user.set_password(password)
            # Only ever upgrades to superuser; an existing superuser is never
            # demoted here, and is_staff is left unchanged — verify intended.
            if superuser:
                user.is_superuser = superuser
            op = 'updated'
        else:
            # Non-interactive runs refuse to modify an existing account.
            click.echo('Nothing to do. User exists', err=True, color='red')
            sys.exit(1)
    else:
        # Fresh account; superusers are also flagged as staff.
        op = 'created'
        user = User(
            email=email,
            is_superuser=superuser,
            is_staff=superuser,
            is_active=True,
        )
        if password:
            user.set_password(password)
    try:
        user.save()
    except Exception as e:
        # NOTE(review): ClickException expects a message string; passing the
        # exception object works via formatting, but str(e) is conventional.
        raise click.ClickException(e)
    click.echo(f'User {email} {op}')
constant[Create a new user.]
if name[prompt] begin[:]
if <ast.UnaryOp object at 0x7da18fe90a60> begin[:]
variable[email] assign[=] call[name[click].prompt, parameter[constant[Email]]]
if <ast.UnaryOp object at 0x7da18fe924a0> begin[:]
variable[password] assign[=] call[name[click].prompt, parameter[constant[Password]]]
if compare[name[superuser] is constant[None]] begin[:]
variable[superuser] assign[=] call[name[click].confirm, parameter[constant[Should this user be a superuser?]]]
if compare[name[superuser] is constant[None]] begin[:]
variable[superuser] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b1294430> begin[:]
<ast.Raise object at 0x7da1b1297220>
if <ast.BoolOp object at 0x7da1b1294880> begin[:]
<ast.Raise object at 0x7da1b1296500>
import module[django]
call[name[django].setup, parameter[]]
from relative_module[bitcaster.models] import module[User]
variable[user] assign[=] call[call[name[User].objects.filter, parameter[]].first, parameter[]]
if name[user] begin[:]
if name[prompt] begin[:]
variable[change] assign[=] call[name[click].confirm, parameter[<ast.JoinedStr object at 0x7da1b12953c0>]]
if <ast.UnaryOp object at 0x7da1b1296830> begin[:]
call[name[ctx].exit, parameter[]]
call[name[user].set_password, parameter[name[password]]]
if name[superuser] begin[:]
name[user].is_superuser assign[=] name[superuser]
variable[op] assign[=] constant[updated]
<ast.Try object at 0x7da18f8131c0>
call[name[click].echo, parameter[<ast.JoinedStr object at 0x7da18f811b10>]] | keyword[def] identifier[createuser] ( identifier[ctx] , identifier[email] , identifier[password] , identifier[superuser] , identifier[no_password] , identifier[prompt] ):
literal[string]
keyword[if] identifier[prompt] :
keyword[if] keyword[not] identifier[email] :
identifier[email] = identifier[click] . identifier[prompt] ( literal[string] )
keyword[if] keyword[not] ( identifier[password] keyword[or] identifier[no_password] ):
identifier[password] = identifier[click] . identifier[prompt] ( literal[string] )
keyword[if] identifier[superuser] keyword[is] keyword[None] :
identifier[superuser] = identifier[click] . identifier[confirm] ( literal[string] , identifier[default] = keyword[False] )
keyword[if] identifier[superuser] keyword[is] keyword[None] :
identifier[superuser] = keyword[False]
keyword[if] keyword[not] identifier[email] :
keyword[raise] identifier[click] . identifier[ClickException] ( literal[string] )
keyword[if] keyword[not] identifier[no_password] keyword[and] keyword[not] identifier[password] :
keyword[raise] identifier[click] . identifier[ClickException] ( literal[string] )
keyword[import] identifier[django]
identifier[django] . identifier[setup] ()
keyword[from] identifier[bitcaster] . identifier[models] keyword[import] identifier[User]
identifier[user] = identifier[User] . identifier[objects] . identifier[filter] ( identifier[email] = identifier[email] ). identifier[first] ()
keyword[if] identifier[user] :
keyword[if] identifier[prompt] :
identifier[change] = identifier[click] . identifier[confirm] ( literal[string] , identifier[default] = keyword[False] )
keyword[if] keyword[not] identifier[change] :
identifier[ctx] . identifier[exit] ()
identifier[user] . identifier[set_password] ( identifier[password] )
keyword[if] identifier[superuser] :
identifier[user] . identifier[is_superuser] = identifier[superuser]
identifier[op] = literal[string]
keyword[else] :
identifier[click] . identifier[echo] ( literal[string] , identifier[err] = keyword[True] , identifier[color] = literal[string] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[else] :
identifier[op] = literal[string]
identifier[user] = identifier[User] (
identifier[email] = identifier[email] ,
identifier[is_superuser] = identifier[superuser] ,
identifier[is_staff] = identifier[superuser] ,
identifier[is_active] = keyword[True] ,
)
keyword[if] identifier[password] :
identifier[user] . identifier[set_password] ( identifier[password] )
keyword[try] :
identifier[user] . identifier[save] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[raise] identifier[click] . identifier[ClickException] ( identifier[e] )
identifier[click] . identifier[echo] ( literal[string] ) | def createuser(ctx, email, password, superuser, no_password, prompt):
"""Create a new user."""
if prompt:
if not email:
email = click.prompt('Email') # depends on [control=['if'], data=[]]
if not (password or no_password):
password = click.prompt('Password') # depends on [control=['if'], data=[]]
if superuser is None:
superuser = click.confirm('Should this user be a superuser?', default=False) # depends on [control=['if'], data=['superuser']] # depends on [control=['if'], data=[]]
if superuser is None:
superuser = False # depends on [control=['if'], data=['superuser']]
if not email:
raise click.ClickException('Invalid or missing email address.') # depends on [control=['if'], data=[]]
if not no_password and (not password):
raise click.ClickException('No password set and --no-password not passed.') # depends on [control=['if'], data=[]]
import django
django.setup()
from bitcaster.models import User
user = User.objects.filter(email=email).first()
if user:
if prompt:
change = click.confirm(f'User {email} already exists. Proceed updating it?', default=False)
if not change:
ctx.exit() # depends on [control=['if'], data=[]]
user.set_password(password)
if superuser:
user.is_superuser = superuser # depends on [control=['if'], data=[]]
op = 'updated' # depends on [control=['if'], data=[]]
else:
click.echo('Nothing to do. User exists', err=True, color='red')
sys.exit(1) # depends on [control=['if'], data=[]]
else:
op = 'created'
user = User(email=email, is_superuser=superuser, is_staff=superuser, is_active=True)
if password:
user.set_password(password) # depends on [control=['if'], data=[]]
try:
user.save() # depends on [control=['try'], data=[]]
except Exception as e:
raise click.ClickException(e) # depends on [control=['except'], data=['e']]
click.echo(f'User {email} {op}') |
def get_reqd_headers(self, table_name):
        """Return the headers of *table_name* whose validations are required.

        A header counts as required when the string ``'required()'`` appears
        in its ``validations`` cell.  Returns the matching index labels.
        """
        table_df = self.dm[table_name]
        required_mask = table_df['validations'].apply(
            lambda rule: 'required()' in str(rule))
        return table_df.loc[required_mask].index
constant[
Return a list of all required headers for a particular table
]
variable[df] assign[=] call[name[self].dm][name[table_name]]
variable[cond] assign[=] call[call[name[df]][constant[validations]].map, parameter[<ast.Lambda object at 0x7da1b01469b0>]]
return[call[name[df]][name[cond]].index] | keyword[def] identifier[get_reqd_headers] ( identifier[self] , identifier[table_name] ):
literal[string]
identifier[df] = identifier[self] . identifier[dm] [ identifier[table_name] ]
identifier[cond] = identifier[df] [ literal[string] ]. identifier[map] ( keyword[lambda] identifier[x] : literal[string] keyword[in] identifier[str] ( identifier[x] ))
keyword[return] identifier[df] [ identifier[cond] ]. identifier[index] | def get_reqd_headers(self, table_name):
"""
Return a list of all required headers for a particular table
"""
df = self.dm[table_name]
cond = df['validations'].map(lambda x: 'required()' in str(x))
return df[cond].index |
def compand(self, attack_time=0.3, decay_time=0.8, soft_knee_db=6.0,
                tf_points=[(-70, -70), (-60, -20), (0, 0)],
                ):
        '''Compress or expand (compand) the dynamic range of the audio.

        Parameters
        ----------
        attack_time : float, default=0.3
            Averaging time (seconds) used to detect increases in the input
            signal's instantaneous level.
        decay_time : float, default=0.8
            Averaging time (seconds) used to detect decreases in the input
            signal's instantaneous level.
        soft_knee_db : float or None, default=6.0
            Amount (in dB) by which the joins between adjacent segments of
            the transfer function are rounded. ``None`` disables the knee.
        tf_points : list of tuples
            The compander's transfer function, given as (input dB, output dB)
            pairs. All values must be <= 0 and input values must be unique.

        See Also
        --------
        mcompand, contrast
        '''
        # --- validate timing parameters -------------------------------------
        if not is_number(attack_time) or attack_time <= 0:
            raise ValueError("attack_time must be a positive number.")
        if not is_number(decay_time) or decay_time <= 0:
            raise ValueError("decay_time must be a positive number.")
        if attack_time > decay_time:
            logger.warning(
                "attack_time is larger than decay_time.\n"
                "For most situations, attack_time should be shorter than "
                "decay time because the human ear is more sensitive to sudden "
                "loud music than sudden soft music."
            )
        if soft_knee_db is not None and not is_number(soft_knee_db):
            raise ValueError("soft_knee_db must be a number or None.")
        # --- validate the transfer function ---------------------------------
        if not isinstance(tf_points, list):
            raise TypeError("tf_points must be a list.")
        if len(tf_points) == 0:
            raise ValueError("tf_points must have at least one point.")
        if not all(isinstance(pair, tuple) for pair in tf_points):
            raise ValueError("elements of tf_points must be pairs")
        if not all(len(pair) == 2 for pair in tf_points):
            raise ValueError("Tuples in tf_points must be length 2")
        if not all(is_number(x) and is_number(y) for x, y in tf_points):
            raise ValueError("Tuples in tf_points must be pairs of numbers.")
        if not all(x <= 0 and y <= 0 for x, y in tf_points):
            raise ValueError("Tuple values in tf_points must be <= 0 (dB).")
        x_values = [point[0] for point in tf_points]
        if len(set(x_values)) < len(x_values):
            raise ValueError("Found duplicate x-value in tf_points.")
        # --- build the sox effect arguments ---------------------------------
        # sox expects the transfer function as x,y values sorted by x.
        ordered_points = sorted(tf_points, key=lambda point: point[0])
        transfer_list = [
            "{:f}".format(value)
            for point in ordered_points
            for value in point
        ]
        effect_args = [
            'compand',
            "{:f},{:f}".format(attack_time, decay_time)
        ]
        transfer_spec = ",".join(transfer_list)
        if soft_knee_db is None:
            effect_args.append(transfer_spec)
        else:
            effect_args.append("{:f}:{}".format(soft_knee_db, transfer_spec))
        self.effects.extend(effect_args)
        self.effects_log.append('compand')
        return self
constant[Compand (compress or expand) the dynamic range of the audio.
Parameters
----------
attack_time : float, default=0.3
The time in seconds over which the instantaneous level of the input
signal is averaged to determine increases in volume.
decay_time : float, default=0.8
The time in seconds over which the instantaneous level of the input
signal is averaged to determine decreases in volume.
soft_knee_db : float or None, default=6.0
The ammount (in dB) for which the points at where adjacent line
segments on the transfer function meet will be rounded.
If None, no soft_knee is applied.
tf_points : list of tuples
Transfer function points as a list of tuples corresponding to
points in (dB, dB) defining the compander's transfer function.
See Also
--------
mcompand, contrast
]
if <ast.BoolOp object at 0x7da1b01224d0> begin[:]
<ast.Raise object at 0x7da1b0120100>
if <ast.BoolOp object at 0x7da1b0122440> begin[:]
<ast.Raise object at 0x7da1b0120cd0>
if compare[name[attack_time] greater[>] name[decay_time]] begin[:]
call[name[logger].warning, parameter[constant[attack_time is larger than decay_time.
For most situations, attack_time should be shorter than decay time because the human ear is more sensitive to sudden loud music than sudden soft music.]]]
if <ast.UnaryOp object at 0x7da1b0122320> begin[:]
<ast.Raise object at 0x7da1b0122830>
if <ast.UnaryOp object at 0x7da1b0121e70> begin[:]
<ast.Raise object at 0x7da1b0122050>
if compare[call[name[len], parameter[name[tf_points]]] equal[==] constant[0]] begin[:]
<ast.Raise object at 0x7da20c992980>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da20c992ec0>]] begin[:]
<ast.Raise object at 0x7da1b0109b10>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b01096f0>]] begin[:]
<ast.Raise object at 0x7da1b01086a0>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0108b20>]] begin[:]
<ast.Raise object at 0x7da1b00605e0>
if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0061ba0>]] begin[:]
<ast.Raise object at 0x7da1b00613c0>
if compare[call[name[len], parameter[name[tf_points]]] greater[>] call[name[len], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b0060580>]]]]] begin[:]
<ast.Raise object at 0x7da1b0063940>
variable[tf_points] assign[=] call[name[sorted], parameter[name[tf_points]]]
variable[transfer_list] assign[=] list[[]]
for taget[name[point]] in starred[name[tf_points]] begin[:]
call[name[transfer_list].extend, parameter[list[[<ast.Call object at 0x7da1b0060670>, <ast.Call object at 0x7da1b0063a30>]]]]
variable[effect_args] assign[=] list[[<ast.Constant object at 0x7da1b0061630>, <ast.Call object at 0x7da1b00617b0>]]
if compare[name[soft_knee_db] is_not constant[None]] begin[:]
call[name[effect_args].append, parameter[call[constant[{:f}:{}].format, parameter[name[soft_knee_db], call[constant[,].join, parameter[name[transfer_list]]]]]]]
call[name[self].effects.extend, parameter[name[effect_args]]]
call[name[self].effects_log.append, parameter[constant[compand]]]
return[name[self]] | keyword[def] identifier[compand] ( identifier[self] , identifier[attack_time] = literal[int] , identifier[decay_time] = literal[int] , identifier[soft_knee_db] = literal[int] ,
identifier[tf_points] =[(- literal[int] ,- literal[int] ),(- literal[int] ,- literal[int] ),( literal[int] , literal[int] )],
):
literal[string]
keyword[if] keyword[not] identifier[is_number] ( identifier[attack_time] ) keyword[or] identifier[attack_time] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[is_number] ( identifier[decay_time] ) keyword[or] identifier[decay_time] <= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[attack_time] > identifier[decay_time] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string]
literal[string]
literal[string]
)
keyword[if] keyword[not] ( identifier[is_number] ( identifier[soft_knee_db] ) keyword[or] identifier[soft_knee_db] keyword[is] keyword[None] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[tf_points] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[len] ( identifier[tf_points] )== literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[any] ( keyword[not] identifier[isinstance] ( identifier[pair] , identifier[tuple] ) keyword[for] identifier[pair] keyword[in] identifier[tf_points] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[any] ( identifier[len] ( identifier[pair] )!= literal[int] keyword[for] identifier[pair] keyword[in] identifier[tf_points] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[any] ( keyword[not] ( identifier[is_number] ( identifier[p] [ literal[int] ]) keyword[and] identifier[is_number] ( identifier[p] [ literal[int] ])) keyword[for] identifier[p] keyword[in] identifier[tf_points] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[any] (( identifier[p] [ literal[int] ]> literal[int] keyword[or] identifier[p] [ literal[int] ]> literal[int] ) keyword[for] identifier[p] keyword[in] identifier[tf_points] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[tf_points] )> identifier[len] ( identifier[set] ([ identifier[p] [ literal[int] ] keyword[for] identifier[p] keyword[in] identifier[tf_points] ])):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[tf_points] = identifier[sorted] (
identifier[tf_points] ,
identifier[key] = keyword[lambda] identifier[tf_points] : identifier[tf_points] [ literal[int] ]
)
identifier[transfer_list] =[]
keyword[for] identifier[point] keyword[in] identifier[tf_points] :
identifier[transfer_list] . identifier[extend] ([
literal[string] . identifier[format] ( identifier[point] [ literal[int] ]), literal[string] . identifier[format] ( identifier[point] [ literal[int] ])
])
identifier[effect_args] =[
literal[string] ,
literal[string] . identifier[format] ( identifier[attack_time] , identifier[decay_time] )
]
keyword[if] identifier[soft_knee_db] keyword[is] keyword[not] keyword[None] :
identifier[effect_args] . identifier[append] (
literal[string] . identifier[format] ( identifier[soft_knee_db] , literal[string] . identifier[join] ( identifier[transfer_list] ))
)
keyword[else] :
identifier[effect_args] . identifier[append] ( literal[string] . identifier[join] ( identifier[transfer_list] ))
identifier[self] . identifier[effects] . identifier[extend] ( identifier[effect_args] )
identifier[self] . identifier[effects_log] . identifier[append] ( literal[string] )
keyword[return] identifier[self] | def compand(self, attack_time=0.3, decay_time=0.8, soft_knee_db=6.0, tf_points=[(-70, -70), (-60, -20), (0, 0)]):
"""Compand (compress or expand) the dynamic range of the audio.
Parameters
----------
attack_time : float, default=0.3
The time in seconds over which the instantaneous level of the input
signal is averaged to determine increases in volume.
decay_time : float, default=0.8
The time in seconds over which the instantaneous level of the input
signal is averaged to determine decreases in volume.
soft_knee_db : float or None, default=6.0
The ammount (in dB) for which the points at where adjacent line
segments on the transfer function meet will be rounded.
If None, no soft_knee is applied.
tf_points : list of tuples
Transfer function points as a list of tuples corresponding to
points in (dB, dB) defining the compander's transfer function.
See Also
--------
mcompand, contrast
"""
if not is_number(attack_time) or attack_time <= 0:
raise ValueError('attack_time must be a positive number.') # depends on [control=['if'], data=[]]
if not is_number(decay_time) or decay_time <= 0:
raise ValueError('decay_time must be a positive number.') # depends on [control=['if'], data=[]]
if attack_time > decay_time:
logger.warning('attack_time is larger than decay_time.\nFor most situations, attack_time should be shorter than decay time because the human ear is more sensitive to sudden loud music than sudden soft music.') # depends on [control=['if'], data=[]]
if not (is_number(soft_knee_db) or soft_knee_db is None):
raise ValueError('soft_knee_db must be a number or None.') # depends on [control=['if'], data=[]]
if not isinstance(tf_points, list):
raise TypeError('tf_points must be a list.') # depends on [control=['if'], data=[]]
if len(tf_points) == 0:
raise ValueError('tf_points must have at least one point.') # depends on [control=['if'], data=[]]
if any((not isinstance(pair, tuple) for pair in tf_points)):
raise ValueError('elements of tf_points must be pairs') # depends on [control=['if'], data=[]]
if any((len(pair) != 2 for pair in tf_points)):
raise ValueError('Tuples in tf_points must be length 2') # depends on [control=['if'], data=[]]
if any((not (is_number(p[0]) and is_number(p[1])) for p in tf_points)):
raise ValueError('Tuples in tf_points must be pairs of numbers.') # depends on [control=['if'], data=[]]
if any((p[0] > 0 or p[1] > 0 for p in tf_points)):
raise ValueError('Tuple values in tf_points must be <= 0 (dB).') # depends on [control=['if'], data=[]]
if len(tf_points) > len(set([p[0] for p in tf_points])):
raise ValueError('Found duplicate x-value in tf_points.') # depends on [control=['if'], data=[]]
tf_points = sorted(tf_points, key=lambda tf_points: tf_points[0])
transfer_list = []
for point in tf_points:
transfer_list.extend(['{:f}'.format(point[0]), '{:f}'.format(point[1])]) # depends on [control=['for'], data=['point']]
effect_args = ['compand', '{:f},{:f}'.format(attack_time, decay_time)]
if soft_knee_db is not None:
effect_args.append('{:f}:{}'.format(soft_knee_db, ','.join(transfer_list))) # depends on [control=['if'], data=['soft_knee_db']]
else:
effect_args.append(','.join(transfer_list))
self.effects.extend(effect_args)
self.effects_log.append('compand')
return self |
def _create_template(self, name):
        """Create an instance of a tornado.template.Template object for the
        given template name.

        The template source is fetched over HTTP from the loader's base URL
        and is expected to arrive as a JSON object with a ``template`` key.

        :param str name: The name/path to the template
        :rtype: tornado.template.Template
        """
        url = '%s/%s' % (self._base_url, escape.url_escape(name))
        LOGGER.debug('Making HTTP GET request to %s', url)
        response = self._http_client.fetch(url)
        # BUG FIX: ``ensure_ascii`` is a json.dumps() option; passing it to
        # json.loads() raises TypeError ("unexpected keyword argument"), so
        # decode the response body without it.
        data = json.loads(response.body)
        return template.Template(data['template'], name=name, loader=self)
constant[Create an instance of a tornado.template.Template object for the
given template name.
:param str name: The name/path to the template
:rtype: tornado.template.Template
]
variable[url] assign[=] binary_operation[constant[%s/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b26af640>, <ast.Call object at 0x7da1b26afb80>]]]
call[name[LOGGER].debug, parameter[constant[Making HTTP GET request to %s], name[url]]]
variable[response] assign[=] call[name[self]._http_client.fetch, parameter[name[url]]]
variable[data] assign[=] call[name[json].loads, parameter[name[response].body]]
return[call[name[template].Template, parameter[call[name[data]][constant[template]]]]] | keyword[def] identifier[_create_template] ( identifier[self] , identifier[name] ):
literal[string]
identifier[url] = literal[string] %( identifier[self] . identifier[_base_url] , identifier[escape] . identifier[url_escape] ( identifier[name] ))
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[url] )
identifier[response] = identifier[self] . identifier[_http_client] . identifier[fetch] ( identifier[url] )
identifier[data] = identifier[json] . identifier[loads] ( identifier[response] . identifier[body] , identifier[ensure_ascii] = keyword[False] )
keyword[return] identifier[template] . identifier[Template] ( identifier[data] [ literal[string] ], identifier[name] = identifier[name] , identifier[loader] = identifier[self] ) | def _create_template(self, name):
"""Create an instance of a tornado.template.Template object for the
given template name.
:param str name: The name/path to the template
:rtype: tornado.template.Template
"""
url = '%s/%s' % (self._base_url, escape.url_escape(name))
LOGGER.debug('Making HTTP GET request to %s', url)
response = self._http_client.fetch(url)
data = json.loads(response.body, ensure_ascii=False)
return template.Template(data['template'], name=name, loader=self) |
def lock(self, name, timeout=None, sleep=0.1, blocking_timeout=None,
lock_class=None, thread_local=True):
"""
Return a new Lock object using key ``name`` that mimics
the behavior of threading.Lock.
If specified, ``timeout`` indicates a maximum life for the lock.
By default, it will remain locked until release() is called.
``sleep`` indicates the amount of time to sleep per loop iteration
when the lock is in blocking mode and another client is currently
holding the lock.
``blocking_timeout`` indicates the maximum amount of time in seconds to
spend trying to acquire the lock. A value of ``None`` indicates
continue trying forever. ``blocking_timeout`` can be specified as a
float or integer, both representing the number of seconds to wait.
``lock_class`` forces the specified lock implementation.
``thread_local`` indicates whether the lock token is placed in
thread-local storage. By default, the token is placed in thread local
storage so that a thread only sees its token, not a token set by
another thread. Consider the following timeline:
time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds.
thread-1 sets the token to "abc"
time: 1, thread-2 blocks trying to acquire `my-lock` using the
Lock instance.
time: 5, thread-1 has not yet completed. redis expires the lock
key.
time: 5, thread-2 acquired `my-lock` now that it's available.
thread-2 sets the token to "xyz"
time: 6, thread-1 finishes its work and calls release(). if the
token is *not* stored in thread local storage, then
thread-1 would see the token value as "xyz" and would be
able to successfully release the thread-2's lock.
In some use cases it's necessary to disable thread local storage. For
example, if you have code where one thread acquires a lock and passes
that lock instance to a worker thread to release later. If thread
local storage isn't disabled in this case, the worker thread won't see
the token set by the thread that acquired the lock. Our assumption
is that these cases aren't common and as such default to using
thread local storage. """
if lock_class is None:
if self._use_lua_lock is None:
# the first time .lock() is called, determine if we can use
# Lua by attempting to register the necessary scripts
try:
LuaLock.register_scripts(self)
self._use_lua_lock = True
except ResponseError:
self._use_lua_lock = False
lock_class = self._use_lua_lock and LuaLock or Lock
return lock_class(self, name, timeout=timeout, sleep=sleep,
blocking_timeout=blocking_timeout,
thread_local=thread_local) | def function[lock, parameter[self, name, timeout, sleep, blocking_timeout, lock_class, thread_local]]:
constant[
Return a new Lock object using key ``name`` that mimics
the behavior of threading.Lock.
If specified, ``timeout`` indicates a maximum life for the lock.
By default, it will remain locked until release() is called.
``sleep`` indicates the amount of time to sleep per loop iteration
when the lock is in blocking mode and another client is currently
holding the lock.
``blocking_timeout`` indicates the maximum amount of time in seconds to
spend trying to acquire the lock. A value of ``None`` indicates
continue trying forever. ``blocking_timeout`` can be specified as a
float or integer, both representing the number of seconds to wait.
``lock_class`` forces the specified lock implementation.
``thread_local`` indicates whether the lock token is placed in
thread-local storage. By default, the token is placed in thread local
storage so that a thread only sees its token, not a token set by
another thread. Consider the following timeline:
time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds.
thread-1 sets the token to "abc"
time: 1, thread-2 blocks trying to acquire `my-lock` using the
Lock instance.
time: 5, thread-1 has not yet completed. redis expires the lock
key.
time: 5, thread-2 acquired `my-lock` now that it's available.
thread-2 sets the token to "xyz"
time: 6, thread-1 finishes its work and calls release(). if the
token is *not* stored in thread local storage, then
thread-1 would see the token value as "xyz" and would be
able to successfully release the thread-2's lock.
In some use cases it's necessary to disable thread local storage. For
example, if you have code where one thread acquires a lock and passes
that lock instance to a worker thread to release later. If thread
local storage isn't disabled in this case, the worker thread won't see
the token set by the thread that acquired the lock. Our assumption
is that these cases aren't common and as such default to using
thread local storage. ]
if compare[name[lock_class] is constant[None]] begin[:]
if compare[name[self]._use_lua_lock is constant[None]] begin[:]
<ast.Try object at 0x7da1b07b5570>
variable[lock_class] assign[=] <ast.BoolOp object at 0x7da1b07b5b10>
return[call[name[lock_class], parameter[name[self], name[name]]]] | keyword[def] identifier[lock] ( identifier[self] , identifier[name] , identifier[timeout] = keyword[None] , identifier[sleep] = literal[int] , identifier[blocking_timeout] = keyword[None] ,
identifier[lock_class] = keyword[None] , identifier[thread_local] = keyword[True] ):
literal[string]
keyword[if] identifier[lock_class] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[_use_lua_lock] keyword[is] keyword[None] :
keyword[try] :
identifier[LuaLock] . identifier[register_scripts] ( identifier[self] )
identifier[self] . identifier[_use_lua_lock] = keyword[True]
keyword[except] identifier[ResponseError] :
identifier[self] . identifier[_use_lua_lock] = keyword[False]
identifier[lock_class] = identifier[self] . identifier[_use_lua_lock] keyword[and] identifier[LuaLock] keyword[or] identifier[Lock]
keyword[return] identifier[lock_class] ( identifier[self] , identifier[name] , identifier[timeout] = identifier[timeout] , identifier[sleep] = identifier[sleep] ,
identifier[blocking_timeout] = identifier[blocking_timeout] ,
identifier[thread_local] = identifier[thread_local] ) | def lock(self, name, timeout=None, sleep=0.1, blocking_timeout=None, lock_class=None, thread_local=True):
"""
Return a new Lock object using key ``name`` that mimics
the behavior of threading.Lock.
If specified, ``timeout`` indicates a maximum life for the lock.
By default, it will remain locked until release() is called.
``sleep`` indicates the amount of time to sleep per loop iteration
when the lock is in blocking mode and another client is currently
holding the lock.
``blocking_timeout`` indicates the maximum amount of time in seconds to
spend trying to acquire the lock. A value of ``None`` indicates
continue trying forever. ``blocking_timeout`` can be specified as a
float or integer, both representing the number of seconds to wait.
``lock_class`` forces the specified lock implementation.
``thread_local`` indicates whether the lock token is placed in
thread-local storage. By default, the token is placed in thread local
storage so that a thread only sees its token, not a token set by
another thread. Consider the following timeline:
time: 0, thread-1 acquires `my-lock`, with a timeout of 5 seconds.
thread-1 sets the token to "abc"
time: 1, thread-2 blocks trying to acquire `my-lock` using the
Lock instance.
time: 5, thread-1 has not yet completed. redis expires the lock
key.
time: 5, thread-2 acquired `my-lock` now that it's available.
thread-2 sets the token to "xyz"
time: 6, thread-1 finishes its work and calls release(). if the
token is *not* stored in thread local storage, then
thread-1 would see the token value as "xyz" and would be
able to successfully release the thread-2's lock.
In some use cases it's necessary to disable thread local storage. For
example, if you have code where one thread acquires a lock and passes
that lock instance to a worker thread to release later. If thread
local storage isn't disabled in this case, the worker thread won't see
the token set by the thread that acquired the lock. Our assumption
is that these cases aren't common and as such default to using
thread local storage. """
if lock_class is None:
if self._use_lua_lock is None:
# the first time .lock() is called, determine if we can use
# Lua by attempting to register the necessary scripts
try:
LuaLock.register_scripts(self)
self._use_lua_lock = True # depends on [control=['try'], data=[]]
except ResponseError:
self._use_lua_lock = False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
lock_class = self._use_lua_lock and LuaLock or Lock # depends on [control=['if'], data=['lock_class']]
return lock_class(self, name, timeout=timeout, sleep=sleep, blocking_timeout=blocking_timeout, thread_local=thread_local) |
def item_create(item, item_id, item_type, create='create', extra_args=None, cibfile=None):
'''
Create an item via pcs command
(mainly for use with the pcs state module)
item
config, property, resource, constraint etc.
item_id
id of the item
item_type
item type
create
create command (create or set f.e., default: create)
extra_args
additional options for the pcs command
cibfile
use cibfile instead of the live CIB
'''
cmd = ['pcs']
if isinstance(cibfile, six.string_types):
cmd += ['-f', cibfile]
if isinstance(item, six.string_types):
cmd += [item]
elif isinstance(item, (list, tuple)):
cmd += item
# constraint command follows a different order
if item in ['constraint']:
if isinstance(item_type, six.string_types):
cmd += [item_type]
if isinstance(create, six.string_types):
cmd += [create]
elif isinstance(create, (list, tuple)):
cmd += create
# constraint command needs item_id in format 'id=<id' after all params
# constraint command follows a different order
if item not in ['constraint']:
cmd += [item_id]
if isinstance(item_type, six.string_types):
cmd += [item_type]
if isinstance(extra_args, (list, tuple)):
# constraint command needs item_id in format 'id=<id' after all params
if item in ['constraint']:
extra_args = extra_args + ['id={0}'.format(item_id)]
cmd += extra_args
return __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False) | def function[item_create, parameter[item, item_id, item_type, create, extra_args, cibfile]]:
constant[
Create an item via pcs command
(mainly for use with the pcs state module)
item
config, property, resource, constraint etc.
item_id
id of the item
item_type
item type
create
create command (create or set f.e., default: create)
extra_args
additional options for the pcs command
cibfile
use cibfile instead of the live CIB
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da207f02c50>]]
if call[name[isinstance], parameter[name[cibfile], name[six].string_types]] begin[:]
<ast.AugAssign object at 0x7da207f017e0>
if call[name[isinstance], parameter[name[item], name[six].string_types]] begin[:]
<ast.AugAssign object at 0x7da207f02110>
if compare[name[item] in list[[<ast.Constant object at 0x7da207f03490>]]] begin[:]
if call[name[isinstance], parameter[name[item_type], name[six].string_types]] begin[:]
<ast.AugAssign object at 0x7da207f03430>
if call[name[isinstance], parameter[name[create], name[six].string_types]] begin[:]
<ast.AugAssign object at 0x7da207f03190>
if compare[name[item] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da207f03d30>]]] begin[:]
<ast.AugAssign object at 0x7da207f02ec0>
if call[name[isinstance], parameter[name[item_type], name[six].string_types]] begin[:]
<ast.AugAssign object at 0x7da207f00250>
if call[name[isinstance], parameter[name[extra_args], tuple[[<ast.Name object at 0x7da207f00a30>, <ast.Name object at 0x7da207f03bb0>]]]] begin[:]
if compare[name[item] in list[[<ast.Constant object at 0x7da207f03790>]]] begin[:]
variable[extra_args] assign[=] binary_operation[name[extra_args] + list[[<ast.Call object at 0x7da207f01480>]]]
<ast.AugAssign object at 0x7da207f03610>
return[call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]]] | keyword[def] identifier[item_create] ( identifier[item] , identifier[item_id] , identifier[item_type] , identifier[create] = literal[string] , identifier[extra_args] = keyword[None] , identifier[cibfile] = keyword[None] ):
literal[string]
identifier[cmd] =[ literal[string] ]
keyword[if] identifier[isinstance] ( identifier[cibfile] , identifier[six] . identifier[string_types] ):
identifier[cmd] +=[ literal[string] , identifier[cibfile] ]
keyword[if] identifier[isinstance] ( identifier[item] , identifier[six] . identifier[string_types] ):
identifier[cmd] +=[ identifier[item] ]
keyword[elif] identifier[isinstance] ( identifier[item] ,( identifier[list] , identifier[tuple] )):
identifier[cmd] += identifier[item]
keyword[if] identifier[item] keyword[in] [ literal[string] ]:
keyword[if] identifier[isinstance] ( identifier[item_type] , identifier[six] . identifier[string_types] ):
identifier[cmd] +=[ identifier[item_type] ]
keyword[if] identifier[isinstance] ( identifier[create] , identifier[six] . identifier[string_types] ):
identifier[cmd] +=[ identifier[create] ]
keyword[elif] identifier[isinstance] ( identifier[create] ,( identifier[list] , identifier[tuple] )):
identifier[cmd] += identifier[create]
keyword[if] identifier[item] keyword[not] keyword[in] [ literal[string] ]:
identifier[cmd] +=[ identifier[item_id] ]
keyword[if] identifier[isinstance] ( identifier[item_type] , identifier[six] . identifier[string_types] ):
identifier[cmd] +=[ identifier[item_type] ]
keyword[if] identifier[isinstance] ( identifier[extra_args] ,( identifier[list] , identifier[tuple] )):
keyword[if] identifier[item] keyword[in] [ literal[string] ]:
identifier[extra_args] = identifier[extra_args] +[ literal[string] . identifier[format] ( identifier[item_id] )]
identifier[cmd] += identifier[extra_args]
keyword[return] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[output_loglevel] = literal[string] , identifier[python_shell] = keyword[False] ) | def item_create(item, item_id, item_type, create='create', extra_args=None, cibfile=None):
"""
Create an item via pcs command
(mainly for use with the pcs state module)
item
config, property, resource, constraint etc.
item_id
id of the item
item_type
item type
create
create command (create or set f.e., default: create)
extra_args
additional options for the pcs command
cibfile
use cibfile instead of the live CIB
"""
cmd = ['pcs']
if isinstance(cibfile, six.string_types):
cmd += ['-f', cibfile] # depends on [control=['if'], data=[]]
if isinstance(item, six.string_types):
cmd += [item] # depends on [control=['if'], data=[]]
elif isinstance(item, (list, tuple)):
cmd += item # depends on [control=['if'], data=[]]
# constraint command follows a different order
if item in ['constraint']:
if isinstance(item_type, six.string_types):
cmd += [item_type] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isinstance(create, six.string_types):
cmd += [create] # depends on [control=['if'], data=[]]
elif isinstance(create, (list, tuple)):
cmd += create # depends on [control=['if'], data=[]]
# constraint command needs item_id in format 'id=<id' after all params
# constraint command follows a different order
if item not in ['constraint']:
cmd += [item_id]
if isinstance(item_type, six.string_types):
cmd += [item_type] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if isinstance(extra_args, (list, tuple)):
# constraint command needs item_id in format 'id=<id' after all params
if item in ['constraint']:
extra_args = extra_args + ['id={0}'.format(item_id)] # depends on [control=['if'], data=[]]
cmd += extra_args # depends on [control=['if'], data=[]]
return __salt__['cmd.run_all'](cmd, output_loglevel='trace', python_shell=False) |
def visit_return(self, node, parent):
"""visit a Return node by returning a fresh instance of it"""
newnode = nodes.Return(node.lineno, node.col_offset, parent)
if node.value is not None:
newnode.postinit(self.visit(node.value, newnode))
return newnode | def function[visit_return, parameter[self, node, parent]]:
constant[visit a Return node by returning a fresh instance of it]
variable[newnode] assign[=] call[name[nodes].Return, parameter[name[node].lineno, name[node].col_offset, name[parent]]]
if compare[name[node].value is_not constant[None]] begin[:]
call[name[newnode].postinit, parameter[call[name[self].visit, parameter[name[node].value, name[newnode]]]]]
return[name[newnode]] | keyword[def] identifier[visit_return] ( identifier[self] , identifier[node] , identifier[parent] ):
literal[string]
identifier[newnode] = identifier[nodes] . identifier[Return] ( identifier[node] . identifier[lineno] , identifier[node] . identifier[col_offset] , identifier[parent] )
keyword[if] identifier[node] . identifier[value] keyword[is] keyword[not] keyword[None] :
identifier[newnode] . identifier[postinit] ( identifier[self] . identifier[visit] ( identifier[node] . identifier[value] , identifier[newnode] ))
keyword[return] identifier[newnode] | def visit_return(self, node, parent):
"""visit a Return node by returning a fresh instance of it"""
newnode = nodes.Return(node.lineno, node.col_offset, parent)
if node.value is not None:
newnode.postinit(self.visit(node.value, newnode)) # depends on [control=['if'], data=[]]
return newnode |
def show_network(kwargs=None, call=None):
'''
Show the details of an existing network.
CLI Example:
.. code-block:: bash
salt-cloud -f show_network gce name=mynet
'''
if call != 'function':
raise SaltCloudSystemExit(
'The show_network function must be called with -f or --function.'
)
if not kwargs or 'name' not in kwargs:
log.error(
'Must specify name of network.'
)
return False
conn = get_conn()
return _expand_item(conn.ex_get_network(kwargs['name'])) | def function[show_network, parameter[kwargs, call]]:
constant[
Show the details of an existing network.
CLI Example:
.. code-block:: bash
salt-cloud -f show_network gce name=mynet
]
if compare[name[call] not_equal[!=] constant[function]] begin[:]
<ast.Raise object at 0x7da1b1c178b0>
if <ast.BoolOp object at 0x7da1b1c142b0> begin[:]
call[name[log].error, parameter[constant[Must specify name of network.]]]
return[constant[False]]
variable[conn] assign[=] call[name[get_conn], parameter[]]
return[call[name[_expand_item], parameter[call[name[conn].ex_get_network, parameter[call[name[kwargs]][constant[name]]]]]]] | keyword[def] identifier[show_network] ( identifier[kwargs] = keyword[None] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
)
keyword[if] keyword[not] identifier[kwargs] keyword[or] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[log] . identifier[error] (
literal[string]
)
keyword[return] keyword[False]
identifier[conn] = identifier[get_conn] ()
keyword[return] identifier[_expand_item] ( identifier[conn] . identifier[ex_get_network] ( identifier[kwargs] [ literal[string] ])) | def show_network(kwargs=None, call=None):
"""
Show the details of an existing network.
CLI Example:
.. code-block:: bash
salt-cloud -f show_network gce name=mynet
"""
if call != 'function':
raise SaltCloudSystemExit('The show_network function must be called with -f or --function.') # depends on [control=['if'], data=[]]
if not kwargs or 'name' not in kwargs:
log.error('Must specify name of network.')
return False # depends on [control=['if'], data=[]]
conn = get_conn()
return _expand_item(conn.ex_get_network(kwargs['name'])) |
def setupTable_VORG(self):
"""
Make the VORG table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if "VORG" not in self.tables:
return
self.otf["VORG"] = vorg = newTable("VORG")
vorg.majorVersion = 1
vorg.minorVersion = 0
vorg.VOriginRecords = {}
# Find the most frequent verticalOrigin
vorg_count = Counter(_getVerticalOrigin(self.otf, glyph)
for glyph in self.allGlyphs.values())
vorg.defaultVertOriginY = vorg_count.most_common(1)[0][0]
if len(vorg_count) > 1:
for glyphName, glyph in self.allGlyphs.items():
vorg.VOriginRecords[glyphName] = _getVerticalOrigin(
self.otf, glyph)
vorg.numVertOriginYMetrics = len(vorg.VOriginRecords) | def function[setupTable_VORG, parameter[self]]:
constant[
Make the VORG table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
]
if compare[constant[VORG] <ast.NotIn object at 0x7da2590d7190> name[self].tables] begin[:]
return[None]
call[name[self].otf][constant[VORG]] assign[=] call[name[newTable], parameter[constant[VORG]]]
name[vorg].majorVersion assign[=] constant[1]
name[vorg].minorVersion assign[=] constant[0]
name[vorg].VOriginRecords assign[=] dictionary[[], []]
variable[vorg_count] assign[=] call[name[Counter], parameter[<ast.GeneratorExp object at 0x7da18dc074f0>]]
name[vorg].defaultVertOriginY assign[=] call[call[call[name[vorg_count].most_common, parameter[constant[1]]]][constant[0]]][constant[0]]
if compare[call[name[len], parameter[name[vorg_count]]] greater[>] constant[1]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2041dbee0>, <ast.Name object at 0x7da2041db1c0>]]] in starred[call[name[self].allGlyphs.items, parameter[]]] begin[:]
call[name[vorg].VOriginRecords][name[glyphName]] assign[=] call[name[_getVerticalOrigin], parameter[name[self].otf, name[glyph]]]
name[vorg].numVertOriginYMetrics assign[=] call[name[len], parameter[name[vorg].VOriginRecords]] | keyword[def] identifier[setupTable_VORG] ( identifier[self] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[tables] :
keyword[return]
identifier[self] . identifier[otf] [ literal[string] ]= identifier[vorg] = identifier[newTable] ( literal[string] )
identifier[vorg] . identifier[majorVersion] = literal[int]
identifier[vorg] . identifier[minorVersion] = literal[int]
identifier[vorg] . identifier[VOriginRecords] ={}
identifier[vorg_count] = identifier[Counter] ( identifier[_getVerticalOrigin] ( identifier[self] . identifier[otf] , identifier[glyph] )
keyword[for] identifier[glyph] keyword[in] identifier[self] . identifier[allGlyphs] . identifier[values] ())
identifier[vorg] . identifier[defaultVertOriginY] = identifier[vorg_count] . identifier[most_common] ( literal[int] )[ literal[int] ][ literal[int] ]
keyword[if] identifier[len] ( identifier[vorg_count] )> literal[int] :
keyword[for] identifier[glyphName] , identifier[glyph] keyword[in] identifier[self] . identifier[allGlyphs] . identifier[items] ():
identifier[vorg] . identifier[VOriginRecords] [ identifier[glyphName] ]= identifier[_getVerticalOrigin] (
identifier[self] . identifier[otf] , identifier[glyph] )
identifier[vorg] . identifier[numVertOriginYMetrics] = identifier[len] ( identifier[vorg] . identifier[VOriginRecords] ) | def setupTable_VORG(self):
"""
Make the VORG table.
**This should not be called externally.** Subclasses
may override or supplement this method to handle the
table creation in a different way if desired.
"""
if 'VORG' not in self.tables:
return # depends on [control=['if'], data=[]]
self.otf['VORG'] = vorg = newTable('VORG')
vorg.majorVersion = 1
vorg.minorVersion = 0
vorg.VOriginRecords = {}
# Find the most frequent verticalOrigin
vorg_count = Counter((_getVerticalOrigin(self.otf, glyph) for glyph in self.allGlyphs.values()))
vorg.defaultVertOriginY = vorg_count.most_common(1)[0][0]
if len(vorg_count) > 1:
for (glyphName, glyph) in self.allGlyphs.items():
vorg.VOriginRecords[glyphName] = _getVerticalOrigin(self.otf, glyph) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
vorg.numVertOriginYMetrics = len(vorg.VOriginRecords) |
def set_chance(cls, files, equal=False, offensive=False, lang=None): # where files are (name, chance)
"""Initialize based on a list of fortune files with set chances"""
self = cls.__new__(cls)
total = 0.
file = []
leftover = []
for name, chance in files:
if total >= 1:
break
fortune = load_fortune(name, offensive=offensive, lang=lang)
if fortune is None or not fortune.size:
continue
if chance:
file.append((fortune, chance))
total += chance
else:
leftover.append(fortune)
if leftover and total < 1:
left = 1 - total
if equal:
perfile = left / len(leftover)
for fortune in leftover:
file.append((fortune, perfile))
else:
entries = sum(map(attrgetter('size'), leftover))
logger.debug('%d entries left', entries)
for fortune in leftover:
chance = left * fortune.size / entries
file.append((fortune, chance))
# Arbitrary limit to calculate upper bound with, nice round number
self.count = count = 65536
bound = 0
self.files = fortunes = []
for file, chance in file:
bound += int(chance * count)
fortunes.append((file, bound))
self.keys = [i[1] for i in self.files]
return self | def function[set_chance, parameter[cls, files, equal, offensive, lang]]:
constant[Initialize based on a list of fortune files with set chances]
variable[self] assign[=] call[name[cls].__new__, parameter[name[cls]]]
variable[total] assign[=] constant[0.0]
variable[file] assign[=] list[[]]
variable[leftover] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b01d7250>, <ast.Name object at 0x7da1b01d5ae0>]]] in starred[name[files]] begin[:]
if compare[name[total] greater_or_equal[>=] constant[1]] begin[:]
break
variable[fortune] assign[=] call[name[load_fortune], parameter[name[name]]]
if <ast.BoolOp object at 0x7da1b01d7a00> begin[:]
continue
if name[chance] begin[:]
call[name[file].append, parameter[tuple[[<ast.Name object at 0x7da1b01d7790>, <ast.Name object at 0x7da1b01d6b60>]]]]
<ast.AugAssign object at 0x7da1b01d5120>
if <ast.BoolOp object at 0x7da1b01d71c0> begin[:]
variable[left] assign[=] binary_operation[constant[1] - name[total]]
if name[equal] begin[:]
variable[perfile] assign[=] binary_operation[name[left] / call[name[len], parameter[name[leftover]]]]
for taget[name[fortune]] in starred[name[leftover]] begin[:]
call[name[file].append, parameter[tuple[[<ast.Name object at 0x7da1b01daaa0>, <ast.Name object at 0x7da1b01d8430>]]]]
name[self].count assign[=] constant[65536]
variable[bound] assign[=] constant[0]
name[self].files assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b013e710>, <ast.Name object at 0x7da1b013cee0>]]] in starred[name[file]] begin[:]
<ast.AugAssign object at 0x7da1b013ec80>
call[name[fortunes].append, parameter[tuple[[<ast.Name object at 0x7da1b013fc40>, <ast.Name object at 0x7da1b013fb20>]]]]
name[self].keys assign[=] <ast.ListComp object at 0x7da1b013f6d0>
return[name[self]] | keyword[def] identifier[set_chance] ( identifier[cls] , identifier[files] , identifier[equal] = keyword[False] , identifier[offensive] = keyword[False] , identifier[lang] = keyword[None] ):
literal[string]
identifier[self] = identifier[cls] . identifier[__new__] ( identifier[cls] )
identifier[total] = literal[int]
identifier[file] =[]
identifier[leftover] =[]
keyword[for] identifier[name] , identifier[chance] keyword[in] identifier[files] :
keyword[if] identifier[total] >= literal[int] :
keyword[break]
identifier[fortune] = identifier[load_fortune] ( identifier[name] , identifier[offensive] = identifier[offensive] , identifier[lang] = identifier[lang] )
keyword[if] identifier[fortune] keyword[is] keyword[None] keyword[or] keyword[not] identifier[fortune] . identifier[size] :
keyword[continue]
keyword[if] identifier[chance] :
identifier[file] . identifier[append] (( identifier[fortune] , identifier[chance] ))
identifier[total] += identifier[chance]
keyword[else] :
identifier[leftover] . identifier[append] ( identifier[fortune] )
keyword[if] identifier[leftover] keyword[and] identifier[total] < literal[int] :
identifier[left] = literal[int] - identifier[total]
keyword[if] identifier[equal] :
identifier[perfile] = identifier[left] / identifier[len] ( identifier[leftover] )
keyword[for] identifier[fortune] keyword[in] identifier[leftover] :
identifier[file] . identifier[append] (( identifier[fortune] , identifier[perfile] ))
keyword[else] :
identifier[entries] = identifier[sum] ( identifier[map] ( identifier[attrgetter] ( literal[string] ), identifier[leftover] ))
identifier[logger] . identifier[debug] ( literal[string] , identifier[entries] )
keyword[for] identifier[fortune] keyword[in] identifier[leftover] :
identifier[chance] = identifier[left] * identifier[fortune] . identifier[size] / identifier[entries]
identifier[file] . identifier[append] (( identifier[fortune] , identifier[chance] ))
identifier[self] . identifier[count] = identifier[count] = literal[int]
identifier[bound] = literal[int]
identifier[self] . identifier[files] = identifier[fortunes] =[]
keyword[for] identifier[file] , identifier[chance] keyword[in] identifier[file] :
identifier[bound] += identifier[int] ( identifier[chance] * identifier[count] )
identifier[fortunes] . identifier[append] (( identifier[file] , identifier[bound] ))
identifier[self] . identifier[keys] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[files] ]
keyword[return] identifier[self] | def set_chance(cls, files, equal=False, offensive=False, lang=None): # where files are (name, chance)
'Initialize based on a list of fortune files with set chances'
self = cls.__new__(cls)
total = 0.0
file = []
leftover = []
for (name, chance) in files:
if total >= 1:
break # depends on [control=['if'], data=[]]
fortune = load_fortune(name, offensive=offensive, lang=lang)
if fortune is None or not fortune.size:
continue # depends on [control=['if'], data=[]]
if chance:
file.append((fortune, chance))
total += chance # depends on [control=['if'], data=[]]
else:
leftover.append(fortune) # depends on [control=['for'], data=[]]
if leftover and total < 1:
left = 1 - total
if equal:
perfile = left / len(leftover)
for fortune in leftover:
file.append((fortune, perfile)) # depends on [control=['for'], data=['fortune']] # depends on [control=['if'], data=[]]
else:
entries = sum(map(attrgetter('size'), leftover))
logger.debug('%d entries left', entries)
for fortune in leftover:
chance = left * fortune.size / entries
file.append((fortune, chance)) # depends on [control=['for'], data=['fortune']] # depends on [control=['if'], data=[]]
# Arbitrary limit to calculate upper bound with, nice round number
self.count = count = 65536
bound = 0
self.files = fortunes = []
for (file, chance) in file:
bound += int(chance * count)
fortunes.append((file, bound)) # depends on [control=['for'], data=[]]
self.keys = [i[1] for i in self.files]
return self |
def createStore(self, debug, journalMode=None):
"""
Create the actual Store this Substore represents.
"""
if self.storepath is None:
self.store._memorySubstores.append(self) # don't fall out of cache
if self.store.filesdir is None:
filesdir = None
else:
filesdir = (self.store.filesdir.child("_substore_files")
.child(str(self.storeID))
.path)
return Store(parent=self.store,
filesdir=filesdir,
idInParent=self.storeID,
debug=debug,
journalMode=journalMode)
else:
return Store(self.storepath.path,
parent=self.store,
idInParent=self.storeID,
debug=debug,
journalMode=journalMode) | def function[createStore, parameter[self, debug, journalMode]]:
constant[
Create the actual Store this Substore represents.
]
if compare[name[self].storepath is constant[None]] begin[:]
call[name[self].store._memorySubstores.append, parameter[name[self]]]
if compare[name[self].store.filesdir is constant[None]] begin[:]
variable[filesdir] assign[=] constant[None]
return[call[name[Store], parameter[]]] | keyword[def] identifier[createStore] ( identifier[self] , identifier[debug] , identifier[journalMode] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[storepath] keyword[is] keyword[None] :
identifier[self] . identifier[store] . identifier[_memorySubstores] . identifier[append] ( identifier[self] )
keyword[if] identifier[self] . identifier[store] . identifier[filesdir] keyword[is] keyword[None] :
identifier[filesdir] = keyword[None]
keyword[else] :
identifier[filesdir] =( identifier[self] . identifier[store] . identifier[filesdir] . identifier[child] ( literal[string] )
. identifier[child] ( identifier[str] ( identifier[self] . identifier[storeID] ))
. identifier[path] )
keyword[return] identifier[Store] ( identifier[parent] = identifier[self] . identifier[store] ,
identifier[filesdir] = identifier[filesdir] ,
identifier[idInParent] = identifier[self] . identifier[storeID] ,
identifier[debug] = identifier[debug] ,
identifier[journalMode] = identifier[journalMode] )
keyword[else] :
keyword[return] identifier[Store] ( identifier[self] . identifier[storepath] . identifier[path] ,
identifier[parent] = identifier[self] . identifier[store] ,
identifier[idInParent] = identifier[self] . identifier[storeID] ,
identifier[debug] = identifier[debug] ,
identifier[journalMode] = identifier[journalMode] ) | def createStore(self, debug, journalMode=None):
"""
Create the actual Store this Substore represents.
"""
if self.storepath is None:
self.store._memorySubstores.append(self) # don't fall out of cache
if self.store.filesdir is None:
filesdir = None # depends on [control=['if'], data=[]]
else:
filesdir = self.store.filesdir.child('_substore_files').child(str(self.storeID)).path
return Store(parent=self.store, filesdir=filesdir, idInParent=self.storeID, debug=debug, journalMode=journalMode) # depends on [control=['if'], data=[]]
else:
return Store(self.storepath.path, parent=self.store, idInParent=self.storeID, debug=debug, journalMode=journalMode) |
def leave(self, node):
    """Walk the tree from *node*, invoking the handler's "leave" callback.

    ``get_callbacks`` returns an (enter, leave) pair; only the second slot
    is used here, and it may legitimately be ``None`` (no-op).
    """
    callback = self.get_callbacks(node)[1]
    if callback is not None:
        callback(node)
constant[walk on the tree from <node>, getting callbacks from handler]
variable[method] assign[=] call[call[name[self].get_callbacks, parameter[name[node]]]][constant[1]]
if compare[name[method] is_not constant[None]] begin[:]
call[name[method], parameter[name[node]]] | keyword[def] identifier[leave] ( identifier[self] , identifier[node] ):
literal[string]
identifier[method] = identifier[self] . identifier[get_callbacks] ( identifier[node] )[ literal[int] ]
keyword[if] identifier[method] keyword[is] keyword[not] keyword[None] :
identifier[method] ( identifier[node] ) | def leave(self, node):
"""walk on the tree from <node>, getting callbacks from handler"""
method = self.get_callbacks(node)[1]
if method is not None:
method(node) # depends on [control=['if'], data=['method']] |
def url_norm(url, encoding=None):
    """Wrapper for url.url_norm() to convert UnicodeError in
    LinkCheckerError."""
    try:
        return urlutil.url_norm(url, encoding=encoding)
    except UnicodeError as err:
        # Surface the unparsable-domain problem as a checker-level error;
        # the exception instance itself is interpolated into the message.
        raise LinkCheckerError(
            _("URL has unparsable domain name: %(name)s") % {"name": err})
constant[Wrapper for url.url_norm() to convert UnicodeError in
LinkCheckerError.]
<ast.Try object at 0x7da20e9638e0> | keyword[def] identifier[url_norm] ( identifier[url] , identifier[encoding] = keyword[None] ):
literal[string]
keyword[try] :
keyword[return] identifier[urlutil] . identifier[url_norm] ( identifier[url] , identifier[encoding] = identifier[encoding] )
keyword[except] identifier[UnicodeError] :
identifier[msg] = identifier[_] ( literal[string] )%{ literal[string] : identifier[sys] . identifier[exc_info] ()[ literal[int] ]}
keyword[raise] identifier[LinkCheckerError] ( identifier[msg] ) | def url_norm(url, encoding=None):
"""Wrapper for url.url_norm() to convert UnicodeError in
LinkCheckerError."""
try:
return urlutil.url_norm(url, encoding=encoding) # depends on [control=['try'], data=[]]
except UnicodeError:
msg = _('URL has unparsable domain name: %(name)s') % {'name': sys.exc_info()[1]}
raise LinkCheckerError(msg) # depends on [control=['except'], data=[]] |
def unpack_value(format_string, stream):
    """Read ``struct.calcsize(format_string)`` bytes from *stream* and unpack them.

    :param str format_string: Struct format string
    :param stream: Source data stream
    :type stream: io.BytesIO
    :returns: Unpacked values
    :rtype: tuple
    """
    size = struct.calcsize(format_string)
    return struct.unpack(format_string, stream.read(size))
constant[Helper function to unpack struct data from a stream and update the signature verifier.
:param str format_string: Struct format string
:param stream: Source data stream
:type stream: io.BytesIO
:returns: Unpacked values
:rtype: tuple
]
variable[message_bytes] assign[=] call[name[stream].read, parameter[call[name[struct].calcsize, parameter[name[format_string]]]]]
return[call[name[struct].unpack, parameter[name[format_string], name[message_bytes]]]] | keyword[def] identifier[unpack_value] ( identifier[format_string] , identifier[stream] ):
literal[string]
identifier[message_bytes] = identifier[stream] . identifier[read] ( identifier[struct] . identifier[calcsize] ( identifier[format_string] ))
keyword[return] identifier[struct] . identifier[unpack] ( identifier[format_string] , identifier[message_bytes] ) | def unpack_value(format_string, stream):
"""Helper function to unpack struct data from a stream and update the signature verifier.
:param str format_string: Struct format string
:param stream: Source data stream
:type stream: io.BytesIO
:returns: Unpacked values
:rtype: tuple
"""
message_bytes = stream.read(struct.calcsize(format_string))
return struct.unpack(format_string, message_bytes) |
def machines(self, machine_type=None, name=None, dataset=None, state=None,
             memory=None, tombstone=None, tags=None, credentials=False,
             paged=False, limit=None, offset=None):
    """
    ::
        GET /:login/machines
    Query for machines in the current DataCenter matching the input
    criteria, returning a :py:class:`list` of instantiated
    :py:class:`smartdc.machine.Machine` objects.
    :param machine_type: virtualmachine or smartmachine
    :type machine_type: :py:class:`basestring`
    :param name: machine name to find (will make the return list size
        1 or 0)
    :type name: :py:class:`basestring`
    :param dataset: unique ID or URN for a dataset
    :type dataset: :py:class:`basestring` or :py:class:`dict`
    :param state: current running state
    :type state: :py:class:`basestring`
    :param memory: current size of the RAM deployed for the machine (Mb)
    :type memory: :py:class:`int`
    :param tombstone: include machines destroyed in the last N minutes
    :type tombstone: :py:class:`int`
    :param tags: keys and values to query in the machines' tag space
    :type tags: :py:class:`dict`
    :param credentials: whether to include the generated credentials for
        machines, if present
    :type credentials: :py:class:`bool`
    :param paged: whether to return in pages
    :type paged: :py:class:`bool`
    :param limit: return N machines
    :type limit: :py:class:`int`
    :param offset: get the next `limit` of machines starting at this point
    :type offset: :py:class:`int`
    :rtype: :py:class:`list` of :py:class:`smartdc.machine.Machine`\\s
    The `limit` and `offset` are the REST API's raw paging mechanism.
    Alternatively, one can let `paged` remain `False`, and let the method
    call attempt to collect all of the machines in multiple calls.
    """
    # Build the query-string parameters from the provided filters.
    params = {}
    if machine_type:
        params['type'] = machine_type
    if name:
        params['name'] = name
    if dataset:
        # Accept either a dataset record (dict) or its urn/id string.
        if isinstance(dataset, dict):
            dataset = dataset.get('urn', dataset['id'])
        params['dataset'] = dataset
    if state:
        params['state'] = state
    if memory:
        params['memory'] = memory
    if tombstone:
        params['tombstone'] = tombstone
    if tags:
        for k, v in tags.items():
            params['tag.' + str(k)] = v
    if credentials:
        params['credentials'] = True
    if limit:
        params['limit'] = limit
    else:
        limit = 1000
    if offset:
        params['offset'] = offset
    else:
        offset = 0
    machines = []
    while True:
        j, r = self.request('GET', '/machines', params=params)
        machines.extend(j)
        if paged:
            # Caller asked for raw paging: return exactly one page.
            break
        query_limit = int(r.headers['x-query-limit'])
        resource_count = int(r.headers['x-resource-count'])
        if resource_count <= query_limit or not j:
            # Everything fetched -- or the server returned an empty page,
            # which guards against looping forever on a stale count.
            break
        # BUG FIX: this previously assigned to an undefined name `data`
        # (`data['offset'] = ...`), raising NameError instead of advancing
        # to the next page.  Advance the offset in the request params.
        params['offset'] = (params.get('offset', offset) +
                            params.get('limit', limit))
    return [Machine(datacenter=self, data=m) for m in machines]
constant[
::
GET /:login/machines
Query for machines in the current DataCenter matching the input
criteria, returning a :py:class:`list` of instantiated
:py:class:`smartdc.machine.Machine` objects.
:param machine_type: virtualmachine or smartmachine
:type machine_type: :py:class:`basestring`
:param name: machine name to find (will make the return list size
1 or 0)
:type name: :py:class:`basestring`
:param dataset: unique ID or URN for a dataset
:type dataset: :py:class:`basestring` or :py:class:`dict`
:param state: current running state
:type state: :py:class:`basestring`
:param memory: current size of the RAM deployed for the machine (Mb)
:type memory: :py:class:`int`
:param tombstone: include machines destroyed in the last N minutes
:type tombstone: :py:class:`int`
:param tags: keys and values to query in the machines' tag space
:type tags: :py:class:`dict`
:param credentials: whether to include the generated credentials for
machines, if present
:type credentials: :py:class:`bool`
:param paged: whether to return in pages
:type paged: :py:class:`bool`
:param limit: return N machines
:type limit: :py:class:`int`
:param offset: get the next `limit` of machines starting at this point
:type offset: :py:class:`int`
:rtype: :py:class:`list` of :py:class:`smartdc.machine.Machine`\s
The `limit` and `offset` are the REST API's raw paging mechanism.
Alternatively, one can let `paged` remain `False`, and let the method
call attempt to collect all of the machines in multiple calls.
]
variable[params] assign[=] dictionary[[], []]
if name[machine_type] begin[:]
call[name[params]][constant[type]] assign[=] name[machine_type]
if name[name] begin[:]
call[name[params]][constant[name]] assign[=] name[name]
if name[dataset] begin[:]
if call[name[isinstance], parameter[name[dataset], name[dict]]] begin[:]
variable[dataset] assign[=] call[name[dataset].get, parameter[constant[urn], call[name[dataset]][constant[id]]]]
call[name[params]][constant[dataset]] assign[=] name[dataset]
if name[state] begin[:]
call[name[params]][constant[state]] assign[=] name[state]
if name[memory] begin[:]
call[name[params]][constant[memory]] assign[=] name[memory]
if name[tombstone] begin[:]
call[name[params]][constant[tombstone]] assign[=] name[tombstone]
if name[tags] begin[:]
for taget[tuple[[<ast.Name object at 0x7da2049635b0>, <ast.Name object at 0x7da204960850>]]] in starred[call[name[tags].items, parameter[]]] begin[:]
call[name[params]][binary_operation[constant[tag.] + call[name[str], parameter[name[k]]]]] assign[=] name[v]
if name[credentials] begin[:]
call[name[params]][constant[credentials]] assign[=] constant[True]
if name[limit] begin[:]
call[name[params]][constant[limit]] assign[=] name[limit]
if name[offset] begin[:]
call[name[params]][constant[offset]] assign[=] name[offset]
variable[machines] assign[=] list[[]]
while constant[True] begin[:]
<ast.Tuple object at 0x7da1b2373f40> assign[=] call[name[self].request, parameter[constant[GET], constant[/machines]]]
call[name[machines].extend, parameter[name[j]]]
if <ast.UnaryOp object at 0x7da20e9b3c40> begin[:]
variable[query_limit] assign[=] call[name[int], parameter[call[name[r].headers][constant[x-query-limit]]]]
variable[resource_count] assign[=] call[name[int], parameter[call[name[r].headers][constant[x-resource-count]]]]
if compare[name[resource_count] greater[>] name[query_limit]] begin[:]
call[name[data]][constant[offset]] assign[=] binary_operation[call[name[params].get, parameter[constant[offset], name[offset]]] + call[name[params].get, parameter[constant[limit], name[limit]]]]
return[<ast.ListComp object at 0x7da20e9b24d0>] | keyword[def] identifier[machines] ( identifier[self] , identifier[machine_type] = keyword[None] , identifier[name] = keyword[None] , identifier[dataset] = keyword[None] , identifier[state] = keyword[None] ,
identifier[memory] = keyword[None] , identifier[tombstone] = keyword[None] , identifier[tags] = keyword[None] , identifier[credentials] = keyword[False] ,
identifier[paged] = keyword[False] , identifier[limit] = keyword[None] , identifier[offset] = keyword[None] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[machine_type] :
identifier[params] [ literal[string] ]= identifier[machine_type]
keyword[if] identifier[name] :
identifier[params] [ literal[string] ]= identifier[name]
keyword[if] identifier[dataset] :
keyword[if] identifier[isinstance] ( identifier[dataset] , identifier[dict] ):
identifier[dataset] = identifier[dataset] . identifier[get] ( literal[string] , identifier[dataset] [ literal[string] ])
identifier[params] [ literal[string] ]= identifier[dataset]
keyword[if] identifier[state] :
identifier[params] [ literal[string] ]= identifier[state]
keyword[if] identifier[memory] :
identifier[params] [ literal[string] ]= identifier[memory]
keyword[if] identifier[tombstone] :
identifier[params] [ literal[string] ]= identifier[tombstone]
keyword[if] identifier[tags] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[tags] . identifier[items] ():
identifier[params] [ literal[string] + identifier[str] ( identifier[k] )]= identifier[v]
keyword[if] identifier[credentials] :
identifier[params] [ literal[string] ]= keyword[True]
keyword[if] identifier[limit] :
identifier[params] [ literal[string] ]= identifier[limit]
keyword[else] :
identifier[limit] = literal[int]
keyword[if] identifier[offset] :
identifier[params] [ literal[string] ]= identifier[offset]
keyword[else] :
identifier[offset] = literal[int]
identifier[machines] =[]
keyword[while] keyword[True] :
identifier[j] , identifier[r] = identifier[self] . identifier[request] ( literal[string] , literal[string] , identifier[params] = identifier[params] )
identifier[machines] . identifier[extend] ( identifier[j] )
keyword[if] keyword[not] identifier[paged] :
identifier[query_limit] = identifier[int] ( identifier[r] . identifier[headers] [ literal[string] ])
identifier[resource_count] = identifier[int] ( identifier[r] . identifier[headers] [ literal[string] ])
keyword[if] identifier[resource_count] > identifier[query_limit] :
identifier[data] [ literal[string] ]=( identifier[params] . identifier[get] ( literal[string] , identifier[offset] )+
identifier[params] . identifier[get] ( literal[string] , identifier[limit] ))
keyword[else] :
keyword[break]
keyword[else] :
keyword[break]
keyword[return] [ identifier[Machine] ( identifier[datacenter] = identifier[self] , identifier[data] = identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[machines] ] | def machines(self, machine_type=None, name=None, dataset=None, state=None, memory=None, tombstone=None, tags=None, credentials=False, paged=False, limit=None, offset=None):
"""
::
GET /:login/machines
Query for machines in the current DataCenter matching the input
criteria, returning a :py:class:`list` of instantiated
:py:class:`smartdc.machine.Machine` objects.
:param machine_type: virtualmachine or smartmachine
:type machine_type: :py:class:`basestring`
:param name: machine name to find (will make the return list size
1 or 0)
:type name: :py:class:`basestring`
:param dataset: unique ID or URN for a dataset
:type dataset: :py:class:`basestring` or :py:class:`dict`
:param state: current running state
:type state: :py:class:`basestring`
:param memory: current size of the RAM deployed for the machine (Mb)
:type memory: :py:class:`int`
:param tombstone: include machines destroyed in the last N minutes
:type tombstone: :py:class:`int`
:param tags: keys and values to query in the machines' tag space
:type tags: :py:class:`dict`
:param credentials: whether to include the generated credentials for
machines, if present
:type credentials: :py:class:`bool`
:param paged: whether to return in pages
:type paged: :py:class:`bool`
:param limit: return N machines
:type limit: :py:class:`int`
:param offset: get the next `limit` of machines starting at this point
:type offset: :py:class:`int`
:rtype: :py:class:`list` of :py:class:`smartdc.machine.Machine`\\s
The `limit` and `offset` are the REST API's raw paging mechanism.
Alternatively, one can let `paged` remain `False`, and let the method
call attempt to collect all of the machines in multiple calls.
"""
params = {}
if machine_type:
params['type'] = machine_type # depends on [control=['if'], data=[]]
if name:
params['name'] = name # depends on [control=['if'], data=[]]
if dataset:
if isinstance(dataset, dict):
dataset = dataset.get('urn', dataset['id']) # depends on [control=['if'], data=[]]
params['dataset'] = dataset # depends on [control=['if'], data=[]]
if state:
params['state'] = state # depends on [control=['if'], data=[]]
if memory:
params['memory'] = memory # depends on [control=['if'], data=[]]
if tombstone:
params['tombstone'] = tombstone # depends on [control=['if'], data=[]]
if tags:
for (k, v) in tags.items():
params['tag.' + str(k)] = v # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
if credentials:
params['credentials'] = True # depends on [control=['if'], data=[]]
if limit:
params['limit'] = limit # depends on [control=['if'], data=[]]
else:
limit = 1000
if offset:
params['offset'] = offset # depends on [control=['if'], data=[]]
else:
offset = 0
machines = []
while True:
(j, r) = self.request('GET', '/machines', params=params)
machines.extend(j)
if not paged:
query_limit = int(r.headers['x-query-limit'])
resource_count = int(r.headers['x-resource-count'])
if resource_count > query_limit:
data['offset'] = params.get('offset', offset) + params.get('limit', limit) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]]
return [Machine(datacenter=self, data=m) for m in machines] |
def merge_Fm(dfs_data):
    """Merges Fm-1 and Fm, as defined on page 19 of the paper.

    The newer interval pair FG[m] is folded into FG[m-1] by widening the
    (u, v) and (x, y) bounds, then FG[m] is discarded and the counter
    decremented.
    """
    fg = dfs_data['FG']
    m = fg['m']
    newer = fg[m]
    older = fg[m - 1]
    # Widen each bound of the older pair to cover the newer one.
    older[0]['u'] = min(older[0]['u'], newer[0]['u'])
    older[0]['v'] = max(older[0]['v'], newer[0]['v'])
    older[1]['x'] = min(older[1]['x'], newer[1]['x'])
    older[1]['y'] = max(older[1]['y'], newer[1]['y'])
    del fg[m]
    fg['m'] = m - 1
constant[Merges Fm-1 and Fm, as defined on page 19 of the paper.]
variable[FG] assign[=] call[name[dfs_data]][constant[FG]]
variable[m] assign[=] call[name[FG]][constant[m]]
variable[FGm] assign[=] call[name[FG]][name[m]]
variable[FGm1] assign[=] call[name[FG]][binary_operation[name[m] - constant[1]]]
if compare[call[call[name[FGm]][constant[0]]][constant[u]] less[<] call[call[name[FGm1]][constant[0]]][constant[u]]] begin[:]
call[call[name[FGm1]][constant[0]]][constant[u]] assign[=] call[call[name[FGm]][constant[0]]][constant[u]]
if compare[call[call[name[FGm]][constant[0]]][constant[v]] greater[>] call[call[name[FGm1]][constant[0]]][constant[v]]] begin[:]
call[call[name[FGm1]][constant[0]]][constant[v]] assign[=] call[call[name[FGm]][constant[0]]][constant[v]]
if compare[call[call[name[FGm]][constant[1]]][constant[x]] less[<] call[call[name[FGm1]][constant[1]]][constant[x]]] begin[:]
call[call[name[FGm1]][constant[1]]][constant[x]] assign[=] call[call[name[FGm]][constant[1]]][constant[x]]
if compare[call[call[name[FGm]][constant[1]]][constant[y]] greater[>] call[call[name[FGm1]][constant[1]]][constant[y]]] begin[:]
call[call[name[FGm1]][constant[1]]][constant[y]] assign[=] call[call[name[FGm]][constant[1]]][constant[y]]
<ast.Delete object at 0x7da1b2867880>
<ast.AugAssign object at 0x7da1b2865ba0> | keyword[def] identifier[merge_Fm] ( identifier[dfs_data] ):
literal[string]
identifier[FG] = identifier[dfs_data] [ literal[string] ]
identifier[m] = identifier[FG] [ literal[string] ]
identifier[FGm] = identifier[FG] [ identifier[m] ]
identifier[FGm1] = identifier[FG] [ identifier[m] - literal[int] ]
keyword[if] identifier[FGm] [ literal[int] ][ literal[string] ]< identifier[FGm1] [ literal[int] ][ literal[string] ]:
identifier[FGm1] [ literal[int] ][ literal[string] ]= identifier[FGm] [ literal[int] ][ literal[string] ]
keyword[if] identifier[FGm] [ literal[int] ][ literal[string] ]> identifier[FGm1] [ literal[int] ][ literal[string] ]:
identifier[FGm1] [ literal[int] ][ literal[string] ]= identifier[FGm] [ literal[int] ][ literal[string] ]
keyword[if] identifier[FGm] [ literal[int] ][ literal[string] ]< identifier[FGm1] [ literal[int] ][ literal[string] ]:
identifier[FGm1] [ literal[int] ][ literal[string] ]= identifier[FGm] [ literal[int] ][ literal[string] ]
keyword[if] identifier[FGm] [ literal[int] ][ literal[string] ]> identifier[FGm1] [ literal[int] ][ literal[string] ]:
identifier[FGm1] [ literal[int] ][ literal[string] ]= identifier[FGm] [ literal[int] ][ literal[string] ]
keyword[del] identifier[FG] [ identifier[m] ]
identifier[FG] [ literal[string] ]-= literal[int] | def merge_Fm(dfs_data):
"""Merges Fm-1 and Fm, as defined on page 19 of the paper."""
FG = dfs_data['FG']
m = FG['m']
FGm = FG[m]
FGm1 = FG[m - 1]
if FGm[0]['u'] < FGm1[0]['u']:
FGm1[0]['u'] = FGm[0]['u'] # depends on [control=['if'], data=[]]
if FGm[0]['v'] > FGm1[0]['v']:
FGm1[0]['v'] = FGm[0]['v'] # depends on [control=['if'], data=[]]
if FGm[1]['x'] < FGm1[1]['x']:
FGm1[1]['x'] = FGm[1]['x'] # depends on [control=['if'], data=[]]
if FGm[1]['y'] > FGm1[1]['y']:
FGm1[1]['y'] = FGm[1]['y'] # depends on [control=['if'], data=[]]
del FG[m]
FG['m'] -= 1 |
def flush(self):
    """ Flush the buffer of buffered tiers to our destination tiers. """
    if self.buffer is None:
        return
    # Swap the buffer out before dispatching so the instance is left
    # with a fresh empty buffer.
    pending, self.buffer = self.buffer, []
    for dest in self.dests:
        yield from dest.enqueue_task(self, *pending)
constant[ Flush the buffer of buffered tiers to our destination tiers. ]
if compare[name[self].buffer is constant[None]] begin[:]
return[None]
variable[data] assign[=] name[self].buffer
name[self].buffer assign[=] list[[]]
for taget[name[x]] in starred[name[self].dests] begin[:]
<ast.YieldFrom object at 0x7da204621630> | keyword[def] identifier[flush] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[buffer] keyword[is] keyword[None] :
keyword[return]
identifier[data] = identifier[self] . identifier[buffer]
identifier[self] . identifier[buffer] =[]
keyword[for] identifier[x] keyword[in] identifier[self] . identifier[dests] :
keyword[yield] keyword[from] identifier[x] . identifier[enqueue_task] ( identifier[self] ,* identifier[data] ) | def flush(self):
""" Flush the buffer of buffered tiers to our destination tiers. """
if self.buffer is None:
return # depends on [control=['if'], data=[]]
data = self.buffer
self.buffer = []
for x in self.dests:
yield from x.enqueue_task(self, *data) # depends on [control=['for'], data=['x']] |
def put_key(key_name, value, description, meta, modify, add, lock,
            key_type, stash, passphrase, backend):
    """Insert a key to the stash

    `KEY_NAME` is the name of the key to insert
    `VALUE` is a key=value argument which can be provided multiple times.
    it is the encrypted value of your key
    """
    # Resolve the stash handle first; `stash` itself is the CLI-supplied
    # identifier, so bind the resolved object to a distinct name.
    storage = _get_stash(backend, stash, passphrase)
    try:
        click.echo('Stashing {0} key...'.format(key_type))
        storage.put(
            name=key_name,
            key_type=key_type,
            value=_build_dict_from_key_value(value),
            metadata=_build_dict_from_key_value(meta),
            description=description,
            modify=modify,
            add=add,
            lock=lock)
        click.echo('Key stashed successfully')
    except GhostError as ex:
        sys.exit(ex)
constant[Insert a key to the stash
`KEY_NAME` is the name of the key to insert
`VALUE` is a key=value argument which can be provided multiple times.
it is the encrypted value of your key
]
variable[stash] assign[=] call[name[_get_stash], parameter[name[backend], name[stash], name[passphrase]]]
<ast.Try object at 0x7da1b00c9ae0> | keyword[def] identifier[put_key] ( identifier[key_name] ,
identifier[value] ,
identifier[description] ,
identifier[meta] ,
identifier[modify] ,
identifier[add] ,
identifier[lock] ,
identifier[key_type] ,
identifier[stash] ,
identifier[passphrase] ,
identifier[backend] ):
literal[string]
identifier[stash] = identifier[_get_stash] ( identifier[backend] , identifier[stash] , identifier[passphrase] )
keyword[try] :
identifier[click] . identifier[echo] ( literal[string] . identifier[format] ( identifier[key_type] ))
identifier[stash] . identifier[put] (
identifier[name] = identifier[key_name] ,
identifier[value] = identifier[_build_dict_from_key_value] ( identifier[value] ),
identifier[modify] = identifier[modify] ,
identifier[metadata] = identifier[_build_dict_from_key_value] ( identifier[meta] ),
identifier[description] = identifier[description] ,
identifier[lock] = identifier[lock] ,
identifier[key_type] = identifier[key_type] ,
identifier[add] = identifier[add] )
identifier[click] . identifier[echo] ( literal[string] )
keyword[except] identifier[GhostError] keyword[as] identifier[ex] :
identifier[sys] . identifier[exit] ( identifier[ex] ) | def put_key(key_name, value, description, meta, modify, add, lock, key_type, stash, passphrase, backend):
"""Insert a key to the stash
`KEY_NAME` is the name of the key to insert
`VALUE` is a key=value argument which can be provided multiple times.
it is the encrypted value of your key
"""
stash = _get_stash(backend, stash, passphrase)
try:
click.echo('Stashing {0} key...'.format(key_type))
stash.put(name=key_name, value=_build_dict_from_key_value(value), modify=modify, metadata=_build_dict_from_key_value(meta), description=description, lock=lock, key_type=key_type, add=add)
click.echo('Key stashed successfully') # depends on [control=['try'], data=[]]
except GhostError as ex:
sys.exit(ex) # depends on [control=['except'], data=['ex']] |
def _load(self, url):
    """
    Load from remote, but check local file content to identify duplicate content. If local file is found with
    same hash then it is used with metadata from remote object to avoid fetching full content.

    :param url: resource URL to fetch into the cache
    :return: the metadata dict for *url*; includes a ``'content'`` key
        holding the local file path of the cached content
    """
    self._logger.debug('Loading url %s into resource cache' % url)
    retriever = get_resource_retriever(url)
    content_path = os.path.join(self.path, self._hash_path(url))
    try:
        if url in self.metadata:
            # Conditional fetch: pass the cached ETag so an unchanged
            # resource is not re-downloaded (fetch returns None then).
            headers = retriever.fetch(content_path, last_etag=self.metadata[url]['etag'], progress_bar=self.progress_bars)
            if headers is None:
                # no update, return what is already loaded
                self._logger.info('Cached %s is up-to-date. No data download needed' % url)
                return self.metadata[url]
        else:
            headers = retriever.fetch(content_path, progress_bar=self.progress_bars)
            if headers is None:
                raise Exception('Fetch of %s failed' % url)
        if 'etag' not in headers:
            # No Etag returned, so generate it from the downloaded file
            headers['etag'] = fs_util.calc_file_md5(content_path)
        # Populate metadata from the headers
        self.metadata[url] = headers.copy()
        self.metadata[url]['content'] = content_path
        return self.metadata[url]
    except BaseException:
        # BUG FIX: the log call was missing its format argument and logged
        # the literal string 'Failed getting resource: %s'.
        self._logger.error('Failed getting resource: %s', url)
        # forcibly flush local entry if found
        if url in self.metadata:
            self.metadata.pop(url)
        raise
    finally:
        if url not in self.metadata:
            self._logger.debug('Cleaning up on failed load of %s' % url)
            # Cleanup on failure: remove any partially written content file
            if content_path is not None and os.path.exists(content_path):
                os.remove(content_path)
constant[
Load from remote, but check local file content to identify duplicate content. If local file is found with
same hash then it is used with metadata from remote object to avoid fetching full content.
:param url:
:return:
]
call[name[self]._logger.debug, parameter[binary_operation[constant[Loading url %s into resource cache] <ast.Mod object at 0x7da2590d6920> name[url]]]]
variable[retriever] assign[=] call[name[get_resource_retriever], parameter[name[url]]]
variable[content_path] assign[=] call[name[os].path.join, parameter[name[self].path, call[name[self]._hash_path, parameter[name[url]]]]]
<ast.Try object at 0x7da1b0bf01c0> | keyword[def] identifier[_load] ( identifier[self] , identifier[url] ):
literal[string]
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[url] )
identifier[retriever] = identifier[get_resource_retriever] ( identifier[url] )
identifier[content_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , identifier[self] . identifier[_hash_path] ( identifier[url] ))
keyword[try] :
keyword[if] identifier[url] keyword[in] identifier[self] . identifier[metadata] :
identifier[headers] = identifier[retriever] . identifier[fetch] ( identifier[content_path] , identifier[last_etag] = identifier[self] . identifier[metadata] [ identifier[url] ][ literal[string] ], identifier[progress_bar] = identifier[self] . identifier[progress_bars] )
keyword[if] identifier[headers] keyword[is] keyword[None] :
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] % identifier[url] )
keyword[return] identifier[self] . identifier[metadata] [ identifier[url] ]
keyword[else] :
identifier[headers] = identifier[retriever] . identifier[fetch] ( identifier[content_path] , identifier[progress_bar] = identifier[self] . identifier[progress_bars] )
keyword[if] identifier[headers] keyword[is] keyword[None] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[url] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[headers] :
identifier[headers] [ literal[string] ]= identifier[fs_util] . identifier[calc_file_md5] ( identifier[content_path] )
identifier[self] . identifier[metadata] [ identifier[url] ]= identifier[headers] . identifier[copy] ()
identifier[self] . identifier[metadata] [ identifier[url] ][ literal[string] ]= identifier[content_path]
keyword[return] identifier[self] . identifier[metadata] [ identifier[url] ]
keyword[except] :
identifier[self] . identifier[_logger] . identifier[error] ( literal[string] )
keyword[if] identifier[url] keyword[in] identifier[self] . identifier[metadata] :
identifier[self] . identifier[metadata] . identifier[pop] ( identifier[url] )
keyword[raise]
keyword[finally] :
keyword[if] identifier[url] keyword[not] keyword[in] identifier[self] . identifier[metadata] :
identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] % identifier[url] )
keyword[if] identifier[content_path] keyword[is] keyword[not] keyword[None] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[content_path] ):
identifier[os] . identifier[remove] ( identifier[content_path] ) | def _load(self, url):
"""
Load from remote, but check local file content to identify duplicate content. If local file is found with
same hash then it is used with metadata from remote object to avoid fetching full content.
:param url:
:return:
"""
self._logger.debug('Loading url %s into resource cache' % url)
retriever = get_resource_retriever(url)
content_path = os.path.join(self.path, self._hash_path(url))
try:
if url in self.metadata:
headers = retriever.fetch(content_path, last_etag=self.metadata[url]['etag'], progress_bar=self.progress_bars)
if headers is None:
# no update, return what is already loaded
self._logger.info('Cached %s is up-to-date. No data download needed' % url)
return self.metadata[url] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['url']]
else:
headers = retriever.fetch(content_path, progress_bar=self.progress_bars)
if headers is None:
raise Exception('Fetch of %s failed' % url) # depends on [control=['if'], data=[]]
if 'etag' not in headers:
# No Etag returned, so generate it
headers['etag'] = fs_util.calc_file_md5(content_path) # depends on [control=['if'], data=['headers']]
# Populate metadata from the headers
self.metadata[url] = headers.copy()
self.metadata[url]['content'] = content_path
return self.metadata[url] # depends on [control=['try'], data=[]]
except:
self._logger.error('Failed getting resource: %s')
# forcibly flush local entry if found
if url in self.metadata:
self.metadata.pop(url) # depends on [control=['if'], data=['url']]
raise # depends on [control=['except'], data=[]]
finally:
if url not in self.metadata:
self._logger.debug('Cleaning up on failed load of %s' % url)
# Cleanup on failure
if content_path is not None and os.path.exists(content_path):
os.remove(content_path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['url']] |
def print_stack(sig, frame):
    """Signal handler to print a stack trace and some interesting values.

    Installed for SIGUSR1 so a running process can be inspected without
    stopping it: dumps the module globals, the interrupted call stack, and
    the local variables of every frame on that stack to stderr.

    :param sig: the signal number delivered by the interpreter
    :param frame: the frame object that was executing when the signal hit
    """
    if _debug: print_stack._debug("print_stack %r %r", sig, frame)
    global running, deferredFns, sleeptime
    sys.stderr.write("==== USR1 Signal, %s\n" % time.strftime("%d-%b-%Y %H:%M:%S"))
    # module state that is most useful when diagnosing a hung event loop
    sys.stderr.write("---------- globals\n")
    sys.stderr.write("    running: %r\n" % (running,))
    sys.stderr.write("    deferredFns: %r\n" % (deferredFns,))
    sys.stderr.write("    sleeptime: %r\n" % (sleeptime,))
    sys.stderr.write("---------- stack\n")
    traceback.print_stack(frame)
    # make a list of interesting frames
    flist = []
    f = frame
    while f.f_back:
        flist.append(f)
        f = f.f_back
    # reverse the list so it is in the same order as print_stack
    flist.reverse()
    # dump the local variables of each frame, outermost first
    for f in flist:
        sys.stderr.write("---------- frame: %s\n" % (f,))
        for k, v in f.f_locals.items():
            sys.stderr.write("    %s: %r\n" % (k, v))
    # stderr may be block-buffered when redirected; force the output out
    sys.stderr.flush() | def function[print_stack, parameter[sig, frame]]:
constant[Signal handler to print a stack trace and some interesting values.]
if name[_debug] begin[:]
call[name[print_stack]._debug, parameter[constant[print_stack %r %r], name[sig], name[frame]]]
<ast.Global object at 0x7da2054a40d0>
call[name[sys].stderr.write, parameter[binary_operation[constant[==== USR1 Signal, %s
] <ast.Mod object at 0x7da2590d6920> call[name[time].strftime, parameter[constant[%d-%b-%Y %H:%M:%S]]]]]]
call[name[sys].stderr.write, parameter[constant[---------- globals
]]]
call[name[sys].stderr.write, parameter[binary_operation[constant[ running: %r
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a5870>]]]]]
call[name[sys].stderr.write, parameter[binary_operation[constant[ deferredFns: %r
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a66b0>]]]]]
call[name[sys].stderr.write, parameter[binary_operation[constant[ sleeptime: %r
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a6320>]]]]]
call[name[sys].stderr.write, parameter[constant[---------- stack
]]]
call[name[traceback].print_stack, parameter[name[frame]]]
variable[flist] assign[=] list[[]]
variable[f] assign[=] name[frame]
while name[f].f_back begin[:]
call[name[flist].append, parameter[name[f]]]
variable[f] assign[=] name[f].f_back
call[name[flist].reverse, parameter[]]
for taget[name[f]] in starred[name[flist]] begin[:]
call[name[sys].stderr.write, parameter[binary_operation[constant[---------- frame: %s
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2054a79a0>]]]]]
for taget[tuple[[<ast.Name object at 0x7da2054a47c0>, <ast.Name object at 0x7da2054a5150>]]] in starred[call[name[f].f_locals.items, parameter[]]] begin[:]
call[name[sys].stderr.write, parameter[binary_operation[constant[ %s: %r
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b084e950>, <ast.Name object at 0x7da1b084c100>]]]]]
call[name[sys].stderr.flush, parameter[]] | keyword[def] identifier[print_stack] ( identifier[sig] , identifier[frame] ):
literal[string]
keyword[if] identifier[_debug] : identifier[print_stack] . identifier[_debug] ( literal[string] , identifier[sig] , identifier[frame] )
keyword[global] identifier[running] , identifier[deferredFns] , identifier[sleeptime]
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] % identifier[time] . identifier[strftime] ( literal[string] ))
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[running] ,))
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[deferredFns] ,))
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[sleeptime] ,))
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] )
identifier[traceback] . identifier[print_stack] ( identifier[frame] )
identifier[flist] =[]
identifier[f] = identifier[frame]
keyword[while] identifier[f] . identifier[f_back] :
identifier[flist] . identifier[append] ( identifier[f] )
identifier[f] = identifier[f] . identifier[f_back]
identifier[flist] . identifier[reverse] ()
keyword[for] identifier[f] keyword[in] identifier[flist] :
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[f] ,))
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[f] . identifier[f_locals] . identifier[items] ():
identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] %( identifier[k] , identifier[v] ))
identifier[sys] . identifier[stderr] . identifier[flush] () | def print_stack(sig, frame):
"""Signal handler to print a stack trace and some interesting values."""
if _debug:
print_stack._debug('print_stack %r %r', sig, frame) # depends on [control=['if'], data=[]]
global running, deferredFns, sleeptime
sys.stderr.write('==== USR1 Signal, %s\n' % time.strftime('%d-%b-%Y %H:%M:%S'))
sys.stderr.write('---------- globals\n')
sys.stderr.write(' running: %r\n' % (running,))
sys.stderr.write(' deferredFns: %r\n' % (deferredFns,))
sys.stderr.write(' sleeptime: %r\n' % (sleeptime,))
sys.stderr.write('---------- stack\n')
traceback.print_stack(frame)
# make a list of interesting frames
flist = []
f = frame
while f.f_back:
flist.append(f)
f = f.f_back # depends on [control=['while'], data=[]]
# reverse the list so it is in the same order as print_stack
flist.reverse()
for f in flist:
sys.stderr.write('---------- frame: %s\n' % (f,))
for (k, v) in f.f_locals.items():
sys.stderr.write(' %s: %r\n' % (k, v)) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['f']]
sys.stderr.flush() |
def close_idle_connections(self, pool_id=None):
    """close idle connections to mongo

    :param pool_id: optional key into ``self._pools``; when given, only
        that pool is closed, otherwise every known pool is closed.
    :raises ProgrammingError: if ``pool_id`` is supplied but unknown.
    """
    # _pools is created lazily elsewhere; nothing to close before the
    # first connection has ever been made
    if not hasattr(self, '_pools'):
        return
    if pool_id:
        if pool_id not in self._pools:
            raise ProgrammingError("pool %r does not exist" % pool_id)
        else:
            pool = self._pools[pool_id]
            pool.close()
    else:
        # NOTE(review): this rebinds the pool_id parameter; harmless here
        # because it is not read again after the loop
        for pool_id, pool in self._pools.items():
            pool.close() | def function[close_idle_connections, parameter[self, pool_id]]:
constant[close idle connections to mongo]
if <ast.UnaryOp object at 0x7da18f721ae0> begin[:]
return[None]
if name[pool_id] begin[:]
if compare[name[pool_id] <ast.NotIn object at 0x7da2590d7190> name[self]._pools] begin[:]
<ast.Raise object at 0x7da1b0c52920> | keyword[def] identifier[close_idle_connections] ( identifier[self] , identifier[pool_id] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ):
keyword[return]
keyword[if] identifier[pool_id] :
keyword[if] identifier[pool_id] keyword[not] keyword[in] identifier[self] . identifier[_pools] :
keyword[raise] identifier[ProgrammingError] ( literal[string] % identifier[pool_id] )
keyword[else] :
identifier[pool] = identifier[self] . identifier[_pools] [ identifier[pool_id] ]
identifier[pool] . identifier[close] ()
keyword[else] :
keyword[for] identifier[pool_id] , identifier[pool] keyword[in] identifier[self] . identifier[_pools] . identifier[items] ():
identifier[pool] . identifier[close] () | def close_idle_connections(self, pool_id=None):
"""close idle connections to mongo"""
if not hasattr(self, '_pools'):
return # depends on [control=['if'], data=[]]
if pool_id:
if pool_id not in self._pools:
raise ProgrammingError('pool %r does not exist' % pool_id) # depends on [control=['if'], data=['pool_id']]
else:
pool = self._pools[pool_id]
pool.close() # depends on [control=['if'], data=[]]
else:
for (pool_id, pool) in self._pools.items():
pool.close() # depends on [control=['for'], data=[]] |
def _get_values(cls, diff_dict, type='new'):
    '''
    Returns a dictionaries with the 'new' values in a diff dict.

    type
        Which values to return, 'new' or 'old'

    Keys whose value does not carry a 'new'/'old' entry at this level are
    treated as nested sub-diffs and descended into recursively, so the
    result mirrors the shape of ``diff_dict``.
    NOTE(review): the ``type`` parameter shadows the builtin of the same
    name; kept as-is for interface compatibility.
    '''
    ret_dict = {}
    for p in diff_dict.keys():
        if type in diff_dict[p].keys():
            # leaf entry: this key holds an {'old': ..., 'new': ...} pair
            ret_dict.update({p: diff_dict[p][type]})
        else:
            # sub-diff: recurse and keep the nested structure
            ret_dict.update(
                {p: cls._get_values(diff_dict[p], type=type)})
    return ret_dict | def function[_get_values, parameter[cls, diff_dict, type]]:
constant[
Returns a dictionaries with the 'new' values in a diff dict.
type
Which values to return, 'new' or 'old'
]
variable[ret_dict] assign[=] dictionary[[], []]
for taget[name[p]] in starred[call[name[diff_dict].keys, parameter[]]] begin[:]
if compare[name[type] in call[call[name[diff_dict]][name[p]].keys, parameter[]]] begin[:]
call[name[ret_dict].update, parameter[dictionary[[<ast.Name object at 0x7da1b1ca7730>], [<ast.Subscript object at 0x7da1b1ca4a90>]]]]
return[name[ret_dict]] | keyword[def] identifier[_get_values] ( identifier[cls] , identifier[diff_dict] , identifier[type] = literal[string] ):
literal[string]
identifier[ret_dict] ={}
keyword[for] identifier[p] keyword[in] identifier[diff_dict] . identifier[keys] ():
keyword[if] identifier[type] keyword[in] identifier[diff_dict] [ identifier[p] ]. identifier[keys] ():
identifier[ret_dict] . identifier[update] ({ identifier[p] : identifier[diff_dict] [ identifier[p] ][ identifier[type] ]})
keyword[else] :
identifier[ret_dict] . identifier[update] (
{ identifier[p] : identifier[cls] . identifier[_get_values] ( identifier[diff_dict] [ identifier[p] ], identifier[type] = identifier[type] )})
keyword[return] identifier[ret_dict] | def _get_values(cls, diff_dict, type='new'):
"""
Returns a dictionaries with the 'new' values in a diff dict.
type
Which values to return, 'new' or 'old'
"""
ret_dict = {}
for p in diff_dict.keys():
if type in diff_dict[p].keys():
ret_dict.update({p: diff_dict[p][type]}) # depends on [control=['if'], data=['type']]
else:
ret_dict.update({p: cls._get_values(diff_dict[p], type=type)}) # depends on [control=['for'], data=['p']]
return ret_dict |
def filter_effects(effect_collection, variant_collection, patient, filter_fn, all_effects, **kwargs):
    """Filter variants from the Effect Collection

    Parameters
    ----------
    effect_collection : varcode.EffectCollection
    variant_collection : varcode.VariantCollection
    patient : cohorts.Patient
    filter_fn : function
        Takes a FilterableEffect and returns a boolean. Only effects returning True are preserved.
    all_effects : boolean
        Return the single, top-priority effect if False. If True, return all effects (don't filter to top-priority).

    Returns
    -------
    varcode.EffectCollection
        Filtered effect collection, with only the variants passing the filter
    """
    def top_priority_maybe(effect_collection):
        """
        Always (unless all_effects=True) take the top priority effect per variant
        so we end up with a single effect per variant.
        """
        if all_effects:
            return effect_collection
        return EffectCollection(list(effect_collection.top_priority_effect_per_variant().values()))
    def apply_filter_fn(filter_fn, effect):
        """
        Return True if filter_fn is true for the effect or its alternate_effect.
        If no alternate_effect, then just return True if filter_fn is True.
        """
        applied = filter_fn(FilterableEffect(
            effect=effect,
            variant_collection=variant_collection,
            patient=patient), **kwargs)
        # some effects carry an alternate interpretation; keeping the effect
        # if *either* interpretation passes errs on the side of inclusion
        if hasattr(effect, "alternate_effect"):
            applied_alternate = filter_fn(FilterableEffect(
                effect=effect.alternate_effect,
                variant_collection=variant_collection,
                patient=patient), **kwargs)
            return applied or applied_alternate
        return applied
    # with no filter_fn, only the top-priority reduction (if any) applies
    if filter_fn:
        return top_priority_maybe(EffectCollection([
            effect
            for effect in effect_collection
            if apply_filter_fn(filter_fn, effect)]))
    else:
        return top_priority_maybe(effect_collection) | def function[filter_effects, parameter[effect_collection, variant_collection, patient, filter_fn, all_effects]]:
constant[Filter variants from the Effect Collection
Parameters
----------
effect_collection : varcode.EffectCollection
variant_collection : varcode.VariantCollection
patient : cohorts.Patient
filter_fn : function
Takes a FilterableEffect and returns a boolean. Only effects returning True are preserved.
all_effects : boolean
Return the single, top-priority effect if False. If True, return all effects (don't filter to top-priority).
Returns
-------
varcode.EffectCollection
Filtered effect collection, with only the variants passing the filter
]
def function[top_priority_maybe, parameter[effect_collection]]:
constant[
Always (unless all_effects=True) take the top priority effect per variant
so we end up with a single effect per variant.
]
if name[all_effects] begin[:]
return[name[effect_collection]]
return[call[name[EffectCollection], parameter[call[name[list], parameter[call[call[name[effect_collection].top_priority_effect_per_variant, parameter[]].values, parameter[]]]]]]]
def function[apply_filter_fn, parameter[filter_fn, effect]]:
constant[
Return True if filter_fn is true for the effect or its alternate_effect.
If no alternate_effect, then just return True if filter_fn is True.
]
variable[applied] assign[=] call[name[filter_fn], parameter[call[name[FilterableEffect], parameter[]]]]
if call[name[hasattr], parameter[name[effect], constant[alternate_effect]]] begin[:]
variable[applied_alternate] assign[=] call[name[filter_fn], parameter[call[name[FilterableEffect], parameter[]]]]
return[<ast.BoolOp object at 0x7da18f58c9a0>]
return[name[applied]]
if name[filter_fn] begin[:]
return[call[name[top_priority_maybe], parameter[call[name[EffectCollection], parameter[<ast.ListComp object at 0x7da18f58f730>]]]]] | keyword[def] identifier[filter_effects] ( identifier[effect_collection] , identifier[variant_collection] , identifier[patient] , identifier[filter_fn] , identifier[all_effects] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[top_priority_maybe] ( identifier[effect_collection] ):
literal[string]
keyword[if] identifier[all_effects] :
keyword[return] identifier[effect_collection]
keyword[return] identifier[EffectCollection] ( identifier[list] ( identifier[effect_collection] . identifier[top_priority_effect_per_variant] (). identifier[values] ()))
keyword[def] identifier[apply_filter_fn] ( identifier[filter_fn] , identifier[effect] ):
literal[string]
identifier[applied] = identifier[filter_fn] ( identifier[FilterableEffect] (
identifier[effect] = identifier[effect] ,
identifier[variant_collection] = identifier[variant_collection] ,
identifier[patient] = identifier[patient] ),** identifier[kwargs] )
keyword[if] identifier[hasattr] ( identifier[effect] , literal[string] ):
identifier[applied_alternate] = identifier[filter_fn] ( identifier[FilterableEffect] (
identifier[effect] = identifier[effect] . identifier[alternate_effect] ,
identifier[variant_collection] = identifier[variant_collection] ,
identifier[patient] = identifier[patient] ),** identifier[kwargs] )
keyword[return] identifier[applied] keyword[or] identifier[applied_alternate]
keyword[return] identifier[applied]
keyword[if] identifier[filter_fn] :
keyword[return] identifier[top_priority_maybe] ( identifier[EffectCollection] ([
identifier[effect]
keyword[for] identifier[effect] keyword[in] identifier[effect_collection]
keyword[if] identifier[apply_filter_fn] ( identifier[filter_fn] , identifier[effect] )]))
keyword[else] :
keyword[return] identifier[top_priority_maybe] ( identifier[effect_collection] ) | def filter_effects(effect_collection, variant_collection, patient, filter_fn, all_effects, **kwargs):
"""Filter variants from the Effect Collection
Parameters
----------
effect_collection : varcode.EffectCollection
variant_collection : varcode.VariantCollection
patient : cohorts.Patient
filter_fn : function
Takes a FilterableEffect and returns a boolean. Only effects returning True are preserved.
all_effects : boolean
Return the single, top-priority effect if False. If True, return all effects (don't filter to top-priority).
Returns
-------
varcode.EffectCollection
Filtered effect collection, with only the variants passing the filter
"""
def top_priority_maybe(effect_collection):
"""
Always (unless all_effects=True) take the top priority effect per variant
so we end up with a single effect per variant.
"""
if all_effects:
return effect_collection # depends on [control=['if'], data=[]]
return EffectCollection(list(effect_collection.top_priority_effect_per_variant().values()))
def apply_filter_fn(filter_fn, effect):
"""
Return True if filter_fn is true for the effect or its alternate_effect.
If no alternate_effect, then just return True if filter_fn is True.
"""
applied = filter_fn(FilterableEffect(effect=effect, variant_collection=variant_collection, patient=patient), **kwargs)
if hasattr(effect, 'alternate_effect'):
applied_alternate = filter_fn(FilterableEffect(effect=effect.alternate_effect, variant_collection=variant_collection, patient=patient), **kwargs)
return applied or applied_alternate # depends on [control=['if'], data=[]]
return applied
if filter_fn:
return top_priority_maybe(EffectCollection([effect for effect in effect_collection if apply_filter_fn(filter_fn, effect)])) # depends on [control=['if'], data=[]]
else:
return top_priority_maybe(effect_collection) |
def _find_clim_vars(self, ds, refresh=False):
    '''
    Returns a list of variables that are likely to be climatology variables based on CF §7.4

    :param netCDF4.Dataset ds: An open netCDF dataset
    :param bool refresh: if refresh is set to True, the cache is
                         invalidated.
    :rtype: list
    :return: A list containing strings with geophysical variable
             names.
    '''
    # serve the memoized answer unless the caller asked for a refresh;
    # note an empty cached list is falsy and will be recomputed
    if self._clim_vars.get(ds, None) and refresh is False:
        return self._clim_vars[ds]
    climatology_variable = cfutil.get_climatology_variable(ds)
    if climatology_variable:
        # NOTE(review): appending assumes self._clim_vars maps ds to a
        # list by default (e.g. defaultdict(list)) -- confirm at its
        # declaration, which is outside this block
        self._clim_vars[ds].append(climatology_variable)
    return self._clim_vars[ds] | def function[_find_clim_vars, parameter[self, ds, refresh]]:
constant[
Returns a list of variables that are likely to be climatology variables based on CF §7.4
:param netCDF4.Dataset ds: An open netCDF dataset
:param bool refresh: if refresh is set to True, the cache is
invalidated.
:rtype: list
:return: A list containing strings with geophysical variable
names.
]
if <ast.BoolOp object at 0x7da2054a6860> begin[:]
return[call[name[self]._clim_vars][name[ds]]]
variable[climatology_variable] assign[=] call[name[cfutil].get_climatology_variable, parameter[name[ds]]]
if name[climatology_variable] begin[:]
call[call[name[self]._clim_vars][name[ds]].append, parameter[name[climatology_variable]]]
return[call[name[self]._clim_vars][name[ds]]] | keyword[def] identifier[_find_clim_vars] ( identifier[self] , identifier[ds] , identifier[refresh] = keyword[False] ):
literal[string]
keyword[if] identifier[self] . identifier[_clim_vars] . identifier[get] ( identifier[ds] , keyword[None] ) keyword[and] identifier[refresh] keyword[is] keyword[False] :
keyword[return] identifier[self] . identifier[_clim_vars] [ identifier[ds] ]
identifier[climatology_variable] = identifier[cfutil] . identifier[get_climatology_variable] ( identifier[ds] )
keyword[if] identifier[climatology_variable] :
identifier[self] . identifier[_clim_vars] [ identifier[ds] ]. identifier[append] ( identifier[climatology_variable] )
keyword[return] identifier[self] . identifier[_clim_vars] [ identifier[ds] ] | def _find_clim_vars(self, ds, refresh=False):
"""
Returns a list of variables that are likely to be climatology variables based on CF §7.4
:param netCDF4.Dataset ds: An open netCDF dataset
:param bool refresh: if refresh is set to True, the cache is
invalidated.
:rtype: list
:return: A list containing strings with geophysical variable
names.
"""
if self._clim_vars.get(ds, None) and refresh is False:
return self._clim_vars[ds] # depends on [control=['if'], data=[]]
climatology_variable = cfutil.get_climatology_variable(ds)
if climatology_variable:
self._clim_vars[ds].append(climatology_variable) # depends on [control=['if'], data=[]]
return self._clim_vars[ds] |
def getJSModuleURL(self, moduleName):
    """
    Retrieve an L{URL} object which references the given module name.

    This makes a 'best effort' guess as to an fully qualified HTTPS URL
    based on the hostname provided during rendering and the configuration
    of the site. This is to avoid unnecessary duplicate retrieval of the
    same scripts from two different URLs by the browser.

    If such configuration does not exist, however, it will simply return an
    absolute path URL with no hostname or port.

    @param moduleName: the name of the JavaScript module to locate.

    @raise NotImplementedError: if rendering has not begun yet and
    therefore beforeRender has not provided us with a usable hostname.
    """
    if self._moduleRoot is None:
        raise NotImplementedError(
            "JS module URLs cannot be requested before rendering.")
    # embed the module's content hash in the path so the URL changes
    # whenever the module does, making aggressive browser caching safe
    moduleHash = self.hashCache.getModule(moduleName).hashValue
    return self._moduleRoot.child(moduleHash).child(moduleName) | def function[getJSModuleURL, parameter[self, moduleName]]:
constant[
Retrieve an L{URL} object which references the given module name.
This makes a 'best effort' guess as to an fully qualified HTTPS URL
based on the hostname provided during rendering and the configuration
of the site. This is to avoid unnecessary duplicate retrieval of the
same scripts from two different URLs by the browser.
If such configuration does not exist, however, it will simply return an
absolute path URL with no hostname or port.
@raise NotImplementedError: if rendering has not begun yet and
therefore beforeRender has not provided us with a usable hostname.
]
if compare[name[self]._moduleRoot is constant[None]] begin[:]
<ast.Raise object at 0x7da1b0bd5c60>
variable[moduleHash] assign[=] call[name[self].hashCache.getModule, parameter[name[moduleName]]].hashValue
return[call[call[name[self]._moduleRoot.child, parameter[name[moduleHash]]].child, parameter[name[moduleName]]]] | keyword[def] identifier[getJSModuleURL] ( identifier[self] , identifier[moduleName] ):
literal[string]
keyword[if] identifier[self] . identifier[_moduleRoot] keyword[is] keyword[None] :
keyword[raise] identifier[NotImplementedError] (
literal[string] )
identifier[moduleHash] = identifier[self] . identifier[hashCache] . identifier[getModule] ( identifier[moduleName] ). identifier[hashValue]
keyword[return] identifier[self] . identifier[_moduleRoot] . identifier[child] ( identifier[moduleHash] ). identifier[child] ( identifier[moduleName] ) | def getJSModuleURL(self, moduleName):
"""
Retrieve an L{URL} object which references the given module name.
This makes a 'best effort' guess as to an fully qualified HTTPS URL
based on the hostname provided during rendering and the configuration
of the site. This is to avoid unnecessary duplicate retrieval of the
same scripts from two different URLs by the browser.
If such configuration does not exist, however, it will simply return an
absolute path URL with no hostname or port.
@raise NotImplementedError: if rendering has not begun yet and
therefore beforeRender has not provided us with a usable hostname.
"""
if self._moduleRoot is None:
raise NotImplementedError('JS module URLs cannot be requested before rendering.') # depends on [control=['if'], data=[]]
moduleHash = self.hashCache.getModule(moduleName).hashValue
return self._moduleRoot.child(moduleHash).child(moduleName) |
def _create_dictionary(self, document):
    """Creates mapping key = word, value = row index"""
    # normalize first, then drop stop-words, then stem; frozenset
    # collapses duplicates so each surviving stem appears exactly once
    words = map(self.normalize_word, document.words)
    unique_words = frozenset(self.stem_word(w) for w in words if w not in self._stop_words)
    # NOTE(review): set iteration order is arbitrary, so the row indices
    # assigned here are only stable within a single call
    return dict((w, i) for i, w in enumerate(unique_words)) | def function[_create_dictionary, parameter[self, document]]:
constant[Creates mapping key = word, value = row index]
variable[words] assign[=] call[name[map], parameter[name[self].normalize_word, name[document].words]]
variable[unique_words] assign[=] call[name[frozenset], parameter[<ast.GeneratorExp object at 0x7da18fe90c10>]]
return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da1b2346200>]]] | keyword[def] identifier[_create_dictionary] ( identifier[self] , identifier[document] ):
literal[string]
identifier[words] = identifier[map] ( identifier[self] . identifier[normalize_word] , identifier[document] . identifier[words] )
identifier[unique_words] = identifier[frozenset] ( identifier[self] . identifier[stem_word] ( identifier[w] ) keyword[for] identifier[w] keyword[in] identifier[words] keyword[if] identifier[w] keyword[not] keyword[in] identifier[self] . identifier[_stop_words] )
keyword[return] identifier[dict] (( identifier[w] , identifier[i] ) keyword[for] identifier[i] , identifier[w] keyword[in] identifier[enumerate] ( identifier[unique_words] )) | def _create_dictionary(self, document):
"""Creates mapping key = word, value = row index"""
words = map(self.normalize_word, document.words)
unique_words = frozenset((self.stem_word(w) for w in words if w not in self._stop_words))
return dict(((w, i) for (i, w) in enumerate(unique_words))) |
def corpus_token_counts(
    text_filepattern, corpus_max_lines, split_on_newlines=True):
  """Read the corpus and compute a dictionary of token counts.

  Args:
    text_filepattern: A pattern matching one or more files.
    corpus_max_lines: An integer; maximum total lines to read.
    split_on_newlines: A boolean. If true, then split files by lines and strip
        leading and trailing whitespace from each line. Otherwise, treat each
        file as a single string.

  Returns:
    a dictionary mapping token to count.
  """
  counts = collections.Counter()
  # stream the corpus one document at a time so large corpora never have
  # to be held in memory all at once
  for doc in _read_filepattern(
      text_filepattern,
      max_lines=corpus_max_lines,
      split_on_newlines=split_on_newlines):
    counts.update(encode(_native_to_unicode(doc)))
  # report the raw vocabulary size for MLPerf compliance logging
  mlperf_log.transformer_print(
      key=mlperf_log.PREPROC_VOCAB_SIZE, value=len(counts))
  return counts | def function[corpus_token_counts, parameter[text_filepattern, corpus_max_lines, split_on_newlines]]:
constant[Read the corpus and compute a dictionary of token counts.
Args:
text_filepattern: A pattern matching one or more files.
corpus_max_lines: An integer; maximum total lines to read.
split_on_newlines: A boolean. If true, then split files by lines and strip
leading and trailing whitespace from each line. Otherwise, treat each
file as a single string.
Returns:
a dictionary mapping token to count.
]
variable[counts] assign[=] call[name[collections].Counter, parameter[]]
for taget[name[doc]] in starred[call[name[_read_filepattern], parameter[name[text_filepattern]]]] begin[:]
call[name[counts].update, parameter[call[name[encode], parameter[call[name[_native_to_unicode], parameter[name[doc]]]]]]]
call[name[mlperf_log].transformer_print, parameter[]]
return[name[counts]] | keyword[def] identifier[corpus_token_counts] (
identifier[text_filepattern] , identifier[corpus_max_lines] , identifier[split_on_newlines] = keyword[True] ):
literal[string]
identifier[counts] = identifier[collections] . identifier[Counter] ()
keyword[for] identifier[doc] keyword[in] identifier[_read_filepattern] (
identifier[text_filepattern] ,
identifier[max_lines] = identifier[corpus_max_lines] ,
identifier[split_on_newlines] = identifier[split_on_newlines] ):
identifier[counts] . identifier[update] ( identifier[encode] ( identifier[_native_to_unicode] ( identifier[doc] )))
identifier[mlperf_log] . identifier[transformer_print] (
identifier[key] = identifier[mlperf_log] . identifier[PREPROC_VOCAB_SIZE] , identifier[value] = identifier[len] ( identifier[counts] ))
keyword[return] identifier[counts] | def corpus_token_counts(text_filepattern, corpus_max_lines, split_on_newlines=True):
"""Read the corpus and compute a dictionary of token counts.
Args:
text_filepattern: A pattern matching one or more files.
corpus_max_lines: An integer; maximum total lines to read.
split_on_newlines: A boolean. If true, then split files by lines and strip
leading and trailing whitespace from each line. Otherwise, treat each
file as a single string.
Returns:
a dictionary mapping token to count.
"""
counts = collections.Counter()
for doc in _read_filepattern(text_filepattern, max_lines=corpus_max_lines, split_on_newlines=split_on_newlines):
counts.update(encode(_native_to_unicode(doc))) # depends on [control=['for'], data=['doc']]
mlperf_log.transformer_print(key=mlperf_log.PREPROC_VOCAB_SIZE, value=len(counts))
return counts |
def __findout_decl_type(match_class, **keywds):
    """implementation details

    Work out the declaration type a matcher should use.  An explicit
    ``decl_type`` keyword wins; otherwise a throw-away ``match_class``
    instance is built from the remaining keywords (minus the ones the
    matcher constructor does not accept) and its ``decl_type`` attribute
    is used.  Returns None when no declaration type can be determined.
    """
    if 'decl_type' in keywds:
        return keywds['decl_type']
    matcher_args = keywds.copy()
    # strip the keywords consumed by the caller, not by the matcher
    del matcher_args['function']
    del matcher_args['recursive']
    if 'allow_empty' in matcher_args:
        del matcher_args['allow_empty']
    decl_matcher = match_class(**matcher_args)
    if decl_matcher.decl_type:
        return decl_matcher.decl_type
    return None | def function[__findout_decl_type, parameter[match_class]]:
constant[implementation details]
if compare[constant[decl_type] in name[keywds]] begin[:]
return[call[name[keywds]][constant[decl_type]]]
variable[matcher_args] assign[=] call[name[keywds].copy, parameter[]]
<ast.Delete object at 0x7da18dc987c0>
<ast.Delete object at 0x7da18dc98310>
if compare[constant[allow_empty] in name[matcher_args]] begin[:]
<ast.Delete object at 0x7da18dc9aaa0>
variable[decl_matcher] assign[=] call[name[match_class], parameter[]]
if name[decl_matcher].decl_type begin[:]
return[name[decl_matcher].decl_type]
return[constant[None]] | keyword[def] identifier[__findout_decl_type] ( identifier[match_class] ,** identifier[keywds] ):
literal[string]
keyword[if] literal[string] keyword[in] identifier[keywds] :
keyword[return] identifier[keywds] [ literal[string] ]
identifier[matcher_args] = identifier[keywds] . identifier[copy] ()
keyword[del] identifier[matcher_args] [ literal[string] ]
keyword[del] identifier[matcher_args] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[matcher_args] :
keyword[del] identifier[matcher_args] [ literal[string] ]
identifier[decl_matcher] = identifier[match_class] (** identifier[matcher_args] )
keyword[if] identifier[decl_matcher] . identifier[decl_type] :
keyword[return] identifier[decl_matcher] . identifier[decl_type]
keyword[return] keyword[None] | def __findout_decl_type(match_class, **keywds):
"""implementation details"""
if 'decl_type' in keywds:
return keywds['decl_type'] # depends on [control=['if'], data=['keywds']]
matcher_args = keywds.copy()
del matcher_args['function']
del matcher_args['recursive']
if 'allow_empty' in matcher_args:
del matcher_args['allow_empty'] # depends on [control=['if'], data=['matcher_args']]
decl_matcher = match_class(**matcher_args)
if decl_matcher.decl_type:
return decl_matcher.decl_type # depends on [control=['if'], data=[]]
return None |
def _data_dict_to_bokeh_chart_data(self, data):
    """
    Take a dictionary of data, as returned by the :py:class:`~.ProjectStats`
    per_*_data properties, return a 2-tuple of data dict and x labels list
    usable by bokeh.charts.

    :param data: data dict from :py:class:`~.ProjectStats` property
    :type data: dict
    :return: 2-tuple of data dict, x labels list
    :rtype: tuple
    """
    labels = []
    # find all the data keys
    keys = set()
    for date in data:
        for k in data[date]:
            keys.add(k)
    # final output dict
    out_data = {}
    for k in keys:
        out_data[k] = []
    # transform the data; deal with sparse data
    # (iterating dates in sorted order keeps every series aligned with
    # the labels list; keys missing on a given date are padded with 0 so
    # all series end up the same length)
    for data_date, data_dict in sorted(data.items()):
        labels.append(data_date)
        for k in out_data:
            if k in data_dict:
                out_data[k].append(data_dict[k])
            else:
                out_data[k].append(0)
    return out_data, labels | def function[_data_dict_to_bokeh_chart_data, parameter[self, data]]:
constant[
Take a dictionary of data, as returned by the :py:class:`~.ProjectStats`
per_*_data properties, return a 2-tuple of data dict and x labels list
usable by bokeh.charts.
:param data: data dict from :py:class:`~.ProjectStats` property
:type data: dict
:return: 2-tuple of data dict, x labels list
:rtype: tuple
]
variable[labels] assign[=] list[[]]
variable[keys] assign[=] call[name[set], parameter[]]
for taget[name[date]] in starred[name[data]] begin[:]
for taget[name[k]] in starred[call[name[data]][name[date]]] begin[:]
call[name[keys].add, parameter[name[k]]]
variable[out_data] assign[=] dictionary[[], []]
for taget[name[k]] in starred[name[keys]] begin[:]
call[name[out_data]][name[k]] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c993df0>, <ast.Name object at 0x7da20c992c80>]]] in starred[call[name[sorted], parameter[call[name[data].items, parameter[]]]]] begin[:]
call[name[labels].append, parameter[name[data_date]]]
for taget[name[k]] in starred[name[out_data]] begin[:]
if compare[name[k] in name[data_dict]] begin[:]
call[call[name[out_data]][name[k]].append, parameter[call[name[data_dict]][name[k]]]]
return[tuple[[<ast.Name object at 0x7da20c9922c0>, <ast.Name object at 0x7da20c993e80>]]] | keyword[def] identifier[_data_dict_to_bokeh_chart_data] ( identifier[self] , identifier[data] ):
literal[string]
identifier[labels] =[]
identifier[keys] = identifier[set] ()
keyword[for] identifier[date] keyword[in] identifier[data] :
keyword[for] identifier[k] keyword[in] identifier[data] [ identifier[date] ]:
identifier[keys] . identifier[add] ( identifier[k] )
identifier[out_data] ={}
keyword[for] identifier[k] keyword[in] identifier[keys] :
identifier[out_data] [ identifier[k] ]=[]
keyword[for] identifier[data_date] , identifier[data_dict] keyword[in] identifier[sorted] ( identifier[data] . identifier[items] ()):
identifier[labels] . identifier[append] ( identifier[data_date] )
keyword[for] identifier[k] keyword[in] identifier[out_data] :
keyword[if] identifier[k] keyword[in] identifier[data_dict] :
identifier[out_data] [ identifier[k] ]. identifier[append] ( identifier[data_dict] [ identifier[k] ])
keyword[else] :
identifier[out_data] [ identifier[k] ]. identifier[append] ( literal[int] )
keyword[return] identifier[out_data] , identifier[labels] | def _data_dict_to_bokeh_chart_data(self, data):
"""
Take a dictionary of data, as returned by the :py:class:`~.ProjectStats`
per_*_data properties, return a 2-tuple of data dict and x labels list
usable by bokeh.charts.
:param data: data dict from :py:class:`~.ProjectStats` property
:type data: dict
:return: 2-tuple of data dict, x labels list
:rtype: tuple
"""
labels = []
# find all the data keys
keys = set()
for date in data:
for k in data[date]:
keys.add(k) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['date']]
# final output dict
out_data = {}
for k in keys:
out_data[k] = [] # depends on [control=['for'], data=['k']]
# transform the data; deal with sparse data
for (data_date, data_dict) in sorted(data.items()):
labels.append(data_date)
for k in out_data:
if k in data_dict:
out_data[k].append(data_dict[k]) # depends on [control=['if'], data=['k', 'data_dict']]
else:
out_data[k].append(0) # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=[]]
return (out_data, labels) |
def _new_multipart_upload(self, bucket_name, object_name,
                          metadata=None, sse=None):
    """
    Initialize new multipart upload request.
    :param bucket_name: Bucket name of the new multipart request.
    :param object_name: Object name of the new multipart request.
    :param metadata: Additional new metadata for the new object.
    :param sse: Server-side-encryption settings; ``marshal()`` supplies headers.
    :return: Returns an upload id.
    """
    # Validate arguments before touching the network.
    is_valid_bucket_name(bucket_name)
    is_non_empty_string(object_name)

    # Merge optional user metadata and SSE headers into one header dict.
    headers = dict(metadata) if metadata else {}
    if sse:
        headers.update(sse.marshal())

    # POST with the bare "uploads" query flag starts the multipart session.
    response = self._url_open('POST',
                              bucket_name=bucket_name,
                              object_name=object_name,
                              query={'uploads': ''},
                              headers=headers)
    return parse_new_multipart_upload(response.data)
constant[
Initialize new multipart upload request.
:param bucket_name: Bucket name of the new multipart request.
:param object_name: Object name of the new multipart request.
:param metadata: Additional new metadata for the new object.
:return: Returns an upload id.
]
call[name[is_valid_bucket_name], parameter[name[bucket_name]]]
call[name[is_non_empty_string], parameter[name[object_name]]]
variable[headers] assign[=] dictionary[[], []]
if name[metadata] begin[:]
call[name[headers].update, parameter[name[metadata]]]
if name[sse] begin[:]
call[name[headers].update, parameter[call[name[sse].marshal, parameter[]]]]
variable[response] assign[=] call[name[self]._url_open, parameter[constant[POST]]]
return[call[name[parse_new_multipart_upload], parameter[name[response].data]]] | keyword[def] identifier[_new_multipart_upload] ( identifier[self] , identifier[bucket_name] , identifier[object_name] ,
identifier[metadata] = keyword[None] , identifier[sse] = keyword[None] ):
literal[string]
identifier[is_valid_bucket_name] ( identifier[bucket_name] )
identifier[is_non_empty_string] ( identifier[object_name] )
identifier[headers] ={}
keyword[if] identifier[metadata] :
identifier[headers] . identifier[update] ( identifier[metadata] )
keyword[if] identifier[sse] :
identifier[headers] . identifier[update] ( identifier[sse] . identifier[marshal] ())
identifier[response] = identifier[self] . identifier[_url_open] ( literal[string] , identifier[bucket_name] = identifier[bucket_name] ,
identifier[object_name] = identifier[object_name] ,
identifier[query] ={ literal[string] : literal[string] },
identifier[headers] = identifier[headers] )
keyword[return] identifier[parse_new_multipart_upload] ( identifier[response] . identifier[data] ) | def _new_multipart_upload(self, bucket_name, object_name, metadata=None, sse=None):
"""
Initialize new multipart upload request.
:param bucket_name: Bucket name of the new multipart request.
:param object_name: Object name of the new multipart request.
:param metadata: Additional new metadata for the new object.
:return: Returns an upload id.
"""
is_valid_bucket_name(bucket_name)
is_non_empty_string(object_name)
headers = {}
if metadata:
headers.update(metadata) # depends on [control=['if'], data=[]]
if sse:
headers.update(sse.marshal()) # depends on [control=['if'], data=[]]
response = self._url_open('POST', bucket_name=bucket_name, object_name=object_name, query={'uploads': ''}, headers=headers)
return parse_new_multipart_upload(response.data) |
def _bigger(interval1, interval2):
"""
Return interval with bigger cardinality
Refer Section 3.1
:param interval1: first interval
:param interval2: second interval
:return: Interval or interval2 whichever has greater cardinality
"""
if interval2.cardinality > interval1.cardinality:
return interval2.copy()
return interval1.copy() | def function[_bigger, parameter[interval1, interval2]]:
constant[
Return interval with bigger cardinality
Refer Section 3.1
:param interval1: first interval
:param interval2: second interval
:return: Interval or interval2 whichever has greater cardinality
]
if compare[name[interval2].cardinality greater[>] name[interval1].cardinality] begin[:]
return[call[name[interval2].copy, parameter[]]]
return[call[name[interval1].copy, parameter[]]] | keyword[def] identifier[_bigger] ( identifier[interval1] , identifier[interval2] ):
literal[string]
keyword[if] identifier[interval2] . identifier[cardinality] > identifier[interval1] . identifier[cardinality] :
keyword[return] identifier[interval2] . identifier[copy] ()
keyword[return] identifier[interval1] . identifier[copy] () | def _bigger(interval1, interval2):
"""
Return interval with bigger cardinality
Refer Section 3.1
:param interval1: first interval
:param interval2: second interval
:return: Interval or interval2 whichever has greater cardinality
"""
if interval2.cardinality > interval1.cardinality:
return interval2.copy() # depends on [control=['if'], data=[]]
return interval1.copy() |
def nextstate(self, newstate, treenode=None, user_data=None):
    """
    Manage transition of state.
    Resolve ``newstate`` into the concrete ``State`` to move to:
    ``None`` or an unrecognized value keeps the current state; a distinct
    ``State`` is followed directly; event/precondition/hook wrappers
    yield their ``st`` target (hooks also run their callback).
    """
    if newstate is None:
        return self
    if isinstance(newstate, State) and newstate is not self:
        return newstate
    if isinstance(newstate, StateEvent):
        # Record that the named event fired before following the edge.
        self.state_register.named_events[newstate.name] = True
        return newstate.st
    if isinstance(newstate, StatePrecond):
        return newstate.st
    if isinstance(newstate, StateHook):
        # final API using PSL
        newstate.call(treenode, user_data)
        return newstate.st
    return self
constant[
Manage transition of state.
]
if compare[name[newstate] is constant[None]] begin[:]
return[name[self]]
if <ast.BoolOp object at 0x7da1b013f280> begin[:]
return[name[newstate]]
return[name[self]] | keyword[def] identifier[nextstate] ( identifier[self] , identifier[newstate] , identifier[treenode] = keyword[None] , identifier[user_data] = keyword[None] ):
literal[string]
keyword[if] identifier[newstate] keyword[is] keyword[None] :
keyword[return] identifier[self]
keyword[if] identifier[isinstance] ( identifier[newstate] , identifier[State] ) keyword[and] identifier[id] ( identifier[newstate] )!= identifier[id] ( identifier[self] ):
keyword[return] identifier[newstate]
keyword[elif] identifier[isinstance] ( identifier[newstate] , identifier[StateEvent] ):
identifier[self] . identifier[state_register] . identifier[named_events] [ identifier[newstate] . identifier[name] ]= keyword[True]
keyword[return] identifier[newstate] . identifier[st]
keyword[elif] identifier[isinstance] ( identifier[newstate] , identifier[StatePrecond] ):
keyword[return] identifier[newstate] . identifier[st]
keyword[elif] identifier[isinstance] ( identifier[newstate] , identifier[StateHook] ):
identifier[newstate] . identifier[call] ( identifier[treenode] , identifier[user_data] )
keyword[return] identifier[newstate] . identifier[st]
keyword[return] identifier[self] | def nextstate(self, newstate, treenode=None, user_data=None):
"""
Manage transition of state.
"""
if newstate is None:
return self # depends on [control=['if'], data=[]]
if isinstance(newstate, State) and id(newstate) != id(self):
return newstate # depends on [control=['if'], data=[]]
elif isinstance(newstate, StateEvent):
self.state_register.named_events[newstate.name] = True
return newstate.st # depends on [control=['if'], data=[]]
elif isinstance(newstate, StatePrecond):
return newstate.st # depends on [control=['if'], data=[]]
elif isinstance(newstate, StateHook):
# final API using PSL
newstate.call(treenode, user_data)
return newstate.st # depends on [control=['if'], data=[]]
return self |
def _create_regex_if_none(self):
"""
Private function. Checks to see if the local regular expression
object has been created yet
"""
if self._regex is None:
self._regex = re.compile(self._pattern, re.UNICODE) | def function[_create_regex_if_none, parameter[self]]:
constant[
Private function. Checks to see if the local regular expression
object has been created yet
]
if compare[name[self]._regex is constant[None]] begin[:]
name[self]._regex assign[=] call[name[re].compile, parameter[name[self]._pattern, name[re].UNICODE]] | keyword[def] identifier[_create_regex_if_none] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_regex] keyword[is] keyword[None] :
identifier[self] . identifier[_regex] = identifier[re] . identifier[compile] ( identifier[self] . identifier[_pattern] , identifier[re] . identifier[UNICODE] ) | def _create_regex_if_none(self):
"""
Private function. Checks to see if the local regular expression
object has been created yet
"""
if self._regex is None:
self._regex = re.compile(self._pattern, re.UNICODE) # depends on [control=['if'], data=[]] |
def cublasDspr(handle, uplo, n, alpha, x, incx, AP):
    """
    Rank-1 operation on real symmetric-packed matrix.
    Thin wrapper over the CUBLAS ``cublasDspr_v2`` entry point;
    raises via ``cublasCheckStatus`` on a non-success status code.
    """
    fill_mode = _CUBLAS_FILL_MODE[uplo]
    # alpha is passed by reference as a C double, device pointers as ints.
    alpha_ref = ctypes.byref(ctypes.c_double(alpha))
    status = _libcublas.cublasDspr_v2(handle, fill_mode, n, alpha_ref,
                                      int(x), incx, int(AP))
    cublasCheckStatus(status)
constant[
Rank-1 operation on real symmetric-packed matrix.
]
variable[status] assign[=] call[name[_libcublas].cublasDspr_v2, parameter[name[handle], call[name[_CUBLAS_FILL_MODE]][name[uplo]], name[n], call[name[ctypes].byref, parameter[call[name[ctypes].c_double, parameter[name[alpha]]]]], call[name[int], parameter[name[x]]], name[incx], call[name[int], parameter[name[AP]]]]]
call[name[cublasCheckStatus], parameter[name[status]]] | keyword[def] identifier[cublasDspr] ( identifier[handle] , identifier[uplo] , identifier[n] , identifier[alpha] , identifier[x] , identifier[incx] , identifier[AP] ):
literal[string]
identifier[status] = identifier[_libcublas] . identifier[cublasDspr_v2] ( identifier[handle] ,
identifier[_CUBLAS_FILL_MODE] [ identifier[uplo] ], identifier[n] ,
identifier[ctypes] . identifier[byref] ( identifier[ctypes] . identifier[c_double] ( identifier[alpha] )),
identifier[int] ( identifier[x] ), identifier[incx] , identifier[int] ( identifier[AP] ))
identifier[cublasCheckStatus] ( identifier[status] ) | def cublasDspr(handle, uplo, n, alpha, x, incx, AP):
"""
Rank-1 operation on real symmetric-packed matrix.
"""
status = _libcublas.cublasDspr_v2(handle, _CUBLAS_FILL_MODE[uplo], n, ctypes.byref(ctypes.c_double(alpha)), int(x), incx, int(AP))
cublasCheckStatus(status) |
def write_connection_file(self):
    """write connection info to JSON file"""
    cf = self.connection_file
    # A bare filename (no directory component) lands in the profile's
    # security dir; an explicit path is honored as-is.
    if os.path.basename(cf) == cf:
        cf = os.path.join(self.profile_dir.security_dir, cf)
    write_connection_file(cf,
                          ip=self.ip,
                          key=self.session.key,
                          shell_port=self.shell_port,
                          stdin_port=self.stdin_port,
                          hb_port=self.hb_port,
                          iopub_port=self.iopub_port)
    self._full_connection_file = cf
constant[write connection info to JSON file]
if compare[call[name[os].path.basename, parameter[name[self].connection_file]] equal[==] name[self].connection_file] begin[:]
variable[cf] assign[=] call[name[os].path.join, parameter[name[self].profile_dir.security_dir, name[self].connection_file]]
call[name[write_connection_file], parameter[name[cf]]]
name[self]._full_connection_file assign[=] name[cf] | keyword[def] identifier[write_connection_file] ( identifier[self] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[basename] ( identifier[self] . identifier[connection_file] )== identifier[self] . identifier[connection_file] :
identifier[cf] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[profile_dir] . identifier[security_dir] , identifier[self] . identifier[connection_file] )
keyword[else] :
identifier[cf] = identifier[self] . identifier[connection_file]
identifier[write_connection_file] ( identifier[cf] , identifier[ip] = identifier[self] . identifier[ip] , identifier[key] = identifier[self] . identifier[session] . identifier[key] ,
identifier[shell_port] = identifier[self] . identifier[shell_port] , identifier[stdin_port] = identifier[self] . identifier[stdin_port] , identifier[hb_port] = identifier[self] . identifier[hb_port] ,
identifier[iopub_port] = identifier[self] . identifier[iopub_port] )
identifier[self] . identifier[_full_connection_file] = identifier[cf] | def write_connection_file(self):
"""write connection info to JSON file"""
if os.path.basename(self.connection_file) == self.connection_file:
cf = os.path.join(self.profile_dir.security_dir, self.connection_file) # depends on [control=['if'], data=[]]
else:
cf = self.connection_file
write_connection_file(cf, ip=self.ip, key=self.session.key, shell_port=self.shell_port, stdin_port=self.stdin_port, hb_port=self.hb_port, iopub_port=self.iopub_port)
self._full_connection_file = cf |
def sun_events(latitude, longitude, date, timezone=0, zenith=None):
    """Convenience function for calculating sunrise and sunset.
    Civil twilight starts/ends when the Sun's centre is 6 degrees below
    the horizon.
    Nautical twilight starts/ends when the Sun's centre is 12 degrees
    below the horizon.
    Astronomical twilight starts/ends when the Sun's centre is 18 degrees below
    the horizon.
    Args:
        latitude (float): Location's latitude
        longitude (float): Location's longitude
        date (datetime.date): Calculate rise or set for given date
        timezone (int): Offset from UTC in minutes
        zenith (str): Calculate rise/set events, or twilight times
    Returns:
        tuple of datetime.time: The time for the given events in the specified
            timezone
    """
    # Delegate both events to sun_rise_set, returning (rise, set).
    return tuple(
        sun_rise_set(latitude, longitude, date, mode, timezone, zenith)
        for mode in ('rise', 'set')
    )
constant[Convenience function for calculating sunrise and sunset.
Civil twilight starts/ends when the Sun's centre is 6 degrees below
the horizon.
Nautical twilight starts/ends when the Sun's centre is 12 degrees
below the horizon.
Astronomical twilight starts/ends when the Sun's centre is 18 degrees below
the horizon.
Args:
latitude (float): Location's latitude
longitude (float): Location's longitude
date (datetime.date): Calculate rise or set for given date
timezone (int): Offset from UTC in minutes
zenith (str): Calculate rise/set events, or twilight times
Returns:
tuple of datetime.time: The time for the given events in the specified
timezone
]
return[tuple[[<ast.Call object at 0x7da18f723400>, <ast.Call object at 0x7da18f722710>]]] | keyword[def] identifier[sun_events] ( identifier[latitude] , identifier[longitude] , identifier[date] , identifier[timezone] = literal[int] , identifier[zenith] = keyword[None] ):
literal[string]
keyword[return] ( identifier[sun_rise_set] ( identifier[latitude] , identifier[longitude] , identifier[date] , literal[string] , identifier[timezone] , identifier[zenith] ),
identifier[sun_rise_set] ( identifier[latitude] , identifier[longitude] , identifier[date] , literal[string] , identifier[timezone] , identifier[zenith] )) | def sun_events(latitude, longitude, date, timezone=0, zenith=None):
"""Convenience function for calculating sunrise and sunset.
Civil twilight starts/ends when the Sun's centre is 6 degrees below
the horizon.
Nautical twilight starts/ends when the Sun's centre is 12 degrees
below the horizon.
Astronomical twilight starts/ends when the Sun's centre is 18 degrees below
the horizon.
Args:
latitude (float): Location's latitude
longitude (float): Location's longitude
date (datetime.date): Calculate rise or set for given date
timezone (int): Offset from UTC in minutes
zenith (str): Calculate rise/set events, or twilight times
Returns:
tuple of datetime.time: The time for the given events in the specified
timezone
"""
return (sun_rise_set(latitude, longitude, date, 'rise', timezone, zenith), sun_rise_set(latitude, longitude, date, 'set', timezone, zenith)) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.