code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_schema(self, path, with_refs=False, resolved=False):
"""Retrieve a schema.
:param path: schema's relative path.
:param with_refs: replace $refs in the schema.
:param resolved: resolve schema using the resolver
:py:const:`invenio_jsonschemas.config.JSONSCHEMAS_RESOLVER_CLS`
:raises invenio_jsonschemas.errors.JSONSchemaNotFound: If no schema
was found in the specified path.
:returns: The schema in a dictionary form.
"""
if path not in self.schemas:
raise JSONSchemaNotFound(path)
with open(os.path.join(self.schemas[path], path)) as file_:
schema = json.load(file_)
if with_refs:
schema = JsonRef.replace_refs(
schema,
base_uri=request.base_url,
loader=self.loader_cls() if self.loader_cls else None,
)
if resolved:
schema = self.resolver_cls(schema)
return schema | def function[get_schema, parameter[self, path, with_refs, resolved]]:
constant[Retrieve a schema.
:param path: schema's relative path.
:param with_refs: replace $refs in the schema.
:param resolved: resolve schema using the resolver
:py:const:`invenio_jsonschemas.config.JSONSCHEMAS_RESOLVER_CLS`
:raises invenio_jsonschemas.errors.JSONSchemaNotFound: If no schema
was found in the specified path.
:returns: The schema in a dictionary form.
]
if compare[name[path] <ast.NotIn object at 0x7da2590d7190> name[self].schemas] begin[:]
<ast.Raise object at 0x7da20c990670>
with call[name[open], parameter[call[name[os].path.join, parameter[call[name[self].schemas][name[path]], name[path]]]]] begin[:]
variable[schema] assign[=] call[name[json].load, parameter[name[file_]]]
if name[with_refs] begin[:]
variable[schema] assign[=] call[name[JsonRef].replace_refs, parameter[name[schema]]]
if name[resolved] begin[:]
variable[schema] assign[=] call[name[self].resolver_cls, parameter[name[schema]]]
return[name[schema]] | keyword[def] identifier[get_schema] ( identifier[self] , identifier[path] , identifier[with_refs] = keyword[False] , identifier[resolved] = keyword[False] ):
literal[string]
keyword[if] identifier[path] keyword[not] keyword[in] identifier[self] . identifier[schemas] :
keyword[raise] identifier[JSONSchemaNotFound] ( identifier[path] )
keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[schemas] [ identifier[path] ], identifier[path] )) keyword[as] identifier[file_] :
identifier[schema] = identifier[json] . identifier[load] ( identifier[file_] )
keyword[if] identifier[with_refs] :
identifier[schema] = identifier[JsonRef] . identifier[replace_refs] (
identifier[schema] ,
identifier[base_uri] = identifier[request] . identifier[base_url] ,
identifier[loader] = identifier[self] . identifier[loader_cls] () keyword[if] identifier[self] . identifier[loader_cls] keyword[else] keyword[None] ,
)
keyword[if] identifier[resolved] :
identifier[schema] = identifier[self] . identifier[resolver_cls] ( identifier[schema] )
keyword[return] identifier[schema] | def get_schema(self, path, with_refs=False, resolved=False):
"""Retrieve a schema.
:param path: schema's relative path.
:param with_refs: replace $refs in the schema.
:param resolved: resolve schema using the resolver
:py:const:`invenio_jsonschemas.config.JSONSCHEMAS_RESOLVER_CLS`
:raises invenio_jsonschemas.errors.JSONSchemaNotFound: If no schema
was found in the specified path.
:returns: The schema in a dictionary form.
"""
if path not in self.schemas:
raise JSONSchemaNotFound(path) # depends on [control=['if'], data=['path']]
with open(os.path.join(self.schemas[path], path)) as file_:
schema = json.load(file_)
if with_refs:
schema = JsonRef.replace_refs(schema, base_uri=request.base_url, loader=self.loader_cls() if self.loader_cls else None) # depends on [control=['if'], data=[]]
if resolved:
schema = self.resolver_cls(schema) # depends on [control=['if'], data=[]]
return schema # depends on [control=['with'], data=['file_']] |
def serialize(self, raw=False):
'''Encode the private part of the key in a base64 format by default,
but when raw is True it will return hex encoded bytes.
@return: bytes
'''
if raw:
return self._key.encode()
return self._key.encode(nacl.encoding.Base64Encoder) | def function[serialize, parameter[self, raw]]:
constant[Encode the private part of the key in a base64 format by default,
but when raw is True it will return hex encoded bytes.
@return: bytes
]
if name[raw] begin[:]
return[call[name[self]._key.encode, parameter[]]]
return[call[name[self]._key.encode, parameter[name[nacl].encoding.Base64Encoder]]] | keyword[def] identifier[serialize] ( identifier[self] , identifier[raw] = keyword[False] ):
literal[string]
keyword[if] identifier[raw] :
keyword[return] identifier[self] . identifier[_key] . identifier[encode] ()
keyword[return] identifier[self] . identifier[_key] . identifier[encode] ( identifier[nacl] . identifier[encoding] . identifier[Base64Encoder] ) | def serialize(self, raw=False):
"""Encode the private part of the key in a base64 format by default,
but when raw is True it will return hex encoded bytes.
@return: bytes
"""
if raw:
return self._key.encode() # depends on [control=['if'], data=[]]
return self._key.encode(nacl.encoding.Base64Encoder) |
def layout_json_params(self):
"""Return layout.json params in a flattened dict with name param as key."""
if self._layout_json_params is None:
self._layout_json_params = {}
for i in self.layout_json.get('inputs', []):
for p in i.get('parameters', []):
self._layout_json_params.setdefault(p.get('name'), p)
return self._layout_json_params | def function[layout_json_params, parameter[self]]:
constant[Return layout.json params in a flattened dict with name param as key.]
if compare[name[self]._layout_json_params is constant[None]] begin[:]
name[self]._layout_json_params assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[self].layout_json.get, parameter[constant[inputs], list[[]]]]] begin[:]
for taget[name[p]] in starred[call[name[i].get, parameter[constant[parameters], list[[]]]]] begin[:]
call[name[self]._layout_json_params.setdefault, parameter[call[name[p].get, parameter[constant[name]]], name[p]]]
return[name[self]._layout_json_params] | keyword[def] identifier[layout_json_params] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_layout_json_params] keyword[is] keyword[None] :
identifier[self] . identifier[_layout_json_params] ={}
keyword[for] identifier[i] keyword[in] identifier[self] . identifier[layout_json] . identifier[get] ( literal[string] ,[]):
keyword[for] identifier[p] keyword[in] identifier[i] . identifier[get] ( literal[string] ,[]):
identifier[self] . identifier[_layout_json_params] . identifier[setdefault] ( identifier[p] . identifier[get] ( literal[string] ), identifier[p] )
keyword[return] identifier[self] . identifier[_layout_json_params] | def layout_json_params(self):
"""Return layout.json params in a flattened dict with name param as key."""
if self._layout_json_params is None:
self._layout_json_params = {}
for i in self.layout_json.get('inputs', []):
for p in i.get('parameters', []):
self._layout_json_params.setdefault(p.get('name'), p) # depends on [control=['for'], data=['p']] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
return self._layout_json_params |
def camera_position(self, pose=None):
'''
returns camera position in world coordinates using self.rvec and self.tvec
from http://stackoverflow.com/questions/14515200/python-opencv-solvepnp-yields-wrong-translation-vector
'''
if pose is None:
pose = self.pose()
t, r = pose
return -np.matrix(cv2.Rodrigues(r)[0]).T * np.matrix(t) | def function[camera_position, parameter[self, pose]]:
constant[
returns camera position in world coordinates using self.rvec and self.tvec
from http://stackoverflow.com/questions/14515200/python-opencv-solvepnp-yields-wrong-translation-vector
]
if compare[name[pose] is constant[None]] begin[:]
variable[pose] assign[=] call[name[self].pose, parameter[]]
<ast.Tuple object at 0x7da1b11d9240> assign[=] name[pose]
return[binary_operation[<ast.UnaryOp object at 0x7da1b11da2c0> * call[name[np].matrix, parameter[name[t]]]]] | keyword[def] identifier[camera_position] ( identifier[self] , identifier[pose] = keyword[None] ):
literal[string]
keyword[if] identifier[pose] keyword[is] keyword[None] :
identifier[pose] = identifier[self] . identifier[pose] ()
identifier[t] , identifier[r] = identifier[pose]
keyword[return] - identifier[np] . identifier[matrix] ( identifier[cv2] . identifier[Rodrigues] ( identifier[r] )[ literal[int] ]). identifier[T] * identifier[np] . identifier[matrix] ( identifier[t] ) | def camera_position(self, pose=None):
"""
returns camera position in world coordinates using self.rvec and self.tvec
from http://stackoverflow.com/questions/14515200/python-opencv-solvepnp-yields-wrong-translation-vector
"""
if pose is None:
pose = self.pose() # depends on [control=['if'], data=['pose']]
(t, r) = pose
return -np.matrix(cv2.Rodrigues(r)[0]).T * np.matrix(t) |
def foreign_key_sets(chain, node):
"""
When a Django model has a ForeignKey to another model, the target
of the foreign key gets a '<modelname>_set' attribute for accessing
a queryset of the model owning the foreign key - eg:
class ModelA(models.Model):
pass
class ModelB(models.Model):
a = models.ForeignKey(ModelA)
Now, ModelA instances will have a modelb_set attribute.
It's also possible to explicitly name the relationship using the related_name argument
to the ForeignKey constructor. As it's impossible to know this without inspecting all
models before processing, we'll instead do a "best guess" approach and see if the attribute
being accessed goes on to be used as a queryset. This is via 'duck typing': if the method
called on the attribute being accessed is something we might find in a queryset, we'll
warn.
"""
quack = False
if node.attrname in MANAGER_ATTRS or node.attrname.endswith('_set'):
# if this is a X_set method, that's a pretty strong signal that this is the default
# Django name, rather than one set by related_name
quack = True
else:
# we will
if isinstance(node.parent, Attribute):
func_name = getattr(node.parent, 'attrname', None)
if func_name in MANAGER_ATTRS:
quack = True
if quack:
children = list(node.get_children())
for child in children:
try:
inferred_cls = child.inferred()
except InferenceError:
pass
else:
for cls in inferred_cls:
if (node_is_subclass(cls,
'django.db.models.manager.Manager',
'django.db.models.base.Model',
'.Model',
'django.db.models.fields.related.ForeignObject')):
# This means that we are looking at a subclass of models.Model
# and something is trying to access a <something>_set attribute.
# Since this could exist, we will return so as not to raise an
# error.
return
chain() | def function[foreign_key_sets, parameter[chain, node]]:
constant[
When a Django model has a ForeignKey to another model, the target
of the foreign key gets a '<modelname>_set' attribute for accessing
a queryset of the model owning the foreign key - eg:
class ModelA(models.Model):
pass
class ModelB(models.Model):
a = models.ForeignKey(ModelA)
Now, ModelA instances will have a modelb_set attribute.
It's also possible to explicitly name the relationship using the related_name argument
to the ForeignKey constructor. As it's impossible to know this without inspecting all
models before processing, we'll instead do a "best guess" approach and see if the attribute
being accessed goes on to be used as a queryset. This is via 'duck typing': if the method
called on the attribute being accessed is something we might find in a queryset, we'll
warn.
]
variable[quack] assign[=] constant[False]
if <ast.BoolOp object at 0x7da20c6aaa40> begin[:]
variable[quack] assign[=] constant[True]
if name[quack] begin[:]
variable[children] assign[=] call[name[list], parameter[call[name[node].get_children, parameter[]]]]
for taget[name[child]] in starred[name[children]] begin[:]
<ast.Try object at 0x7da20c6abe80>
call[name[chain], parameter[]] | keyword[def] identifier[foreign_key_sets] ( identifier[chain] , identifier[node] ):
literal[string]
identifier[quack] = keyword[False]
keyword[if] identifier[node] . identifier[attrname] keyword[in] identifier[MANAGER_ATTRS] keyword[or] identifier[node] . identifier[attrname] . identifier[endswith] ( literal[string] ):
identifier[quack] = keyword[True]
keyword[else] :
keyword[if] identifier[isinstance] ( identifier[node] . identifier[parent] , identifier[Attribute] ):
identifier[func_name] = identifier[getattr] ( identifier[node] . identifier[parent] , literal[string] , keyword[None] )
keyword[if] identifier[func_name] keyword[in] identifier[MANAGER_ATTRS] :
identifier[quack] = keyword[True]
keyword[if] identifier[quack] :
identifier[children] = identifier[list] ( identifier[node] . identifier[get_children] ())
keyword[for] identifier[child] keyword[in] identifier[children] :
keyword[try] :
identifier[inferred_cls] = identifier[child] . identifier[inferred] ()
keyword[except] identifier[InferenceError] :
keyword[pass]
keyword[else] :
keyword[for] identifier[cls] keyword[in] identifier[inferred_cls] :
keyword[if] ( identifier[node_is_subclass] ( identifier[cls] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] )):
keyword[return]
identifier[chain] () | def foreign_key_sets(chain, node):
"""
When a Django model has a ForeignKey to another model, the target
of the foreign key gets a '<modelname>_set' attribute for accessing
a queryset of the model owning the foreign key - eg:
class ModelA(models.Model):
pass
class ModelB(models.Model):
a = models.ForeignKey(ModelA)
Now, ModelA instances will have a modelb_set attribute.
It's also possible to explicitly name the relationship using the related_name argument
to the ForeignKey constructor. As it's impossible to know this without inspecting all
models before processing, we'll instead do a "best guess" approach and see if the attribute
being accessed goes on to be used as a queryset. This is via 'duck typing': if the method
called on the attribute being accessed is something we might find in a queryset, we'll
warn.
"""
quack = False
if node.attrname in MANAGER_ATTRS or node.attrname.endswith('_set'):
# if this is a X_set method, that's a pretty strong signal that this is the default
# Django name, rather than one set by related_name
quack = True # depends on [control=['if'], data=[]]
# we will
elif isinstance(node.parent, Attribute):
func_name = getattr(node.parent, 'attrname', None)
if func_name in MANAGER_ATTRS:
quack = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if quack:
children = list(node.get_children())
for child in children:
try:
inferred_cls = child.inferred() # depends on [control=['try'], data=[]]
except InferenceError:
pass # depends on [control=['except'], data=[]]
else:
for cls in inferred_cls:
if node_is_subclass(cls, 'django.db.models.manager.Manager', 'django.db.models.base.Model', '.Model', 'django.db.models.fields.related.ForeignObject'):
# This means that we are looking at a subclass of models.Model
# and something is trying to access a <something>_set attribute.
# Since this could exist, we will return so as not to raise an
# error.
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls']] # depends on [control=['for'], data=['child']] # depends on [control=['if'], data=[]]
chain() |
def append_checksum(self, f, checksum):
""" Append ESPLoader checksum to the just-written image """
align_file_position(f, 16)
f.write(struct.pack(b'B', checksum)) | def function[append_checksum, parameter[self, f, checksum]]:
constant[ Append ESPLoader checksum to the just-written image ]
call[name[align_file_position], parameter[name[f], constant[16]]]
call[name[f].write, parameter[call[name[struct].pack, parameter[constant[b'B'], name[checksum]]]]] | keyword[def] identifier[append_checksum] ( identifier[self] , identifier[f] , identifier[checksum] ):
literal[string]
identifier[align_file_position] ( identifier[f] , literal[int] )
identifier[f] . identifier[write] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[checksum] )) | def append_checksum(self, f, checksum):
""" Append ESPLoader checksum to the just-written image """
align_file_position(f, 16)
f.write(struct.pack(b'B', checksum)) |
def all_databases(client, exclude=['local']):
"""
Yield all databases except excluded (default
excludes 'local').
"""
return (
client[db_name]
for db_name in client.list_database_names()
if db_name not in exclude
) | def function[all_databases, parameter[client, exclude]]:
constant[
Yield all databases except excluded (default
excludes 'local').
]
return[<ast.GeneratorExp object at 0x7da1b2404130>] | keyword[def] identifier[all_databases] ( identifier[client] , identifier[exclude] =[ literal[string] ]):
literal[string]
keyword[return] (
identifier[client] [ identifier[db_name] ]
keyword[for] identifier[db_name] keyword[in] identifier[client] . identifier[list_database_names] ()
keyword[if] identifier[db_name] keyword[not] keyword[in] identifier[exclude]
) | def all_databases(client, exclude=['local']):
"""
Yield all databases except excluded (default
excludes 'local').
"""
return (client[db_name] for db_name in client.list_database_names() if db_name not in exclude) |
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
"""
This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test
time, to finalize predictions. This is (confusingly) a separate notion from the "decoder"
in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``.
This method trims the output predictions to the first end symbol, replaces indices with
corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``.
"""
action_mapping = output_dict['action_mapping']
best_actions = output_dict["best_action_sequence"]
debug_infos = output_dict['debug_info']
batch_action_info = []
for batch_index, (predicted_actions, debug_info) in enumerate(zip(best_actions, debug_infos)):
instance_action_info = []
for predicted_action, action_debug_info in zip(predicted_actions, debug_info):
action_info = {}
action_info['predicted_action'] = predicted_action
considered_actions = action_debug_info['considered_actions']
probabilities = action_debug_info['probabilities']
actions = []
for action, probability in zip(considered_actions, probabilities):
if action != -1:
actions.append((action_mapping[(batch_index, action)], probability))
actions.sort()
considered_actions, probabilities = zip(*actions)
action_info['considered_actions'] = considered_actions
action_info['action_probabilities'] = probabilities
action_info['question_attention'] = action_debug_info.get('question_attention', [])
instance_action_info.append(action_info)
batch_action_info.append(instance_action_info)
output_dict["predicted_actions"] = batch_action_info
return output_dict | def function[decode, parameter[self, output_dict]]:
constant[
This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test
time, to finalize predictions. This is (confusingly) a separate notion from the "decoder"
in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``.
This method trims the output predictions to the first end symbol, replaces indices with
corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``.
]
variable[action_mapping] assign[=] call[name[output_dict]][constant[action_mapping]]
variable[best_actions] assign[=] call[name[output_dict]][constant[best_action_sequence]]
variable[debug_infos] assign[=] call[name[output_dict]][constant[debug_info]]
variable[batch_action_info] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c7ca200>, <ast.Tuple object at 0x7da20c7c8460>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[best_actions], name[debug_infos]]]]]] begin[:]
variable[instance_action_info] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c7cb5b0>, <ast.Name object at 0x7da20c7c86d0>]]] in starred[call[name[zip], parameter[name[predicted_actions], name[debug_info]]]] begin[:]
variable[action_info] assign[=] dictionary[[], []]
call[name[action_info]][constant[predicted_action]] assign[=] name[predicted_action]
variable[considered_actions] assign[=] call[name[action_debug_info]][constant[considered_actions]]
variable[probabilities] assign[=] call[name[action_debug_info]][constant[probabilities]]
variable[actions] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c7ca110>, <ast.Name object at 0x7da20c7cbc70>]]] in starred[call[name[zip], parameter[name[considered_actions], name[probabilities]]]] begin[:]
if compare[name[action] not_equal[!=] <ast.UnaryOp object at 0x7da20c7c9bd0>] begin[:]
call[name[actions].append, parameter[tuple[[<ast.Subscript object at 0x7da20c7c9ff0>, <ast.Name object at 0x7da20c7cb0d0>]]]]
call[name[actions].sort, parameter[]]
<ast.Tuple object at 0x7da20c7caf80> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da20c7cb4c0>]]
call[name[action_info]][constant[considered_actions]] assign[=] name[considered_actions]
call[name[action_info]][constant[action_probabilities]] assign[=] name[probabilities]
call[name[action_info]][constant[question_attention]] assign[=] call[name[action_debug_info].get, parameter[constant[question_attention], list[[]]]]
call[name[instance_action_info].append, parameter[name[action_info]]]
call[name[batch_action_info].append, parameter[name[instance_action_info]]]
call[name[output_dict]][constant[predicted_actions]] assign[=] name[batch_action_info]
return[name[output_dict]] | keyword[def] identifier[decode] ( identifier[self] , identifier[output_dict] : identifier[Dict] [ identifier[str] , identifier[torch] . identifier[Tensor] ])-> identifier[Dict] [ identifier[str] , identifier[torch] . identifier[Tensor] ]:
literal[string]
identifier[action_mapping] = identifier[output_dict] [ literal[string] ]
identifier[best_actions] = identifier[output_dict] [ literal[string] ]
identifier[debug_infos] = identifier[output_dict] [ literal[string] ]
identifier[batch_action_info] =[]
keyword[for] identifier[batch_index] ,( identifier[predicted_actions] , identifier[debug_info] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[best_actions] , identifier[debug_infos] )):
identifier[instance_action_info] =[]
keyword[for] identifier[predicted_action] , identifier[action_debug_info] keyword[in] identifier[zip] ( identifier[predicted_actions] , identifier[debug_info] ):
identifier[action_info] ={}
identifier[action_info] [ literal[string] ]= identifier[predicted_action]
identifier[considered_actions] = identifier[action_debug_info] [ literal[string] ]
identifier[probabilities] = identifier[action_debug_info] [ literal[string] ]
identifier[actions] =[]
keyword[for] identifier[action] , identifier[probability] keyword[in] identifier[zip] ( identifier[considered_actions] , identifier[probabilities] ):
keyword[if] identifier[action] !=- literal[int] :
identifier[actions] . identifier[append] (( identifier[action_mapping] [( identifier[batch_index] , identifier[action] )], identifier[probability] ))
identifier[actions] . identifier[sort] ()
identifier[considered_actions] , identifier[probabilities] = identifier[zip] (* identifier[actions] )
identifier[action_info] [ literal[string] ]= identifier[considered_actions]
identifier[action_info] [ literal[string] ]= identifier[probabilities]
identifier[action_info] [ literal[string] ]= identifier[action_debug_info] . identifier[get] ( literal[string] ,[])
identifier[instance_action_info] . identifier[append] ( identifier[action_info] )
identifier[batch_action_info] . identifier[append] ( identifier[instance_action_info] )
identifier[output_dict] [ literal[string] ]= identifier[batch_action_info]
keyword[return] identifier[output_dict] | def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
"""
This method overrides ``Model.decode``, which gets called after ``Model.forward``, at test
time, to finalize predictions. This is (confusingly) a separate notion from the "decoder"
in "encoder/decoder", where that decoder logic lives in the ``TransitionFunction``.
This method trims the output predictions to the first end symbol, replaces indices with
corresponding tokens, and adds a field called ``predicted_tokens`` to the ``output_dict``.
"""
action_mapping = output_dict['action_mapping']
best_actions = output_dict['best_action_sequence']
debug_infos = output_dict['debug_info']
batch_action_info = []
for (batch_index, (predicted_actions, debug_info)) in enumerate(zip(best_actions, debug_infos)):
instance_action_info = []
for (predicted_action, action_debug_info) in zip(predicted_actions, debug_info):
action_info = {}
action_info['predicted_action'] = predicted_action
considered_actions = action_debug_info['considered_actions']
probabilities = action_debug_info['probabilities']
actions = []
for (action, probability) in zip(considered_actions, probabilities):
if action != -1:
actions.append((action_mapping[batch_index, action], probability)) # depends on [control=['if'], data=['action']] # depends on [control=['for'], data=[]]
actions.sort()
(considered_actions, probabilities) = zip(*actions)
action_info['considered_actions'] = considered_actions
action_info['action_probabilities'] = probabilities
action_info['question_attention'] = action_debug_info.get('question_attention', [])
instance_action_info.append(action_info) # depends on [control=['for'], data=[]]
batch_action_info.append(instance_action_info) # depends on [control=['for'], data=[]]
output_dict['predicted_actions'] = batch_action_info
return output_dict |
def _munge_whitespace(self, text):
"""_munge_whitespace(text : string) -> string
Munge whitespace in text: expand tabs and convert all other
whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz"
becomes " foo bar baz".
"""
if self.expand_tabs:
# text = text.expandtabs()
text = ' '.join((' '.join(text.split('\n'))).split('\t'))
if self.replace_whitespace:
# if isinstance(text, str):
# text = text.translate(self.whitespace_trans)
# elif isinstance(text, _unicode):
# text = text.translate(self.unicode_whitespace_trans)
text = ' '.join(' '.join(text.split('\n')).split('\t'))
return text | def function[_munge_whitespace, parameter[self, text]]:
constant[_munge_whitespace(text : string) -> string
Munge whitespace in text: expand tabs and convert all other
whitespace characters to spaces. Eg. " foo\tbar\n\nbaz"
becomes " foo bar baz".
]
if name[self].expand_tabs begin[:]
variable[text] assign[=] call[constant[ ].join, parameter[call[call[constant[ ].join, parameter[call[name[text].split, parameter[constant[
]]]]].split, parameter[constant[ ]]]]]
if name[self].replace_whitespace begin[:]
variable[text] assign[=] call[constant[ ].join, parameter[call[call[constant[ ].join, parameter[call[name[text].split, parameter[constant[
]]]]].split, parameter[constant[ ]]]]]
return[name[text]] | keyword[def] identifier[_munge_whitespace] ( identifier[self] , identifier[text] ):
literal[string]
keyword[if] identifier[self] . identifier[expand_tabs] :
identifier[text] = literal[string] . identifier[join] (( literal[string] . identifier[join] ( identifier[text] . identifier[split] ( literal[string] ))). identifier[split] ( literal[string] ))
keyword[if] identifier[self] . identifier[replace_whitespace] :
identifier[text] = literal[string] . identifier[join] ( literal[string] . identifier[join] ( identifier[text] . identifier[split] ( literal[string] )). identifier[split] ( literal[string] ))
keyword[return] identifier[text] | def _munge_whitespace(self, text):
"""_munge_whitespace(text : string) -> string
Munge whitespace in text: expand tabs and convert all other
whitespace characters to spaces. Eg. " foo\\tbar\\n\\nbaz"
becomes " foo bar baz".
"""
if self.expand_tabs:
# text = text.expandtabs()
text = ' '.join(' '.join(text.split('\n')).split('\t')) # depends on [control=['if'], data=[]]
if self.replace_whitespace:
# if isinstance(text, str):
# text = text.translate(self.whitespace_trans)
# elif isinstance(text, _unicode):
# text = text.translate(self.unicode_whitespace_trans)
text = ' '.join(' '.join(text.split('\n')).split('\t')) # depends on [control=['if'], data=[]]
return text |
def get_area_def(self, dsid):
"""Get the area definition of the dataset."""
geocoding = self.root.find('.//Tile_Geocoding')
epsg = geocoding.find('HORIZONTAL_CS_CODE').text
rows = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NROWS').text)
cols = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NCOLS').text)
geoposition = geocoding.find('Geoposition[@resolution="' + str(dsid.resolution) + '"]')
ulx = float(geoposition.find('ULX').text)
uly = float(geoposition.find('ULY').text)
xdim = float(geoposition.find('XDIM').text)
ydim = float(geoposition.find('YDIM').text)
area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly)
area = geometry.AreaDefinition(
self.tile,
"On-the-fly area",
self.tile,
{'init': epsg},
cols,
rows,
area_extent)
return area | def function[get_area_def, parameter[self, dsid]]:
constant[Get the area definition of the dataset.]
variable[geocoding] assign[=] call[name[self].root.find, parameter[constant[.//Tile_Geocoding]]]
variable[epsg] assign[=] call[name[geocoding].find, parameter[constant[HORIZONTAL_CS_CODE]]].text
variable[rows] assign[=] call[name[int], parameter[call[name[geocoding].find, parameter[binary_operation[binary_operation[constant[Size[@resolution="] + call[name[str], parameter[name[dsid].resolution]]] + constant["]/NROWS]]]].text]]
variable[cols] assign[=] call[name[int], parameter[call[name[geocoding].find, parameter[binary_operation[binary_operation[constant[Size[@resolution="] + call[name[str], parameter[name[dsid].resolution]]] + constant["]/NCOLS]]]].text]]
variable[geoposition] assign[=] call[name[geocoding].find, parameter[binary_operation[binary_operation[constant[Geoposition[@resolution="] + call[name[str], parameter[name[dsid].resolution]]] + constant["]]]]]
variable[ulx] assign[=] call[name[float], parameter[call[name[geoposition].find, parameter[constant[ULX]]].text]]
variable[uly] assign[=] call[name[float], parameter[call[name[geoposition].find, parameter[constant[ULY]]].text]]
variable[xdim] assign[=] call[name[float], parameter[call[name[geoposition].find, parameter[constant[XDIM]]].text]]
variable[ydim] assign[=] call[name[float], parameter[call[name[geoposition].find, parameter[constant[YDIM]]].text]]
variable[area_extent] assign[=] tuple[[<ast.Name object at 0x7da1b1d74400>, <ast.BinOp object at 0x7da1b1d74df0>, <ast.BinOp object at 0x7da1b1d764a0>, <ast.Name object at 0x7da1b1d75a80>]]
variable[area] assign[=] call[name[geometry].AreaDefinition, parameter[name[self].tile, constant[On-the-fly area], name[self].tile, dictionary[[<ast.Constant object at 0x7da1b1d74640>], [<ast.Name object at 0x7da1b1d740d0>]], name[cols], name[rows], name[area_extent]]]
return[name[area]] | keyword[def] identifier[get_area_def] ( identifier[self] , identifier[dsid] ):
literal[string]
identifier[geocoding] = identifier[self] . identifier[root] . identifier[find] ( literal[string] )
identifier[epsg] = identifier[geocoding] . identifier[find] ( literal[string] ). identifier[text]
identifier[rows] = identifier[int] ( identifier[geocoding] . identifier[find] ( literal[string] + identifier[str] ( identifier[dsid] . identifier[resolution] )+ literal[string] ). identifier[text] )
identifier[cols] = identifier[int] ( identifier[geocoding] . identifier[find] ( literal[string] + identifier[str] ( identifier[dsid] . identifier[resolution] )+ literal[string] ). identifier[text] )
identifier[geoposition] = identifier[geocoding] . identifier[find] ( literal[string] + identifier[str] ( identifier[dsid] . identifier[resolution] )+ literal[string] )
identifier[ulx] = identifier[float] ( identifier[geoposition] . identifier[find] ( literal[string] ). identifier[text] )
identifier[uly] = identifier[float] ( identifier[geoposition] . identifier[find] ( literal[string] ). identifier[text] )
identifier[xdim] = identifier[float] ( identifier[geoposition] . identifier[find] ( literal[string] ). identifier[text] )
identifier[ydim] = identifier[float] ( identifier[geoposition] . identifier[find] ( literal[string] ). identifier[text] )
identifier[area_extent] =( identifier[ulx] , identifier[uly] + identifier[rows] * identifier[ydim] , identifier[ulx] + identifier[cols] * identifier[xdim] , identifier[uly] )
identifier[area] = identifier[geometry] . identifier[AreaDefinition] (
identifier[self] . identifier[tile] ,
literal[string] ,
identifier[self] . identifier[tile] ,
{ literal[string] : identifier[epsg] },
identifier[cols] ,
identifier[rows] ,
identifier[area_extent] )
keyword[return] identifier[area] | def get_area_def(self, dsid):
"""Get the area definition of the dataset."""
geocoding = self.root.find('.//Tile_Geocoding')
epsg = geocoding.find('HORIZONTAL_CS_CODE').text
rows = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NROWS').text)
cols = int(geocoding.find('Size[@resolution="' + str(dsid.resolution) + '"]/NCOLS').text)
geoposition = geocoding.find('Geoposition[@resolution="' + str(dsid.resolution) + '"]')
ulx = float(geoposition.find('ULX').text)
uly = float(geoposition.find('ULY').text)
xdim = float(geoposition.find('XDIM').text)
ydim = float(geoposition.find('YDIM').text)
area_extent = (ulx, uly + rows * ydim, ulx + cols * xdim, uly)
area = geometry.AreaDefinition(self.tile, 'On-the-fly area', self.tile, {'init': epsg}, cols, rows, area_extent)
return area |
def append(self, value):
'''Add another value to the attribute'''
if self.__dict__['values']:
self.__dict__['changetype'] = MODIFY_REPLACE
self.__dict__['values'].append(value) | def function[append, parameter[self, value]]:
constant[Add another value to the attribute]
if call[name[self].__dict__][constant[values]] begin[:]
call[name[self].__dict__][constant[changetype]] assign[=] name[MODIFY_REPLACE]
call[call[name[self].__dict__][constant[values]].append, parameter[name[value]]] | keyword[def] identifier[append] ( identifier[self] , identifier[value] ):
literal[string]
keyword[if] identifier[self] . identifier[__dict__] [ literal[string] ]:
identifier[self] . identifier[__dict__] [ literal[string] ]= identifier[MODIFY_REPLACE]
identifier[self] . identifier[__dict__] [ literal[string] ]. identifier[append] ( identifier[value] ) | def append(self, value):
"""Add another value to the attribute"""
if self.__dict__['values']:
self.__dict__['changetype'] = MODIFY_REPLACE # depends on [control=['if'], data=[]]
self.__dict__['values'].append(value) |
def native(s, encoding='utf-8', fallback='iso-8859-1'):
"""Convert a given string into a native string."""
if isinstance(s, str):
return s
if str is unicode: # Python 3.x ->
return unicodestr(s, encoding, fallback)
return bytestring(s, encoding, fallback) | def function[native, parameter[s, encoding, fallback]]:
constant[Convert a given string into a native string.]
if call[name[isinstance], parameter[name[s], name[str]]] begin[:]
return[name[s]]
if compare[name[str] is name[unicode]] begin[:]
return[call[name[unicodestr], parameter[name[s], name[encoding], name[fallback]]]]
return[call[name[bytestring], parameter[name[s], name[encoding], name[fallback]]]] | keyword[def] identifier[native] ( identifier[s] , identifier[encoding] = literal[string] , identifier[fallback] = literal[string] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[s] , identifier[str] ):
keyword[return] identifier[s]
keyword[if] identifier[str] keyword[is] identifier[unicode] :
keyword[return] identifier[unicodestr] ( identifier[s] , identifier[encoding] , identifier[fallback] )
keyword[return] identifier[bytestring] ( identifier[s] , identifier[encoding] , identifier[fallback] ) | def native(s, encoding='utf-8', fallback='iso-8859-1'):
"""Convert a given string into a native string."""
if isinstance(s, str):
return s # depends on [control=['if'], data=[]]
if str is unicode: # Python 3.x ->
return unicodestr(s, encoding, fallback) # depends on [control=['if'], data=[]]
return bytestring(s, encoding, fallback) |
def addAttachment(self, attachment):
"""Adds an attachment or a list of attachments to the Analysis Request
"""
if not isinstance(attachment, (list, tuple)):
attachment = [attachment]
original = self.getAttachment() or []
# Function addAttachment can accept brain, objects or uids
original = map(api.get_uid, original)
attachment = map(api.get_uid, attachment)
# Boil out attachments already assigned to this Analysis Request
attachment = filter(lambda at: at not in original, attachment)
if attachment:
original.extend(attachment)
self.setAttachment(original) | def function[addAttachment, parameter[self, attachment]]:
constant[Adds an attachment or a list of attachments to the Analysis Request
]
if <ast.UnaryOp object at 0x7da204963af0> begin[:]
variable[attachment] assign[=] list[[<ast.Name object at 0x7da204960ac0>]]
variable[original] assign[=] <ast.BoolOp object at 0x7da204962860>
variable[original] assign[=] call[name[map], parameter[name[api].get_uid, name[original]]]
variable[attachment] assign[=] call[name[map], parameter[name[api].get_uid, name[attachment]]]
variable[attachment] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da2047eaa70>, name[attachment]]]
if name[attachment] begin[:]
call[name[original].extend, parameter[name[attachment]]]
call[name[self].setAttachment, parameter[name[original]]] | keyword[def] identifier[addAttachment] ( identifier[self] , identifier[attachment] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[attachment] ,( identifier[list] , identifier[tuple] )):
identifier[attachment] =[ identifier[attachment] ]
identifier[original] = identifier[self] . identifier[getAttachment] () keyword[or] []
identifier[original] = identifier[map] ( identifier[api] . identifier[get_uid] , identifier[original] )
identifier[attachment] = identifier[map] ( identifier[api] . identifier[get_uid] , identifier[attachment] )
identifier[attachment] = identifier[filter] ( keyword[lambda] identifier[at] : identifier[at] keyword[not] keyword[in] identifier[original] , identifier[attachment] )
keyword[if] identifier[attachment] :
identifier[original] . identifier[extend] ( identifier[attachment] )
identifier[self] . identifier[setAttachment] ( identifier[original] ) | def addAttachment(self, attachment):
"""Adds an attachment or a list of attachments to the Analysis Request
"""
if not isinstance(attachment, (list, tuple)):
attachment = [attachment] # depends on [control=['if'], data=[]]
original = self.getAttachment() or []
# Function addAttachment can accept brain, objects or uids
original = map(api.get_uid, original)
attachment = map(api.get_uid, attachment)
# Boil out attachments already assigned to this Analysis Request
attachment = filter(lambda at: at not in original, attachment)
if attachment:
original.extend(attachment)
self.setAttachment(original) # depends on [control=['if'], data=[]] |
def _run_ids(runner, pcap):
"""
Runs the specified IDS runner.
:param runner: Runner instance to use
:param pcap: File path to pcap for analysis
:returns: dict of run metadata/alerts
"""
run = {'name': runner.conf.get('name'),
'module': runner.conf.get('module'),
'ruleset': runner.conf.get('ruleset', 'default'),
'status': STATUS_FAILED,
}
try:
run_start = datetime.now()
version, alerts = runner.run(pcap)
run['version'] = version or 'Unknown'
run['status'] = STATUS_SUCCESS
run['alerts'] = alerts
except Exception as ex:
run['error'] = str(ex)
finally:
run['duration'] = duration(run_start)
return run | def function[_run_ids, parameter[runner, pcap]]:
constant[
Runs the specified IDS runner.
:param runner: Runner instance to use
:param pcap: File path to pcap for analysis
:returns: dict of run metadata/alerts
]
variable[run] assign[=] dictionary[[<ast.Constant object at 0x7da20e9b2ec0>, <ast.Constant object at 0x7da20e9b0eb0>, <ast.Constant object at 0x7da20e9b1b40>, <ast.Constant object at 0x7da20e9b1180>], [<ast.Call object at 0x7da20e9b24d0>, <ast.Call object at 0x7da20e9b3460>, <ast.Call object at 0x7da20e9b2710>, <ast.Name object at 0x7da20e9b2260>]]
<ast.Try object at 0x7da20e9b2650>
return[name[run]] | keyword[def] identifier[_run_ids] ( identifier[runner] , identifier[pcap] ):
literal[string]
identifier[run] ={ literal[string] : identifier[runner] . identifier[conf] . identifier[get] ( literal[string] ),
literal[string] : identifier[runner] . identifier[conf] . identifier[get] ( literal[string] ),
literal[string] : identifier[runner] . identifier[conf] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[STATUS_FAILED] ,
}
keyword[try] :
identifier[run_start] = identifier[datetime] . identifier[now] ()
identifier[version] , identifier[alerts] = identifier[runner] . identifier[run] ( identifier[pcap] )
identifier[run] [ literal[string] ]= identifier[version] keyword[or] literal[string]
identifier[run] [ literal[string] ]= identifier[STATUS_SUCCESS]
identifier[run] [ literal[string] ]= identifier[alerts]
keyword[except] identifier[Exception] keyword[as] identifier[ex] :
identifier[run] [ literal[string] ]= identifier[str] ( identifier[ex] )
keyword[finally] :
identifier[run] [ literal[string] ]= identifier[duration] ( identifier[run_start] )
keyword[return] identifier[run] | def _run_ids(runner, pcap):
"""
Runs the specified IDS runner.
:param runner: Runner instance to use
:param pcap: File path to pcap for analysis
:returns: dict of run metadata/alerts
"""
run = {'name': runner.conf.get('name'), 'module': runner.conf.get('module'), 'ruleset': runner.conf.get('ruleset', 'default'), 'status': STATUS_FAILED}
try:
run_start = datetime.now()
(version, alerts) = runner.run(pcap)
run['version'] = version or 'Unknown'
run['status'] = STATUS_SUCCESS
run['alerts'] = alerts # depends on [control=['try'], data=[]]
except Exception as ex:
run['error'] = str(ex) # depends on [control=['except'], data=['ex']]
finally:
run['duration'] = duration(run_start)
return run |
def _evaluate_function(self, Ybus, V, Sbus, pv, pq):
""" Evaluates F(x).
"""
mis = multiply(V, conj(Ybus * V)) - Sbus
F = r_[mis[pv].real, mis[pq].real, mis[pq].imag]
return F | def function[_evaluate_function, parameter[self, Ybus, V, Sbus, pv, pq]]:
constant[ Evaluates F(x).
]
variable[mis] assign[=] binary_operation[call[name[multiply], parameter[name[V], call[name[conj], parameter[binary_operation[name[Ybus] * name[V]]]]]] - name[Sbus]]
variable[F] assign[=] call[name[r_]][tuple[[<ast.Attribute object at 0x7da1b2492410>, <ast.Attribute object at 0x7da1b257c0a0>, <ast.Attribute object at 0x7da1b257cf40>]]]
return[name[F]] | keyword[def] identifier[_evaluate_function] ( identifier[self] , identifier[Ybus] , identifier[V] , identifier[Sbus] , identifier[pv] , identifier[pq] ):
literal[string]
identifier[mis] = identifier[multiply] ( identifier[V] , identifier[conj] ( identifier[Ybus] * identifier[V] ))- identifier[Sbus]
identifier[F] = identifier[r_] [ identifier[mis] [ identifier[pv] ]. identifier[real] , identifier[mis] [ identifier[pq] ]. identifier[real] , identifier[mis] [ identifier[pq] ]. identifier[imag] ]
keyword[return] identifier[F] | def _evaluate_function(self, Ybus, V, Sbus, pv, pq):
""" Evaluates F(x).
"""
mis = multiply(V, conj(Ybus * V)) - Sbus
F = r_[mis[pv].real, mis[pq].real, mis[pq].imag]
return F |
def generate_options_map():
"""Generate an ``options_map` to pass to ``extract_from_dir``
This is the options_map that's used to generate a Jinja2 environment. We
want to generate and environment for extraction that's the same as the
environment we use for rendering.
This allows developers to explicitly set a ``JINJA2_CONFIG`` in settings.
If that's not there, then this will pull the relevant bits from the first
Jinja2 backend listed in ``TEMPLATES``.
"""
try:
return settings.PUENTE['JINJA2_CONFIG']
except KeyError:
pass
# If using Django 1.8+, we can skim the TEMPLATES for a backend that we
# know about and extract the settings from that.
for tmpl_config in getattr(settings, 'TEMPLATES', []):
try:
backend = tmpl_config['BACKEND']
except KeyError:
continue
if backend == 'django_jinja.backend.Jinja2':
extensions = tmpl_config.get('OPTIONS', {}).get('extensions', [])
return {
'**.*': {
'extensions': ','.join(extensions),
'silent': 'False',
}
}
# If this is Django 1.7 and Jingo, try to grab extensions from
# JINJA_CONFIG.
if getattr(settings, 'JINJA_CONFIG'):
jinja_config = settings.JINJA_CONFIG
if callable(jinja_config):
jinja_config = jinja_config()
return {
'**.*': {
'extensions': ','.join(jinja_config['extensions']),
'silent': 'False',
}
}
raise CommandError(
'No valid jinja2 config found in settings. See configuration '
'documentation.'
) | def function[generate_options_map, parameter[]]:
constant[Generate an ``options_map` to pass to ``extract_from_dir``
This is the options_map that's used to generate a Jinja2 environment. We
want to generate and environment for extraction that's the same as the
environment we use for rendering.
This allows developers to explicitly set a ``JINJA2_CONFIG`` in settings.
If that's not there, then this will pull the relevant bits from the first
Jinja2 backend listed in ``TEMPLATES``.
]
<ast.Try object at 0x7da18eb54820>
for taget[name[tmpl_config]] in starred[call[name[getattr], parameter[name[settings], constant[TEMPLATES], list[[]]]]] begin[:]
<ast.Try object at 0x7da18eb55540>
if compare[name[backend] equal[==] constant[django_jinja.backend.Jinja2]] begin[:]
variable[extensions] assign[=] call[call[name[tmpl_config].get, parameter[constant[OPTIONS], dictionary[[], []]]].get, parameter[constant[extensions], list[[]]]]
return[dictionary[[<ast.Constant object at 0x7da18ede7a90>], [<ast.Dict object at 0x7da18ede5870>]]]
if call[name[getattr], parameter[name[settings], constant[JINJA_CONFIG]]] begin[:]
variable[jinja_config] assign[=] name[settings].JINJA_CONFIG
if call[name[callable], parameter[name[jinja_config]]] begin[:]
variable[jinja_config] assign[=] call[name[jinja_config], parameter[]]
return[dictionary[[<ast.Constant object at 0x7da18ede6c80>], [<ast.Dict object at 0x7da18ede49d0>]]]
<ast.Raise object at 0x7da20c6ab850> | keyword[def] identifier[generate_options_map] ():
literal[string]
keyword[try] :
keyword[return] identifier[settings] . identifier[PUENTE] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[for] identifier[tmpl_config] keyword[in] identifier[getattr] ( identifier[settings] , literal[string] ,[]):
keyword[try] :
identifier[backend] = identifier[tmpl_config] [ literal[string] ]
keyword[except] identifier[KeyError] :
keyword[continue]
keyword[if] identifier[backend] == literal[string] :
identifier[extensions] = identifier[tmpl_config] . identifier[get] ( literal[string] ,{}). identifier[get] ( literal[string] ,[])
keyword[return] {
literal[string] :{
literal[string] : literal[string] . identifier[join] ( identifier[extensions] ),
literal[string] : literal[string] ,
}
}
keyword[if] identifier[getattr] ( identifier[settings] , literal[string] ):
identifier[jinja_config] = identifier[settings] . identifier[JINJA_CONFIG]
keyword[if] identifier[callable] ( identifier[jinja_config] ):
identifier[jinja_config] = identifier[jinja_config] ()
keyword[return] {
literal[string] :{
literal[string] : literal[string] . identifier[join] ( identifier[jinja_config] [ literal[string] ]),
literal[string] : literal[string] ,
}
}
keyword[raise] identifier[CommandError] (
literal[string]
literal[string]
) | def generate_options_map():
"""Generate an ``options_map` to pass to ``extract_from_dir``
This is the options_map that's used to generate a Jinja2 environment. We
want to generate and environment for extraction that's the same as the
environment we use for rendering.
This allows developers to explicitly set a ``JINJA2_CONFIG`` in settings.
If that's not there, then this will pull the relevant bits from the first
Jinja2 backend listed in ``TEMPLATES``.
"""
try:
return settings.PUENTE['JINJA2_CONFIG'] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
# If using Django 1.8+, we can skim the TEMPLATES for a backend that we
# know about and extract the settings from that.
for tmpl_config in getattr(settings, 'TEMPLATES', []):
try:
backend = tmpl_config['BACKEND'] # depends on [control=['try'], data=[]]
except KeyError:
continue # depends on [control=['except'], data=[]]
if backend == 'django_jinja.backend.Jinja2':
extensions = tmpl_config.get('OPTIONS', {}).get('extensions', [])
return {'**.*': {'extensions': ','.join(extensions), 'silent': 'False'}} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['tmpl_config']]
# If this is Django 1.7 and Jingo, try to grab extensions from
# JINJA_CONFIG.
if getattr(settings, 'JINJA_CONFIG'):
jinja_config = settings.JINJA_CONFIG
if callable(jinja_config):
jinja_config = jinja_config() # depends on [control=['if'], data=[]]
return {'**.*': {'extensions': ','.join(jinja_config['extensions']), 'silent': 'False'}} # depends on [control=['if'], data=[]]
raise CommandError('No valid jinja2 config found in settings. See configuration documentation.') |
def parse_gcs_url(gsurl):
"""
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
"""
parsed_url = urlparse(gsurl)
if not parsed_url.netloc:
raise AirflowException('Please provide a bucket name')
else:
bucket = parsed_url.netloc
blob = parsed_url.path.strip('/')
return bucket, blob | def function[parse_gcs_url, parameter[gsurl]]:
constant[
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
]
variable[parsed_url] assign[=] call[name[urlparse], parameter[name[gsurl]]]
if <ast.UnaryOp object at 0x7da20c6c5780> begin[:]
<ast.Raise object at 0x7da20c6c5810> | keyword[def] identifier[parse_gcs_url] ( identifier[gsurl] ):
literal[string]
identifier[parsed_url] = identifier[urlparse] ( identifier[gsurl] )
keyword[if] keyword[not] identifier[parsed_url] . identifier[netloc] :
keyword[raise] identifier[AirflowException] ( literal[string] )
keyword[else] :
identifier[bucket] = identifier[parsed_url] . identifier[netloc]
identifier[blob] = identifier[parsed_url] . identifier[path] . identifier[strip] ( literal[string] )
keyword[return] identifier[bucket] , identifier[blob] | def parse_gcs_url(gsurl):
"""
Given a Google Cloud Storage URL (gs://<bucket>/<blob>), returns a
tuple containing the corresponding bucket and blob.
"""
parsed_url = urlparse(gsurl)
if not parsed_url.netloc:
raise AirflowException('Please provide a bucket name') # depends on [control=['if'], data=[]]
else:
bucket = parsed_url.netloc
blob = parsed_url.path.strip('/')
return (bucket, blob) |
def execute(self, fn, *args, **kwargs):
"""Execute an operation and return the result."""
if not self.asynchronous:
return fn(*args, **kwargs)
raise NotImplementedError | def function[execute, parameter[self, fn]]:
constant[Execute an operation and return the result.]
if <ast.UnaryOp object at 0x7da18dc042e0> begin[:]
return[call[name[fn], parameter[<ast.Starred object at 0x7da18dc05cc0>]]]
<ast.Raise object at 0x7da18dc06740> | keyword[def] identifier[execute] ( identifier[self] , identifier[fn] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[asynchronous] :
keyword[return] identifier[fn] (* identifier[args] ,** identifier[kwargs] )
keyword[raise] identifier[NotImplementedError] | def execute(self, fn, *args, **kwargs):
"""Execute an operation and return the result."""
if not self.asynchronous:
return fn(*args, **kwargs) # depends on [control=['if'], data=[]]
raise NotImplementedError |
def _handle_shutdown_reply(self, msg):
""" Handle shutdown signal, only if from other console.
"""
self.log.debug("shutdown: %s", msg.get('content', ''))
if not self._hidden and not self._is_from_this_session(msg):
if self._local_kernel:
if not msg['content']['restart']:
self.exit_requested.emit(self)
else:
# we just got notified of a restart!
time.sleep(0.25) # wait 1/4 sec to reset
# lest the request for a new prompt
# goes to the old kernel
self.reset()
else: # remote kernel, prompt on Kernel shutdown/reset
title = self.window().windowTitle()
if not msg['content']['restart']:
reply = QtGui.QMessageBox.question(self, title,
"Kernel has been shutdown permanently. "
"Close the Console?",
QtGui.QMessageBox.Yes,QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.exit_requested.emit(self)
else:
# XXX: remove message box in favor of using the
# clear_on_kernel_restart setting?
reply = QtGui.QMessageBox.question(self, title,
"Kernel has been reset. Clear the Console?",
QtGui.QMessageBox.Yes,QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
time.sleep(0.25) # wait 1/4 sec to reset
# lest the request for a new prompt
# goes to the old kernel
self.reset() | def function[_handle_shutdown_reply, parameter[self, msg]]:
constant[ Handle shutdown signal, only if from other console.
]
call[name[self].log.debug, parameter[constant[shutdown: %s], call[name[msg].get, parameter[constant[content], constant[]]]]]
if <ast.BoolOp object at 0x7da18fe90df0> begin[:]
if name[self]._local_kernel begin[:]
if <ast.UnaryOp object at 0x7da18fe90dc0> begin[:]
call[name[self].exit_requested.emit, parameter[name[self]]] | keyword[def] identifier[_handle_shutdown_reply] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[msg] . identifier[get] ( literal[string] , literal[string] ))
keyword[if] keyword[not] identifier[self] . identifier[_hidden] keyword[and] keyword[not] identifier[self] . identifier[_is_from_this_session] ( identifier[msg] ):
keyword[if] identifier[self] . identifier[_local_kernel] :
keyword[if] keyword[not] identifier[msg] [ literal[string] ][ literal[string] ]:
identifier[self] . identifier[exit_requested] . identifier[emit] ( identifier[self] )
keyword[else] :
identifier[time] . identifier[sleep] ( literal[int] )
identifier[self] . identifier[reset] ()
keyword[else] :
identifier[title] = identifier[self] . identifier[window] (). identifier[windowTitle] ()
keyword[if] keyword[not] identifier[msg] [ literal[string] ][ literal[string] ]:
identifier[reply] = identifier[QtGui] . identifier[QMessageBox] . identifier[question] ( identifier[self] , identifier[title] ,
literal[string]
literal[string] ,
identifier[QtGui] . identifier[QMessageBox] . identifier[Yes] , identifier[QtGui] . identifier[QMessageBox] . identifier[No] )
keyword[if] identifier[reply] == identifier[QtGui] . identifier[QMessageBox] . identifier[Yes] :
identifier[self] . identifier[exit_requested] . identifier[emit] ( identifier[self] )
keyword[else] :
identifier[reply] = identifier[QtGui] . identifier[QMessageBox] . identifier[question] ( identifier[self] , identifier[title] ,
literal[string] ,
identifier[QtGui] . identifier[QMessageBox] . identifier[Yes] , identifier[QtGui] . identifier[QMessageBox] . identifier[No] )
keyword[if] identifier[reply] == identifier[QtGui] . identifier[QMessageBox] . identifier[Yes] :
identifier[time] . identifier[sleep] ( literal[int] )
identifier[self] . identifier[reset] () | def _handle_shutdown_reply(self, msg):
""" Handle shutdown signal, only if from other console.
"""
self.log.debug('shutdown: %s', msg.get('content', ''))
if not self._hidden and (not self._is_from_this_session(msg)):
if self._local_kernel:
if not msg['content']['restart']:
self.exit_requested.emit(self) # depends on [control=['if'], data=[]]
else:
# we just got notified of a restart!
time.sleep(0.25) # wait 1/4 sec to reset
# lest the request for a new prompt
# goes to the old kernel
self.reset() # depends on [control=['if'], data=[]]
else: # remote kernel, prompt on Kernel shutdown/reset
title = self.window().windowTitle()
if not msg['content']['restart']:
reply = QtGui.QMessageBox.question(self, title, 'Kernel has been shutdown permanently. Close the Console?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
self.exit_requested.emit(self) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# XXX: remove message box in favor of using the
# clear_on_kernel_restart setting?
reply = QtGui.QMessageBox.question(self, title, 'Kernel has been reset. Clear the Console?', QtGui.QMessageBox.Yes, QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
time.sleep(0.25) # wait 1/4 sec to reset
# lest the request for a new prompt
# goes to the old kernel
self.reset() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def create_key(self, title, key, read_only=False):
"""
:calls: `POST /repos/:owner/:repo/keys <http://developer.github.com/v3/repos/keys>`_
:param title: string
:param key: string
:param read_only: bool
:rtype: :class:`github.RepositoryKey.RepositoryKey`
"""
assert isinstance(title, (str, unicode)), title
assert isinstance(key, (str, unicode)), key
assert isinstance(read_only, bool), read_only
post_parameters = {
"title": title,
"key": key,
"read_only": read_only,
}
headers, data = self._requester.requestJsonAndCheck(
"POST",
self.url + "/keys",
input=post_parameters
)
return github.RepositoryKey.RepositoryKey(self._requester, headers, data, completed=True) | def function[create_key, parameter[self, title, key, read_only]]:
constant[
:calls: `POST /repos/:owner/:repo/keys <http://developer.github.com/v3/repos/keys>`_
:param title: string
:param key: string
:param read_only: bool
:rtype: :class:`github.RepositoryKey.RepositoryKey`
]
assert[call[name[isinstance], parameter[name[title], tuple[[<ast.Name object at 0x7da18c4ce740>, <ast.Name object at 0x7da18c4ccbb0>]]]]]
assert[call[name[isinstance], parameter[name[key], tuple[[<ast.Name object at 0x7da18c4cc940>, <ast.Name object at 0x7da18c4cf790>]]]]]
assert[call[name[isinstance], parameter[name[read_only], name[bool]]]]
variable[post_parameters] assign[=] dictionary[[<ast.Constant object at 0x7da2049617b0>, <ast.Constant object at 0x7da204961600>, <ast.Constant object at 0x7da2049612a0>], [<ast.Name object at 0x7da204961d20>, <ast.Name object at 0x7da204963430>, <ast.Name object at 0x7da2049610f0>]]
<ast.Tuple object at 0x7da204960850> assign[=] call[name[self]._requester.requestJsonAndCheck, parameter[constant[POST], binary_operation[name[self].url + constant[/keys]]]]
return[call[name[github].RepositoryKey.RepositoryKey, parameter[name[self]._requester, name[headers], name[data]]]] | keyword[def] identifier[create_key] ( identifier[self] , identifier[title] , identifier[key] , identifier[read_only] = keyword[False] ):
literal[string]
keyword[assert] identifier[isinstance] ( identifier[title] ,( identifier[str] , identifier[unicode] )), identifier[title]
keyword[assert] identifier[isinstance] ( identifier[key] ,( identifier[str] , identifier[unicode] )), identifier[key]
keyword[assert] identifier[isinstance] ( identifier[read_only] , identifier[bool] ), identifier[read_only]
identifier[post_parameters] ={
literal[string] : identifier[title] ,
literal[string] : identifier[key] ,
literal[string] : identifier[read_only] ,
}
identifier[headers] , identifier[data] = identifier[self] . identifier[_requester] . identifier[requestJsonAndCheck] (
literal[string] ,
identifier[self] . identifier[url] + literal[string] ,
identifier[input] = identifier[post_parameters]
)
keyword[return] identifier[github] . identifier[RepositoryKey] . identifier[RepositoryKey] ( identifier[self] . identifier[_requester] , identifier[headers] , identifier[data] , identifier[completed] = keyword[True] ) | def create_key(self, title, key, read_only=False):
"""
:calls: `POST /repos/:owner/:repo/keys <http://developer.github.com/v3/repos/keys>`_
:param title: string
:param key: string
:param read_only: bool
:rtype: :class:`github.RepositoryKey.RepositoryKey`
"""
assert isinstance(title, (str, unicode)), title
assert isinstance(key, (str, unicode)), key
assert isinstance(read_only, bool), read_only
post_parameters = {'title': title, 'key': key, 'read_only': read_only}
(headers, data) = self._requester.requestJsonAndCheck('POST', self.url + '/keys', input=post_parameters)
return github.RepositoryKey.RepositoryKey(self._requester, headers, data, completed=True) |
def process(self, sched, coro):
"""Add the given coroutine in the scheduler."""
super(AddCoro, self).process(sched, coro)
self.result = sched.add(self.coro, self.args, self.kwargs, self.prio & priority.OP)
if self.prio & priority.CORO:
return self, coro
else:
sched.active.append( (None, coro)) | def function[process, parameter[self, sched, coro]]:
constant[Add the given coroutine in the scheduler.]
call[call[name[super], parameter[name[AddCoro], name[self]]].process, parameter[name[sched], name[coro]]]
name[self].result assign[=] call[name[sched].add, parameter[name[self].coro, name[self].args, name[self].kwargs, binary_operation[name[self].prio <ast.BitAnd object at 0x7da2590d6b60> name[priority].OP]]]
if binary_operation[name[self].prio <ast.BitAnd object at 0x7da2590d6b60> name[priority].CORO] begin[:]
return[tuple[[<ast.Name object at 0x7da18f00f610>, <ast.Name object at 0x7da18f00cfd0>]]] | keyword[def] identifier[process] ( identifier[self] , identifier[sched] , identifier[coro] ):
literal[string]
identifier[super] ( identifier[AddCoro] , identifier[self] ). identifier[process] ( identifier[sched] , identifier[coro] )
identifier[self] . identifier[result] = identifier[sched] . identifier[add] ( identifier[self] . identifier[coro] , identifier[self] . identifier[args] , identifier[self] . identifier[kwargs] , identifier[self] . identifier[prio] & identifier[priority] . identifier[OP] )
keyword[if] identifier[self] . identifier[prio] & identifier[priority] . identifier[CORO] :
keyword[return] identifier[self] , identifier[coro]
keyword[else] :
identifier[sched] . identifier[active] . identifier[append] (( keyword[None] , identifier[coro] )) | def process(self, sched, coro):
"""Add the given coroutine in the scheduler."""
super(AddCoro, self).process(sched, coro)
self.result = sched.add(self.coro, self.args, self.kwargs, self.prio & priority.OP)
if self.prio & priority.CORO:
return (self, coro) # depends on [control=['if'], data=[]]
else:
sched.active.append((None, coro)) |
def process_request(self, unused_request):
"""Called by Django before deciding which view to execute."""
# Compare to the first half of toplevel() in context.py.
tasklets._state.clear_all_pending()
# Create and install a new context.
ctx = tasklets.make_default_context()
tasklets.set_context(ctx) | def function[process_request, parameter[self, unused_request]]:
constant[Called by Django before deciding which view to execute.]
call[name[tasklets]._state.clear_all_pending, parameter[]]
variable[ctx] assign[=] call[name[tasklets].make_default_context, parameter[]]
call[name[tasklets].set_context, parameter[name[ctx]]] | keyword[def] identifier[process_request] ( identifier[self] , identifier[unused_request] ):
literal[string]
identifier[tasklets] . identifier[_state] . identifier[clear_all_pending] ()
identifier[ctx] = identifier[tasklets] . identifier[make_default_context] ()
identifier[tasklets] . identifier[set_context] ( identifier[ctx] ) | def process_request(self, unused_request):
"""Called by Django before deciding which view to execute."""
# Compare to the first half of toplevel() in context.py.
tasklets._state.clear_all_pending()
# Create and install a new context.
ctx = tasklets.make_default_context()
tasklets.set_context(ctx) |
def config(_config=None, **kwargs):
"""
A decorator for setting the default kwargs of `BaseHandler.crawl`.
Any self.crawl with this callback will use this config.
"""
if _config is None:
_config = {}
_config.update(kwargs)
def wrapper(func):
func._config = _config
return func
return wrapper | def function[config, parameter[_config]]:
constant[
A decorator for setting the default kwargs of `BaseHandler.crawl`.
Any self.crawl with this callback will use this config.
]
if compare[name[_config] is constant[None]] begin[:]
variable[_config] assign[=] dictionary[[], []]
call[name[_config].update, parameter[name[kwargs]]]
def function[wrapper, parameter[func]]:
name[func]._config assign[=] name[_config]
return[name[func]]
return[name[wrapper]] | keyword[def] identifier[config] ( identifier[_config] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[_config] keyword[is] keyword[None] :
identifier[_config] ={}
identifier[_config] . identifier[update] ( identifier[kwargs] )
keyword[def] identifier[wrapper] ( identifier[func] ):
identifier[func] . identifier[_config] = identifier[_config]
keyword[return] identifier[func]
keyword[return] identifier[wrapper] | def config(_config=None, **kwargs):
"""
A decorator for setting the default kwargs of `BaseHandler.crawl`.
Any self.crawl with this callback will use this config.
"""
if _config is None:
_config = {} # depends on [control=['if'], data=['_config']]
_config.update(kwargs)
def wrapper(func):
func._config = _config
return func
return wrapper |
def view_create(self, data, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/views#create-view"
api_path = "/api/v2/views.json"
return self.call(api_path, method="POST", data=data, **kwargs) | def function[view_create, parameter[self, data]]:
constant[https://developer.zendesk.com/rest_api/docs/core/views#create-view]
variable[api_path] assign[=] constant[/api/v2/views.json]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[view_create] ( identifier[self] , identifier[data] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] , identifier[data] = identifier[data] ,** identifier[kwargs] ) | def view_create(self, data, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/views#create-view"""
api_path = '/api/v2/views.json'
return self.call(api_path, method='POST', data=data, **kwargs) |
def wait(self, timeout=None):
"""
Waits until all tasks completed or *timeout* seconds passed.
# Raises
Timeout: If the *timeout* is exceeded.
"""
t_start = time.clock()
if not wait_for_condition(self, lambda s: s._tasks == 0, timeout):
raise Timeout | def function[wait, parameter[self, timeout]]:
constant[
Waits until all tasks completed or *timeout* seconds passed.
# Raises
Timeout: If the *timeout* is exceeded.
]
variable[t_start] assign[=] call[name[time].clock, parameter[]]
if <ast.UnaryOp object at 0x7da18dc9b070> begin[:]
<ast.Raise object at 0x7da18dc99fc0> | keyword[def] identifier[wait] ( identifier[self] , identifier[timeout] = keyword[None] ):
literal[string]
identifier[t_start] = identifier[time] . identifier[clock] ()
keyword[if] keyword[not] identifier[wait_for_condition] ( identifier[self] , keyword[lambda] identifier[s] : identifier[s] . identifier[_tasks] == literal[int] , identifier[timeout] ):
keyword[raise] identifier[Timeout] | def wait(self, timeout=None):
"""
Waits until all tasks completed or *timeout* seconds passed.
# Raises
Timeout: If the *timeout* is exceeded.
"""
t_start = time.clock()
if not wait_for_condition(self, lambda s: s._tasks == 0, timeout):
raise Timeout # depends on [control=['if'], data=[]] |
def get(self, block=True, timeout=None):
"""get an item out of the queue
.. note::
if `block` is ``True`` (the default) and the queue is
:meth`empty`, this method will block the current coroutine until
something has been :meth:`put`.
:param block:
whether to block if there is no data yet available (default
``True``)
:type block: bool
:param timeout:
the maximum time in seconds to block waiting for data. with the
default of ``None``, can wait indefinitely. this is unused if
`block` is ``False``.
:type timeout: int, float or None
:raises:
:class:`Empty` if there is no data in the queue and block is
``False``, or `timeout` expires
:returns: something that was previously :meth:`put` in the queue
"""
if not self._data:
if not block:
raise Empty()
current = compat.getcurrent()
waketime = None if timeout is None else time.time() + timeout
if timeout is not None:
scheduler.schedule_at(waketime, current)
self._waiters.append((current, waketime))
scheduler.state.mainloop.switch()
if timeout is not None:
if not scheduler._remove_timer(waketime, current):
self._waiters.remove((current, waketime))
raise Empty()
if self.full() and self._waiters:
scheduler.schedule(self._waiters.popleft()[0])
return self._get() | def function[get, parameter[self, block, timeout]]:
constant[get an item out of the queue
.. note::
if `block` is ``True`` (the default) and the queue is
:meth`empty`, this method will block the current coroutine until
something has been :meth:`put`.
:param block:
whether to block if there is no data yet available (default
``True``)
:type block: bool
:param timeout:
the maximum time in seconds to block waiting for data. with the
default of ``None``, can wait indefinitely. this is unused if
`block` is ``False``.
:type timeout: int, float or None
:raises:
:class:`Empty` if there is no data in the queue and block is
``False``, or `timeout` expires
:returns: something that was previously :meth:`put` in the queue
]
if <ast.UnaryOp object at 0x7da204622fe0> begin[:]
if <ast.UnaryOp object at 0x7da204622c20> begin[:]
<ast.Raise object at 0x7da2046211b0>
variable[current] assign[=] call[name[compat].getcurrent, parameter[]]
variable[waketime] assign[=] <ast.IfExp object at 0x7da204620730>
if compare[name[timeout] is_not constant[None]] begin[:]
call[name[scheduler].schedule_at, parameter[name[waketime], name[current]]]
call[name[self]._waiters.append, parameter[tuple[[<ast.Name object at 0x7da204622cb0>, <ast.Name object at 0x7da18bc73400>]]]]
call[name[scheduler].state.mainloop.switch, parameter[]]
if compare[name[timeout] is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da18fe91a50> begin[:]
call[name[self]._waiters.remove, parameter[tuple[[<ast.Name object at 0x7da18fe926e0>, <ast.Name object at 0x7da18fe93d30>]]]]
<ast.Raise object at 0x7da18fe93be0>
if <ast.BoolOp object at 0x7da18fe90580> begin[:]
call[name[scheduler].schedule, parameter[call[call[name[self]._waiters.popleft, parameter[]]][constant[0]]]]
return[call[name[self]._get, parameter[]]] | keyword[def] identifier[get] ( identifier[self] , identifier[block] = keyword[True] , identifier[timeout] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_data] :
keyword[if] keyword[not] identifier[block] :
keyword[raise] identifier[Empty] ()
identifier[current] = identifier[compat] . identifier[getcurrent] ()
identifier[waketime] = keyword[None] keyword[if] identifier[timeout] keyword[is] keyword[None] keyword[else] identifier[time] . identifier[time] ()+ identifier[timeout]
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
identifier[scheduler] . identifier[schedule_at] ( identifier[waketime] , identifier[current] )
identifier[self] . identifier[_waiters] . identifier[append] (( identifier[current] , identifier[waketime] ))
identifier[scheduler] . identifier[state] . identifier[mainloop] . identifier[switch] ()
keyword[if] identifier[timeout] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[scheduler] . identifier[_remove_timer] ( identifier[waketime] , identifier[current] ):
identifier[self] . identifier[_waiters] . identifier[remove] (( identifier[current] , identifier[waketime] ))
keyword[raise] identifier[Empty] ()
keyword[if] identifier[self] . identifier[full] () keyword[and] identifier[self] . identifier[_waiters] :
identifier[scheduler] . identifier[schedule] ( identifier[self] . identifier[_waiters] . identifier[popleft] ()[ literal[int] ])
keyword[return] identifier[self] . identifier[_get] () | def get(self, block=True, timeout=None):
"""get an item out of the queue
.. note::
if `block` is ``True`` (the default) and the queue is
:meth`empty`, this method will block the current coroutine until
something has been :meth:`put`.
:param block:
whether to block if there is no data yet available (default
``True``)
:type block: bool
:param timeout:
the maximum time in seconds to block waiting for data. with the
default of ``None``, can wait indefinitely. this is unused if
`block` is ``False``.
:type timeout: int, float or None
:raises:
:class:`Empty` if there is no data in the queue and block is
``False``, or `timeout` expires
:returns: something that was previously :meth:`put` in the queue
"""
if not self._data:
if not block:
raise Empty() # depends on [control=['if'], data=[]]
current = compat.getcurrent()
waketime = None if timeout is None else time.time() + timeout
if timeout is not None:
scheduler.schedule_at(waketime, current) # depends on [control=['if'], data=[]]
self._waiters.append((current, waketime))
scheduler.state.mainloop.switch()
if timeout is not None:
if not scheduler._remove_timer(waketime, current):
self._waiters.remove((current, waketime))
raise Empty() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if self.full() and self._waiters:
scheduler.schedule(self._waiters.popleft()[0]) # depends on [control=['if'], data=[]]
return self._get() |
def do_GEOHASHMEMBERS(self, geoh):
"""Return members of a geohash and its neighbors.
GEOHASHMEMBERS u09vej04 [NEIGHBORS 0]"""
geoh, with_neighbors = self._match_option('NEIGHBORS', geoh)
key = compute_geohash_key(geoh, with_neighbors != '0')
if key:
for id_ in DB.smembers(key):
r = Result(id_)
print('{} {}'.format(white(r), blue(r._id))) | def function[do_GEOHASHMEMBERS, parameter[self, geoh]]:
constant[Return members of a geohash and its neighbors.
GEOHASHMEMBERS u09vej04 [NEIGHBORS 0]]
<ast.Tuple object at 0x7da1b26af7f0> assign[=] call[name[self]._match_option, parameter[constant[NEIGHBORS], name[geoh]]]
variable[key] assign[=] call[name[compute_geohash_key], parameter[name[geoh], compare[name[with_neighbors] not_equal[!=] constant[0]]]]
if name[key] begin[:]
for taget[name[id_]] in starred[call[name[DB].smembers, parameter[name[key]]]] begin[:]
variable[r] assign[=] call[name[Result], parameter[name[id_]]]
call[name[print], parameter[call[constant[{} {}].format, parameter[call[name[white], parameter[name[r]]], call[name[blue], parameter[name[r]._id]]]]]] | keyword[def] identifier[do_GEOHASHMEMBERS] ( identifier[self] , identifier[geoh] ):
literal[string]
identifier[geoh] , identifier[with_neighbors] = identifier[self] . identifier[_match_option] ( literal[string] , identifier[geoh] )
identifier[key] = identifier[compute_geohash_key] ( identifier[geoh] , identifier[with_neighbors] != literal[string] )
keyword[if] identifier[key] :
keyword[for] identifier[id_] keyword[in] identifier[DB] . identifier[smembers] ( identifier[key] ):
identifier[r] = identifier[Result] ( identifier[id_] )
identifier[print] ( literal[string] . identifier[format] ( identifier[white] ( identifier[r] ), identifier[blue] ( identifier[r] . identifier[_id] ))) | def do_GEOHASHMEMBERS(self, geoh):
"""Return members of a geohash and its neighbors.
GEOHASHMEMBERS u09vej04 [NEIGHBORS 0]"""
(geoh, with_neighbors) = self._match_option('NEIGHBORS', geoh)
key = compute_geohash_key(geoh, with_neighbors != '0')
if key:
for id_ in DB.smembers(key):
r = Result(id_)
print('{} {}'.format(white(r), blue(r._id))) # depends on [control=['for'], data=['id_']] # depends on [control=['if'], data=[]] |
def _accept(self, listen_socket):
"""
Accept new incoming connection.
"""
conn, addr = listen_socket.accept()
connection = TelnetConnection(conn, addr, self.application, self, encoding=self.encoding)
self.connections.add(connection)
logger.info('New connection %r %r', *addr) | def function[_accept, parameter[self, listen_socket]]:
constant[
Accept new incoming connection.
]
<ast.Tuple object at 0x7da18f00f670> assign[=] call[name[listen_socket].accept, parameter[]]
variable[connection] assign[=] call[name[TelnetConnection], parameter[name[conn], name[addr], name[self].application, name[self]]]
call[name[self].connections.add, parameter[name[connection]]]
call[name[logger].info, parameter[constant[New connection %r %r], <ast.Starred object at 0x7da18f00f340>]] | keyword[def] identifier[_accept] ( identifier[self] , identifier[listen_socket] ):
literal[string]
identifier[conn] , identifier[addr] = identifier[listen_socket] . identifier[accept] ()
identifier[connection] = identifier[TelnetConnection] ( identifier[conn] , identifier[addr] , identifier[self] . identifier[application] , identifier[self] , identifier[encoding] = identifier[self] . identifier[encoding] )
identifier[self] . identifier[connections] . identifier[add] ( identifier[connection] )
identifier[logger] . identifier[info] ( literal[string] ,* identifier[addr] ) | def _accept(self, listen_socket):
"""
Accept new incoming connection.
"""
(conn, addr) = listen_socket.accept()
connection = TelnetConnection(conn, addr, self.application, self, encoding=self.encoding)
self.connections.add(connection)
logger.info('New connection %r %r', *addr) |
def __get_known_node_by_host(self, hostname):
'''
Determine if the node is already known by hostname.
If it is, return it.
'''
for n in self.nodes:
if (n.name == hostname):
return n
return None | def function[__get_known_node_by_host, parameter[self, hostname]]:
constant[
Determine if the node is already known by hostname.
If it is, return it.
]
for taget[name[n]] in starred[name[self].nodes] begin[:]
if compare[name[n].name equal[==] name[hostname]] begin[:]
return[name[n]]
return[constant[None]] | keyword[def] identifier[__get_known_node_by_host] ( identifier[self] , identifier[hostname] ):
literal[string]
keyword[for] identifier[n] keyword[in] identifier[self] . identifier[nodes] :
keyword[if] ( identifier[n] . identifier[name] == identifier[hostname] ):
keyword[return] identifier[n]
keyword[return] keyword[None] | def __get_known_node_by_host(self, hostname):
"""
Determine if the node is already known by hostname.
If it is, return it.
"""
for n in self.nodes:
if n.name == hostname:
return n # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']]
return None |
def button_released(self, key_code, prefix=None):
"""
Called from the controller classes to update the state of this button manager when a button is released.
:internal:
:param key_code:
The code specified when populating Button instance
:param prefix:
Applied to key code if present
"""
if prefix is not None:
state = self.buttons_by_code.get(prefix + str(key_code))
else:
state = self.buttons_by_code.get(key_code)
if state is not None:
state.is_pressed = False
state.last_pressed = None | def function[button_released, parameter[self, key_code, prefix]]:
constant[
Called from the controller classes to update the state of this button manager when a button is released.
:internal:
:param key_code:
The code specified when populating Button instance
:param prefix:
Applied to key code if present
]
if compare[name[prefix] is_not constant[None]] begin[:]
variable[state] assign[=] call[name[self].buttons_by_code.get, parameter[binary_operation[name[prefix] + call[name[str], parameter[name[key_code]]]]]]
if compare[name[state] is_not constant[None]] begin[:]
name[state].is_pressed assign[=] constant[False]
name[state].last_pressed assign[=] constant[None] | keyword[def] identifier[button_released] ( identifier[self] , identifier[key_code] , identifier[prefix] = keyword[None] ):
literal[string]
keyword[if] identifier[prefix] keyword[is] keyword[not] keyword[None] :
identifier[state] = identifier[self] . identifier[buttons_by_code] . identifier[get] ( identifier[prefix] + identifier[str] ( identifier[key_code] ))
keyword[else] :
identifier[state] = identifier[self] . identifier[buttons_by_code] . identifier[get] ( identifier[key_code] )
keyword[if] identifier[state] keyword[is] keyword[not] keyword[None] :
identifier[state] . identifier[is_pressed] = keyword[False]
identifier[state] . identifier[last_pressed] = keyword[None] | def button_released(self, key_code, prefix=None):
"""
Called from the controller classes to update the state of this button manager when a button is released.
:internal:
:param key_code:
The code specified when populating Button instance
:param prefix:
Applied to key code if present
"""
if prefix is not None:
state = self.buttons_by_code.get(prefix + str(key_code)) # depends on [control=['if'], data=['prefix']]
else:
state = self.buttons_by_code.get(key_code)
if state is not None:
state.is_pressed = False
state.last_pressed = None # depends on [control=['if'], data=['state']] |
def random_word(tokens, tokenizer):
"""
Masking some random tokens for Language Model task with probabilities as in the original BERT paper.
:param tokens: list of str, tokenized sentence.
:param tokenizer: Tokenizer, object used for tokenization (we need it's vocab here)
:return: (list of str, list of int), masked tokens and related labels for LM prediction
"""
output_label = []
for i, token in enumerate(tokens):
prob = random.random()
# mask token with 15% probability
if prob < 0.15:
prob /= 0.15
# 80% randomly change token to mask token
if prob < 0.8:
tokens[i] = "[MASK]"
# 10% randomly change token to random token
elif prob < 0.9:
tokens[i] = random.choice(list(tokenizer.vocab.items()))[0]
# -> rest 10% randomly keep current token
# append current token to output (we will predict these later)
try:
output_label.append(tokenizer.vocab[token])
except KeyError:
# For unknown words (should not occur with BPE vocab)
output_label.append(tokenizer.vocab["[UNK]"])
logger.warning("Cannot find token '{}' in vocab. Using [UNK] insetad".format(token))
else:
# no masking token (will be ignored by loss function later)
output_label.append(-1)
return tokens, output_label | def function[random_word, parameter[tokens, tokenizer]]:
constant[
Masking some random tokens for Language Model task with probabilities as in the original BERT paper.
:param tokens: list of str, tokenized sentence.
:param tokenizer: Tokenizer, object used for tokenization (we need it's vocab here)
:return: (list of str, list of int), masked tokens and related labels for LM prediction
]
variable[output_label] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da1b20699f0>, <ast.Name object at 0x7da1b206aa40>]]] in starred[call[name[enumerate], parameter[name[tokens]]]] begin[:]
variable[prob] assign[=] call[name[random].random, parameter[]]
if compare[name[prob] less[<] constant[0.15]] begin[:]
<ast.AugAssign object at 0x7da20c7c9390>
if compare[name[prob] less[<] constant[0.8]] begin[:]
call[name[tokens]][name[i]] assign[=] constant[[MASK]]
<ast.Try object at 0x7da20c7c9210>
return[tuple[[<ast.Name object at 0x7da20c7c86d0>, <ast.Name object at 0x7da20c7c85b0>]]] | keyword[def] identifier[random_word] ( identifier[tokens] , identifier[tokenizer] ):
literal[string]
identifier[output_label] =[]
keyword[for] identifier[i] , identifier[token] keyword[in] identifier[enumerate] ( identifier[tokens] ):
identifier[prob] = identifier[random] . identifier[random] ()
keyword[if] identifier[prob] < literal[int] :
identifier[prob] /= literal[int]
keyword[if] identifier[prob] < literal[int] :
identifier[tokens] [ identifier[i] ]= literal[string]
keyword[elif] identifier[prob] < literal[int] :
identifier[tokens] [ identifier[i] ]= identifier[random] . identifier[choice] ( identifier[list] ( identifier[tokenizer] . identifier[vocab] . identifier[items] ()))[ literal[int] ]
keyword[try] :
identifier[output_label] . identifier[append] ( identifier[tokenizer] . identifier[vocab] [ identifier[token] ])
keyword[except] identifier[KeyError] :
identifier[output_label] . identifier[append] ( identifier[tokenizer] . identifier[vocab] [ literal[string] ])
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[token] ))
keyword[else] :
identifier[output_label] . identifier[append] (- literal[int] )
keyword[return] identifier[tokens] , identifier[output_label] | def random_word(tokens, tokenizer):
"""
Masking some random tokens for Language Model task with probabilities as in the original BERT paper.
:param tokens: list of str, tokenized sentence.
:param tokenizer: Tokenizer, object used for tokenization (we need it's vocab here)
:return: (list of str, list of int), masked tokens and related labels for LM prediction
"""
output_label = []
for (i, token) in enumerate(tokens):
prob = random.random()
# mask token with 15% probability
if prob < 0.15:
prob /= 0.15
# 80% randomly change token to mask token
if prob < 0.8:
tokens[i] = '[MASK]' # depends on [control=['if'], data=[]]
# 10% randomly change token to random token
elif prob < 0.9:
tokens[i] = random.choice(list(tokenizer.vocab.items()))[0] # depends on [control=['if'], data=[]]
# -> rest 10% randomly keep current token
# append current token to output (we will predict these later)
try:
output_label.append(tokenizer.vocab[token]) # depends on [control=['try'], data=[]]
except KeyError:
# For unknown words (should not occur with BPE vocab)
output_label.append(tokenizer.vocab['[UNK]'])
logger.warning("Cannot find token '{}' in vocab. Using [UNK] insetad".format(token)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['prob']]
else:
# no masking token (will be ignored by loss function later)
output_label.append(-1) # depends on [control=['for'], data=[]]
return (tokens, output_label) |
def parse_unit(self, node):
"""
Parses <Unit>
@param node: Node containing the <Unit> element
@type node: xml.etree.Element
@raise ParseError: When the name is not a string or the unit
specfications are incorrect.
@raise ModelError: When the unit refers to an undefined dimension.
"""
try:
symbol = node.lattrib['symbol']
dimension = node.lattrib['dimension']
except:
self.raise_error('Unit must have a symbol and dimension.')
if 'power' in node.lattrib:
power = int(node.lattrib['power'])
else:
power = 0
if 'name' in node.lattrib:
name = node.lattrib['name']
else:
name = ''
if 'scale' in node.lattrib:
scale = float(node.lattrib['scale'])
else:
scale = 1.0
if 'offset' in node.lattrib:
offset = float(node.lattrib['offset'])
else:
offset = 0.0
self.model.add_unit(Unit(name, symbol, dimension, power, scale, offset)) | def function[parse_unit, parameter[self, node]]:
constant[
Parses <Unit>
@param node: Node containing the <Unit> element
@type node: xml.etree.Element
@raise ParseError: When the name is not a string or the unit
specfications are incorrect.
@raise ModelError: When the unit refers to an undefined dimension.
]
<ast.Try object at 0x7da18f813730>
if compare[constant[power] in name[node].lattrib] begin[:]
variable[power] assign[=] call[name[int], parameter[call[name[node].lattrib][constant[power]]]]
if compare[constant[name] in name[node].lattrib] begin[:]
variable[name] assign[=] call[name[node].lattrib][constant[name]]
if compare[constant[scale] in name[node].lattrib] begin[:]
variable[scale] assign[=] call[name[float], parameter[call[name[node].lattrib][constant[scale]]]]
if compare[constant[offset] in name[node].lattrib] begin[:]
variable[offset] assign[=] call[name[float], parameter[call[name[node].lattrib][constant[offset]]]]
call[name[self].model.add_unit, parameter[call[name[Unit], parameter[name[name], name[symbol], name[dimension], name[power], name[scale], name[offset]]]]] | keyword[def] identifier[parse_unit] ( identifier[self] , identifier[node] ):
literal[string]
keyword[try] :
identifier[symbol] = identifier[node] . identifier[lattrib] [ literal[string] ]
identifier[dimension] = identifier[node] . identifier[lattrib] [ literal[string] ]
keyword[except] :
identifier[self] . identifier[raise_error] ( literal[string] )
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[power] = identifier[int] ( identifier[node] . identifier[lattrib] [ literal[string] ])
keyword[else] :
identifier[power] = literal[int]
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[name] = identifier[node] . identifier[lattrib] [ literal[string] ]
keyword[else] :
identifier[name] = literal[string]
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[scale] = identifier[float] ( identifier[node] . identifier[lattrib] [ literal[string] ])
keyword[else] :
identifier[scale] = literal[int]
keyword[if] literal[string] keyword[in] identifier[node] . identifier[lattrib] :
identifier[offset] = identifier[float] ( identifier[node] . identifier[lattrib] [ literal[string] ])
keyword[else] :
identifier[offset] = literal[int]
identifier[self] . identifier[model] . identifier[add_unit] ( identifier[Unit] ( identifier[name] , identifier[symbol] , identifier[dimension] , identifier[power] , identifier[scale] , identifier[offset] )) | def parse_unit(self, node):
"""
Parses <Unit>
@param node: Node containing the <Unit> element
@type node: xml.etree.Element
@raise ParseError: When the name is not a string or the unit
specfications are incorrect.
@raise ModelError: When the unit refers to an undefined dimension.
"""
try:
symbol = node.lattrib['symbol']
dimension = node.lattrib['dimension'] # depends on [control=['try'], data=[]]
except:
self.raise_error('Unit must have a symbol and dimension.') # depends on [control=['except'], data=[]]
if 'power' in node.lattrib:
power = int(node.lattrib['power']) # depends on [control=['if'], data=[]]
else:
power = 0
if 'name' in node.lattrib:
name = node.lattrib['name'] # depends on [control=['if'], data=[]]
else:
name = ''
if 'scale' in node.lattrib:
scale = float(node.lattrib['scale']) # depends on [control=['if'], data=[]]
else:
scale = 1.0
if 'offset' in node.lattrib:
offset = float(node.lattrib['offset']) # depends on [control=['if'], data=[]]
else:
offset = 0.0
self.model.add_unit(Unit(name, symbol, dimension, power, scale, offset)) |
def select_regexes(strings, regexes):
"""
select subset of strings matching a regex
treats strings as a set
"""
strings = set(strings)
select = set()
if isinstance(strings, collections.Iterable):
for r in regexes:
s = set(filter(re.compile('^' + r + '$').search, strings))
strings -= s
select |= s
return select
else:
raise ValueError("exclude should be iterable") | def function[select_regexes, parameter[strings, regexes]]:
constant[
select subset of strings matching a regex
treats strings as a set
]
variable[strings] assign[=] call[name[set], parameter[name[strings]]]
variable[select] assign[=] call[name[set], parameter[]]
if call[name[isinstance], parameter[name[strings], name[collections].Iterable]] begin[:]
for taget[name[r]] in starred[name[regexes]] begin[:]
variable[s] assign[=] call[name[set], parameter[call[name[filter], parameter[call[name[re].compile, parameter[binary_operation[binary_operation[constant[^] + name[r]] + constant[$]]]].search, name[strings]]]]]
<ast.AugAssign object at 0x7da1b2353010>
<ast.AugAssign object at 0x7da1b2353340>
return[name[select]] | keyword[def] identifier[select_regexes] ( identifier[strings] , identifier[regexes] ):
literal[string]
identifier[strings] = identifier[set] ( identifier[strings] )
identifier[select] = identifier[set] ()
keyword[if] identifier[isinstance] ( identifier[strings] , identifier[collections] . identifier[Iterable] ):
keyword[for] identifier[r] keyword[in] identifier[regexes] :
identifier[s] = identifier[set] ( identifier[filter] ( identifier[re] . identifier[compile] ( literal[string] + identifier[r] + literal[string] ). identifier[search] , identifier[strings] ))
identifier[strings] -= identifier[s]
identifier[select] |= identifier[s]
keyword[return] identifier[select]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] ) | def select_regexes(strings, regexes):
"""
select subset of strings matching a regex
treats strings as a set
"""
strings = set(strings)
select = set()
if isinstance(strings, collections.Iterable):
for r in regexes:
s = set(filter(re.compile('^' + r + '$').search, strings))
strings -= s
select |= s # depends on [control=['for'], data=['r']]
return select # depends on [control=['if'], data=[]]
else:
raise ValueError('exclude should be iterable') |
def wrap_resource(self, pool, resource_wrapper):
"""
Return a resource wrapped in ``resource_wrapper``.
:param pool: A pool instance.
:type pool: :class:`CuttlePool`
:param resource_wrapper: A wrapper class for the resource.
:type resource_wrapper: :class:`Resource`
:return: A wrapped resource.
:rtype: :class:`Resource`
"""
resource = resource_wrapper(self.resource, pool)
self._weakref = weakref.ref(resource)
return resource | def function[wrap_resource, parameter[self, pool, resource_wrapper]]:
constant[
Return a resource wrapped in ``resource_wrapper``.
:param pool: A pool instance.
:type pool: :class:`CuttlePool`
:param resource_wrapper: A wrapper class for the resource.
:type resource_wrapper: :class:`Resource`
:return: A wrapped resource.
:rtype: :class:`Resource`
]
variable[resource] assign[=] call[name[resource_wrapper], parameter[name[self].resource, name[pool]]]
name[self]._weakref assign[=] call[name[weakref].ref, parameter[name[resource]]]
return[name[resource]] | keyword[def] identifier[wrap_resource] ( identifier[self] , identifier[pool] , identifier[resource_wrapper] ):
literal[string]
identifier[resource] = identifier[resource_wrapper] ( identifier[self] . identifier[resource] , identifier[pool] )
identifier[self] . identifier[_weakref] = identifier[weakref] . identifier[ref] ( identifier[resource] )
keyword[return] identifier[resource] | def wrap_resource(self, pool, resource_wrapper):
"""
Return a resource wrapped in ``resource_wrapper``.
:param pool: A pool instance.
:type pool: :class:`CuttlePool`
:param resource_wrapper: A wrapper class for the resource.
:type resource_wrapper: :class:`Resource`
:return: A wrapped resource.
:rtype: :class:`Resource`
"""
resource = resource_wrapper(self.resource, pool)
self._weakref = weakref.ref(resource)
return resource |
def subscribe_param():
"""Print value of parameter"""
def print_data(data):
for parameter in data.parameters:
print(parameter)
processor.create_parameter_subscription('/YSS/SIMULATOR/BatteryVoltage2',
on_data=print_data) | def function[subscribe_param, parameter[]]:
constant[Print value of parameter]
def function[print_data, parameter[data]]:
for taget[name[parameter]] in starred[name[data].parameters] begin[:]
call[name[print], parameter[name[parameter]]]
call[name[processor].create_parameter_subscription, parameter[constant[/YSS/SIMULATOR/BatteryVoltage2]]] | keyword[def] identifier[subscribe_param] ():
literal[string]
keyword[def] identifier[print_data] ( identifier[data] ):
keyword[for] identifier[parameter] keyword[in] identifier[data] . identifier[parameters] :
identifier[print] ( identifier[parameter] )
identifier[processor] . identifier[create_parameter_subscription] ( literal[string] ,
identifier[on_data] = identifier[print_data] ) | def subscribe_param():
"""Print value of parameter"""
def print_data(data):
for parameter in data.parameters:
print(parameter) # depends on [control=['for'], data=['parameter']]
processor.create_parameter_subscription('/YSS/SIMULATOR/BatteryVoltage2', on_data=print_data) |
def _hijack_gtk(self):
"""Hijack a few key functions in GTK for IPython integration.
Modifies pyGTK's main and main_quit with a dummy so user code does not
block IPython. This allows us to use %run to run arbitrary pygtk
scripts from a long-lived IPython session, and when they attempt to
start or stop
Returns
-------
The original functions that have been hijacked:
- gtk.main
- gtk.main_quit
"""
def dummy(*args, **kw):
pass
# save and trap main and main_quit from gtk
orig_main, gtk.main = gtk.main, dummy
orig_main_quit, gtk.main_quit = gtk.main_quit, dummy
return orig_main, orig_main_quit | def function[_hijack_gtk, parameter[self]]:
constant[Hijack a few key functions in GTK for IPython integration.
Modifies pyGTK's main and main_quit with a dummy so user code does not
block IPython. This allows us to use %run to run arbitrary pygtk
scripts from a long-lived IPython session, and when they attempt to
start or stop
Returns
-------
The original functions that have been hijacked:
- gtk.main
- gtk.main_quit
]
def function[dummy, parameter[]]:
pass
<ast.Tuple object at 0x7da18f812e00> assign[=] tuple[[<ast.Attribute object at 0x7da1b26ad8d0>, <ast.Name object at 0x7da1b26ad1b0>]]
<ast.Tuple object at 0x7da1b26af4f0> assign[=] tuple[[<ast.Attribute object at 0x7da1b26acc10>, <ast.Name object at 0x7da1b26ac8e0>]]
return[tuple[[<ast.Name object at 0x7da1b26ad0f0>, <ast.Name object at 0x7da1b26ad450>]]] | keyword[def] identifier[_hijack_gtk] ( identifier[self] ):
literal[string]
keyword[def] identifier[dummy] (* identifier[args] ,** identifier[kw] ):
keyword[pass]
identifier[orig_main] , identifier[gtk] . identifier[main] = identifier[gtk] . identifier[main] , identifier[dummy]
identifier[orig_main_quit] , identifier[gtk] . identifier[main_quit] = identifier[gtk] . identifier[main_quit] , identifier[dummy]
keyword[return] identifier[orig_main] , identifier[orig_main_quit] | def _hijack_gtk(self):
"""Hijack a few key functions in GTK for IPython integration.
Modifies pyGTK's main and main_quit with a dummy so user code does not
block IPython. This allows us to use %run to run arbitrary pygtk
scripts from a long-lived IPython session, and when they attempt to
start or stop
Returns
-------
The original functions that have been hijacked:
- gtk.main
- gtk.main_quit
"""
def dummy(*args, **kw):
pass
# save and trap main and main_quit from gtk
(orig_main, gtk.main) = (gtk.main, dummy)
(orig_main_quit, gtk.main_quit) = (gtk.main_quit, dummy)
return (orig_main, orig_main_quit) |
def generate_jupyter_js(self, cyjs_style=None, cyjs_layout=None):
"""Generate Javascript from a template to run in Jupyter notebooks.
Parameters
----------
cyjs_style : Optional[dict]
A dict that sets CytoscapeJS style as specified in
https://github.com/cytoscape/cytoscape.js/blob/master/documentation/md/style.md.
cyjs_layout : Optional[dict]
A dict that sets CytoscapeJS
`layout parameters <http://js.cytoscape.org/#core/layout>`_.
Returns
-------
str
A Javascript string to be rendered in a Jupyter notebook cell.
"""
# First, export the CAG to CyJS
cyjs_elements = self.export_to_cytoscapejs()
# Load the Javascript template
tempf = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'cag_template.js')
with open(tempf, 'r') as fh:
template = fh.read()
# Load the default style and layout
stylef = os.path.join(os.path.dirname(os.path.abspath(__file__)),
'cag_style.json')
with open(stylef, 'r') as fh:
style = json.load(fh)
# Apply style and layout only if arg wasn't passed in
if cyjs_style is None:
cyjs_style = style['style']
if cyjs_layout is None:
cyjs_layout = style['layout']
# Now fill in the template
formatted_args = tuple(json.dumps(x, indent=2) for x in
(cyjs_elements, cyjs_style, cyjs_layout))
js_str = template % formatted_args
return js_str | def function[generate_jupyter_js, parameter[self, cyjs_style, cyjs_layout]]:
constant[Generate Javascript from a template to run in Jupyter notebooks.
Parameters
----------
cyjs_style : Optional[dict]
A dict that sets CytoscapeJS style as specified in
https://github.com/cytoscape/cytoscape.js/blob/master/documentation/md/style.md.
cyjs_layout : Optional[dict]
A dict that sets CytoscapeJS
`layout parameters <http://js.cytoscape.org/#core/layout>`_.
Returns
-------
str
A Javascript string to be rendered in a Jupyter notebook cell.
]
variable[cyjs_elements] assign[=] call[name[self].export_to_cytoscapejs, parameter[]]
variable[tempf] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[cag_template.js]]]
with call[name[open], parameter[name[tempf], constant[r]]] begin[:]
variable[template] assign[=] call[name[fh].read, parameter[]]
variable[stylef] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[call[name[os].path.abspath, parameter[name[__file__]]]]], constant[cag_style.json]]]
with call[name[open], parameter[name[stylef], constant[r]]] begin[:]
variable[style] assign[=] call[name[json].load, parameter[name[fh]]]
if compare[name[cyjs_style] is constant[None]] begin[:]
variable[cyjs_style] assign[=] call[name[style]][constant[style]]
if compare[name[cyjs_layout] is constant[None]] begin[:]
variable[cyjs_layout] assign[=] call[name[style]][constant[layout]]
variable[formatted_args] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da207f01420>]]
variable[js_str] assign[=] binary_operation[name[template] <ast.Mod object at 0x7da2590d6920> name[formatted_args]]
return[name[js_str]] | keyword[def] identifier[generate_jupyter_js] ( identifier[self] , identifier[cyjs_style] = keyword[None] , identifier[cyjs_layout] = keyword[None] ):
literal[string]
identifier[cyjs_elements] = identifier[self] . identifier[export_to_cytoscapejs] ()
identifier[tempf] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )),
literal[string] )
keyword[with] identifier[open] ( identifier[tempf] , literal[string] ) keyword[as] identifier[fh] :
identifier[template] = identifier[fh] . identifier[read] ()
identifier[stylef] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[__file__] )),
literal[string] )
keyword[with] identifier[open] ( identifier[stylef] , literal[string] ) keyword[as] identifier[fh] :
identifier[style] = identifier[json] . identifier[load] ( identifier[fh] )
keyword[if] identifier[cyjs_style] keyword[is] keyword[None] :
identifier[cyjs_style] = identifier[style] [ literal[string] ]
keyword[if] identifier[cyjs_layout] keyword[is] keyword[None] :
identifier[cyjs_layout] = identifier[style] [ literal[string] ]
identifier[formatted_args] = identifier[tuple] ( identifier[json] . identifier[dumps] ( identifier[x] , identifier[indent] = literal[int] ) keyword[for] identifier[x] keyword[in]
( identifier[cyjs_elements] , identifier[cyjs_style] , identifier[cyjs_layout] ))
identifier[js_str] = identifier[template] % identifier[formatted_args]
keyword[return] identifier[js_str] | def generate_jupyter_js(self, cyjs_style=None, cyjs_layout=None):
"""Generate Javascript from a template to run in Jupyter notebooks.
Parameters
----------
cyjs_style : Optional[dict]
A dict that sets CytoscapeJS style as specified in
https://github.com/cytoscape/cytoscape.js/blob/master/documentation/md/style.md.
cyjs_layout : Optional[dict]
A dict that sets CytoscapeJS
`layout parameters <http://js.cytoscape.org/#core/layout>`_.
Returns
-------
str
A Javascript string to be rendered in a Jupyter notebook cell.
"""
# First, export the CAG to CyJS
cyjs_elements = self.export_to_cytoscapejs()
# Load the Javascript template
tempf = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'cag_template.js')
with open(tempf, 'r') as fh:
template = fh.read() # depends on [control=['with'], data=['fh']]
# Load the default style and layout
stylef = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'cag_style.json')
with open(stylef, 'r') as fh:
style = json.load(fh) # depends on [control=['with'], data=['fh']]
# Apply style and layout only if arg wasn't passed in
if cyjs_style is None:
cyjs_style = style['style'] # depends on [control=['if'], data=['cyjs_style']]
if cyjs_layout is None:
cyjs_layout = style['layout'] # depends on [control=['if'], data=['cyjs_layout']]
# Now fill in the template
formatted_args = tuple((json.dumps(x, indent=2) for x in (cyjs_elements, cyjs_style, cyjs_layout)))
js_str = template % formatted_args
return js_str |
def server_main(loop, path):
"""Run in the client after the fork."""
loop.fork()
logger.debug('forked function')
sigintwatcher = pyev.Signal(signal.SIGINT, loop, lambda watcher, events: logger.info('interrupt ignored'))
sigintwatcher.start()
sigtermwatcher = pyev.Signal(signal.SIGTERM, loop, server_stop)
sigtermwatcher.start()
adder = AdderService()
dispatcher = ObjectDispatch(adder)
pickle_factory = PickleProtocolFactory(dispatcher)
pickle_server = UnixServer(loop, pickle_factory, path)
pickle_server.start()
msgpack_factory = MsgPackProtocolFactory(dispatcher)
msgpack_server = UnixServer(loop, msgpack_factory, path + '_mp')
msgpack_server.start()
logger.debug('running server loop')
import cProfile
cProfile.runctx('loop.loop()', None, {'loop':loop}, 'server_profile')
logger.debug('server unlooped') | def function[server_main, parameter[loop, path]]:
constant[Run in the client after the fork.]
call[name[loop].fork, parameter[]]
call[name[logger].debug, parameter[constant[forked function]]]
variable[sigintwatcher] assign[=] call[name[pyev].Signal, parameter[name[signal].SIGINT, name[loop], <ast.Lambda object at 0x7da1b09ee530>]]
call[name[sigintwatcher].start, parameter[]]
variable[sigtermwatcher] assign[=] call[name[pyev].Signal, parameter[name[signal].SIGTERM, name[loop], name[server_stop]]]
call[name[sigtermwatcher].start, parameter[]]
variable[adder] assign[=] call[name[AdderService], parameter[]]
variable[dispatcher] assign[=] call[name[ObjectDispatch], parameter[name[adder]]]
variable[pickle_factory] assign[=] call[name[PickleProtocolFactory], parameter[name[dispatcher]]]
variable[pickle_server] assign[=] call[name[UnixServer], parameter[name[loop], name[pickle_factory], name[path]]]
call[name[pickle_server].start, parameter[]]
variable[msgpack_factory] assign[=] call[name[MsgPackProtocolFactory], parameter[name[dispatcher]]]
variable[msgpack_server] assign[=] call[name[UnixServer], parameter[name[loop], name[msgpack_factory], binary_operation[name[path] + constant[_mp]]]]
call[name[msgpack_server].start, parameter[]]
call[name[logger].debug, parameter[constant[running server loop]]]
import module[cProfile]
call[name[cProfile].runctx, parameter[constant[loop.loop()], constant[None], dictionary[[<ast.Constant object at 0x7da1b09b91e0>], [<ast.Name object at 0x7da1b09bbaf0>]], constant[server_profile]]]
call[name[logger].debug, parameter[constant[server unlooped]]] | keyword[def] identifier[server_main] ( identifier[loop] , identifier[path] ):
literal[string]
identifier[loop] . identifier[fork] ()
identifier[logger] . identifier[debug] ( literal[string] )
identifier[sigintwatcher] = identifier[pyev] . identifier[Signal] ( identifier[signal] . identifier[SIGINT] , identifier[loop] , keyword[lambda] identifier[watcher] , identifier[events] : identifier[logger] . identifier[info] ( literal[string] ))
identifier[sigintwatcher] . identifier[start] ()
identifier[sigtermwatcher] = identifier[pyev] . identifier[Signal] ( identifier[signal] . identifier[SIGTERM] , identifier[loop] , identifier[server_stop] )
identifier[sigtermwatcher] . identifier[start] ()
identifier[adder] = identifier[AdderService] ()
identifier[dispatcher] = identifier[ObjectDispatch] ( identifier[adder] )
identifier[pickle_factory] = identifier[PickleProtocolFactory] ( identifier[dispatcher] )
identifier[pickle_server] = identifier[UnixServer] ( identifier[loop] , identifier[pickle_factory] , identifier[path] )
identifier[pickle_server] . identifier[start] ()
identifier[msgpack_factory] = identifier[MsgPackProtocolFactory] ( identifier[dispatcher] )
identifier[msgpack_server] = identifier[UnixServer] ( identifier[loop] , identifier[msgpack_factory] , identifier[path] + literal[string] )
identifier[msgpack_server] . identifier[start] ()
identifier[logger] . identifier[debug] ( literal[string] )
keyword[import] identifier[cProfile]
identifier[cProfile] . identifier[runctx] ( literal[string] , keyword[None] ,{ literal[string] : identifier[loop] }, literal[string] )
identifier[logger] . identifier[debug] ( literal[string] ) | def server_main(loop, path):
"""Run in the client after the fork."""
loop.fork()
logger.debug('forked function')
sigintwatcher = pyev.Signal(signal.SIGINT, loop, lambda watcher, events: logger.info('interrupt ignored'))
sigintwatcher.start()
sigtermwatcher = pyev.Signal(signal.SIGTERM, loop, server_stop)
sigtermwatcher.start()
adder = AdderService()
dispatcher = ObjectDispatch(adder)
pickle_factory = PickleProtocolFactory(dispatcher)
pickle_server = UnixServer(loop, pickle_factory, path)
pickle_server.start()
msgpack_factory = MsgPackProtocolFactory(dispatcher)
msgpack_server = UnixServer(loop, msgpack_factory, path + '_mp')
msgpack_server.start()
logger.debug('running server loop')
import cProfile
cProfile.runctx('loop.loop()', None, {'loop': loop}, 'server_profile')
logger.debug('server unlooped') |
def fetch(self):
"""
Fetch & return a new `SSHKey` object representing the SSH key's current
state
:rtype: SSHKey
:raises DOAPIError: if the API endpoint replies with an error (e.g., if
the SSH key no longer exists)
"""
api = self.doapi_manager
return api._ssh_key(api.request(self.url)["ssh_key"]) | def function[fetch, parameter[self]]:
constant[
Fetch & return a new `SSHKey` object representing the SSH key's current
state
:rtype: SSHKey
:raises DOAPIError: if the API endpoint replies with an error (e.g., if
the SSH key no longer exists)
]
variable[api] assign[=] name[self].doapi_manager
return[call[name[api]._ssh_key, parameter[call[call[name[api].request, parameter[name[self].url]]][constant[ssh_key]]]]] | keyword[def] identifier[fetch] ( identifier[self] ):
literal[string]
identifier[api] = identifier[self] . identifier[doapi_manager]
keyword[return] identifier[api] . identifier[_ssh_key] ( identifier[api] . identifier[request] ( identifier[self] . identifier[url] )[ literal[string] ]) | def fetch(self):
"""
Fetch & return a new `SSHKey` object representing the SSH key's current
state
:rtype: SSHKey
:raises DOAPIError: if the API endpoint replies with an error (e.g., if
the SSH key no longer exists)
"""
api = self.doapi_manager
return api._ssh_key(api.request(self.url)['ssh_key']) |
def apply_update(self, value, index):
"""
Record an opendnp3 data value (Analog, Binary, etc.) in the outstation's database.
The data value gets sent to the Master as a side-effect.
:param value: An instance of Analog, Binary, or another opendnp3 data value.
:param index: (integer) Index of the data definition in the opendnp3 database.
"""
_log.debug('Recording {} measurement, index={}, value={}'.format(type(value).__name__, index, value.value))
builder = asiodnp3.UpdateBuilder()
builder.Update(value, index)
update = builder.Build()
OutstationApplication.get_outstation().Apply(update) | def function[apply_update, parameter[self, value, index]]:
constant[
Record an opendnp3 data value (Analog, Binary, etc.) in the outstation's database.
The data value gets sent to the Master as a side-effect.
:param value: An instance of Analog, Binary, or another opendnp3 data value.
:param index: (integer) Index of the data definition in the opendnp3 database.
]
call[name[_log].debug, parameter[call[constant[Recording {} measurement, index={}, value={}].format, parameter[call[name[type], parameter[name[value]]].__name__, name[index], name[value].value]]]]
variable[builder] assign[=] call[name[asiodnp3].UpdateBuilder, parameter[]]
call[name[builder].Update, parameter[name[value], name[index]]]
variable[update] assign[=] call[name[builder].Build, parameter[]]
call[call[name[OutstationApplication].get_outstation, parameter[]].Apply, parameter[name[update]]] | keyword[def] identifier[apply_update] ( identifier[self] , identifier[value] , identifier[index] ):
literal[string]
identifier[_log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[type] ( identifier[value] ). identifier[__name__] , identifier[index] , identifier[value] . identifier[value] ))
identifier[builder] = identifier[asiodnp3] . identifier[UpdateBuilder] ()
identifier[builder] . identifier[Update] ( identifier[value] , identifier[index] )
identifier[update] = identifier[builder] . identifier[Build] ()
identifier[OutstationApplication] . identifier[get_outstation] (). identifier[Apply] ( identifier[update] ) | def apply_update(self, value, index):
"""
Record an opendnp3 data value (Analog, Binary, etc.) in the outstation's database.
The data value gets sent to the Master as a side-effect.
:param value: An instance of Analog, Binary, or another opendnp3 data value.
:param index: (integer) Index of the data definition in the opendnp3 database.
"""
_log.debug('Recording {} measurement, index={}, value={}'.format(type(value).__name__, index, value.value))
builder = asiodnp3.UpdateBuilder()
builder.Update(value, index)
update = builder.Build()
OutstationApplication.get_outstation().Apply(update) |
def set_buffer_limits(self, high=None, low=None):
"""Set the low and high watermarks for the read buffer."""
if high is None:
high = self.default_buffer_size
if low is None:
low = high // 2
self._buffer_high = high
self._buffer_low = low | def function[set_buffer_limits, parameter[self, high, low]]:
constant[Set the low and high watermarks for the read buffer.]
if compare[name[high] is constant[None]] begin[:]
variable[high] assign[=] name[self].default_buffer_size
if compare[name[low] is constant[None]] begin[:]
variable[low] assign[=] binary_operation[name[high] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]]
name[self]._buffer_high assign[=] name[high]
name[self]._buffer_low assign[=] name[low] | keyword[def] identifier[set_buffer_limits] ( identifier[self] , identifier[high] = keyword[None] , identifier[low] = keyword[None] ):
literal[string]
keyword[if] identifier[high] keyword[is] keyword[None] :
identifier[high] = identifier[self] . identifier[default_buffer_size]
keyword[if] identifier[low] keyword[is] keyword[None] :
identifier[low] = identifier[high] // literal[int]
identifier[self] . identifier[_buffer_high] = identifier[high]
identifier[self] . identifier[_buffer_low] = identifier[low] | def set_buffer_limits(self, high=None, low=None):
"""Set the low and high watermarks for the read buffer."""
if high is None:
high = self.default_buffer_size # depends on [control=['if'], data=['high']]
if low is None:
low = high // 2 # depends on [control=['if'], data=['low']]
self._buffer_high = high
self._buffer_low = low |
def find_ip_address(ipa_id=None, ipa_fqdn=None, ipa_ip_address=None, ipa_subnet_id=None, ipa_osi_id=None):
"""
find the IP Address (ipa) according ipa id (prioritary) or ipa ipAddress
:param ipa_id: the IP Address id
:param ipa_ip_address: the IP Address
:return: found IP Address or None if not found
"""
LOGGER.debug("IPAddressService.find_ip_address")
if (ipa_id is None or not ipa_id) and (ipa_fqdn is None or not ipa_fqdn) and \
((ipa_ip_address is None or not ipa_ip_address) and
((ipa_subnet_id is None or not ipa_subnet_id) or (ipa_osi_id is None or not ipa_osi_id))):
raise exceptions.ArianeCallParametersError('id and fqdn and (ip_address,(ip_subnet_id|ip_osi_id))')
if (ipa_id is not None and ipa_id) and \
((ipa_fqdn is not None and ipa_fqdn) or (ipa_ip_address is not None and ipa_ip_address)):
LOGGER.warn('IPAddressService.find_ip_address - Both id and (fqdn or ipAddress) are defined. '
'Will give you search on id.')
ipa_fqdn = None
ipa_ip_address = None
ipa_osi_id = None
ipa_subnet_id = None
if (ipa_id is None or not ipa_id) and (ipa_fqdn is not None and ipa_fqdn) and \
(ipa_ip_address is not None and ipa_ip_address):
LOGGER.warn('IPAddressService.find_ip_address - Both fqdn and ipAddress are defined. '
'Will give you search on fqdn.')
ipa_ip_address = None
ipa_osi_id = None
ipa_subnet_id = None
params = None
if ipa_id is not None and ipa_id:
params = {'id': ipa_id}
elif ipa_fqdn is not None and ipa_fqdn:
params = {'fqdn': ipa_fqdn}
elif (ipa_ip_address is not None and ipa_ip_address) and (ipa_subnet_id is not None and ipa_subnet_id):
params = {'ipAddress': ipa_ip_address, 'subnetID': ipa_subnet_id}
elif (ipa_ip_address is not None and ipa_ip_address) and (ipa_osi_id is not None and ipa_osi_id):
params = {'ipAddress': ipa_ip_address, 'osiID': ipa_osi_id}
ret = None
if params is not None:
args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
response = IPAddressService.requester.call(args)
if response.rc == 0:
ret = IPAddress.json_2_ip_address(response.response_content)
elif response.rc != 404:
err_msg = 'IPAddressService.find_ip_address - Problem while finding IP Address (id:' + \
str(ipa_id) + ', ipAddress:' + str(ipa_ip_address) \
+ '). Reason: ' + str(response.response_content) + '-' + str(response.error_message) + \
" (" + str(response.rc) + ")"
LOGGER.warning(
err_msg
)
return ret | def function[find_ip_address, parameter[ipa_id, ipa_fqdn, ipa_ip_address, ipa_subnet_id, ipa_osi_id]]:
constant[
find the IP Address (ipa) according ipa id (prioritary) or ipa ipAddress
:param ipa_id: the IP Address id
:param ipa_ip_address: the IP Address
:return: found IP Address or None if not found
]
call[name[LOGGER].debug, parameter[constant[IPAddressService.find_ip_address]]]
if <ast.BoolOp object at 0x7da1b14c4dc0> begin[:]
<ast.Raise object at 0x7da1b14c5630>
if <ast.BoolOp object at 0x7da1b14c6d70> begin[:]
call[name[LOGGER].warn, parameter[constant[IPAddressService.find_ip_address - Both id and (fqdn or ipAddress) are defined. Will give you search on id.]]]
variable[ipa_fqdn] assign[=] constant[None]
variable[ipa_ip_address] assign[=] constant[None]
variable[ipa_osi_id] assign[=] constant[None]
variable[ipa_subnet_id] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b14c7250> begin[:]
call[name[LOGGER].warn, parameter[constant[IPAddressService.find_ip_address - Both fqdn and ipAddress are defined. Will give you search on fqdn.]]]
variable[ipa_ip_address] assign[=] constant[None]
variable[ipa_osi_id] assign[=] constant[None]
variable[ipa_subnet_id] assign[=] constant[None]
variable[params] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b14c47c0> begin[:]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b14c7ca0>], [<ast.Name object at 0x7da1b14c6530>]]
variable[ret] assign[=] constant[None]
if compare[name[params] is_not constant[None]] begin[:]
variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b14c4fd0>, <ast.Constant object at 0x7da1b14c7760>, <ast.Constant object at 0x7da1b14c7c70>], [<ast.Constant object at 0x7da1b14c7bb0>, <ast.Constant object at 0x7da1b14c6c80>, <ast.Name object at 0x7da1b14c5e10>]]
variable[response] assign[=] call[name[IPAddressService].requester.call, parameter[name[args]]]
if compare[name[response].rc equal[==] constant[0]] begin[:]
variable[ret] assign[=] call[name[IPAddress].json_2_ip_address, parameter[name[response].response_content]]
return[name[ret]] | keyword[def] identifier[find_ip_address] ( identifier[ipa_id] = keyword[None] , identifier[ipa_fqdn] = keyword[None] , identifier[ipa_ip_address] = keyword[None] , identifier[ipa_subnet_id] = keyword[None] , identifier[ipa_osi_id] = keyword[None] ):
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[if] ( identifier[ipa_id] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_id] ) keyword[and] ( identifier[ipa_fqdn] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_fqdn] ) keyword[and] (( identifier[ipa_ip_address] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_ip_address] ) keyword[and]
(( identifier[ipa_subnet_id] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_subnet_id] ) keyword[or] ( identifier[ipa_osi_id] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_osi_id] ))):
keyword[raise] identifier[exceptions] . identifier[ArianeCallParametersError] ( literal[string] )
keyword[if] ( identifier[ipa_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_id] ) keyword[and] (( identifier[ipa_fqdn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_fqdn] ) keyword[or] ( identifier[ipa_ip_address] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_ip_address] )):
identifier[LOGGER] . identifier[warn] ( literal[string]
literal[string] )
identifier[ipa_fqdn] = keyword[None]
identifier[ipa_ip_address] = keyword[None]
identifier[ipa_osi_id] = keyword[None]
identifier[ipa_subnet_id] = keyword[None]
keyword[if] ( identifier[ipa_id] keyword[is] keyword[None] keyword[or] keyword[not] identifier[ipa_id] ) keyword[and] ( identifier[ipa_fqdn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_fqdn] ) keyword[and] ( identifier[ipa_ip_address] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_ip_address] ):
identifier[LOGGER] . identifier[warn] ( literal[string]
literal[string] )
identifier[ipa_ip_address] = keyword[None]
identifier[ipa_osi_id] = keyword[None]
identifier[ipa_subnet_id] = keyword[None]
identifier[params] = keyword[None]
keyword[if] identifier[ipa_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_id] :
identifier[params] ={ literal[string] : identifier[ipa_id] }
keyword[elif] identifier[ipa_fqdn] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_fqdn] :
identifier[params] ={ literal[string] : identifier[ipa_fqdn] }
keyword[elif] ( identifier[ipa_ip_address] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_ip_address] ) keyword[and] ( identifier[ipa_subnet_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_subnet_id] ):
identifier[params] ={ literal[string] : identifier[ipa_ip_address] , literal[string] : identifier[ipa_subnet_id] }
keyword[elif] ( identifier[ipa_ip_address] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_ip_address] ) keyword[and] ( identifier[ipa_osi_id] keyword[is] keyword[not] keyword[None] keyword[and] identifier[ipa_osi_id] ):
identifier[params] ={ literal[string] : identifier[ipa_ip_address] , literal[string] : identifier[ipa_osi_id] }
identifier[ret] = keyword[None]
keyword[if] identifier[params] keyword[is] keyword[not] keyword[None] :
identifier[args] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[params] }
identifier[response] = identifier[IPAddressService] . identifier[requester] . identifier[call] ( identifier[args] )
keyword[if] identifier[response] . identifier[rc] == literal[int] :
identifier[ret] = identifier[IPAddress] . identifier[json_2_ip_address] ( identifier[response] . identifier[response_content] )
keyword[elif] identifier[response] . identifier[rc] != literal[int] :
identifier[err_msg] = literal[string] + identifier[str] ( identifier[ipa_id] )+ literal[string] + identifier[str] ( identifier[ipa_ip_address] )+ literal[string] + identifier[str] ( identifier[response] . identifier[response_content] )+ literal[string] + identifier[str] ( identifier[response] . identifier[error_message] )+ literal[string] + identifier[str] ( identifier[response] . identifier[rc] )+ literal[string]
identifier[LOGGER] . identifier[warning] (
identifier[err_msg]
)
keyword[return] identifier[ret] | def find_ip_address(ipa_id=None, ipa_fqdn=None, ipa_ip_address=None, ipa_subnet_id=None, ipa_osi_id=None):
"""
find the IP Address (ipa) according ipa id (prioritary) or ipa ipAddress
:param ipa_id: the IP Address id
:param ipa_ip_address: the IP Address
:return: found IP Address or None if not found
"""
LOGGER.debug('IPAddressService.find_ip_address')
if (ipa_id is None or not ipa_id) and (ipa_fqdn is None or not ipa_fqdn) and ((ipa_ip_address is None or not ipa_ip_address) and ((ipa_subnet_id is None or not ipa_subnet_id) or (ipa_osi_id is None or not ipa_osi_id))):
raise exceptions.ArianeCallParametersError('id and fqdn and (ip_address,(ip_subnet_id|ip_osi_id))') # depends on [control=['if'], data=[]]
if (ipa_id is not None and ipa_id) and (ipa_fqdn is not None and ipa_fqdn or (ipa_ip_address is not None and ipa_ip_address)):
LOGGER.warn('IPAddressService.find_ip_address - Both id and (fqdn or ipAddress) are defined. Will give you search on id.')
ipa_fqdn = None
ipa_ip_address = None
ipa_osi_id = None
ipa_subnet_id = None # depends on [control=['if'], data=[]]
if (ipa_id is None or not ipa_id) and (ipa_fqdn is not None and ipa_fqdn) and (ipa_ip_address is not None and ipa_ip_address):
LOGGER.warn('IPAddressService.find_ip_address - Both fqdn and ipAddress are defined. Will give you search on fqdn.')
ipa_ip_address = None
ipa_osi_id = None
ipa_subnet_id = None # depends on [control=['if'], data=[]]
params = None
if ipa_id is not None and ipa_id:
params = {'id': ipa_id} # depends on [control=['if'], data=[]]
elif ipa_fqdn is not None and ipa_fqdn:
params = {'fqdn': ipa_fqdn} # depends on [control=['if'], data=[]]
elif (ipa_ip_address is not None and ipa_ip_address) and (ipa_subnet_id is not None and ipa_subnet_id):
params = {'ipAddress': ipa_ip_address, 'subnetID': ipa_subnet_id} # depends on [control=['if'], data=[]]
elif (ipa_ip_address is not None and ipa_ip_address) and (ipa_osi_id is not None and ipa_osi_id):
params = {'ipAddress': ipa_ip_address, 'osiID': ipa_osi_id} # depends on [control=['if'], data=[]]
ret = None
if params is not None:
args = {'http_operation': 'GET', 'operation_path': 'get', 'parameters': params}
response = IPAddressService.requester.call(args)
if response.rc == 0:
ret = IPAddress.json_2_ip_address(response.response_content) # depends on [control=['if'], data=[]]
elif response.rc != 404:
err_msg = 'IPAddressService.find_ip_address - Problem while finding IP Address (id:' + str(ipa_id) + ', ipAddress:' + str(ipa_ip_address) + '). Reason: ' + str(response.response_content) + '-' + str(response.error_message) + ' (' + str(response.rc) + ')'
LOGGER.warning(err_msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['params']]
return ret |
def decode(data, encoding=None, errors='strict', keep=False,
normalize=False, preserve_dict_class=False, preserve_tuples=False,
to_str=False):
'''
Generic function which will decode whichever type is passed, if necessary.
Optionally use to_str=True to ensure strings are str types and not unicode
on Python 2.
If `strict` is True, and `keep` is False, and we fail to decode, a
UnicodeDecodeError will be raised. Passing `keep` as True allows for the
original value to silently be returned in cases where decoding fails. This
can be useful for cases where the data passed to this function is likely to
contain binary blobs, such as in the case of cp.recv.
If `normalize` is True, then unicodedata.normalize() will be used to
normalize unicode strings down to a single code point per glyph. It is
recommended not to normalize unless you know what you're doing. For
instance, if `data` contains a dictionary, it is possible that normalizing
will lead to data loss because the following two strings will normalize to
the same value:
- u'\\u044f\\u0438\\u0306\\u0446\\u0430.txt'
- u'\\u044f\\u0439\\u0446\\u0430.txt'
One good use case for normalization is in the test suite. For example, on
some platforms such as Mac OS, os.listdir() will produce the first of the
two strings above, in which "й" is represented as two code points (i.e. one
for the base character, and one for the breve mark). Normalizing allows for
a more reliable test case.
'''
_decode_func = salt.utils.stringutils.to_unicode \
if not to_str \
else salt.utils.stringutils.to_str
if isinstance(data, Mapping):
return decode_dict(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(data, list):
return decode_list(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples, to_str)
elif isinstance(data, tuple):
return decode_tuple(data, encoding, errors, keep, normalize,
preserve_dict_class, to_str) \
if preserve_tuples \
else decode_list(data, encoding, errors, keep, normalize,
preserve_dict_class, preserve_tuples, to_str)
else:
try:
data = _decode_func(data, encoding, errors, normalize)
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply means we
# are going to leave the value as-is.
pass
except UnicodeDecodeError:
if not keep:
raise
return data | def function[decode, parameter[data, encoding, errors, keep, normalize, preserve_dict_class, preserve_tuples, to_str]]:
constant[
Generic function which will decode whichever type is passed, if necessary.
Optionally use to_str=True to ensure strings are str types and not unicode
on Python 2.
If `strict` is True, and `keep` is False, and we fail to decode, a
UnicodeDecodeError will be raised. Passing `keep` as True allows for the
original value to silently be returned in cases where decoding fails. This
can be useful for cases where the data passed to this function is likely to
contain binary blobs, such as in the case of cp.recv.
If `normalize` is True, then unicodedata.normalize() will be used to
normalize unicode strings down to a single code point per glyph. It is
recommended not to normalize unless you know what you're doing. For
instance, if `data` contains a dictionary, it is possible that normalizing
will lead to data loss because the following two strings will normalize to
the same value:
- u'\u044f\u0438\u0306\u0446\u0430.txt'
- u'\u044f\u0439\u0446\u0430.txt'
One good use case for normalization is in the test suite. For example, on
some platforms such as Mac OS, os.listdir() will produce the first of the
two strings above, in which "й" is represented as two code points (i.e. one
for the base character, and one for the breve mark). Normalizing allows for
a more reliable test case.
]
variable[_decode_func] assign[=] <ast.IfExp object at 0x7da1b1f6e830>
if call[name[isinstance], parameter[name[data], name[Mapping]]] begin[:]
return[call[name[decode_dict], parameter[name[data], name[encoding], name[errors], name[keep], name[normalize], name[preserve_dict_class], name[preserve_tuples], name[to_str]]]] | keyword[def] identifier[decode] ( identifier[data] , identifier[encoding] = keyword[None] , identifier[errors] = literal[string] , identifier[keep] = keyword[False] ,
identifier[normalize] = keyword[False] , identifier[preserve_dict_class] = keyword[False] , identifier[preserve_tuples] = keyword[False] ,
identifier[to_str] = keyword[False] ):
literal[string]
identifier[_decode_func] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] keyword[if] keyword[not] identifier[to_str] keyword[else] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_str]
keyword[if] identifier[isinstance] ( identifier[data] , identifier[Mapping] ):
keyword[return] identifier[decode_dict] ( identifier[data] , identifier[encoding] , identifier[errors] , identifier[keep] , identifier[normalize] ,
identifier[preserve_dict_class] , identifier[preserve_tuples] , identifier[to_str] )
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[list] ):
keyword[return] identifier[decode_list] ( identifier[data] , identifier[encoding] , identifier[errors] , identifier[keep] , identifier[normalize] ,
identifier[preserve_dict_class] , identifier[preserve_tuples] , identifier[to_str] )
keyword[elif] identifier[isinstance] ( identifier[data] , identifier[tuple] ):
keyword[return] identifier[decode_tuple] ( identifier[data] , identifier[encoding] , identifier[errors] , identifier[keep] , identifier[normalize] ,
identifier[preserve_dict_class] , identifier[to_str] ) keyword[if] identifier[preserve_tuples] keyword[else] identifier[decode_list] ( identifier[data] , identifier[encoding] , identifier[errors] , identifier[keep] , identifier[normalize] ,
identifier[preserve_dict_class] , identifier[preserve_tuples] , identifier[to_str] )
keyword[else] :
keyword[try] :
identifier[data] = identifier[_decode_func] ( identifier[data] , identifier[encoding] , identifier[errors] , identifier[normalize] )
keyword[except] identifier[TypeError] :
keyword[pass]
keyword[except] identifier[UnicodeDecodeError] :
keyword[if] keyword[not] identifier[keep] :
keyword[raise]
keyword[return] identifier[data] | def decode(data, encoding=None, errors='strict', keep=False, normalize=False, preserve_dict_class=False, preserve_tuples=False, to_str=False):
"""
Generic function which will decode whichever type is passed, if necessary.
Optionally use to_str=True to ensure strings are str types and not unicode
on Python 2.
If `strict` is True, and `keep` is False, and we fail to decode, a
UnicodeDecodeError will be raised. Passing `keep` as True allows for the
original value to silently be returned in cases where decoding fails. This
can be useful for cases where the data passed to this function is likely to
contain binary blobs, such as in the case of cp.recv.
If `normalize` is True, then unicodedata.normalize() will be used to
normalize unicode strings down to a single code point per glyph. It is
recommended not to normalize unless you know what you're doing. For
instance, if `data` contains a dictionary, it is possible that normalizing
will lead to data loss because the following two strings will normalize to
the same value:
- u'\\u044f\\u0438\\u0306\\u0446\\u0430.txt'
- u'\\u044f\\u0439\\u0446\\u0430.txt'
One good use case for normalization is in the test suite. For example, on
some platforms such as Mac OS, os.listdir() will produce the first of the
two strings above, in which "й" is represented as two code points (i.e. one
for the base character, and one for the breve mark). Normalizing allows for
a more reliable test case.
"""
_decode_func = salt.utils.stringutils.to_unicode if not to_str else salt.utils.stringutils.to_str
if isinstance(data, Mapping):
return decode_dict(data, encoding, errors, keep, normalize, preserve_dict_class, preserve_tuples, to_str) # depends on [control=['if'], data=[]]
elif isinstance(data, list):
return decode_list(data, encoding, errors, keep, normalize, preserve_dict_class, preserve_tuples, to_str) # depends on [control=['if'], data=[]]
elif isinstance(data, tuple):
return decode_tuple(data, encoding, errors, keep, normalize, preserve_dict_class, to_str) if preserve_tuples else decode_list(data, encoding, errors, keep, normalize, preserve_dict_class, preserve_tuples, to_str) # depends on [control=['if'], data=[]]
else:
try:
data = _decode_func(data, encoding, errors, normalize) # depends on [control=['try'], data=[]]
except TypeError:
# to_unicode raises a TypeError when input is not a
# string/bytestring/bytearray. This is expected and simply means we
# are going to leave the value as-is.
pass # depends on [control=['except'], data=[]]
except UnicodeDecodeError:
if not keep:
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]]
return data |
def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=None):
""" Get the IFFT execute and plan functions
"""
if parameters is None:
parameters = []
source = fftsrc.render(input_callback=callback, output_callback=out_callback, parameters=parameters)
path = compile(source, name)
lib = ctypes.cdll.LoadLibrary(path)
fn = lib.execute
fn.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
plan = lib.create_plan
plan.restype = ctypes.c_void_p
plan.argyptes = [ctypes.c_uint]
return fn, plan | def function[get_fn_plan, parameter[callback, out_callback, name, parameters]]:
constant[ Get the IFFT execute and plan functions
]
if compare[name[parameters] is constant[None]] begin[:]
variable[parameters] assign[=] list[[]]
variable[source] assign[=] call[name[fftsrc].render, parameter[]]
variable[path] assign[=] call[name[compile], parameter[name[source], name[name]]]
variable[lib] assign[=] call[name[ctypes].cdll.LoadLibrary, parameter[name[path]]]
variable[fn] assign[=] name[lib].execute
name[fn].argtypes assign[=] list[[<ast.Attribute object at 0x7da204623730>, <ast.Attribute object at 0x7da204622b60>, <ast.Attribute object at 0x7da2046225c0>, <ast.Attribute object at 0x7da2046204f0>]]
variable[plan] assign[=] name[lib].create_plan
name[plan].restype assign[=] name[ctypes].c_void_p
name[plan].argyptes assign[=] list[[<ast.Attribute object at 0x7da204621120>]]
return[tuple[[<ast.Name object at 0x7da204623370>, <ast.Name object at 0x7da2046202e0>]]] | keyword[def] identifier[get_fn_plan] ( identifier[callback] = keyword[None] , identifier[out_callback] = keyword[None] , identifier[name] = literal[string] , identifier[parameters] = keyword[None] ):
literal[string]
keyword[if] identifier[parameters] keyword[is] keyword[None] :
identifier[parameters] =[]
identifier[source] = identifier[fftsrc] . identifier[render] ( identifier[input_callback] = identifier[callback] , identifier[output_callback] = identifier[out_callback] , identifier[parameters] = identifier[parameters] )
identifier[path] = identifier[compile] ( identifier[source] , identifier[name] )
identifier[lib] = identifier[ctypes] . identifier[cdll] . identifier[LoadLibrary] ( identifier[path] )
identifier[fn] = identifier[lib] . identifier[execute]
identifier[fn] . identifier[argtypes] =[ identifier[ctypes] . identifier[c_void_p] , identifier[ctypes] . identifier[c_void_p] , identifier[ctypes] . identifier[c_void_p] , identifier[ctypes] . identifier[c_void_p] ]
identifier[plan] = identifier[lib] . identifier[create_plan]
identifier[plan] . identifier[restype] = identifier[ctypes] . identifier[c_void_p]
identifier[plan] . identifier[argyptes] =[ identifier[ctypes] . identifier[c_uint] ]
keyword[return] identifier[fn] , identifier[plan] | def get_fn_plan(callback=None, out_callback=None, name='pycbc_cufft', parameters=None):
""" Get the IFFT execute and plan functions
"""
if parameters is None:
parameters = [] # depends on [control=['if'], data=['parameters']]
source = fftsrc.render(input_callback=callback, output_callback=out_callback, parameters=parameters)
path = compile(source, name)
lib = ctypes.cdll.LoadLibrary(path)
fn = lib.execute
fn.argtypes = [ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
plan = lib.create_plan
plan.restype = ctypes.c_void_p
plan.argyptes = [ctypes.c_uint]
return (fn, plan) |
def instantiate_by_name(self, object_name):
""" Instantiate object from the environment, possibly giving some extra arguments """
if object_name not in self.instances:
instance = self.instantiate_from_data(self.environment[object_name])
self.instances[object_name] = instance
return instance
else:
return self.instances[object_name] | def function[instantiate_by_name, parameter[self, object_name]]:
constant[ Instantiate object from the environment, possibly giving some extra arguments ]
if compare[name[object_name] <ast.NotIn object at 0x7da2590d7190> name[self].instances] begin[:]
variable[instance] assign[=] call[name[self].instantiate_from_data, parameter[call[name[self].environment][name[object_name]]]]
call[name[self].instances][name[object_name]] assign[=] name[instance]
return[name[instance]] | keyword[def] identifier[instantiate_by_name] ( identifier[self] , identifier[object_name] ):
literal[string]
keyword[if] identifier[object_name] keyword[not] keyword[in] identifier[self] . identifier[instances] :
identifier[instance] = identifier[self] . identifier[instantiate_from_data] ( identifier[self] . identifier[environment] [ identifier[object_name] ])
identifier[self] . identifier[instances] [ identifier[object_name] ]= identifier[instance]
keyword[return] identifier[instance]
keyword[else] :
keyword[return] identifier[self] . identifier[instances] [ identifier[object_name] ] | def instantiate_by_name(self, object_name):
""" Instantiate object from the environment, possibly giving some extra arguments """
if object_name not in self.instances:
instance = self.instantiate_from_data(self.environment[object_name])
self.instances[object_name] = instance
return instance # depends on [control=['if'], data=['object_name']]
else:
return self.instances[object_name] |
def _compute_sync_map_file_path(
self,
root,
hierarchy_type,
custom_id,
file_name
):
"""
Compute the sync map file path inside the output container.
:param string root: the root of the sync map files inside the container
:param job_os_hierarchy_type: type of job output hierarchy
:type job_os_hierarchy_type: :class:`~aeneas.hierarchytype.HierarchyType`
:param string custom_id: the task custom id (flat) or
page directory name (paged)
:param string file_name: the output file name for the sync map
:rtype: string
"""
prefix = root
if hierarchy_type == HierarchyType.PAGED:
prefix = gf.norm_join(prefix, custom_id)
file_name_joined = gf.norm_join(prefix, file_name)
return self._replace_placeholder(file_name_joined, custom_id) | def function[_compute_sync_map_file_path, parameter[self, root, hierarchy_type, custom_id, file_name]]:
constant[
Compute the sync map file path inside the output container.
:param string root: the root of the sync map files inside the container
:param job_os_hierarchy_type: type of job output hierarchy
:type job_os_hierarchy_type: :class:`~aeneas.hierarchytype.HierarchyType`
:param string custom_id: the task custom id (flat) or
page directory name (paged)
:param string file_name: the output file name for the sync map
:rtype: string
]
variable[prefix] assign[=] name[root]
if compare[name[hierarchy_type] equal[==] name[HierarchyType].PAGED] begin[:]
variable[prefix] assign[=] call[name[gf].norm_join, parameter[name[prefix], name[custom_id]]]
variable[file_name_joined] assign[=] call[name[gf].norm_join, parameter[name[prefix], name[file_name]]]
return[call[name[self]._replace_placeholder, parameter[name[file_name_joined], name[custom_id]]]] | keyword[def] identifier[_compute_sync_map_file_path] (
identifier[self] ,
identifier[root] ,
identifier[hierarchy_type] ,
identifier[custom_id] ,
identifier[file_name]
):
literal[string]
identifier[prefix] = identifier[root]
keyword[if] identifier[hierarchy_type] == identifier[HierarchyType] . identifier[PAGED] :
identifier[prefix] = identifier[gf] . identifier[norm_join] ( identifier[prefix] , identifier[custom_id] )
identifier[file_name_joined] = identifier[gf] . identifier[norm_join] ( identifier[prefix] , identifier[file_name] )
keyword[return] identifier[self] . identifier[_replace_placeholder] ( identifier[file_name_joined] , identifier[custom_id] ) | def _compute_sync_map_file_path(self, root, hierarchy_type, custom_id, file_name):
"""
Compute the sync map file path inside the output container.
:param string root: the root of the sync map files inside the container
:param job_os_hierarchy_type: type of job output hierarchy
:type job_os_hierarchy_type: :class:`~aeneas.hierarchytype.HierarchyType`
:param string custom_id: the task custom id (flat) or
page directory name (paged)
:param string file_name: the output file name for the sync map
:rtype: string
"""
prefix = root
if hierarchy_type == HierarchyType.PAGED:
prefix = gf.norm_join(prefix, custom_id) # depends on [control=['if'], data=[]]
file_name_joined = gf.norm_join(prefix, file_name)
return self._replace_placeholder(file_name_joined, custom_id) |
def kill(self):
"""
Kill the current non blocking command
Raises:
TypeError: If command is blocking
"""
if self.block:
raise TypeError(NON_BLOCKING_ERROR_MESSAGE)
try:
self.process.kill()
except ProcessLookupError as exc:
self.logger.debug(exc) | def function[kill, parameter[self]]:
constant[
Kill the current non blocking command
Raises:
TypeError: If command is blocking
]
if name[self].block begin[:]
<ast.Raise object at 0x7da20c6abfd0>
<ast.Try object at 0x7da20c7cad10> | keyword[def] identifier[kill] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[block] :
keyword[raise] identifier[TypeError] ( identifier[NON_BLOCKING_ERROR_MESSAGE] )
keyword[try] :
identifier[self] . identifier[process] . identifier[kill] ()
keyword[except] identifier[ProcessLookupError] keyword[as] identifier[exc] :
identifier[self] . identifier[logger] . identifier[debug] ( identifier[exc] ) | def kill(self):
"""
Kill the current non blocking command
Raises:
TypeError: If command is blocking
"""
if self.block:
raise TypeError(NON_BLOCKING_ERROR_MESSAGE) # depends on [control=['if'], data=[]]
try:
self.process.kill() # depends on [control=['try'], data=[]]
except ProcessLookupError as exc:
self.logger.debug(exc) # depends on [control=['except'], data=['exc']] |
def indicator_body(indicators):
"""Generate the appropriate dictionary content for POST of an File indicator
Args:
indicators (list): A list of one or more hash value(s).
"""
hash_patterns = {
'md5': re.compile(r'^([a-fA-F\d]{32})$'),
'sha1': re.compile(r'^([a-fA-F\d]{40})$'),
'sha256': re.compile(r'^([a-fA-F\d]{64})$'),
}
body = {}
for indicator in indicators:
if indicator is None:
continue
if hash_patterns['md5'].match(indicator):
body['md5'] = indicator
elif hash_patterns['sha1'].match(indicator):
body['sha1'] = indicator
elif hash_patterns['sha256'].match(indicator):
body['sha256'] = indicator
return body | def function[indicator_body, parameter[indicators]]:
constant[Generate the appropriate dictionary content for POST of an File indicator
Args:
indicators (list): A list of one or more hash value(s).
]
variable[hash_patterns] assign[=] dictionary[[<ast.Constant object at 0x7da2044c35b0>, <ast.Constant object at 0x7da2044c1d80>, <ast.Constant object at 0x7da2044c2590>], [<ast.Call object at 0x7da2044c1390>, <ast.Call object at 0x7da2044c25c0>, <ast.Call object at 0x7da2044c3fd0>]]
variable[body] assign[=] dictionary[[], []]
for taget[name[indicator]] in starred[name[indicators]] begin[:]
if compare[name[indicator] is constant[None]] begin[:]
continue
if call[call[name[hash_patterns]][constant[md5]].match, parameter[name[indicator]]] begin[:]
call[name[body]][constant[md5]] assign[=] name[indicator]
return[name[body]] | keyword[def] identifier[indicator_body] ( identifier[indicators] ):
literal[string]
identifier[hash_patterns] ={
literal[string] : identifier[re] . identifier[compile] ( literal[string] ),
literal[string] : identifier[re] . identifier[compile] ( literal[string] ),
literal[string] : identifier[re] . identifier[compile] ( literal[string] ),
}
identifier[body] ={}
keyword[for] identifier[indicator] keyword[in] identifier[indicators] :
keyword[if] identifier[indicator] keyword[is] keyword[None] :
keyword[continue]
keyword[if] identifier[hash_patterns] [ literal[string] ]. identifier[match] ( identifier[indicator] ):
identifier[body] [ literal[string] ]= identifier[indicator]
keyword[elif] identifier[hash_patterns] [ literal[string] ]. identifier[match] ( identifier[indicator] ):
identifier[body] [ literal[string] ]= identifier[indicator]
keyword[elif] identifier[hash_patterns] [ literal[string] ]. identifier[match] ( identifier[indicator] ):
identifier[body] [ literal[string] ]= identifier[indicator]
keyword[return] identifier[body] | def indicator_body(indicators):
"""Generate the appropriate dictionary content for POST of an File indicator
Args:
indicators (list): A list of one or more hash value(s).
"""
hash_patterns = {'md5': re.compile('^([a-fA-F\\d]{32})$'), 'sha1': re.compile('^([a-fA-F\\d]{40})$'), 'sha256': re.compile('^([a-fA-F\\d]{64})$')}
body = {}
for indicator in indicators:
if indicator is None:
continue # depends on [control=['if'], data=[]]
if hash_patterns['md5'].match(indicator):
body['md5'] = indicator # depends on [control=['if'], data=[]]
elif hash_patterns['sha1'].match(indicator):
body['sha1'] = indicator # depends on [control=['if'], data=[]]
elif hash_patterns['sha256'].match(indicator):
body['sha256'] = indicator # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['indicator']]
return body |
def layer_norm_compute(x, epsilon, scale, bias, layer_collection=None):
"""Layer norm raw computation."""
# Save these before they get converted to tensors by the casting below
params = (scale, bias)
epsilon, scale, bias = [cast_like(t, x) for t in [epsilon, scale, bias]]
mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
variance = tf.reduce_mean(
tf.squared_difference(x, mean), axis=[-1], keepdims=True)
norm_x = (x - mean) * tf.rsqrt(variance + epsilon)
output = norm_x * scale + bias
return output | def function[layer_norm_compute, parameter[x, epsilon, scale, bias, layer_collection]]:
constant[Layer norm raw computation.]
variable[params] assign[=] tuple[[<ast.Name object at 0x7da18dc07130>, <ast.Name object at 0x7da18dc06740>]]
<ast.Tuple object at 0x7da18dc06230> assign[=] <ast.ListComp object at 0x7da18dc04310>
variable[mean] assign[=] call[name[tf].reduce_mean, parameter[name[x]]]
variable[variance] assign[=] call[name[tf].reduce_mean, parameter[call[name[tf].squared_difference, parameter[name[x], name[mean]]]]]
variable[norm_x] assign[=] binary_operation[binary_operation[name[x] - name[mean]] * call[name[tf].rsqrt, parameter[binary_operation[name[variance] + name[epsilon]]]]]
variable[output] assign[=] binary_operation[binary_operation[name[norm_x] * name[scale]] + name[bias]]
return[name[output]] | keyword[def] identifier[layer_norm_compute] ( identifier[x] , identifier[epsilon] , identifier[scale] , identifier[bias] , identifier[layer_collection] = keyword[None] ):
literal[string]
identifier[params] =( identifier[scale] , identifier[bias] )
identifier[epsilon] , identifier[scale] , identifier[bias] =[ identifier[cast_like] ( identifier[t] , identifier[x] ) keyword[for] identifier[t] keyword[in] [ identifier[epsilon] , identifier[scale] , identifier[bias] ]]
identifier[mean] = identifier[tf] . identifier[reduce_mean] ( identifier[x] , identifier[axis] =[- literal[int] ], identifier[keepdims] = keyword[True] )
identifier[variance] = identifier[tf] . identifier[reduce_mean] (
identifier[tf] . identifier[squared_difference] ( identifier[x] , identifier[mean] ), identifier[axis] =[- literal[int] ], identifier[keepdims] = keyword[True] )
identifier[norm_x] =( identifier[x] - identifier[mean] )* identifier[tf] . identifier[rsqrt] ( identifier[variance] + identifier[epsilon] )
identifier[output] = identifier[norm_x] * identifier[scale] + identifier[bias]
keyword[return] identifier[output] | def layer_norm_compute(x, epsilon, scale, bias, layer_collection=None):
"""Layer norm raw computation."""
# Save these before they get converted to tensors by the casting below
params = (scale, bias)
(epsilon, scale, bias) = [cast_like(t, x) for t in [epsilon, scale, bias]]
mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
variance = tf.reduce_mean(tf.squared_difference(x, mean), axis=[-1], keepdims=True)
norm_x = (x - mean) * tf.rsqrt(variance + epsilon)
output = norm_x * scale + bias
return output |
def _select(self, pointer):
"""
Get a YAMLChunk referenced by a pointer.
"""
return YAMLChunk(
self._ruamelparsed,
pointer=pointer,
label=self._label,
strictparsed=self._strictparsed,
key_association=copy(self._key_association),
) | def function[_select, parameter[self, pointer]]:
constant[
Get a YAMLChunk referenced by a pointer.
]
return[call[name[YAMLChunk], parameter[name[self]._ruamelparsed]]] | keyword[def] identifier[_select] ( identifier[self] , identifier[pointer] ):
literal[string]
keyword[return] identifier[YAMLChunk] (
identifier[self] . identifier[_ruamelparsed] ,
identifier[pointer] = identifier[pointer] ,
identifier[label] = identifier[self] . identifier[_label] ,
identifier[strictparsed] = identifier[self] . identifier[_strictparsed] ,
identifier[key_association] = identifier[copy] ( identifier[self] . identifier[_key_association] ),
) | def _select(self, pointer):
"""
Get a YAMLChunk referenced by a pointer.
"""
return YAMLChunk(self._ruamelparsed, pointer=pointer, label=self._label, strictparsed=self._strictparsed, key_association=copy(self._key_association)) |
def init(port=None, do_not_exit=False, disable_tls=False, log_level='WARNING'):
"""Start the Xenon GRPC server on the specified port, or, if a service
is already running on that port, connect to that.
If no port is given, a random port is selected. This means that, by
default, every python instance will start its own instance of a xenon-grpc
process.
:param port: the port number
:param do_not_exit: by default the GRPC server is shut down after Python
exits (through the `atexit` module), setting this value to `True` will
prevent that from happening."""
logger = logging.getLogger('xenon')
logger.setLevel(logging.INFO)
logger_handler = logging.StreamHandler()
logger_handler.setFormatter(logging.Formatter(style='{'))
logger_handler.setLevel(getattr(logging, log_level))
logger.addHandler(logger_handler)
if port is None:
port = find_free_port()
if __server__.process is not None:
logger.warning(
"You tried to run init(), but the server is already running.")
return __server__
__server__.port = port
__server__.disable_tls = disable_tls
__server__.__enter__()
if not do_not_exit:
atexit.register(__server__.__exit__, None, None, None)
return __server__ | def function[init, parameter[port, do_not_exit, disable_tls, log_level]]:
constant[Start the Xenon GRPC server on the specified port, or, if a service
is already running on that port, connect to that.
If no port is given, a random port is selected. This means that, by
default, every python instance will start its own instance of a xenon-grpc
process.
:param port: the port number
:param do_not_exit: by default the GRPC server is shut down after Python
exits (through the `atexit` module), setting this value to `True` will
prevent that from happening.]
variable[logger] assign[=] call[name[logging].getLogger, parameter[constant[xenon]]]
call[name[logger].setLevel, parameter[name[logging].INFO]]
variable[logger_handler] assign[=] call[name[logging].StreamHandler, parameter[]]
call[name[logger_handler].setFormatter, parameter[call[name[logging].Formatter, parameter[]]]]
call[name[logger_handler].setLevel, parameter[call[name[getattr], parameter[name[logging], name[log_level]]]]]
call[name[logger].addHandler, parameter[name[logger_handler]]]
if compare[name[port] is constant[None]] begin[:]
variable[port] assign[=] call[name[find_free_port], parameter[]]
if compare[name[__server__].process is_not constant[None]] begin[:]
call[name[logger].warning, parameter[constant[You tried to run init(), but the server is already running.]]]
return[name[__server__]]
name[__server__].port assign[=] name[port]
name[__server__].disable_tls assign[=] name[disable_tls]
call[name[__server__].__enter__, parameter[]]
if <ast.UnaryOp object at 0x7da1b1864df0> begin[:]
call[name[atexit].register, parameter[name[__server__].__exit__, constant[None], constant[None], constant[None]]]
return[name[__server__]] | keyword[def] identifier[init] ( identifier[port] = keyword[None] , identifier[do_not_exit] = keyword[False] , identifier[disable_tls] = keyword[False] , identifier[log_level] = literal[string] ):
literal[string]
identifier[logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[logger] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[logger_handler] = identifier[logging] . identifier[StreamHandler] ()
identifier[logger_handler] . identifier[setFormatter] ( identifier[logging] . identifier[Formatter] ( identifier[style] = literal[string] ))
identifier[logger_handler] . identifier[setLevel] ( identifier[getattr] ( identifier[logging] , identifier[log_level] ))
identifier[logger] . identifier[addHandler] ( identifier[logger_handler] )
keyword[if] identifier[port] keyword[is] keyword[None] :
identifier[port] = identifier[find_free_port] ()
keyword[if] identifier[__server__] . identifier[process] keyword[is] keyword[not] keyword[None] :
identifier[logger] . identifier[warning] (
literal[string] )
keyword[return] identifier[__server__]
identifier[__server__] . identifier[port] = identifier[port]
identifier[__server__] . identifier[disable_tls] = identifier[disable_tls]
identifier[__server__] . identifier[__enter__] ()
keyword[if] keyword[not] identifier[do_not_exit] :
identifier[atexit] . identifier[register] ( identifier[__server__] . identifier[__exit__] , keyword[None] , keyword[None] , keyword[None] )
keyword[return] identifier[__server__] | def init(port=None, do_not_exit=False, disable_tls=False, log_level='WARNING'):
"""Start the Xenon GRPC server on the specified port, or, if a service
is already running on that port, connect to that.
If no port is given, a random port is selected. This means that, by
default, every python instance will start its own instance of a xenon-grpc
process.
:param port: the port number
:param do_not_exit: by default the GRPC server is shut down after Python
exits (through the `atexit` module), setting this value to `True` will
prevent that from happening."""
logger = logging.getLogger('xenon')
logger.setLevel(logging.INFO)
logger_handler = logging.StreamHandler()
logger_handler.setFormatter(logging.Formatter(style='{'))
logger_handler.setLevel(getattr(logging, log_level))
logger.addHandler(logger_handler)
if port is None:
port = find_free_port() # depends on [control=['if'], data=['port']]
if __server__.process is not None:
logger.warning('You tried to run init(), but the server is already running.')
return __server__ # depends on [control=['if'], data=[]]
__server__.port = port
__server__.disable_tls = disable_tls
__server__.__enter__()
if not do_not_exit:
atexit.register(__server__.__exit__, None, None, None) # depends on [control=['if'], data=[]]
return __server__ |
def stdev(self, default=None):
"""
Calculate the standard deviation of the time series.
:param default: Value to return as a default should the calculation not be possible.
:return: Float representing the standard deviation value or `None`.
"""
return numpy.asscalar(numpy.std(self.values)) if self.values else default | def function[stdev, parameter[self, default]]:
constant[
Calculate the standard deviation of the time series.
:param default: Value to return as a default should the calculation not be possible.
:return: Float representing the standard deviation value or `None`.
]
return[<ast.IfExp object at 0x7da2054a5060>] | keyword[def] identifier[stdev] ( identifier[self] , identifier[default] = keyword[None] ):
literal[string]
keyword[return] identifier[numpy] . identifier[asscalar] ( identifier[numpy] . identifier[std] ( identifier[self] . identifier[values] )) keyword[if] identifier[self] . identifier[values] keyword[else] identifier[default] | def stdev(self, default=None):
"""
Calculate the standard deviation of the time series.
:param default: Value to return as a default should the calculation not be possible.
:return: Float representing the standard deviation value or `None`.
"""
return numpy.asscalar(numpy.std(self.values)) if self.values else default |
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if not isinstance(request, PreparedRequest):
raise ValueError('You can only send PreparedRequests.')
checked_urls = set()
while request.url in self.redirect_cache:
checked_urls.add(request.url)
new_url = self.redirect_cache.get(request.url)
if new_url in checked_urls:
break
request.url = new_url
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
timeout = kwargs.get('timeout')
verify = kwargs.get('verify')
cert = kwargs.get('cert')
proxies = kwargs.get('proxies')
hooks = request.hooks
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
if r.history:
# If the hooks create history then we want those cookies too
for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw)
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request,
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = history
if not stream:
r.content
return r | def function[send, parameter[self, request]]:
constant[Send a given PreparedRequest.]
call[name[kwargs].setdefault, parameter[constant[stream], name[self].stream]]
call[name[kwargs].setdefault, parameter[constant[verify], name[self].verify]]
call[name[kwargs].setdefault, parameter[constant[cert], name[self].cert]]
call[name[kwargs].setdefault, parameter[constant[proxies], name[self].proxies]]
if <ast.UnaryOp object at 0x7da2041d8fd0> begin[:]
<ast.Raise object at 0x7da2041d9510>
variable[checked_urls] assign[=] call[name[set], parameter[]]
while compare[name[request].url in name[self].redirect_cache] begin[:]
call[name[checked_urls].add, parameter[name[request].url]]
variable[new_url] assign[=] call[name[self].redirect_cache.get, parameter[name[request].url]]
if compare[name[new_url] in name[checked_urls]] begin[:]
break
name[request].url assign[=] name[new_url]
variable[allow_redirects] assign[=] call[name[kwargs].pop, parameter[constant[allow_redirects], constant[True]]]
variable[stream] assign[=] call[name[kwargs].get, parameter[constant[stream]]]
variable[timeout] assign[=] call[name[kwargs].get, parameter[constant[timeout]]]
variable[verify] assign[=] call[name[kwargs].get, parameter[constant[verify]]]
variable[cert] assign[=] call[name[kwargs].get, parameter[constant[cert]]]
variable[proxies] assign[=] call[name[kwargs].get, parameter[constant[proxies]]]
variable[hooks] assign[=] name[request].hooks
variable[adapter] assign[=] call[name[self].get_adapter, parameter[]]
variable[start] assign[=] call[name[datetime].utcnow, parameter[]]
variable[r] assign[=] call[name[adapter].send, parameter[name[request]]]
name[r].elapsed assign[=] binary_operation[call[name[datetime].utcnow, parameter[]] - name[start]]
variable[r] assign[=] call[name[dispatch_hook], parameter[constant[response], name[hooks], name[r]]]
if name[r].history begin[:]
for taget[name[resp]] in starred[name[r].history] begin[:]
call[name[extract_cookies_to_jar], parameter[name[self].cookies, name[resp].request, name[resp].raw]]
call[name[extract_cookies_to_jar], parameter[name[self].cookies, name[request], name[r].raw]]
variable[gen] assign[=] call[name[self].resolve_redirects, parameter[name[r], name[request]]]
variable[history] assign[=] <ast.IfExp object at 0x7da2041da6b0>
if name[history] begin[:]
call[name[history].insert, parameter[constant[0], name[r]]]
variable[r] assign[=] call[name[history].pop, parameter[]]
name[r].history assign[=] name[history]
if <ast.UnaryOp object at 0x7da20e960280> begin[:]
name[r].content
return[name[r]] | keyword[def] identifier[send] ( identifier[self] , identifier[request] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[stream] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[verify] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[cert] )
identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[proxies] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[request] , identifier[PreparedRequest] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[checked_urls] = identifier[set] ()
keyword[while] identifier[request] . identifier[url] keyword[in] identifier[self] . identifier[redirect_cache] :
identifier[checked_urls] . identifier[add] ( identifier[request] . identifier[url] )
identifier[new_url] = identifier[self] . identifier[redirect_cache] . identifier[get] ( identifier[request] . identifier[url] )
keyword[if] identifier[new_url] keyword[in] identifier[checked_urls] :
keyword[break]
identifier[request] . identifier[url] = identifier[new_url]
identifier[allow_redirects] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[True] )
identifier[stream] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[timeout] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[verify] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[cert] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[proxies] = identifier[kwargs] . identifier[get] ( literal[string] )
identifier[hooks] = identifier[request] . identifier[hooks]
identifier[adapter] = identifier[self] . identifier[get_adapter] ( identifier[url] = identifier[request] . identifier[url] )
identifier[start] = identifier[datetime] . identifier[utcnow] ()
identifier[r] = identifier[adapter] . identifier[send] ( identifier[request] ,** identifier[kwargs] )
identifier[r] . identifier[elapsed] = identifier[datetime] . identifier[utcnow] ()- identifier[start]
identifier[r] = identifier[dispatch_hook] ( literal[string] , identifier[hooks] , identifier[r] ,** identifier[kwargs] )
keyword[if] identifier[r] . identifier[history] :
keyword[for] identifier[resp] keyword[in] identifier[r] . identifier[history] :
identifier[extract_cookies_to_jar] ( identifier[self] . identifier[cookies] , identifier[resp] . identifier[request] , identifier[resp] . identifier[raw] )
identifier[extract_cookies_to_jar] ( identifier[self] . identifier[cookies] , identifier[request] , identifier[r] . identifier[raw] )
identifier[gen] = identifier[self] . identifier[resolve_redirects] ( identifier[r] , identifier[request] ,
identifier[stream] = identifier[stream] ,
identifier[timeout] = identifier[timeout] ,
identifier[verify] = identifier[verify] ,
identifier[cert] = identifier[cert] ,
identifier[proxies] = identifier[proxies] )
identifier[history] =[ identifier[resp] keyword[for] identifier[resp] keyword[in] identifier[gen] ] keyword[if] identifier[allow_redirects] keyword[else] []
keyword[if] identifier[history] :
identifier[history] . identifier[insert] ( literal[int] , identifier[r] )
identifier[r] = identifier[history] . identifier[pop] ()
identifier[r] . identifier[history] = identifier[history]
keyword[if] keyword[not] identifier[stream] :
identifier[r] . identifier[content]
keyword[return] identifier[r] | def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
# Set defaults that the hooks can utilize to ensure they always have
# the correct parameters to reproduce the previous request.
kwargs.setdefault('stream', self.stream)
kwargs.setdefault('verify', self.verify)
kwargs.setdefault('cert', self.cert)
kwargs.setdefault('proxies', self.proxies)
# It's possible that users might accidentally send a Request object.
# Guard against that specific failure case.
if not isinstance(request, PreparedRequest):
raise ValueError('You can only send PreparedRequests.') # depends on [control=['if'], data=[]]
checked_urls = set()
while request.url in self.redirect_cache:
checked_urls.add(request.url)
new_url = self.redirect_cache.get(request.url)
if new_url in checked_urls:
break # depends on [control=['if'], data=[]]
request.url = new_url # depends on [control=['while'], data=[]]
# Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream')
timeout = kwargs.get('timeout')
verify = kwargs.get('verify')
cert = kwargs.get('cert')
proxies = kwargs.get('proxies')
hooks = request.hooks
# Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
# Start time (approximately) of the request
start = datetime.utcnow()
# Send the request
r = adapter.send(request, **kwargs)
# Total elapsed time of the request (approximately)
r.elapsed = datetime.utcnow() - start
# Response manipulation hooks
r = dispatch_hook('response', hooks, r, **kwargs)
# Persist cookies
if r.history:
# If the hooks create history then we want those cookies too
for resp in r.history:
extract_cookies_to_jar(self.cookies, resp.request, resp.raw) # depends on [control=['for'], data=['resp']] # depends on [control=['if'], data=[]]
extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator.
gen = self.resolve_redirects(r, request, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
# Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else []
# Shuffle things around if there's history.
if history:
# Insert the first (original) request at the start
history.insert(0, r)
# Get the last request made
r = history.pop()
r.history = history # depends on [control=['if'], data=[]]
if not stream:
r.content # depends on [control=['if'], data=[]]
return r |
def RangeFromChild(self, child) -> TextRange:
"""
Call IUIAutomationTextPattern::RangeFromChild.
child: `Control` or its subclass.
Return `TextRange` or None, a text range enclosing a child element such as an image,
hyperlink, Microsoft Excel spreadsheet, or other embedded object.
Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefromchild
"""
textRange = self.pattern.RangeFromChild(Control.Element)
if textRange:
return TextRange(textRange=textRange) | def function[RangeFromChild, parameter[self, child]]:
constant[
Call IUIAutomationTextPattern::RangeFromChild.
child: `Control` or its subclass.
Return `TextRange` or None, a text range enclosing a child element such as an image,
hyperlink, Microsoft Excel spreadsheet, or other embedded object.
Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefromchild
]
variable[textRange] assign[=] call[name[self].pattern.RangeFromChild, parameter[name[Control].Element]]
if name[textRange] begin[:]
return[call[name[TextRange], parameter[]]] | keyword[def] identifier[RangeFromChild] ( identifier[self] , identifier[child] )-> identifier[TextRange] :
literal[string]
identifier[textRange] = identifier[self] . identifier[pattern] . identifier[RangeFromChild] ( identifier[Control] . identifier[Element] )
keyword[if] identifier[textRange] :
keyword[return] identifier[TextRange] ( identifier[textRange] = identifier[textRange] ) | def RangeFromChild(self, child) -> TextRange:
"""
Call IUIAutomationTextPattern::RangeFromChild.
child: `Control` or its subclass.
Return `TextRange` or None, a text range enclosing a child element such as an image,
hyperlink, Microsoft Excel spreadsheet, or other embedded object.
Refer https://docs.microsoft.com/en-us/windows/desktop/api/uiautomationclient/nf-uiautomationclient-iuiautomationtextpattern-rangefromchild
"""
textRange = self.pattern.RangeFromChild(Control.Element)
if textRange:
return TextRange(textRange=textRange) # depends on [control=['if'], data=[]] |
def check64bit(current_system="python"):
"""checks if you are on a 64 bit platform"""
if current_system == "python":
return sys.maxsize > 2147483647
elif current_system == "os":
import platform
pm = platform.machine()
if pm != ".." and pm.endswith('64'): # recent Python (not Iron)
return True
else:
if 'PROCESSOR_ARCHITEW6432' in os.environ:
return True # 32 bit program running on 64 bit Windows
try:
# 64 bit Windows 64 bit program
return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64')
except IndexError:
pass # not Windows
try:
# this often works in Linux
return '64' in platform.architecture()[0]
except Exception:
# is an older version of Python, assume also an older os@
# (best we can guess)
return False | def function[check64bit, parameter[current_system]]:
constant[checks if you are on a 64 bit platform]
if compare[name[current_system] equal[==] constant[python]] begin[:]
return[compare[name[sys].maxsize greater[>] constant[2147483647]]] | keyword[def] identifier[check64bit] ( identifier[current_system] = literal[string] ):
literal[string]
keyword[if] identifier[current_system] == literal[string] :
keyword[return] identifier[sys] . identifier[maxsize] > literal[int]
keyword[elif] identifier[current_system] == literal[string] :
keyword[import] identifier[platform]
identifier[pm] = identifier[platform] . identifier[machine] ()
keyword[if] identifier[pm] != literal[string] keyword[and] identifier[pm] . identifier[endswith] ( literal[string] ):
keyword[return] keyword[True]
keyword[else] :
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
keyword[return] keyword[True]
keyword[try] :
keyword[return] identifier[os] . identifier[environ] [ literal[string] ]. identifier[endswith] ( literal[string] )
keyword[except] identifier[IndexError] :
keyword[pass]
keyword[try] :
keyword[return] literal[string] keyword[in] identifier[platform] . identifier[architecture] ()[ literal[int] ]
keyword[except] identifier[Exception] :
keyword[return] keyword[False] | def check64bit(current_system='python'):
"""checks if you are on a 64 bit platform"""
if current_system == 'python':
return sys.maxsize > 2147483647 # depends on [control=['if'], data=[]]
elif current_system == 'os':
import platform
pm = platform.machine()
if pm != '..' and pm.endswith('64'): # recent Python (not Iron)
return True # depends on [control=['if'], data=[]]
else:
if 'PROCESSOR_ARCHITEW6432' in os.environ:
return True # 32 bit program running on 64 bit Windows # depends on [control=['if'], data=[]]
try:
# 64 bit Windows 64 bit program
return os.environ['PROCESSOR_ARCHITECTURE'].endswith('64') # depends on [control=['try'], data=[]]
except IndexError:
pass # not Windows # depends on [control=['except'], data=[]]
try:
# this often works in Linux
return '64' in platform.architecture()[0] # depends on [control=['try'], data=[]]
except Exception:
# is an older version of Python, assume also an older os@
# (best we can guess)
return False # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def trigger_created(self, filepath):
"""Triggers created event if file exists."""
if os.path.exists(filepath):
self._trigger('created', filepath) | def function[trigger_created, parameter[self, filepath]]:
constant[Triggers created event if file exists.]
if call[name[os].path.exists, parameter[name[filepath]]] begin[:]
call[name[self]._trigger, parameter[constant[created], name[filepath]]] | keyword[def] identifier[trigger_created] ( identifier[self] , identifier[filepath] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filepath] ):
identifier[self] . identifier[_trigger] ( literal[string] , identifier[filepath] ) | def trigger_created(self, filepath):
"""Triggers created event if file exists."""
if os.path.exists(filepath):
self._trigger('created', filepath) # depends on [control=['if'], data=[]] |
def _parse_request_range(
range_header: str
) -> Optional[Tuple[Optional[int], Optional[int]]]:
"""Parses a Range header.
Returns either ``None`` or tuple ``(start, end)``.
Note that while the HTTP headers use inclusive byte positions,
this method returns indexes suitable for use in slices.
>>> start, end = _parse_request_range("bytes=1-2")
>>> start, end
(1, 3)
>>> [0, 1, 2, 3, 4][start:end]
[1, 2]
>>> _parse_request_range("bytes=6-")
(6, None)
>>> _parse_request_range("bytes=-6")
(-6, None)
>>> _parse_request_range("bytes=-0")
(None, 0)
>>> _parse_request_range("bytes=")
(None, None)
>>> _parse_request_range("foo=42")
>>> _parse_request_range("bytes=1-2,6-10")
Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed).
See [0] for the details of the range header.
[0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
"""
unit, _, value = range_header.partition("=")
unit, value = unit.strip(), value.strip()
if unit != "bytes":
return None
start_b, _, end_b = value.partition("-")
try:
start = _int_or_none(start_b)
end = _int_or_none(end_b)
except ValueError:
return None
if end is not None:
if start is None:
if end != 0:
start = -end
end = None
else:
end += 1
return (start, end) | def function[_parse_request_range, parameter[range_header]]:
constant[Parses a Range header.
Returns either ``None`` or tuple ``(start, end)``.
Note that while the HTTP headers use inclusive byte positions,
this method returns indexes suitable for use in slices.
>>> start, end = _parse_request_range("bytes=1-2")
>>> start, end
(1, 3)
>>> [0, 1, 2, 3, 4][start:end]
[1, 2]
>>> _parse_request_range("bytes=6-")
(6, None)
>>> _parse_request_range("bytes=-6")
(-6, None)
>>> _parse_request_range("bytes=-0")
(None, 0)
>>> _parse_request_range("bytes=")
(None, None)
>>> _parse_request_range("foo=42")
>>> _parse_request_range("bytes=1-2,6-10")
Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed).
See [0] for the details of the range header.
[0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
]
<ast.Tuple object at 0x7da1b20cb610> assign[=] call[name[range_header].partition, parameter[constant[=]]]
<ast.Tuple object at 0x7da1b20c8eb0> assign[=] tuple[[<ast.Call object at 0x7da1b20c9450>, <ast.Call object at 0x7da1b20cb550>]]
if compare[name[unit] not_equal[!=] constant[bytes]] begin[:]
return[constant[None]]
<ast.Tuple object at 0x7da1b20c8250> assign[=] call[name[value].partition, parameter[constant[-]]]
<ast.Try object at 0x7da1b20c9ed0>
if compare[name[end] is_not constant[None]] begin[:]
if compare[name[start] is constant[None]] begin[:]
if compare[name[end] not_equal[!=] constant[0]] begin[:]
variable[start] assign[=] <ast.UnaryOp object at 0x7da1b20c99f0>
variable[end] assign[=] constant[None]
return[tuple[[<ast.Name object at 0x7da1b20ca290>, <ast.Name object at 0x7da1b20c9750>]]] | keyword[def] identifier[_parse_request_range] (
identifier[range_header] : identifier[str]
)-> identifier[Optional] [ identifier[Tuple] [ identifier[Optional] [ identifier[int] ], identifier[Optional] [ identifier[int] ]]]:
literal[string]
identifier[unit] , identifier[_] , identifier[value] = identifier[range_header] . identifier[partition] ( literal[string] )
identifier[unit] , identifier[value] = identifier[unit] . identifier[strip] (), identifier[value] . identifier[strip] ()
keyword[if] identifier[unit] != literal[string] :
keyword[return] keyword[None]
identifier[start_b] , identifier[_] , identifier[end_b] = identifier[value] . identifier[partition] ( literal[string] )
keyword[try] :
identifier[start] = identifier[_int_or_none] ( identifier[start_b] )
identifier[end] = identifier[_int_or_none] ( identifier[end_b] )
keyword[except] identifier[ValueError] :
keyword[return] keyword[None]
keyword[if] identifier[end] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[start] keyword[is] keyword[None] :
keyword[if] identifier[end] != literal[int] :
identifier[start] =- identifier[end]
identifier[end] = keyword[None]
keyword[else] :
identifier[end] += literal[int]
keyword[return] ( identifier[start] , identifier[end] ) | def _parse_request_range(range_header: str) -> Optional[Tuple[Optional[int], Optional[int]]]:
"""Parses a Range header.
Returns either ``None`` or tuple ``(start, end)``.
Note that while the HTTP headers use inclusive byte positions,
this method returns indexes suitable for use in slices.
>>> start, end = _parse_request_range("bytes=1-2")
>>> start, end
(1, 3)
>>> [0, 1, 2, 3, 4][start:end]
[1, 2]
>>> _parse_request_range("bytes=6-")
(6, None)
>>> _parse_request_range("bytes=-6")
(-6, None)
>>> _parse_request_range("bytes=-0")
(None, 0)
>>> _parse_request_range("bytes=")
(None, None)
>>> _parse_request_range("foo=42")
>>> _parse_request_range("bytes=1-2,6-10")
Note: only supports one range (ex, ``bytes=1-2,6-10`` is not allowed).
See [0] for the details of the range header.
[0]: http://greenbytes.de/tech/webdav/draft-ietf-httpbis-p5-range-latest.html#byte.ranges
"""
(unit, _, value) = range_header.partition('=')
(unit, value) = (unit.strip(), value.strip())
if unit != 'bytes':
return None # depends on [control=['if'], data=[]]
(start_b, _, end_b) = value.partition('-')
try:
start = _int_or_none(start_b)
end = _int_or_none(end_b) # depends on [control=['try'], data=[]]
except ValueError:
return None # depends on [control=['except'], data=[]]
if end is not None:
if start is None:
if end != 0:
start = -end
end = None # depends on [control=['if'], data=['end']] # depends on [control=['if'], data=['start']]
else:
end += 1 # depends on [control=['if'], data=['end']]
return (start, end) |
def get_custom_color_config_from_egrc(config):
"""
Get the ColorConfig from the egrc config object. Any colors not defined
will be None.
"""
pound = _get_color_from_config(config, CONFIG_NAMES.pound)
heading = _get_color_from_config(config, CONFIG_NAMES.heading)
code = _get_color_from_config(config, CONFIG_NAMES.code)
backticks = _get_color_from_config(config, CONFIG_NAMES.backticks)
prompt = _get_color_from_config(config, CONFIG_NAMES.prompt)
pound_reset = _get_color_from_config(config, CONFIG_NAMES.pound_reset)
heading_reset = _get_color_from_config(
config,
CONFIG_NAMES.heading_reset
)
code_reset = _get_color_from_config(config, CONFIG_NAMES.code_reset)
backticks_reset = _get_color_from_config(
config,
CONFIG_NAMES.backticks_reset
)
prompt_reset = _get_color_from_config(config, CONFIG_NAMES.prompt_reset)
result = ColorConfig(
pound=pound,
heading=heading,
code=code,
backticks=backticks,
prompt=prompt,
pound_reset=pound_reset,
heading_reset=heading_reset,
code_reset=code_reset,
backticks_reset=backticks_reset,
prompt_reset=prompt_reset
)
return result | def function[get_custom_color_config_from_egrc, parameter[config]]:
constant[
Get the ColorConfig from the egrc config object. Any colors not defined
will be None.
]
variable[pound] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].pound]]
variable[heading] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].heading]]
variable[code] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].code]]
variable[backticks] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].backticks]]
variable[prompt] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].prompt]]
variable[pound_reset] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].pound_reset]]
variable[heading_reset] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].heading_reset]]
variable[code_reset] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].code_reset]]
variable[backticks_reset] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].backticks_reset]]
variable[prompt_reset] assign[=] call[name[_get_color_from_config], parameter[name[config], name[CONFIG_NAMES].prompt_reset]]
variable[result] assign[=] call[name[ColorConfig], parameter[]]
return[name[result]] | keyword[def] identifier[get_custom_color_config_from_egrc] ( identifier[config] ):
literal[string]
identifier[pound] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[pound] )
identifier[heading] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[heading] )
identifier[code] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[code] )
identifier[backticks] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[backticks] )
identifier[prompt] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[prompt] )
identifier[pound_reset] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[pound_reset] )
identifier[heading_reset] = identifier[_get_color_from_config] (
identifier[config] ,
identifier[CONFIG_NAMES] . identifier[heading_reset]
)
identifier[code_reset] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[code_reset] )
identifier[backticks_reset] = identifier[_get_color_from_config] (
identifier[config] ,
identifier[CONFIG_NAMES] . identifier[backticks_reset]
)
identifier[prompt_reset] = identifier[_get_color_from_config] ( identifier[config] , identifier[CONFIG_NAMES] . identifier[prompt_reset] )
identifier[result] = identifier[ColorConfig] (
identifier[pound] = identifier[pound] ,
identifier[heading] = identifier[heading] ,
identifier[code] = identifier[code] ,
identifier[backticks] = identifier[backticks] ,
identifier[prompt] = identifier[prompt] ,
identifier[pound_reset] = identifier[pound_reset] ,
identifier[heading_reset] = identifier[heading_reset] ,
identifier[code_reset] = identifier[code_reset] ,
identifier[backticks_reset] = identifier[backticks_reset] ,
identifier[prompt_reset] = identifier[prompt_reset]
)
keyword[return] identifier[result] | def get_custom_color_config_from_egrc(config):
"""
Get the ColorConfig from the egrc config object. Any colors not defined
will be None.
"""
pound = _get_color_from_config(config, CONFIG_NAMES.pound)
heading = _get_color_from_config(config, CONFIG_NAMES.heading)
code = _get_color_from_config(config, CONFIG_NAMES.code)
backticks = _get_color_from_config(config, CONFIG_NAMES.backticks)
prompt = _get_color_from_config(config, CONFIG_NAMES.prompt)
pound_reset = _get_color_from_config(config, CONFIG_NAMES.pound_reset)
heading_reset = _get_color_from_config(config, CONFIG_NAMES.heading_reset)
code_reset = _get_color_from_config(config, CONFIG_NAMES.code_reset)
backticks_reset = _get_color_from_config(config, CONFIG_NAMES.backticks_reset)
prompt_reset = _get_color_from_config(config, CONFIG_NAMES.prompt_reset)
result = ColorConfig(pound=pound, heading=heading, code=code, backticks=backticks, prompt=prompt, pound_reset=pound_reset, heading_reset=heading_reset, code_reset=code_reset, backticks_reset=backticks_reset, prompt_reset=prompt_reset)
return result |
def orders_from_events(events, sell_delay=5, num_shares=100):
"""Create a DataFrame of orders (signed share quantities) based on event triggers (T/F or 0/1 matrix)
Arguments:
events (DataFrame): mask table to indicate occurrence of buy event (1 = buy, NaN/0/False = do nothing)
sell_delay (int): number of days after the buy order to initiate a sell order of those shares
num_shares (int): number of shares to buy and sell at each event
Returns:
DataFrame: Signed integer numbers of shares to buy (+) or sell (-)
columns are stock ticker symbols
index is datetime at end of trading day (16:00 in NY)
"""
buy = events.copy() * num_shares
sell = -1 * pd.DataFrame(buy.copy().values[:-sell_delay], index=buy.index[sell_delay:], columns=buy.columns)
sell = pd.concat([0 * buy.iloc[:sell_delay], sell])
for i in range(sell_delay):
sell.iloc[-1] -= buy.iloc[-sell_delay + i]
orders = buy + sell
return orders | def function[orders_from_events, parameter[events, sell_delay, num_shares]]:
constant[Create a DataFrame of orders (signed share quantities) based on event triggers (T/F or 0/1 matrix)
Arguments:
events (DataFrame): mask table to indicate occurrence of buy event (1 = buy, NaN/0/False = do nothing)
sell_delay (int): number of days after the buy order to initiate a sell order of those shares
num_shares (int): number of shares to buy and sell at each event
Returns:
DataFrame: Signed integer numbers of shares to buy (+) or sell (-)
columns are stock ticker symbols
index is datetime at end of trading day (16:00 in NY)
]
variable[buy] assign[=] binary_operation[call[name[events].copy, parameter[]] * name[num_shares]]
variable[sell] assign[=] binary_operation[<ast.UnaryOp object at 0x7da1b16177c0> * call[name[pd].DataFrame, parameter[call[call[name[buy].copy, parameter[]].values][<ast.Slice object at 0x7da1b1616470>]]]]
variable[sell] assign[=] call[name[pd].concat, parameter[list[[<ast.BinOp object at 0x7da1b1614af0>, <ast.Name object at 0x7da1b162b010>]]]]
for taget[name[i]] in starred[call[name[range], parameter[name[sell_delay]]]] begin[:]
<ast.AugAssign object at 0x7da1b162a530>
variable[orders] assign[=] binary_operation[name[buy] + name[sell]]
return[name[orders]] | keyword[def] identifier[orders_from_events] ( identifier[events] , identifier[sell_delay] = literal[int] , identifier[num_shares] = literal[int] ):
literal[string]
identifier[buy] = identifier[events] . identifier[copy] ()* identifier[num_shares]
identifier[sell] =- literal[int] * identifier[pd] . identifier[DataFrame] ( identifier[buy] . identifier[copy] (). identifier[values] [:- identifier[sell_delay] ], identifier[index] = identifier[buy] . identifier[index] [ identifier[sell_delay] :], identifier[columns] = identifier[buy] . identifier[columns] )
identifier[sell] = identifier[pd] . identifier[concat] ([ literal[int] * identifier[buy] . identifier[iloc] [: identifier[sell_delay] ], identifier[sell] ])
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[sell_delay] ):
identifier[sell] . identifier[iloc] [- literal[int] ]-= identifier[buy] . identifier[iloc] [- identifier[sell_delay] + identifier[i] ]
identifier[orders] = identifier[buy] + identifier[sell]
keyword[return] identifier[orders] | def orders_from_events(events, sell_delay=5, num_shares=100):
"""Create a DataFrame of orders (signed share quantities) based on event triggers (T/F or 0/1 matrix)
Arguments:
events (DataFrame): mask table to indicate occurrence of buy event (1 = buy, NaN/0/False = do nothing)
sell_delay (int): number of days after the buy order to initiate a sell order of those shares
num_shares (int): number of shares to buy and sell at each event
Returns:
DataFrame: Signed integer numbers of shares to buy (+) or sell (-)
columns are stock ticker symbols
index is datetime at end of trading day (16:00 in NY)
"""
buy = events.copy() * num_shares
sell = -1 * pd.DataFrame(buy.copy().values[:-sell_delay], index=buy.index[sell_delay:], columns=buy.columns)
sell = pd.concat([0 * buy.iloc[:sell_delay], sell])
for i in range(sell_delay):
sell.iloc[-1] -= buy.iloc[-sell_delay + i] # depends on [control=['for'], data=['i']]
orders = buy + sell
return orders |
def create(dataset, target, model_name, features=None,
validation_set='auto', distributed='auto',
verbose=True, seed=None, **kwargs):
"""
Create a :class:`~turicreate.toolkits.SupervisedLearningModel`,
This is generic function that allows you to create any model that
implements SupervisedLearningModel This function is normally not called, call
specific model's create function instead
Parameters
----------
dataset : SFrame
Dataset for training the model.
target : string
Name of the column containing the target variable. The values in this
column must be 0 or 1, of integer type.
model_name : string
Name of the model
features : list[string], optional
List of feature names used by feature column
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
For each row of the progress table, the chosen metrics are computed
for both the provided training dataset and the validation_set. The
format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
distributed: env
The distributed environment
verbose : boolean
whether print out messages during training
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
kwargs : dict
Additional parameter options that can be passed
"""
# Perform error-checking and trim inputs to specified columns
dataset, validation_set = _validate_data(dataset, target, features,
validation_set)
# Sample a validation set from the training data if requested
if isinstance(validation_set, str):
assert validation_set == 'auto'
if dataset.num_rows() >= 100:
if verbose:
print_validation_track_notification()
dataset, validation_set = dataset.random_split(.95, seed=seed, exact=True)
else:
validation_set = _turicreate.SFrame()
elif validation_set is None:
validation_set = _turicreate.SFrame()
# Sanitize model-specific options
options = {k.lower(): kwargs[k] for k in kwargs}
# Create a model instance and train it
model = _turicreate.extensions.__dict__[model_name]()
with QuietProgress(verbose):
model.train(dataset, target, validation_set, options)
return SupervisedLearningModel(model, model_name) | def function[create, parameter[dataset, target, model_name, features, validation_set, distributed, verbose, seed]]:
constant[
Create a :class:`~turicreate.toolkits.SupervisedLearningModel`,
This is generic function that allows you to create any model that
implements SupervisedLearningModel This function is normally not called, call
specific model's create function instead
Parameters
----------
dataset : SFrame
Dataset for training the model.
target : string
Name of the column containing the target variable. The values in this
column must be 0 or 1, of integer type.
model_name : string
Name of the model
features : list[string], optional
List of feature names used by feature column
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
For each row of the progress table, the chosen metrics are computed
for both the provided training dataset and the validation_set. The
format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
distributed: env
The distributed environment
verbose : boolean
whether print out messages during training
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
kwargs : dict
Additional parameter options that can be passed
]
<ast.Tuple object at 0x7da1b1ef16f0> assign[=] call[name[_validate_data], parameter[name[dataset], name[target], name[features], name[validation_set]]]
if call[name[isinstance], parameter[name[validation_set], name[str]]] begin[:]
assert[compare[name[validation_set] equal[==] constant[auto]]]
if compare[call[name[dataset].num_rows, parameter[]] greater_or_equal[>=] constant[100]] begin[:]
if name[verbose] begin[:]
call[name[print_validation_track_notification], parameter[]]
<ast.Tuple object at 0x7da1b1ef2800> assign[=] call[name[dataset].random_split, parameter[constant[0.95]]]
variable[options] assign[=] <ast.DictComp object at 0x7da1b1ef1300>
variable[model] assign[=] call[call[name[_turicreate].extensions.__dict__][name[model_name]], parameter[]]
with call[name[QuietProgress], parameter[name[verbose]]] begin[:]
call[name[model].train, parameter[name[dataset], name[target], name[validation_set], name[options]]]
return[call[name[SupervisedLearningModel], parameter[name[model], name[model_name]]]] | keyword[def] identifier[create] ( identifier[dataset] , identifier[target] , identifier[model_name] , identifier[features] = keyword[None] ,
identifier[validation_set] = literal[string] , identifier[distributed] = literal[string] ,
identifier[verbose] = keyword[True] , identifier[seed] = keyword[None] ,** identifier[kwargs] ):
literal[string]
identifier[dataset] , identifier[validation_set] = identifier[_validate_data] ( identifier[dataset] , identifier[target] , identifier[features] ,
identifier[validation_set] )
keyword[if] identifier[isinstance] ( identifier[validation_set] , identifier[str] ):
keyword[assert] identifier[validation_set] == literal[string]
keyword[if] identifier[dataset] . identifier[num_rows] ()>= literal[int] :
keyword[if] identifier[verbose] :
identifier[print_validation_track_notification] ()
identifier[dataset] , identifier[validation_set] = identifier[dataset] . identifier[random_split] ( literal[int] , identifier[seed] = identifier[seed] , identifier[exact] = keyword[True] )
keyword[else] :
identifier[validation_set] = identifier[_turicreate] . identifier[SFrame] ()
keyword[elif] identifier[validation_set] keyword[is] keyword[None] :
identifier[validation_set] = identifier[_turicreate] . identifier[SFrame] ()
identifier[options] ={ identifier[k] . identifier[lower] (): identifier[kwargs] [ identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[kwargs] }
identifier[model] = identifier[_turicreate] . identifier[extensions] . identifier[__dict__] [ identifier[model_name] ]()
keyword[with] identifier[QuietProgress] ( identifier[verbose] ):
identifier[model] . identifier[train] ( identifier[dataset] , identifier[target] , identifier[validation_set] , identifier[options] )
keyword[return] identifier[SupervisedLearningModel] ( identifier[model] , identifier[model_name] ) | def create(dataset, target, model_name, features=None, validation_set='auto', distributed='auto', verbose=True, seed=None, **kwargs):
"""
Create a :class:`~turicreate.toolkits.SupervisedLearningModel`,
This is generic function that allows you to create any model that
implements SupervisedLearningModel This function is normally not called, call
specific model's create function instead
Parameters
----------
dataset : SFrame
Dataset for training the model.
target : string
Name of the column containing the target variable. The values in this
column must be 0 or 1, of integer type.
model_name : string
Name of the model
features : list[string], optional
List of feature names used by feature column
validation_set : SFrame, optional
A dataset for monitoring the model's generalization performance.
For each row of the progress table, the chosen metrics are computed
for both the provided training dataset and the validation_set. The
format of this SFrame must be the same as the training set.
By default this argument is set to 'auto' and a validation set is
automatically sampled and used for progress printing. If
validation_set is set to None, then no additional metrics
are computed. The default value is 'auto'.
distributed: env
The distributed environment
verbose : boolean
whether print out messages during training
seed : int, optional
Seed for random number generation. Set this value to ensure that the
same model is created every time.
kwargs : dict
Additional parameter options that can be passed
"""
# Perform error-checking and trim inputs to specified columns
(dataset, validation_set) = _validate_data(dataset, target, features, validation_set)
# Sample a validation set from the training data if requested
if isinstance(validation_set, str):
assert validation_set == 'auto'
if dataset.num_rows() >= 100:
if verbose:
print_validation_track_notification() # depends on [control=['if'], data=[]]
(dataset, validation_set) = dataset.random_split(0.95, seed=seed, exact=True) # depends on [control=['if'], data=[]]
else:
validation_set = _turicreate.SFrame() # depends on [control=['if'], data=[]]
elif validation_set is None:
validation_set = _turicreate.SFrame() # depends on [control=['if'], data=['validation_set']]
# Sanitize model-specific options
options = {k.lower(): kwargs[k] for k in kwargs}
# Create a model instance and train it
model = _turicreate.extensions.__dict__[model_name]()
with QuietProgress(verbose):
model.train(dataset, target, validation_set, options) # depends on [control=['with'], data=[]]
return SupervisedLearningModel(model, model_name) |
def call(self, *args, **kwargs):
"""Calls a redis command and returns a Future of the reply.
Args:
*args: full redis command as variable length argument list or
a Pipeline object (as a single argument).
**kwargs: internal private options (do not use).
Returns:
a Future with the decoded redis reply as result (when available) or
a ConnectionError object in case of connection error.
Raises:
ClientError: your Pipeline object is empty.
Examples:
>>> @tornado.gen.coroutine
def foobar():
client = Client()
result = yield client.call("HSET", "key", "field", "val")
"""
if not self.is_connected():
if self.autoconnect:
# We use this method only when we are not contected
# to void performance penaly due to gen.coroutine decorator
return self._call_with_autoconnect(*args, **kwargs)
else:
error = ConnectionError("you are not connected and "
"autoconnect=False")
return tornado.gen.maybe_future(error)
return self._call(*args, **kwargs) | def function[call, parameter[self]]:
constant[Calls a redis command and returns a Future of the reply.
Args:
*args: full redis command as variable length argument list or
a Pipeline object (as a single argument).
**kwargs: internal private options (do not use).
Returns:
a Future with the decoded redis reply as result (when available) or
a ConnectionError object in case of connection error.
Raises:
ClientError: your Pipeline object is empty.
Examples:
>>> @tornado.gen.coroutine
def foobar():
client = Client()
result = yield client.call("HSET", "key", "field", "val")
]
if <ast.UnaryOp object at 0x7da2041dabc0> begin[:]
if name[self].autoconnect begin[:]
return[call[name[self]._call_with_autoconnect, parameter[<ast.Starred object at 0x7da2041daaa0>]]]
return[call[name[self]._call, parameter[<ast.Starred object at 0x7da2041dada0>]]] | keyword[def] identifier[call] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[is_connected] ():
keyword[if] identifier[self] . identifier[autoconnect] :
keyword[return] identifier[self] . identifier[_call_with_autoconnect] (* identifier[args] ,** identifier[kwargs] )
keyword[else] :
identifier[error] = identifier[ConnectionError] ( literal[string]
literal[string] )
keyword[return] identifier[tornado] . identifier[gen] . identifier[maybe_future] ( identifier[error] )
keyword[return] identifier[self] . identifier[_call] (* identifier[args] ,** identifier[kwargs] ) | def call(self, *args, **kwargs):
"""Calls a redis command and returns a Future of the reply.
Args:
*args: full redis command as variable length argument list or
a Pipeline object (as a single argument).
**kwargs: internal private options (do not use).
Returns:
a Future with the decoded redis reply as result (when available) or
a ConnectionError object in case of connection error.
Raises:
ClientError: your Pipeline object is empty.
Examples:
>>> @tornado.gen.coroutine
def foobar():
client = Client()
result = yield client.call("HSET", "key", "field", "val")
"""
if not self.is_connected():
if self.autoconnect:
# We use this method only when we are not contected
# to void performance penaly due to gen.coroutine decorator
return self._call_with_autoconnect(*args, **kwargs) # depends on [control=['if'], data=[]]
else:
error = ConnectionError('you are not connected and autoconnect=False')
return tornado.gen.maybe_future(error) # depends on [control=['if'], data=[]]
return self._call(*args, **kwargs) |
def ion_or_solid_comp_object(formula):
"""
Returns either an ion object or composition object given
a formula.
Args:
formula: String formula. Eg. of ion: NaOH(aq), Na[+];
Eg. of solid: Fe2O3(s), Fe(s), Na2O
Returns:
Composition/Ion object
"""
m = re.search(r"\[([^\[\]]+)\]|\(aq\)", formula)
if m:
comp_obj = Ion.from_formula(formula)
elif re.search(r"\(s\)", formula):
comp_obj = Composition(formula[:-3])
else:
comp_obj = Composition(formula)
return comp_obj | def function[ion_or_solid_comp_object, parameter[formula]]:
constant[
Returns either an ion object or composition object given
a formula.
Args:
formula: String formula. Eg. of ion: NaOH(aq), Na[+];
Eg. of solid: Fe2O3(s), Fe(s), Na2O
Returns:
Composition/Ion object
]
variable[m] assign[=] call[name[re].search, parameter[constant[\[([^\[\]]+)\]|\(aq\)], name[formula]]]
if name[m] begin[:]
variable[comp_obj] assign[=] call[name[Ion].from_formula, parameter[name[formula]]]
return[name[comp_obj]] | keyword[def] identifier[ion_or_solid_comp_object] ( identifier[formula] ):
literal[string]
identifier[m] = identifier[re] . identifier[search] ( literal[string] , identifier[formula] )
keyword[if] identifier[m] :
identifier[comp_obj] = identifier[Ion] . identifier[from_formula] ( identifier[formula] )
keyword[elif] identifier[re] . identifier[search] ( literal[string] , identifier[formula] ):
identifier[comp_obj] = identifier[Composition] ( identifier[formula] [:- literal[int] ])
keyword[else] :
identifier[comp_obj] = identifier[Composition] ( identifier[formula] )
keyword[return] identifier[comp_obj] | def ion_or_solid_comp_object(formula):
"""
Returns either an ion object or composition object given
a formula.
Args:
formula: String formula. Eg. of ion: NaOH(aq), Na[+];
Eg. of solid: Fe2O3(s), Fe(s), Na2O
Returns:
Composition/Ion object
"""
m = re.search('\\[([^\\[\\]]+)\\]|\\(aq\\)', formula)
if m:
comp_obj = Ion.from_formula(formula) # depends on [control=['if'], data=[]]
elif re.search('\\(s\\)', formula):
comp_obj = Composition(formula[:-3]) # depends on [control=['if'], data=[]]
else:
comp_obj = Composition(formula)
return comp_obj |
def load_yaml_file(i):
"""
Input: {
yaml_file - name of YAML file
}
Output: {
return - return code = 0, if successful
= 16, if file not found (may be warning)
> 0, if error
(error) - error text if return > 0
dict - dict from YAML file
}
"""
import yaml
fn=i['yaml_file']
try:
if sys.version_info[0]>2:
f=open(fn, 'r', encoding='utf8')
else:
f=open(fn, 'r')
except Exception as e:
return {'return':16, 'error':'problem opening YAML file='+fn+' ('+format(e)+')'}
try:
s=f.read()
except Exception as e:
f.close()
return {'return':1, 'error':'problem reading YAML file='+fn+' ('+format(e)+')'}
f.close()
try:
d=yaml.load(s)
except Exception as e:
return {'return':1, 'error':'problem parsing YAML from file='+fn+' ('+format(e)+')'}
return {'return':0, 'dict': d} | def function[load_yaml_file, parameter[i]]:
constant[
Input: {
yaml_file - name of YAML file
}
Output: {
return - return code = 0, if successful
= 16, if file not found (may be warning)
> 0, if error
(error) - error text if return > 0
dict - dict from YAML file
}
]
import module[yaml]
variable[fn] assign[=] call[name[i]][constant[yaml_file]]
<ast.Try object at 0x7da1b23ed900>
<ast.Try object at 0x7da1b23ef700>
call[name[f].close, parameter[]]
<ast.Try object at 0x7da1b222bd60>
return[dictionary[[<ast.Constant object at 0x7da1b2283eb0>, <ast.Constant object at 0x7da1b2282260>], [<ast.Constant object at 0x7da1b2280fd0>, <ast.Name object at 0x7da1b2282b60>]]] | keyword[def] identifier[load_yaml_file] ( identifier[i] ):
literal[string]
keyword[import] identifier[yaml]
identifier[fn] = identifier[i] [ literal[string] ]
keyword[try] :
keyword[if] identifier[sys] . identifier[version_info] [ literal[int] ]> literal[int] :
identifier[f] = identifier[open] ( identifier[fn] , literal[string] , identifier[encoding] = literal[string] )
keyword[else] :
identifier[f] = identifier[open] ( identifier[fn] , literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] { literal[string] : literal[int] , literal[string] : literal[string] + identifier[fn] + literal[string] + identifier[format] ( identifier[e] )+ literal[string] }
keyword[try] :
identifier[s] = identifier[f] . identifier[read] ()
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[f] . identifier[close] ()
keyword[return] { literal[string] : literal[int] , literal[string] : literal[string] + identifier[fn] + literal[string] + identifier[format] ( identifier[e] )+ literal[string] }
identifier[f] . identifier[close] ()
keyword[try] :
identifier[d] = identifier[yaml] . identifier[load] ( identifier[s] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[return] { literal[string] : literal[int] , literal[string] : literal[string] + identifier[fn] + literal[string] + identifier[format] ( identifier[e] )+ literal[string] }
keyword[return] { literal[string] : literal[int] , literal[string] : identifier[d] } | def load_yaml_file(i):
"""
Input: {
yaml_file - name of YAML file
}
Output: {
return - return code = 0, if successful
= 16, if file not found (may be warning)
> 0, if error
(error) - error text if return > 0
dict - dict from YAML file
}
"""
import yaml
fn = i['yaml_file']
try:
if sys.version_info[0] > 2:
f = open(fn, 'r', encoding='utf8') # depends on [control=['if'], data=[]]
else:
f = open(fn, 'r') # depends on [control=['try'], data=[]]
except Exception as e:
return {'return': 16, 'error': 'problem opening YAML file=' + fn + ' (' + format(e) + ')'} # depends on [control=['except'], data=['e']]
try:
s = f.read() # depends on [control=['try'], data=[]]
except Exception as e:
f.close()
return {'return': 1, 'error': 'problem reading YAML file=' + fn + ' (' + format(e) + ')'} # depends on [control=['except'], data=['e']]
f.close()
try:
d = yaml.load(s) # depends on [control=['try'], data=[]]
except Exception as e:
return {'return': 1, 'error': 'problem parsing YAML from file=' + fn + ' (' + format(e) + ')'} # depends on [control=['except'], data=['e']]
return {'return': 0, 'dict': d} |
def delete_namespaced_job(self, name, namespace, **kwargs): # noqa: E501
"""delete_namespaced_job # noqa: E501
delete a Job # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_job(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the \"orphan\" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:param V1DeleteOptions body:
:return: V1Status
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_namespaced_job_with_http_info(name, namespace, **kwargs) # noqa: E501
else:
(data) = self.delete_namespaced_job_with_http_info(name, namespace, **kwargs) # noqa: E501
return data | def function[delete_namespaced_job, parameter[self, name, namespace]]:
constant[delete_namespaced_job # noqa: E501
delete a Job # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_namespaced_job(name, namespace, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: name of the Job (required)
:param str namespace: object name and auth scope, such as for teams and projects (required)
:param str pretty: If 'true', then the output is pretty printed.
:param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
:param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.
:param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the "orphan" finalizer will be added to/removed from the object's finalizers list. Either this field or PropagationPolicy may be set, but not both.
:param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: 'Orphan' - orphan the dependents; 'Background' - allow the garbage collector to delete the dependents in the background; 'Foreground' - a cascading policy that deletes all dependents in the foreground.
:param V1DeleteOptions body:
:return: V1Status
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async_req]]] begin[:]
return[call[name[self].delete_namespaced_job_with_http_info, parameter[name[name], name[namespace]]]] | keyword[def] identifier[delete_namespaced_job] ( identifier[self] , identifier[name] , identifier[namespace] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[delete_namespaced_job_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[self] . identifier[delete_namespaced_job_with_http_info] ( identifier[name] , identifier[namespace] ,** identifier[kwargs] )
keyword[return] identifier[data] | def delete_namespaced_job(self, name, namespace, **kwargs): # noqa: E501
'delete_namespaced_job # noqa: E501\n\n delete a Job # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.delete_namespaced_job(name, namespace, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the Job (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param str pretty: If \'true\', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :param int grace_period_seconds: The duration in seconds before the object should be deleted. Value must be non-negative integer. The value zero indicates delete immediately. If this value is nil, the default grace period for the specified type will be used. Defaults to a per object value if not specified. zero means delete immediately.\n :param bool orphan_dependents: Deprecated: please use the PropagationPolicy, this field will be deprecated in 1.7. Should the dependent objects be orphaned. If true/false, the "orphan" finalizer will be added to/removed from the object\'s finalizers list. Either this field or PropagationPolicy may be set, but not both.\n :param str propagation_policy: Whether and how garbage collection will be performed. Either this field or OrphanDependents may be set, but not both. The default policy is decided by the existing finalizer set in the metadata.finalizers and the resource-specific default policy. Acceptable values are: \'Orphan\' - orphan the dependents; \'Background\' - allow the garbage collector to delete the dependents in the background; \'Foreground\' - a cascading policy that deletes all dependents in the foreground.\n :param V1DeleteOptions body:\n :return: V1Status\n If the method is called asynchronously,\n returns the request thread.\n '
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_namespaced_job_with_http_info(name, namespace, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]]
else:
data = self.delete_namespaced_job_with_http_info(name, namespace, **kwargs) # noqa: E501
return data |
def _attachChild(self, child):
"attach a child database, returning an identifier for it"
self._childCounter += 1
databaseName = 'child_db_%d' % (self._childCounter,)
self._attachedChildren[databaseName] = child
# ATTACH DATABASE statements can't use bind paramaters, blech.
self.executeSQL("ATTACH DATABASE '%s' AS %s" % (
child.dbdir.child('db.sqlite').path,
databaseName,))
return databaseName | def function[_attachChild, parameter[self, child]]:
constant[attach a child database, returning an identifier for it]
<ast.AugAssign object at 0x7da1b0d5b3d0>
variable[databaseName] assign[=] binary_operation[constant[child_db_%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0d59570>]]]
call[name[self]._attachedChildren][name[databaseName]] assign[=] name[child]
call[name[self].executeSQL, parameter[binary_operation[constant[ATTACH DATABASE '%s' AS %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b0d5a4d0>, <ast.Name object at 0x7da1b0d596f0>]]]]]
return[name[databaseName]] | keyword[def] identifier[_attachChild] ( identifier[self] , identifier[child] ):
literal[string]
identifier[self] . identifier[_childCounter] += literal[int]
identifier[databaseName] = literal[string] %( identifier[self] . identifier[_childCounter] ,)
identifier[self] . identifier[_attachedChildren] [ identifier[databaseName] ]= identifier[child]
identifier[self] . identifier[executeSQL] ( literal[string] %(
identifier[child] . identifier[dbdir] . identifier[child] ( literal[string] ). identifier[path] ,
identifier[databaseName] ,))
keyword[return] identifier[databaseName] | def _attachChild(self, child):
"""attach a child database, returning an identifier for it"""
self._childCounter += 1
databaseName = 'child_db_%d' % (self._childCounter,)
self._attachedChildren[databaseName] = child
# ATTACH DATABASE statements can't use bind paramaters, blech.
self.executeSQL("ATTACH DATABASE '%s' AS %s" % (child.dbdir.child('db.sqlite').path, databaseName))
return databaseName |
def modification_time(self):
"""dfdatetime.Filetime: modification time."""
timestamp = self._fsntfs_attribute.get_modification_time_as_integer()
return dfdatetime_filetime.Filetime(timestamp=timestamp) | def function[modification_time, parameter[self]]:
constant[dfdatetime.Filetime: modification time.]
variable[timestamp] assign[=] call[name[self]._fsntfs_attribute.get_modification_time_as_integer, parameter[]]
return[call[name[dfdatetime_filetime].Filetime, parameter[]]] | keyword[def] identifier[modification_time] ( identifier[self] ):
literal[string]
identifier[timestamp] = identifier[self] . identifier[_fsntfs_attribute] . identifier[get_modification_time_as_integer] ()
keyword[return] identifier[dfdatetime_filetime] . identifier[Filetime] ( identifier[timestamp] = identifier[timestamp] ) | def modification_time(self):
"""dfdatetime.Filetime: modification time."""
timestamp = self._fsntfs_attribute.get_modification_time_as_integer()
return dfdatetime_filetime.Filetime(timestamp=timestamp) |
def run_agent(device_type):
"""Run a simple GPG-agent server."""
p = argparse.ArgumentParser()
p.add_argument('--homedir', default=os.environ.get('GNUPGHOME'))
p.add_argument('-v', '--verbose', default=0, action='count')
p.add_argument('--server', default=False, action='store_true',
help='Use stdin/stdout for communication with GPG.')
p.add_argument('--pin-entry-binary', type=str, default='pinentry',
help='Path to PIN entry UI helper.')
p.add_argument('--passphrase-entry-binary', type=str, default='pinentry',
help='Path to passphrase entry UI helper.')
p.add_argument('--cache-expiry-seconds', type=float, default=float('inf'),
help='Expire passphrase from cache after this duration.')
args, _ = p.parse_known_args()
assert args.homedir
log_file = os.path.join(args.homedir, 'gpg-agent.log')
util.setup_logging(verbosity=args.verbose, filename=log_file)
log.debug('sys.argv: %s', sys.argv)
log.debug('os.environ: %s', os.environ)
log.debug('pid: %d, parent pid: %d', os.getpid(), os.getppid())
try:
env = {'GNUPGHOME': args.homedir, 'PATH': os.environ['PATH']}
pubkey_bytes = keyring.export_public_keys(env=env)
device_type.ui = device.ui.UI(device_type=device_type,
config=vars(args))
device_type.ui.cached_passphrase_ack = util.ExpiringCache(
seconds=float(args.cache_expiry_seconds))
handler = agent.Handler(device=device_type(),
pubkey_bytes=pubkey_bytes)
sock_server = _server_from_assuan_fd(os.environ)
if sock_server is None:
sock_server = _server_from_sock_path(env)
with sock_server as sock:
for conn in agent.yield_connections(sock):
with contextlib.closing(conn):
try:
handler.handle(conn)
except agent.AgentStop:
log.info('stopping gpg-agent')
return
except IOError as e:
log.info('connection closed: %s', e)
return
except Exception as e: # pylint: disable=broad-except
log.exception('handler failed: %s', e)
except Exception as e: # pylint: disable=broad-except
log.exception('gpg-agent failed: %s', e) | def function[run_agent, parameter[device_type]]:
constant[Run a simple GPG-agent server.]
variable[p] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[p].add_argument, parameter[constant[--homedir]]]
call[name[p].add_argument, parameter[constant[-v], constant[--verbose]]]
call[name[p].add_argument, parameter[constant[--server]]]
call[name[p].add_argument, parameter[constant[--pin-entry-binary]]]
call[name[p].add_argument, parameter[constant[--passphrase-entry-binary]]]
call[name[p].add_argument, parameter[constant[--cache-expiry-seconds]]]
<ast.Tuple object at 0x7da1b1296c20> assign[=] call[name[p].parse_known_args, parameter[]]
assert[name[args].homedir]
variable[log_file] assign[=] call[name[os].path.join, parameter[name[args].homedir, constant[gpg-agent.log]]]
call[name[util].setup_logging, parameter[]]
call[name[log].debug, parameter[constant[sys.argv: %s], name[sys].argv]]
call[name[log].debug, parameter[constant[os.environ: %s], name[os].environ]]
call[name[log].debug, parameter[constant[pid: %d, parent pid: %d], call[name[os].getpid, parameter[]], call[name[os].getppid, parameter[]]]]
<ast.Try object at 0x7da1b12a8ac0> | keyword[def] identifier[run_agent] ( identifier[device_type] ):
literal[string]
identifier[p] = identifier[argparse] . identifier[ArgumentParser] ()
identifier[p] . identifier[add_argument] ( literal[string] , identifier[default] = identifier[os] . identifier[environ] . identifier[get] ( literal[string] ))
identifier[p] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[int] , identifier[action] = literal[string] )
identifier[p] . identifier[add_argument] ( literal[string] , identifier[default] = keyword[False] , identifier[action] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[default] = literal[string] ,
identifier[help] = literal[string] )
identifier[p] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[float] , identifier[default] = identifier[float] ( literal[string] ),
identifier[help] = literal[string] )
identifier[args] , identifier[_] = identifier[p] . identifier[parse_known_args] ()
keyword[assert] identifier[args] . identifier[homedir]
identifier[log_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[args] . identifier[homedir] , literal[string] )
identifier[util] . identifier[setup_logging] ( identifier[verbosity] = identifier[args] . identifier[verbose] , identifier[filename] = identifier[log_file] )
identifier[log] . identifier[debug] ( literal[string] , identifier[sys] . identifier[argv] )
identifier[log] . identifier[debug] ( literal[string] , identifier[os] . identifier[environ] )
identifier[log] . identifier[debug] ( literal[string] , identifier[os] . identifier[getpid] (), identifier[os] . identifier[getppid] ())
keyword[try] :
identifier[env] ={ literal[string] : identifier[args] . identifier[homedir] , literal[string] : identifier[os] . identifier[environ] [ literal[string] ]}
identifier[pubkey_bytes] = identifier[keyring] . identifier[export_public_keys] ( identifier[env] = identifier[env] )
identifier[device_type] . identifier[ui] = identifier[device] . identifier[ui] . identifier[UI] ( identifier[device_type] = identifier[device_type] ,
identifier[config] = identifier[vars] ( identifier[args] ))
identifier[device_type] . identifier[ui] . identifier[cached_passphrase_ack] = identifier[util] . identifier[ExpiringCache] (
identifier[seconds] = identifier[float] ( identifier[args] . identifier[cache_expiry_seconds] ))
identifier[handler] = identifier[agent] . identifier[Handler] ( identifier[device] = identifier[device_type] (),
identifier[pubkey_bytes] = identifier[pubkey_bytes] )
identifier[sock_server] = identifier[_server_from_assuan_fd] ( identifier[os] . identifier[environ] )
keyword[if] identifier[sock_server] keyword[is] keyword[None] :
identifier[sock_server] = identifier[_server_from_sock_path] ( identifier[env] )
keyword[with] identifier[sock_server] keyword[as] identifier[sock] :
keyword[for] identifier[conn] keyword[in] identifier[agent] . identifier[yield_connections] ( identifier[sock] ):
keyword[with] identifier[contextlib] . identifier[closing] ( identifier[conn] ):
keyword[try] :
identifier[handler] . identifier[handle] ( identifier[conn] )
keyword[except] identifier[agent] . identifier[AgentStop] :
identifier[log] . identifier[info] ( literal[string] )
keyword[return]
keyword[except] identifier[IOError] keyword[as] identifier[e] :
identifier[log] . identifier[info] ( literal[string] , identifier[e] )
keyword[return]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] , identifier[e] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[log] . identifier[exception] ( literal[string] , identifier[e] ) | def run_agent(device_type):
"""Run a simple GPG-agent server."""
p = argparse.ArgumentParser()
p.add_argument('--homedir', default=os.environ.get('GNUPGHOME'))
p.add_argument('-v', '--verbose', default=0, action='count')
p.add_argument('--server', default=False, action='store_true', help='Use stdin/stdout for communication with GPG.')
p.add_argument('--pin-entry-binary', type=str, default='pinentry', help='Path to PIN entry UI helper.')
p.add_argument('--passphrase-entry-binary', type=str, default='pinentry', help='Path to passphrase entry UI helper.')
p.add_argument('--cache-expiry-seconds', type=float, default=float('inf'), help='Expire passphrase from cache after this duration.')
(args, _) = p.parse_known_args()
assert args.homedir
log_file = os.path.join(args.homedir, 'gpg-agent.log')
util.setup_logging(verbosity=args.verbose, filename=log_file)
log.debug('sys.argv: %s', sys.argv)
log.debug('os.environ: %s', os.environ)
log.debug('pid: %d, parent pid: %d', os.getpid(), os.getppid())
try:
env = {'GNUPGHOME': args.homedir, 'PATH': os.environ['PATH']}
pubkey_bytes = keyring.export_public_keys(env=env)
device_type.ui = device.ui.UI(device_type=device_type, config=vars(args))
device_type.ui.cached_passphrase_ack = util.ExpiringCache(seconds=float(args.cache_expiry_seconds))
handler = agent.Handler(device=device_type(), pubkey_bytes=pubkey_bytes)
sock_server = _server_from_assuan_fd(os.environ)
if sock_server is None:
sock_server = _server_from_sock_path(env) # depends on [control=['if'], data=['sock_server']]
with sock_server as sock:
for conn in agent.yield_connections(sock):
with contextlib.closing(conn):
try:
handler.handle(conn) # depends on [control=['try'], data=[]]
except agent.AgentStop:
log.info('stopping gpg-agent')
return # depends on [control=['except'], data=[]]
except IOError as e:
log.info('connection closed: %s', e)
return # depends on [control=['except'], data=['e']]
except Exception as e: # pylint: disable=broad-except
log.exception('handler failed: %s', e) # depends on [control=['except'], data=['e']] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['conn']] # depends on [control=['with'], data=['sock']] # depends on [control=['try'], data=[]]
except Exception as e: # pylint: disable=broad-except
log.exception('gpg-agent failed: %s', e) # depends on [control=['except'], data=['e']] |
def build_url(server_context, controller, action, container_path=None):
# type: (ServerContext, str, str, str) -> str
"""
Builds a URL from a controller and an action. Users the server context to determine domain,
context path, container, etc.
:param server_context: A LabKey server context. See utils.create_server_context.
:param controller: The controller to use in building the URL
:param action: The action to use in building the URL
:param container_path:
:return:
"""
return server_context.build_url(controller, action, container_path=container_path) | def function[build_url, parameter[server_context, controller, action, container_path]]:
constant[
Builds a URL from a controller and an action. Users the server context to determine domain,
context path, container, etc.
:param server_context: A LabKey server context. See utils.create_server_context.
:param controller: The controller to use in building the URL
:param action: The action to use in building the URL
:param container_path:
:return:
]
return[call[name[server_context].build_url, parameter[name[controller], name[action]]]] | keyword[def] identifier[build_url] ( identifier[server_context] , identifier[controller] , identifier[action] , identifier[container_path] = keyword[None] ):
literal[string]
keyword[return] identifier[server_context] . identifier[build_url] ( identifier[controller] , identifier[action] , identifier[container_path] = identifier[container_path] ) | def build_url(server_context, controller, action, container_path=None):
# type: (ServerContext, str, str, str) -> str
'\n Builds a URL from a controller and an action. Users the server context to determine domain,\n context path, container, etc.\n :param server_context: A LabKey server context. See utils.create_server_context.\n :param controller: The controller to use in building the URL\n :param action: The action to use in building the URL\n :param container_path:\n :return:\n '
return server_context.build_url(controller, action, container_path=container_path) |
def default_output_format(content_type='application/json', apply_globally=False, api=None, cli=False, http=True):
"""A decorator that allows you to override the default output format for an API"""
def decorator(formatter):
formatter = hug.output_format.content_type(content_type)(formatter)
if apply_globally:
if http:
hug.defaults.output_format = formatter
if cli:
hug.defaults.cli_output_format = formatter
else:
apply_to_api = hug.API(api) if api else hug.api.from_object(formatter)
if http:
apply_to_api.http.output_format = formatter
if cli:
apply_to_api.cli.output_format = formatter
return formatter
return decorator | def function[default_output_format, parameter[content_type, apply_globally, api, cli, http]]:
constant[A decorator that allows you to override the default output format for an API]
def function[decorator, parameter[formatter]]:
variable[formatter] assign[=] call[call[name[hug].output_format.content_type, parameter[name[content_type]]], parameter[name[formatter]]]
if name[apply_globally] begin[:]
if name[http] begin[:]
name[hug].defaults.output_format assign[=] name[formatter]
if name[cli] begin[:]
name[hug].defaults.cli_output_format assign[=] name[formatter]
return[name[formatter]]
return[name[decorator]] | keyword[def] identifier[default_output_format] ( identifier[content_type] = literal[string] , identifier[apply_globally] = keyword[False] , identifier[api] = keyword[None] , identifier[cli] = keyword[False] , identifier[http] = keyword[True] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[formatter] ):
identifier[formatter] = identifier[hug] . identifier[output_format] . identifier[content_type] ( identifier[content_type] )( identifier[formatter] )
keyword[if] identifier[apply_globally] :
keyword[if] identifier[http] :
identifier[hug] . identifier[defaults] . identifier[output_format] = identifier[formatter]
keyword[if] identifier[cli] :
identifier[hug] . identifier[defaults] . identifier[cli_output_format] = identifier[formatter]
keyword[else] :
identifier[apply_to_api] = identifier[hug] . identifier[API] ( identifier[api] ) keyword[if] identifier[api] keyword[else] identifier[hug] . identifier[api] . identifier[from_object] ( identifier[formatter] )
keyword[if] identifier[http] :
identifier[apply_to_api] . identifier[http] . identifier[output_format] = identifier[formatter]
keyword[if] identifier[cli] :
identifier[apply_to_api] . identifier[cli] . identifier[output_format] = identifier[formatter]
keyword[return] identifier[formatter]
keyword[return] identifier[decorator] | def default_output_format(content_type='application/json', apply_globally=False, api=None, cli=False, http=True):
"""A decorator that allows you to override the default output format for an API"""
def decorator(formatter):
formatter = hug.output_format.content_type(content_type)(formatter)
if apply_globally:
if http:
hug.defaults.output_format = formatter # depends on [control=['if'], data=[]]
if cli:
hug.defaults.cli_output_format = formatter # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
apply_to_api = hug.API(api) if api else hug.api.from_object(formatter)
if http:
apply_to_api.http.output_format = formatter # depends on [control=['if'], data=[]]
if cli:
apply_to_api.cli.output_format = formatter # depends on [control=['if'], data=[]]
return formatter
return decorator |
def get_api_key(client, username, secret):
"""Attempts API-Key and password auth to get an API key.
This will also generate an API key if one doesn't exist
"""
# Try to use a client with username/api key
if len(secret) == 64:
try:
client['Account'].getCurrentUser()
return secret
except SoftLayer.SoftLayerAPIError as ex:
if 'invalid api token' not in ex.faultString.lower():
raise
else:
# Try to use a client with username/password
client.authenticate_with_password(username, secret)
user_record = client['Account'].getCurrentUser(mask='id, apiAuthenticationKeys')
api_keys = user_record['apiAuthenticationKeys']
if len(api_keys) == 0:
return client['User_Customer'].addApiAuthenticationKey(id=user_record['id'])
return api_keys[0]['authenticationKey'] | def function[get_api_key, parameter[client, username, secret]]:
constant[Attempts API-Key and password auth to get an API key.
This will also generate an API key if one doesn't exist
]
if compare[call[name[len], parameter[name[secret]]] equal[==] constant[64]] begin[:]
<ast.Try object at 0x7da18f58fca0> | keyword[def] identifier[get_api_key] ( identifier[client] , identifier[username] , identifier[secret] ):
literal[string]
keyword[if] identifier[len] ( identifier[secret] )== literal[int] :
keyword[try] :
identifier[client] [ literal[string] ]. identifier[getCurrentUser] ()
keyword[return] identifier[secret]
keyword[except] identifier[SoftLayer] . identifier[SoftLayerAPIError] keyword[as] identifier[ex] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[ex] . identifier[faultString] . identifier[lower] ():
keyword[raise]
keyword[else] :
identifier[client] . identifier[authenticate_with_password] ( identifier[username] , identifier[secret] )
identifier[user_record] = identifier[client] [ literal[string] ]. identifier[getCurrentUser] ( identifier[mask] = literal[string] )
identifier[api_keys] = identifier[user_record] [ literal[string] ]
keyword[if] identifier[len] ( identifier[api_keys] )== literal[int] :
keyword[return] identifier[client] [ literal[string] ]. identifier[addApiAuthenticationKey] ( identifier[id] = identifier[user_record] [ literal[string] ])
keyword[return] identifier[api_keys] [ literal[int] ][ literal[string] ] | def get_api_key(client, username, secret):
"""Attempts API-Key and password auth to get an API key.
This will also generate an API key if one doesn't exist
"""
# Try to use a client with username/api key
if len(secret) == 64:
try:
client['Account'].getCurrentUser()
return secret # depends on [control=['try'], data=[]]
except SoftLayer.SoftLayerAPIError as ex:
if 'invalid api token' not in ex.faultString.lower():
raise # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['ex']] # depends on [control=['if'], data=[]]
else:
# Try to use a client with username/password
client.authenticate_with_password(username, secret)
user_record = client['Account'].getCurrentUser(mask='id, apiAuthenticationKeys')
api_keys = user_record['apiAuthenticationKeys']
if len(api_keys) == 0:
return client['User_Customer'].addApiAuthenticationKey(id=user_record['id']) # depends on [control=['if'], data=[]]
return api_keys[0]['authenticationKey'] |
def learn(self, initial_state_key, limit=1000, game_n=1):
'''
Multi-Agent Learning.
Override.
Args:
initial_state_key: Initial state.
limit: Limit of the number of learning.
game_n: The number of games.
'''
end_flag = False
state_key_list = [None] * len(self.q_learning_list)
action_key_list = [None] * len(self.q_learning_list)
next_action_key_list = [None] * len(self.q_learning_list)
for game in range(game_n):
state_key = initial_state_key
self.t = 1
while self.t <= limit:
for i in range(len(self.q_learning_list)):
state_key_list[i] = state_key
if game + 1 == game_n:
self.state_key_list.append(tuple(i, state_key_list))
self.q_learning_list[i].t = self.t
next_action_list = self.q_learning_list[i].extract_possible_actions(tuple(i, state_key_list))
if len(next_action_list):
action_key = self.q_learning_list[i].select_action(
state_key=tuple(i, state_key_list),
next_action_list=next_action_list
)
action_key_list[i] = action_key
reward_value = self.q_learning_list[i].observe_reward_value(
tuple(i, state_key_list),
tuple(i, action_key_list)
)
# Check.
if self.q_learning_list[i].check_the_end_flag(tuple(i, state_key_list)) is True:
end_flag = True
# Max-Q-Value in next action time.
next_next_action_list = self.q_learning_list[i].extract_possible_actions(
tuple(i, action_key_list)
)
if len(next_next_action_list):
next_action_key = self.q_learning_list[i].predict_next_action(
tuple(i, action_key_list),
next_next_action_list
)
next_action_key_list[i] = next_action_key
next_max_q = self.q_learning_list[i].extract_q_df(
tuple(i, action_key_list),
next_action_key
)
# Update Q-Value.
self.q_learning_list[i].update_q(
state_key=tuple(i, state_key_list),
action_key=tuple(i, action_key_list),
reward_value=reward_value,
next_max_q=next_max_q
)
# Update State.
state_key = self.q_learning_list[i].update_state(
state_key=tuple(i, state_key_list),
action_key=tuple(i, action_key_list)
)
state_key_list[i] = state_key
# Epsode.
self.t += 1
self.q_learning_list[i].t = self.t
if end_flag is True:
break | def function[learn, parameter[self, initial_state_key, limit, game_n]]:
constant[
Multi-Agent Learning.
Override.
Args:
initial_state_key: Initial state.
limit: Limit of the number of learning.
game_n: The number of games.
]
variable[end_flag] assign[=] constant[False]
variable[state_key_list] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b07ad630>]] * call[name[len], parameter[name[self].q_learning_list]]]
variable[action_key_list] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b07af460>]] * call[name[len], parameter[name[self].q_learning_list]]]
variable[next_action_key_list] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b07ad360>]] * call[name[len], parameter[name[self].q_learning_list]]]
for taget[name[game]] in starred[call[name[range], parameter[name[game_n]]]] begin[:]
variable[state_key] assign[=] name[initial_state_key]
name[self].t assign[=] constant[1]
while compare[name[self].t less_or_equal[<=] name[limit]] begin[:]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].q_learning_list]]]]] begin[:]
call[name[state_key_list]][name[i]] assign[=] name[state_key]
if compare[binary_operation[name[game] + constant[1]] equal[==] name[game_n]] begin[:]
call[name[self].state_key_list.append, parameter[call[name[tuple], parameter[name[i], name[state_key_list]]]]]
call[name[self].q_learning_list][name[i]].t assign[=] name[self].t
variable[next_action_list] assign[=] call[call[name[self].q_learning_list][name[i]].extract_possible_actions, parameter[call[name[tuple], parameter[name[i], name[state_key_list]]]]]
if call[name[len], parameter[name[next_action_list]]] begin[:]
variable[action_key] assign[=] call[call[name[self].q_learning_list][name[i]].select_action, parameter[]]
call[name[action_key_list]][name[i]] assign[=] name[action_key]
variable[reward_value] assign[=] call[call[name[self].q_learning_list][name[i]].observe_reward_value, parameter[call[name[tuple], parameter[name[i], name[state_key_list]]], call[name[tuple], parameter[name[i], name[action_key_list]]]]]
if compare[call[call[name[self].q_learning_list][name[i]].check_the_end_flag, parameter[call[name[tuple], parameter[name[i], name[state_key_list]]]]] is constant[True]] begin[:]
variable[end_flag] assign[=] constant[True]
variable[next_next_action_list] assign[=] call[call[name[self].q_learning_list][name[i]].extract_possible_actions, parameter[call[name[tuple], parameter[name[i], name[action_key_list]]]]]
if call[name[len], parameter[name[next_next_action_list]]] begin[:]
variable[next_action_key] assign[=] call[call[name[self].q_learning_list][name[i]].predict_next_action, parameter[call[name[tuple], parameter[name[i], name[action_key_list]]], name[next_next_action_list]]]
call[name[next_action_key_list]][name[i]] assign[=] name[next_action_key]
variable[next_max_q] assign[=] call[call[name[self].q_learning_list][name[i]].extract_q_df, parameter[call[name[tuple], parameter[name[i], name[action_key_list]]], name[next_action_key]]]
call[call[name[self].q_learning_list][name[i]].update_q, parameter[]]
variable[state_key] assign[=] call[call[name[self].q_learning_list][name[i]].update_state, parameter[]]
call[name[state_key_list]][name[i]] assign[=] name[state_key]
<ast.AugAssign object at 0x7da204963430>
call[name[self].q_learning_list][name[i]].t assign[=] name[self].t
if compare[name[end_flag] is constant[True]] begin[:]
break | keyword[def] identifier[learn] ( identifier[self] , identifier[initial_state_key] , identifier[limit] = literal[int] , identifier[game_n] = literal[int] ):
literal[string]
identifier[end_flag] = keyword[False]
identifier[state_key_list] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[q_learning_list] )
identifier[action_key_list] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[q_learning_list] )
identifier[next_action_key_list] =[ keyword[None] ]* identifier[len] ( identifier[self] . identifier[q_learning_list] )
keyword[for] identifier[game] keyword[in] identifier[range] ( identifier[game_n] ):
identifier[state_key] = identifier[initial_state_key]
identifier[self] . identifier[t] = literal[int]
keyword[while] identifier[self] . identifier[t] <= identifier[limit] :
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[q_learning_list] )):
identifier[state_key_list] [ identifier[i] ]= identifier[state_key]
keyword[if] identifier[game] + literal[int] == identifier[game_n] :
identifier[self] . identifier[state_key_list] . identifier[append] ( identifier[tuple] ( identifier[i] , identifier[state_key_list] ))
identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[t] = identifier[self] . identifier[t]
identifier[next_action_list] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[extract_possible_actions] ( identifier[tuple] ( identifier[i] , identifier[state_key_list] ))
keyword[if] identifier[len] ( identifier[next_action_list] ):
identifier[action_key] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[select_action] (
identifier[state_key] = identifier[tuple] ( identifier[i] , identifier[state_key_list] ),
identifier[next_action_list] = identifier[next_action_list]
)
identifier[action_key_list] [ identifier[i] ]= identifier[action_key]
identifier[reward_value] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[observe_reward_value] (
identifier[tuple] ( identifier[i] , identifier[state_key_list] ),
identifier[tuple] ( identifier[i] , identifier[action_key_list] )
)
keyword[if] identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[check_the_end_flag] ( identifier[tuple] ( identifier[i] , identifier[state_key_list] )) keyword[is] keyword[True] :
identifier[end_flag] = keyword[True]
identifier[next_next_action_list] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[extract_possible_actions] (
identifier[tuple] ( identifier[i] , identifier[action_key_list] )
)
keyword[if] identifier[len] ( identifier[next_next_action_list] ):
identifier[next_action_key] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[predict_next_action] (
identifier[tuple] ( identifier[i] , identifier[action_key_list] ),
identifier[next_next_action_list]
)
identifier[next_action_key_list] [ identifier[i] ]= identifier[next_action_key]
identifier[next_max_q] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[extract_q_df] (
identifier[tuple] ( identifier[i] , identifier[action_key_list] ),
identifier[next_action_key]
)
identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[update_q] (
identifier[state_key] = identifier[tuple] ( identifier[i] , identifier[state_key_list] ),
identifier[action_key] = identifier[tuple] ( identifier[i] , identifier[action_key_list] ),
identifier[reward_value] = identifier[reward_value] ,
identifier[next_max_q] = identifier[next_max_q]
)
identifier[state_key] = identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[update_state] (
identifier[state_key] = identifier[tuple] ( identifier[i] , identifier[state_key_list] ),
identifier[action_key] = identifier[tuple] ( identifier[i] , identifier[action_key_list] )
)
identifier[state_key_list] [ identifier[i] ]= identifier[state_key]
identifier[self] . identifier[t] += literal[int]
identifier[self] . identifier[q_learning_list] [ identifier[i] ]. identifier[t] = identifier[self] . identifier[t]
keyword[if] identifier[end_flag] keyword[is] keyword[True] :
keyword[break] | def learn(self, initial_state_key, limit=1000, game_n=1):
"""
Multi-Agent Learning.
Override.
Args:
initial_state_key: Initial state.
limit: Limit of the number of learning.
game_n: The number of games.
"""
end_flag = False
state_key_list = [None] * len(self.q_learning_list)
action_key_list = [None] * len(self.q_learning_list)
next_action_key_list = [None] * len(self.q_learning_list)
for game in range(game_n):
state_key = initial_state_key
self.t = 1
while self.t <= limit:
for i in range(len(self.q_learning_list)):
state_key_list[i] = state_key
if game + 1 == game_n:
self.state_key_list.append(tuple(i, state_key_list)) # depends on [control=['if'], data=[]]
self.q_learning_list[i].t = self.t
next_action_list = self.q_learning_list[i].extract_possible_actions(tuple(i, state_key_list))
if len(next_action_list):
action_key = self.q_learning_list[i].select_action(state_key=tuple(i, state_key_list), next_action_list=next_action_list)
action_key_list[i] = action_key
reward_value = self.q_learning_list[i].observe_reward_value(tuple(i, state_key_list), tuple(i, action_key_list))
# Check.
if self.q_learning_list[i].check_the_end_flag(tuple(i, state_key_list)) is True:
end_flag = True # depends on [control=['if'], data=[]]
# Max-Q-Value in next action time.
next_next_action_list = self.q_learning_list[i].extract_possible_actions(tuple(i, action_key_list))
if len(next_next_action_list):
next_action_key = self.q_learning_list[i].predict_next_action(tuple(i, action_key_list), next_next_action_list)
next_action_key_list[i] = next_action_key
next_max_q = self.q_learning_list[i].extract_q_df(tuple(i, action_key_list), next_action_key)
# Update Q-Value.
self.q_learning_list[i].update_q(state_key=tuple(i, state_key_list), action_key=tuple(i, action_key_list), reward_value=reward_value, next_max_q=next_max_q)
# Update State.
state_key = self.q_learning_list[i].update_state(state_key=tuple(i, state_key_list), action_key=tuple(i, action_key_list))
state_key_list[i] = state_key # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Epsode.
self.t += 1
self.q_learning_list[i].t = self.t
if end_flag is True:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['while'], data=[]] # depends on [control=['for'], data=['game']] |
def ida_spawn(ida_binary, filename, port=18861, mode='oneshot',
processor_type=None, logfile=None):
"""
Open IDA on the the file we want to analyse.
:param ida_binary: The binary name or path to ida
:param filename: The filename to open in IDA
:param port: The port on which to serve rpc from ida
:param mode: The server mode. "oneshot" to close ida when the connection is closed, or
"threaded" to run IDA visible to the user and allow multiple connections
:param processor_type:
Which processor IDA should analyze this binary as, e.g. "metapc". If not
provided, IDA will guess.
:param logfile: The file to log IDA's output to. Default /tmp/idalink-{port}.log
"""
ida_progname = _which(ida_binary)
if ida_progname is None:
raise IDALinkError('Could not find executable %s' % ida_binary)
if mode not in ('oneshot', 'threaded'):
raise ValueError("Bad mode %s" % mode)
if logfile is None:
logfile = LOGFILE.format(port=port)
ida_realpath = os.path.expanduser(ida_progname)
file_realpath = os.path.realpath(os.path.expanduser(filename))
server_script = os.path.join(MODULE_DIR, 'server.py')
LOG.info('Launching IDA (%s) on %s, listening on port %d, logging to %s',
ida_realpath, file_realpath, port, logfile)
env = dict(os.environ)
if mode == 'oneshot':
env['TVHEADLESS'] = '1'
if sys.platform == "darwin":
# If we are running in a virtual environment, which we should, we need
# to insert the python lib into the launched process in order for IDA
# to not default back to the Apple-installed python because of the use
# of paths in library identifiers on macOS.
if "VIRTUAL_ENV" in os.environ:
env['DYLD_INSERT_LIBRARIES'] = os.environ['VIRTUAL_ENV'] + '/.Python'
# The parameters are:
# -A Automatic mode
# -S Run a script (our server script)
# -L Log all output to our logfile
# -p Set the processor type
command = [
ida_realpath,
'-A',
'-S%s %d %s' % (server_script, port, mode),
'-L%s' % logfile,
]
if processor_type is not None:
command.append('-p%s' % processor_type)
command.append(file_realpath)
LOG.debug('IDA command is %s', ' '.join("%s" % s for s in command))
return subprocess.Popen(command, env=env) | def function[ida_spawn, parameter[ida_binary, filename, port, mode, processor_type, logfile]]:
constant[
Open IDA on the the file we want to analyse.
:param ida_binary: The binary name or path to ida
:param filename: The filename to open in IDA
:param port: The port on which to serve rpc from ida
:param mode: The server mode. "oneshot" to close ida when the connection is closed, or
"threaded" to run IDA visible to the user and allow multiple connections
:param processor_type:
Which processor IDA should analyze this binary as, e.g. "metapc". If not
provided, IDA will guess.
:param logfile: The file to log IDA's output to. Default /tmp/idalink-{port}.log
]
variable[ida_progname] assign[=] call[name[_which], parameter[name[ida_binary]]]
if compare[name[ida_progname] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b2344850>
if compare[name[mode] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b2345b70>, <ast.Constant object at 0x7da1b2345e70>]]] begin[:]
<ast.Raise object at 0x7da1b2344e20>
if compare[name[logfile] is constant[None]] begin[:]
variable[logfile] assign[=] call[name[LOGFILE].format, parameter[]]
variable[ida_realpath] assign[=] call[name[os].path.expanduser, parameter[name[ida_progname]]]
variable[file_realpath] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.expanduser, parameter[name[filename]]]]]
variable[server_script] assign[=] call[name[os].path.join, parameter[name[MODULE_DIR], constant[server.py]]]
call[name[LOG].info, parameter[constant[Launching IDA (%s) on %s, listening on port %d, logging to %s], name[ida_realpath], name[file_realpath], name[port], name[logfile]]]
variable[env] assign[=] call[name[dict], parameter[name[os].environ]]
if compare[name[mode] equal[==] constant[oneshot]] begin[:]
call[name[env]][constant[TVHEADLESS]] assign[=] constant[1]
if compare[name[sys].platform equal[==] constant[darwin]] begin[:]
if compare[constant[VIRTUAL_ENV] in name[os].environ] begin[:]
call[name[env]][constant[DYLD_INSERT_LIBRARIES]] assign[=] binary_operation[call[name[os].environ][constant[VIRTUAL_ENV]] + constant[/.Python]]
variable[command] assign[=] list[[<ast.Name object at 0x7da1b2345ed0>, <ast.Constant object at 0x7da1b23465c0>, <ast.BinOp object at 0x7da1b2347ac0>, <ast.BinOp object at 0x7da1b2346c20>]]
if compare[name[processor_type] is_not constant[None]] begin[:]
call[name[command].append, parameter[binary_operation[constant[-p%s] <ast.Mod object at 0x7da2590d6920> name[processor_type]]]]
call[name[command].append, parameter[name[file_realpath]]]
call[name[LOG].debug, parameter[constant[IDA command is %s], call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b2346a70>]]]]
return[call[name[subprocess].Popen, parameter[name[command]]]] | keyword[def] identifier[ida_spawn] ( identifier[ida_binary] , identifier[filename] , identifier[port] = literal[int] , identifier[mode] = literal[string] ,
identifier[processor_type] = keyword[None] , identifier[logfile] = keyword[None] ):
literal[string]
identifier[ida_progname] = identifier[_which] ( identifier[ida_binary] )
keyword[if] identifier[ida_progname] keyword[is] keyword[None] :
keyword[raise] identifier[IDALinkError] ( literal[string] % identifier[ida_binary] )
keyword[if] identifier[mode] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] % identifier[mode] )
keyword[if] identifier[logfile] keyword[is] keyword[None] :
identifier[logfile] = identifier[LOGFILE] . identifier[format] ( identifier[port] = identifier[port] )
identifier[ida_realpath] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[ida_progname] )
identifier[file_realpath] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[filename] ))
identifier[server_script] = identifier[os] . identifier[path] . identifier[join] ( identifier[MODULE_DIR] , literal[string] )
identifier[LOG] . identifier[info] ( literal[string] ,
identifier[ida_realpath] , identifier[file_realpath] , identifier[port] , identifier[logfile] )
identifier[env] = identifier[dict] ( identifier[os] . identifier[environ] )
keyword[if] identifier[mode] == literal[string] :
identifier[env] [ literal[string] ]= literal[string]
keyword[if] identifier[sys] . identifier[platform] == literal[string] :
keyword[if] literal[string] keyword[in] identifier[os] . identifier[environ] :
identifier[env] [ literal[string] ]= identifier[os] . identifier[environ] [ literal[string] ]+ literal[string]
identifier[command] =[
identifier[ida_realpath] ,
literal[string] ,
literal[string] %( identifier[server_script] , identifier[port] , identifier[mode] ),
literal[string] % identifier[logfile] ,
]
keyword[if] identifier[processor_type] keyword[is] keyword[not] keyword[None] :
identifier[command] . identifier[append] ( literal[string] % identifier[processor_type] )
identifier[command] . identifier[append] ( identifier[file_realpath] )
identifier[LOG] . identifier[debug] ( literal[string] , literal[string] . identifier[join] ( literal[string] % identifier[s] keyword[for] identifier[s] keyword[in] identifier[command] ))
keyword[return] identifier[subprocess] . identifier[Popen] ( identifier[command] , identifier[env] = identifier[env] ) | def ida_spawn(ida_binary, filename, port=18861, mode='oneshot', processor_type=None, logfile=None):
"""
Open IDA on the the file we want to analyse.
:param ida_binary: The binary name or path to ida
:param filename: The filename to open in IDA
:param port: The port on which to serve rpc from ida
:param mode: The server mode. "oneshot" to close ida when the connection is closed, or
"threaded" to run IDA visible to the user and allow multiple connections
:param processor_type:
Which processor IDA should analyze this binary as, e.g. "metapc". If not
provided, IDA will guess.
:param logfile: The file to log IDA's output to. Default /tmp/idalink-{port}.log
"""
ida_progname = _which(ida_binary)
if ida_progname is None:
raise IDALinkError('Could not find executable %s' % ida_binary) # depends on [control=['if'], data=[]]
if mode not in ('oneshot', 'threaded'):
raise ValueError('Bad mode %s' % mode) # depends on [control=['if'], data=['mode']]
if logfile is None:
logfile = LOGFILE.format(port=port) # depends on [control=['if'], data=['logfile']]
ida_realpath = os.path.expanduser(ida_progname)
file_realpath = os.path.realpath(os.path.expanduser(filename))
server_script = os.path.join(MODULE_DIR, 'server.py')
LOG.info('Launching IDA (%s) on %s, listening on port %d, logging to %s', ida_realpath, file_realpath, port, logfile)
env = dict(os.environ)
if mode == 'oneshot':
env['TVHEADLESS'] = '1' # depends on [control=['if'], data=[]]
if sys.platform == 'darwin':
# If we are running in a virtual environment, which we should, we need
# to insert the python lib into the launched process in order for IDA
# to not default back to the Apple-installed python because of the use
# of paths in library identifiers on macOS.
if 'VIRTUAL_ENV' in os.environ:
env['DYLD_INSERT_LIBRARIES'] = os.environ['VIRTUAL_ENV'] + '/.Python' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# The parameters are:
# -A Automatic mode
# -S Run a script (our server script)
# -L Log all output to our logfile
# -p Set the processor type
command = [ida_realpath, '-A', '-S%s %d %s' % (server_script, port, mode), '-L%s' % logfile]
if processor_type is not None:
command.append('-p%s' % processor_type) # depends on [control=['if'], data=['processor_type']]
command.append(file_realpath)
LOG.debug('IDA command is %s', ' '.join(('%s' % s for s in command)))
return subprocess.Popen(command, env=env) |
def upix_to_pix(upix):
"""Get the nside from a unique pixel number."""
nside = np.power(2, np.floor(np.log2(upix / 4)) / 2).astype(int)
pix = upix - 4 * np.power(nside, 2)
return pix, nside | def function[upix_to_pix, parameter[upix]]:
constant[Get the nside from a unique pixel number.]
variable[nside] assign[=] call[call[name[np].power, parameter[constant[2], binary_operation[call[name[np].floor, parameter[call[name[np].log2, parameter[binary_operation[name[upix] / constant[4]]]]]] / constant[2]]]].astype, parameter[name[int]]]
variable[pix] assign[=] binary_operation[name[upix] - binary_operation[constant[4] * call[name[np].power, parameter[name[nside], constant[2]]]]]
return[tuple[[<ast.Name object at 0x7da18bcc89a0>, <ast.Name object at 0x7da18bcca830>]]] | keyword[def] identifier[upix_to_pix] ( identifier[upix] ):
literal[string]
identifier[nside] = identifier[np] . identifier[power] ( literal[int] , identifier[np] . identifier[floor] ( identifier[np] . identifier[log2] ( identifier[upix] / literal[int] ))/ literal[int] ). identifier[astype] ( identifier[int] )
identifier[pix] = identifier[upix] - literal[int] * identifier[np] . identifier[power] ( identifier[nside] , literal[int] )
keyword[return] identifier[pix] , identifier[nside] | def upix_to_pix(upix):
"""Get the nside from a unique pixel number."""
nside = np.power(2, np.floor(np.log2(upix / 4)) / 2).astype(int)
pix = upix - 4 * np.power(nside, 2)
return (pix, nside) |
def remove_synchronous(self, resource, force=False, timeout=-1):
"""
Deletes the resource specified by {id} synchronously.
Args:
resource: dict object to remove
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success
"""
uri = self._client.build_uri(resource['uri']) + "/synchronous"
remove_resource = {'uri': uri}
return self._client.delete(remove_resource, force=force, timeout=timeout) | def function[remove_synchronous, parameter[self, resource, force, timeout]]:
constant[
Deletes the resource specified by {id} synchronously.
Args:
resource: dict object to remove
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success
]
variable[uri] assign[=] binary_operation[call[name[self]._client.build_uri, parameter[call[name[resource]][constant[uri]]]] + constant[/synchronous]]
variable[remove_resource] assign[=] dictionary[[<ast.Constant object at 0x7da20c76d030>], [<ast.Name object at 0x7da20c76f250>]]
return[call[name[self]._client.delete, parameter[name[remove_resource]]]] | keyword[def] identifier[remove_synchronous] ( identifier[self] , identifier[resource] , identifier[force] = keyword[False] , identifier[timeout] =- literal[int] ):
literal[string]
identifier[uri] = identifier[self] . identifier[_client] . identifier[build_uri] ( identifier[resource] [ literal[string] ])+ literal[string]
identifier[remove_resource] ={ literal[string] : identifier[uri] }
keyword[return] identifier[self] . identifier[_client] . identifier[delete] ( identifier[remove_resource] , identifier[force] = identifier[force] , identifier[timeout] = identifier[timeout] ) | def remove_synchronous(self, resource, force=False, timeout=-1):
"""
Deletes the resource specified by {id} synchronously.
Args:
resource: dict object to remove
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success
"""
uri = self._client.build_uri(resource['uri']) + '/synchronous'
remove_resource = {'uri': uri}
return self._client.delete(remove_resource, force=force, timeout=timeout) |
def getEstablishments(self, city_id, **kwargs):
"""
:param city_id: id of the city for which collections are needed
:param lat: latitude
:param lon: longitude
Get a list of restaurant types in a city. The location/City input can be provided in the following ways
- Using Zomato City ID
- Using coordinates of any location within a city
List of all restaurants categorized under a particular restaurant type can obtained using
/Search API with Establishment ID and location details as inputs
"""
params = {"city_id": city_id}
optional_params = ["lat", "lon"]
for key in optional_params:
if key in kwargs:
params[key] = kwargs[key]
establishments = self.api.get("/establishments", params)
return establishments | def function[getEstablishments, parameter[self, city_id]]:
constant[
:param city_id: id of the city for which collections are needed
:param lat: latitude
:param lon: longitude
Get a list of restaurant types in a city. The location/City input can be provided in the following ways
- Using Zomato City ID
- Using coordinates of any location within a city
List of all restaurants categorized under a particular restaurant type can obtained using
/Search API with Establishment ID and location details as inputs
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55960>], [<ast.Name object at 0x7da18eb55a50>]]
variable[optional_params] assign[=] list[[<ast.Constant object at 0x7da20c6aae00>, <ast.Constant object at 0x7da20c6aa050>]]
for taget[name[key]] in starred[name[optional_params]] begin[:]
if compare[name[key] in name[kwargs]] begin[:]
call[name[params]][name[key]] assign[=] call[name[kwargs]][name[key]]
variable[establishments] assign[=] call[name[self].api.get, parameter[constant[/establishments], name[params]]]
return[name[establishments]] | keyword[def] identifier[getEstablishments] ( identifier[self] , identifier[city_id] ,** identifier[kwargs] ):
literal[string]
identifier[params] ={ literal[string] : identifier[city_id] }
identifier[optional_params] =[ literal[string] , literal[string] ]
keyword[for] identifier[key] keyword[in] identifier[optional_params] :
keyword[if] identifier[key] keyword[in] identifier[kwargs] :
identifier[params] [ identifier[key] ]= identifier[kwargs] [ identifier[key] ]
identifier[establishments] = identifier[self] . identifier[api] . identifier[get] ( literal[string] , identifier[params] )
keyword[return] identifier[establishments] | def getEstablishments(self, city_id, **kwargs):
"""
:param city_id: id of the city for which collections are needed
:param lat: latitude
:param lon: longitude
Get a list of restaurant types in a city. The location/City input can be provided in the following ways
- Using Zomato City ID
- Using coordinates of any location within a city
List of all restaurants categorized under a particular restaurant type can obtained using
/Search API with Establishment ID and location details as inputs
"""
params = {'city_id': city_id}
optional_params = ['lat', 'lon']
for key in optional_params:
if key in kwargs:
params[key] = kwargs[key] # depends on [control=['if'], data=['key', 'kwargs']] # depends on [control=['for'], data=['key']]
establishments = self.api.get('/establishments', params)
return establishments |
def compress(a, b):
"""Performs the *compressed* diff of `a` and `b` such that the original
contents of the :func:`difflib.ndiff` call can be reconstructed using
:func:`~acorn.logging.diff.restore`.
Args:
a (str or list): *original* string or list of strings to diff.
b (str or list): *edited* string or list of strings to diff.
"""
from difflib import ndiff
left = a.splitlines(1) if isinstance(a, string_types) else a
right = b.splitlines(1) if isinstance(b, string_types) else b
ldiff = list(ndiff(left, right))
result = {}
latest = None
combo = None
icombo = 0
iorig = 0
for i, line in enumerate(ldiff):
cs = [l[0] for l in ldiff[i:min((i+4, len(ldiff)))]]
if cs[0] != ' ':
#Initialize a new entry in the diff list.
if latest is None:
latest = iorig
result[latest] = []
#We have to be careful. At a minimum, there may be a '-' or a '+' when the lines are
#completely added or deleted. When they are *altered*, then we also expect one or
#more '?' lines showing the differences.
if combo is None:
if cs[0] == '-':
#Check whether the next lines have one of these combinations:
if (len(cs) >=3 and cs[1] == '+' and cs[2] == '?'):
combo = 3
elif (len(cs) >= 4 and cs[1] == '?' and cs[2] == '+'
and cs[3] == '?'):
combo = 4
else:
#This is a stand-alone deletion.
combo = 1
elif cs[0] == '+':
#This is for the stand-alone addition.
combo = 1
if icombo < combo:
result[latest].append(line)
icombo += 1
if icombo == combo:
if combo > 1:
latest = None
combo = None
icombo = 0
if cs[0] != '+':
iorig += 1
else:
latest = None
iorig += 1
return result | def function[compress, parameter[a, b]]:
constant[Performs the *compressed* diff of `a` and `b` such that the original
contents of the :func:`difflib.ndiff` call can be reconstructed using
:func:`~acorn.logging.diff.restore`.
Args:
a (str or list): *original* string or list of strings to diff.
b (str or list): *edited* string or list of strings to diff.
]
from relative_module[difflib] import module[ndiff]
variable[left] assign[=] <ast.IfExp object at 0x7da1b14601f0>
variable[right] assign[=] <ast.IfExp object at 0x7da1b1470eb0>
variable[ldiff] assign[=] call[name[list], parameter[call[name[ndiff], parameter[name[left], name[right]]]]]
variable[result] assign[=] dictionary[[], []]
variable[latest] assign[=] constant[None]
variable[combo] assign[=] constant[None]
variable[icombo] assign[=] constant[0]
variable[iorig] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b1417160>, <ast.Name object at 0x7da1b1417fd0>]]] in starred[call[name[enumerate], parameter[name[ldiff]]]] begin[:]
variable[cs] assign[=] <ast.ListComp object at 0x7da1b1415bd0>
if compare[call[name[cs]][constant[0]] not_equal[!=] constant[ ]] begin[:]
if compare[name[latest] is constant[None]] begin[:]
variable[latest] assign[=] name[iorig]
call[name[result]][name[latest]] assign[=] list[[]]
if compare[name[combo] is constant[None]] begin[:]
if compare[call[name[cs]][constant[0]] equal[==] constant[-]] begin[:]
if <ast.BoolOp object at 0x7da1b1460820> begin[:]
variable[combo] assign[=] constant[3]
if compare[name[icombo] less[<] name[combo]] begin[:]
call[call[name[result]][name[latest]].append, parameter[name[line]]]
<ast.AugAssign object at 0x7da1b1460670>
if compare[name[icombo] equal[==] name[combo]] begin[:]
if compare[name[combo] greater[>] constant[1]] begin[:]
variable[latest] assign[=] constant[None]
variable[combo] assign[=] constant[None]
variable[icombo] assign[=] constant[0]
if compare[call[name[cs]][constant[0]] not_equal[!=] constant[+]] begin[:]
<ast.AugAssign object at 0x7da1b14629b0>
return[name[result]] | keyword[def] identifier[compress] ( identifier[a] , identifier[b] ):
literal[string]
keyword[from] identifier[difflib] keyword[import] identifier[ndiff]
identifier[left] = identifier[a] . identifier[splitlines] ( literal[int] ) keyword[if] identifier[isinstance] ( identifier[a] , identifier[string_types] ) keyword[else] identifier[a]
identifier[right] = identifier[b] . identifier[splitlines] ( literal[int] ) keyword[if] identifier[isinstance] ( identifier[b] , identifier[string_types] ) keyword[else] identifier[b]
identifier[ldiff] = identifier[list] ( identifier[ndiff] ( identifier[left] , identifier[right] ))
identifier[result] ={}
identifier[latest] = keyword[None]
identifier[combo] = keyword[None]
identifier[icombo] = literal[int]
identifier[iorig] = literal[int]
keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[ldiff] ):
identifier[cs] =[ identifier[l] [ literal[int] ] keyword[for] identifier[l] keyword[in] identifier[ldiff] [ identifier[i] : identifier[min] (( identifier[i] + literal[int] , identifier[len] ( identifier[ldiff] )))]]
keyword[if] identifier[cs] [ literal[int] ]!= literal[string] :
keyword[if] identifier[latest] keyword[is] keyword[None] :
identifier[latest] = identifier[iorig]
identifier[result] [ identifier[latest] ]=[]
keyword[if] identifier[combo] keyword[is] keyword[None] :
keyword[if] identifier[cs] [ literal[int] ]== literal[string] :
keyword[if] ( identifier[len] ( identifier[cs] )>= literal[int] keyword[and] identifier[cs] [ literal[int] ]== literal[string] keyword[and] identifier[cs] [ literal[int] ]== literal[string] ):
identifier[combo] = literal[int]
keyword[elif] ( identifier[len] ( identifier[cs] )>= literal[int] keyword[and] identifier[cs] [ literal[int] ]== literal[string] keyword[and] identifier[cs] [ literal[int] ]== literal[string]
keyword[and] identifier[cs] [ literal[int] ]== literal[string] ):
identifier[combo] = literal[int]
keyword[else] :
identifier[combo] = literal[int]
keyword[elif] identifier[cs] [ literal[int] ]== literal[string] :
identifier[combo] = literal[int]
keyword[if] identifier[icombo] < identifier[combo] :
identifier[result] [ identifier[latest] ]. identifier[append] ( identifier[line] )
identifier[icombo] += literal[int]
keyword[if] identifier[icombo] == identifier[combo] :
keyword[if] identifier[combo] > literal[int] :
identifier[latest] = keyword[None]
identifier[combo] = keyword[None]
identifier[icombo] = literal[int]
keyword[if] identifier[cs] [ literal[int] ]!= literal[string] :
identifier[iorig] += literal[int]
keyword[else] :
identifier[latest] = keyword[None]
identifier[iorig] += literal[int]
keyword[return] identifier[result] | def compress(a, b):
"""Performs the *compressed* diff of `a` and `b` such that the original
contents of the :func:`difflib.ndiff` call can be reconstructed using
:func:`~acorn.logging.diff.restore`.
Args:
a (str or list): *original* string or list of strings to diff.
b (str or list): *edited* string or list of strings to diff.
"""
from difflib import ndiff
left = a.splitlines(1) if isinstance(a, string_types) else a
right = b.splitlines(1) if isinstance(b, string_types) else b
ldiff = list(ndiff(left, right))
result = {}
latest = None
combo = None
icombo = 0
iorig = 0
for (i, line) in enumerate(ldiff):
cs = [l[0] for l in ldiff[i:min((i + 4, len(ldiff)))]]
if cs[0] != ' ':
#Initialize a new entry in the diff list.
if latest is None:
latest = iorig
result[latest] = [] # depends on [control=['if'], data=['latest']] #We have to be careful. At a minimum, there may be a '-' or a '+' when the lines are
#completely added or deleted. When they are *altered*, then we also expect one or
#more '?' lines showing the differences.
if combo is None:
if cs[0] == '-':
#Check whether the next lines have one of these combinations:
if len(cs) >= 3 and cs[1] == '+' and (cs[2] == '?'):
combo = 3 # depends on [control=['if'], data=[]]
elif len(cs) >= 4 and cs[1] == '?' and (cs[2] == '+') and (cs[3] == '?'):
combo = 4 # depends on [control=['if'], data=[]]
else:
#This is a stand-alone deletion.
combo = 1 # depends on [control=['if'], data=[]]
elif cs[0] == '+':
#This is for the stand-alone addition.
combo = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['combo']]
if icombo < combo:
result[latest].append(line)
icombo += 1 # depends on [control=['if'], data=['icombo']]
if icombo == combo:
if combo > 1:
latest = None # depends on [control=['if'], data=[]]
combo = None
icombo = 0
if cs[0] != '+':
iorig += 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['icombo', 'combo']] # depends on [control=['if'], data=[]]
else:
latest = None
iorig += 1 # depends on [control=['for'], data=[]]
return result |
def _untrack_tendril(self, tendril):
"""
Removes the tendril from the set of tracked tendrils.
"""
try:
del self.tendrils[tendril._tendril_key]
except KeyError:
pass
# Also remove from _tendrils
try:
del self._tendrils[tendril.proto][tendril._tendril_key]
except KeyError:
pass | def function[_untrack_tendril, parameter[self, tendril]]:
constant[
Removes the tendril from the set of tracked tendrils.
]
<ast.Try object at 0x7da18bcc8be0>
<ast.Try object at 0x7da18bccb8e0> | keyword[def] identifier[_untrack_tendril] ( identifier[self] , identifier[tendril] ):
literal[string]
keyword[try] :
keyword[del] identifier[self] . identifier[tendrils] [ identifier[tendril] . identifier[_tendril_key] ]
keyword[except] identifier[KeyError] :
keyword[pass]
keyword[try] :
keyword[del] identifier[self] . identifier[_tendrils] [ identifier[tendril] . identifier[proto] ][ identifier[tendril] . identifier[_tendril_key] ]
keyword[except] identifier[KeyError] :
keyword[pass] | def _untrack_tendril(self, tendril):
"""
Removes the tendril from the set of tracked tendrils.
"""
try:
del self.tendrils[tendril._tendril_key] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]]
# Also remove from _tendrils
try:
del self._tendrils[tendril.proto][tendril._tendril_key] # depends on [control=['try'], data=[]]
except KeyError:
pass # depends on [control=['except'], data=[]] |
def __discovery_doc_descriptor(self, services, hostname=None):
"""Builds a discovery doc for an API.
Args:
services: List of protorpc.remote.Service instances implementing an
api/version.
hostname: string, Hostname of the API, to override the value set on the
current service. Defaults to None.
Returns:
A dictionary that can be deserialized into JSON in discovery doc format.
Raises:
ApiConfigurationError: If there's something wrong with the API
configuration, such as a multiclass API decorated with different API
descriptors (see the docstring for api()), or a repeated method
signature.
"""
merged_api_info = self.__get_merged_api_info(services)
descriptor = self.get_descriptor_defaults(merged_api_info,
hostname=hostname)
description = merged_api_info.description
if not description and len(services) == 1:
description = services[0].__doc__
if description:
descriptor['description'] = description
descriptor['parameters'] = self.__standard_parameters_descriptor()
descriptor['auth'] = self.__standard_auth_descriptor(services)
# Add namespace information, if provided
if merged_api_info.namespace:
descriptor['ownerDomain'] = merged_api_info.namespace.owner_domain
descriptor['ownerName'] = merged_api_info.namespace.owner_name
descriptor['packagePath'] = merged_api_info.namespace.package_path or ''
else:
if merged_api_info.owner_domain is not None:
descriptor['ownerDomain'] = merged_api_info.owner_domain
if merged_api_info.owner_name is not None:
descriptor['ownerName'] = merged_api_info.owner_name
if merged_api_info.package_path is not None:
descriptor['packagePath'] = merged_api_info.package_path
method_map = {}
method_collision_tracker = {}
rest_collision_tracker = {}
resource_index = collections.defaultdict(list)
resource_map = {}
# For the first pass, only process top-level methods (that is, those methods
# that are unattached to a resource).
for service in services:
remote_methods = service.all_remote_methods()
for protorpc_meth_name, protorpc_meth_info in remote_methods.iteritems():
method_info = getattr(protorpc_meth_info, 'method_info', None)
# Skip methods that are not decorated with @method
if method_info is None:
continue
path = method_info.get_path(service.api_info)
method_id = method_info.method_id(service.api_info)
canonical_method_id = self._get_canonical_method_id(method_id)
resource_path = self._get_resource_path(method_id)
# Make sure the same method name isn't repeated.
if method_id in method_collision_tracker:
raise api_exceptions.ApiConfigurationError(
'Method %s used multiple times, in classes %s and %s' %
(method_id, method_collision_tracker[method_id],
service.__name__))
else:
method_collision_tracker[method_id] = service.__name__
# Make sure the same HTTP method & path aren't repeated.
rest_identifier = (method_info.http_method, path)
if rest_identifier in rest_collision_tracker:
raise api_exceptions.ApiConfigurationError(
'%s path "%s" used multiple times, in classes %s and %s' %
(method_info.http_method, path,
rest_collision_tracker[rest_identifier],
service.__name__))
else:
rest_collision_tracker[rest_identifier] = service.__name__
# If this method is part of a resource, note it and skip it for now
if resource_path:
resource_index[resource_path[0]].append((service, protorpc_meth_info))
else:
method_map[canonical_method_id] = self.__method_descriptor(
service, method_info, protorpc_meth_info)
# Do another pass for methods attached to resources
for resource, resource_methods in resource_index.items():
resource_map[resource] = self.__resource_descriptor(resource,
resource_methods)
if method_map:
descriptor['methods'] = method_map
if resource_map:
descriptor['resources'] = resource_map
# Add schemas, if any
schemas = self.__schemas_descriptor()
if schemas:
descriptor['schemas'] = schemas
return descriptor | def function[__discovery_doc_descriptor, parameter[self, services, hostname]]:
constant[Builds a discovery doc for an API.
Args:
services: List of protorpc.remote.Service instances implementing an
api/version.
hostname: string, Hostname of the API, to override the value set on the
current service. Defaults to None.
Returns:
A dictionary that can be deserialized into JSON in discovery doc format.
Raises:
ApiConfigurationError: If there's something wrong with the API
configuration, such as a multiclass API decorated with different API
descriptors (see the docstring for api()), or a repeated method
signature.
]
variable[merged_api_info] assign[=] call[name[self].__get_merged_api_info, parameter[name[services]]]
variable[descriptor] assign[=] call[name[self].get_descriptor_defaults, parameter[name[merged_api_info]]]
variable[description] assign[=] name[merged_api_info].description
if <ast.BoolOp object at 0x7da1b0d54df0> begin[:]
variable[description] assign[=] call[name[services]][constant[0]].__doc__
if name[description] begin[:]
call[name[descriptor]][constant[description]] assign[=] name[description]
call[name[descriptor]][constant[parameters]] assign[=] call[name[self].__standard_parameters_descriptor, parameter[]]
call[name[descriptor]][constant[auth]] assign[=] call[name[self].__standard_auth_descriptor, parameter[name[services]]]
if name[merged_api_info].namespace begin[:]
call[name[descriptor]][constant[ownerDomain]] assign[=] name[merged_api_info].namespace.owner_domain
call[name[descriptor]][constant[ownerName]] assign[=] name[merged_api_info].namespace.owner_name
call[name[descriptor]][constant[packagePath]] assign[=] <ast.BoolOp object at 0x7da1b0d55e70>
variable[method_map] assign[=] dictionary[[], []]
variable[method_collision_tracker] assign[=] dictionary[[], []]
variable[rest_collision_tracker] assign[=] dictionary[[], []]
variable[resource_index] assign[=] call[name[collections].defaultdict, parameter[name[list]]]
variable[resource_map] assign[=] dictionary[[], []]
for taget[name[service]] in starred[name[services]] begin[:]
variable[remote_methods] assign[=] call[name[service].all_remote_methods, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0d566b0>, <ast.Name object at 0x7da1b0d554b0>]]] in starred[call[name[remote_methods].iteritems, parameter[]]] begin[:]
variable[method_info] assign[=] call[name[getattr], parameter[name[protorpc_meth_info], constant[method_info], constant[None]]]
if compare[name[method_info] is constant[None]] begin[:]
continue
variable[path] assign[=] call[name[method_info].get_path, parameter[name[service].api_info]]
variable[method_id] assign[=] call[name[method_info].method_id, parameter[name[service].api_info]]
variable[canonical_method_id] assign[=] call[name[self]._get_canonical_method_id, parameter[name[method_id]]]
variable[resource_path] assign[=] call[name[self]._get_resource_path, parameter[name[method_id]]]
if compare[name[method_id] in name[method_collision_tracker]] begin[:]
<ast.Raise object at 0x7da1b0d55960>
variable[rest_identifier] assign[=] tuple[[<ast.Attribute object at 0x7da1b0d553f0>, <ast.Name object at 0x7da1b0d57b20>]]
if compare[name[rest_identifier] in name[rest_collision_tracker]] begin[:]
<ast.Raise object at 0x7da1b0d54c70>
if name[resource_path] begin[:]
call[call[name[resource_index]][call[name[resource_path]][constant[0]]].append, parameter[tuple[[<ast.Name object at 0x7da1b0efe350>, <ast.Name object at 0x7da1b0eff790>]]]]
for taget[tuple[[<ast.Name object at 0x7da1b0efda20>, <ast.Name object at 0x7da1b0eff190>]]] in starred[call[name[resource_index].items, parameter[]]] begin[:]
call[name[resource_map]][name[resource]] assign[=] call[name[self].__resource_descriptor, parameter[name[resource], name[resource_methods]]]
if name[method_map] begin[:]
call[name[descriptor]][constant[methods]] assign[=] name[method_map]
if name[resource_map] begin[:]
call[name[descriptor]][constant[resources]] assign[=] name[resource_map]
variable[schemas] assign[=] call[name[self].__schemas_descriptor, parameter[]]
if name[schemas] begin[:]
call[name[descriptor]][constant[schemas]] assign[=] name[schemas]
return[name[descriptor]] | keyword[def] identifier[__discovery_doc_descriptor] ( identifier[self] , identifier[services] , identifier[hostname] = keyword[None] ):
literal[string]
identifier[merged_api_info] = identifier[self] . identifier[__get_merged_api_info] ( identifier[services] )
identifier[descriptor] = identifier[self] . identifier[get_descriptor_defaults] ( identifier[merged_api_info] ,
identifier[hostname] = identifier[hostname] )
identifier[description] = identifier[merged_api_info] . identifier[description]
keyword[if] keyword[not] identifier[description] keyword[and] identifier[len] ( identifier[services] )== literal[int] :
identifier[description] = identifier[services] [ literal[int] ]. identifier[__doc__]
keyword[if] identifier[description] :
identifier[descriptor] [ literal[string] ]= identifier[description]
identifier[descriptor] [ literal[string] ]= identifier[self] . identifier[__standard_parameters_descriptor] ()
identifier[descriptor] [ literal[string] ]= identifier[self] . identifier[__standard_auth_descriptor] ( identifier[services] )
keyword[if] identifier[merged_api_info] . identifier[namespace] :
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[namespace] . identifier[owner_domain]
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[namespace] . identifier[owner_name]
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[namespace] . identifier[package_path] keyword[or] literal[string]
keyword[else] :
keyword[if] identifier[merged_api_info] . identifier[owner_domain] keyword[is] keyword[not] keyword[None] :
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[owner_domain]
keyword[if] identifier[merged_api_info] . identifier[owner_name] keyword[is] keyword[not] keyword[None] :
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[owner_name]
keyword[if] identifier[merged_api_info] . identifier[package_path] keyword[is] keyword[not] keyword[None] :
identifier[descriptor] [ literal[string] ]= identifier[merged_api_info] . identifier[package_path]
identifier[method_map] ={}
identifier[method_collision_tracker] ={}
identifier[rest_collision_tracker] ={}
identifier[resource_index] = identifier[collections] . identifier[defaultdict] ( identifier[list] )
identifier[resource_map] ={}
keyword[for] identifier[service] keyword[in] identifier[services] :
identifier[remote_methods] = identifier[service] . identifier[all_remote_methods] ()
keyword[for] identifier[protorpc_meth_name] , identifier[protorpc_meth_info] keyword[in] identifier[remote_methods] . identifier[iteritems] ():
identifier[method_info] = identifier[getattr] ( identifier[protorpc_meth_info] , literal[string] , keyword[None] )
keyword[if] identifier[method_info] keyword[is] keyword[None] :
keyword[continue]
identifier[path] = identifier[method_info] . identifier[get_path] ( identifier[service] . identifier[api_info] )
identifier[method_id] = identifier[method_info] . identifier[method_id] ( identifier[service] . identifier[api_info] )
identifier[canonical_method_id] = identifier[self] . identifier[_get_canonical_method_id] ( identifier[method_id] )
identifier[resource_path] = identifier[self] . identifier[_get_resource_path] ( identifier[method_id] )
keyword[if] identifier[method_id] keyword[in] identifier[method_collision_tracker] :
keyword[raise] identifier[api_exceptions] . identifier[ApiConfigurationError] (
literal[string] %
( identifier[method_id] , identifier[method_collision_tracker] [ identifier[method_id] ],
identifier[service] . identifier[__name__] ))
keyword[else] :
identifier[method_collision_tracker] [ identifier[method_id] ]= identifier[service] . identifier[__name__]
identifier[rest_identifier] =( identifier[method_info] . identifier[http_method] , identifier[path] )
keyword[if] identifier[rest_identifier] keyword[in] identifier[rest_collision_tracker] :
keyword[raise] identifier[api_exceptions] . identifier[ApiConfigurationError] (
literal[string] %
( identifier[method_info] . identifier[http_method] , identifier[path] ,
identifier[rest_collision_tracker] [ identifier[rest_identifier] ],
identifier[service] . identifier[__name__] ))
keyword[else] :
identifier[rest_collision_tracker] [ identifier[rest_identifier] ]= identifier[service] . identifier[__name__]
keyword[if] identifier[resource_path] :
identifier[resource_index] [ identifier[resource_path] [ literal[int] ]]. identifier[append] (( identifier[service] , identifier[protorpc_meth_info] ))
keyword[else] :
identifier[method_map] [ identifier[canonical_method_id] ]= identifier[self] . identifier[__method_descriptor] (
identifier[service] , identifier[method_info] , identifier[protorpc_meth_info] )
keyword[for] identifier[resource] , identifier[resource_methods] keyword[in] identifier[resource_index] . identifier[items] ():
identifier[resource_map] [ identifier[resource] ]= identifier[self] . identifier[__resource_descriptor] ( identifier[resource] ,
identifier[resource_methods] )
keyword[if] identifier[method_map] :
identifier[descriptor] [ literal[string] ]= identifier[method_map]
keyword[if] identifier[resource_map] :
identifier[descriptor] [ literal[string] ]= identifier[resource_map]
identifier[schemas] = identifier[self] . identifier[__schemas_descriptor] ()
keyword[if] identifier[schemas] :
identifier[descriptor] [ literal[string] ]= identifier[schemas]
keyword[return] identifier[descriptor] | def __discovery_doc_descriptor(self, services, hostname=None):
"""Builds a discovery doc for an API.
Args:
services: List of protorpc.remote.Service instances implementing an
api/version.
hostname: string, Hostname of the API, to override the value set on the
current service. Defaults to None.
Returns:
A dictionary that can be deserialized into JSON in discovery doc format.
Raises:
ApiConfigurationError: If there's something wrong with the API
configuration, such as a multiclass API decorated with different API
descriptors (see the docstring for api()), or a repeated method
signature.
"""
merged_api_info = self.__get_merged_api_info(services)
descriptor = self.get_descriptor_defaults(merged_api_info, hostname=hostname)
description = merged_api_info.description
if not description and len(services) == 1:
description = services[0].__doc__ # depends on [control=['if'], data=[]]
if description:
descriptor['description'] = description # depends on [control=['if'], data=[]]
descriptor['parameters'] = self.__standard_parameters_descriptor()
descriptor['auth'] = self.__standard_auth_descriptor(services)
# Add namespace information, if provided
if merged_api_info.namespace:
descriptor['ownerDomain'] = merged_api_info.namespace.owner_domain
descriptor['ownerName'] = merged_api_info.namespace.owner_name
descriptor['packagePath'] = merged_api_info.namespace.package_path or '' # depends on [control=['if'], data=[]]
else:
if merged_api_info.owner_domain is not None:
descriptor['ownerDomain'] = merged_api_info.owner_domain # depends on [control=['if'], data=[]]
if merged_api_info.owner_name is not None:
descriptor['ownerName'] = merged_api_info.owner_name # depends on [control=['if'], data=[]]
if merged_api_info.package_path is not None:
descriptor['packagePath'] = merged_api_info.package_path # depends on [control=['if'], data=[]]
method_map = {}
method_collision_tracker = {}
rest_collision_tracker = {}
resource_index = collections.defaultdict(list)
resource_map = {}
# For the first pass, only process top-level methods (that is, those methods
# that are unattached to a resource).
for service in services:
remote_methods = service.all_remote_methods()
for (protorpc_meth_name, protorpc_meth_info) in remote_methods.iteritems():
method_info = getattr(protorpc_meth_info, 'method_info', None)
# Skip methods that are not decorated with @method
if method_info is None:
continue # depends on [control=['if'], data=[]]
path = method_info.get_path(service.api_info)
method_id = method_info.method_id(service.api_info)
canonical_method_id = self._get_canonical_method_id(method_id)
resource_path = self._get_resource_path(method_id)
# Make sure the same method name isn't repeated.
if method_id in method_collision_tracker:
raise api_exceptions.ApiConfigurationError('Method %s used multiple times, in classes %s and %s' % (method_id, method_collision_tracker[method_id], service.__name__)) # depends on [control=['if'], data=['method_id', 'method_collision_tracker']]
else:
method_collision_tracker[method_id] = service.__name__
# Make sure the same HTTP method & path aren't repeated.
rest_identifier = (method_info.http_method, path)
if rest_identifier in rest_collision_tracker:
raise api_exceptions.ApiConfigurationError('%s path "%s" used multiple times, in classes %s and %s' % (method_info.http_method, path, rest_collision_tracker[rest_identifier], service.__name__)) # depends on [control=['if'], data=['rest_identifier', 'rest_collision_tracker']]
else:
rest_collision_tracker[rest_identifier] = service.__name__
# If this method is part of a resource, note it and skip it for now
if resource_path:
resource_index[resource_path[0]].append((service, protorpc_meth_info)) # depends on [control=['if'], data=[]]
else:
method_map[canonical_method_id] = self.__method_descriptor(service, method_info, protorpc_meth_info) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['service']]
# Do another pass for methods attached to resources
for (resource, resource_methods) in resource_index.items():
resource_map[resource] = self.__resource_descriptor(resource, resource_methods) # depends on [control=['for'], data=[]]
if method_map:
descriptor['methods'] = method_map # depends on [control=['if'], data=[]]
if resource_map:
descriptor['resources'] = resource_map # depends on [control=['if'], data=[]]
# Add schemas, if any
schemas = self.__schemas_descriptor()
if schemas:
descriptor['schemas'] = schemas # depends on [control=['if'], data=[]]
return descriptor |
def applicable_file_flags(self):
"""
Return the applicable file flags attribute of the BFD file being
processed.
"""
if not self._ptr:
raise BfdException("BFD not initialized")
return _bfd.get_bfd_attribute(
self._ptr, BfdAttributes.APPLICABLE_FILE_FLAGS) | def function[applicable_file_flags, parameter[self]]:
constant[
Return the applicable file flags attribute of the BFD file being
processed.
]
if <ast.UnaryOp object at 0x7da207f9aa70> begin[:]
<ast.Raise object at 0x7da207f9b0a0>
return[call[name[_bfd].get_bfd_attribute, parameter[name[self]._ptr, name[BfdAttributes].APPLICABLE_FILE_FLAGS]]] | keyword[def] identifier[applicable_file_flags] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_ptr] :
keyword[raise] identifier[BfdException] ( literal[string] )
keyword[return] identifier[_bfd] . identifier[get_bfd_attribute] (
identifier[self] . identifier[_ptr] , identifier[BfdAttributes] . identifier[APPLICABLE_FILE_FLAGS] ) | def applicable_file_flags(self):
"""
Return the applicable file flags attribute of the BFD file being
processed.
"""
if not self._ptr:
raise BfdException('BFD not initialized') # depends on [control=['if'], data=[]]
return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.APPLICABLE_FILE_FLAGS) |
def _update_list_store_entry(self, list_store, config_key, config_value):
"""Helper method to update a list store
:param Gtk.ListStore list_store: List store to be updated
:param str config_key: Config key to search for
:param config_value: New config value
:returns: Row of list store that has been updated
:rtype: int
"""
for row_num, row in enumerate(list_store):
if row[self.KEY_STORAGE_ID] == config_key:
row[self.VALUE_STORAGE_ID] = str(config_value)
row[self.TOGGLE_VALUE_STORAGE_ID] = config_value
return row_num | def function[_update_list_store_entry, parameter[self, list_store, config_key, config_value]]:
constant[Helper method to update a list store
:param Gtk.ListStore list_store: List store to be updated
:param str config_key: Config key to search for
:param config_value: New config value
:returns: Row of list store that has been updated
:rtype: int
]
for taget[tuple[[<ast.Name object at 0x7da20c76c640>, <ast.Name object at 0x7da20c76ed40>]]] in starred[call[name[enumerate], parameter[name[list_store]]]] begin[:]
if compare[call[name[row]][name[self].KEY_STORAGE_ID] equal[==] name[config_key]] begin[:]
call[name[row]][name[self].VALUE_STORAGE_ID] assign[=] call[name[str], parameter[name[config_value]]]
call[name[row]][name[self].TOGGLE_VALUE_STORAGE_ID] assign[=] name[config_value]
return[name[row_num]] | keyword[def] identifier[_update_list_store_entry] ( identifier[self] , identifier[list_store] , identifier[config_key] , identifier[config_value] ):
literal[string]
keyword[for] identifier[row_num] , identifier[row] keyword[in] identifier[enumerate] ( identifier[list_store] ):
keyword[if] identifier[row] [ identifier[self] . identifier[KEY_STORAGE_ID] ]== identifier[config_key] :
identifier[row] [ identifier[self] . identifier[VALUE_STORAGE_ID] ]= identifier[str] ( identifier[config_value] )
identifier[row] [ identifier[self] . identifier[TOGGLE_VALUE_STORAGE_ID] ]= identifier[config_value]
keyword[return] identifier[row_num] | def _update_list_store_entry(self, list_store, config_key, config_value):
"""Helper method to update a list store
:param Gtk.ListStore list_store: List store to be updated
:param str config_key: Config key to search for
:param config_value: New config value
:returns: Row of list store that has been updated
:rtype: int
"""
for (row_num, row) in enumerate(list_store):
if row[self.KEY_STORAGE_ID] == config_key:
row[self.VALUE_STORAGE_ID] = str(config_value)
row[self.TOGGLE_VALUE_STORAGE_ID] = config_value
return row_num # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def cover(ctx, html=False):
'''Run tests suite with coverage'''
params = '--cov-report term --cov-report html' if html else ''
with ctx.cd(ROOT):
ctx.run('pytest --cov flask_fs {0}'.format(params), pty=True) | def function[cover, parameter[ctx, html]]:
constant[Run tests suite with coverage]
variable[params] assign[=] <ast.IfExp object at 0x7da1b0c50cd0>
with call[name[ctx].cd, parameter[name[ROOT]]] begin[:]
call[name[ctx].run, parameter[call[constant[pytest --cov flask_fs {0}].format, parameter[name[params]]]]] | keyword[def] identifier[cover] ( identifier[ctx] , identifier[html] = keyword[False] ):
literal[string]
identifier[params] = literal[string] keyword[if] identifier[html] keyword[else] literal[string]
keyword[with] identifier[ctx] . identifier[cd] ( identifier[ROOT] ):
identifier[ctx] . identifier[run] ( literal[string] . identifier[format] ( identifier[params] ), identifier[pty] = keyword[True] ) | def cover(ctx, html=False):
"""Run tests suite with coverage"""
params = '--cov-report term --cov-report html' if html else ''
with ctx.cd(ROOT):
ctx.run('pytest --cov flask_fs {0}'.format(params), pty=True) # depends on [control=['with'], data=[]] |
def _get_entity(service_instance, entity):
'''
Returns the entity associated with the entity dict representation
Supported entities: cluster, vcenter
Expected entity format:
.. code-block:: python
cluster:
{'type': 'cluster',
'datacenter': <datacenter_name>,
'cluster': <cluster_name>}
vcenter:
{'type': 'vcenter'}
service_instance
Service instance (vim.ServiceInstance) of the vCenter.
entity
Entity dict in the format above
'''
log.trace('Retrieving entity: %s', entity)
if entity['type'] == 'cluster':
dc_ref = salt.utils.vmware.get_datacenter(service_instance,
entity['datacenter'])
return salt.utils.vmware.get_cluster(dc_ref, entity['cluster'])
elif entity['type'] == 'vcenter':
return None
raise ArgumentValueError('Unsupported entity type \'{0}\''
''.format(entity['type'])) | def function[_get_entity, parameter[service_instance, entity]]:
constant[
Returns the entity associated with the entity dict representation
Supported entities: cluster, vcenter
Expected entity format:
.. code-block:: python
cluster:
{'type': 'cluster',
'datacenter': <datacenter_name>,
'cluster': <cluster_name>}
vcenter:
{'type': 'vcenter'}
service_instance
Service instance (vim.ServiceInstance) of the vCenter.
entity
Entity dict in the format above
]
call[name[log].trace, parameter[constant[Retrieving entity: %s], name[entity]]]
if compare[call[name[entity]][constant[type]] equal[==] constant[cluster]] begin[:]
variable[dc_ref] assign[=] call[name[salt].utils.vmware.get_datacenter, parameter[name[service_instance], call[name[entity]][constant[datacenter]]]]
return[call[name[salt].utils.vmware.get_cluster, parameter[name[dc_ref], call[name[entity]][constant[cluster]]]]]
<ast.Raise object at 0x7da2041d9f30> | keyword[def] identifier[_get_entity] ( identifier[service_instance] , identifier[entity] ):
literal[string]
identifier[log] . identifier[trace] ( literal[string] , identifier[entity] )
keyword[if] identifier[entity] [ literal[string] ]== literal[string] :
identifier[dc_ref] = identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_datacenter] ( identifier[service_instance] ,
identifier[entity] [ literal[string] ])
keyword[return] identifier[salt] . identifier[utils] . identifier[vmware] . identifier[get_cluster] ( identifier[dc_ref] , identifier[entity] [ literal[string] ])
keyword[elif] identifier[entity] [ literal[string] ]== literal[string] :
keyword[return] keyword[None]
keyword[raise] identifier[ArgumentValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[entity] [ literal[string] ])) | def _get_entity(service_instance, entity):
"""
Returns the entity associated with the entity dict representation
Supported entities: cluster, vcenter
Expected entity format:
.. code-block:: python
cluster:
{'type': 'cluster',
'datacenter': <datacenter_name>,
'cluster': <cluster_name>}
vcenter:
{'type': 'vcenter'}
service_instance
Service instance (vim.ServiceInstance) of the vCenter.
entity
Entity dict in the format above
"""
log.trace('Retrieving entity: %s', entity)
if entity['type'] == 'cluster':
dc_ref = salt.utils.vmware.get_datacenter(service_instance, entity['datacenter'])
return salt.utils.vmware.get_cluster(dc_ref, entity['cluster']) # depends on [control=['if'], data=[]]
elif entity['type'] == 'vcenter':
return None # depends on [control=['if'], data=[]]
raise ArgumentValueError("Unsupported entity type '{0}'".format(entity['type'])) |
def node_changed(self):
"""
Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.node_changed` method.
:return: Method success.
:rtype: bool
"""
for model in umbra.ui.models.GraphModel.find_model(self):
model.node_changed(self)
return True | def function[node_changed, parameter[self]]:
constant[
Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.node_changed` method.
:return: Method success.
:rtype: bool
]
for taget[name[model]] in starred[call[name[umbra].ui.models.GraphModel.find_model, parameter[name[self]]]] begin[:]
call[name[model].node_changed, parameter[name[self]]]
return[constant[True]] | keyword[def] identifier[node_changed] ( identifier[self] ):
literal[string]
keyword[for] identifier[model] keyword[in] identifier[umbra] . identifier[ui] . identifier[models] . identifier[GraphModel] . identifier[find_model] ( identifier[self] ):
identifier[model] . identifier[node_changed] ( identifier[self] )
keyword[return] keyword[True] | def node_changed(self):
"""
Triggers the host model(s) :meth:`umbra.ui.models.GraphModel.node_changed` method.
:return: Method success.
:rtype: bool
"""
for model in umbra.ui.models.GraphModel.find_model(self):
model.node_changed(self) # depends on [control=['for'], data=['model']]
return True |
def main():
"""Parser the command line and run the validator."""
parser = argparse.ArgumentParser(
description="[v" + __version__ + "] " + __doc__,
prog="w3c_validator",
)
parser.add_argument(
"--log",
default="INFO",
help=("log level: DEBUG, INFO or INFO "
"(default: INFO)"))
parser.add_argument(
"--version", action="version", version="%(prog)s " + __version__)
parser.add_argument(
"--verbose", help="increase output verbosity", action="store_true")
parser.add_argument(
"source", metavar="F", type=str, nargs="+", help="file or URL")
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log))
LOGGER.info("Files to validate: \n {0}".format("\n ".join(args.source)))
LOGGER.info("Number of files: {0}".format(len(args.source)))
errors = 0
warnings = 0
for f in args.source:
LOGGER.info("validating: %s ..." % f)
retrys = 0
while retrys < 2:
result = validate(f, verbose=args.verbose)
if result:
break
time.sleep(2)
retrys += 1
LOGGER.info("retrying: %s ..." % f)
else:
LOGGER.info("failed: %s" % f)
errors += 1
continue
# import pdb; pdb.set_trace()
if f.endswith(".css"):
errorcount = result["cssvalidation"]["result"]["errorcount"]
warningcount = result["cssvalidation"]["result"]["warningcount"]
errors += errorcount
warnings += warningcount
if errorcount > 0:
LOGGER.info("errors: %d" % errorcount)
if warningcount > 0:
LOGGER.info("warnings: %d" % warningcount)
else:
for msg in result["messages"]:
print_msg(msg)
if msg["type"] == "error":
errors += 1
else:
warnings += 1
sys.exit(min(errors, 255)) | def function[main, parameter[]]:
constant[Parser the command line and run the validator.]
variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]]
call[name[parser].add_argument, parameter[constant[--log]]]
call[name[parser].add_argument, parameter[constant[--version]]]
call[name[parser].add_argument, parameter[constant[--verbose]]]
call[name[parser].add_argument, parameter[constant[source]]]
variable[args] assign[=] call[name[parser].parse_args, parameter[]]
call[name[logging].basicConfig, parameter[]]
call[name[LOGGER].info, parameter[call[constant[Files to validate:
{0}].format, parameter[call[constant[
].join, parameter[name[args].source]]]]]]
call[name[LOGGER].info, parameter[call[constant[Number of files: {0}].format, parameter[call[name[len], parameter[name[args].source]]]]]]
variable[errors] assign[=] constant[0]
variable[warnings] assign[=] constant[0]
for taget[name[f]] in starred[name[args].source] begin[:]
call[name[LOGGER].info, parameter[binary_operation[constant[validating: %s ...] <ast.Mod object at 0x7da2590d6920> name[f]]]]
variable[retrys] assign[=] constant[0]
while compare[name[retrys] less[<] constant[2]] begin[:]
variable[result] assign[=] call[name[validate], parameter[name[f]]]
if name[result] begin[:]
break
call[name[time].sleep, parameter[constant[2]]]
<ast.AugAssign object at 0x7da1b0a6d7e0>
call[name[LOGGER].info, parameter[binary_operation[constant[retrying: %s ...] <ast.Mod object at 0x7da2590d6920> name[f]]]]
if call[name[f].endswith, parameter[constant[.css]]] begin[:]
variable[errorcount] assign[=] call[call[call[name[result]][constant[cssvalidation]]][constant[result]]][constant[errorcount]]
variable[warningcount] assign[=] call[call[call[name[result]][constant[cssvalidation]]][constant[result]]][constant[warningcount]]
<ast.AugAssign object at 0x7da1b0bdad70>
<ast.AugAssign object at 0x7da1b0bd87c0>
if compare[name[errorcount] greater[>] constant[0]] begin[:]
call[name[LOGGER].info, parameter[binary_operation[constant[errors: %d] <ast.Mod object at 0x7da2590d6920> name[errorcount]]]]
if compare[name[warningcount] greater[>] constant[0]] begin[:]
call[name[LOGGER].info, parameter[binary_operation[constant[warnings: %d] <ast.Mod object at 0x7da2590d6920> name[warningcount]]]]
call[name[sys].exit, parameter[call[name[min], parameter[name[errors], constant[255]]]]] | keyword[def] identifier[main] ():
literal[string]
identifier[parser] = identifier[argparse] . identifier[ArgumentParser] (
identifier[description] = literal[string] + identifier[__version__] + literal[string] + identifier[__doc__] ,
identifier[prog] = literal[string] ,
)
identifier[parser] . identifier[add_argument] (
literal[string] ,
identifier[default] = literal[string] ,
identifier[help] =( literal[string]
literal[string] ))
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[action] = literal[string] , identifier[version] = literal[string] + identifier[__version__] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[help] = literal[string] , identifier[action] = literal[string] )
identifier[parser] . identifier[add_argument] (
literal[string] , identifier[metavar] = literal[string] , identifier[type] = identifier[str] , identifier[nargs] = literal[string] , identifier[help] = literal[string] )
identifier[args] = identifier[parser] . identifier[parse_args] ()
identifier[logging] . identifier[basicConfig] ( identifier[level] = identifier[getattr] ( identifier[logging] , identifier[args] . identifier[log] ))
identifier[LOGGER] . identifier[info] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[args] . identifier[source] )))
identifier[LOGGER] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[args] . identifier[source] )))
identifier[errors] = literal[int]
identifier[warnings] = literal[int]
keyword[for] identifier[f] keyword[in] identifier[args] . identifier[source] :
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[f] )
identifier[retrys] = literal[int]
keyword[while] identifier[retrys] < literal[int] :
identifier[result] = identifier[validate] ( identifier[f] , identifier[verbose] = identifier[args] . identifier[verbose] )
keyword[if] identifier[result] :
keyword[break]
identifier[time] . identifier[sleep] ( literal[int] )
identifier[retrys] += literal[int]
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[f] )
keyword[else] :
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[f] )
identifier[errors] += literal[int]
keyword[continue]
keyword[if] identifier[f] . identifier[endswith] ( literal[string] ):
identifier[errorcount] = identifier[result] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[warningcount] = identifier[result] [ literal[string] ][ literal[string] ][ literal[string] ]
identifier[errors] += identifier[errorcount]
identifier[warnings] += identifier[warningcount]
keyword[if] identifier[errorcount] > literal[int] :
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[errorcount] )
keyword[if] identifier[warningcount] > literal[int] :
identifier[LOGGER] . identifier[info] ( literal[string] % identifier[warningcount] )
keyword[else] :
keyword[for] identifier[msg] keyword[in] identifier[result] [ literal[string] ]:
identifier[print_msg] ( identifier[msg] )
keyword[if] identifier[msg] [ literal[string] ]== literal[string] :
identifier[errors] += literal[int]
keyword[else] :
identifier[warnings] += literal[int]
identifier[sys] . identifier[exit] ( identifier[min] ( identifier[errors] , literal[int] )) | def main():
"""Parser the command line and run the validator."""
parser = argparse.ArgumentParser(description='[v' + __version__ + '] ' + __doc__, prog='w3c_validator')
parser.add_argument('--log', default='INFO', help='log level: DEBUG, INFO or INFO (default: INFO)')
parser.add_argument('--version', action='version', version='%(prog)s ' + __version__)
parser.add_argument('--verbose', help='increase output verbosity', action='store_true')
parser.add_argument('source', metavar='F', type=str, nargs='+', help='file or URL')
args = parser.parse_args()
logging.basicConfig(level=getattr(logging, args.log))
LOGGER.info('Files to validate: \n {0}'.format('\n '.join(args.source)))
LOGGER.info('Number of files: {0}'.format(len(args.source)))
errors = 0
warnings = 0
for f in args.source:
LOGGER.info('validating: %s ...' % f)
retrys = 0
while retrys < 2:
result = validate(f, verbose=args.verbose)
if result:
break # depends on [control=['if'], data=[]]
time.sleep(2)
retrys += 1
LOGGER.info('retrying: %s ...' % f) # depends on [control=['while'], data=['retrys']]
else:
LOGGER.info('failed: %s' % f)
errors += 1
continue
# import pdb; pdb.set_trace()
if f.endswith('.css'):
errorcount = result['cssvalidation']['result']['errorcount']
warningcount = result['cssvalidation']['result']['warningcount']
errors += errorcount
warnings += warningcount
if errorcount > 0:
LOGGER.info('errors: %d' % errorcount) # depends on [control=['if'], data=['errorcount']]
if warningcount > 0:
LOGGER.info('warnings: %d' % warningcount) # depends on [control=['if'], data=['warningcount']] # depends on [control=['if'], data=[]]
else:
for msg in result['messages']:
print_msg(msg)
if msg['type'] == 'error':
errors += 1 # depends on [control=['if'], data=[]]
else:
warnings += 1 # depends on [control=['for'], data=['msg']] # depends on [control=['for'], data=['f']]
sys.exit(min(errors, 255)) |
def encode(self, value):
'''
:param value: value to encode
'''
kassert.is_of_types(value, Bits)
remainder = len(value) % 8
if remainder:
value += Bits(bin='0' * (8 - remainder))
return value | def function[encode, parameter[self, value]]:
constant[
:param value: value to encode
]
call[name[kassert].is_of_types, parameter[name[value], name[Bits]]]
variable[remainder] assign[=] binary_operation[call[name[len], parameter[name[value]]] <ast.Mod object at 0x7da2590d6920> constant[8]]
if name[remainder] begin[:]
<ast.AugAssign object at 0x7da18dc07e50>
return[name[value]] | keyword[def] identifier[encode] ( identifier[self] , identifier[value] ):
literal[string]
identifier[kassert] . identifier[is_of_types] ( identifier[value] , identifier[Bits] )
identifier[remainder] = identifier[len] ( identifier[value] )% literal[int]
keyword[if] identifier[remainder] :
identifier[value] += identifier[Bits] ( identifier[bin] = literal[string] *( literal[int] - identifier[remainder] ))
keyword[return] identifier[value] | def encode(self, value):
"""
:param value: value to encode
"""
kassert.is_of_types(value, Bits)
remainder = len(value) % 8
if remainder:
value += Bits(bin='0' * (8 - remainder)) # depends on [control=['if'], data=[]]
return value |
def find_model_by_table_name(name):
"""Find a model reference by its table name"""
for model in ModelBase._decl_class_registry.values():
if hasattr(model, '__table__') and model.__table__.fullname == name:
return model
return None | def function[find_model_by_table_name, parameter[name]]:
constant[Find a model reference by its table name]
for taget[name[model]] in starred[call[name[ModelBase]._decl_class_registry.values, parameter[]]] begin[:]
if <ast.BoolOp object at 0x7da1b0d0d0f0> begin[:]
return[name[model]]
return[constant[None]] | keyword[def] identifier[find_model_by_table_name] ( identifier[name] ):
literal[string]
keyword[for] identifier[model] keyword[in] identifier[ModelBase] . identifier[_decl_class_registry] . identifier[values] ():
keyword[if] identifier[hasattr] ( identifier[model] , literal[string] ) keyword[and] identifier[model] . identifier[__table__] . identifier[fullname] == identifier[name] :
keyword[return] identifier[model]
keyword[return] keyword[None] | def find_model_by_table_name(name):
"""Find a model reference by its table name"""
for model in ModelBase._decl_class_registry.values():
if hasattr(model, '__table__') and model.__table__.fullname == name:
return model # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['model']]
return None |
def from_shortcode(cls, context: InstaloaderContext, shortcode: str):
"""Create a post object from a given shortcode"""
# pylint:disable=protected-access
post = cls(context, {'shortcode': shortcode})
post._node = post._full_metadata
return post | def function[from_shortcode, parameter[cls, context, shortcode]]:
constant[Create a post object from a given shortcode]
variable[post] assign[=] call[name[cls], parameter[name[context], dictionary[[<ast.Constant object at 0x7da20c6e7070>], [<ast.Name object at 0x7da20c6e77f0>]]]]
name[post]._node assign[=] name[post]._full_metadata
return[name[post]] | keyword[def] identifier[from_shortcode] ( identifier[cls] , identifier[context] : identifier[InstaloaderContext] , identifier[shortcode] : identifier[str] ):
literal[string]
identifier[post] = identifier[cls] ( identifier[context] ,{ literal[string] : identifier[shortcode] })
identifier[post] . identifier[_node] = identifier[post] . identifier[_full_metadata]
keyword[return] identifier[post] | def from_shortcode(cls, context: InstaloaderContext, shortcode: str):
"""Create a post object from a given shortcode"""
# pylint:disable=protected-access
post = cls(context, {'shortcode': shortcode})
post._node = post._full_metadata
return post |
def chimera_anticluster(m, n=None, t=4, multiplier=3.0,
cls=BinaryQuadraticModel, subgraph=None, seed=None):
"""Generate an anticluster problem on a Chimera lattice.
An anticluster problem has weak interactions within a tile and strong
interactions between tiles.
Args:
m (int):
Number of rows in the Chimera lattice.
n (int, optional, default=m):
Number of columns in the Chimera lattice.
t (int, optional, default=t):
Size of the shore within each Chimera tile.
multiplier (number, optional, default=3.0):
Strength of the intertile edges.
cls (class, optional, default=:class:`.BinaryQuadraticModel`):
Binary quadratic model class to build from.
subgraph (int/tuple[nodes, edges]/:obj:`~networkx.Graph`):
A subgraph of a Chimera(m, n, t) graph to build the anticluster
problem on.
seed (int, optional, default=None):
Random seed.
Returns:
:obj:`.BinaryQuadraticModel`: spin-valued binary quadratic model.
"""
if seed is None:
seed = numpy.random.randint(2**32, dtype=np.uint32)
r = numpy.random.RandomState(seed)
m = int(m)
if n is None:
n = m
else:
n = int(n)
t = int(t)
ldata = np.zeros(m*n*t*2) # number of nodes
if m and n and t:
inrow, incol = zip(*_iter_chimera_tile_edges(m, n, t))
if m > 1 or n > 1:
outrow, outcol = zip(*_iter_chimera_intertile_edges(m, n, t))
else:
outrow = outcol = tuple()
qdata = r.choice((-1., 1.), size=len(inrow)+len(outrow))
qdata[len(inrow):] *= multiplier
irow = inrow + outrow
icol = incol + outcol
else:
irow = icol = qdata = tuple()
bqm = cls.from_numpy_vectors(ldata, (irow, icol, qdata), 0.0, SPIN)
if subgraph is not None:
nodes, edges = subgraph
subbqm = cls.empty(SPIN)
try:
subbqm.add_variables_from((v, bqm.linear[v]) for v in nodes)
except KeyError:
msg = "given 'subgraph' contains nodes not in Chimera({}, {}, {})".format(m, n, t)
raise ValueError(msg)
try:
subbqm.add_interactions_from((u, v, bqm.adj[u][v]) for u, v in edges)
except KeyError:
msg = "given 'subgraph' contains edges not in Chimera({}, {}, {})".format(m, n, t)
raise ValueError(msg)
bqm = subbqm
return bqm | def function[chimera_anticluster, parameter[m, n, t, multiplier, cls, subgraph, seed]]:
constant[Generate an anticluster problem on a Chimera lattice.
An anticluster problem has weak interactions within a tile and strong
interactions between tiles.
Args:
m (int):
Number of rows in the Chimera lattice.
n (int, optional, default=m):
Number of columns in the Chimera lattice.
t (int, optional, default=t):
Size of the shore within each Chimera tile.
multiplier (number, optional, default=3.0):
Strength of the intertile edges.
cls (class, optional, default=:class:`.BinaryQuadraticModel`):
Binary quadratic model class to build from.
subgraph (int/tuple[nodes, edges]/:obj:`~networkx.Graph`):
A subgraph of a Chimera(m, n, t) graph to build the anticluster
problem on.
seed (int, optional, default=None):
Random seed.
Returns:
:obj:`.BinaryQuadraticModel`: spin-valued binary quadratic model.
]
if compare[name[seed] is constant[None]] begin[:]
variable[seed] assign[=] call[name[numpy].random.randint, parameter[binary_operation[constant[2] ** constant[32]]]]
variable[r] assign[=] call[name[numpy].random.RandomState, parameter[name[seed]]]
variable[m] assign[=] call[name[int], parameter[name[m]]]
if compare[name[n] is constant[None]] begin[:]
variable[n] assign[=] name[m]
variable[t] assign[=] call[name[int], parameter[name[t]]]
variable[ldata] assign[=] call[name[np].zeros, parameter[binary_operation[binary_operation[binary_operation[name[m] * name[n]] * name[t]] * constant[2]]]]
if <ast.BoolOp object at 0x7da1b0717700> begin[:]
<ast.Tuple object at 0x7da1b0716560> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0715630>]]
if <ast.BoolOp object at 0x7da1b0714b80> begin[:]
<ast.Tuple object at 0x7da1b0714850> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0714fd0>]]
variable[qdata] assign[=] call[name[r].choice, parameter[tuple[[<ast.UnaryOp object at 0x7da1b0716200>, <ast.Constant object at 0x7da1b07146d0>]]]]
<ast.AugAssign object at 0x7da1b0716cb0>
variable[irow] assign[=] binary_operation[name[inrow] + name[outrow]]
variable[icol] assign[=] binary_operation[name[incol] + name[outcol]]
variable[bqm] assign[=] call[name[cls].from_numpy_vectors, parameter[name[ldata], tuple[[<ast.Name object at 0x7da1b0716e00>, <ast.Name object at 0x7da1b07156f0>, <ast.Name object at 0x7da1b0714fa0>]], constant[0.0], name[SPIN]]]
if compare[name[subgraph] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b0716b90> assign[=] name[subgraph]
variable[subbqm] assign[=] call[name[cls].empty, parameter[name[SPIN]]]
<ast.Try object at 0x7da1b0717550>
<ast.Try object at 0x7da1b07150f0>
variable[bqm] assign[=] name[subbqm]
return[name[bqm]] | keyword[def] identifier[chimera_anticluster] ( identifier[m] , identifier[n] = keyword[None] , identifier[t] = literal[int] , identifier[multiplier] = literal[int] ,
identifier[cls] = identifier[BinaryQuadraticModel] , identifier[subgraph] = keyword[None] , identifier[seed] = keyword[None] ):
literal[string]
keyword[if] identifier[seed] keyword[is] keyword[None] :
identifier[seed] = identifier[numpy] . identifier[random] . identifier[randint] ( literal[int] ** literal[int] , identifier[dtype] = identifier[np] . identifier[uint32] )
identifier[r] = identifier[numpy] . identifier[random] . identifier[RandomState] ( identifier[seed] )
identifier[m] = identifier[int] ( identifier[m] )
keyword[if] identifier[n] keyword[is] keyword[None] :
identifier[n] = identifier[m]
keyword[else] :
identifier[n] = identifier[int] ( identifier[n] )
identifier[t] = identifier[int] ( identifier[t] )
identifier[ldata] = identifier[np] . identifier[zeros] ( identifier[m] * identifier[n] * identifier[t] * literal[int] )
keyword[if] identifier[m] keyword[and] identifier[n] keyword[and] identifier[t] :
identifier[inrow] , identifier[incol] = identifier[zip] (* identifier[_iter_chimera_tile_edges] ( identifier[m] , identifier[n] , identifier[t] ))
keyword[if] identifier[m] > literal[int] keyword[or] identifier[n] > literal[int] :
identifier[outrow] , identifier[outcol] = identifier[zip] (* identifier[_iter_chimera_intertile_edges] ( identifier[m] , identifier[n] , identifier[t] ))
keyword[else] :
identifier[outrow] = identifier[outcol] = identifier[tuple] ()
identifier[qdata] = identifier[r] . identifier[choice] ((- literal[int] , literal[int] ), identifier[size] = identifier[len] ( identifier[inrow] )+ identifier[len] ( identifier[outrow] ))
identifier[qdata] [ identifier[len] ( identifier[inrow] ):]*= identifier[multiplier]
identifier[irow] = identifier[inrow] + identifier[outrow]
identifier[icol] = identifier[incol] + identifier[outcol]
keyword[else] :
identifier[irow] = identifier[icol] = identifier[qdata] = identifier[tuple] ()
identifier[bqm] = identifier[cls] . identifier[from_numpy_vectors] ( identifier[ldata] ,( identifier[irow] , identifier[icol] , identifier[qdata] ), literal[int] , identifier[SPIN] )
keyword[if] identifier[subgraph] keyword[is] keyword[not] keyword[None] :
identifier[nodes] , identifier[edges] = identifier[subgraph]
identifier[subbqm] = identifier[cls] . identifier[empty] ( identifier[SPIN] )
keyword[try] :
identifier[subbqm] . identifier[add_variables_from] (( identifier[v] , identifier[bqm] . identifier[linear] [ identifier[v] ]) keyword[for] identifier[v] keyword[in] identifier[nodes] )
keyword[except] identifier[KeyError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[m] , identifier[n] , identifier[t] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
keyword[try] :
identifier[subbqm] . identifier[add_interactions_from] (( identifier[u] , identifier[v] , identifier[bqm] . identifier[adj] [ identifier[u] ][ identifier[v] ]) keyword[for] identifier[u] , identifier[v] keyword[in] identifier[edges] )
keyword[except] identifier[KeyError] :
identifier[msg] = literal[string] . identifier[format] ( identifier[m] , identifier[n] , identifier[t] )
keyword[raise] identifier[ValueError] ( identifier[msg] )
identifier[bqm] = identifier[subbqm]
keyword[return] identifier[bqm] | def chimera_anticluster(m, n=None, t=4, multiplier=3.0, cls=BinaryQuadraticModel, subgraph=None, seed=None):
"""Generate an anticluster problem on a Chimera lattice.
An anticluster problem has weak interactions within a tile and strong
interactions between tiles.
Args:
m (int):
Number of rows in the Chimera lattice.
n (int, optional, default=m):
Number of columns in the Chimera lattice.
t (int, optional, default=t):
Size of the shore within each Chimera tile.
multiplier (number, optional, default=3.0):
Strength of the intertile edges.
cls (class, optional, default=:class:`.BinaryQuadraticModel`):
Binary quadratic model class to build from.
subgraph (int/tuple[nodes, edges]/:obj:`~networkx.Graph`):
A subgraph of a Chimera(m, n, t) graph to build the anticluster
problem on.
seed (int, optional, default=None):
Random seed.
Returns:
:obj:`.BinaryQuadraticModel`: spin-valued binary quadratic model.
"""
if seed is None:
seed = numpy.random.randint(2 ** 32, dtype=np.uint32) # depends on [control=['if'], data=['seed']]
r = numpy.random.RandomState(seed)
m = int(m)
if n is None:
n = m # depends on [control=['if'], data=['n']]
else:
n = int(n)
t = int(t)
ldata = np.zeros(m * n * t * 2) # number of nodes
if m and n and t:
(inrow, incol) = zip(*_iter_chimera_tile_edges(m, n, t))
if m > 1 or n > 1:
(outrow, outcol) = zip(*_iter_chimera_intertile_edges(m, n, t)) # depends on [control=['if'], data=[]]
else:
outrow = outcol = tuple()
qdata = r.choice((-1.0, 1.0), size=len(inrow) + len(outrow))
qdata[len(inrow):] *= multiplier
irow = inrow + outrow
icol = incol + outcol # depends on [control=['if'], data=[]]
else:
irow = icol = qdata = tuple()
bqm = cls.from_numpy_vectors(ldata, (irow, icol, qdata), 0.0, SPIN)
if subgraph is not None:
(nodes, edges) = subgraph
subbqm = cls.empty(SPIN)
try:
subbqm.add_variables_from(((v, bqm.linear[v]) for v in nodes)) # depends on [control=['try'], data=[]]
except KeyError:
msg = "given 'subgraph' contains nodes not in Chimera({}, {}, {})".format(m, n, t)
raise ValueError(msg) # depends on [control=['except'], data=[]]
try:
subbqm.add_interactions_from(((u, v, bqm.adj[u][v]) for (u, v) in edges)) # depends on [control=['try'], data=[]]
except KeyError:
msg = "given 'subgraph' contains edges not in Chimera({}, {}, {})".format(m, n, t)
raise ValueError(msg) # depends on [control=['except'], data=[]]
bqm = subbqm # depends on [control=['if'], data=['subgraph']]
return bqm |
def format_args_in_redis_protocol(*args):
"""Formats arguments into redis protocol...
This function makes and returns a string/buffer corresponding to
given arguments formated with the redis protocol.
integer, text, string or binary types are automatically converted
(using utf8 if necessary).
More informations about the protocol: http://redis.io/topics/protocol
Args:
*args: full redis command as variable length argument list
Returns:
binary string (arguments in redis protocol)
Examples:
>>> format_args_in_redis_protocol("HSET", "key", "field", "value")
'*4\r\n$4\r\nHSET\r\n$3\r\nkey\r\n$5\r\nfield\r\n$5\r\nvalue\r\n'
"""
buf = WriteBuffer()
l = "*%d\r\n" % len(args) # noqa: E741
if six.PY2:
buf.append(l)
else: # pragma: no cover
buf.append(l.encode('utf-8'))
for arg in args:
if isinstance(arg, six.text_type):
# it's a unicode string in Python2 or a standard (unicode)
# string in Python3, let's encode it in utf-8 to get raw bytes
arg = arg.encode('utf-8')
elif isinstance(arg, six.string_types):
# it's a basestring in Python2 => nothing to do
pass
elif isinstance(arg, six.binary_type): # pragma: no cover
# it's a raw bytes string in Python3 => nothing to do
pass
elif isinstance(arg, six.integer_types):
tmp = "%d" % arg
if six.PY2:
arg = tmp
else: # pragma: no cover
arg = tmp.encode('utf-8')
elif isinstance(arg, WriteBuffer):
# it's a WriteBuffer object => nothing to do
pass
else:
raise Exception("don't know what to do with %s" % type(arg))
l = "$%d\r\n" % len(arg) # noqa: E741
if six.PY2:
buf.append(l)
else: # pragma: no cover
buf.append(l.encode('utf-8'))
buf.append(arg)
buf.append(b"\r\n")
return buf | def function[format_args_in_redis_protocol, parameter[]]:
constant[Formats arguments into redis protocol...
This function makes and returns a string/buffer corresponding to
given arguments formated with the redis protocol.
integer, text, string or binary types are automatically converted
(using utf8 if necessary).
More informations about the protocol: http://redis.io/topics/protocol
Args:
*args: full redis command as variable length argument list
Returns:
binary string (arguments in redis protocol)
Examples:
>>> format_args_in_redis_protocol("HSET", "key", "field", "value")
'*4
$4
HSET
$3
key
$5
field
$5
value
'
]
variable[buf] assign[=] call[name[WriteBuffer], parameter[]]
variable[l] assign[=] binary_operation[constant[*%d
] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[args]]]]
if name[six].PY2 begin[:]
call[name[buf].append, parameter[name[l]]]
for taget[name[arg]] in starred[name[args]] begin[:]
if call[name[isinstance], parameter[name[arg], name[six].text_type]] begin[:]
variable[arg] assign[=] call[name[arg].encode, parameter[constant[utf-8]]]
variable[l] assign[=] binary_operation[constant[$%d
] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[arg]]]]
if name[six].PY2 begin[:]
call[name[buf].append, parameter[name[l]]]
call[name[buf].append, parameter[name[arg]]]
call[name[buf].append, parameter[constant[b'\r\n']]]
return[name[buf]] | keyword[def] identifier[format_args_in_redis_protocol] (* identifier[args] ):
literal[string]
identifier[buf] = identifier[WriteBuffer] ()
identifier[l] = literal[string] % identifier[len] ( identifier[args] )
keyword[if] identifier[six] . identifier[PY2] :
identifier[buf] . identifier[append] ( identifier[l] )
keyword[else] :
identifier[buf] . identifier[append] ( identifier[l] . identifier[encode] ( literal[string] ))
keyword[for] identifier[arg] keyword[in] identifier[args] :
keyword[if] identifier[isinstance] ( identifier[arg] , identifier[six] . identifier[text_type] ):
identifier[arg] = identifier[arg] . identifier[encode] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[six] . identifier[string_types] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[six] . identifier[binary_type] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[six] . identifier[integer_types] ):
identifier[tmp] = literal[string] % identifier[arg]
keyword[if] identifier[six] . identifier[PY2] :
identifier[arg] = identifier[tmp]
keyword[else] :
identifier[arg] = identifier[tmp] . identifier[encode] ( literal[string] )
keyword[elif] identifier[isinstance] ( identifier[arg] , identifier[WriteBuffer] ):
keyword[pass]
keyword[else] :
keyword[raise] identifier[Exception] ( literal[string] % identifier[type] ( identifier[arg] ))
identifier[l] = literal[string] % identifier[len] ( identifier[arg] )
keyword[if] identifier[six] . identifier[PY2] :
identifier[buf] . identifier[append] ( identifier[l] )
keyword[else] :
identifier[buf] . identifier[append] ( identifier[l] . identifier[encode] ( literal[string] ))
identifier[buf] . identifier[append] ( identifier[arg] )
identifier[buf] . identifier[append] ( literal[string] )
keyword[return] identifier[buf] | def format_args_in_redis_protocol(*args):
"""Formats arguments into redis protocol...
This function makes and returns a string/buffer corresponding to
given arguments formated with the redis protocol.
integer, text, string or binary types are automatically converted
(using utf8 if necessary).
More informations about the protocol: http://redis.io/topics/protocol
Args:
*args: full redis command as variable length argument list
Returns:
binary string (arguments in redis protocol)
Examples:
>>> format_args_in_redis_protocol("HSET", "key", "field", "value")
'*4\r
$4\r
HSET\r
$3\r
key\r
$5\r
field\r
$5\r
value\r
'
"""
buf = WriteBuffer()
l = '*%d\r\n' % len(args) # noqa: E741
if six.PY2:
buf.append(l) # depends on [control=['if'], data=[]]
else: # pragma: no cover
buf.append(l.encode('utf-8'))
for arg in args:
if isinstance(arg, six.text_type):
# it's a unicode string in Python2 or a standard (unicode)
# string in Python3, let's encode it in utf-8 to get raw bytes
arg = arg.encode('utf-8') # depends on [control=['if'], data=[]]
elif isinstance(arg, six.string_types):
# it's a basestring in Python2 => nothing to do
pass # depends on [control=['if'], data=[]]
elif isinstance(arg, six.binary_type): # pragma: no cover
# it's a raw bytes string in Python3 => nothing to do
pass # depends on [control=['if'], data=[]]
elif isinstance(arg, six.integer_types):
tmp = '%d' % arg
if six.PY2:
arg = tmp # depends on [control=['if'], data=[]]
else: # pragma: no cover
arg = tmp.encode('utf-8') # depends on [control=['if'], data=[]]
elif isinstance(arg, WriteBuffer):
# it's a WriteBuffer object => nothing to do
pass # depends on [control=['if'], data=[]]
else:
raise Exception("don't know what to do with %s" % type(arg))
l = '$%d\r\n' % len(arg) # noqa: E741
if six.PY2:
buf.append(l) # depends on [control=['if'], data=[]]
else: # pragma: no cover
buf.append(l.encode('utf-8'))
buf.append(arg)
buf.append(b'\r\n') # depends on [control=['for'], data=['arg']]
return buf |
def _response(self, in_response_to, consumer_url=None, status=None,
issuer=None, sign=False, to_sign=None, sp_entity_id=None,
encrypt_assertion=False,
encrypt_assertion_self_contained=False,
encrypted_advice_attributes=False,
encrypt_cert_advice=None, encrypt_cert_assertion=None,
sign_assertion=None, pefim=False, sign_alg=None,
digest_alg=None, **kwargs):
""" Create a Response.
Encryption:
encrypt_assertion must be true for encryption to be
performed. If encrypted_advice_attributes also is
true, then will the function try to encrypt the assertion in
the the advice element of the main
assertion. Only one assertion element is allowed in the
advice element, if multiple assertions exists
in the advice element the main assertion will be encrypted
instead, since it's no point to encrypt
If encrypted_advice_attributes is
false the main assertion will be encrypted. Since the same key
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param status: An instance of samlp.Status
:param issuer: The issuer of the response
:param sign: Whether the response should be signed or not
:param to_sign: If there are other parts to sign
:param sp_entity_id: Entity ID for the calling service provider.
:param encrypt_assertion: True if assertions should be encrypted.
:param encrypt_assertion_self_contained: True if all encrypted
assertions should have alla namespaces selfcontained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param sign_assertion: True if assertions should be signed.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param kwargs: Extra key word arguments
:return: A Response instance
"""
if not status:
status = success_status_factory()
_issuer = self._issuer(issuer)
response = response_factory(issuer=_issuer,
in_response_to=in_response_to,
status=status, sign_alg=sign_alg,
digest_alg=digest_alg)
if consumer_url:
response.destination = consumer_url
self._add_info(response, **kwargs)
if not sign and to_sign and not encrypt_assertion:
return signed_instance_factory(response, self.sec, to_sign)
has_encrypt_cert = self.has_encrypt_cert_in_metadata(sp_entity_id)
if not has_encrypt_cert and encrypt_cert_advice is None:
encrypted_advice_attributes = False
if not has_encrypt_cert and encrypt_cert_assertion is None:
encrypt_assertion = False
if encrypt_assertion or (
encrypted_advice_attributes and
response.assertion.advice is
not None and
len(response.assertion.advice.assertion) == 1):
if sign:
response.signature = pre_signature_part(response.id,
self.sec.my_cert, 1,
sign_alg=sign_alg,
digest_alg=digest_alg)
sign_class = [(class_name(response), response.id)]
else:
sign_class = []
if encrypted_advice_attributes and response.assertion.advice is \
not None \
and len(response.assertion.advice.assertion) > 0:
_assertions = response.assertion
if not isinstance(_assertions, list):
_assertions = [_assertions]
for _assertion in _assertions:
_assertion.advice.encrypted_assertion = []
_assertion.advice.encrypted_assertion.append(
EncryptedAssertion())
_advice_assertions = copy.deepcopy(
_assertion.advice.assertion)
_assertion.advice.assertion = []
if not isinstance(_advice_assertions, list):
_advice_assertions = [_advice_assertions]
for tmp_assertion in _advice_assertions:
to_sign_advice = []
if sign_assertion and not pefim:
tmp_assertion.signature = pre_signature_part(
tmp_assertion.id, self.sec.my_cert, 1,
sign_alg=sign_alg, digest_alg=digest_alg)
to_sign_advice.append(
(class_name(tmp_assertion), tmp_assertion.id))
# tmp_assertion = response.assertion.advice.assertion[0]
_assertion.advice.encrypted_assertion[
0].add_extension_element(tmp_assertion)
if encrypt_assertion_self_contained:
advice_tag = \
response.assertion.advice._to_element_tree().tag
assertion_tag = tmp_assertion._to_element_tree().tag
response = \
response.get_xml_string_with_self_contained_assertion_within_advice_encrypted_assertion(
assertion_tag, advice_tag)
node_xpath = ''.join(
["/*[local-name()=\"%s\"]" % v for v in
["Response", "Assertion", "Advice",
"EncryptedAssertion", "Assertion"]])
if to_sign_advice:
response = signed_instance_factory(response,
self.sec,
to_sign_advice)
response = self._encrypt_assertion(
encrypt_cert_advice, sp_entity_id, response,
node_xpath=node_xpath)
response = response_from_string(response)
if encrypt_assertion:
to_sign_assertion = []
if sign_assertion is not None and sign_assertion:
_assertions = response.assertion
if not isinstance(_assertions, list):
_assertions = [_assertions]
for _assertion in _assertions:
_assertion.signature = pre_signature_part(
_assertion.id, self.sec.my_cert, 1,
sign_alg=sign_alg, digest_alg=digest_alg)
to_sign_assertion.append(
(class_name(_assertion), _assertion.id))
if encrypt_assertion_self_contained:
try:
assertion_tag = response.assertion._to_element_tree(
).tag
except:
assertion_tag = response.assertion[
0]._to_element_tree().tag
response = pre_encrypt_assertion(response)
response = \
response.get_xml_string_with_self_contained_assertion_within_encrypted_assertion(
assertion_tag)
else:
response = pre_encrypt_assertion(response)
if to_sign_assertion:
response = signed_instance_factory(response, self.sec,
to_sign_assertion)
response = self._encrypt_assertion(encrypt_cert_assertion,
sp_entity_id, response)
else:
if to_sign:
response = signed_instance_factory(response, self.sec,
to_sign)
if sign:
return signed_instance_factory(response, self.sec, sign_class)
else:
return response
if sign:
return self.sign(response, to_sign=to_sign, sign_alg=sign_alg,
digest_alg=digest_alg)
else:
return response | def function[_response, parameter[self, in_response_to, consumer_url, status, issuer, sign, to_sign, sp_entity_id, encrypt_assertion, encrypt_assertion_self_contained, encrypted_advice_attributes, encrypt_cert_advice, encrypt_cert_assertion, sign_assertion, pefim, sign_alg, digest_alg]]:
constant[ Create a Response.
Encryption:
encrypt_assertion must be true for encryption to be
performed. If encrypted_advice_attributes also is
true, then will the function try to encrypt the assertion in
the the advice element of the main
assertion. Only one assertion element is allowed in the
advice element, if multiple assertions exists
in the advice element the main assertion will be encrypted
instead, since it's no point to encrypt
If encrypted_advice_attributes is
false the main assertion will be encrypted. Since the same key
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param status: An instance of samlp.Status
:param issuer: The issuer of the response
:param sign: Whether the response should be signed or not
:param to_sign: If there are other parts to sign
:param sp_entity_id: Entity ID for the calling service provider.
:param encrypt_assertion: True if assertions should be encrypted.
:param encrypt_assertion_self_contained: True if all encrypted
assertions should have alla namespaces selfcontained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param sign_assertion: True if assertions should be signed.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param kwargs: Extra key word arguments
:return: A Response instance
]
if <ast.UnaryOp object at 0x7da2041daf50> begin[:]
variable[status] assign[=] call[name[success_status_factory], parameter[]]
variable[_issuer] assign[=] call[name[self]._issuer, parameter[name[issuer]]]
variable[response] assign[=] call[name[response_factory], parameter[]]
if name[consumer_url] begin[:]
name[response].destination assign[=] name[consumer_url]
call[name[self]._add_info, parameter[name[response]]]
if <ast.BoolOp object at 0x7da2041daec0> begin[:]
return[call[name[signed_instance_factory], parameter[name[response], name[self].sec, name[to_sign]]]]
variable[has_encrypt_cert] assign[=] call[name[self].has_encrypt_cert_in_metadata, parameter[name[sp_entity_id]]]
if <ast.BoolOp object at 0x7da2041db6d0> begin[:]
variable[encrypted_advice_attributes] assign[=] constant[False]
if <ast.BoolOp object at 0x7da2054a6d10> begin[:]
variable[encrypt_assertion] assign[=] constant[False]
if <ast.BoolOp object at 0x7da2054a4730> begin[:]
if name[sign] begin[:]
name[response].signature assign[=] call[name[pre_signature_part], parameter[name[response].id, name[self].sec.my_cert, constant[1]]]
variable[sign_class] assign[=] list[[<ast.Tuple object at 0x7da2054a7880>]]
if <ast.BoolOp object at 0x7da2054a7a60> begin[:]
variable[_assertions] assign[=] name[response].assertion
if <ast.UnaryOp object at 0x7da2041d8c70> begin[:]
variable[_assertions] assign[=] list[[<ast.Name object at 0x7da2041d86a0>]]
for taget[name[_assertion]] in starred[name[_assertions]] begin[:]
name[_assertion].advice.encrypted_assertion assign[=] list[[]]
call[name[_assertion].advice.encrypted_assertion.append, parameter[call[name[EncryptedAssertion], parameter[]]]]
variable[_advice_assertions] assign[=] call[name[copy].deepcopy, parameter[name[_assertion].advice.assertion]]
name[_assertion].advice.assertion assign[=] list[[]]
if <ast.UnaryOp object at 0x7da2041d9960> begin[:]
variable[_advice_assertions] assign[=] list[[<ast.Name object at 0x7da2041d9db0>]]
for taget[name[tmp_assertion]] in starred[name[_advice_assertions]] begin[:]
variable[to_sign_advice] assign[=] list[[]]
if <ast.BoolOp object at 0x7da20c794cd0> begin[:]
name[tmp_assertion].signature assign[=] call[name[pre_signature_part], parameter[name[tmp_assertion].id, name[self].sec.my_cert, constant[1]]]
call[name[to_sign_advice].append, parameter[tuple[[<ast.Call object at 0x7da20c796a10>, <ast.Attribute object at 0x7da20c795180>]]]]
call[call[name[_assertion].advice.encrypted_assertion][constant[0]].add_extension_element, parameter[name[tmp_assertion]]]
if name[encrypt_assertion_self_contained] begin[:]
variable[advice_tag] assign[=] call[name[response].assertion.advice._to_element_tree, parameter[]].tag
variable[assertion_tag] assign[=] call[name[tmp_assertion]._to_element_tree, parameter[]].tag
variable[response] assign[=] call[name[response].get_xml_string_with_self_contained_assertion_within_advice_encrypted_assertion, parameter[name[assertion_tag], name[advice_tag]]]
variable[node_xpath] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da20c796860>]]
if name[to_sign_advice] begin[:]
variable[response] assign[=] call[name[signed_instance_factory], parameter[name[response], name[self].sec, name[to_sign_advice]]]
variable[response] assign[=] call[name[self]._encrypt_assertion, parameter[name[encrypt_cert_advice], name[sp_entity_id], name[response]]]
variable[response] assign[=] call[name[response_from_string], parameter[name[response]]]
if name[encrypt_assertion] begin[:]
variable[to_sign_assertion] assign[=] list[[]]
if <ast.BoolOp object at 0x7da20c795a20> begin[:]
variable[_assertions] assign[=] name[response].assertion
if <ast.UnaryOp object at 0x7da20c7948e0> begin[:]
variable[_assertions] assign[=] list[[<ast.Name object at 0x7da20e9611b0>]]
for taget[name[_assertion]] in starred[name[_assertions]] begin[:]
name[_assertion].signature assign[=] call[name[pre_signature_part], parameter[name[_assertion].id, name[self].sec.my_cert, constant[1]]]
call[name[to_sign_assertion].append, parameter[tuple[[<ast.Call object at 0x7da20e963820>, <ast.Attribute object at 0x7da20e962230>]]]]
if name[encrypt_assertion_self_contained] begin[:]
<ast.Try object at 0x7da20e961b40>
variable[response] assign[=] call[name[pre_encrypt_assertion], parameter[name[response]]]
variable[response] assign[=] call[name[response].get_xml_string_with_self_contained_assertion_within_encrypted_assertion, parameter[name[assertion_tag]]]
if name[to_sign_assertion] begin[:]
variable[response] assign[=] call[name[signed_instance_factory], parameter[name[response], name[self].sec, name[to_sign_assertion]]]
variable[response] assign[=] call[name[self]._encrypt_assertion, parameter[name[encrypt_cert_assertion], name[sp_entity_id], name[response]]]
if name[sign] begin[:]
return[call[name[signed_instance_factory], parameter[name[response], name[self].sec, name[sign_class]]]]
if name[sign] begin[:]
return[call[name[self].sign, parameter[name[response]]]] | keyword[def] identifier[_response] ( identifier[self] , identifier[in_response_to] , identifier[consumer_url] = keyword[None] , identifier[status] = keyword[None] ,
identifier[issuer] = keyword[None] , identifier[sign] = keyword[False] , identifier[to_sign] = keyword[None] , identifier[sp_entity_id] = keyword[None] ,
identifier[encrypt_assertion] = keyword[False] ,
identifier[encrypt_assertion_self_contained] = keyword[False] ,
identifier[encrypted_advice_attributes] = keyword[False] ,
identifier[encrypt_cert_advice] = keyword[None] , identifier[encrypt_cert_assertion] = keyword[None] ,
identifier[sign_assertion] = keyword[None] , identifier[pefim] = keyword[False] , identifier[sign_alg] = keyword[None] ,
identifier[digest_alg] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[status] :
identifier[status] = identifier[success_status_factory] ()
identifier[_issuer] = identifier[self] . identifier[_issuer] ( identifier[issuer] )
identifier[response] = identifier[response_factory] ( identifier[issuer] = identifier[_issuer] ,
identifier[in_response_to] = identifier[in_response_to] ,
identifier[status] = identifier[status] , identifier[sign_alg] = identifier[sign_alg] ,
identifier[digest_alg] = identifier[digest_alg] )
keyword[if] identifier[consumer_url] :
identifier[response] . identifier[destination] = identifier[consumer_url]
identifier[self] . identifier[_add_info] ( identifier[response] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[sign] keyword[and] identifier[to_sign] keyword[and] keyword[not] identifier[encrypt_assertion] :
keyword[return] identifier[signed_instance_factory] ( identifier[response] , identifier[self] . identifier[sec] , identifier[to_sign] )
identifier[has_encrypt_cert] = identifier[self] . identifier[has_encrypt_cert_in_metadata] ( identifier[sp_entity_id] )
keyword[if] keyword[not] identifier[has_encrypt_cert] keyword[and] identifier[encrypt_cert_advice] keyword[is] keyword[None] :
identifier[encrypted_advice_attributes] = keyword[False]
keyword[if] keyword[not] identifier[has_encrypt_cert] keyword[and] identifier[encrypt_cert_assertion] keyword[is] keyword[None] :
identifier[encrypt_assertion] = keyword[False]
keyword[if] identifier[encrypt_assertion] keyword[or] (
identifier[encrypted_advice_attributes] keyword[and]
identifier[response] . identifier[assertion] . identifier[advice] keyword[is]
keyword[not] keyword[None] keyword[and]
identifier[len] ( identifier[response] . identifier[assertion] . identifier[advice] . identifier[assertion] )== literal[int] ):
keyword[if] identifier[sign] :
identifier[response] . identifier[signature] = identifier[pre_signature_part] ( identifier[response] . identifier[id] ,
identifier[self] . identifier[sec] . identifier[my_cert] , literal[int] ,
identifier[sign_alg] = identifier[sign_alg] ,
identifier[digest_alg] = identifier[digest_alg] )
identifier[sign_class] =[( identifier[class_name] ( identifier[response] ), identifier[response] . identifier[id] )]
keyword[else] :
identifier[sign_class] =[]
keyword[if] identifier[encrypted_advice_attributes] keyword[and] identifier[response] . identifier[assertion] . identifier[advice] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[response] . identifier[assertion] . identifier[advice] . identifier[assertion] )> literal[int] :
identifier[_assertions] = identifier[response] . identifier[assertion]
keyword[if] keyword[not] identifier[isinstance] ( identifier[_assertions] , identifier[list] ):
identifier[_assertions] =[ identifier[_assertions] ]
keyword[for] identifier[_assertion] keyword[in] identifier[_assertions] :
identifier[_assertion] . identifier[advice] . identifier[encrypted_assertion] =[]
identifier[_assertion] . identifier[advice] . identifier[encrypted_assertion] . identifier[append] (
identifier[EncryptedAssertion] ())
identifier[_advice_assertions] = identifier[copy] . identifier[deepcopy] (
identifier[_assertion] . identifier[advice] . identifier[assertion] )
identifier[_assertion] . identifier[advice] . identifier[assertion] =[]
keyword[if] keyword[not] identifier[isinstance] ( identifier[_advice_assertions] , identifier[list] ):
identifier[_advice_assertions] =[ identifier[_advice_assertions] ]
keyword[for] identifier[tmp_assertion] keyword[in] identifier[_advice_assertions] :
identifier[to_sign_advice] =[]
keyword[if] identifier[sign_assertion] keyword[and] keyword[not] identifier[pefim] :
identifier[tmp_assertion] . identifier[signature] = identifier[pre_signature_part] (
identifier[tmp_assertion] . identifier[id] , identifier[self] . identifier[sec] . identifier[my_cert] , literal[int] ,
identifier[sign_alg] = identifier[sign_alg] , identifier[digest_alg] = identifier[digest_alg] )
identifier[to_sign_advice] . identifier[append] (
( identifier[class_name] ( identifier[tmp_assertion] ), identifier[tmp_assertion] . identifier[id] ))
identifier[_assertion] . identifier[advice] . identifier[encrypted_assertion] [
literal[int] ]. identifier[add_extension_element] ( identifier[tmp_assertion] )
keyword[if] identifier[encrypt_assertion_self_contained] :
identifier[advice_tag] = identifier[response] . identifier[assertion] . identifier[advice] . identifier[_to_element_tree] (). identifier[tag]
identifier[assertion_tag] = identifier[tmp_assertion] . identifier[_to_element_tree] (). identifier[tag]
identifier[response] = identifier[response] . identifier[get_xml_string_with_self_contained_assertion_within_advice_encrypted_assertion] (
identifier[assertion_tag] , identifier[advice_tag] )
identifier[node_xpath] = literal[string] . identifier[join] (
[ literal[string] % identifier[v] keyword[for] identifier[v] keyword[in]
[ literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]])
keyword[if] identifier[to_sign_advice] :
identifier[response] = identifier[signed_instance_factory] ( identifier[response] ,
identifier[self] . identifier[sec] ,
identifier[to_sign_advice] )
identifier[response] = identifier[self] . identifier[_encrypt_assertion] (
identifier[encrypt_cert_advice] , identifier[sp_entity_id] , identifier[response] ,
identifier[node_xpath] = identifier[node_xpath] )
identifier[response] = identifier[response_from_string] ( identifier[response] )
keyword[if] identifier[encrypt_assertion] :
identifier[to_sign_assertion] =[]
keyword[if] identifier[sign_assertion] keyword[is] keyword[not] keyword[None] keyword[and] identifier[sign_assertion] :
identifier[_assertions] = identifier[response] . identifier[assertion]
keyword[if] keyword[not] identifier[isinstance] ( identifier[_assertions] , identifier[list] ):
identifier[_assertions] =[ identifier[_assertions] ]
keyword[for] identifier[_assertion] keyword[in] identifier[_assertions] :
identifier[_assertion] . identifier[signature] = identifier[pre_signature_part] (
identifier[_assertion] . identifier[id] , identifier[self] . identifier[sec] . identifier[my_cert] , literal[int] ,
identifier[sign_alg] = identifier[sign_alg] , identifier[digest_alg] = identifier[digest_alg] )
identifier[to_sign_assertion] . identifier[append] (
( identifier[class_name] ( identifier[_assertion] ), identifier[_assertion] . identifier[id] ))
keyword[if] identifier[encrypt_assertion_self_contained] :
keyword[try] :
identifier[assertion_tag] = identifier[response] . identifier[assertion] . identifier[_to_element_tree] (
). identifier[tag]
keyword[except] :
identifier[assertion_tag] = identifier[response] . identifier[assertion] [
literal[int] ]. identifier[_to_element_tree] (). identifier[tag]
identifier[response] = identifier[pre_encrypt_assertion] ( identifier[response] )
identifier[response] = identifier[response] . identifier[get_xml_string_with_self_contained_assertion_within_encrypted_assertion] (
identifier[assertion_tag] )
keyword[else] :
identifier[response] = identifier[pre_encrypt_assertion] ( identifier[response] )
keyword[if] identifier[to_sign_assertion] :
identifier[response] = identifier[signed_instance_factory] ( identifier[response] , identifier[self] . identifier[sec] ,
identifier[to_sign_assertion] )
identifier[response] = identifier[self] . identifier[_encrypt_assertion] ( identifier[encrypt_cert_assertion] ,
identifier[sp_entity_id] , identifier[response] )
keyword[else] :
keyword[if] identifier[to_sign] :
identifier[response] = identifier[signed_instance_factory] ( identifier[response] , identifier[self] . identifier[sec] ,
identifier[to_sign] )
keyword[if] identifier[sign] :
keyword[return] identifier[signed_instance_factory] ( identifier[response] , identifier[self] . identifier[sec] , identifier[sign_class] )
keyword[else] :
keyword[return] identifier[response]
keyword[if] identifier[sign] :
keyword[return] identifier[self] . identifier[sign] ( identifier[response] , identifier[to_sign] = identifier[to_sign] , identifier[sign_alg] = identifier[sign_alg] ,
identifier[digest_alg] = identifier[digest_alg] )
keyword[else] :
keyword[return] identifier[response] | def _response(self, in_response_to, consumer_url=None, status=None, issuer=None, sign=False, to_sign=None, sp_entity_id=None, encrypt_assertion=False, encrypt_assertion_self_contained=False, encrypted_advice_attributes=False, encrypt_cert_advice=None, encrypt_cert_assertion=None, sign_assertion=None, pefim=False, sign_alg=None, digest_alg=None, **kwargs):
""" Create a Response.
Encryption:
encrypt_assertion must be true for encryption to be
performed. If encrypted_advice_attributes also is
true, then will the function try to encrypt the assertion in
the the advice element of the main
assertion. Only one assertion element is allowed in the
advice element, if multiple assertions exists
in the advice element the main assertion will be encrypted
instead, since it's no point to encrypt
If encrypted_advice_attributes is
false the main assertion will be encrypted. Since the same key
:param in_response_to: The session identifier of the request
:param consumer_url: The URL which should receive the response
:param status: An instance of samlp.Status
:param issuer: The issuer of the response
:param sign: Whether the response should be signed or not
:param to_sign: If there are other parts to sign
:param sp_entity_id: Entity ID for the calling service provider.
:param encrypt_assertion: True if assertions should be encrypted.
:param encrypt_assertion_self_contained: True if all encrypted
assertions should have alla namespaces selfcontained.
:param encrypted_advice_attributes: True if assertions in the advice
element should be encrypted.
:param encrypt_cert_advice: Certificate to be used for encryption of
assertions in the advice element.
:param encrypt_cert_assertion: Certificate to be used for encryption
of assertions.
:param sign_assertion: True if assertions should be signed.
:param pefim: True if a response according to the PEFIM profile
should be created.
:param kwargs: Extra key word arguments
:return: A Response instance
"""
if not status:
status = success_status_factory() # depends on [control=['if'], data=[]]
_issuer = self._issuer(issuer)
response = response_factory(issuer=_issuer, in_response_to=in_response_to, status=status, sign_alg=sign_alg, digest_alg=digest_alg)
if consumer_url:
response.destination = consumer_url # depends on [control=['if'], data=[]]
self._add_info(response, **kwargs)
if not sign and to_sign and (not encrypt_assertion):
return signed_instance_factory(response, self.sec, to_sign) # depends on [control=['if'], data=[]]
has_encrypt_cert = self.has_encrypt_cert_in_metadata(sp_entity_id)
if not has_encrypt_cert and encrypt_cert_advice is None:
encrypted_advice_attributes = False # depends on [control=['if'], data=[]]
if not has_encrypt_cert and encrypt_cert_assertion is None:
encrypt_assertion = False # depends on [control=['if'], data=[]]
if encrypt_assertion or (encrypted_advice_attributes and response.assertion.advice is not None and (len(response.assertion.advice.assertion) == 1)):
if sign:
response.signature = pre_signature_part(response.id, self.sec.my_cert, 1, sign_alg=sign_alg, digest_alg=digest_alg)
sign_class = [(class_name(response), response.id)] # depends on [control=['if'], data=[]]
else:
sign_class = []
if encrypted_advice_attributes and response.assertion.advice is not None and (len(response.assertion.advice.assertion) > 0):
_assertions = response.assertion
if not isinstance(_assertions, list):
_assertions = [_assertions] # depends on [control=['if'], data=[]]
for _assertion in _assertions:
_assertion.advice.encrypted_assertion = []
_assertion.advice.encrypted_assertion.append(EncryptedAssertion())
_advice_assertions = copy.deepcopy(_assertion.advice.assertion)
_assertion.advice.assertion = []
if not isinstance(_advice_assertions, list):
_advice_assertions = [_advice_assertions] # depends on [control=['if'], data=[]]
for tmp_assertion in _advice_assertions:
to_sign_advice = []
if sign_assertion and (not pefim):
tmp_assertion.signature = pre_signature_part(tmp_assertion.id, self.sec.my_cert, 1, sign_alg=sign_alg, digest_alg=digest_alg)
to_sign_advice.append((class_name(tmp_assertion), tmp_assertion.id)) # depends on [control=['if'], data=[]]
# tmp_assertion = response.assertion.advice.assertion[0]
_assertion.advice.encrypted_assertion[0].add_extension_element(tmp_assertion)
if encrypt_assertion_self_contained:
advice_tag = response.assertion.advice._to_element_tree().tag
assertion_tag = tmp_assertion._to_element_tree().tag
response = response.get_xml_string_with_self_contained_assertion_within_advice_encrypted_assertion(assertion_tag, advice_tag) # depends on [control=['if'], data=[]]
node_xpath = ''.join(['/*[local-name()="%s"]' % v for v in ['Response', 'Assertion', 'Advice', 'EncryptedAssertion', 'Assertion']])
if to_sign_advice:
response = signed_instance_factory(response, self.sec, to_sign_advice) # depends on [control=['if'], data=[]]
response = self._encrypt_assertion(encrypt_cert_advice, sp_entity_id, response, node_xpath=node_xpath)
response = response_from_string(response) # depends on [control=['for'], data=['tmp_assertion']] # depends on [control=['for'], data=['_assertion']] # depends on [control=['if'], data=[]]
if encrypt_assertion:
to_sign_assertion = []
if sign_assertion is not None and sign_assertion:
_assertions = response.assertion
if not isinstance(_assertions, list):
_assertions = [_assertions] # depends on [control=['if'], data=[]]
for _assertion in _assertions:
_assertion.signature = pre_signature_part(_assertion.id, self.sec.my_cert, 1, sign_alg=sign_alg, digest_alg=digest_alg)
to_sign_assertion.append((class_name(_assertion), _assertion.id)) # depends on [control=['for'], data=['_assertion']] # depends on [control=['if'], data=[]]
if encrypt_assertion_self_contained:
try:
assertion_tag = response.assertion._to_element_tree().tag # depends on [control=['try'], data=[]]
except:
assertion_tag = response.assertion[0]._to_element_tree().tag # depends on [control=['except'], data=[]]
response = pre_encrypt_assertion(response)
response = response.get_xml_string_with_self_contained_assertion_within_encrypted_assertion(assertion_tag) # depends on [control=['if'], data=[]]
else:
response = pre_encrypt_assertion(response)
if to_sign_assertion:
response = signed_instance_factory(response, self.sec, to_sign_assertion) # depends on [control=['if'], data=[]]
response = self._encrypt_assertion(encrypt_cert_assertion, sp_entity_id, response) # depends on [control=['if'], data=[]]
elif to_sign:
response = signed_instance_factory(response, self.sec, to_sign) # depends on [control=['if'], data=[]]
if sign:
return signed_instance_factory(response, self.sec, sign_class) # depends on [control=['if'], data=[]]
else:
return response # depends on [control=['if'], data=[]]
if sign:
return self.sign(response, to_sign=to_sign, sign_alg=sign_alg, digest_alg=digest_alg) # depends on [control=['if'], data=[]]
else:
return response |
def get_tile_from_image(image, size, top_left_corner, tile_size):
"""
Returns a rectangular region of the given image as a separate image
If the tile goes off the edge of the image, it will be truncated. The new size is also returned.
:param image: The given image, as a list of (R,G,B) tuples
:param size: The size of the image, as (width, height)
:param top_left_corner: The top left corner of the tile, relative to the image, as a tuple (x,y)
:param tile_size: The size of the tile, as a tuple (width, height)
:return: A tuple (tile, size) where the tile is a list of (R,G,B) tuples and the size is (width, height)
"""
tile_pixels = []
# crop tile if necessary
tile_x = min(size[0] - top_left_corner[0], tile_size[0])
tile_y = min(size[1] - top_left_corner[1], tile_size[1])
tile_size = tile_x, tile_y
for y in range(tile_size[1]):
for x in range(tile_size[0]):
coords = (x + top_left_corner[0], y + top_left_corner[1])
tile_pixels.append(image[coords_to_index(coords, size[0])])
return tile_pixels, tile_size | def function[get_tile_from_image, parameter[image, size, top_left_corner, tile_size]]:
constant[
Returns a rectangular region of the given image as a separate image
If the tile goes off the edge of the image, it will be truncated. The new size is also returned.
:param image: The given image, as a list of (R,G,B) tuples
:param size: The size of the image, as (width, height)
:param top_left_corner: The top left corner of the tile, relative to the image, as a tuple (x,y)
:param tile_size: The size of the tile, as a tuple (width, height)
:return: A tuple (tile, size) where the tile is a list of (R,G,B) tuples and the size is (width, height)
]
variable[tile_pixels] assign[=] list[[]]
variable[tile_x] assign[=] call[name[min], parameter[binary_operation[call[name[size]][constant[0]] - call[name[top_left_corner]][constant[0]]], call[name[tile_size]][constant[0]]]]
variable[tile_y] assign[=] call[name[min], parameter[binary_operation[call[name[size]][constant[1]] - call[name[top_left_corner]][constant[1]]], call[name[tile_size]][constant[1]]]]
variable[tile_size] assign[=] tuple[[<ast.Name object at 0x7da1b13a7640>, <ast.Name object at 0x7da1b13a4d90>]]
for taget[name[y]] in starred[call[name[range], parameter[call[name[tile_size]][constant[1]]]]] begin[:]
for taget[name[x]] in starred[call[name[range], parameter[call[name[tile_size]][constant[0]]]]] begin[:]
variable[coords] assign[=] tuple[[<ast.BinOp object at 0x7da1b13a7760>, <ast.BinOp object at 0x7da1b13a5060>]]
call[name[tile_pixels].append, parameter[call[name[image]][call[name[coords_to_index], parameter[name[coords], call[name[size]][constant[0]]]]]]]
return[tuple[[<ast.Name object at 0x7da1b13a5720>, <ast.Name object at 0x7da1b13a5c30>]]] | keyword[def] identifier[get_tile_from_image] ( identifier[image] , identifier[size] , identifier[top_left_corner] , identifier[tile_size] ):
literal[string]
identifier[tile_pixels] =[]
identifier[tile_x] = identifier[min] ( identifier[size] [ literal[int] ]- identifier[top_left_corner] [ literal[int] ], identifier[tile_size] [ literal[int] ])
identifier[tile_y] = identifier[min] ( identifier[size] [ literal[int] ]- identifier[top_left_corner] [ literal[int] ], identifier[tile_size] [ literal[int] ])
identifier[tile_size] = identifier[tile_x] , identifier[tile_y]
keyword[for] identifier[y] keyword[in] identifier[range] ( identifier[tile_size] [ literal[int] ]):
keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[tile_size] [ literal[int] ]):
identifier[coords] =( identifier[x] + identifier[top_left_corner] [ literal[int] ], identifier[y] + identifier[top_left_corner] [ literal[int] ])
identifier[tile_pixels] . identifier[append] ( identifier[image] [ identifier[coords_to_index] ( identifier[coords] , identifier[size] [ literal[int] ])])
keyword[return] identifier[tile_pixels] , identifier[tile_size] | def get_tile_from_image(image, size, top_left_corner, tile_size):
"""
Returns a rectangular region of the given image as a separate image
If the tile goes off the edge of the image, it will be truncated. The new size is also returned.
:param image: The given image, as a list of (R,G,B) tuples
:param size: The size of the image, as (width, height)
:param top_left_corner: The top left corner of the tile, relative to the image, as a tuple (x,y)
:param tile_size: The size of the tile, as a tuple (width, height)
:return: A tuple (tile, size) where the tile is a list of (R,G,B) tuples and the size is (width, height)
"""
tile_pixels = []
# crop tile if necessary
tile_x = min(size[0] - top_left_corner[0], tile_size[0])
tile_y = min(size[1] - top_left_corner[1], tile_size[1])
tile_size = (tile_x, tile_y)
for y in range(tile_size[1]):
for x in range(tile_size[0]):
coords = (x + top_left_corner[0], y + top_left_corner[1])
tile_pixels.append(image[coords_to_index(coords, size[0])]) # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=['y']]
return (tile_pixels, tile_size) |
def update(self, params, values):
"""
Update the particles field given new parameter values
"""
#1. Figure out if we're going to do a global update, in which
# case we just draw from scratch.
global_update, particles = self._update_type(params)
# if we are doing a global update, everything must change, so
# starting fresh will be faster instead of add subtract
if global_update:
self.set_values(params, values)
self.initialize()
return
# otherwise, update individual particles. delete the current versions
# of the particles update the particles, and redraw them anew at the
# places given by (params, values)
oldargs = self._drawargs()
for n in particles:
self._draw_particle(self.pos[n], *listify(oldargs[n]), sign=-1)
self.set_values(params, values)
newargs = self._drawargs()
for n in particles:
self._draw_particle(self.pos[n], *listify(newargs[n]), sign=+1) | def function[update, parameter[self, params, values]]:
constant[
Update the particles field given new parameter values
]
<ast.Tuple object at 0x7da18dc05480> assign[=] call[name[self]._update_type, parameter[name[params]]]
if name[global_update] begin[:]
call[name[self].set_values, parameter[name[params], name[values]]]
call[name[self].initialize, parameter[]]
return[None]
variable[oldargs] assign[=] call[name[self]._drawargs, parameter[]]
for taget[name[n]] in starred[name[particles]] begin[:]
call[name[self]._draw_particle, parameter[call[name[self].pos][name[n]], <ast.Starred object at 0x7da20c6e49a0>]]
call[name[self].set_values, parameter[name[params], name[values]]]
variable[newargs] assign[=] call[name[self]._drawargs, parameter[]]
for taget[name[n]] in starred[name[particles]] begin[:]
call[name[self]._draw_particle, parameter[call[name[self].pos][name[n]], <ast.Starred object at 0x7da18ede7d00>]] | keyword[def] identifier[update] ( identifier[self] , identifier[params] , identifier[values] ):
literal[string]
identifier[global_update] , identifier[particles] = identifier[self] . identifier[_update_type] ( identifier[params] )
keyword[if] identifier[global_update] :
identifier[self] . identifier[set_values] ( identifier[params] , identifier[values] )
identifier[self] . identifier[initialize] ()
keyword[return]
identifier[oldargs] = identifier[self] . identifier[_drawargs] ()
keyword[for] identifier[n] keyword[in] identifier[particles] :
identifier[self] . identifier[_draw_particle] ( identifier[self] . identifier[pos] [ identifier[n] ],* identifier[listify] ( identifier[oldargs] [ identifier[n] ]), identifier[sign] =- literal[int] )
identifier[self] . identifier[set_values] ( identifier[params] , identifier[values] )
identifier[newargs] = identifier[self] . identifier[_drawargs] ()
keyword[for] identifier[n] keyword[in] identifier[particles] :
identifier[self] . identifier[_draw_particle] ( identifier[self] . identifier[pos] [ identifier[n] ],* identifier[listify] ( identifier[newargs] [ identifier[n] ]), identifier[sign] =+ literal[int] ) | def update(self, params, values):
"""
Update the particles field given new parameter values
"""
#1. Figure out if we're going to do a global update, in which
# case we just draw from scratch.
(global_update, particles) = self._update_type(params)
# if we are doing a global update, everything must change, so
# starting fresh will be faster instead of add subtract
if global_update:
self.set_values(params, values)
self.initialize()
return # depends on [control=['if'], data=[]]
# otherwise, update individual particles. delete the current versions
# of the particles update the particles, and redraw them anew at the
# places given by (params, values)
oldargs = self._drawargs()
for n in particles:
self._draw_particle(self.pos[n], *listify(oldargs[n]), sign=-1) # depends on [control=['for'], data=['n']]
self.set_values(params, values)
newargs = self._drawargs()
for n in particles:
self._draw_particle(self.pos[n], *listify(newargs[n]), sign=+1) # depends on [control=['for'], data=['n']] |
def watch_for_new_conf(self, timeout=0):
"""Check if a new configuration was sent to the daemon
This function is called on each daemon loop turn. Basically it is a sleep...
If a new configuration was posted, this function returns True
:param timeout: timeout to wait. Default is no wait time.
:type timeout: float
:return: None
"""
logger.debug("Watching for a new configuration, timeout: %s", timeout)
self.make_a_pause(timeout=timeout, check_time_change=False)
return any(self.new_conf) | def function[watch_for_new_conf, parameter[self, timeout]]:
constant[Check if a new configuration was sent to the daemon
This function is called on each daemon loop turn. Basically it is a sleep...
If a new configuration was posted, this function returns True
:param timeout: timeout to wait. Default is no wait time.
:type timeout: float
:return: None
]
call[name[logger].debug, parameter[constant[Watching for a new configuration, timeout: %s], name[timeout]]]
call[name[self].make_a_pause, parameter[]]
return[call[name[any], parameter[name[self].new_conf]]] | keyword[def] identifier[watch_for_new_conf] ( identifier[self] , identifier[timeout] = literal[int] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] , identifier[timeout] )
identifier[self] . identifier[make_a_pause] ( identifier[timeout] = identifier[timeout] , identifier[check_time_change] = keyword[False] )
keyword[return] identifier[any] ( identifier[self] . identifier[new_conf] ) | def watch_for_new_conf(self, timeout=0):
"""Check if a new configuration was sent to the daemon
This function is called on each daemon loop turn. Basically it is a sleep...
If a new configuration was posted, this function returns True
:param timeout: timeout to wait. Default is no wait time.
:type timeout: float
:return: None
"""
logger.debug('Watching for a new configuration, timeout: %s', timeout)
self.make_a_pause(timeout=timeout, check_time_change=False)
return any(self.new_conf) |
def coarse_graining(network, state, internal_indices):
"""Find the maximal coarse-graining of a micro-system.
Args:
network (Network): The network in question.
state (tuple[int]): The state of the network.
internal_indices (tuple[int]): Nodes in the micro-system.
Returns:
tuple[int, CoarseGrain]: The phi-value of the maximal |CoarseGrain|.
"""
max_phi = float('-inf')
max_coarse_grain = CoarseGrain((), ())
for coarse_grain in all_coarse_grains(internal_indices):
try:
subsystem = MacroSubsystem(network, state, internal_indices,
coarse_grain=coarse_grain)
except ConditionallyDependentError:
continue
phi = compute.phi(subsystem)
if (phi - max_phi) > constants.EPSILON:
max_phi = phi
max_coarse_grain = coarse_grain
return (max_phi, max_coarse_grain) | def function[coarse_graining, parameter[network, state, internal_indices]]:
constant[Find the maximal coarse-graining of a micro-system.
Args:
network (Network): The network in question.
state (tuple[int]): The state of the network.
internal_indices (tuple[int]): Nodes in the micro-system.
Returns:
tuple[int, CoarseGrain]: The phi-value of the maximal |CoarseGrain|.
]
variable[max_phi] assign[=] call[name[float], parameter[constant[-inf]]]
variable[max_coarse_grain] assign[=] call[name[CoarseGrain], parameter[tuple[[]], tuple[[]]]]
for taget[name[coarse_grain]] in starred[call[name[all_coarse_grains], parameter[name[internal_indices]]]] begin[:]
<ast.Try object at 0x7da20e957be0>
variable[phi] assign[=] call[name[compute].phi, parameter[name[subsystem]]]
if compare[binary_operation[name[phi] - name[max_phi]] greater[>] name[constants].EPSILON] begin[:]
variable[max_phi] assign[=] name[phi]
variable[max_coarse_grain] assign[=] name[coarse_grain]
return[tuple[[<ast.Name object at 0x7da20e954490>, <ast.Name object at 0x7da20e9553f0>]]] | keyword[def] identifier[coarse_graining] ( identifier[network] , identifier[state] , identifier[internal_indices] ):
literal[string]
identifier[max_phi] = identifier[float] ( literal[string] )
identifier[max_coarse_grain] = identifier[CoarseGrain] ((),())
keyword[for] identifier[coarse_grain] keyword[in] identifier[all_coarse_grains] ( identifier[internal_indices] ):
keyword[try] :
identifier[subsystem] = identifier[MacroSubsystem] ( identifier[network] , identifier[state] , identifier[internal_indices] ,
identifier[coarse_grain] = identifier[coarse_grain] )
keyword[except] identifier[ConditionallyDependentError] :
keyword[continue]
identifier[phi] = identifier[compute] . identifier[phi] ( identifier[subsystem] )
keyword[if] ( identifier[phi] - identifier[max_phi] )> identifier[constants] . identifier[EPSILON] :
identifier[max_phi] = identifier[phi]
identifier[max_coarse_grain] = identifier[coarse_grain]
keyword[return] ( identifier[max_phi] , identifier[max_coarse_grain] ) | def coarse_graining(network, state, internal_indices):
"""Find the maximal coarse-graining of a micro-system.
Args:
network (Network): The network in question.
state (tuple[int]): The state of the network.
internal_indices (tuple[int]): Nodes in the micro-system.
Returns:
tuple[int, CoarseGrain]: The phi-value of the maximal |CoarseGrain|.
"""
max_phi = float('-inf')
max_coarse_grain = CoarseGrain((), ())
for coarse_grain in all_coarse_grains(internal_indices):
try:
subsystem = MacroSubsystem(network, state, internal_indices, coarse_grain=coarse_grain) # depends on [control=['try'], data=[]]
except ConditionallyDependentError:
continue # depends on [control=['except'], data=[]]
phi = compute.phi(subsystem)
if phi - max_phi > constants.EPSILON:
max_phi = phi
max_coarse_grain = coarse_grain # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['coarse_grain']]
return (max_phi, max_coarse_grain) |
def insert_many(self, items):
"""
Insert many items at once into a temporary table.
"""
return SessionContext.session.execute(
self.insert(values=[
to_dict(item, self.c)
for item in items
]),
).rowcount | def function[insert_many, parameter[self, items]]:
constant[
Insert many items at once into a temporary table.
]
return[call[name[SessionContext].session.execute, parameter[call[name[self].insert, parameter[]]]].rowcount] | keyword[def] identifier[insert_many] ( identifier[self] , identifier[items] ):
literal[string]
keyword[return] identifier[SessionContext] . identifier[session] . identifier[execute] (
identifier[self] . identifier[insert] ( identifier[values] =[
identifier[to_dict] ( identifier[item] , identifier[self] . identifier[c] )
keyword[for] identifier[item] keyword[in] identifier[items]
]),
). identifier[rowcount] | def insert_many(self, items):
"""
Insert many items at once into a temporary table.
"""
return SessionContext.session.execute(self.insert(values=[to_dict(item, self.c) for item in items])).rowcount |
def _import_data(data, axis, slice_step, first_slice_offset=0):
"""
import ndarray or SimpleITK data
"""
try:
import SimpleITK as sitk
if type(data) is sitk.SimpleITK.Image:
data = sitk.GetArrayFromImage(data)
except:
pass
data = __select_slices(data, axis, slice_step, first_slice_offset=first_slice_offset)
return data | def function[_import_data, parameter[data, axis, slice_step, first_slice_offset]]:
constant[
import ndarray or SimpleITK data
]
<ast.Try object at 0x7da1b271da50>
variable[data] assign[=] call[name[__select_slices], parameter[name[data], name[axis], name[slice_step]]]
return[name[data]] | keyword[def] identifier[_import_data] ( identifier[data] , identifier[axis] , identifier[slice_step] , identifier[first_slice_offset] = literal[int] ):
literal[string]
keyword[try] :
keyword[import] identifier[SimpleITK] keyword[as] identifier[sitk]
keyword[if] identifier[type] ( identifier[data] ) keyword[is] identifier[sitk] . identifier[SimpleITK] . identifier[Image] :
identifier[data] = identifier[sitk] . identifier[GetArrayFromImage] ( identifier[data] )
keyword[except] :
keyword[pass]
identifier[data] = identifier[__select_slices] ( identifier[data] , identifier[axis] , identifier[slice_step] , identifier[first_slice_offset] = identifier[first_slice_offset] )
keyword[return] identifier[data] | def _import_data(data, axis, slice_step, first_slice_offset=0):
"""
import ndarray or SimpleITK data
"""
try:
import SimpleITK as sitk
if type(data) is sitk.SimpleITK.Image:
data = sitk.GetArrayFromImage(data) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
data = __select_slices(data, axis, slice_step, first_slice_offset=first_slice_offset)
return data |
def remove_cert_binding(name, site, hostheader='', ipaddress='*', port=443):
'''
Remove a certificate from an IIS binding.
.. note:
This function only removes the certificate from the web binding. It does
not remove the web binding itself.
:param str name: The thumbprint of the certificate.
:param str site: The IIS site name.
:param str hostheader: The host header of the binding.
:param str ipaddress: The IP address of the binding.
:param str port: The TCP port of the binding.
Example of usage with only the required arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
Example of usage specifying all available arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
- hostheader: site0.local
- ipaddress: 192.168.1.199
- port: 443
.. versionadded:: 2016.11.0
'''
ret = {'name': name,
'changes': {},
'comment': str(),
'result': None}
binding_info = _get_binding_info(hostheader, ipaddress, port)
current_cert_bindings = __salt__['win_iis.list_cert_bindings'](site)
if binding_info not in current_cert_bindings:
ret['comment'] = 'Certificate binding has already been removed: {0}'.format(name)
ret['result'] = True
elif __opts__['test']:
ret['comment'] = 'Certificate binding will be removed: {0}'.format(name)
ret['changes'] = {'old': name,
'new': None}
else:
current_name = current_cert_bindings[binding_info]['certificatehash']
if name == current_name:
ret['comment'] = 'Removed certificate binding: {0}'.format(name)
ret['changes'] = {'old': name,
'new': None}
ret['result'] = __salt__['win_iis.remove_cert_binding'](name, site, hostheader,
ipaddress, port)
return ret | def function[remove_cert_binding, parameter[name, site, hostheader, ipaddress, port]]:
constant[
Remove a certificate from an IIS binding.
.. note:
This function only removes the certificate from the web binding. It does
not remove the web binding itself.
:param str name: The thumbprint of the certificate.
:param str site: The IIS site name.
:param str hostheader: The host header of the binding.
:param str ipaddress: The IP address of the binding.
:param str port: The TCP port of the binding.
Example of usage with only the required arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
Example of usage specifying all available arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
- hostheader: site0.local
- ipaddress: 192.168.1.199
- port: 443
.. versionadded:: 2016.11.0
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b2344430>, <ast.Constant object at 0x7da1b2346710>, <ast.Constant object at 0x7da1b23469b0>, <ast.Constant object at 0x7da1b2347c40>], [<ast.Name object at 0x7da1b23463b0>, <ast.Dict object at 0x7da1b23471c0>, <ast.Call object at 0x7da1b2344580>, <ast.Constant object at 0x7da1b2347760>]]
variable[binding_info] assign[=] call[name[_get_binding_info], parameter[name[hostheader], name[ipaddress], name[port]]]
variable[current_cert_bindings] assign[=] call[call[name[__salt__]][constant[win_iis.list_cert_bindings]], parameter[name[site]]]
if compare[name[binding_info] <ast.NotIn object at 0x7da2590d7190> name[current_cert_bindings]] begin[:]
call[name[ret]][constant[comment]] assign[=] call[constant[Certificate binding has already been removed: {0}].format, parameter[name[name]]]
call[name[ret]][constant[result]] assign[=] constant[True]
return[name[ret]] | keyword[def] identifier[remove_cert_binding] ( identifier[name] , identifier[site] , identifier[hostheader] = literal[string] , identifier[ipaddress] = literal[string] , identifier[port] = literal[int] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] ,
literal[string] :{},
literal[string] : identifier[str] (),
literal[string] : keyword[None] }
identifier[binding_info] = identifier[_get_binding_info] ( identifier[hostheader] , identifier[ipaddress] , identifier[port] )
identifier[current_cert_bindings] = identifier[__salt__] [ literal[string] ]( identifier[site] )
keyword[if] identifier[binding_info] keyword[not] keyword[in] identifier[current_cert_bindings] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]= keyword[True]
keyword[elif] identifier[__opts__] [ literal[string] ]:
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ literal[string] : identifier[name] ,
literal[string] : keyword[None] }
keyword[else] :
identifier[current_name] = identifier[current_cert_bindings] [ identifier[binding_info] ][ literal[string] ]
keyword[if] identifier[name] == identifier[current_name] :
identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] )
identifier[ret] [ literal[string] ]={ literal[string] : identifier[name] ,
literal[string] : keyword[None] }
identifier[ret] [ literal[string] ]= identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[site] , identifier[hostheader] ,
identifier[ipaddress] , identifier[port] )
keyword[return] identifier[ret] | def remove_cert_binding(name, site, hostheader='', ipaddress='*', port=443):
"""
Remove a certificate from an IIS binding.
.. note:
This function only removes the certificate from the web binding. It does
not remove the web binding itself.
:param str name: The thumbprint of the certificate.
:param str site: The IIS site name.
:param str hostheader: The host header of the binding.
:param str ipaddress: The IP address of the binding.
:param str port: The TCP port of the binding.
Example of usage with only the required arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
Example of usage specifying all available arguments:
.. code-block:: yaml
site0-cert-binding-remove:
win_iis.remove_cert_binding:
- name: 9988776655443322111000AAABBBCCCDDDEEEFFF
- site: site0
- hostheader: site0.local
- ipaddress: 192.168.1.199
- port: 443
.. versionadded:: 2016.11.0
"""
ret = {'name': name, 'changes': {}, 'comment': str(), 'result': None}
binding_info = _get_binding_info(hostheader, ipaddress, port)
current_cert_bindings = __salt__['win_iis.list_cert_bindings'](site)
if binding_info not in current_cert_bindings:
ret['comment'] = 'Certificate binding has already been removed: {0}'.format(name)
ret['result'] = True # depends on [control=['if'], data=[]]
elif __opts__['test']:
ret['comment'] = 'Certificate binding will be removed: {0}'.format(name)
ret['changes'] = {'old': name, 'new': None} # depends on [control=['if'], data=[]]
else:
current_name = current_cert_bindings[binding_info]['certificatehash']
if name == current_name:
ret['comment'] = 'Removed certificate binding: {0}'.format(name)
ret['changes'] = {'old': name, 'new': None}
ret['result'] = __salt__['win_iis.remove_cert_binding'](name, site, hostheader, ipaddress, port) # depends on [control=['if'], data=['name']]
return ret |
def delete_sched_block_instance(self, block_id):
"""Delete the specified Scheduling Block Instance.
Removes the Scheduling Block Instance, and all Processing Blocks
that belong to it from the database"""
LOG.debug('Deleting SBI %s', block_id)
scheduling_blocks = self._db.get_all_blocks(block_id)
if not scheduling_blocks:
raise RuntimeError('Scheduling block not found: {}'.
format(block_id))
if scheduling_blocks:
for blocks in scheduling_blocks:
if "processing_block" not in blocks:
self._db.delete_block(blocks)
else:
split_key = blocks.split(':')
self._db.delete_block(blocks)
# Add a event to the processing block event list to notify
# about deleting from the db
self._db.push_event(self.processing_event_name, "deleted",
split_key[3])
# Add a event to the scheduling block event list to notify
# of a deleting a scheduling block from the db
self._db.push_event(self.scheduling_event_name, "deleted",
block_id) | def function[delete_sched_block_instance, parameter[self, block_id]]:
constant[Delete the specified Scheduling Block Instance.
Removes the Scheduling Block Instance, and all Processing Blocks
that belong to it from the database]
call[name[LOG].debug, parameter[constant[Deleting SBI %s], name[block_id]]]
variable[scheduling_blocks] assign[=] call[name[self]._db.get_all_blocks, parameter[name[block_id]]]
if <ast.UnaryOp object at 0x7da1b036b1c0> begin[:]
<ast.Raise object at 0x7da1b03686a0>
if name[scheduling_blocks] begin[:]
for taget[name[blocks]] in starred[name[scheduling_blocks]] begin[:]
if compare[constant[processing_block] <ast.NotIn object at 0x7da2590d7190> name[blocks]] begin[:]
call[name[self]._db.delete_block, parameter[name[blocks]]]
call[name[self]._db.push_event, parameter[name[self].scheduling_event_name, constant[deleted], name[block_id]]] | keyword[def] identifier[delete_sched_block_instance] ( identifier[self] , identifier[block_id] ):
literal[string]
identifier[LOG] . identifier[debug] ( literal[string] , identifier[block_id] )
identifier[scheduling_blocks] = identifier[self] . identifier[_db] . identifier[get_all_blocks] ( identifier[block_id] )
keyword[if] keyword[not] identifier[scheduling_blocks] :
keyword[raise] identifier[RuntimeError] ( literal[string] .
identifier[format] ( identifier[block_id] ))
keyword[if] identifier[scheduling_blocks] :
keyword[for] identifier[blocks] keyword[in] identifier[scheduling_blocks] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[blocks] :
identifier[self] . identifier[_db] . identifier[delete_block] ( identifier[blocks] )
keyword[else] :
identifier[split_key] = identifier[blocks] . identifier[split] ( literal[string] )
identifier[self] . identifier[_db] . identifier[delete_block] ( identifier[blocks] )
identifier[self] . identifier[_db] . identifier[push_event] ( identifier[self] . identifier[processing_event_name] , literal[string] ,
identifier[split_key] [ literal[int] ])
identifier[self] . identifier[_db] . identifier[push_event] ( identifier[self] . identifier[scheduling_event_name] , literal[string] ,
identifier[block_id] ) | def delete_sched_block_instance(self, block_id):
"""Delete the specified Scheduling Block Instance.
Removes the Scheduling Block Instance, and all Processing Blocks
that belong to it from the database"""
LOG.debug('Deleting SBI %s', block_id)
scheduling_blocks = self._db.get_all_blocks(block_id)
if not scheduling_blocks:
raise RuntimeError('Scheduling block not found: {}'.format(block_id)) # depends on [control=['if'], data=[]]
if scheduling_blocks:
for blocks in scheduling_blocks:
if 'processing_block' not in blocks:
self._db.delete_block(blocks) # depends on [control=['if'], data=['blocks']]
else:
split_key = blocks.split(':')
self._db.delete_block(blocks)
# Add a event to the processing block event list to notify
# about deleting from the db
self._db.push_event(self.processing_event_name, 'deleted', split_key[3]) # depends on [control=['for'], data=['blocks']]
# Add a event to the scheduling block event list to notify
# of a deleting a scheduling block from the db
self._db.push_event(self.scheduling_event_name, 'deleted', block_id) # depends on [control=['if'], data=[]] |
def last(self):
"""Last time step available.
Example:
>>> sdat = StagyyData('path/to/run')
>>> assert(sdat.steps.last is sdat.steps[-1])
"""
if self._last is UNDETERMINED:
# not necessarily the last one...
self._last = self.sdat.tseries.index[-1]
return self[self._last] | def function[last, parameter[self]]:
constant[Last time step available.
Example:
>>> sdat = StagyyData('path/to/run')
>>> assert(sdat.steps.last is sdat.steps[-1])
]
if compare[name[self]._last is name[UNDETERMINED]] begin[:]
name[self]._last assign[=] call[name[self].sdat.tseries.index][<ast.UnaryOp object at 0x7da1b1a78f10>]
return[call[name[self]][name[self]._last]] | keyword[def] identifier[last] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_last] keyword[is] identifier[UNDETERMINED] :
identifier[self] . identifier[_last] = identifier[self] . identifier[sdat] . identifier[tseries] . identifier[index] [- literal[int] ]
keyword[return] identifier[self] [ identifier[self] . identifier[_last] ] | def last(self):
"""Last time step available.
Example:
>>> sdat = StagyyData('path/to/run')
>>> assert(sdat.steps.last is sdat.steps[-1])
"""
if self._last is UNDETERMINED:
# not necessarily the last one...
self._last = self.sdat.tseries.index[-1] # depends on [control=['if'], data=[]]
return self[self._last] |
def update_swarm(self, version, swarm_spec=None, rotate_worker_token=False,
rotate_manager_token=False):
"""
Update the Swarm's configuration
Args:
version (int): The version number of the swarm object being
updated. This is required to avoid conflicting writes.
swarm_spec (dict): Configuration settings to update. Use
:py:meth:`~docker.api.swarm.SwarmApiMixin.create_swarm_spec` to
generate a valid configuration. Default: ``None``.
rotate_worker_token (bool): Rotate the worker join token. Default:
``False``.
rotate_manager_token (bool): Rotate the manager join token.
Default: ``False``.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
url = self._url('/swarm/update')
response = self._post_json(url, data=swarm_spec, params={
'rotateWorkerToken': rotate_worker_token,
'rotateManagerToken': rotate_manager_token,
'version': version
})
self._raise_for_status(response)
return True | def function[update_swarm, parameter[self, version, swarm_spec, rotate_worker_token, rotate_manager_token]]:
constant[
Update the Swarm's configuration
Args:
version (int): The version number of the swarm object being
updated. This is required to avoid conflicting writes.
swarm_spec (dict): Configuration settings to update. Use
:py:meth:`~docker.api.swarm.SwarmApiMixin.create_swarm_spec` to
generate a valid configuration. Default: ``None``.
rotate_worker_token (bool): Rotate the worker join token. Default:
``False``.
rotate_manager_token (bool): Rotate the manager join token.
Default: ``False``.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
]
variable[url] assign[=] call[name[self]._url, parameter[constant[/swarm/update]]]
variable[response] assign[=] call[name[self]._post_json, parameter[name[url]]]
call[name[self]._raise_for_status, parameter[name[response]]]
return[constant[True]] | keyword[def] identifier[update_swarm] ( identifier[self] , identifier[version] , identifier[swarm_spec] = keyword[None] , identifier[rotate_worker_token] = keyword[False] ,
identifier[rotate_manager_token] = keyword[False] ):
literal[string]
identifier[url] = identifier[self] . identifier[_url] ( literal[string] )
identifier[response] = identifier[self] . identifier[_post_json] ( identifier[url] , identifier[data] = identifier[swarm_spec] , identifier[params] ={
literal[string] : identifier[rotate_worker_token] ,
literal[string] : identifier[rotate_manager_token] ,
literal[string] : identifier[version]
})
identifier[self] . identifier[_raise_for_status] ( identifier[response] )
keyword[return] keyword[True] | def update_swarm(self, version, swarm_spec=None, rotate_worker_token=False, rotate_manager_token=False):
"""
Update the Swarm's configuration
Args:
version (int): The version number of the swarm object being
updated. This is required to avoid conflicting writes.
swarm_spec (dict): Configuration settings to update. Use
:py:meth:`~docker.api.swarm.SwarmApiMixin.create_swarm_spec` to
generate a valid configuration. Default: ``None``.
rotate_worker_token (bool): Rotate the worker join token. Default:
``False``.
rotate_manager_token (bool): Rotate the manager join token.
Default: ``False``.
Returns:
``True`` if the request went through.
Raises:
:py:class:`docker.errors.APIError`
If the server returns an error.
"""
url = self._url('/swarm/update')
response = self._post_json(url, data=swarm_spec, params={'rotateWorkerToken': rotate_worker_token, 'rotateManagerToken': rotate_manager_token, 'version': version})
self._raise_for_status(response)
return True |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.