code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def _restore_port_binding(self,
switch_ip, pvlan_ids,
port, native_vlan):
"""Restores a set of vlans for a given port."""
intf_type, nexus_port = nexus_help.split_interface_name(port)
# If native_vlan is configured, this is isolated since
# two configs (native + trunk) must be sent for this vlan only.
if native_vlan != 0:
self.driver.send_enable_vlan_on_trunk_int(
switch_ip, native_vlan,
intf_type, nexus_port, True)
# If this is the only vlan
if len(pvlan_ids) == 1:
return
concat_vlans = ''
compressed_vlans = self._get_compressed_vlan_list(pvlan_ids)
for pvlan in compressed_vlans:
if concat_vlans == '':
concat_vlans = "%s" % pvlan
else:
concat_vlans += ",%s" % pvlan
# if string starts getting a bit long, send it.
if len(concat_vlans) >= const.CREATE_PORT_VLAN_LENGTH:
self.driver.send_enable_vlan_on_trunk_int(
switch_ip, concat_vlans,
intf_type, nexus_port, False)
concat_vlans = ''
# Send remaining vlans if any
if len(concat_vlans):
self.driver.send_enable_vlan_on_trunk_int(
switch_ip, concat_vlans,
intf_type, nexus_port, False) | def function[_restore_port_binding, parameter[self, switch_ip, pvlan_ids, port, native_vlan]]:
constant[Restores a set of vlans for a given port.]
<ast.Tuple object at 0x7da1b1be6980> assign[=] call[name[nexus_help].split_interface_name, parameter[name[port]]]
if compare[name[native_vlan] not_equal[!=] constant[0]] begin[:]
call[name[self].driver.send_enable_vlan_on_trunk_int, parameter[name[switch_ip], name[native_vlan], name[intf_type], name[nexus_port], constant[True]]]
if compare[call[name[len], parameter[name[pvlan_ids]]] equal[==] constant[1]] begin[:]
return[None]
variable[concat_vlans] assign[=] constant[]
variable[compressed_vlans] assign[=] call[name[self]._get_compressed_vlan_list, parameter[name[pvlan_ids]]]
for taget[name[pvlan]] in starred[name[compressed_vlans]] begin[:]
if compare[name[concat_vlans] equal[==] constant[]] begin[:]
variable[concat_vlans] assign[=] binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[pvlan]]
if compare[call[name[len], parameter[name[concat_vlans]]] greater_or_equal[>=] name[const].CREATE_PORT_VLAN_LENGTH] begin[:]
call[name[self].driver.send_enable_vlan_on_trunk_int, parameter[name[switch_ip], name[concat_vlans], name[intf_type], name[nexus_port], constant[False]]]
variable[concat_vlans] assign[=] constant[]
if call[name[len], parameter[name[concat_vlans]]] begin[:]
call[name[self].driver.send_enable_vlan_on_trunk_int, parameter[name[switch_ip], name[concat_vlans], name[intf_type], name[nexus_port], constant[False]]] | keyword[def] identifier[_restore_port_binding] ( identifier[self] ,
identifier[switch_ip] , identifier[pvlan_ids] ,
identifier[port] , identifier[native_vlan] ):
literal[string]
identifier[intf_type] , identifier[nexus_port] = identifier[nexus_help] . identifier[split_interface_name] ( identifier[port] )
keyword[if] identifier[native_vlan] != literal[int] :
identifier[self] . identifier[driver] . identifier[send_enable_vlan_on_trunk_int] (
identifier[switch_ip] , identifier[native_vlan] ,
identifier[intf_type] , identifier[nexus_port] , keyword[True] )
keyword[if] identifier[len] ( identifier[pvlan_ids] )== literal[int] :
keyword[return]
identifier[concat_vlans] = literal[string]
identifier[compressed_vlans] = identifier[self] . identifier[_get_compressed_vlan_list] ( identifier[pvlan_ids] )
keyword[for] identifier[pvlan] keyword[in] identifier[compressed_vlans] :
keyword[if] identifier[concat_vlans] == literal[string] :
identifier[concat_vlans] = literal[string] % identifier[pvlan]
keyword[else] :
identifier[concat_vlans] += literal[string] % identifier[pvlan]
keyword[if] identifier[len] ( identifier[concat_vlans] )>= identifier[const] . identifier[CREATE_PORT_VLAN_LENGTH] :
identifier[self] . identifier[driver] . identifier[send_enable_vlan_on_trunk_int] (
identifier[switch_ip] , identifier[concat_vlans] ,
identifier[intf_type] , identifier[nexus_port] , keyword[False] )
identifier[concat_vlans] = literal[string]
keyword[if] identifier[len] ( identifier[concat_vlans] ):
identifier[self] . identifier[driver] . identifier[send_enable_vlan_on_trunk_int] (
identifier[switch_ip] , identifier[concat_vlans] ,
identifier[intf_type] , identifier[nexus_port] , keyword[False] ) | def _restore_port_binding(self, switch_ip, pvlan_ids, port, native_vlan):
"""Restores a set of vlans for a given port."""
(intf_type, nexus_port) = nexus_help.split_interface_name(port)
# If native_vlan is configured, this is isolated since
# two configs (native + trunk) must be sent for this vlan only.
if native_vlan != 0:
self.driver.send_enable_vlan_on_trunk_int(switch_ip, native_vlan, intf_type, nexus_port, True)
# If this is the only vlan
if len(pvlan_ids) == 1:
return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['native_vlan']]
concat_vlans = ''
compressed_vlans = self._get_compressed_vlan_list(pvlan_ids)
for pvlan in compressed_vlans:
if concat_vlans == '':
concat_vlans = '%s' % pvlan # depends on [control=['if'], data=['concat_vlans']]
else:
concat_vlans += ',%s' % pvlan
# if string starts getting a bit long, send it.
if len(concat_vlans) >= const.CREATE_PORT_VLAN_LENGTH:
self.driver.send_enable_vlan_on_trunk_int(switch_ip, concat_vlans, intf_type, nexus_port, False)
concat_vlans = '' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['pvlan']]
# Send remaining vlans if any
if len(concat_vlans):
self.driver.send_enable_vlan_on_trunk_int(switch_ip, concat_vlans, intf_type, nexus_port, False) # depends on [control=['if'], data=[]] |
def variant (name, parents_or_properties, explicit_properties = []):
""" Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
"""
parents = []
if not explicit_properties:
explicit_properties = parents_or_properties
else:
parents = parents_or_properties
inherited = property_set.empty()
if parents:
# If we allow multiple parents, we'd have to to check for conflicts
# between base variants, and there was no demand for so to bother.
if len (parents) > 1:
raise BaseException ("Multiple base variants are not yet supported")
p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value (p):
raise BaseException ("Invalid base variant '%s'" % p)
inherited = __variant_explicit_properties[p]
explicit_properties = property_set.create_with_validation(explicit_properties)
explicit_properties = inherited.refine(explicit_properties)
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
__variant_explicit_properties[name] = explicit_properties
feature.extend('variant', [name])
feature.compose ("<variant>" + name, explicit_properties.all()) | def function[variant, parameter[name, parents_or_properties, explicit_properties]]:
constant[ Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
]
variable[parents] assign[=] list[[]]
if <ast.UnaryOp object at 0x7da1b1f8f9d0> begin[:]
variable[explicit_properties] assign[=] name[parents_or_properties]
variable[inherited] assign[=] call[name[property_set].empty, parameter[]]
if name[parents] begin[:]
if compare[call[name[len], parameter[name[parents]]] greater[>] constant[1]] begin[:]
<ast.Raise object at 0x7da1b1f8d1e0>
variable[p] assign[=] call[name[parents]][constant[0]]
if <ast.UnaryOp object at 0x7da1b1f8c4c0> begin[:]
<ast.Raise object at 0x7da1b1f8e260>
variable[inherited] assign[=] call[name[__variant_explicit_properties]][name[p]]
variable[explicit_properties] assign[=] call[name[property_set].create_with_validation, parameter[name[explicit_properties]]]
variable[explicit_properties] assign[=] call[name[inherited].refine, parameter[name[explicit_properties]]]
call[name[__variant_explicit_properties]][name[name]] assign[=] name[explicit_properties]
call[name[feature].extend, parameter[constant[variant], list[[<ast.Name object at 0x7da1b1f8dde0>]]]]
call[name[feature].compose, parameter[binary_operation[constant[<variant>] + name[name]], call[name[explicit_properties].all, parameter[]]]] | keyword[def] identifier[variant] ( identifier[name] , identifier[parents_or_properties] , identifier[explicit_properties] =[]):
literal[string]
identifier[parents] =[]
keyword[if] keyword[not] identifier[explicit_properties] :
identifier[explicit_properties] = identifier[parents_or_properties]
keyword[else] :
identifier[parents] = identifier[parents_or_properties]
identifier[inherited] = identifier[property_set] . identifier[empty] ()
keyword[if] identifier[parents] :
keyword[if] identifier[len] ( identifier[parents] )> literal[int] :
keyword[raise] identifier[BaseException] ( literal[string] )
identifier[p] = identifier[parents] [ literal[int] ]
keyword[if] keyword[not] identifier[feature] . identifier[is_implicit_value] ( identifier[p] ):
keyword[raise] identifier[BaseException] ( literal[string] % identifier[p] )
identifier[inherited] = identifier[__variant_explicit_properties] [ identifier[p] ]
identifier[explicit_properties] = identifier[property_set] . identifier[create_with_validation] ( identifier[explicit_properties] )
identifier[explicit_properties] = identifier[inherited] . identifier[refine] ( identifier[explicit_properties] )
identifier[__variant_explicit_properties] [ identifier[name] ]= identifier[explicit_properties]
identifier[feature] . identifier[extend] ( literal[string] ,[ identifier[name] ])
identifier[feature] . identifier[compose] ( literal[string] + identifier[name] , identifier[explicit_properties] . identifier[all] ()) | def variant(name, parents_or_properties, explicit_properties=[]):
""" Declares a new variant.
First determines explicit properties for this variant, by
refining parents' explicit properties with the passed explicit
properties. The result is remembered and will be used if
this variant is used as parent.
Second, determines the full property set for this variant by
adding to the explicit properties default values for all properties
which neither present nor are symmetric.
Lastly, makes appropriate value of 'variant' property expand
to the full property set.
name: Name of the variant
parents_or_properties: Specifies parent variants, if
'explicit_properties' are given,
and explicit_properties otherwise.
explicit_properties: Explicit properties.
"""
parents = []
if not explicit_properties:
explicit_properties = parents_or_properties # depends on [control=['if'], data=[]]
else:
parents = parents_or_properties
inherited = property_set.empty()
if parents:
# If we allow multiple parents, we'd have to to check for conflicts
# between base variants, and there was no demand for so to bother.
if len(parents) > 1:
raise BaseException('Multiple base variants are not yet supported') # depends on [control=['if'], data=[]]
p = parents[0]
# TODO: the check may be stricter
if not feature.is_implicit_value(p):
raise BaseException("Invalid base variant '%s'" % p) # depends on [control=['if'], data=[]]
inherited = __variant_explicit_properties[p] # depends on [control=['if'], data=[]]
explicit_properties = property_set.create_with_validation(explicit_properties)
explicit_properties = inherited.refine(explicit_properties)
# Record explicitly specified properties for this variant
# We do this after inheriting parents' properties, so that
# they affect other variants, derived from this one.
__variant_explicit_properties[name] = explicit_properties
feature.extend('variant', [name])
feature.compose('<variant>' + name, explicit_properties.all()) |
def remove_child(self, child):
"""Removes a child instance from the Tag's children.
Args:
child (Tag): The child to be removed.
"""
if child in self.children.values() and hasattr(child, 'identifier'):
for k in self.children.keys():
if hasattr(self.children[k], 'identifier'):
if self.children[k].identifier == child.identifier:
if k in self._render_children_list:
self._render_children_list.remove(k)
self.children.pop(k)
# when the child is removed we stop the iteration
# this implies that a child replication should not be allowed
break | def function[remove_child, parameter[self, child]]:
constant[Removes a child instance from the Tag's children.
Args:
child (Tag): The child to be removed.
]
if <ast.BoolOp object at 0x7da18dc9b220> begin[:]
for taget[name[k]] in starred[call[name[self].children.keys, parameter[]]] begin[:]
if call[name[hasattr], parameter[call[name[self].children][name[k]], constant[identifier]]] begin[:]
if compare[call[name[self].children][name[k]].identifier equal[==] name[child].identifier] begin[:]
if compare[name[k] in name[self]._render_children_list] begin[:]
call[name[self]._render_children_list.remove, parameter[name[k]]]
call[name[self].children.pop, parameter[name[k]]]
break | keyword[def] identifier[remove_child] ( identifier[self] , identifier[child] ):
literal[string]
keyword[if] identifier[child] keyword[in] identifier[self] . identifier[children] . identifier[values] () keyword[and] identifier[hasattr] ( identifier[child] , literal[string] ):
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[children] . identifier[keys] ():
keyword[if] identifier[hasattr] ( identifier[self] . identifier[children] [ identifier[k] ], literal[string] ):
keyword[if] identifier[self] . identifier[children] [ identifier[k] ]. identifier[identifier] == identifier[child] . identifier[identifier] :
keyword[if] identifier[k] keyword[in] identifier[self] . identifier[_render_children_list] :
identifier[self] . identifier[_render_children_list] . identifier[remove] ( identifier[k] )
identifier[self] . identifier[children] . identifier[pop] ( identifier[k] )
keyword[break] | def remove_child(self, child):
"""Removes a child instance from the Tag's children.
Args:
child (Tag): The child to be removed.
"""
if child in self.children.values() and hasattr(child, 'identifier'):
for k in self.children.keys():
if hasattr(self.children[k], 'identifier'):
if self.children[k].identifier == child.identifier:
if k in self._render_children_list:
self._render_children_list.remove(k) # depends on [control=['if'], data=['k']]
self.children.pop(k)
# when the child is removed we stop the iteration
# this implies that a child replication should not be allowed
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] |
def decrypt(self, binary):
'''
Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted.
'''
try:
encrypted = self.verify_key.verify(binary)
except BadSignatureError:
log.error('Signature was forged or corrupt', exc_info=True)
raise BadSignatureException('Signature was forged or corrupt')
try:
packed = self.priv_key.decrypt(encrypted)
except CryptoError:
log.error('Unable to decrypt', exc_info=True)
raise CryptoException('Unable to decrypt')
return umsgpack.unpackb(packed) | def function[decrypt, parameter[self, binary]]:
constant[
Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted.
]
<ast.Try object at 0x7da20c990cd0>
<ast.Try object at 0x7da20c991780>
return[call[name[umsgpack].unpackb, parameter[name[packed]]]] | keyword[def] identifier[decrypt] ( identifier[self] , identifier[binary] ):
literal[string]
keyword[try] :
identifier[encrypted] = identifier[self] . identifier[verify_key] . identifier[verify] ( identifier[binary] )
keyword[except] identifier[BadSignatureError] :
identifier[log] . identifier[error] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[raise] identifier[BadSignatureException] ( literal[string] )
keyword[try] :
identifier[packed] = identifier[self] . identifier[priv_key] . identifier[decrypt] ( identifier[encrypted] )
keyword[except] identifier[CryptoError] :
identifier[log] . identifier[error] ( literal[string] , identifier[exc_info] = keyword[True] )
keyword[raise] identifier[CryptoException] ( literal[string] )
keyword[return] identifier[umsgpack] . identifier[unpackb] ( identifier[packed] ) | def decrypt(self, binary):
"""
Decrypt and unpack the original OpenConfig object,
serialized using MessagePack.
Raise BadSignatureException when the signature
was forged or corrupted.
"""
try:
encrypted = self.verify_key.verify(binary) # depends on [control=['try'], data=[]]
except BadSignatureError:
log.error('Signature was forged or corrupt', exc_info=True)
raise BadSignatureException('Signature was forged or corrupt') # depends on [control=['except'], data=[]]
try:
packed = self.priv_key.decrypt(encrypted) # depends on [control=['try'], data=[]]
except CryptoError:
log.error('Unable to decrypt', exc_info=True)
raise CryptoException('Unable to decrypt') # depends on [control=['except'], data=[]]
return umsgpack.unpackb(packed) |
def fancy_transpose(data, roll=1):
"""Fancy transpose
This method transposes a multidimensional matrix.
Parameters
----------
data : np.ndarray
Input data array
roll : int
Roll direction and amount. Default (roll=1)
Returns
-------
np.ndarray transposed data
Notes
-----
Adjustment to numpy.transpose
Examples
--------
>>> from modopt.base.np_adjust import fancy_transpose
>>> x = np.arange(27).reshape(3, 3, 3)
>>> x
array([[[ 0, 1, 2],
[ 3, 4, 5],
[ 6, 7, 8]],
[[ 9, 10, 11],
[12, 13, 14],
[15, 16, 17]],
[[18, 19, 20],
[21, 22, 23],
[24, 25, 26]]])
>>> fancy_transpose(x)
array([[[ 0, 3, 6],
[ 9, 12, 15],
[18, 21, 24]],
[[ 1, 4, 7],
[10, 13, 16],
[19, 22, 25]],
[[ 2, 5, 8],
[11, 14, 17],
[20, 23, 26]]])
>>> fancy_transpose(x, roll=-1)
array([[[ 0, 9, 18],
[ 1, 10, 19],
[ 2, 11, 20]],
[[ 3, 12, 21],
[ 4, 13, 22],
[ 5, 14, 23]],
[[ 6, 15, 24],
[ 7, 16, 25],
[ 8, 17, 26]]])
"""
axis_roll = np.roll(np.arange(data.ndim), roll)
return np.transpose(data, axes=axis_roll) | def function[fancy_transpose, parameter[data, roll]]:
constant[Fancy transpose
This method transposes a multidimensional matrix.
Parameters
----------
data : np.ndarray
Input data array
roll : int
Roll direction and amount. Default (roll=1)
Returns
-------
np.ndarray transposed data
Notes
-----
Adjustment to numpy.transpose
Examples
--------
>>> from modopt.base.np_adjust import fancy_transpose
>>> x = np.arange(27).reshape(3, 3, 3)
>>> x
array([[[ 0, 1, 2],
[ 3, 4, 5],
[ 6, 7, 8]],
[[ 9, 10, 11],
[12, 13, 14],
[15, 16, 17]],
[[18, 19, 20],
[21, 22, 23],
[24, 25, 26]]])
>>> fancy_transpose(x)
array([[[ 0, 3, 6],
[ 9, 12, 15],
[18, 21, 24]],
[[ 1, 4, 7],
[10, 13, 16],
[19, 22, 25]],
[[ 2, 5, 8],
[11, 14, 17],
[20, 23, 26]]])
>>> fancy_transpose(x, roll=-1)
array([[[ 0, 9, 18],
[ 1, 10, 19],
[ 2, 11, 20]],
[[ 3, 12, 21],
[ 4, 13, 22],
[ 5, 14, 23]],
[[ 6, 15, 24],
[ 7, 16, 25],
[ 8, 17, 26]]])
]
variable[axis_roll] assign[=] call[name[np].roll, parameter[call[name[np].arange, parameter[name[data].ndim]], name[roll]]]
return[call[name[np].transpose, parameter[name[data]]]] | keyword[def] identifier[fancy_transpose] ( identifier[data] , identifier[roll] = literal[int] ):
literal[string]
identifier[axis_roll] = identifier[np] . identifier[roll] ( identifier[np] . identifier[arange] ( identifier[data] . identifier[ndim] ), identifier[roll] )
keyword[return] identifier[np] . identifier[transpose] ( identifier[data] , identifier[axes] = identifier[axis_roll] ) | def fancy_transpose(data, roll=1):
"""Fancy transpose
This method transposes a multidimensional matrix.
Parameters
----------
data : np.ndarray
Input data array
roll : int
Roll direction and amount. Default (roll=1)
Returns
-------
np.ndarray transposed data
Notes
-----
Adjustment to numpy.transpose
Examples
--------
>>> from modopt.base.np_adjust import fancy_transpose
>>> x = np.arange(27).reshape(3, 3, 3)
>>> x
array([[[ 0, 1, 2],
[ 3, 4, 5],
[ 6, 7, 8]],
[[ 9, 10, 11],
[12, 13, 14],
[15, 16, 17]],
[[18, 19, 20],
[21, 22, 23],
[24, 25, 26]]])
>>> fancy_transpose(x)
array([[[ 0, 3, 6],
[ 9, 12, 15],
[18, 21, 24]],
[[ 1, 4, 7],
[10, 13, 16],
[19, 22, 25]],
[[ 2, 5, 8],
[11, 14, 17],
[20, 23, 26]]])
>>> fancy_transpose(x, roll=-1)
array([[[ 0, 9, 18],
[ 1, 10, 19],
[ 2, 11, 20]],
[[ 3, 12, 21],
[ 4, 13, 22],
[ 5, 14, 23]],
[[ 6, 15, 24],
[ 7, 16, 25],
[ 8, 17, 26]]])
"""
axis_roll = np.roll(np.arange(data.ndim), roll)
return np.transpose(data, axes=axis_roll) |
def _parseStats(self, lines, parse_slabs = False):
"""Parse stats output from memcached and return dictionary of stats-
@param lines: Array of lines of input text.
@param parse_slabs: Parse slab stats if True.
@return: Stats dictionary.
"""
info_dict = {}
info_dict['slabs'] = {}
for line in lines:
mobj = re.match('^STAT\s(\w+)\s(\S+)$', line)
if mobj:
info_dict[mobj.group(1)] = util.parse_value(mobj.group(2), True)
continue
elif parse_slabs:
mobj = re.match('STAT\s(\w+:)?(\d+):(\w+)\s(\S+)$', line)
if mobj:
(slab, key, val) = mobj.groups()[-3:]
if not info_dict['slabs'].has_key(slab):
info_dict['slabs'][slab] = {}
info_dict['slabs'][slab][key] = util.parse_value(val, True)
return info_dict | def function[_parseStats, parameter[self, lines, parse_slabs]]:
constant[Parse stats output from memcached and return dictionary of stats-
@param lines: Array of lines of input text.
@param parse_slabs: Parse slab stats if True.
@return: Stats dictionary.
]
variable[info_dict] assign[=] dictionary[[], []]
call[name[info_dict]][constant[slabs]] assign[=] dictionary[[], []]
for taget[name[line]] in starred[name[lines]] begin[:]
variable[mobj] assign[=] call[name[re].match, parameter[constant[^STAT\s(\w+)\s(\S+)$], name[line]]]
if name[mobj] begin[:]
call[name[info_dict]][call[name[mobj].group, parameter[constant[1]]]] assign[=] call[name[util].parse_value, parameter[call[name[mobj].group, parameter[constant[2]]], constant[True]]]
continue
return[name[info_dict]] | keyword[def] identifier[_parseStats] ( identifier[self] , identifier[lines] , identifier[parse_slabs] = keyword[False] ):
literal[string]
identifier[info_dict] ={}
identifier[info_dict] [ literal[string] ]={}
keyword[for] identifier[line] keyword[in] identifier[lines] :
identifier[mobj] = identifier[re] . identifier[match] ( literal[string] , identifier[line] )
keyword[if] identifier[mobj] :
identifier[info_dict] [ identifier[mobj] . identifier[group] ( literal[int] )]= identifier[util] . identifier[parse_value] ( identifier[mobj] . identifier[group] ( literal[int] ), keyword[True] )
keyword[continue]
keyword[elif] identifier[parse_slabs] :
identifier[mobj] = identifier[re] . identifier[match] ( literal[string] , identifier[line] )
keyword[if] identifier[mobj] :
( identifier[slab] , identifier[key] , identifier[val] )= identifier[mobj] . identifier[groups] ()[- literal[int] :]
keyword[if] keyword[not] identifier[info_dict] [ literal[string] ]. identifier[has_key] ( identifier[slab] ):
identifier[info_dict] [ literal[string] ][ identifier[slab] ]={}
identifier[info_dict] [ literal[string] ][ identifier[slab] ][ identifier[key] ]= identifier[util] . identifier[parse_value] ( identifier[val] , keyword[True] )
keyword[return] identifier[info_dict] | def _parseStats(self, lines, parse_slabs=False):
"""Parse stats output from memcached and return dictionary of stats-
@param lines: Array of lines of input text.
@param parse_slabs: Parse slab stats if True.
@return: Stats dictionary.
"""
info_dict = {}
info_dict['slabs'] = {}
for line in lines:
mobj = re.match('^STAT\\s(\\w+)\\s(\\S+)$', line)
if mobj:
info_dict[mobj.group(1)] = util.parse_value(mobj.group(2), True)
continue # depends on [control=['if'], data=[]]
elif parse_slabs:
mobj = re.match('STAT\\s(\\w+:)?(\\d+):(\\w+)\\s(\\S+)$', line)
if mobj:
(slab, key, val) = mobj.groups()[-3:]
if not info_dict['slabs'].has_key(slab):
info_dict['slabs'][slab] = {} # depends on [control=['if'], data=[]]
info_dict['slabs'][slab][key] = util.parse_value(val, True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']]
return info_dict |
def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn
if func_takes_retry_state(fn):
return fn
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation(
'before_sleep', fn, stacklevel=4)
return fn(
retry_state.retry_object,
sleep=getattr(retry_state.next_action, 'sleep'),
last_result=retry_state.outcome)
return wrapped_before_sleep_func | def function[before_sleep_func_accept_retry_state, parameter[fn]]:
constant[Wrap "before_sleep" function to accept "retry_state".]
if <ast.UnaryOp object at 0x7da204564ee0> begin[:]
return[name[fn]]
if call[name[func_takes_retry_state], parameter[name[fn]]] begin[:]
return[name[fn]]
def function[wrapped_before_sleep_func, parameter[retry_state]]:
call[name[warn_about_non_retry_state_deprecation], parameter[constant[before_sleep], name[fn]]]
return[call[name[fn], parameter[name[retry_state].retry_object]]]
return[name[wrapped_before_sleep_func]] | keyword[def] identifier[before_sleep_func_accept_retry_state] ( identifier[fn] ):
literal[string]
keyword[if] keyword[not] identifier[six] . identifier[callable] ( identifier[fn] ):
keyword[return] identifier[fn]
keyword[if] identifier[func_takes_retry_state] ( identifier[fn] ):
keyword[return] identifier[fn]
@ identifier[_utils] . identifier[wraps] ( identifier[fn] )
keyword[def] identifier[wrapped_before_sleep_func] ( identifier[retry_state] ):
identifier[warn_about_non_retry_state_deprecation] (
literal[string] , identifier[fn] , identifier[stacklevel] = literal[int] )
keyword[return] identifier[fn] (
identifier[retry_state] . identifier[retry_object] ,
identifier[sleep] = identifier[getattr] ( identifier[retry_state] . identifier[next_action] , literal[string] ),
identifier[last_result] = identifier[retry_state] . identifier[outcome] )
keyword[return] identifier[wrapped_before_sleep_func] | def before_sleep_func_accept_retry_state(fn):
"""Wrap "before_sleep" function to accept "retry_state"."""
if not six.callable(fn):
return fn # depends on [control=['if'], data=[]]
if func_takes_retry_state(fn):
return fn # depends on [control=['if'], data=[]]
@_utils.wraps(fn)
def wrapped_before_sleep_func(retry_state):
# retry_object, sleep, last_result
warn_about_non_retry_state_deprecation('before_sleep', fn, stacklevel=4)
return fn(retry_state.retry_object, sleep=getattr(retry_state.next_action, 'sleep'), last_result=retry_state.outcome)
return wrapped_before_sleep_func |
def setSurfaces(self, normalSurface, downSurface=None, highlightSurface=None):
"""Switch the button to a custom image type of button (rather than a
text button). You can specify either a pygame.Surface object or a
string of a filename to load for each of the three button appearance
states."""
if downSurface is None:
downSurface = normalSurface
if highlightSurface is None:
highlightSurface = normalSurface
if type(normalSurface) == str:
self.origSurfaceNormal = pygame.image.load(normalSurface)
if type(downSurface) == str:
self.origSurfaceDown = pygame.image.load(downSurface)
if type(highlightSurface) == str:
self.origSurfaceHighlight = pygame.image.load(highlightSurface)
if self.origSurfaceNormal.get_size() != self.origSurfaceDown.get_size() != self.origSurfaceHighlight.get_size():
raise Exception('foo')
self.surfaceNormal = self.origSurfaceNormal
self.surfaceDown = self.origSurfaceDown
self.surfaceHighlight = self.origSurfaceHighlight
self.customSurfaces = True
self._rect = pygame.Rect((self._rect.left, self._rect.top, self.surfaceNormal.get_width(), self.surfaceNormal.get_height())) | def function[setSurfaces, parameter[self, normalSurface, downSurface, highlightSurface]]:
constant[Switch the button to a custom image type of button (rather than a
text button). You can specify either a pygame.Surface object or a
string of a filename to load for each of the three button appearance
states.]
if compare[name[downSurface] is constant[None]] begin[:]
variable[downSurface] assign[=] name[normalSurface]
if compare[name[highlightSurface] is constant[None]] begin[:]
variable[highlightSurface] assign[=] name[normalSurface]
if compare[call[name[type], parameter[name[normalSurface]]] equal[==] name[str]] begin[:]
name[self].origSurfaceNormal assign[=] call[name[pygame].image.load, parameter[name[normalSurface]]]
if compare[call[name[type], parameter[name[downSurface]]] equal[==] name[str]] begin[:]
name[self].origSurfaceDown assign[=] call[name[pygame].image.load, parameter[name[downSurface]]]
if compare[call[name[type], parameter[name[highlightSurface]]] equal[==] name[str]] begin[:]
name[self].origSurfaceHighlight assign[=] call[name[pygame].image.load, parameter[name[highlightSurface]]]
if compare[call[name[self].origSurfaceNormal.get_size, parameter[]] not_equal[!=] call[name[self].origSurfaceDown.get_size, parameter[]]] begin[:]
<ast.Raise object at 0x7da18f811f90>
name[self].surfaceNormal assign[=] name[self].origSurfaceNormal
name[self].surfaceDown assign[=] name[self].origSurfaceDown
name[self].surfaceHighlight assign[=] name[self].origSurfaceHighlight
name[self].customSurfaces assign[=] constant[True]
name[self]._rect assign[=] call[name[pygame].Rect, parameter[tuple[[<ast.Attribute object at 0x7da18f812ef0>, <ast.Attribute object at 0x7da18f811060>, <ast.Call object at 0x7da18f8102b0>, <ast.Call object at 0x7da18f00c1f0>]]]] | keyword[def] identifier[setSurfaces] ( identifier[self] , identifier[normalSurface] , identifier[downSurface] = keyword[None] , identifier[highlightSurface] = keyword[None] ):
literal[string]
keyword[if] identifier[downSurface] keyword[is] keyword[None] :
identifier[downSurface] = identifier[normalSurface]
keyword[if] identifier[highlightSurface] keyword[is] keyword[None] :
identifier[highlightSurface] = identifier[normalSurface]
keyword[if] identifier[type] ( identifier[normalSurface] )== identifier[str] :
identifier[self] . identifier[origSurfaceNormal] = identifier[pygame] . identifier[image] . identifier[load] ( identifier[normalSurface] )
keyword[if] identifier[type] ( identifier[downSurface] )== identifier[str] :
identifier[self] . identifier[origSurfaceDown] = identifier[pygame] . identifier[image] . identifier[load] ( identifier[downSurface] )
keyword[if] identifier[type] ( identifier[highlightSurface] )== identifier[str] :
identifier[self] . identifier[origSurfaceHighlight] = identifier[pygame] . identifier[image] . identifier[load] ( identifier[highlightSurface] )
keyword[if] identifier[self] . identifier[origSurfaceNormal] . identifier[get_size] ()!= identifier[self] . identifier[origSurfaceDown] . identifier[get_size] ()!= identifier[self] . identifier[origSurfaceHighlight] . identifier[get_size] ():
keyword[raise] identifier[Exception] ( literal[string] )
identifier[self] . identifier[surfaceNormal] = identifier[self] . identifier[origSurfaceNormal]
identifier[self] . identifier[surfaceDown] = identifier[self] . identifier[origSurfaceDown]
identifier[self] . identifier[surfaceHighlight] = identifier[self] . identifier[origSurfaceHighlight]
identifier[self] . identifier[customSurfaces] = keyword[True]
identifier[self] . identifier[_rect] = identifier[pygame] . identifier[Rect] (( identifier[self] . identifier[_rect] . identifier[left] , identifier[self] . identifier[_rect] . identifier[top] , identifier[self] . identifier[surfaceNormal] . identifier[get_width] (), identifier[self] . identifier[surfaceNormal] . identifier[get_height] ())) | def setSurfaces(self, normalSurface, downSurface=None, highlightSurface=None):
"""Switch the button to a custom image type of button (rather than a
text button). You can specify either a pygame.Surface object or a
string of a filename to load for each of the three button appearance
states."""
if downSurface is None:
downSurface = normalSurface # depends on [control=['if'], data=['downSurface']]
if highlightSurface is None:
highlightSurface = normalSurface # depends on [control=['if'], data=['highlightSurface']]
if type(normalSurface) == str:
self.origSurfaceNormal = pygame.image.load(normalSurface) # depends on [control=['if'], data=[]]
if type(downSurface) == str:
self.origSurfaceDown = pygame.image.load(downSurface) # depends on [control=['if'], data=[]]
if type(highlightSurface) == str:
self.origSurfaceHighlight = pygame.image.load(highlightSurface) # depends on [control=['if'], data=[]]
if self.origSurfaceNormal.get_size() != self.origSurfaceDown.get_size() != self.origSurfaceHighlight.get_size():
raise Exception('foo') # depends on [control=['if'], data=[]]
self.surfaceNormal = self.origSurfaceNormal
self.surfaceDown = self.origSurfaceDown
self.surfaceHighlight = self.origSurfaceHighlight
self.customSurfaces = True
self._rect = pygame.Rect((self._rect.left, self._rect.top, self.surfaceNormal.get_width(), self.surfaceNormal.get_height())) |
def op_gen(mcode):
"""Generate a machine instruction using the op gen table."""
gen = op_tbl[mcode[0]]
ret = gen[0] # opcode
nargs = len(gen)
i = 1
while i < nargs:
if i < len(mcode): # or assume they are same len
ret |= (mcode[i]&gen[i][0]) << gen[i][1]
i += 1
return ret | def function[op_gen, parameter[mcode]]:
constant[Generate a machine instruction using the op gen table.]
variable[gen] assign[=] call[name[op_tbl]][call[name[mcode]][constant[0]]]
variable[ret] assign[=] call[name[gen]][constant[0]]
variable[nargs] assign[=] call[name[len], parameter[name[gen]]]
variable[i] assign[=] constant[1]
while compare[name[i] less[<] name[nargs]] begin[:]
if compare[name[i] less[<] call[name[len], parameter[name[mcode]]]] begin[:]
<ast.AugAssign object at 0x7da1b19ce710>
<ast.AugAssign object at 0x7da1b19cfd90>
return[name[ret]] | keyword[def] identifier[op_gen] ( identifier[mcode] ):
literal[string]
identifier[gen] = identifier[op_tbl] [ identifier[mcode] [ literal[int] ]]
identifier[ret] = identifier[gen] [ literal[int] ]
identifier[nargs] = identifier[len] ( identifier[gen] )
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[nargs] :
keyword[if] identifier[i] < identifier[len] ( identifier[mcode] ):
identifier[ret] |=( identifier[mcode] [ identifier[i] ]& identifier[gen] [ identifier[i] ][ literal[int] ])<< identifier[gen] [ identifier[i] ][ literal[int] ]
identifier[i] += literal[int]
keyword[return] identifier[ret] | def op_gen(mcode):
"""Generate a machine instruction using the op gen table."""
gen = op_tbl[mcode[0]]
ret = gen[0] # opcode
nargs = len(gen)
i = 1
while i < nargs:
if i < len(mcode): # or assume they are same len
ret |= (mcode[i] & gen[i][0]) << gen[i][1] # depends on [control=['if'], data=['i']]
i += 1 # depends on [control=['while'], data=['i']]
return ret |
def field_items(self, path=str(), **options):
""" Returns a **flatten** list of ``('field path', field item)`` tuples
for each :class:`Field` *nested* in the `Sequence`.
:param str path: field path of the `Sequence`.
:keyword bool nested: if ``True`` all :class:`Pointer` fields in the
:attr:`~Pointer.data` objects of all :class:`Pointer` fields in
the `Sequence` list their referenced :attr:`~Pointer.data` object
field items as well (chained method call).
"""
items = list()
for index, item in enumerate(self):
if path:
item_path = "{0}[{1}]".format(path, str(index))
else:
item_path = "[{0}]".format(str(index))
# Container
if is_container(item):
for field_item in item.field_items(item_path, **options):
items.append(field_item)
# Pointer
elif is_pointer(item) and get_nested(options):
for field_item in item.field_items(item_path, **options):
items.append(field_item)
# Field
elif is_field(item):
items.append((item_path, item))
else:
raise MemberTypeError(self, item, item_path)
return items | def function[field_items, parameter[self, path]]:
constant[ Returns a **flatten** list of ``('field path', field item)`` tuples
for each :class:`Field` *nested* in the `Sequence`.
:param str path: field path of the `Sequence`.
:keyword bool nested: if ``True`` all :class:`Pointer` fields in the
:attr:`~Pointer.data` objects of all :class:`Pointer` fields in
the `Sequence` list their referenced :attr:`~Pointer.data` object
field items as well (chained method call).
]
variable[items] assign[=] call[name[list], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da20e955330>, <ast.Name object at 0x7da20e955ba0>]]] in starred[call[name[enumerate], parameter[name[self]]]] begin[:]
if name[path] begin[:]
variable[item_path] assign[=] call[constant[{0}[{1}]].format, parameter[name[path], call[name[str], parameter[name[index]]]]]
if call[name[is_container], parameter[name[item]]] begin[:]
for taget[name[field_item]] in starred[call[name[item].field_items, parameter[name[item_path]]]] begin[:]
call[name[items].append, parameter[name[field_item]]]
return[name[items]] | keyword[def] identifier[field_items] ( identifier[self] , identifier[path] = identifier[str] (),** identifier[options] ):
literal[string]
identifier[items] = identifier[list] ()
keyword[for] identifier[index] , identifier[item] keyword[in] identifier[enumerate] ( identifier[self] ):
keyword[if] identifier[path] :
identifier[item_path] = literal[string] . identifier[format] ( identifier[path] , identifier[str] ( identifier[index] ))
keyword[else] :
identifier[item_path] = literal[string] . identifier[format] ( identifier[str] ( identifier[index] ))
keyword[if] identifier[is_container] ( identifier[item] ):
keyword[for] identifier[field_item] keyword[in] identifier[item] . identifier[field_items] ( identifier[item_path] ,** identifier[options] ):
identifier[items] . identifier[append] ( identifier[field_item] )
keyword[elif] identifier[is_pointer] ( identifier[item] ) keyword[and] identifier[get_nested] ( identifier[options] ):
keyword[for] identifier[field_item] keyword[in] identifier[item] . identifier[field_items] ( identifier[item_path] ,** identifier[options] ):
identifier[items] . identifier[append] ( identifier[field_item] )
keyword[elif] identifier[is_field] ( identifier[item] ):
identifier[items] . identifier[append] (( identifier[item_path] , identifier[item] ))
keyword[else] :
keyword[raise] identifier[MemberTypeError] ( identifier[self] , identifier[item] , identifier[item_path] )
keyword[return] identifier[items] | def field_items(self, path=str(), **options):
""" Returns a **flatten** list of ``('field path', field item)`` tuples
for each :class:`Field` *nested* in the `Sequence`.
:param str path: field path of the `Sequence`.
:keyword bool nested: if ``True`` all :class:`Pointer` fields in the
:attr:`~Pointer.data` objects of all :class:`Pointer` fields in
the `Sequence` list their referenced :attr:`~Pointer.data` object
field items as well (chained method call).
"""
items = list()
for (index, item) in enumerate(self):
if path:
item_path = '{0}[{1}]'.format(path, str(index)) # depends on [control=['if'], data=[]]
else:
item_path = '[{0}]'.format(str(index))
# Container
if is_container(item):
for field_item in item.field_items(item_path, **options):
items.append(field_item) # depends on [control=['for'], data=['field_item']] # depends on [control=['if'], data=[]]
# Pointer
elif is_pointer(item) and get_nested(options):
for field_item in item.field_items(item_path, **options):
items.append(field_item) # depends on [control=['for'], data=['field_item']] # depends on [control=['if'], data=[]]
# Field
elif is_field(item):
items.append((item_path, item)) # depends on [control=['if'], data=[]]
else:
raise MemberTypeError(self, item, item_path) # depends on [control=['for'], data=[]]
return items |
def from_class(metacls, cls, auto_store=True):
"""Create proper PySchema class from cls
Any methods and attributes will be transferred to the
new object
"""
if auto_store:
def wrap(cls):
return cls
else:
wrap = no_auto_store()
return wrap(metacls.__new__(
metacls,
cls.__name__,
(Record,),
dict(cls.__dict__)
)) | def function[from_class, parameter[metacls, cls, auto_store]]:
constant[Create proper PySchema class from cls
Any methods and attributes will be transferred to the
new object
]
if name[auto_store] begin[:]
def function[wrap, parameter[cls]]:
return[name[cls]]
return[call[name[wrap], parameter[call[name[metacls].__new__, parameter[name[metacls], name[cls].__name__, tuple[[<ast.Name object at 0x7da2044c3190>]], call[name[dict], parameter[name[cls].__dict__]]]]]]] | keyword[def] identifier[from_class] ( identifier[metacls] , identifier[cls] , identifier[auto_store] = keyword[True] ):
literal[string]
keyword[if] identifier[auto_store] :
keyword[def] identifier[wrap] ( identifier[cls] ):
keyword[return] identifier[cls]
keyword[else] :
identifier[wrap] = identifier[no_auto_store] ()
keyword[return] identifier[wrap] ( identifier[metacls] . identifier[__new__] (
identifier[metacls] ,
identifier[cls] . identifier[__name__] ,
( identifier[Record] ,),
identifier[dict] ( identifier[cls] . identifier[__dict__] )
)) | def from_class(metacls, cls, auto_store=True):
"""Create proper PySchema class from cls
Any methods and attributes will be transferred to the
new object
"""
if auto_store:
def wrap(cls):
return cls # depends on [control=['if'], data=[]]
else:
wrap = no_auto_store()
return wrap(metacls.__new__(metacls, cls.__name__, (Record,), dict(cls.__dict__))) |
def construct(self, sp_entity_id, attrconvs, policy, issuer, farg,
authn_class=None, authn_auth=None, authn_decl=None,
encrypt=None, sec_context=None, authn_decl_ref=None,
authn_instant="", subject_locality="", authn_statem=None,
name_id=None, session_not_on_or_after=None):
""" Construct the Assertion
:param sp_entity_id: The entityid of the SP
:param in_response_to: An identifier of the message, this message is
a response to
:param name_id: An NameID instance
:param attrconvs: AttributeConverters
:param policy: The policy that should be adhered to when replying
:param issuer: Who is issuing the statement
:param authn_class: The authentication class
:param authn_auth: The authentication instance
:param authn_decl: An Authentication Context declaration
:param encrypt: Whether to encrypt parts or all of the Assertion
:param sec_context: The security context used when encrypting
:param authn_decl_ref: An Authentication Context declaration reference
:param authn_instant: When the Authentication was performed
:param subject_locality: Specifies the DNS domain name and IP address
for the system from which the assertion subject was apparently
authenticated.
:param authn_statem: A AuthnStatement instance
:return: An Assertion instance
"""
if policy:
_name_format = policy.get_name_form(sp_entity_id)
else:
_name_format = NAME_FORMAT_URI
attr_statement = saml.AttributeStatement(attribute=from_local(
attrconvs, self, _name_format))
if encrypt == "attributes":
for attr in attr_statement.attribute:
enc = sec_context.encrypt(text="%s" % attr)
encd = xmlenc.encrypted_data_from_string(enc)
encattr = saml.EncryptedAttribute(encrypted_data=encd)
attr_statement.encrypted_attribute.append(encattr)
attr_statement.attribute = []
# start using now and for some time
conds = policy.conditions(sp_entity_id)
if authn_statem:
_authn_statement = authn_statem
elif authn_auth or authn_class or authn_decl or authn_decl_ref:
_authn_statement = authn_statement(authn_class, authn_auth,
authn_decl, authn_decl_ref,
authn_instant,
subject_locality,
session_not_on_or_after=session_not_on_or_after)
else:
_authn_statement = None
subject = do_subject(policy, sp_entity_id, name_id,
**farg['subject'])
_ass = assertion_factory(issuer=issuer, conditions=conds,
subject=subject)
if _authn_statement:
_ass.authn_statement = [_authn_statement]
if not attr_statement.empty():
_ass.attribute_statement = [attr_statement]
return _ass | def function[construct, parameter[self, sp_entity_id, attrconvs, policy, issuer, farg, authn_class, authn_auth, authn_decl, encrypt, sec_context, authn_decl_ref, authn_instant, subject_locality, authn_statem, name_id, session_not_on_or_after]]:
constant[ Construct the Assertion
:param sp_entity_id: The entityid of the SP
:param in_response_to: An identifier of the message, this message is
a response to
:param name_id: An NameID instance
:param attrconvs: AttributeConverters
:param policy: The policy that should be adhered to when replying
:param issuer: Who is issuing the statement
:param authn_class: The authentication class
:param authn_auth: The authentication instance
:param authn_decl: An Authentication Context declaration
:param encrypt: Whether to encrypt parts or all of the Assertion
:param sec_context: The security context used when encrypting
:param authn_decl_ref: An Authentication Context declaration reference
:param authn_instant: When the Authentication was performed
:param subject_locality: Specifies the DNS domain name and IP address
for the system from which the assertion subject was apparently
authenticated.
:param authn_statem: A AuthnStatement instance
:return: An Assertion instance
]
if name[policy] begin[:]
variable[_name_format] assign[=] call[name[policy].get_name_form, parameter[name[sp_entity_id]]]
variable[attr_statement] assign[=] call[name[saml].AttributeStatement, parameter[]]
if compare[name[encrypt] equal[==] constant[attributes]] begin[:]
for taget[name[attr]] in starred[name[attr_statement].attribute] begin[:]
variable[enc] assign[=] call[name[sec_context].encrypt, parameter[]]
variable[encd] assign[=] call[name[xmlenc].encrypted_data_from_string, parameter[name[enc]]]
variable[encattr] assign[=] call[name[saml].EncryptedAttribute, parameter[]]
call[name[attr_statement].encrypted_attribute.append, parameter[name[encattr]]]
name[attr_statement].attribute assign[=] list[[]]
variable[conds] assign[=] call[name[policy].conditions, parameter[name[sp_entity_id]]]
if name[authn_statem] begin[:]
variable[_authn_statement] assign[=] name[authn_statem]
variable[subject] assign[=] call[name[do_subject], parameter[name[policy], name[sp_entity_id], name[name_id]]]
variable[_ass] assign[=] call[name[assertion_factory], parameter[]]
if name[_authn_statement] begin[:]
name[_ass].authn_statement assign[=] list[[<ast.Name object at 0x7da2054a5b70>]]
if <ast.UnaryOp object at 0x7da2054a5720> begin[:]
name[_ass].attribute_statement assign[=] list[[<ast.Name object at 0x7da20e960730>]]
return[name[_ass]] | keyword[def] identifier[construct] ( identifier[self] , identifier[sp_entity_id] , identifier[attrconvs] , identifier[policy] , identifier[issuer] , identifier[farg] ,
identifier[authn_class] = keyword[None] , identifier[authn_auth] = keyword[None] , identifier[authn_decl] = keyword[None] ,
identifier[encrypt] = keyword[None] , identifier[sec_context] = keyword[None] , identifier[authn_decl_ref] = keyword[None] ,
identifier[authn_instant] = literal[string] , identifier[subject_locality] = literal[string] , identifier[authn_statem] = keyword[None] ,
identifier[name_id] = keyword[None] , identifier[session_not_on_or_after] = keyword[None] ):
literal[string]
keyword[if] identifier[policy] :
identifier[_name_format] = identifier[policy] . identifier[get_name_form] ( identifier[sp_entity_id] )
keyword[else] :
identifier[_name_format] = identifier[NAME_FORMAT_URI]
identifier[attr_statement] = identifier[saml] . identifier[AttributeStatement] ( identifier[attribute] = identifier[from_local] (
identifier[attrconvs] , identifier[self] , identifier[_name_format] ))
keyword[if] identifier[encrypt] == literal[string] :
keyword[for] identifier[attr] keyword[in] identifier[attr_statement] . identifier[attribute] :
identifier[enc] = identifier[sec_context] . identifier[encrypt] ( identifier[text] = literal[string] % identifier[attr] )
identifier[encd] = identifier[xmlenc] . identifier[encrypted_data_from_string] ( identifier[enc] )
identifier[encattr] = identifier[saml] . identifier[EncryptedAttribute] ( identifier[encrypted_data] = identifier[encd] )
identifier[attr_statement] . identifier[encrypted_attribute] . identifier[append] ( identifier[encattr] )
identifier[attr_statement] . identifier[attribute] =[]
identifier[conds] = identifier[policy] . identifier[conditions] ( identifier[sp_entity_id] )
keyword[if] identifier[authn_statem] :
identifier[_authn_statement] = identifier[authn_statem]
keyword[elif] identifier[authn_auth] keyword[or] identifier[authn_class] keyword[or] identifier[authn_decl] keyword[or] identifier[authn_decl_ref] :
identifier[_authn_statement] = identifier[authn_statement] ( identifier[authn_class] , identifier[authn_auth] ,
identifier[authn_decl] , identifier[authn_decl_ref] ,
identifier[authn_instant] ,
identifier[subject_locality] ,
identifier[session_not_on_or_after] = identifier[session_not_on_or_after] )
keyword[else] :
identifier[_authn_statement] = keyword[None]
identifier[subject] = identifier[do_subject] ( identifier[policy] , identifier[sp_entity_id] , identifier[name_id] ,
** identifier[farg] [ literal[string] ])
identifier[_ass] = identifier[assertion_factory] ( identifier[issuer] = identifier[issuer] , identifier[conditions] = identifier[conds] ,
identifier[subject] = identifier[subject] )
keyword[if] identifier[_authn_statement] :
identifier[_ass] . identifier[authn_statement] =[ identifier[_authn_statement] ]
keyword[if] keyword[not] identifier[attr_statement] . identifier[empty] ():
identifier[_ass] . identifier[attribute_statement] =[ identifier[attr_statement] ]
keyword[return] identifier[_ass] | def construct(self, sp_entity_id, attrconvs, policy, issuer, farg, authn_class=None, authn_auth=None, authn_decl=None, encrypt=None, sec_context=None, authn_decl_ref=None, authn_instant='', subject_locality='', authn_statem=None, name_id=None, session_not_on_or_after=None):
""" Construct the Assertion
:param sp_entity_id: The entityid of the SP
:param in_response_to: An identifier of the message, this message is
a response to
:param name_id: An NameID instance
:param attrconvs: AttributeConverters
:param policy: The policy that should be adhered to when replying
:param issuer: Who is issuing the statement
:param authn_class: The authentication class
:param authn_auth: The authentication instance
:param authn_decl: An Authentication Context declaration
:param encrypt: Whether to encrypt parts or all of the Assertion
:param sec_context: The security context used when encrypting
:param authn_decl_ref: An Authentication Context declaration reference
:param authn_instant: When the Authentication was performed
:param subject_locality: Specifies the DNS domain name and IP address
for the system from which the assertion subject was apparently
authenticated.
:param authn_statem: A AuthnStatement instance
:return: An Assertion instance
"""
if policy:
_name_format = policy.get_name_form(sp_entity_id) # depends on [control=['if'], data=[]]
else:
_name_format = NAME_FORMAT_URI
attr_statement = saml.AttributeStatement(attribute=from_local(attrconvs, self, _name_format))
if encrypt == 'attributes':
for attr in attr_statement.attribute:
enc = sec_context.encrypt(text='%s' % attr)
encd = xmlenc.encrypted_data_from_string(enc)
encattr = saml.EncryptedAttribute(encrypted_data=encd)
attr_statement.encrypted_attribute.append(encattr) # depends on [control=['for'], data=['attr']]
attr_statement.attribute = [] # depends on [control=['if'], data=[]]
# start using now and for some time
conds = policy.conditions(sp_entity_id)
if authn_statem:
_authn_statement = authn_statem # depends on [control=['if'], data=[]]
elif authn_auth or authn_class or authn_decl or authn_decl_ref:
_authn_statement = authn_statement(authn_class, authn_auth, authn_decl, authn_decl_ref, authn_instant, subject_locality, session_not_on_or_after=session_not_on_or_after) # depends on [control=['if'], data=[]]
else:
_authn_statement = None
subject = do_subject(policy, sp_entity_id, name_id, **farg['subject'])
_ass = assertion_factory(issuer=issuer, conditions=conds, subject=subject)
if _authn_statement:
_ass.authn_statement = [_authn_statement] # depends on [control=['if'], data=[]]
if not attr_statement.empty():
_ass.attribute_statement = [attr_statement] # depends on [control=['if'], data=[]]
return _ass |
def out_format(data, out, opts=None, **kwargs):
'''
Return the formatted outputter string for the passed data
'''
return try_printout(data, out, opts, **kwargs) | def function[out_format, parameter[data, out, opts]]:
constant[
Return the formatted outputter string for the passed data
]
return[call[name[try_printout], parameter[name[data], name[out], name[opts]]]] | keyword[def] identifier[out_format] ( identifier[data] , identifier[out] , identifier[opts] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[return] identifier[try_printout] ( identifier[data] , identifier[out] , identifier[opts] ,** identifier[kwargs] ) | def out_format(data, out, opts=None, **kwargs):
"""
Return the formatted outputter string for the passed data
"""
return try_printout(data, out, opts, **kwargs) |
def ofp_instruction_from_jsondict(dp, jsonlist, encap=True):
"""
This function is intended to be used with
ryu.lib.ofctl_string.ofp_instruction_from_str.
It is very similar to ofp_msg_from_jsondict, but works on
a list of OFPInstructions/OFPActions. It also encapsulates
OFPAction into OFPInstructionActions, as >OF1.0 OFPFlowMod
requires that.
This function takes the following arguments.
======== ==================================================
Argument Description
======== ==================================================
dp An instance of ryu.controller.Datapath.
jsonlist A list of JSON style dictionaries.
encap Encapsulate OFPAction into OFPInstructionActions.
Must be false for OF10.
======== ==================================================
"""
proto = dp.ofproto
parser = dp.ofproto_parser
actions = []
result = []
for jsondict in jsonlist:
assert len(jsondict) == 1
k, v = list(jsondict.items())[0]
cls = getattr(parser, k)
if issubclass(cls, parser.OFPAction):
if encap:
actions.append(cls.from_jsondict(v))
continue
else:
ofpinst = getattr(parser, 'OFPInstruction', None)
if not ofpinst or not issubclass(cls, ofpinst):
raise ValueError("Supplied jsondict is of wrong type: %s",
jsondict)
result.append(cls.from_jsondict(v))
if not encap:
return result
if actions:
# Although the OpenFlow spec says Apply Actions is executed first,
# let's place it in the head as a precaution.
result = [parser.OFPInstructionActions(
proto.OFPIT_APPLY_ACTIONS, actions)] + result
return result | def function[ofp_instruction_from_jsondict, parameter[dp, jsonlist, encap]]:
constant[
This function is intended to be used with
ryu.lib.ofctl_string.ofp_instruction_from_str.
It is very similar to ofp_msg_from_jsondict, but works on
a list of OFPInstructions/OFPActions. It also encapsulates
OFPAction into OFPInstructionActions, as >OF1.0 OFPFlowMod
requires that.
This function takes the following arguments.
======== ==================================================
Argument Description
======== ==================================================
dp An instance of ryu.controller.Datapath.
jsonlist A list of JSON style dictionaries.
encap Encapsulate OFPAction into OFPInstructionActions.
Must be false for OF10.
======== ==================================================
]
variable[proto] assign[=] name[dp].ofproto
variable[parser] assign[=] name[dp].ofproto_parser
variable[actions] assign[=] list[[]]
variable[result] assign[=] list[[]]
for taget[name[jsondict]] in starred[name[jsonlist]] begin[:]
assert[compare[call[name[len], parameter[name[jsondict]]] equal[==] constant[1]]]
<ast.Tuple object at 0x7da1b1a346a0> assign[=] call[call[name[list], parameter[call[name[jsondict].items, parameter[]]]]][constant[0]]
variable[cls] assign[=] call[name[getattr], parameter[name[parser], name[k]]]
if call[name[issubclass], parameter[name[cls], name[parser].OFPAction]] begin[:]
if name[encap] begin[:]
call[name[actions].append, parameter[call[name[cls].from_jsondict, parameter[name[v]]]]]
continue
call[name[result].append, parameter[call[name[cls].from_jsondict, parameter[name[v]]]]]
if <ast.UnaryOp object at 0x7da1b1a36e90> begin[:]
return[name[result]]
if name[actions] begin[:]
variable[result] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b1a350f0>]] + name[result]]
return[name[result]] | keyword[def] identifier[ofp_instruction_from_jsondict] ( identifier[dp] , identifier[jsonlist] , identifier[encap] = keyword[True] ):
literal[string]
identifier[proto] = identifier[dp] . identifier[ofproto]
identifier[parser] = identifier[dp] . identifier[ofproto_parser]
identifier[actions] =[]
identifier[result] =[]
keyword[for] identifier[jsondict] keyword[in] identifier[jsonlist] :
keyword[assert] identifier[len] ( identifier[jsondict] )== literal[int]
identifier[k] , identifier[v] = identifier[list] ( identifier[jsondict] . identifier[items] ())[ literal[int] ]
identifier[cls] = identifier[getattr] ( identifier[parser] , identifier[k] )
keyword[if] identifier[issubclass] ( identifier[cls] , identifier[parser] . identifier[OFPAction] ):
keyword[if] identifier[encap] :
identifier[actions] . identifier[append] ( identifier[cls] . identifier[from_jsondict] ( identifier[v] ))
keyword[continue]
keyword[else] :
identifier[ofpinst] = identifier[getattr] ( identifier[parser] , literal[string] , keyword[None] )
keyword[if] keyword[not] identifier[ofpinst] keyword[or] keyword[not] identifier[issubclass] ( identifier[cls] , identifier[ofpinst] ):
keyword[raise] identifier[ValueError] ( literal[string] ,
identifier[jsondict] )
identifier[result] . identifier[append] ( identifier[cls] . identifier[from_jsondict] ( identifier[v] ))
keyword[if] keyword[not] identifier[encap] :
keyword[return] identifier[result]
keyword[if] identifier[actions] :
identifier[result] =[ identifier[parser] . identifier[OFPInstructionActions] (
identifier[proto] . identifier[OFPIT_APPLY_ACTIONS] , identifier[actions] )]+ identifier[result]
keyword[return] identifier[result] | def ofp_instruction_from_jsondict(dp, jsonlist, encap=True):
"""
This function is intended to be used with
ryu.lib.ofctl_string.ofp_instruction_from_str.
It is very similar to ofp_msg_from_jsondict, but works on
a list of OFPInstructions/OFPActions. It also encapsulates
OFPAction into OFPInstructionActions, as >OF1.0 OFPFlowMod
requires that.
This function takes the following arguments.
======== ==================================================
Argument Description
======== ==================================================
dp An instance of ryu.controller.Datapath.
jsonlist A list of JSON style dictionaries.
encap Encapsulate OFPAction into OFPInstructionActions.
Must be false for OF10.
======== ==================================================
"""
proto = dp.ofproto
parser = dp.ofproto_parser
actions = []
result = []
for jsondict in jsonlist:
assert len(jsondict) == 1
(k, v) = list(jsondict.items())[0]
cls = getattr(parser, k)
if issubclass(cls, parser.OFPAction):
if encap:
actions.append(cls.from_jsondict(v))
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
ofpinst = getattr(parser, 'OFPInstruction', None)
if not ofpinst or not issubclass(cls, ofpinst):
raise ValueError('Supplied jsondict is of wrong type: %s', jsondict) # depends on [control=['if'], data=[]]
result.append(cls.from_jsondict(v)) # depends on [control=['for'], data=['jsondict']]
if not encap:
return result # depends on [control=['if'], data=[]]
if actions:
# Although the OpenFlow spec says Apply Actions is executed first,
# let's place it in the head as a precaution.
result = [parser.OFPInstructionActions(proto.OFPIT_APPLY_ACTIONS, actions)] + result # depends on [control=['if'], data=[]]
return result |
def close(self):
"""Close the authentication layer and cleanup
all the authentication wrapper objects.
"""
self.sasl.mechanism.destroy()
self.sasl_client.get_client().destroy()
self._underlying_xio.destroy() | def function[close, parameter[self]]:
constant[Close the authentication layer and cleanup
all the authentication wrapper objects.
]
call[name[self].sasl.mechanism.destroy, parameter[]]
call[call[name[self].sasl_client.get_client, parameter[]].destroy, parameter[]]
call[name[self]._underlying_xio.destroy, parameter[]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
identifier[self] . identifier[sasl] . identifier[mechanism] . identifier[destroy] ()
identifier[self] . identifier[sasl_client] . identifier[get_client] (). identifier[destroy] ()
identifier[self] . identifier[_underlying_xio] . identifier[destroy] () | def close(self):
"""Close the authentication layer and cleanup
all the authentication wrapper objects.
"""
self.sasl.mechanism.destroy()
self.sasl_client.get_client().destroy()
self._underlying_xio.destroy() |
def cli(ctx, ftdi_enable, ftdi_disable, serial_enable, serial_disable):
"""Manage FPGA boards drivers."""
exit_code = 0
if ftdi_enable: # pragma: no cover
exit_code = Drivers().ftdi_enable()
elif ftdi_disable: # pragma: no cover
exit_code = Drivers().ftdi_disable()
elif serial_enable: # pragma: no cover
exit_code = Drivers().serial_enable()
elif serial_disable: # pragma: no cover
exit_code = Drivers().serial_disable()
else:
click.secho(ctx.get_help())
ctx.exit(exit_code) | def function[cli, parameter[ctx, ftdi_enable, ftdi_disable, serial_enable, serial_disable]]:
constant[Manage FPGA boards drivers.]
variable[exit_code] assign[=] constant[0]
if name[ftdi_enable] begin[:]
variable[exit_code] assign[=] call[call[name[Drivers], parameter[]].ftdi_enable, parameter[]]
call[name[ctx].exit, parameter[name[exit_code]]] | keyword[def] identifier[cli] ( identifier[ctx] , identifier[ftdi_enable] , identifier[ftdi_disable] , identifier[serial_enable] , identifier[serial_disable] ):
literal[string]
identifier[exit_code] = literal[int]
keyword[if] identifier[ftdi_enable] :
identifier[exit_code] = identifier[Drivers] (). identifier[ftdi_enable] ()
keyword[elif] identifier[ftdi_disable] :
identifier[exit_code] = identifier[Drivers] (). identifier[ftdi_disable] ()
keyword[elif] identifier[serial_enable] :
identifier[exit_code] = identifier[Drivers] (). identifier[serial_enable] ()
keyword[elif] identifier[serial_disable] :
identifier[exit_code] = identifier[Drivers] (). identifier[serial_disable] ()
keyword[else] :
identifier[click] . identifier[secho] ( identifier[ctx] . identifier[get_help] ())
identifier[ctx] . identifier[exit] ( identifier[exit_code] ) | def cli(ctx, ftdi_enable, ftdi_disable, serial_enable, serial_disable):
"""Manage FPGA boards drivers."""
exit_code = 0
if ftdi_enable: # pragma: no cover
exit_code = Drivers().ftdi_enable() # depends on [control=['if'], data=[]]
elif ftdi_disable: # pragma: no cover
exit_code = Drivers().ftdi_disable() # depends on [control=['if'], data=[]]
elif serial_enable: # pragma: no cover
exit_code = Drivers().serial_enable() # depends on [control=['if'], data=[]]
elif serial_disable: # pragma: no cover
exit_code = Drivers().serial_disable() # depends on [control=['if'], data=[]]
else:
click.secho(ctx.get_help())
ctx.exit(exit_code) |
def mirror_video(self, is_mirror, callback=None):
'''
Mirror video
``is_mirror``: 0 not mirror, 1 mirror
'''
params = {'isMirror': is_mirror}
return self.execute_command('mirrorVideo', params, callback=callback) | def function[mirror_video, parameter[self, is_mirror, callback]]:
constant[
Mirror video
``is_mirror``: 0 not mirror, 1 mirror
]
variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a76e60>], [<ast.Name object at 0x7da1b1a775b0>]]
return[call[name[self].execute_command, parameter[constant[mirrorVideo], name[params]]]] | keyword[def] identifier[mirror_video] ( identifier[self] , identifier[is_mirror] , identifier[callback] = keyword[None] ):
literal[string]
identifier[params] ={ literal[string] : identifier[is_mirror] }
keyword[return] identifier[self] . identifier[execute_command] ( literal[string] , identifier[params] , identifier[callback] = identifier[callback] ) | def mirror_video(self, is_mirror, callback=None):
"""
Mirror video
``is_mirror``: 0 not mirror, 1 mirror
"""
params = {'isMirror': is_mirror}
return self.execute_command('mirrorVideo', params, callback=callback) |
def get_proficiencies_by_ids(self, proficiency_ids):
"""Gets a ``ProficiencyList`` corresponding to the given ``IdList``.
arg: proficiency_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``proficiency_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('learning',
collection='Proficiency',
runtime=self._runtime)
object_id_list = []
for i in proficiency_ids:
object_id_list.append(ObjectId(self._get_id(i, 'learning').get_identifier()))
result = collection.find(
dict({'_id': {'$in': object_id_list}},
**self._view_filter()))
result = list(result)
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break
return objects.ProficiencyList(sorted_result, runtime=self._runtime, proxy=self._proxy) | def function[get_proficiencies_by_ids, parameter[self, proficiency_ids]]:
constant[Gets a ``ProficiencyList`` corresponding to the given ``IdList``.
arg: proficiency_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``proficiency_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[learning]]]
variable[object_id_list] assign[=] list[[]]
for taget[name[i]] in starred[name[proficiency_ids]] begin[:]
call[name[object_id_list].append, parameter[call[name[ObjectId], parameter[call[call[name[self]._get_id, parameter[name[i], constant[learning]]].get_identifier, parameter[]]]]]]
variable[result] assign[=] call[name[collection].find, parameter[call[name[dict], parameter[dictionary[[<ast.Constant object at 0x7da18bc72500>], [<ast.Dict object at 0x7da18bc716f0>]]]]]]
variable[result] assign[=] call[name[list], parameter[name[result]]]
variable[sorted_result] assign[=] list[[]]
for taget[name[object_id]] in starred[name[object_id_list]] begin[:]
for taget[name[object_map]] in starred[name[result]] begin[:]
if compare[call[name[object_map]][constant[_id]] equal[==] name[object_id]] begin[:]
call[name[sorted_result].append, parameter[name[object_map]]]
break
return[call[name[objects].ProficiencyList, parameter[name[sorted_result]]]] | keyword[def] identifier[get_proficiencies_by_ids] ( identifier[self] , identifier[proficiency_ids] ):
literal[string]
identifier[collection] = identifier[JSONClientValidated] ( literal[string] ,
identifier[collection] = literal[string] ,
identifier[runtime] = identifier[self] . identifier[_runtime] )
identifier[object_id_list] =[]
keyword[for] identifier[i] keyword[in] identifier[proficiency_ids] :
identifier[object_id_list] . identifier[append] ( identifier[ObjectId] ( identifier[self] . identifier[_get_id] ( identifier[i] , literal[string] ). identifier[get_identifier] ()))
identifier[result] = identifier[collection] . identifier[find] (
identifier[dict] ({ literal[string] :{ literal[string] : identifier[object_id_list] }},
** identifier[self] . identifier[_view_filter] ()))
identifier[result] = identifier[list] ( identifier[result] )
identifier[sorted_result] =[]
keyword[for] identifier[object_id] keyword[in] identifier[object_id_list] :
keyword[for] identifier[object_map] keyword[in] identifier[result] :
keyword[if] identifier[object_map] [ literal[string] ]== identifier[object_id] :
identifier[sorted_result] . identifier[append] ( identifier[object_map] )
keyword[break]
keyword[return] identifier[objects] . identifier[ProficiencyList] ( identifier[sorted_result] , identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ) | def get_proficiencies_by_ids(self, proficiency_ids):
"""Gets a ``ProficiencyList`` corresponding to the given ``IdList``.
arg: proficiency_ids (osid.id.IdList): the list of ``Ids`` to
retrieve
return: (osid.learning.ProficiencyList) - the returned
``Proficiency`` list
raise: NotFound - an ``Id`` was not found
raise: NullArgument - ``proficiency_ids`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.ResourceLookupSession.get_resources_by_ids
# NOTE: This implementation currently ignores plenary view
collection = JSONClientValidated('learning', collection='Proficiency', runtime=self._runtime)
object_id_list = []
for i in proficiency_ids:
object_id_list.append(ObjectId(self._get_id(i, 'learning').get_identifier())) # depends on [control=['for'], data=['i']]
result = collection.find(dict({'_id': {'$in': object_id_list}}, **self._view_filter()))
result = list(result)
sorted_result = []
for object_id in object_id_list:
for object_map in result:
if object_map['_id'] == object_id:
sorted_result.append(object_map)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['object_map']] # depends on [control=['for'], data=['object_id']]
return objects.ProficiencyList(sorted_result, runtime=self._runtime, proxy=self._proxy) |
def auth_aliases(d):
"""Interpret user/password aliases.
"""
for alias, real in ((USER_KEY, "readonly_user"),
(PASS_KEY, "readonly_password")):
if alias in d:
d[real] = d[alias]
del d[alias] | def function[auth_aliases, parameter[d]]:
constant[Interpret user/password aliases.
]
for taget[tuple[[<ast.Name object at 0x7da20c9935b0>, <ast.Name object at 0x7da20c9912d0>]]] in starred[tuple[[<ast.Tuple object at 0x7da20c991a50>, <ast.Tuple object at 0x7da20c990a00>]]] begin[:]
if compare[name[alias] in name[d]] begin[:]
call[name[d]][name[real]] assign[=] call[name[d]][name[alias]]
<ast.Delete object at 0x7da20c991c90> | keyword[def] identifier[auth_aliases] ( identifier[d] ):
literal[string]
keyword[for] identifier[alias] , identifier[real] keyword[in] (( identifier[USER_KEY] , literal[string] ),
( identifier[PASS_KEY] , literal[string] )):
keyword[if] identifier[alias] keyword[in] identifier[d] :
identifier[d] [ identifier[real] ]= identifier[d] [ identifier[alias] ]
keyword[del] identifier[d] [ identifier[alias] ] | def auth_aliases(d):
"""Interpret user/password aliases.
"""
for (alias, real) in ((USER_KEY, 'readonly_user'), (PASS_KEY, 'readonly_password')):
if alias in d:
d[real] = d[alias]
del d[alias] # depends on [control=['if'], data=['alias', 'd']] # depends on [control=['for'], data=[]] |
def cast(self, method, args={}, declare=None, retry=None,
retry_policy=None, type=None, exchange=None, **props):
"""Send message to actor. Discarding replies."""
retry = self.retry if retry is None else retry
body = {'class': self.name, 'method': method, 'args': args}
_retry_policy = self.retry_policy
if retry_policy: # merge default and custom policies.
_retry_policy = dict(_retry_policy, **retry_policy)
if type and type not in self.types:
raise ValueError('Unsupported type: {0}'.format(type))
elif not type:
type = ACTOR_TYPE.DIRECT
props.setdefault('routing_key', self.routing_key)
props.setdefault('serializer', self.serializer)
exchange = exchange or self.type_to_exchange[type]()
declare = (maybe_list(declare) or []) + [exchange]
with producers[self._connection].acquire(block=True) as producer:
return producer.publish(body, exchange=exchange, declare=declare,
retry=retry, retry_policy=retry_policy,
**props) | def function[cast, parameter[self, method, args, declare, retry, retry_policy, type, exchange]]:
constant[Send message to actor. Discarding replies.]
variable[retry] assign[=] <ast.IfExp object at 0x7da1b138a5f0>
variable[body] assign[=] dictionary[[<ast.Constant object at 0x7da1b1389f30>, <ast.Constant object at 0x7da1b1388460>, <ast.Constant object at 0x7da1b13894e0>], [<ast.Attribute object at 0x7da1b1389810>, <ast.Name object at 0x7da1b1389060>, <ast.Name object at 0x7da1b138af20>]]
variable[_retry_policy] assign[=] name[self].retry_policy
if name[retry_policy] begin[:]
variable[_retry_policy] assign[=] call[name[dict], parameter[name[_retry_policy]]]
if <ast.BoolOp object at 0x7da20c795ea0> begin[:]
<ast.Raise object at 0x7da20c796aa0>
call[name[props].setdefault, parameter[constant[routing_key], name[self].routing_key]]
call[name[props].setdefault, parameter[constant[serializer], name[self].serializer]]
variable[exchange] assign[=] <ast.BoolOp object at 0x7da20c796650>
variable[declare] assign[=] binary_operation[<ast.BoolOp object at 0x7da2054a7280> + list[[<ast.Name object at 0x7da2054a58a0>]]]
with call[call[name[producers]][name[self]._connection].acquire, parameter[]] begin[:]
return[call[name[producer].publish, parameter[name[body]]]] | keyword[def] identifier[cast] ( identifier[self] , identifier[method] , identifier[args] ={}, identifier[declare] = keyword[None] , identifier[retry] = keyword[None] ,
identifier[retry_policy] = keyword[None] , identifier[type] = keyword[None] , identifier[exchange] = keyword[None] ,** identifier[props] ):
literal[string]
identifier[retry] = identifier[self] . identifier[retry] keyword[if] identifier[retry] keyword[is] keyword[None] keyword[else] identifier[retry]
identifier[body] ={ literal[string] : identifier[self] . identifier[name] , literal[string] : identifier[method] , literal[string] : identifier[args] }
identifier[_retry_policy] = identifier[self] . identifier[retry_policy]
keyword[if] identifier[retry_policy] :
identifier[_retry_policy] = identifier[dict] ( identifier[_retry_policy] ,** identifier[retry_policy] )
keyword[if] identifier[type] keyword[and] identifier[type] keyword[not] keyword[in] identifier[self] . identifier[types] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[type] ))
keyword[elif] keyword[not] identifier[type] :
identifier[type] = identifier[ACTOR_TYPE] . identifier[DIRECT]
identifier[props] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[routing_key] )
identifier[props] . identifier[setdefault] ( literal[string] , identifier[self] . identifier[serializer] )
identifier[exchange] = identifier[exchange] keyword[or] identifier[self] . identifier[type_to_exchange] [ identifier[type] ]()
identifier[declare] =( identifier[maybe_list] ( identifier[declare] ) keyword[or] [])+[ identifier[exchange] ]
keyword[with] identifier[producers] [ identifier[self] . identifier[_connection] ]. identifier[acquire] ( identifier[block] = keyword[True] ) keyword[as] identifier[producer] :
keyword[return] identifier[producer] . identifier[publish] ( identifier[body] , identifier[exchange] = identifier[exchange] , identifier[declare] = identifier[declare] ,
identifier[retry] = identifier[retry] , identifier[retry_policy] = identifier[retry_policy] ,
** identifier[props] ) | def cast(self, method, args={}, declare=None, retry=None, retry_policy=None, type=None, exchange=None, **props):
"""Send message to actor. Discarding replies."""
retry = self.retry if retry is None else retry
body = {'class': self.name, 'method': method, 'args': args}
_retry_policy = self.retry_policy
if retry_policy: # merge default and custom policies.
_retry_policy = dict(_retry_policy, **retry_policy) # depends on [control=['if'], data=[]]
if type and type not in self.types:
raise ValueError('Unsupported type: {0}'.format(type)) # depends on [control=['if'], data=[]]
elif not type:
type = ACTOR_TYPE.DIRECT # depends on [control=['if'], data=[]]
props.setdefault('routing_key', self.routing_key)
props.setdefault('serializer', self.serializer)
exchange = exchange or self.type_to_exchange[type]()
declare = (maybe_list(declare) or []) + [exchange]
with producers[self._connection].acquire(block=True) as producer:
return producer.publish(body, exchange=exchange, declare=declare, retry=retry, retry_policy=retry_policy, **props) # depends on [control=['with'], data=['producer']] |
def _pdb_frame(self):
"""Return current Pdb frame if there is any"""
if self._pdb_obj is not None and self._pdb_obj.curframe is not None:
return self._pdb_obj.curframe | def function[_pdb_frame, parameter[self]]:
constant[Return current Pdb frame if there is any]
if <ast.BoolOp object at 0x7da20e955690> begin[:]
return[name[self]._pdb_obj.curframe] | keyword[def] identifier[_pdb_frame] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_pdb_obj] keyword[is] keyword[not] keyword[None] keyword[and] identifier[self] . identifier[_pdb_obj] . identifier[curframe] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_pdb_obj] . identifier[curframe] | def _pdb_frame(self):
"""Return current Pdb frame if there is any"""
if self._pdb_obj is not None and self._pdb_obj.curframe is not None:
return self._pdb_obj.curframe # depends on [control=['if'], data=[]] |
def get_log_entry_query_session_for_log(self, log_id, proxy):
"""Gets the ``OsidSession`` associated with the log entry query service for the given log.
arg: log_id (osid.id.Id): the ``Id`` of the ``Log``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.logging.LogEntryQuerySession) - a
``LogEntryQuerySession``
raise: NotFound - no ``Log`` found by the given ``Id``
raise: NullArgument - ``log_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_log_entry_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_log_entry_query()`` and
``supports_visible_federation()`` are ``true``*
"""
if not self.supports_log_entry_query():
raise errors.Unimplemented()
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.LogEntryQuerySession(log_id, proxy, self._runtime) | def function[get_log_entry_query_session_for_log, parameter[self, log_id, proxy]]:
constant[Gets the ``OsidSession`` associated with the log entry query service for the given log.
arg: log_id (osid.id.Id): the ``Id`` of the ``Log``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.logging.LogEntryQuerySession) - a
``LogEntryQuerySession``
raise: NotFound - no ``Log`` found by the given ``Id``
raise: NullArgument - ``log_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_log_entry_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_log_entry_query()`` and
``supports_visible_federation()`` are ``true``*
]
if <ast.UnaryOp object at 0x7da204963a00> begin[:]
<ast.Raise object at 0x7da204963bb0>
return[call[name[sessions].LogEntryQuerySession, parameter[name[log_id], name[proxy], name[self]._runtime]]] | keyword[def] identifier[get_log_entry_query_session_for_log] ( identifier[self] , identifier[log_id] , identifier[proxy] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[supports_log_entry_query] ():
keyword[raise] identifier[errors] . identifier[Unimplemented] ()
keyword[return] identifier[sessions] . identifier[LogEntryQuerySession] ( identifier[log_id] , identifier[proxy] , identifier[self] . identifier[_runtime] ) | def get_log_entry_query_session_for_log(self, log_id, proxy):
"""Gets the ``OsidSession`` associated with the log entry query service for the given log.
arg: log_id (osid.id.Id): the ``Id`` of the ``Log``
arg: proxy (osid.proxy.Proxy): a proxy
return: (osid.logging.LogEntryQuerySession) - a
``LogEntryQuerySession``
raise: NotFound - no ``Log`` found by the given ``Id``
raise: NullArgument - ``log_id`` or ``proxy`` is ``null``
raise: OperationFailed - unable to complete request
raise: Unimplemented - ``supports_log_entry_query()`` or
``supports_visible_federation()`` is ``false``
*compliance: optional -- This method must be implemented if
``supports_log_entry_query()`` and
``supports_visible_federation()`` are ``true``*
"""
if not self.supports_log_entry_query():
raise errors.Unimplemented() # depends on [control=['if'], data=[]]
##
# Also include check to see if the catalog Id is found otherwise raise errors.NotFound
##
# pylint: disable=no-member
return sessions.LogEntryQuerySession(log_id, proxy, self._runtime) |
def file_sha1(path):
"""
Compute SHA1 hash of a file.
"""
sha1 = hashlib.sha1()
with open(path, "rb") as f:
while True:
block = f.read(2 ** 10)
if not block:
break
sha1.update(block)
return sha1.hexdigest() | def function[file_sha1, parameter[path]]:
constant[
Compute SHA1 hash of a file.
]
variable[sha1] assign[=] call[name[hashlib].sha1, parameter[]]
with call[name[open], parameter[name[path], constant[rb]]] begin[:]
while constant[True] begin[:]
variable[block] assign[=] call[name[f].read, parameter[binary_operation[constant[2] ** constant[10]]]]
if <ast.UnaryOp object at 0x7da1b04ef970> begin[:]
break
call[name[sha1].update, parameter[name[block]]]
return[call[name[sha1].hexdigest, parameter[]]] | keyword[def] identifier[file_sha1] ( identifier[path] ):
literal[string]
identifier[sha1] = identifier[hashlib] . identifier[sha1] ()
keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] :
keyword[while] keyword[True] :
identifier[block] = identifier[f] . identifier[read] ( literal[int] ** literal[int] )
keyword[if] keyword[not] identifier[block] :
keyword[break]
identifier[sha1] . identifier[update] ( identifier[block] )
keyword[return] identifier[sha1] . identifier[hexdigest] () | def file_sha1(path):
"""
Compute SHA1 hash of a file.
"""
sha1 = hashlib.sha1()
with open(path, 'rb') as f:
while True:
block = f.read(2 ** 10)
if not block:
break # depends on [control=['if'], data=[]]
sha1.update(block) # depends on [control=['while'], data=[]]
return sha1.hexdigest() # depends on [control=['with'], data=['f']] |
def options(self, context, module_options):
'''
PATH Path to dll/exe to inject
PROCID Process ID to inject into (default: current powershell process)
EXEARGS Arguments to pass to the executable being reflectively loaded (default: None)
'''
if not 'PATH' in module_options:
context.log.error('PATH option is required!')
exit(1)
self.payload_path = os.path.expanduser(module_options['PATH'])
if not os.path.exists(self.payload_path):
context.log.error('Invalid path to EXE/DLL!')
exit(1)
self.procid = None
self.exeargs = None
if 'PROCID' in module_options:
self.procid = module_options['PROCID']
if 'EXEARGS' in module_options:
self.exeargs = module_options['EXEARGS']
self.ps_script = obfs_ps_script('powersploit/CodeExecution/Invoke-ReflectivePEInjection.ps1') | def function[options, parameter[self, context, module_options]]:
constant[
PATH Path to dll/exe to inject
PROCID Process ID to inject into (default: current powershell process)
EXEARGS Arguments to pass to the executable being reflectively loaded (default: None)
]
if <ast.UnaryOp object at 0x7da20e962aa0> begin[:]
call[name[context].log.error, parameter[constant[PATH option is required!]]]
call[name[exit], parameter[constant[1]]]
name[self].payload_path assign[=] call[name[os].path.expanduser, parameter[call[name[module_options]][constant[PATH]]]]
if <ast.UnaryOp object at 0x7da20e961390> begin[:]
call[name[context].log.error, parameter[constant[Invalid path to EXE/DLL!]]]
call[name[exit], parameter[constant[1]]]
name[self].procid assign[=] constant[None]
name[self].exeargs assign[=] constant[None]
if compare[constant[PROCID] in name[module_options]] begin[:]
name[self].procid assign[=] call[name[module_options]][constant[PROCID]]
if compare[constant[EXEARGS] in name[module_options]] begin[:]
name[self].exeargs assign[=] call[name[module_options]][constant[EXEARGS]]
name[self].ps_script assign[=] call[name[obfs_ps_script], parameter[constant[powersploit/CodeExecution/Invoke-ReflectivePEInjection.ps1]]] | keyword[def] identifier[options] ( identifier[self] , identifier[context] , identifier[module_options] ):
literal[string]
keyword[if] keyword[not] literal[string] keyword[in] identifier[module_options] :
identifier[context] . identifier[log] . identifier[error] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[self] . identifier[payload_path] = identifier[os] . identifier[path] . identifier[expanduser] ( identifier[module_options] [ literal[string] ])
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[self] . identifier[payload_path] ):
identifier[context] . identifier[log] . identifier[error] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[self] . identifier[procid] = keyword[None]
identifier[self] . identifier[exeargs] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[module_options] :
identifier[self] . identifier[procid] = identifier[module_options] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[module_options] :
identifier[self] . identifier[exeargs] = identifier[module_options] [ literal[string] ]
identifier[self] . identifier[ps_script] = identifier[obfs_ps_script] ( literal[string] ) | def options(self, context, module_options):
"""
PATH Path to dll/exe to inject
PROCID Process ID to inject into (default: current powershell process)
EXEARGS Arguments to pass to the executable being reflectively loaded (default: None)
"""
if not 'PATH' in module_options:
context.log.error('PATH option is required!')
exit(1) # depends on [control=['if'], data=[]]
self.payload_path = os.path.expanduser(module_options['PATH'])
if not os.path.exists(self.payload_path):
context.log.error('Invalid path to EXE/DLL!')
exit(1) # depends on [control=['if'], data=[]]
self.procid = None
self.exeargs = None
if 'PROCID' in module_options:
self.procid = module_options['PROCID'] # depends on [control=['if'], data=['module_options']]
if 'EXEARGS' in module_options:
self.exeargs = module_options['EXEARGS'] # depends on [control=['if'], data=['module_options']]
self.ps_script = obfs_ps_script('powersploit/CodeExecution/Invoke-ReflectivePEInjection.ps1') |
def exists(self, **kwargs):
"""Providing a partition is not necessary on topology; causes errors"""
kwargs.pop('partition', None)
kwargs['transform_name'] = True
return self._exists(**kwargs) | def function[exists, parameter[self]]:
constant[Providing a partition is not necessary on topology; causes errors]
call[name[kwargs].pop, parameter[constant[partition], constant[None]]]
call[name[kwargs]][constant[transform_name]] assign[=] constant[True]
return[call[name[self]._exists, parameter[]]] | keyword[def] identifier[exists] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] )
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[return] identifier[self] . identifier[_exists] (** identifier[kwargs] ) | def exists(self, **kwargs):
"""Providing a partition is not necessary on topology; causes errors"""
kwargs.pop('partition', None)
kwargs['transform_name'] = True
return self._exists(**kwargs) |
def keep_entry_range(entry, lows, highs, converter, regex):
"""
Check if an entry falls into a desired range.
Every number in the entry will be extracted using *regex*,
if any are within a given low to high range the entry will
be kept.
Parameters
----------
entry : str
lows : iterable
Collection of low values against which to compare the entry.
highs : iterable
Collection of high values against which to compare the entry.
converter : callable
Function to convert a string to a number.
regex : regex object
Regular expression to locate numbers in a string.
Returns
-------
True if the entry should be kept, False otherwise.
"""
return any(
low <= converter(num) <= high
for num in regex.findall(entry)
for low, high in zip(lows, highs)
) | def function[keep_entry_range, parameter[entry, lows, highs, converter, regex]]:
constant[
Check if an entry falls into a desired range.
Every number in the entry will be extracted using *regex*,
if any are within a given low to high range the entry will
be kept.
Parameters
----------
entry : str
lows : iterable
Collection of low values against which to compare the entry.
highs : iterable
Collection of high values against which to compare the entry.
converter : callable
Function to convert a string to a number.
regex : regex object
Regular expression to locate numbers in a string.
Returns
-------
True if the entry should be kept, False otherwise.
]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0b37520>]]] | keyword[def] identifier[keep_entry_range] ( identifier[entry] , identifier[lows] , identifier[highs] , identifier[converter] , identifier[regex] ):
literal[string]
keyword[return] identifier[any] (
identifier[low] <= identifier[converter] ( identifier[num] )<= identifier[high]
keyword[for] identifier[num] keyword[in] identifier[regex] . identifier[findall] ( identifier[entry] )
keyword[for] identifier[low] , identifier[high] keyword[in] identifier[zip] ( identifier[lows] , identifier[highs] )
) | def keep_entry_range(entry, lows, highs, converter, regex):
"""
Check if an entry falls into a desired range.
Every number in the entry will be extracted using *regex*,
if any are within a given low to high range the entry will
be kept.
Parameters
----------
entry : str
lows : iterable
Collection of low values against which to compare the entry.
highs : iterable
Collection of high values against which to compare the entry.
converter : callable
Function to convert a string to a number.
regex : regex object
Regular expression to locate numbers in a string.
Returns
-------
True if the entry should be kept, False otherwise.
"""
return any((low <= converter(num) <= high for num in regex.findall(entry) for (low, high) in zip(lows, highs))) |
def load_tool_info(tool_name):
"""
Load the tool-info class.
@param tool_name: The name of the tool-info module.
Either a full Python package name or a name within the benchexec.tools package.
@return: A tuple of the full name of the used tool-info module and an instance of the tool-info class.
"""
tool_module = tool_name if '.' in tool_name else ("benchexec.tools." + tool_name)
try:
tool = __import__(tool_module, fromlist=['Tool']).Tool()
except ImportError as ie:
sys.exit('Unsupported tool "{0}" specified. ImportError: {1}'.format(tool_name, ie))
except AttributeError:
sys.exit('The module "{0}" does not define the necessary class "Tool", '
'it cannot be used as tool info for BenchExec.'.format(tool_module))
return (tool_module, tool) | def function[load_tool_info, parameter[tool_name]]:
constant[
Load the tool-info class.
@param tool_name: The name of the tool-info module.
Either a full Python package name or a name within the benchexec.tools package.
@return: A tuple of the full name of the used tool-info module and an instance of the tool-info class.
]
variable[tool_module] assign[=] <ast.IfExp object at 0x7da18f812fb0>
<ast.Try object at 0x7da18f811900>
return[tuple[[<ast.Name object at 0x7da18dc9a050>, <ast.Name object at 0x7da18dc98b50>]]] | keyword[def] identifier[load_tool_info] ( identifier[tool_name] ):
literal[string]
identifier[tool_module] = identifier[tool_name] keyword[if] literal[string] keyword[in] identifier[tool_name] keyword[else] ( literal[string] + identifier[tool_name] )
keyword[try] :
identifier[tool] = identifier[__import__] ( identifier[tool_module] , identifier[fromlist] =[ literal[string] ]). identifier[Tool] ()
keyword[except] identifier[ImportError] keyword[as] identifier[ie] :
identifier[sys] . identifier[exit] ( literal[string] . identifier[format] ( identifier[tool_name] , identifier[ie] ))
keyword[except] identifier[AttributeError] :
identifier[sys] . identifier[exit] ( literal[string]
literal[string] . identifier[format] ( identifier[tool_module] ))
keyword[return] ( identifier[tool_module] , identifier[tool] ) | def load_tool_info(tool_name):
"""
Load the tool-info class.
@param tool_name: The name of the tool-info module.
Either a full Python package name or a name within the benchexec.tools package.
@return: A tuple of the full name of the used tool-info module and an instance of the tool-info class.
"""
tool_module = tool_name if '.' in tool_name else 'benchexec.tools.' + tool_name
try:
tool = __import__(tool_module, fromlist=['Tool']).Tool() # depends on [control=['try'], data=[]]
except ImportError as ie:
sys.exit('Unsupported tool "{0}" specified. ImportError: {1}'.format(tool_name, ie)) # depends on [control=['except'], data=['ie']]
except AttributeError:
sys.exit('The module "{0}" does not define the necessary class "Tool", it cannot be used as tool info for BenchExec.'.format(tool_module)) # depends on [control=['except'], data=[]]
return (tool_module, tool) |
def insert_header(self, hkey, value, index=None):
"""
This will insert/overwrite a value to the header and hkeys.
Parameters
----------
hkey
Header key. Will be appended to self.hkeys if non existent, or
inserted at the specified index.
If hkey is an integer, uses self.hkeys[hkey].
value
Value of the header.
index=None
If specified (integer), hkey will be inserted at this location in
self.hkeys.
"""
#if hkey is '': return
# if it's an integer, use the hkey from the list
if type(hkey) in [int, int]: hkey = self.hkeys[hkey]
# set the data
self.headers[str(hkey)] = value
if not hkey in self.hkeys:
if index is None: self.hkeys.append(str(hkey))
else: self.hkeys.insert(index, str(hkey))
return self | def function[insert_header, parameter[self, hkey, value, index]]:
constant[
This will insert/overwrite a value to the header and hkeys.
Parameters
----------
hkey
Header key. Will be appended to self.hkeys if non existent, or
inserted at the specified index.
If hkey is an integer, uses self.hkeys[hkey].
value
Value of the header.
index=None
If specified (integer), hkey will be inserted at this location in
self.hkeys.
]
if compare[call[name[type], parameter[name[hkey]]] in list[[<ast.Name object at 0x7da2041db550>, <ast.Name object at 0x7da2041d8af0>]]] begin[:]
variable[hkey] assign[=] call[name[self].hkeys][name[hkey]]
call[name[self].headers][call[name[str], parameter[name[hkey]]]] assign[=] name[value]
if <ast.UnaryOp object at 0x7da18c4cf940> begin[:]
if compare[name[index] is constant[None]] begin[:]
call[name[self].hkeys.append, parameter[call[name[str], parameter[name[hkey]]]]]
return[name[self]] | keyword[def] identifier[insert_header] ( identifier[self] , identifier[hkey] , identifier[value] , identifier[index] = keyword[None] ):
literal[string]
keyword[if] identifier[type] ( identifier[hkey] ) keyword[in] [ identifier[int] , identifier[int] ]: identifier[hkey] = identifier[self] . identifier[hkeys] [ identifier[hkey] ]
identifier[self] . identifier[headers] [ identifier[str] ( identifier[hkey] )]= identifier[value]
keyword[if] keyword[not] identifier[hkey] keyword[in] identifier[self] . identifier[hkeys] :
keyword[if] identifier[index] keyword[is] keyword[None] : identifier[self] . identifier[hkeys] . identifier[append] ( identifier[str] ( identifier[hkey] ))
keyword[else] : identifier[self] . identifier[hkeys] . identifier[insert] ( identifier[index] , identifier[str] ( identifier[hkey] ))
keyword[return] identifier[self] | def insert_header(self, hkey, value, index=None):
"""
This will insert/overwrite a value to the header and hkeys.
Parameters
----------
hkey
Header key. Will be appended to self.hkeys if non existent, or
inserted at the specified index.
If hkey is an integer, uses self.hkeys[hkey].
value
Value of the header.
index=None
If specified (integer), hkey will be inserted at this location in
self.hkeys.
"""
#if hkey is '': return
# if it's an integer, use the hkey from the list
if type(hkey) in [int, int]:
hkey = self.hkeys[hkey] # depends on [control=['if'], data=[]]
# set the data
self.headers[str(hkey)] = value
if not hkey in self.hkeys:
if index is None:
self.hkeys.append(str(hkey)) # depends on [control=['if'], data=[]]
else:
self.hkeys.insert(index, str(hkey)) # depends on [control=['if'], data=[]]
return self |
def create_cookie(host, path, secure, expires, name, value):
"""Shortcut function to create a cookie
"""
return http.cookiejar.Cookie(0, name, value, None, False, host, host.startswith('.'), host.startswith('.'), path,
True, secure, expires, False, None, None, {}) | def function[create_cookie, parameter[host, path, secure, expires, name, value]]:
constant[Shortcut function to create a cookie
]
return[call[name[http].cookiejar.Cookie, parameter[constant[0], name[name], name[value], constant[None], constant[False], name[host], call[name[host].startswith, parameter[constant[.]]], call[name[host].startswith, parameter[constant[.]]], name[path], constant[True], name[secure], name[expires], constant[False], constant[None], constant[None], dictionary[[], []]]]] | keyword[def] identifier[create_cookie] ( identifier[host] , identifier[path] , identifier[secure] , identifier[expires] , identifier[name] , identifier[value] ):
literal[string]
keyword[return] identifier[http] . identifier[cookiejar] . identifier[Cookie] ( literal[int] , identifier[name] , identifier[value] , keyword[None] , keyword[False] , identifier[host] , identifier[host] . identifier[startswith] ( literal[string] ), identifier[host] . identifier[startswith] ( literal[string] ), identifier[path] ,
keyword[True] , identifier[secure] , identifier[expires] , keyword[False] , keyword[None] , keyword[None] ,{}) | def create_cookie(host, path, secure, expires, name, value):
"""Shortcut function to create a cookie
"""
return http.cookiejar.Cookie(0, name, value, None, False, host, host.startswith('.'), host.startswith('.'), path, True, secure, expires, False, None, None, {}) |
def ll(self,*args,**kwargs):
"""
NAME:
ll
PURPOSE:
return Galactic longitude
INPUT:
t - (optional) time at which to get ll
obs=[X,Y,Z] - (optional) position of observer (in kpc)
(default=Object-wide default)
OR Orbit object that corresponds to the orbit
of the observer
Y is ignored and always assumed to be zero
ro= distance in kpc corresponding to R=1. (default=Object-wide default)
OUTPUT:
l(t)
HISTORY:
2011-02-23 - Written - Bovy (NYU)
"""
_check_roSet(self,kwargs,'ll')
lbd= self._lbd(*args,**kwargs)
return lbd[:,0] | def function[ll, parameter[self]]:
constant[
NAME:
ll
PURPOSE:
return Galactic longitude
INPUT:
t - (optional) time at which to get ll
obs=[X,Y,Z] - (optional) position of observer (in kpc)
(default=Object-wide default)
OR Orbit object that corresponds to the orbit
of the observer
Y is ignored and always assumed to be zero
ro= distance in kpc corresponding to R=1. (default=Object-wide default)
OUTPUT:
l(t)
HISTORY:
2011-02-23 - Written - Bovy (NYU)
]
call[name[_check_roSet], parameter[name[self], name[kwargs], constant[ll]]]
variable[lbd] assign[=] call[name[self]._lbd, parameter[<ast.Starred object at 0x7da1b0c90c40>]]
return[call[name[lbd]][tuple[[<ast.Slice object at 0x7da1b0c93280>, <ast.Constant object at 0x7da1b0c90730>]]]] | keyword[def] identifier[ll] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[_check_roSet] ( identifier[self] , identifier[kwargs] , literal[string] )
identifier[lbd] = identifier[self] . identifier[_lbd] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[lbd] [:, literal[int] ] | def ll(self, *args, **kwargs):
"""
NAME:
ll
PURPOSE:
return Galactic longitude
INPUT:
t - (optional) time at which to get ll
obs=[X,Y,Z] - (optional) position of observer (in kpc)
(default=Object-wide default)
OR Orbit object that corresponds to the orbit
of the observer
Y is ignored and always assumed to be zero
ro= distance in kpc corresponding to R=1. (default=Object-wide default)
OUTPUT:
l(t)
HISTORY:
2011-02-23 - Written - Bovy (NYU)
"""
_check_roSet(self, kwargs, 'll')
lbd = self._lbd(*args, **kwargs)
return lbd[:, 0] |
def load(name,
split=None,
data_dir=None,
batch_size=1,
download=True,
as_supervised=False,
with_info=False,
builder_kwargs=None,
download_and_prepare_kwargs=None,
as_dataset_kwargs=None,
try_gcs=False):
"""Loads the named dataset into a `tf.data.Dataset`.
If `split=None` (the default), returns all splits for the dataset. Otherwise,
returns the specified split.
`load` is a convenience method that fetches the `tfds.core.DatasetBuilder` by
string name, optionally calls `DatasetBuilder.download_and_prepare`
(if `download=True`), and then calls `DatasetBuilder.as_dataset`.
This is roughly equivalent to:
```
builder = tfds.builder(name, data_dir=data_dir, **builder_kwargs)
if download:
builder.download_and_prepare(**download_and_prepare_kwargs)
ds = builder.as_dataset(
split=split, as_supervised=as_supervised, **as_dataset_kwargs)
if with_info:
return ds, builder.info
return ds
```
If you'd like NumPy arrays instead of `tf.data.Dataset`s or `tf.Tensor`s,
you can pass the return value to `tfds.as_numpy`.
Callers must pass arguments as keyword arguments.
**Warning**: calling this function might potentially trigger the download
of hundreds of GiB to disk. Refer to the `download` argument.
Args:
name: `str`, the registered name of the `DatasetBuilder` (the snake case
version of the class name). This can be either `"dataset_name"` or
`"dataset_name/config_name"` for datasets with `BuilderConfig`s.
As a convenience, this string may contain comma-separated keyword
arguments for the builder. For example `"foo_bar/a=True,b=3"` would use
the `FooBar` dataset passing the keyword arguments `a=True` and `b=3`
(for builders with configs, it would be `"foo_bar/zoo/a=True,b=3"` to
use the `"zoo"` config and pass to the builder keyword arguments `a=True`
and `b=3`).
split: `tfds.Split` or `str`, which split of the data to load. If None,
will return a `dict` with all splits (typically `tfds.Split.TRAIN` and
`tfds.Split.TEST`).
data_dir: `str` (optional), directory to read/write data.
Defaults to "~/tensorflow_datasets".
batch_size: `int`, set to > 1 to get batches of examples. Note that
variable length features will be 0-padded. If
`batch_size=-1`, will return the full dataset as `tf.Tensor`s.
download: `bool` (optional), whether to call
`tfds.core.DatasetBuilder.download_and_prepare`
before calling `tf.DatasetBuilder.as_dataset`. If `False`, data is
expected to be in `data_dir`. If `True` and the data is already in
`data_dir`, `download_and_prepare` is a no-op.
as_supervised: `bool`, if `True`, the returned `tf.data.Dataset`
will have a 2-tuple structure `(input, label)` according to
`builder.info.supervised_keys`. If `False`, the default,
the returned `tf.data.Dataset` will have a dictionary with all the
features.
with_info: `bool`, if True, tfds.load will return the tuple
(tf.data.Dataset, tfds.core.DatasetInfo) containing the info associated
with the builder.
builder_kwargs: `dict` (optional), keyword arguments to be passed to the
`tfds.core.DatasetBuilder` constructor. `data_dir` will be passed
through by default.
download_and_prepare_kwargs: `dict` (optional) keyword arguments passed to
`tfds.core.DatasetBuilder.download_and_prepare` if `download=True`. Allow
to control where to download and extract the cached data. If not set,
cache_dir and manual_dir will automatically be deduced from data_dir.
as_dataset_kwargs: `dict` (optional), keyword arguments passed to
`tfds.core.DatasetBuilder.as_dataset`. `split` will be passed through by
default. Example: `{'shuffle_files': True}`.
Note that shuffle_files is False by default unless
`split == tfds.Split.TRAIN`.
try_gcs: `bool`, if True, tfds.load will see if the dataset exists on
the public GCS bucket before building it locally.
Returns:
ds: `tf.data.Dataset`, the dataset requested, or if `split` is None, a
`dict<key: tfds.Split, value: tfds.data.Dataset>`. If `batch_size=-1`,
these will be full datasets as `tf.Tensor`s.
ds_info: `tfds.core.DatasetInfo`, if `with_info` is True, then `tfds.load`
will return a tuple `(ds, ds_info)` containing dataset information
(version, features, splits, num_examples,...). Note that the `ds_info`
object documents the entire dataset, regardless of the `split` requested.
Split-specific information is available in `ds_info.splits`.
"""
name, name_builder_kwargs = _dataset_name_and_kwargs_from_name_str(name)
name_builder_kwargs.update(builder_kwargs or {})
builder_kwargs = name_builder_kwargs
# Set data_dir
if try_gcs and gcs_utils.is_dataset_on_gcs(name):
data_dir = constants.GCS_DATA_DIR
elif data_dir is None:
data_dir = constants.DATA_DIR
dbuilder = builder(name, data_dir=data_dir, **builder_kwargs)
if download:
download_and_prepare_kwargs = download_and_prepare_kwargs or {}
dbuilder.download_and_prepare(**download_and_prepare_kwargs)
if as_dataset_kwargs is None:
as_dataset_kwargs = {}
as_dataset_kwargs = dict(as_dataset_kwargs)
as_dataset_kwargs["split"] = split
as_dataset_kwargs["as_supervised"] = as_supervised
as_dataset_kwargs["batch_size"] = batch_size
ds = dbuilder.as_dataset(**as_dataset_kwargs)
if with_info:
return ds, dbuilder.info
return ds | def function[load, parameter[name, split, data_dir, batch_size, download, as_supervised, with_info, builder_kwargs, download_and_prepare_kwargs, as_dataset_kwargs, try_gcs]]:
constant[Loads the named dataset into a `tf.data.Dataset`.
If `split=None` (the default), returns all splits for the dataset. Otherwise,
returns the specified split.
`load` is a convenience method that fetches the `tfds.core.DatasetBuilder` by
string name, optionally calls `DatasetBuilder.download_and_prepare`
(if `download=True`), and then calls `DatasetBuilder.as_dataset`.
This is roughly equivalent to:
```
builder = tfds.builder(name, data_dir=data_dir, **builder_kwargs)
if download:
builder.download_and_prepare(**download_and_prepare_kwargs)
ds = builder.as_dataset(
split=split, as_supervised=as_supervised, **as_dataset_kwargs)
if with_info:
return ds, builder.info
return ds
```
If you'd like NumPy arrays instead of `tf.data.Dataset`s or `tf.Tensor`s,
you can pass the return value to `tfds.as_numpy`.
Callers must pass arguments as keyword arguments.
**Warning**: calling this function might potentially trigger the download
of hundreds of GiB to disk. Refer to the `download` argument.
Args:
name: `str`, the registered name of the `DatasetBuilder` (the snake case
version of the class name). This can be either `"dataset_name"` or
`"dataset_name/config_name"` for datasets with `BuilderConfig`s.
As a convenience, this string may contain comma-separated keyword
arguments for the builder. For example `"foo_bar/a=True,b=3"` would use
the `FooBar` dataset passing the keyword arguments `a=True` and `b=3`
(for builders with configs, it would be `"foo_bar/zoo/a=True,b=3"` to
use the `"zoo"` config and pass to the builder keyword arguments `a=True`
and `b=3`).
split: `tfds.Split` or `str`, which split of the data to load. If None,
will return a `dict` with all splits (typically `tfds.Split.TRAIN` and
`tfds.Split.TEST`).
data_dir: `str` (optional), directory to read/write data.
Defaults to "~/tensorflow_datasets".
batch_size: `int`, set to > 1 to get batches of examples. Note that
variable length features will be 0-padded. If
`batch_size=-1`, will return the full dataset as `tf.Tensor`s.
download: `bool` (optional), whether to call
`tfds.core.DatasetBuilder.download_and_prepare`
before calling `tf.DatasetBuilder.as_dataset`. If `False`, data is
expected to be in `data_dir`. If `True` and the data is already in
`data_dir`, `download_and_prepare` is a no-op.
as_supervised: `bool`, if `True`, the returned `tf.data.Dataset`
will have a 2-tuple structure `(input, label)` according to
`builder.info.supervised_keys`. If `False`, the default,
the returned `tf.data.Dataset` will have a dictionary with all the
features.
with_info: `bool`, if True, tfds.load will return the tuple
(tf.data.Dataset, tfds.core.DatasetInfo) containing the info associated
with the builder.
builder_kwargs: `dict` (optional), keyword arguments to be passed to the
`tfds.core.DatasetBuilder` constructor. `data_dir` will be passed
through by default.
download_and_prepare_kwargs: `dict` (optional) keyword arguments passed to
`tfds.core.DatasetBuilder.download_and_prepare` if `download=True`. Allow
to control where to download and extract the cached data. If not set,
cache_dir and manual_dir will automatically be deduced from data_dir.
as_dataset_kwargs: `dict` (optional), keyword arguments passed to
`tfds.core.DatasetBuilder.as_dataset`. `split` will be passed through by
default. Example: `{'shuffle_files': True}`.
Note that shuffle_files is False by default unless
`split == tfds.Split.TRAIN`.
try_gcs: `bool`, if True, tfds.load will see if the dataset exists on
the public GCS bucket before building it locally.
Returns:
ds: `tf.data.Dataset`, the dataset requested, or if `split` is None, a
`dict<key: tfds.Split, value: tfds.data.Dataset>`. If `batch_size=-1`,
these will be full datasets as `tf.Tensor`s.
ds_info: `tfds.core.DatasetInfo`, if `with_info` is True, then `tfds.load`
will return a tuple `(ds, ds_info)` containing dataset information
(version, features, splits, num_examples,...). Note that the `ds_info`
object documents the entire dataset, regardless of the `split` requested.
Split-specific information is available in `ds_info.splits`.
]
<ast.Tuple object at 0x7da1b2066620> assign[=] call[name[_dataset_name_and_kwargs_from_name_str], parameter[name[name]]]
call[name[name_builder_kwargs].update, parameter[<ast.BoolOp object at 0x7da1b20673d0>]]
variable[builder_kwargs] assign[=] name[name_builder_kwargs]
if <ast.BoolOp object at 0x7da1b2067940> begin[:]
variable[data_dir] assign[=] name[constants].GCS_DATA_DIR
variable[dbuilder] assign[=] call[name[builder], parameter[name[name]]]
if name[download] begin[:]
variable[download_and_prepare_kwargs] assign[=] <ast.BoolOp object at 0x7da1b2025960>
call[name[dbuilder].download_and_prepare, parameter[]]
if compare[name[as_dataset_kwargs] is constant[None]] begin[:]
variable[as_dataset_kwargs] assign[=] dictionary[[], []]
variable[as_dataset_kwargs] assign[=] call[name[dict], parameter[name[as_dataset_kwargs]]]
call[name[as_dataset_kwargs]][constant[split]] assign[=] name[split]
call[name[as_dataset_kwargs]][constant[as_supervised]] assign[=] name[as_supervised]
call[name[as_dataset_kwargs]][constant[batch_size]] assign[=] name[batch_size]
variable[ds] assign[=] call[name[dbuilder].as_dataset, parameter[]]
if name[with_info] begin[:]
return[tuple[[<ast.Name object at 0x7da1b2061d80>, <ast.Attribute object at 0x7da1b2061d50>]]]
return[name[ds]] | keyword[def] identifier[load] ( identifier[name] ,
identifier[split] = keyword[None] ,
identifier[data_dir] = keyword[None] ,
identifier[batch_size] = literal[int] ,
identifier[download] = keyword[True] ,
identifier[as_supervised] = keyword[False] ,
identifier[with_info] = keyword[False] ,
identifier[builder_kwargs] = keyword[None] ,
identifier[download_and_prepare_kwargs] = keyword[None] ,
identifier[as_dataset_kwargs] = keyword[None] ,
identifier[try_gcs] = keyword[False] ):
literal[string]
identifier[name] , identifier[name_builder_kwargs] = identifier[_dataset_name_and_kwargs_from_name_str] ( identifier[name] )
identifier[name_builder_kwargs] . identifier[update] ( identifier[builder_kwargs] keyword[or] {})
identifier[builder_kwargs] = identifier[name_builder_kwargs]
keyword[if] identifier[try_gcs] keyword[and] identifier[gcs_utils] . identifier[is_dataset_on_gcs] ( identifier[name] ):
identifier[data_dir] = identifier[constants] . identifier[GCS_DATA_DIR]
keyword[elif] identifier[data_dir] keyword[is] keyword[None] :
identifier[data_dir] = identifier[constants] . identifier[DATA_DIR]
identifier[dbuilder] = identifier[builder] ( identifier[name] , identifier[data_dir] = identifier[data_dir] ,** identifier[builder_kwargs] )
keyword[if] identifier[download] :
identifier[download_and_prepare_kwargs] = identifier[download_and_prepare_kwargs] keyword[or] {}
identifier[dbuilder] . identifier[download_and_prepare] (** identifier[download_and_prepare_kwargs] )
keyword[if] identifier[as_dataset_kwargs] keyword[is] keyword[None] :
identifier[as_dataset_kwargs] ={}
identifier[as_dataset_kwargs] = identifier[dict] ( identifier[as_dataset_kwargs] )
identifier[as_dataset_kwargs] [ literal[string] ]= identifier[split]
identifier[as_dataset_kwargs] [ literal[string] ]= identifier[as_supervised]
identifier[as_dataset_kwargs] [ literal[string] ]= identifier[batch_size]
identifier[ds] = identifier[dbuilder] . identifier[as_dataset] (** identifier[as_dataset_kwargs] )
keyword[if] identifier[with_info] :
keyword[return] identifier[ds] , identifier[dbuilder] . identifier[info]
keyword[return] identifier[ds] | def load(name, split=None, data_dir=None, batch_size=1, download=True, as_supervised=False, with_info=False, builder_kwargs=None, download_and_prepare_kwargs=None, as_dataset_kwargs=None, try_gcs=False):
"""Loads the named dataset into a `tf.data.Dataset`.
If `split=None` (the default), returns all splits for the dataset. Otherwise,
returns the specified split.
`load` is a convenience method that fetches the `tfds.core.DatasetBuilder` by
string name, optionally calls `DatasetBuilder.download_and_prepare`
(if `download=True`), and then calls `DatasetBuilder.as_dataset`.
This is roughly equivalent to:
```
builder = tfds.builder(name, data_dir=data_dir, **builder_kwargs)
if download:
builder.download_and_prepare(**download_and_prepare_kwargs)
ds = builder.as_dataset(
split=split, as_supervised=as_supervised, **as_dataset_kwargs)
if with_info:
return ds, builder.info
return ds
```
If you'd like NumPy arrays instead of `tf.data.Dataset`s or `tf.Tensor`s,
you can pass the return value to `tfds.as_numpy`.
Callers must pass arguments as keyword arguments.
**Warning**: calling this function might potentially trigger the download
of hundreds of GiB to disk. Refer to the `download` argument.
Args:
name: `str`, the registered name of the `DatasetBuilder` (the snake case
version of the class name). This can be either `"dataset_name"` or
`"dataset_name/config_name"` for datasets with `BuilderConfig`s.
As a convenience, this string may contain comma-separated keyword
arguments for the builder. For example `"foo_bar/a=True,b=3"` would use
the `FooBar` dataset passing the keyword arguments `a=True` and `b=3`
(for builders with configs, it would be `"foo_bar/zoo/a=True,b=3"` to
use the `"zoo"` config and pass to the builder keyword arguments `a=True`
and `b=3`).
split: `tfds.Split` or `str`, which split of the data to load. If None,
will return a `dict` with all splits (typically `tfds.Split.TRAIN` and
`tfds.Split.TEST`).
data_dir: `str` (optional), directory to read/write data.
Defaults to "~/tensorflow_datasets".
batch_size: `int`, set to > 1 to get batches of examples. Note that
variable length features will be 0-padded. If
`batch_size=-1`, will return the full dataset as `tf.Tensor`s.
download: `bool` (optional), whether to call
`tfds.core.DatasetBuilder.download_and_prepare`
before calling `tf.DatasetBuilder.as_dataset`. If `False`, data is
expected to be in `data_dir`. If `True` and the data is already in
`data_dir`, `download_and_prepare` is a no-op.
as_supervised: `bool`, if `True`, the returned `tf.data.Dataset`
will have a 2-tuple structure `(input, label)` according to
`builder.info.supervised_keys`. If `False`, the default,
the returned `tf.data.Dataset` will have a dictionary with all the
features.
with_info: `bool`, if True, tfds.load will return the tuple
(tf.data.Dataset, tfds.core.DatasetInfo) containing the info associated
with the builder.
builder_kwargs: `dict` (optional), keyword arguments to be passed to the
`tfds.core.DatasetBuilder` constructor. `data_dir` will be passed
through by default.
download_and_prepare_kwargs: `dict` (optional) keyword arguments passed to
`tfds.core.DatasetBuilder.download_and_prepare` if `download=True`. Allow
to control where to download and extract the cached data. If not set,
cache_dir and manual_dir will automatically be deduced from data_dir.
as_dataset_kwargs: `dict` (optional), keyword arguments passed to
`tfds.core.DatasetBuilder.as_dataset`. `split` will be passed through by
default. Example: `{'shuffle_files': True}`.
Note that shuffle_files is False by default unless
`split == tfds.Split.TRAIN`.
try_gcs: `bool`, if True, tfds.load will see if the dataset exists on
the public GCS bucket before building it locally.
Returns:
ds: `tf.data.Dataset`, the dataset requested, or if `split` is None, a
`dict<key: tfds.Split, value: tfds.data.Dataset>`. If `batch_size=-1`,
these will be full datasets as `tf.Tensor`s.
ds_info: `tfds.core.DatasetInfo`, if `with_info` is True, then `tfds.load`
will return a tuple `(ds, ds_info)` containing dataset information
(version, features, splits, num_examples,...). Note that the `ds_info`
object documents the entire dataset, regardless of the `split` requested.
Split-specific information is available in `ds_info.splits`.
"""
(name, name_builder_kwargs) = _dataset_name_and_kwargs_from_name_str(name)
name_builder_kwargs.update(builder_kwargs or {})
builder_kwargs = name_builder_kwargs
# Set data_dir
if try_gcs and gcs_utils.is_dataset_on_gcs(name):
data_dir = constants.GCS_DATA_DIR # depends on [control=['if'], data=[]]
elif data_dir is None:
data_dir = constants.DATA_DIR # depends on [control=['if'], data=['data_dir']]
dbuilder = builder(name, data_dir=data_dir, **builder_kwargs)
if download:
download_and_prepare_kwargs = download_and_prepare_kwargs or {}
dbuilder.download_and_prepare(**download_and_prepare_kwargs) # depends on [control=['if'], data=[]]
if as_dataset_kwargs is None:
as_dataset_kwargs = {} # depends on [control=['if'], data=['as_dataset_kwargs']]
as_dataset_kwargs = dict(as_dataset_kwargs)
as_dataset_kwargs['split'] = split
as_dataset_kwargs['as_supervised'] = as_supervised
as_dataset_kwargs['batch_size'] = batch_size
ds = dbuilder.as_dataset(**as_dataset_kwargs)
if with_info:
return (ds, dbuilder.info) # depends on [control=['if'], data=[]]
return ds |
def validate(self, raw_data, **kwargs):
"""The string ``'True'`` (case insensitive) will be converted
to ``True``, as will any positive integers.
"""
super(BooleanField, self).validate(raw_data, **kwargs)
if isinstance(raw_data, string_types):
valid_data = raw_data.strip().lower() == 'true'
elif isinstance(raw_data, bool):
valid_data = raw_data
else:
valid_data = raw_data > 0
return valid_data | def function[validate, parameter[self, raw_data]]:
constant[The string ``'True'`` (case insensitive) will be converted
to ``True``, as will any positive integers.
]
call[call[name[super], parameter[name[BooleanField], name[self]]].validate, parameter[name[raw_data]]]
if call[name[isinstance], parameter[name[raw_data], name[string_types]]] begin[:]
variable[valid_data] assign[=] compare[call[call[name[raw_data].strip, parameter[]].lower, parameter[]] equal[==] constant[true]]
return[name[valid_data]] | keyword[def] identifier[validate] ( identifier[self] , identifier[raw_data] ,** identifier[kwargs] ):
literal[string]
identifier[super] ( identifier[BooleanField] , identifier[self] ). identifier[validate] ( identifier[raw_data] ,** identifier[kwargs] )
keyword[if] identifier[isinstance] ( identifier[raw_data] , identifier[string_types] ):
identifier[valid_data] = identifier[raw_data] . identifier[strip] (). identifier[lower] ()== literal[string]
keyword[elif] identifier[isinstance] ( identifier[raw_data] , identifier[bool] ):
identifier[valid_data] = identifier[raw_data]
keyword[else] :
identifier[valid_data] = identifier[raw_data] > literal[int]
keyword[return] identifier[valid_data] | def validate(self, raw_data, **kwargs):
"""The string ``'True'`` (case insensitive) will be converted
to ``True``, as will any positive integers.
"""
super(BooleanField, self).validate(raw_data, **kwargs)
if isinstance(raw_data, string_types):
valid_data = raw_data.strip().lower() == 'true' # depends on [control=['if'], data=[]]
elif isinstance(raw_data, bool):
valid_data = raw_data # depends on [control=['if'], data=[]]
else:
valid_data = raw_data > 0
return valid_data |
def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction | def function[direction_to_tile, parameter[from_tile_id, to_tile_id]]:
constant[
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
]
variable[coord_from] assign[=] call[name[tile_id_to_coord], parameter[name[from_tile_id]]]
variable[coord_to] assign[=] call[name[tile_id_to_coord], parameter[name[to_tile_id]]]
variable[direction] assign[=] call[name[tile_tile_offset_to_direction], parameter[binary_operation[name[coord_to] - name[coord_from]]]]
return[name[direction]] | keyword[def] identifier[direction_to_tile] ( identifier[from_tile_id] , identifier[to_tile_id] ):
literal[string]
identifier[coord_from] = identifier[tile_id_to_coord] ( identifier[from_tile_id] )
identifier[coord_to] = identifier[tile_id_to_coord] ( identifier[to_tile_id] )
identifier[direction] = identifier[tile_tile_offset_to_direction] ( identifier[coord_to] - identifier[coord_from] )
keyword[return] identifier[direction] | def direction_to_tile(from_tile_id, to_tile_id):
"""
Convenience method wrapping tile_tile_offset_to_direction. Used to get the direction
of the offset between two tiles. The tiles must be adjacent.
:param from_tile_id: tile identifier, int
:param to_tile_id: tile identifier, int
:return: direction from from_tile to to_tile, str
"""
coord_from = tile_id_to_coord(from_tile_id)
coord_to = tile_id_to_coord(to_tile_id)
direction = tile_tile_offset_to_direction(coord_to - coord_from)
# logging.debug('Tile direction: {}->{} is {}'.format(
# from_tile.tile_id,
# to_tile.tile_id,
# direction
# ))
return direction |
def dfa_complementation(dfa: dict) -> dict:
""" Returns a DFA that accepts any word but he ones accepted
by the input DFA.
Let A be a completed DFA, :math:`Ā = (Σ, S, s_0 , ρ, S − F )`
is the DFA that runs A but accepts whatever word A does not.
:param dict dfa: input DFA.
:return: *(dict)* representing the complement of the input DFA.
"""
dfa_complement = dfa_completion(deepcopy(dfa))
dfa_complement['accepting_states'] = \
dfa_complement['states'].difference(dfa_complement['accepting_states'])
return dfa_complement | def function[dfa_complementation, parameter[dfa]]:
constant[ Returns a DFA that accepts any word but he ones accepted
by the input DFA.
Let A be a completed DFA, :math:`Ā = (Σ, S, s_0 , ρ, S − F )`
is the DFA that runs A but accepts whatever word A does not.
:param dict dfa: input DFA.
:return: *(dict)* representing the complement of the input DFA.
]
variable[dfa_complement] assign[=] call[name[dfa_completion], parameter[call[name[deepcopy], parameter[name[dfa]]]]]
call[name[dfa_complement]][constant[accepting_states]] assign[=] call[call[name[dfa_complement]][constant[states]].difference, parameter[call[name[dfa_complement]][constant[accepting_states]]]]
return[name[dfa_complement]] | keyword[def] identifier[dfa_complementation] ( identifier[dfa] : identifier[dict] )-> identifier[dict] :
literal[string]
identifier[dfa_complement] = identifier[dfa_completion] ( identifier[deepcopy] ( identifier[dfa] ))
identifier[dfa_complement] [ literal[string] ]= identifier[dfa_complement] [ literal[string] ]. identifier[difference] ( identifier[dfa_complement] [ literal[string] ])
keyword[return] identifier[dfa_complement] | def dfa_complementation(dfa: dict) -> dict:
""" Returns a DFA that accepts any word but he ones accepted
by the input DFA.
Let A be a completed DFA, :math:`Ā = (Σ, S, s_0 , ρ, S − F )`
is the DFA that runs A but accepts whatever word A does not.
:param dict dfa: input DFA.
:return: *(dict)* representing the complement of the input DFA.
"""
dfa_complement = dfa_completion(deepcopy(dfa))
dfa_complement['accepting_states'] = dfa_complement['states'].difference(dfa_complement['accepting_states'])
return dfa_complement |
def _parse_endofnames(client, command, actor, args):
"""Parse an ENDOFNAMES and dispatch a NAMES event for the channel."""
args = args.split(" :", 1)[0] # Strip off human-readable message
_, _, channel = args.rpartition(' ')
channel = client.server.get_channel(channel) or channel.lower()
client.dispatch_event('MEMBERS', channel) | def function[_parse_endofnames, parameter[client, command, actor, args]]:
constant[Parse an ENDOFNAMES and dispatch a NAMES event for the channel.]
variable[args] assign[=] call[call[name[args].split, parameter[constant[ :], constant[1]]]][constant[0]]
<ast.Tuple object at 0x7da20c795480> assign[=] call[name[args].rpartition, parameter[constant[ ]]]
variable[channel] assign[=] <ast.BoolOp object at 0x7da20c796320>
call[name[client].dispatch_event, parameter[constant[MEMBERS], name[channel]]] | keyword[def] identifier[_parse_endofnames] ( identifier[client] , identifier[command] , identifier[actor] , identifier[args] ):
literal[string]
identifier[args] = identifier[args] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]
identifier[_] , identifier[_] , identifier[channel] = identifier[args] . identifier[rpartition] ( literal[string] )
identifier[channel] = identifier[client] . identifier[server] . identifier[get_channel] ( identifier[channel] ) keyword[or] identifier[channel] . identifier[lower] ()
identifier[client] . identifier[dispatch_event] ( literal[string] , identifier[channel] ) | def _parse_endofnames(client, command, actor, args):
"""Parse an ENDOFNAMES and dispatch a NAMES event for the channel."""
args = args.split(' :', 1)[0] # Strip off human-readable message
(_, _, channel) = args.rpartition(' ')
channel = client.server.get_channel(channel) or channel.lower()
client.dispatch_event('MEMBERS', channel) |
def create(self, datapath, tracker_urls, comment=None, root_name=None,
created_by=None, private=False, no_date=False, progress=None,
callback=None):
""" Create a metafile with the path given on object creation.
Returns the last metafile dict that was written (as an object, not bencoded).
"""
if datapath:
self.datapath = datapath
try:
tracker_urls = ['' + tracker_urls]
except TypeError:
tracker_urls = list(tracker_urls)
multi_mode = len(tracker_urls) > 1
# TODO add optimization so the hashing happens only once for multiple URLs!
for tracker_url in tracker_urls:
# Lookup announce URLs from config file
try:
if urlparse.urlparse(tracker_url).scheme:
tracker_alias = urlparse.urlparse(tracker_url).netloc.split(':')[0].split('.')
tracker_alias = tracker_alias[-2 if len(tracker_alias) > 1 else 0]
else:
tracker_alias, tracker_url = config.lookup_announce_alias(tracker_url)
tracker_url = tracker_url[0]
except (KeyError, IndexError):
raise error.UserError("Bad tracker URL %r, or unknown alias!" % (tracker_url,))
# Determine metafile name
output_name = self.filename
if multi_mode:
# Add 2nd level of announce URL domain to metafile name
output_name = list(os.path.splitext(output_name))
try:
output_name[1:1] = '-' + tracker_alias
except (IndexError,):
self.LOG.error("Malformed announce URL %r, skipping!" % (tracker_url,))
continue
output_name = ''.join(output_name)
# Hash the data
self.LOG.info("Creating %r for %s %r..." % (
output_name, "filenames read from" if self._fifo else "data in", self.datapath,
))
meta, _ = self._make_meta(tracker_url, root_name, private, progress)
# Add optional fields
if comment:
meta["comment"] = comment
if created_by:
meta["created by"] = created_by
if not no_date:
meta["creation date"] = int(time.time())
if callback:
callback(meta)
# Write metafile to disk
self.LOG.debug("Writing %r..." % (output_name,))
bencode.bwrite(output_name, meta)
return meta | def function[create, parameter[self, datapath, tracker_urls, comment, root_name, created_by, private, no_date, progress, callback]]:
constant[ Create a metafile with the path given on object creation.
Returns the last metafile dict that was written (as an object, not bencoded).
]
if name[datapath] begin[:]
name[self].datapath assign[=] name[datapath]
<ast.Try object at 0x7da1b13878b0>
variable[multi_mode] assign[=] compare[call[name[len], parameter[name[tracker_urls]]] greater[>] constant[1]]
for taget[name[tracker_url]] in starred[name[tracker_urls]] begin[:]
<ast.Try object at 0x7da1b1387d30>
variable[output_name] assign[=] name[self].filename
if name[multi_mode] begin[:]
variable[output_name] assign[=] call[name[list], parameter[call[name[os].path.splitext, parameter[name[output_name]]]]]
<ast.Try object at 0x7da1b1386350>
variable[output_name] assign[=] call[constant[].join, parameter[name[output_name]]]
call[name[self].LOG.info, parameter[binary_operation[constant[Creating %r for %s %r...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1385c00>, <ast.IfExp object at 0x7da1b1385bd0>, <ast.Attribute object at 0x7da1b1385ae0>]]]]]
<ast.Tuple object at 0x7da1b1385a20> assign[=] call[name[self]._make_meta, parameter[name[tracker_url], name[root_name], name[private], name[progress]]]
if name[comment] begin[:]
call[name[meta]][constant[comment]] assign[=] name[comment]
if name[created_by] begin[:]
call[name[meta]][constant[created by]] assign[=] name[created_by]
if <ast.UnaryOp object at 0x7da1b1385570> begin[:]
call[name[meta]][constant[creation date]] assign[=] call[name[int], parameter[call[name[time].time, parameter[]]]]
if name[callback] begin[:]
call[name[callback], parameter[name[meta]]]
call[name[self].LOG.debug, parameter[binary_operation[constant[Writing %r...] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1385330>]]]]]
call[name[bencode].bwrite, parameter[name[output_name], name[meta]]]
return[name[meta]] | keyword[def] identifier[create] ( identifier[self] , identifier[datapath] , identifier[tracker_urls] , identifier[comment] = keyword[None] , identifier[root_name] = keyword[None] ,
identifier[created_by] = keyword[None] , identifier[private] = keyword[False] , identifier[no_date] = keyword[False] , identifier[progress] = keyword[None] ,
identifier[callback] = keyword[None] ):
literal[string]
keyword[if] identifier[datapath] :
identifier[self] . identifier[datapath] = identifier[datapath]
keyword[try] :
identifier[tracker_urls] =[ literal[string] + identifier[tracker_urls] ]
keyword[except] identifier[TypeError] :
identifier[tracker_urls] = identifier[list] ( identifier[tracker_urls] )
identifier[multi_mode] = identifier[len] ( identifier[tracker_urls] )> literal[int]
keyword[for] identifier[tracker_url] keyword[in] identifier[tracker_urls] :
keyword[try] :
keyword[if] identifier[urlparse] . identifier[urlparse] ( identifier[tracker_url] ). identifier[scheme] :
identifier[tracker_alias] = identifier[urlparse] . identifier[urlparse] ( identifier[tracker_url] ). identifier[netloc] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
identifier[tracker_alias] = identifier[tracker_alias] [- literal[int] keyword[if] identifier[len] ( identifier[tracker_alias] )> literal[int] keyword[else] literal[int] ]
keyword[else] :
identifier[tracker_alias] , identifier[tracker_url] = identifier[config] . identifier[lookup_announce_alias] ( identifier[tracker_url] )
identifier[tracker_url] = identifier[tracker_url] [ literal[int] ]
keyword[except] ( identifier[KeyError] , identifier[IndexError] ):
keyword[raise] identifier[error] . identifier[UserError] ( literal[string] %( identifier[tracker_url] ,))
identifier[output_name] = identifier[self] . identifier[filename]
keyword[if] identifier[multi_mode] :
identifier[output_name] = identifier[list] ( identifier[os] . identifier[path] . identifier[splitext] ( identifier[output_name] ))
keyword[try] :
identifier[output_name] [ literal[int] : literal[int] ]= literal[string] + identifier[tracker_alias]
keyword[except] ( identifier[IndexError] ,):
identifier[self] . identifier[LOG] . identifier[error] ( literal[string] %( identifier[tracker_url] ,))
keyword[continue]
identifier[output_name] = literal[string] . identifier[join] ( identifier[output_name] )
identifier[self] . identifier[LOG] . identifier[info] ( literal[string] %(
identifier[output_name] , literal[string] keyword[if] identifier[self] . identifier[_fifo] keyword[else] literal[string] , identifier[self] . identifier[datapath] ,
))
identifier[meta] , identifier[_] = identifier[self] . identifier[_make_meta] ( identifier[tracker_url] , identifier[root_name] , identifier[private] , identifier[progress] )
keyword[if] identifier[comment] :
identifier[meta] [ literal[string] ]= identifier[comment]
keyword[if] identifier[created_by] :
identifier[meta] [ literal[string] ]= identifier[created_by]
keyword[if] keyword[not] identifier[no_date] :
identifier[meta] [ literal[string] ]= identifier[int] ( identifier[time] . identifier[time] ())
keyword[if] identifier[callback] :
identifier[callback] ( identifier[meta] )
identifier[self] . identifier[LOG] . identifier[debug] ( literal[string] %( identifier[output_name] ,))
identifier[bencode] . identifier[bwrite] ( identifier[output_name] , identifier[meta] )
keyword[return] identifier[meta] | def create(self, datapath, tracker_urls, comment=None, root_name=None, created_by=None, private=False, no_date=False, progress=None, callback=None):
""" Create a metafile with the path given on object creation.
Returns the last metafile dict that was written (as an object, not bencoded).
"""
if datapath:
self.datapath = datapath # depends on [control=['if'], data=[]]
try:
tracker_urls = ['' + tracker_urls] # depends on [control=['try'], data=[]]
except TypeError:
tracker_urls = list(tracker_urls) # depends on [control=['except'], data=[]]
multi_mode = len(tracker_urls) > 1
# TODO add optimization so the hashing happens only once for multiple URLs!
for tracker_url in tracker_urls:
# Lookup announce URLs from config file
try:
if urlparse.urlparse(tracker_url).scheme:
tracker_alias = urlparse.urlparse(tracker_url).netloc.split(':')[0].split('.')
tracker_alias = tracker_alias[-2 if len(tracker_alias) > 1 else 0] # depends on [control=['if'], data=[]]
else:
(tracker_alias, tracker_url) = config.lookup_announce_alias(tracker_url)
tracker_url = tracker_url[0] # depends on [control=['try'], data=[]]
except (KeyError, IndexError):
raise error.UserError('Bad tracker URL %r, or unknown alias!' % (tracker_url,)) # depends on [control=['except'], data=[]]
# Determine metafile name
output_name = self.filename
if multi_mode:
# Add 2nd level of announce URL domain to metafile name
output_name = list(os.path.splitext(output_name))
try:
output_name[1:1] = '-' + tracker_alias # depends on [control=['try'], data=[]]
except (IndexError,):
self.LOG.error('Malformed announce URL %r, skipping!' % (tracker_url,))
continue # depends on [control=['except'], data=[]]
output_name = ''.join(output_name) # depends on [control=['if'], data=[]]
# Hash the data
self.LOG.info('Creating %r for %s %r...' % (output_name, 'filenames read from' if self._fifo else 'data in', self.datapath))
(meta, _) = self._make_meta(tracker_url, root_name, private, progress)
# Add optional fields
if comment:
meta['comment'] = comment # depends on [control=['if'], data=[]]
if created_by:
meta['created by'] = created_by # depends on [control=['if'], data=[]]
if not no_date:
meta['creation date'] = int(time.time()) # depends on [control=['if'], data=[]]
if callback:
callback(meta) # depends on [control=['if'], data=[]]
# Write metafile to disk
self.LOG.debug('Writing %r...' % (output_name,))
bencode.bwrite(output_name, meta) # depends on [control=['for'], data=['tracker_url']]
return meta |
async def send_chat_action(self, chat_id: typing.Union[base.Integer, base.String],
action: base.String) -> base.Boolean:
"""
Use this method when you need to tell the user that something is happening on the bot's side.
The status is set for 5 seconds or less
(when a message arrives from your bot, Telegram clients clear its typing status).
We only recommend using this method when a response from the bot will take
a noticeable amount of time to arrive.
Source: https://core.telegram.org/bots/api#sendchataction
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param action: Type of action to broadcast
:type action: :obj:`base.String`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
"""
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_CHAT_ACTION, payload)
return result | <ast.AsyncFunctionDef object at 0x7da1b17aa140> | keyword[async] keyword[def] identifier[send_chat_action] ( identifier[self] , identifier[chat_id] : identifier[typing] . identifier[Union] [ identifier[base] . identifier[Integer] , identifier[base] . identifier[String] ],
identifier[action] : identifier[base] . identifier[String] )-> identifier[base] . identifier[Boolean] :
literal[string]
identifier[payload] = identifier[generate_payload] (** identifier[locals] ())
identifier[result] = keyword[await] identifier[self] . identifier[request] ( identifier[api] . identifier[Methods] . identifier[SEND_CHAT_ACTION] , identifier[payload] )
keyword[return] identifier[result] | async def send_chat_action(self, chat_id: typing.Union[base.Integer, base.String], action: base.String) -> base.Boolean:
"""
Use this method when you need to tell the user that something is happening on the bot's side.
The status is set for 5 seconds or less
(when a message arrives from your bot, Telegram clients clear its typing status).
We only recommend using this method when a response from the bot will take
a noticeable amount of time to arrive.
Source: https://core.telegram.org/bots/api#sendchataction
:param chat_id: Unique identifier for the target chat or username of the target channel
:type chat_id: :obj:`typing.Union[base.Integer, base.String]`
:param action: Type of action to broadcast
:type action: :obj:`base.String`
:return: Returns True on success
:rtype: :obj:`base.Boolean`
"""
payload = generate_payload(**locals())
result = await self.request(api.Methods.SEND_CHAT_ACTION, payload)
return result |
def time_delays(self, kwargs_lens, kwargs_ps, kappa_ext=0):
"""
predicts the time delays of the image positions
:param kwargs_lens: lens model parameters
:param kwargs_ps: point source parameters
:param kappa_ext: external convergence (optional)
:return: time delays at image positions for the fixed cosmology
"""
fermat_pot = self.lens_analysis.fermat_potential(kwargs_lens, kwargs_ps)
time_delay = self.lensCosmo.time_delay_units(fermat_pot, kappa_ext)
return time_delay | def function[time_delays, parameter[self, kwargs_lens, kwargs_ps, kappa_ext]]:
constant[
predicts the time delays of the image positions
:param kwargs_lens: lens model parameters
:param kwargs_ps: point source parameters
:param kappa_ext: external convergence (optional)
:return: time delays at image positions for the fixed cosmology
]
variable[fermat_pot] assign[=] call[name[self].lens_analysis.fermat_potential, parameter[name[kwargs_lens], name[kwargs_ps]]]
variable[time_delay] assign[=] call[name[self].lensCosmo.time_delay_units, parameter[name[fermat_pot], name[kappa_ext]]]
return[name[time_delay]] | keyword[def] identifier[time_delays] ( identifier[self] , identifier[kwargs_lens] , identifier[kwargs_ps] , identifier[kappa_ext] = literal[int] ):
literal[string]
identifier[fermat_pot] = identifier[self] . identifier[lens_analysis] . identifier[fermat_potential] ( identifier[kwargs_lens] , identifier[kwargs_ps] )
identifier[time_delay] = identifier[self] . identifier[lensCosmo] . identifier[time_delay_units] ( identifier[fermat_pot] , identifier[kappa_ext] )
keyword[return] identifier[time_delay] | def time_delays(self, kwargs_lens, kwargs_ps, kappa_ext=0):
"""
predicts the time delays of the image positions
:param kwargs_lens: lens model parameters
:param kwargs_ps: point source parameters
:param kappa_ext: external convergence (optional)
:return: time delays at image positions for the fixed cosmology
"""
fermat_pot = self.lens_analysis.fermat_potential(kwargs_lens, kwargs_ps)
time_delay = self.lensCosmo.time_delay_units(fermat_pot, kappa_ext)
return time_delay |
def printcolour(text, sameline=False, colour=get_colour("ENDC")):
"""
Print color text using escape codes
"""
if sameline:
sep = ''
else:
sep = '\n'
sys.stdout.write(get_colour(colour) + text + bcolours["ENDC"] + sep) | def function[printcolour, parameter[text, sameline, colour]]:
constant[
Print color text using escape codes
]
if name[sameline] begin[:]
variable[sep] assign[=] constant[]
call[name[sys].stdout.write, parameter[binary_operation[binary_operation[binary_operation[call[name[get_colour], parameter[name[colour]]] + name[text]] + call[name[bcolours]][constant[ENDC]]] + name[sep]]]] | keyword[def] identifier[printcolour] ( identifier[text] , identifier[sameline] = keyword[False] , identifier[colour] = identifier[get_colour] ( literal[string] )):
literal[string]
keyword[if] identifier[sameline] :
identifier[sep] = literal[string]
keyword[else] :
identifier[sep] = literal[string]
identifier[sys] . identifier[stdout] . identifier[write] ( identifier[get_colour] ( identifier[colour] )+ identifier[text] + identifier[bcolours] [ literal[string] ]+ identifier[sep] ) | def printcolour(text, sameline=False, colour=get_colour('ENDC')):
"""
Print color text using escape codes
"""
if sameline:
sep = '' # depends on [control=['if'], data=[]]
else:
sep = '\n'
sys.stdout.write(get_colour(colour) + text + bcolours['ENDC'] + sep) |
def busca_cep_correios(cep):
''' Pesquisa o CEP informado no site dos correios '''
if cep is None or not isinstance(cep, str):
raise AttributeError("O CEP deve ser do tipo string!")
elif not cep.isdigit() or len(cep) != 8:
raise AttributeError("O CEP deve conter apenas 8 dígitos!")
url = 'http://www.buscacep.correios.com.br/sistemas/buscacep/resultadoBuscaCepEndereco.cfm'
payload = {'relaxation': cep, 'tipoCEP': 'ALL', 'semelhante': 'N'}
resp = requests.post(url, data=payload)
if resp.status_code != 200:
raise Exception("Erro acessando site dos correios!", resp.status_code)
from bs4 import BeautifulSoup
soup = BeautifulSoup(resp.text, "html.parser")
value_cells = soup.find('table', attrs={'class': 'tmptabela'})
values = list(value_cells.findAll('tr'))
texto_clean = []
for value in values[1].findAll('td'):
texto_clean.append(value.get_text().strip())
logradouro = Logradouro(texto_clean)
return logradouro | def function[busca_cep_correios, parameter[cep]]:
constant[ Pesquisa o CEP informado no site dos correios ]
if <ast.BoolOp object at 0x7da1b15a38e0> begin[:]
<ast.Raise object at 0x7da1b15a20b0>
variable[url] assign[=] constant[http://www.buscacep.correios.com.br/sistemas/buscacep/resultadoBuscaCepEndereco.cfm]
variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da20c992020>, <ast.Constant object at 0x7da20c992d70>, <ast.Constant object at 0x7da20c992980>], [<ast.Name object at 0x7da20c993f40>, <ast.Constant object at 0x7da20c9916c0>, <ast.Constant object at 0x7da20c9908b0>]]
variable[resp] assign[=] call[name[requests].post, parameter[name[url]]]
if compare[name[resp].status_code not_equal[!=] constant[200]] begin[:]
<ast.Raise object at 0x7da20c991780>
from relative_module[bs4] import module[BeautifulSoup]
variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[resp].text, constant[html.parser]]]
variable[value_cells] assign[=] call[name[soup].find, parameter[constant[table]]]
variable[values] assign[=] call[name[list], parameter[call[name[value_cells].findAll, parameter[constant[tr]]]]]
variable[texto_clean] assign[=] list[[]]
for taget[name[value]] in starred[call[call[name[values]][constant[1]].findAll, parameter[constant[td]]]] begin[:]
call[name[texto_clean].append, parameter[call[call[name[value].get_text, parameter[]].strip, parameter[]]]]
variable[logradouro] assign[=] call[name[Logradouro], parameter[name[texto_clean]]]
return[name[logradouro]] | keyword[def] identifier[busca_cep_correios] ( identifier[cep] ):
literal[string]
keyword[if] identifier[cep] keyword[is] keyword[None] keyword[or] keyword[not] identifier[isinstance] ( identifier[cep] , identifier[str] ):
keyword[raise] identifier[AttributeError] ( literal[string] )
keyword[elif] keyword[not] identifier[cep] . identifier[isdigit] () keyword[or] identifier[len] ( identifier[cep] )!= literal[int] :
keyword[raise] identifier[AttributeError] ( literal[string] )
identifier[url] = literal[string]
identifier[payload] ={ literal[string] : identifier[cep] , literal[string] : literal[string] , literal[string] : literal[string] }
identifier[resp] = identifier[requests] . identifier[post] ( identifier[url] , identifier[data] = identifier[payload] )
keyword[if] identifier[resp] . identifier[status_code] != literal[int] :
keyword[raise] identifier[Exception] ( literal[string] , identifier[resp] . identifier[status_code] )
keyword[from] identifier[bs4] keyword[import] identifier[BeautifulSoup]
identifier[soup] = identifier[BeautifulSoup] ( identifier[resp] . identifier[text] , literal[string] )
identifier[value_cells] = identifier[soup] . identifier[find] ( literal[string] , identifier[attrs] ={ literal[string] : literal[string] })
identifier[values] = identifier[list] ( identifier[value_cells] . identifier[findAll] ( literal[string] ))
identifier[texto_clean] =[]
keyword[for] identifier[value] keyword[in] identifier[values] [ literal[int] ]. identifier[findAll] ( literal[string] ):
identifier[texto_clean] . identifier[append] ( identifier[value] . identifier[get_text] (). identifier[strip] ())
identifier[logradouro] = identifier[Logradouro] ( identifier[texto_clean] )
keyword[return] identifier[logradouro] | def busca_cep_correios(cep):
""" Pesquisa o CEP informado no site dos correios """
if cep is None or not isinstance(cep, str):
raise AttributeError('O CEP deve ser do tipo string!') # depends on [control=['if'], data=[]]
elif not cep.isdigit() or len(cep) != 8:
raise AttributeError('O CEP deve conter apenas 8 dígitos!') # depends on [control=['if'], data=[]]
url = 'http://www.buscacep.correios.com.br/sistemas/buscacep/resultadoBuscaCepEndereco.cfm'
payload = {'relaxation': cep, 'tipoCEP': 'ALL', 'semelhante': 'N'}
resp = requests.post(url, data=payload)
if resp.status_code != 200:
raise Exception('Erro acessando site dos correios!', resp.status_code) # depends on [control=['if'], data=[]]
from bs4 import BeautifulSoup
soup = BeautifulSoup(resp.text, 'html.parser')
value_cells = soup.find('table', attrs={'class': 'tmptabela'})
values = list(value_cells.findAll('tr'))
texto_clean = []
for value in values[1].findAll('td'):
texto_clean.append(value.get_text().strip()) # depends on [control=['for'], data=['value']]
logradouro = Logradouro(texto_clean)
return logradouro |
def addFeatureSet(self):
"""
Adds a new feature set into this repo
"""
self._openRepo()
dataset = self._repo.getDatasetByName(self._args.datasetName)
filePath = self._getFilePath(self._args.filePath,
self._args.relativePath)
name = getNameFromPath(self._args.filePath)
featureSet = sequence_annotations.Gff3DbFeatureSet(
dataset, name)
referenceSetName = self._args.referenceSetName
if referenceSetName is None:
raise exceptions.RepoManagerException(
"A reference set name must be provided")
referenceSet = self._repo.getReferenceSetByName(referenceSetName)
featureSet.setReferenceSet(referenceSet)
ontologyName = self._args.ontologyName
if ontologyName is None:
raise exceptions.RepoManagerException(
"A sequence ontology name must be provided")
ontology = self._repo.getOntologyByName(ontologyName)
self._checkSequenceOntology(ontology)
featureSet.setOntology(ontology)
featureSet.populateFromFile(filePath)
featureSet.setAttributes(json.loads(self._args.attributes))
self._updateRepo(self._repo.insertFeatureSet, featureSet) | def function[addFeatureSet, parameter[self]]:
constant[
Adds a new feature set into this repo
]
call[name[self]._openRepo, parameter[]]
variable[dataset] assign[=] call[name[self]._repo.getDatasetByName, parameter[name[self]._args.datasetName]]
variable[filePath] assign[=] call[name[self]._getFilePath, parameter[name[self]._args.filePath, name[self]._args.relativePath]]
variable[name] assign[=] call[name[getNameFromPath], parameter[name[self]._args.filePath]]
variable[featureSet] assign[=] call[name[sequence_annotations].Gff3DbFeatureSet, parameter[name[dataset], name[name]]]
variable[referenceSetName] assign[=] name[self]._args.referenceSetName
if compare[name[referenceSetName] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b26aceb0>
variable[referenceSet] assign[=] call[name[self]._repo.getReferenceSetByName, parameter[name[referenceSetName]]]
call[name[featureSet].setReferenceSet, parameter[name[referenceSet]]]
variable[ontologyName] assign[=] name[self]._args.ontologyName
if compare[name[ontologyName] is constant[None]] begin[:]
<ast.Raise object at 0x7da18ede7f10>
variable[ontology] assign[=] call[name[self]._repo.getOntologyByName, parameter[name[ontologyName]]]
call[name[self]._checkSequenceOntology, parameter[name[ontology]]]
call[name[featureSet].setOntology, parameter[name[ontology]]]
call[name[featureSet].populateFromFile, parameter[name[filePath]]]
call[name[featureSet].setAttributes, parameter[call[name[json].loads, parameter[name[self]._args.attributes]]]]
call[name[self]._updateRepo, parameter[name[self]._repo.insertFeatureSet, name[featureSet]]] | keyword[def] identifier[addFeatureSet] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_openRepo] ()
identifier[dataset] = identifier[self] . identifier[_repo] . identifier[getDatasetByName] ( identifier[self] . identifier[_args] . identifier[datasetName] )
identifier[filePath] = identifier[self] . identifier[_getFilePath] ( identifier[self] . identifier[_args] . identifier[filePath] ,
identifier[self] . identifier[_args] . identifier[relativePath] )
identifier[name] = identifier[getNameFromPath] ( identifier[self] . identifier[_args] . identifier[filePath] )
identifier[featureSet] = identifier[sequence_annotations] . identifier[Gff3DbFeatureSet] (
identifier[dataset] , identifier[name] )
identifier[referenceSetName] = identifier[self] . identifier[_args] . identifier[referenceSetName]
keyword[if] identifier[referenceSetName] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[RepoManagerException] (
literal[string] )
identifier[referenceSet] = identifier[self] . identifier[_repo] . identifier[getReferenceSetByName] ( identifier[referenceSetName] )
identifier[featureSet] . identifier[setReferenceSet] ( identifier[referenceSet] )
identifier[ontologyName] = identifier[self] . identifier[_args] . identifier[ontologyName]
keyword[if] identifier[ontologyName] keyword[is] keyword[None] :
keyword[raise] identifier[exceptions] . identifier[RepoManagerException] (
literal[string] )
identifier[ontology] = identifier[self] . identifier[_repo] . identifier[getOntologyByName] ( identifier[ontologyName] )
identifier[self] . identifier[_checkSequenceOntology] ( identifier[ontology] )
identifier[featureSet] . identifier[setOntology] ( identifier[ontology] )
identifier[featureSet] . identifier[populateFromFile] ( identifier[filePath] )
identifier[featureSet] . identifier[setAttributes] ( identifier[json] . identifier[loads] ( identifier[self] . identifier[_args] . identifier[attributes] ))
identifier[self] . identifier[_updateRepo] ( identifier[self] . identifier[_repo] . identifier[insertFeatureSet] , identifier[featureSet] ) | def addFeatureSet(self):
"""
Adds a new feature set into this repo
"""
self._openRepo()
dataset = self._repo.getDatasetByName(self._args.datasetName)
filePath = self._getFilePath(self._args.filePath, self._args.relativePath)
name = getNameFromPath(self._args.filePath)
featureSet = sequence_annotations.Gff3DbFeatureSet(dataset, name)
referenceSetName = self._args.referenceSetName
if referenceSetName is None:
raise exceptions.RepoManagerException('A reference set name must be provided') # depends on [control=['if'], data=[]]
referenceSet = self._repo.getReferenceSetByName(referenceSetName)
featureSet.setReferenceSet(referenceSet)
ontologyName = self._args.ontologyName
if ontologyName is None:
raise exceptions.RepoManagerException('A sequence ontology name must be provided') # depends on [control=['if'], data=[]]
ontology = self._repo.getOntologyByName(ontologyName)
self._checkSequenceOntology(ontology)
featureSet.setOntology(ontology)
featureSet.populateFromFile(filePath)
featureSet.setAttributes(json.loads(self._args.attributes))
self._updateRepo(self._repo.insertFeatureSet, featureSet) |
def list_all_files(self):
""" Utility method that yields all files on the device's file
systems.
"""
def list_files_recursively(directory):
f_gen = itertools.chain(
directory.files,
*tuple(list_files_recursively(d)
for d in directory.directories))
for f in f_gen:
yield f
return list_files_recursively(self.filesystem) | def function[list_all_files, parameter[self]]:
constant[ Utility method that yields all files on the device's file
systems.
]
def function[list_files_recursively, parameter[directory]]:
variable[f_gen] assign[=] call[name[itertools].chain, parameter[name[directory].files, <ast.Starred object at 0x7da1b0403970>]]
for taget[name[f]] in starred[name[f_gen]] begin[:]
<ast.Yield object at 0x7da1b04015a0>
return[call[name[list_files_recursively], parameter[name[self].filesystem]]] | keyword[def] identifier[list_all_files] ( identifier[self] ):
literal[string]
keyword[def] identifier[list_files_recursively] ( identifier[directory] ):
identifier[f_gen] = identifier[itertools] . identifier[chain] (
identifier[directory] . identifier[files] ,
* identifier[tuple] ( identifier[list_files_recursively] ( identifier[d] )
keyword[for] identifier[d] keyword[in] identifier[directory] . identifier[directories] ))
keyword[for] identifier[f] keyword[in] identifier[f_gen] :
keyword[yield] identifier[f]
keyword[return] identifier[list_files_recursively] ( identifier[self] . identifier[filesystem] ) | def list_all_files(self):
""" Utility method that yields all files on the device's file
systems.
"""
def list_files_recursively(directory):
f_gen = itertools.chain(directory.files, *tuple((list_files_recursively(d) for d in directory.directories)))
for f in f_gen:
yield f # depends on [control=['for'], data=['f']]
return list_files_recursively(self.filesystem) |
def detect_response_encoding(response, is_html=False, peek=131072):
'''Return the likely encoding of the response document.
Args:
response (Response): An instance of :class:`.http.Response`.
is_html (bool): See :func:`.util.detect_encoding`.
peek (int): The maximum number of bytes of the document to be analyzed.
Returns:
``str``, ``None``: The codec name.
'''
encoding = get_heading_encoding(response)
encoding = wpull.string.detect_encoding(
wpull.util.peek_file(response.body, peek), encoding=encoding, is_html=is_html
)
_logger.debug(__('Got encoding: {0}', encoding))
return encoding | def function[detect_response_encoding, parameter[response, is_html, peek]]:
constant[Return the likely encoding of the response document.
Args:
response (Response): An instance of :class:`.http.Response`.
is_html (bool): See :func:`.util.detect_encoding`.
peek (int): The maximum number of bytes of the document to be analyzed.
Returns:
``str``, ``None``: The codec name.
]
variable[encoding] assign[=] call[name[get_heading_encoding], parameter[name[response]]]
variable[encoding] assign[=] call[name[wpull].string.detect_encoding, parameter[call[name[wpull].util.peek_file, parameter[name[response].body, name[peek]]]]]
call[name[_logger].debug, parameter[call[name[__], parameter[constant[Got encoding: {0}], name[encoding]]]]]
return[name[encoding]] | keyword[def] identifier[detect_response_encoding] ( identifier[response] , identifier[is_html] = keyword[False] , identifier[peek] = literal[int] ):
literal[string]
identifier[encoding] = identifier[get_heading_encoding] ( identifier[response] )
identifier[encoding] = identifier[wpull] . identifier[string] . identifier[detect_encoding] (
identifier[wpull] . identifier[util] . identifier[peek_file] ( identifier[response] . identifier[body] , identifier[peek] ), identifier[encoding] = identifier[encoding] , identifier[is_html] = identifier[is_html]
)
identifier[_logger] . identifier[debug] ( identifier[__] ( literal[string] , identifier[encoding] ))
keyword[return] identifier[encoding] | def detect_response_encoding(response, is_html=False, peek=131072):
"""Return the likely encoding of the response document.
Args:
response (Response): An instance of :class:`.http.Response`.
is_html (bool): See :func:`.util.detect_encoding`.
peek (int): The maximum number of bytes of the document to be analyzed.
Returns:
``str``, ``None``: The codec name.
"""
encoding = get_heading_encoding(response)
encoding = wpull.string.detect_encoding(wpull.util.peek_file(response.body, peek), encoding=encoding, is_html=is_html)
_logger.debug(__('Got encoding: {0}', encoding))
return encoding |
def from_json(cls, json_data):
"""Deserialize a JSON-serialized instance.
Inverse to :meth:`to_json`.
Args:
json_data: dict or string, Serialized JSON (as a string or an
already parsed dictionary) representing a credential.
Returns:
ServiceAccountCredentials from the serialized data.
"""
if not isinstance(json_data, dict):
json_data = json.loads(_helpers._from_bytes(json_data))
private_key_pkcs8_pem = None
pkcs12_val = json_data.get(_PKCS12_KEY)
password = None
if pkcs12_val is None:
private_key_pkcs8_pem = json_data['_private_key_pkcs8_pem']
signer = crypt.Signer.from_string(private_key_pkcs8_pem)
else:
# NOTE: This assumes that private_key_pkcs8_pem is not also
# in the serialized data. This would be very incorrect
# state.
pkcs12_val = base64.b64decode(pkcs12_val)
password = json_data['_private_key_password']
signer = crypt.Signer.from_string(pkcs12_val, password)
credentials = cls(
json_data['_service_account_email'],
signer,
scopes=json_data['_scopes'],
private_key_id=json_data['_private_key_id'],
client_id=json_data['client_id'],
user_agent=json_data['_user_agent'],
**json_data['_kwargs']
)
if private_key_pkcs8_pem is not None:
credentials._private_key_pkcs8_pem = private_key_pkcs8_pem
if pkcs12_val is not None:
credentials._private_key_pkcs12 = pkcs12_val
if password is not None:
credentials._private_key_password = password
credentials.invalid = json_data['invalid']
credentials.access_token = json_data['access_token']
credentials.token_uri = json_data['token_uri']
credentials.revoke_uri = json_data['revoke_uri']
token_expiry = json_data.get('token_expiry', None)
if token_expiry is not None:
credentials.token_expiry = datetime.datetime.strptime(
token_expiry, client.EXPIRY_FORMAT)
return credentials | def function[from_json, parameter[cls, json_data]]:
constant[Deserialize a JSON-serialized instance.
Inverse to :meth:`to_json`.
Args:
json_data: dict or string, Serialized JSON (as a string or an
already parsed dictionary) representing a credential.
Returns:
ServiceAccountCredentials from the serialized data.
]
if <ast.UnaryOp object at 0x7da1b016e9e0> begin[:]
variable[json_data] assign[=] call[name[json].loads, parameter[call[name[_helpers]._from_bytes, parameter[name[json_data]]]]]
variable[private_key_pkcs8_pem] assign[=] constant[None]
variable[pkcs12_val] assign[=] call[name[json_data].get, parameter[name[_PKCS12_KEY]]]
variable[password] assign[=] constant[None]
if compare[name[pkcs12_val] is constant[None]] begin[:]
variable[private_key_pkcs8_pem] assign[=] call[name[json_data]][constant[_private_key_pkcs8_pem]]
variable[signer] assign[=] call[name[crypt].Signer.from_string, parameter[name[private_key_pkcs8_pem]]]
variable[credentials] assign[=] call[name[cls], parameter[call[name[json_data]][constant[_service_account_email]], name[signer]]]
if compare[name[private_key_pkcs8_pem] is_not constant[None]] begin[:]
name[credentials]._private_key_pkcs8_pem assign[=] name[private_key_pkcs8_pem]
if compare[name[pkcs12_val] is_not constant[None]] begin[:]
name[credentials]._private_key_pkcs12 assign[=] name[pkcs12_val]
if compare[name[password] is_not constant[None]] begin[:]
name[credentials]._private_key_password assign[=] name[password]
name[credentials].invalid assign[=] call[name[json_data]][constant[invalid]]
name[credentials].access_token assign[=] call[name[json_data]][constant[access_token]]
name[credentials].token_uri assign[=] call[name[json_data]][constant[token_uri]]
name[credentials].revoke_uri assign[=] call[name[json_data]][constant[revoke_uri]]
variable[token_expiry] assign[=] call[name[json_data].get, parameter[constant[token_expiry], constant[None]]]
if compare[name[token_expiry] is_not constant[None]] begin[:]
name[credentials].token_expiry assign[=] call[name[datetime].datetime.strptime, parameter[name[token_expiry], name[client].EXPIRY_FORMAT]]
return[name[credentials]] | keyword[def] identifier[from_json] ( identifier[cls] , identifier[json_data] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[json_data] , identifier[dict] ):
identifier[json_data] = identifier[json] . identifier[loads] ( identifier[_helpers] . identifier[_from_bytes] ( identifier[json_data] ))
identifier[private_key_pkcs8_pem] = keyword[None]
identifier[pkcs12_val] = identifier[json_data] . identifier[get] ( identifier[_PKCS12_KEY] )
identifier[password] = keyword[None]
keyword[if] identifier[pkcs12_val] keyword[is] keyword[None] :
identifier[private_key_pkcs8_pem] = identifier[json_data] [ literal[string] ]
identifier[signer] = identifier[crypt] . identifier[Signer] . identifier[from_string] ( identifier[private_key_pkcs8_pem] )
keyword[else] :
identifier[pkcs12_val] = identifier[base64] . identifier[b64decode] ( identifier[pkcs12_val] )
identifier[password] = identifier[json_data] [ literal[string] ]
identifier[signer] = identifier[crypt] . identifier[Signer] . identifier[from_string] ( identifier[pkcs12_val] , identifier[password] )
identifier[credentials] = identifier[cls] (
identifier[json_data] [ literal[string] ],
identifier[signer] ,
identifier[scopes] = identifier[json_data] [ literal[string] ],
identifier[private_key_id] = identifier[json_data] [ literal[string] ],
identifier[client_id] = identifier[json_data] [ literal[string] ],
identifier[user_agent] = identifier[json_data] [ literal[string] ],
** identifier[json_data] [ literal[string] ]
)
keyword[if] identifier[private_key_pkcs8_pem] keyword[is] keyword[not] keyword[None] :
identifier[credentials] . identifier[_private_key_pkcs8_pem] = identifier[private_key_pkcs8_pem]
keyword[if] identifier[pkcs12_val] keyword[is] keyword[not] keyword[None] :
identifier[credentials] . identifier[_private_key_pkcs12] = identifier[pkcs12_val]
keyword[if] identifier[password] keyword[is] keyword[not] keyword[None] :
identifier[credentials] . identifier[_private_key_password] = identifier[password]
identifier[credentials] . identifier[invalid] = identifier[json_data] [ literal[string] ]
identifier[credentials] . identifier[access_token] = identifier[json_data] [ literal[string] ]
identifier[credentials] . identifier[token_uri] = identifier[json_data] [ literal[string] ]
identifier[credentials] . identifier[revoke_uri] = identifier[json_data] [ literal[string] ]
identifier[token_expiry] = identifier[json_data] . identifier[get] ( literal[string] , keyword[None] )
keyword[if] identifier[token_expiry] keyword[is] keyword[not] keyword[None] :
identifier[credentials] . identifier[token_expiry] = identifier[datetime] . identifier[datetime] . identifier[strptime] (
identifier[token_expiry] , identifier[client] . identifier[EXPIRY_FORMAT] )
keyword[return] identifier[credentials] | def from_json(cls, json_data):
"""Deserialize a JSON-serialized instance.
Inverse to :meth:`to_json`.
Args:
json_data: dict or string, Serialized JSON (as a string or an
already parsed dictionary) representing a credential.
Returns:
ServiceAccountCredentials from the serialized data.
"""
if not isinstance(json_data, dict):
json_data = json.loads(_helpers._from_bytes(json_data)) # depends on [control=['if'], data=[]]
private_key_pkcs8_pem = None
pkcs12_val = json_data.get(_PKCS12_KEY)
password = None
if pkcs12_val is None:
private_key_pkcs8_pem = json_data['_private_key_pkcs8_pem']
signer = crypt.Signer.from_string(private_key_pkcs8_pem) # depends on [control=['if'], data=[]]
else:
# NOTE: This assumes that private_key_pkcs8_pem is not also
# in the serialized data. This would be very incorrect
# state.
pkcs12_val = base64.b64decode(pkcs12_val)
password = json_data['_private_key_password']
signer = crypt.Signer.from_string(pkcs12_val, password)
credentials = cls(json_data['_service_account_email'], signer, scopes=json_data['_scopes'], private_key_id=json_data['_private_key_id'], client_id=json_data['client_id'], user_agent=json_data['_user_agent'], **json_data['_kwargs'])
if private_key_pkcs8_pem is not None:
credentials._private_key_pkcs8_pem = private_key_pkcs8_pem # depends on [control=['if'], data=['private_key_pkcs8_pem']]
if pkcs12_val is not None:
credentials._private_key_pkcs12 = pkcs12_val # depends on [control=['if'], data=['pkcs12_val']]
if password is not None:
credentials._private_key_password = password # depends on [control=['if'], data=['password']]
credentials.invalid = json_data['invalid']
credentials.access_token = json_data['access_token']
credentials.token_uri = json_data['token_uri']
credentials.revoke_uri = json_data['revoke_uri']
token_expiry = json_data.get('token_expiry', None)
if token_expiry is not None:
credentials.token_expiry = datetime.datetime.strptime(token_expiry, client.EXPIRY_FORMAT) # depends on [control=['if'], data=['token_expiry']]
return credentials |
def generate_message_definitions(basename, xml):
'''generate files for one XML file'''
directory = os.path.join(basename, xml.basename)
print("Generating Objective-C implementation in directory %s" % directory)
mavparse.mkdir_p(directory)
xml.basename_camel_case = camel_case_from_underscores(xml.basename)
# Add some extra field attributes for convenience
for m in xml.message:
m.basename = xml.basename
m.parse_time = xml.parse_time
m.name_camel_case = camel_case_from_underscores(m.name_lower)
for f in m.fields:
f.name_lower_camel_case = lower_camel_case_from_underscores(f.name);
f.get_message = "[self %s]" % f.name_lower_camel_case
f.return_method_implementation = ''
f.array_prefix = ''
f.array_return_arg = ''
f.get_arg = ''
f.get_arg_objc = ''
if f.enum:
f.return_type = f.enum
f.arg_type = f.enum
else:
f.return_type = f.type
f.arg_type = f.type
if f.print_format is None:
if f.array_length != 0:
f.print_format = "%@"
elif f.type.startswith('uint64_t'):
f.print_format = "%lld"
elif f.type.startswith('uint') or f.type.startswith('int'):
f.print_format = "%d"
elif f.type.startswith('float'):
f.print_format = "%f"
elif f.type.startswith('char'):
f.print_format = "%c"
else:
print("print_format unsupported for type %s" % f.type)
if f.array_length != 0:
f.get_message = '@"[array of %s[%d]]"' % (f.type, f.array_length)
f.array_prefix = ' *'
f.array_return_arg = '%s, %u, ' % (f.name, f.array_length)
f.return_type = 'uint16_t'
f.get_arg = ', %s' % (f.name)
f.get_arg_objc = ':(%s *)%s' % (f.type, f.name)
if f.type == 'char':
# Special handling for strings (assumes all char arrays are strings)
f.return_type = 'NSString *'
f.get_arg_objc = ''
f.get_message = "[self %s]" % f.name_lower_camel_case
f.return_method_implementation = \
"""char string[%(array_length)d];
mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message), (char *)&string);
return [[NSString alloc] initWithBytes:string length:%(array_length)d encoding:NSASCIIStringEncoding];""" % {'array_length': f.array_length, 'message_name_lower': m.name_lower, 'name': f.name}
if not f.return_method_implementation:
f.return_method_implementation = \
"""return mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message)%(get_arg)s);""" % {'message_name_lower': m.name_lower, 'name': f.name, 'get_arg': f.get_arg}
for m in xml.message:
m.arg_fields = []
for f in m.fields:
if not f.omit_arg:
m.arg_fields.append(f)
generate_message_definitions_h(directory, xml)
for m in xml.message:
generate_message(directory, m) | def function[generate_message_definitions, parameter[basename, xml]]:
constant[generate files for one XML file]
variable[directory] assign[=] call[name[os].path.join, parameter[name[basename], name[xml].basename]]
call[name[print], parameter[binary_operation[constant[Generating Objective-C implementation in directory %s] <ast.Mod object at 0x7da2590d6920> name[directory]]]]
call[name[mavparse].mkdir_p, parameter[name[directory]]]
name[xml].basename_camel_case assign[=] call[name[camel_case_from_underscores], parameter[name[xml].basename]]
for taget[name[m]] in starred[name[xml].message] begin[:]
name[m].basename assign[=] name[xml].basename
name[m].parse_time assign[=] name[xml].parse_time
name[m].name_camel_case assign[=] call[name[camel_case_from_underscores], parameter[name[m].name_lower]]
for taget[name[f]] in starred[name[m].fields] begin[:]
name[f].name_lower_camel_case assign[=] call[name[lower_camel_case_from_underscores], parameter[name[f].name]]
name[f].get_message assign[=] binary_operation[constant[[self %s]] <ast.Mod object at 0x7da2590d6920> name[f].name_lower_camel_case]
name[f].return_method_implementation assign[=] constant[]
name[f].array_prefix assign[=] constant[]
name[f].array_return_arg assign[=] constant[]
name[f].get_arg assign[=] constant[]
name[f].get_arg_objc assign[=] constant[]
if name[f].enum begin[:]
name[f].return_type assign[=] name[f].enum
name[f].arg_type assign[=] name[f].enum
if compare[name[f].print_format is constant[None]] begin[:]
if compare[name[f].array_length not_equal[!=] constant[0]] begin[:]
name[f].print_format assign[=] constant[%@]
if compare[name[f].array_length not_equal[!=] constant[0]] begin[:]
name[f].get_message assign[=] binary_operation[constant[@"[array of %s[%d]]"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2345a20>, <ast.Attribute object at 0x7da1b23477f0>]]]
name[f].array_prefix assign[=] constant[ *]
name[f].array_return_arg assign[=] binary_operation[constant[%s, %u, ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b2347e80>, <ast.Attribute object at 0x7da1b2347d60>]]]
name[f].return_type assign[=] constant[uint16_t]
name[f].get_arg assign[=] binary_operation[constant[, %s] <ast.Mod object at 0x7da2590d6920> name[f].name]
name[f].get_arg_objc assign[=] binary_operation[constant[:(%s *)%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1b23455d0>, <ast.Attribute object at 0x7da1b2347460>]]]
if compare[name[f].type equal[==] constant[char]] begin[:]
name[f].return_type assign[=] constant[NSString *]
name[f].get_arg_objc assign[=] constant[]
name[f].get_message assign[=] binary_operation[constant[[self %s]] <ast.Mod object at 0x7da2590d6920> name[f].name_lower_camel_case]
name[f].return_method_implementation assign[=] binary_operation[constant[char string[%(array_length)d];
mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message), (char *)&string);
return [[NSString alloc] initWithBytes:string length:%(array_length)d encoding:NSASCIIStringEncoding];] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b2344550>, <ast.Constant object at 0x7da1b23451b0>, <ast.Constant object at 0x7da1b23464a0>], [<ast.Attribute object at 0x7da1b2346350>, <ast.Attribute object at 0x7da1b2347e20>, <ast.Attribute object at 0x7da1b2344b50>]]]
if <ast.UnaryOp object at 0x7da1b2346590> begin[:]
name[f].return_method_implementation assign[=] binary_operation[constant[return mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message)%(get_arg)s);] <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b23452a0>, <ast.Constant object at 0x7da1b2344af0>, <ast.Constant object at 0x7da1b2347310>], [<ast.Attribute object at 0x7da1b2347970>, <ast.Attribute object at 0x7da1b23458a0>, <ast.Attribute object at 0x7da1b2344a60>]]]
for taget[name[m]] in starred[name[xml].message] begin[:]
name[m].arg_fields assign[=] list[[]]
for taget[name[f]] in starred[name[m].fields] begin[:]
if <ast.UnaryOp object at 0x7da1b2345ea0> begin[:]
call[name[m].arg_fields.append, parameter[name[f]]]
call[name[generate_message_definitions_h], parameter[name[directory], name[xml]]]
for taget[name[m]] in starred[name[xml].message] begin[:]
call[name[generate_message], parameter[name[directory], name[m]]] | keyword[def] identifier[generate_message_definitions] ( identifier[basename] , identifier[xml] ):
literal[string]
identifier[directory] = identifier[os] . identifier[path] . identifier[join] ( identifier[basename] , identifier[xml] . identifier[basename] )
identifier[print] ( literal[string] % identifier[directory] )
identifier[mavparse] . identifier[mkdir_p] ( identifier[directory] )
identifier[xml] . identifier[basename_camel_case] = identifier[camel_case_from_underscores] ( identifier[xml] . identifier[basename] )
keyword[for] identifier[m] keyword[in] identifier[xml] . identifier[message] :
identifier[m] . identifier[basename] = identifier[xml] . identifier[basename]
identifier[m] . identifier[parse_time] = identifier[xml] . identifier[parse_time]
identifier[m] . identifier[name_camel_case] = identifier[camel_case_from_underscores] ( identifier[m] . identifier[name_lower] )
keyword[for] identifier[f] keyword[in] identifier[m] . identifier[fields] :
identifier[f] . identifier[name_lower_camel_case] = identifier[lower_camel_case_from_underscores] ( identifier[f] . identifier[name] );
identifier[f] . identifier[get_message] = literal[string] % identifier[f] . identifier[name_lower_camel_case]
identifier[f] . identifier[return_method_implementation] = literal[string]
identifier[f] . identifier[array_prefix] = literal[string]
identifier[f] . identifier[array_return_arg] = literal[string]
identifier[f] . identifier[get_arg] = literal[string]
identifier[f] . identifier[get_arg_objc] = literal[string]
keyword[if] identifier[f] . identifier[enum] :
identifier[f] . identifier[return_type] = identifier[f] . identifier[enum]
identifier[f] . identifier[arg_type] = identifier[f] . identifier[enum]
keyword[else] :
identifier[f] . identifier[return_type] = identifier[f] . identifier[type]
identifier[f] . identifier[arg_type] = identifier[f] . identifier[type]
keyword[if] identifier[f] . identifier[print_format] keyword[is] keyword[None] :
keyword[if] identifier[f] . identifier[array_length] != literal[int] :
identifier[f] . identifier[print_format] = literal[string]
keyword[elif] identifier[f] . identifier[type] . identifier[startswith] ( literal[string] ):
identifier[f] . identifier[print_format] = literal[string]
keyword[elif] identifier[f] . identifier[type] . identifier[startswith] ( literal[string] ) keyword[or] identifier[f] . identifier[type] . identifier[startswith] ( literal[string] ):
identifier[f] . identifier[print_format] = literal[string]
keyword[elif] identifier[f] . identifier[type] . identifier[startswith] ( literal[string] ):
identifier[f] . identifier[print_format] = literal[string]
keyword[elif] identifier[f] . identifier[type] . identifier[startswith] ( literal[string] ):
identifier[f] . identifier[print_format] = literal[string]
keyword[else] :
identifier[print] ( literal[string] % identifier[f] . identifier[type] )
keyword[if] identifier[f] . identifier[array_length] != literal[int] :
identifier[f] . identifier[get_message] = literal[string] %( identifier[f] . identifier[type] , identifier[f] . identifier[array_length] )
identifier[f] . identifier[array_prefix] = literal[string]
identifier[f] . identifier[array_return_arg] = literal[string] %( identifier[f] . identifier[name] , identifier[f] . identifier[array_length] )
identifier[f] . identifier[return_type] = literal[string]
identifier[f] . identifier[get_arg] = literal[string] %( identifier[f] . identifier[name] )
identifier[f] . identifier[get_arg_objc] = literal[string] %( identifier[f] . identifier[type] , identifier[f] . identifier[name] )
keyword[if] identifier[f] . identifier[type] == literal[string] :
identifier[f] . identifier[return_type] = literal[string]
identifier[f] . identifier[get_arg_objc] = literal[string]
identifier[f] . identifier[get_message] = literal[string] % identifier[f] . identifier[name_lower_camel_case]
identifier[f] . identifier[return_method_implementation] = literal[string] %{ literal[string] : identifier[f] . identifier[array_length] , literal[string] : identifier[m] . identifier[name_lower] , literal[string] : identifier[f] . identifier[name] }
keyword[if] keyword[not] identifier[f] . identifier[return_method_implementation] :
identifier[f] . identifier[return_method_implementation] = literal[string] %{ literal[string] : identifier[m] . identifier[name_lower] , literal[string] : identifier[f] . identifier[name] , literal[string] : identifier[f] . identifier[get_arg] }
keyword[for] identifier[m] keyword[in] identifier[xml] . identifier[message] :
identifier[m] . identifier[arg_fields] =[]
keyword[for] identifier[f] keyword[in] identifier[m] . identifier[fields] :
keyword[if] keyword[not] identifier[f] . identifier[omit_arg] :
identifier[m] . identifier[arg_fields] . identifier[append] ( identifier[f] )
identifier[generate_message_definitions_h] ( identifier[directory] , identifier[xml] )
keyword[for] identifier[m] keyword[in] identifier[xml] . identifier[message] :
identifier[generate_message] ( identifier[directory] , identifier[m] ) | def generate_message_definitions(basename, xml):
"""generate files for one XML file"""
directory = os.path.join(basename, xml.basename)
print('Generating Objective-C implementation in directory %s' % directory)
mavparse.mkdir_p(directory)
xml.basename_camel_case = camel_case_from_underscores(xml.basename)
# Add some extra field attributes for convenience
for m in xml.message:
m.basename = xml.basename
m.parse_time = xml.parse_time
m.name_camel_case = camel_case_from_underscores(m.name_lower)
for f in m.fields:
f.name_lower_camel_case = lower_camel_case_from_underscores(f.name)
f.get_message = '[self %s]' % f.name_lower_camel_case
f.return_method_implementation = ''
f.array_prefix = ''
f.array_return_arg = ''
f.get_arg = ''
f.get_arg_objc = ''
if f.enum:
f.return_type = f.enum
f.arg_type = f.enum # depends on [control=['if'], data=[]]
else:
f.return_type = f.type
f.arg_type = f.type
if f.print_format is None:
if f.array_length != 0:
f.print_format = '%@' # depends on [control=['if'], data=[]]
elif f.type.startswith('uint64_t'):
f.print_format = '%lld' # depends on [control=['if'], data=[]]
elif f.type.startswith('uint') or f.type.startswith('int'):
f.print_format = '%d' # depends on [control=['if'], data=[]]
elif f.type.startswith('float'):
f.print_format = '%f' # depends on [control=['if'], data=[]]
elif f.type.startswith('char'):
f.print_format = '%c' # depends on [control=['if'], data=[]]
else:
print('print_format unsupported for type %s' % f.type) # depends on [control=['if'], data=[]]
if f.array_length != 0:
f.get_message = '@"[array of %s[%d]]"' % (f.type, f.array_length)
f.array_prefix = ' *'
f.array_return_arg = '%s, %u, ' % (f.name, f.array_length)
f.return_type = 'uint16_t'
f.get_arg = ', %s' % f.name
f.get_arg_objc = ':(%s *)%s' % (f.type, f.name)
if f.type == 'char':
# Special handling for strings (assumes all char arrays are strings)
f.return_type = 'NSString *'
f.get_arg_objc = ''
f.get_message = '[self %s]' % f.name_lower_camel_case
f.return_method_implementation = 'char string[%(array_length)d];\n mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message), (char *)&string);\n return [[NSString alloc] initWithBytes:string length:%(array_length)d encoding:NSASCIIStringEncoding];' % {'array_length': f.array_length, 'message_name_lower': m.name_lower, 'name': f.name} # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not f.return_method_implementation:
f.return_method_implementation = 'return mavlink_msg_%(message_name_lower)s_get_%(name)s(&(self->_message)%(get_arg)s);' % {'message_name_lower': m.name_lower, 'name': f.name, 'get_arg': f.get_arg} # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['m']]
for m in xml.message:
m.arg_fields = []
for f in m.fields:
if not f.omit_arg:
m.arg_fields.append(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] # depends on [control=['for'], data=['m']]
generate_message_definitions_h(directory, xml)
for m in xml.message:
generate_message(directory, m) # depends on [control=['for'], data=['m']] |
def get_address_nonce(self, address, api_token):
"""
Looks up the address nonce of this address
Neccesary for the transaction creation
"""
broadcast_url = self.base_url + '?module=proxy&action=eth_getTransactionCount'
broadcast_url += '&address=%s' % address
broadcast_url += '&tag=latest'
if api_token:
'&apikey=%s' % api_token
response = requests.get(broadcast_url, )
if int(response.status_code) == 200:
# the int(res, 0) transforms the hex nonce to int
nonce = int(response.json().get('result', None), 0)
logging.info('Nonce check went correct: %s', response.json())
return nonce
else:
logging.info('response error checking nonce')
raise BroadcastError('Error checking the nonce through the Etherscan API. Error msg: %s', response.text) | def function[get_address_nonce, parameter[self, address, api_token]]:
constant[
Looks up the address nonce of this address
Neccesary for the transaction creation
]
variable[broadcast_url] assign[=] binary_operation[name[self].base_url + constant[?module=proxy&action=eth_getTransactionCount]]
<ast.AugAssign object at 0x7da18dc98d30>
<ast.AugAssign object at 0x7da18dc9a290>
if name[api_token] begin[:]
binary_operation[constant[&apikey=%s] <ast.Mod object at 0x7da2590d6920> name[api_token]]
variable[response] assign[=] call[name[requests].get, parameter[name[broadcast_url]]]
if compare[call[name[int], parameter[name[response].status_code]] equal[==] constant[200]] begin[:]
variable[nonce] assign[=] call[name[int], parameter[call[call[name[response].json, parameter[]].get, parameter[constant[result], constant[None]]], constant[0]]]
call[name[logging].info, parameter[constant[Nonce check went correct: %s], call[name[response].json, parameter[]]]]
return[name[nonce]]
<ast.Raise object at 0x7da18fe911e0> | keyword[def] identifier[get_address_nonce] ( identifier[self] , identifier[address] , identifier[api_token] ):
literal[string]
identifier[broadcast_url] = identifier[self] . identifier[base_url] + literal[string]
identifier[broadcast_url] += literal[string] % identifier[address]
identifier[broadcast_url] += literal[string]
keyword[if] identifier[api_token] :
literal[string] % identifier[api_token]
identifier[response] = identifier[requests] . identifier[get] ( identifier[broadcast_url] ,)
keyword[if] identifier[int] ( identifier[response] . identifier[status_code] )== literal[int] :
identifier[nonce] = identifier[int] ( identifier[response] . identifier[json] (). identifier[get] ( literal[string] , keyword[None] ), literal[int] )
identifier[logging] . identifier[info] ( literal[string] , identifier[response] . identifier[json] ())
keyword[return] identifier[nonce]
keyword[else] :
identifier[logging] . identifier[info] ( literal[string] )
keyword[raise] identifier[BroadcastError] ( literal[string] , identifier[response] . identifier[text] ) | def get_address_nonce(self, address, api_token):
"""
Looks up the address nonce of this address
Neccesary for the transaction creation
"""
broadcast_url = self.base_url + '?module=proxy&action=eth_getTransactionCount'
broadcast_url += '&address=%s' % address
broadcast_url += '&tag=latest'
if api_token:
'&apikey=%s' % api_token # depends on [control=['if'], data=[]]
response = requests.get(broadcast_url)
if int(response.status_code) == 200:
# the int(res, 0) transforms the hex nonce to int
nonce = int(response.json().get('result', None), 0)
logging.info('Nonce check went correct: %s', response.json())
return nonce # depends on [control=['if'], data=[]]
else:
logging.info('response error checking nonce')
raise BroadcastError('Error checking the nonce through the Etherscan API. Error msg: %s', response.text) |
def wait(self, timeout=15):
"""
block until pod is not ready, raises an exc ProbeTimeout if timeout is reached
:param timeout: int or float (seconds), time to wait for pod to run
:return: None
"""
Probe(timeout=timeout, fnc=self.is_ready, expected_retval=True).run() | def function[wait, parameter[self, timeout]]:
constant[
block until pod is not ready, raises an exc ProbeTimeout if timeout is reached
:param timeout: int or float (seconds), time to wait for pod to run
:return: None
]
call[call[name[Probe], parameter[]].run, parameter[]] | keyword[def] identifier[wait] ( identifier[self] , identifier[timeout] = literal[int] ):
literal[string]
identifier[Probe] ( identifier[timeout] = identifier[timeout] , identifier[fnc] = identifier[self] . identifier[is_ready] , identifier[expected_retval] = keyword[True] ). identifier[run] () | def wait(self, timeout=15):
"""
block until pod is not ready, raises an exc ProbeTimeout if timeout is reached
:param timeout: int or float (seconds), time to wait for pod to run
:return: None
"""
Probe(timeout=timeout, fnc=self.is_ready, expected_retval=True).run() |
def auth(self, password):
"""Request for authentication in a password-protected Redis server.
Redis can be instructed to require a password before allowing clients
to execute commands. This is done using the ``requirepass`` directive
in the configuration file.
If the password does not match, an
:exc:`~tredis.exceptions.AuthError` exception
will be raised.
:param password: The password to authenticate with
:type password: :class:`str`, :class:`bytes`
:rtype: bool
:raises: :exc:`~tredis.exceptions.AuthError`,
:exc:`~tredis.exceptions.RedisError`
"""
future = concurrent.TracebackFuture()
def on_response(response):
"""Process the redis response
:param response: The future with the response
:type response: tornado.concurrent.Future
"""
exc = response.exception()
if exc:
if exc.args[0] == b'invalid password':
future.set_exception(exceptions.AuthError(exc))
else:
future.set_exception(exc)
else:
future.set_result(response.result())
execute_future = self._execute([b'AUTH', password], b'OK')
self.io_loop.add_future(execute_future, on_response)
return future | def function[auth, parameter[self, password]]:
constant[Request for authentication in a password-protected Redis server.
Redis can be instructed to require a password before allowing clients
to execute commands. This is done using the ``requirepass`` directive
in the configuration file.
If the password does not match, an
:exc:`~tredis.exceptions.AuthError` exception
will be raised.
:param password: The password to authenticate with
:type password: :class:`str`, :class:`bytes`
:rtype: bool
:raises: :exc:`~tredis.exceptions.AuthError`,
:exc:`~tredis.exceptions.RedisError`
]
variable[future] assign[=] call[name[concurrent].TracebackFuture, parameter[]]
def function[on_response, parameter[response]]:
constant[Process the redis response
:param response: The future with the response
:type response: tornado.concurrent.Future
]
variable[exc] assign[=] call[name[response].exception, parameter[]]
if name[exc] begin[:]
if compare[call[name[exc].args][constant[0]] equal[==] constant[b'invalid password']] begin[:]
call[name[future].set_exception, parameter[call[name[exceptions].AuthError, parameter[name[exc]]]]]
variable[execute_future] assign[=] call[name[self]._execute, parameter[list[[<ast.Constant object at 0x7da18dc07a60>, <ast.Name object at 0x7da18dc04dc0>]], constant[b'OK']]]
call[name[self].io_loop.add_future, parameter[name[execute_future], name[on_response]]]
return[name[future]] | keyword[def] identifier[auth] ( identifier[self] , identifier[password] ):
literal[string]
identifier[future] = identifier[concurrent] . identifier[TracebackFuture] ()
keyword[def] identifier[on_response] ( identifier[response] ):
literal[string]
identifier[exc] = identifier[response] . identifier[exception] ()
keyword[if] identifier[exc] :
keyword[if] identifier[exc] . identifier[args] [ literal[int] ]== literal[string] :
identifier[future] . identifier[set_exception] ( identifier[exceptions] . identifier[AuthError] ( identifier[exc] ))
keyword[else] :
identifier[future] . identifier[set_exception] ( identifier[exc] )
keyword[else] :
identifier[future] . identifier[set_result] ( identifier[response] . identifier[result] ())
identifier[execute_future] = identifier[self] . identifier[_execute] ([ literal[string] , identifier[password] ], literal[string] )
identifier[self] . identifier[io_loop] . identifier[add_future] ( identifier[execute_future] , identifier[on_response] )
keyword[return] identifier[future] | def auth(self, password):
"""Request for authentication in a password-protected Redis server.
Redis can be instructed to require a password before allowing clients
to execute commands. This is done using the ``requirepass`` directive
in the configuration file.
If the password does not match, an
:exc:`~tredis.exceptions.AuthError` exception
will be raised.
:param password: The password to authenticate with
:type password: :class:`str`, :class:`bytes`
:rtype: bool
:raises: :exc:`~tredis.exceptions.AuthError`,
:exc:`~tredis.exceptions.RedisError`
"""
future = concurrent.TracebackFuture()
def on_response(response):
"""Process the redis response
:param response: The future with the response
:type response: tornado.concurrent.Future
"""
exc = response.exception()
if exc:
if exc.args[0] == b'invalid password':
future.set_exception(exceptions.AuthError(exc)) # depends on [control=['if'], data=[]]
else:
future.set_exception(exc) # depends on [control=['if'], data=[]]
else:
future.set_result(response.result())
execute_future = self._execute([b'AUTH', password], b'OK')
self.io_loop.add_future(execute_future, on_response)
return future |
def to_date(value, default=None):
"""Tries to convert the passed in value to Zope's DateTime
:param value: The value to be converted to a valid DateTime
:type value: str, DateTime or datetime
:return: The DateTime representation of the value passed in or default
"""
if isinstance(value, DateTime):
return value
if not value:
if default is None:
return None
return to_date(default)
try:
if isinstance(value, str) and '.' in value:
# https://docs.plone.org/develop/plone/misc/datetime.html#datetime-problems-and-pitfalls
return DateTime(value, datefmt='international')
return DateTime(value)
except (TypeError, ValueError, DateTimeError):
return to_date(default) | def function[to_date, parameter[value, default]]:
constant[Tries to convert the passed in value to Zope's DateTime
:param value: The value to be converted to a valid DateTime
:type value: str, DateTime or datetime
:return: The DateTime representation of the value passed in or default
]
if call[name[isinstance], parameter[name[value], name[DateTime]]] begin[:]
return[name[value]]
if <ast.UnaryOp object at 0x7da18f09f700> begin[:]
if compare[name[default] is constant[None]] begin[:]
return[constant[None]]
return[call[name[to_date], parameter[name[default]]]]
<ast.Try object at 0x7da18f09eb60> | keyword[def] identifier[to_date] ( identifier[value] , identifier[default] = keyword[None] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[DateTime] ):
keyword[return] identifier[value]
keyword[if] keyword[not] identifier[value] :
keyword[if] identifier[default] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[return] identifier[to_date] ( identifier[default] )
keyword[try] :
keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ) keyword[and] literal[string] keyword[in] identifier[value] :
keyword[return] identifier[DateTime] ( identifier[value] , identifier[datefmt] = literal[string] )
keyword[return] identifier[DateTime] ( identifier[value] )
keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[DateTimeError] ):
keyword[return] identifier[to_date] ( identifier[default] ) | def to_date(value, default=None):
"""Tries to convert the passed in value to Zope's DateTime
:param value: The value to be converted to a valid DateTime
:type value: str, DateTime or datetime
:return: The DateTime representation of the value passed in or default
"""
if isinstance(value, DateTime):
return value # depends on [control=['if'], data=[]]
if not value:
if default is None:
return None # depends on [control=['if'], data=[]]
return to_date(default) # depends on [control=['if'], data=[]]
try:
if isinstance(value, str) and '.' in value:
# https://docs.plone.org/develop/plone/misc/datetime.html#datetime-problems-and-pitfalls
return DateTime(value, datefmt='international') # depends on [control=['if'], data=[]]
return DateTime(value) # depends on [control=['try'], data=[]]
except (TypeError, ValueError, DateTimeError):
return to_date(default) # depends on [control=['except'], data=[]] |
def retry_mkstemp(suffix='', prefix='tmp', directory=None, max_retries=3):
"""
Make mkstemp more robust against AFS glitches.
"""
if directory is None:
directory = current_app.config['CFG_TMPSHAREDDIR']
for retry_count in range(1, max_retries + 1):
try:
tmp_file_fd, tmp_file_name = tempfile.mkstemp(suffix=suffix,
prefix=prefix,
dir=directory)
except OSError as e:
if e.errno == 19 and retry_count <= max_retries:
# AFS Glitch?
time.sleep(10)
else:
raise
else:
break
return tmp_file_fd, tmp_file_name | def function[retry_mkstemp, parameter[suffix, prefix, directory, max_retries]]:
constant[
Make mkstemp more robust against AFS glitches.
]
if compare[name[directory] is constant[None]] begin[:]
variable[directory] assign[=] call[name[current_app].config][constant[CFG_TMPSHAREDDIR]]
for taget[name[retry_count]] in starred[call[name[range], parameter[constant[1], binary_operation[name[max_retries] + constant[1]]]]] begin[:]
<ast.Try object at 0x7da18dc99480>
return[tuple[[<ast.Name object at 0x7da18dc9a3e0>, <ast.Name object at 0x7da18dc99c60>]]] | keyword[def] identifier[retry_mkstemp] ( identifier[suffix] = literal[string] , identifier[prefix] = literal[string] , identifier[directory] = keyword[None] , identifier[max_retries] = literal[int] ):
literal[string]
keyword[if] identifier[directory] keyword[is] keyword[None] :
identifier[directory] = identifier[current_app] . identifier[config] [ literal[string] ]
keyword[for] identifier[retry_count] keyword[in] identifier[range] ( literal[int] , identifier[max_retries] + literal[int] ):
keyword[try] :
identifier[tmp_file_fd] , identifier[tmp_file_name] = identifier[tempfile] . identifier[mkstemp] ( identifier[suffix] = identifier[suffix] ,
identifier[prefix] = identifier[prefix] ,
identifier[dir] = identifier[directory] )
keyword[except] identifier[OSError] keyword[as] identifier[e] :
keyword[if] identifier[e] . identifier[errno] == literal[int] keyword[and] identifier[retry_count] <= identifier[max_retries] :
identifier[time] . identifier[sleep] ( literal[int] )
keyword[else] :
keyword[raise]
keyword[else] :
keyword[break]
keyword[return] identifier[tmp_file_fd] , identifier[tmp_file_name] | def retry_mkstemp(suffix='', prefix='tmp', directory=None, max_retries=3):
"""
Make mkstemp more robust against AFS glitches.
"""
if directory is None:
directory = current_app.config['CFG_TMPSHAREDDIR'] # depends on [control=['if'], data=['directory']]
for retry_count in range(1, max_retries + 1):
try:
(tmp_file_fd, tmp_file_name) = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=directory) # depends on [control=['try'], data=[]]
except OSError as e:
if e.errno == 19 and retry_count <= max_retries:
# AFS Glitch?
time.sleep(10) # depends on [control=['if'], data=[]]
else:
raise # depends on [control=['except'], data=['e']]
else:
break # depends on [control=['for'], data=['retry_count']]
return (tmp_file_fd, tmp_file_name) |
def GetReportDownloadHeaders(self, **kwargs):
"""Returns a dictionary of headers for a report download request.
Note that the given keyword arguments will override any settings configured
from the googleads.yaml file.
Args:
**kwargs: Optional keyword arguments.
Keyword Arguments:
client_customer_id: A string containing a client_customer_id intended to
override the default value set by the AdWordsClient.
include_zero_impressions: A boolean indicating whether the report should
show rows with zero impressions.
skip_report_header: A boolean indicating whether to include a header row
containing the report name and date range. If false or not specified,
report output will include the header row.
skip_column_header: A boolean indicating whether to include column names
in reports. If false or not specified, report output will include the
column names.
skip_report_summary: A boolean indicating whether to include a summary row
containing the report totals. If false or not specified, report output
will include the summary row.
use_raw_enum_values: A boolean indicating whether to return enum field
values as enums instead of display values.
Returns:
A dictionary containing the headers configured for downloading a report.
Raises:
GoogleAdsValueError: If one or more of the report header keyword arguments
is invalid.
"""
headers = self._adwords_client.oauth2_client.CreateHttpHeader()
headers.update({
'Content-type': self._CONTENT_TYPE,
'developerToken': str(self._adwords_client.developer_token),
'clientCustomerId': str(kwargs.get(
'client_customer_id', self._adwords_client.client_customer_id)),
'User-Agent': ''.join([
self._adwords_client.user_agent,
googleads.common.GenerateLibSig(self._PRODUCT_SIG),
',gzip'])
})
headers.update(self.custom_http_headers)
updated_kwargs = dict(self._adwords_client.report_download_headers)
updated_kwargs.update(kwargs)
for kw in updated_kwargs:
try:
headers[_REPORT_HEADER_KWARGS[kw]] = str(updated_kwargs[kw])
except KeyError:
raise googleads.errors.GoogleAdsValueError(
'The provided keyword "%s" is invalid. Accepted keywords are: %s'
% (kw, _REPORT_HEADER_KWARGS.keys()))
return headers | def function[GetReportDownloadHeaders, parameter[self]]:
constant[Returns a dictionary of headers for a report download request.
Note that the given keyword arguments will override any settings configured
from the googleads.yaml file.
Args:
**kwargs: Optional keyword arguments.
Keyword Arguments:
client_customer_id: A string containing a client_customer_id intended to
override the default value set by the AdWordsClient.
include_zero_impressions: A boolean indicating whether the report should
show rows with zero impressions.
skip_report_header: A boolean indicating whether to include a header row
containing the report name and date range. If false or not specified,
report output will include the header row.
skip_column_header: A boolean indicating whether to include column names
in reports. If false or not specified, report output will include the
column names.
skip_report_summary: A boolean indicating whether to include a summary row
containing the report totals. If false or not specified, report output
will include the summary row.
use_raw_enum_values: A boolean indicating whether to return enum field
values as enums instead of display values.
Returns:
A dictionary containing the headers configured for downloading a report.
Raises:
GoogleAdsValueError: If one or more of the report header keyword arguments
is invalid.
]
variable[headers] assign[=] call[name[self]._adwords_client.oauth2_client.CreateHttpHeader, parameter[]]
call[name[headers].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1bd67d0>, <ast.Constant object at 0x7da1b1bd5f30>, <ast.Constant object at 0x7da1b1bd5750>, <ast.Constant object at 0x7da1b1bd6470>], [<ast.Attribute object at 0x7da1b1bd7220>, <ast.Call object at 0x7da1b1bd7550>, <ast.Call object at 0x7da1b1bd57b0>, <ast.Call object at 0x7da1b1bd7520>]]]]
call[name[headers].update, parameter[name[self].custom_http_headers]]
variable[updated_kwargs] assign[=] call[name[dict], parameter[name[self]._adwords_client.report_download_headers]]
call[name[updated_kwargs].update, parameter[name[kwargs]]]
for taget[name[kw]] in starred[name[updated_kwargs]] begin[:]
<ast.Try object at 0x7da1b1bd6170>
return[name[headers]] | keyword[def] identifier[GetReportDownloadHeaders] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[headers] = identifier[self] . identifier[_adwords_client] . identifier[oauth2_client] . identifier[CreateHttpHeader] ()
identifier[headers] . identifier[update] ({
literal[string] : identifier[self] . identifier[_CONTENT_TYPE] ,
literal[string] : identifier[str] ( identifier[self] . identifier[_adwords_client] . identifier[developer_token] ),
literal[string] : identifier[str] ( identifier[kwargs] . identifier[get] (
literal[string] , identifier[self] . identifier[_adwords_client] . identifier[client_customer_id] )),
literal[string] : literal[string] . identifier[join] ([
identifier[self] . identifier[_adwords_client] . identifier[user_agent] ,
identifier[googleads] . identifier[common] . identifier[GenerateLibSig] ( identifier[self] . identifier[_PRODUCT_SIG] ),
literal[string] ])
})
identifier[headers] . identifier[update] ( identifier[self] . identifier[custom_http_headers] )
identifier[updated_kwargs] = identifier[dict] ( identifier[self] . identifier[_adwords_client] . identifier[report_download_headers] )
identifier[updated_kwargs] . identifier[update] ( identifier[kwargs] )
keyword[for] identifier[kw] keyword[in] identifier[updated_kwargs] :
keyword[try] :
identifier[headers] [ identifier[_REPORT_HEADER_KWARGS] [ identifier[kw] ]]= identifier[str] ( identifier[updated_kwargs] [ identifier[kw] ])
keyword[except] identifier[KeyError] :
keyword[raise] identifier[googleads] . identifier[errors] . identifier[GoogleAdsValueError] (
literal[string]
%( identifier[kw] , identifier[_REPORT_HEADER_KWARGS] . identifier[keys] ()))
keyword[return] identifier[headers] | def GetReportDownloadHeaders(self, **kwargs):
"""Returns a dictionary of headers for a report download request.
Note that the given keyword arguments will override any settings configured
from the googleads.yaml file.
Args:
**kwargs: Optional keyword arguments.
Keyword Arguments:
client_customer_id: A string containing a client_customer_id intended to
override the default value set by the AdWordsClient.
include_zero_impressions: A boolean indicating whether the report should
show rows with zero impressions.
skip_report_header: A boolean indicating whether to include a header row
containing the report name and date range. If false or not specified,
report output will include the header row.
skip_column_header: A boolean indicating whether to include column names
in reports. If false or not specified, report output will include the
column names.
skip_report_summary: A boolean indicating whether to include a summary row
containing the report totals. If false or not specified, report output
will include the summary row.
use_raw_enum_values: A boolean indicating whether to return enum field
values as enums instead of display values.
Returns:
A dictionary containing the headers configured for downloading a report.
Raises:
GoogleAdsValueError: If one or more of the report header keyword arguments
is invalid.
"""
headers = self._adwords_client.oauth2_client.CreateHttpHeader()
headers.update({'Content-type': self._CONTENT_TYPE, 'developerToken': str(self._adwords_client.developer_token), 'clientCustomerId': str(kwargs.get('client_customer_id', self._adwords_client.client_customer_id)), 'User-Agent': ''.join([self._adwords_client.user_agent, googleads.common.GenerateLibSig(self._PRODUCT_SIG), ',gzip'])})
headers.update(self.custom_http_headers)
updated_kwargs = dict(self._adwords_client.report_download_headers)
updated_kwargs.update(kwargs)
for kw in updated_kwargs:
try:
headers[_REPORT_HEADER_KWARGS[kw]] = str(updated_kwargs[kw]) # depends on [control=['try'], data=[]]
except KeyError:
raise googleads.errors.GoogleAdsValueError('The provided keyword "%s" is invalid. Accepted keywords are: %s' % (kw, _REPORT_HEADER_KWARGS.keys())) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['kw']]
return headers |
def installedDependents(self, target):
"""
Return an iterable of things installed on the target that
require this item.
"""
for dc in self.store.query(_DependencyConnector,
_DependencyConnector.target == target):
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
yield dc.installee | def function[installedDependents, parameter[self, target]]:
constant[
Return an iterable of things installed on the target that
require this item.
]
for taget[name[dc]] in starred[call[name[self].store.query, parameter[name[_DependencyConnector], compare[name[_DependencyConnector].target equal[==] name[target]]]]] begin[:]
variable[depends] assign[=] call[name[dependentsOf], parameter[name[dc].installee.__class__]]
if compare[name[self].__class__ in name[depends]] begin[:]
<ast.Yield object at 0x7da1b0d5a9e0> | keyword[def] identifier[installedDependents] ( identifier[self] , identifier[target] ):
literal[string]
keyword[for] identifier[dc] keyword[in] identifier[self] . identifier[store] . identifier[query] ( identifier[_DependencyConnector] ,
identifier[_DependencyConnector] . identifier[target] == identifier[target] ):
identifier[depends] = identifier[dependentsOf] ( identifier[dc] . identifier[installee] . identifier[__class__] )
keyword[if] identifier[self] . identifier[__class__] keyword[in] identifier[depends] :
keyword[yield] identifier[dc] . identifier[installee] | def installedDependents(self, target):
"""
Return an iterable of things installed on the target that
require this item.
"""
for dc in self.store.query(_DependencyConnector, _DependencyConnector.target == target):
depends = dependentsOf(dc.installee.__class__)
if self.__class__ in depends:
yield dc.installee # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['dc']] |
def permute(sequence, permutation):
"""Apply a permutation sigma({j}) to an arbitrary sequence.
:param sequence: Any finite length sequence ``[l_1,l_2,...l_n]``. If it is a list, tuple or str, the return type will be the same.
:param permutation: permutation image tuple
:type permutation: tuple
:return: The permuted sequence ``[l_sigma(1), l_sigma(2), ..., l_sigma(n)]``
:raise: BadPermutationError or ValueError
"""
if len(sequence) != len(permutation):
raise ValueError((sequence, permutation))
if not check_permutation(permutation):
raise BadPermutationError(str(permutation))
if type(sequence) in (list, tuple, str):
constructor = type(sequence)
else:
constructor = list
return constructor((sequence[p] for p in permutation)) | def function[permute, parameter[sequence, permutation]]:
constant[Apply a permutation sigma({j}) to an arbitrary sequence.
:param sequence: Any finite length sequence ``[l_1,l_2,...l_n]``. If it is a list, tuple or str, the return type will be the same.
:param permutation: permutation image tuple
:type permutation: tuple
:return: The permuted sequence ``[l_sigma(1), l_sigma(2), ..., l_sigma(n)]``
:raise: BadPermutationError or ValueError
]
if compare[call[name[len], parameter[name[sequence]]] not_equal[!=] call[name[len], parameter[name[permutation]]]] begin[:]
<ast.Raise object at 0x7da18bcca530>
if <ast.UnaryOp object at 0x7da18bcca590> begin[:]
<ast.Raise object at 0x7da18bccb760>
if compare[call[name[type], parameter[name[sequence]]] in tuple[[<ast.Name object at 0x7da18bccae90>, <ast.Name object at 0x7da18bcc9810>, <ast.Name object at 0x7da18bcc8e20>]]] begin[:]
variable[constructor] assign[=] call[name[type], parameter[name[sequence]]]
return[call[name[constructor], parameter[<ast.GeneratorExp object at 0x7da18bcc8370>]]] | keyword[def] identifier[permute] ( identifier[sequence] , identifier[permutation] ):
literal[string]
keyword[if] identifier[len] ( identifier[sequence] )!= identifier[len] ( identifier[permutation] ):
keyword[raise] identifier[ValueError] (( identifier[sequence] , identifier[permutation] ))
keyword[if] keyword[not] identifier[check_permutation] ( identifier[permutation] ):
keyword[raise] identifier[BadPermutationError] ( identifier[str] ( identifier[permutation] ))
keyword[if] identifier[type] ( identifier[sequence] ) keyword[in] ( identifier[list] , identifier[tuple] , identifier[str] ):
identifier[constructor] = identifier[type] ( identifier[sequence] )
keyword[else] :
identifier[constructor] = identifier[list]
keyword[return] identifier[constructor] (( identifier[sequence] [ identifier[p] ] keyword[for] identifier[p] keyword[in] identifier[permutation] )) | def permute(sequence, permutation):
"""Apply a permutation sigma({j}) to an arbitrary sequence.
:param sequence: Any finite length sequence ``[l_1,l_2,...l_n]``. If it is a list, tuple or str, the return type will be the same.
:param permutation: permutation image tuple
:type permutation: tuple
:return: The permuted sequence ``[l_sigma(1), l_sigma(2), ..., l_sigma(n)]``
:raise: BadPermutationError or ValueError
"""
if len(sequence) != len(permutation):
raise ValueError((sequence, permutation)) # depends on [control=['if'], data=[]]
if not check_permutation(permutation):
raise BadPermutationError(str(permutation)) # depends on [control=['if'], data=[]]
if type(sequence) in (list, tuple, str):
constructor = type(sequence) # depends on [control=['if'], data=[]]
else:
constructor = list
return constructor((sequence[p] for p in permutation)) |
def run(self, dag):
"""
Run the StochasticSwap pass on `dag`.
Args:
dag (DAGCircuit): DAG to map.
Returns:
DAGCircuit: A mapped DAG.
Raises:
TranspilerError: if the coupling map or the layout are not
compatible with the DAG
"""
if self.initial_layout is None:
if self.property_set["layout"]:
self.initial_layout = self.property_set["layout"]
else:
self.initial_layout = Layout.generate_trivial_layout(*dag.qregs.values())
if len(dag.qubits()) != len(self.initial_layout):
raise TranspilerError('The layout does not match the amount of qubits in the DAG')
if len(self.coupling_map.physical_qubits) != len(self.initial_layout):
raise TranspilerError(
"Mappers require to have the layout to be the same size as the coupling map")
self.input_layout = self.initial_layout.copy()
self.qregs = dag.qregs
if self.seed is None:
self.seed = np.random.randint(0, np.iinfo(np.int32).max)
self.rng = np.random.RandomState(self.seed)
logger.debug("StochasticSwap RandomState seeded with seed=%s", self.seed)
new_dag = self._mapper(dag, self.coupling_map, trials=self.trials)
# self.property_set["layout"] = self.initial_layout
return new_dag | def function[run, parameter[self, dag]]:
constant[
Run the StochasticSwap pass on `dag`.
Args:
dag (DAGCircuit): DAG to map.
Returns:
DAGCircuit: A mapped DAG.
Raises:
TranspilerError: if the coupling map or the layout are not
compatible with the DAG
]
if compare[name[self].initial_layout is constant[None]] begin[:]
if call[name[self].property_set][constant[layout]] begin[:]
name[self].initial_layout assign[=] call[name[self].property_set][constant[layout]]
if compare[call[name[len], parameter[call[name[dag].qubits, parameter[]]]] not_equal[!=] call[name[len], parameter[name[self].initial_layout]]] begin[:]
<ast.Raise object at 0x7da1b059cbe0>
if compare[call[name[len], parameter[name[self].coupling_map.physical_qubits]] not_equal[!=] call[name[len], parameter[name[self].initial_layout]]] begin[:]
<ast.Raise object at 0x7da1b059e890>
name[self].input_layout assign[=] call[name[self].initial_layout.copy, parameter[]]
name[self].qregs assign[=] name[dag].qregs
if compare[name[self].seed is constant[None]] begin[:]
name[self].seed assign[=] call[name[np].random.randint, parameter[constant[0], call[name[np].iinfo, parameter[name[np].int32]].max]]
name[self].rng assign[=] call[name[np].random.RandomState, parameter[name[self].seed]]
call[name[logger].debug, parameter[constant[StochasticSwap RandomState seeded with seed=%s], name[self].seed]]
variable[new_dag] assign[=] call[name[self]._mapper, parameter[name[dag], name[self].coupling_map]]
return[name[new_dag]] | keyword[def] identifier[run] ( identifier[self] , identifier[dag] ):
literal[string]
keyword[if] identifier[self] . identifier[initial_layout] keyword[is] keyword[None] :
keyword[if] identifier[self] . identifier[property_set] [ literal[string] ]:
identifier[self] . identifier[initial_layout] = identifier[self] . identifier[property_set] [ literal[string] ]
keyword[else] :
identifier[self] . identifier[initial_layout] = identifier[Layout] . identifier[generate_trivial_layout] (* identifier[dag] . identifier[qregs] . identifier[values] ())
keyword[if] identifier[len] ( identifier[dag] . identifier[qubits] ())!= identifier[len] ( identifier[self] . identifier[initial_layout] ):
keyword[raise] identifier[TranspilerError] ( literal[string] )
keyword[if] identifier[len] ( identifier[self] . identifier[coupling_map] . identifier[physical_qubits] )!= identifier[len] ( identifier[self] . identifier[initial_layout] ):
keyword[raise] identifier[TranspilerError] (
literal[string] )
identifier[self] . identifier[input_layout] = identifier[self] . identifier[initial_layout] . identifier[copy] ()
identifier[self] . identifier[qregs] = identifier[dag] . identifier[qregs]
keyword[if] identifier[self] . identifier[seed] keyword[is] keyword[None] :
identifier[self] . identifier[seed] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , identifier[np] . identifier[iinfo] ( identifier[np] . identifier[int32] ). identifier[max] )
identifier[self] . identifier[rng] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[self] . identifier[seed] )
identifier[logger] . identifier[debug] ( literal[string] , identifier[self] . identifier[seed] )
identifier[new_dag] = identifier[self] . identifier[_mapper] ( identifier[dag] , identifier[self] . identifier[coupling_map] , identifier[trials] = identifier[self] . identifier[trials] )
keyword[return] identifier[new_dag] | def run(self, dag):
"""
Run the StochasticSwap pass on `dag`.
Args:
dag (DAGCircuit): DAG to map.
Returns:
DAGCircuit: A mapped DAG.
Raises:
TranspilerError: if the coupling map or the layout are not
compatible with the DAG
"""
if self.initial_layout is None:
if self.property_set['layout']:
self.initial_layout = self.property_set['layout'] # depends on [control=['if'], data=[]]
else:
self.initial_layout = Layout.generate_trivial_layout(*dag.qregs.values()) # depends on [control=['if'], data=[]]
if len(dag.qubits()) != len(self.initial_layout):
raise TranspilerError('The layout does not match the amount of qubits in the DAG') # depends on [control=['if'], data=[]]
if len(self.coupling_map.physical_qubits) != len(self.initial_layout):
raise TranspilerError('Mappers require to have the layout to be the same size as the coupling map') # depends on [control=['if'], data=[]]
self.input_layout = self.initial_layout.copy()
self.qregs = dag.qregs
if self.seed is None:
self.seed = np.random.randint(0, np.iinfo(np.int32).max) # depends on [control=['if'], data=[]]
self.rng = np.random.RandomState(self.seed)
logger.debug('StochasticSwap RandomState seeded with seed=%s', self.seed)
new_dag = self._mapper(dag, self.coupling_map, trials=self.trials)
# self.property_set["layout"] = self.initial_layout
return new_dag |
def validation_scatter(self, log_lam, b, masks, pre_v, gp, flux,
time, med):
'''
Computes the scatter in the validation set.
'''
# Update the lambda matrix
self.lam[b] = 10 ** log_lam
# Validation set scatter
scatter = [None for i in range(len(masks))]
for i in range(len(masks)):
model = self.cv_compute(b, *pre_v[i])
try:
gpm, _ = gp.predict(flux - model - med, time[masks[i]])
except ValueError:
# Sometimes the model can have NaNs if
# `lambda` is a crazy value
return 1.e30
fdet = (flux - model)[masks[i]] - gpm
scatter[i] = 1.e6 * (1.4826 * np.nanmedian(np.abs(fdet / med -
np.nanmedian(fdet / med))) /
np.sqrt(len(masks[i])))
return np.max(scatter) | def function[validation_scatter, parameter[self, log_lam, b, masks, pre_v, gp, flux, time, med]]:
constant[
Computes the scatter in the validation set.
]
call[name[self].lam][name[b]] assign[=] binary_operation[constant[10] ** name[log_lam]]
variable[scatter] assign[=] <ast.ListComp object at 0x7da1b0e31db0>
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[masks]]]]]] begin[:]
variable[model] assign[=] call[name[self].cv_compute, parameter[name[b], <ast.Starred object at 0x7da1b0e33850>]]
<ast.Try object at 0x7da1b0e33700>
variable[fdet] assign[=] binary_operation[call[binary_operation[name[flux] - name[model]]][call[name[masks]][name[i]]] - name[gpm]]
call[name[scatter]][name[i]] assign[=] binary_operation[constant[1000000.0] * binary_operation[binary_operation[constant[1.4826] * call[name[np].nanmedian, parameter[call[name[np].abs, parameter[binary_operation[binary_operation[name[fdet] / name[med]] - call[name[np].nanmedian, parameter[binary_operation[name[fdet] / name[med]]]]]]]]]] / call[name[np].sqrt, parameter[call[name[len], parameter[call[name[masks]][name[i]]]]]]]]
return[call[name[np].max, parameter[name[scatter]]]] | keyword[def] identifier[validation_scatter] ( identifier[self] , identifier[log_lam] , identifier[b] , identifier[masks] , identifier[pre_v] , identifier[gp] , identifier[flux] ,
identifier[time] , identifier[med] ):
literal[string]
identifier[self] . identifier[lam] [ identifier[b] ]= literal[int] ** identifier[log_lam]
identifier[scatter] =[ keyword[None] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[masks] ))]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[masks] )):
identifier[model] = identifier[self] . identifier[cv_compute] ( identifier[b] ,* identifier[pre_v] [ identifier[i] ])
keyword[try] :
identifier[gpm] , identifier[_] = identifier[gp] . identifier[predict] ( identifier[flux] - identifier[model] - identifier[med] , identifier[time] [ identifier[masks] [ identifier[i] ]])
keyword[except] identifier[ValueError] :
keyword[return] literal[int]
identifier[fdet] =( identifier[flux] - identifier[model] )[ identifier[masks] [ identifier[i] ]]- identifier[gpm]
identifier[scatter] [ identifier[i] ]= literal[int] *( literal[int] * identifier[np] . identifier[nanmedian] ( identifier[np] . identifier[abs] ( identifier[fdet] / identifier[med] -
identifier[np] . identifier[nanmedian] ( identifier[fdet] / identifier[med] )))/
identifier[np] . identifier[sqrt] ( identifier[len] ( identifier[masks] [ identifier[i] ])))
keyword[return] identifier[np] . identifier[max] ( identifier[scatter] ) | def validation_scatter(self, log_lam, b, masks, pre_v, gp, flux, time, med):
"""
Computes the scatter in the validation set.
"""
# Update the lambda matrix
self.lam[b] = 10 ** log_lam
# Validation set scatter
scatter = [None for i in range(len(masks))]
for i in range(len(masks)):
model = self.cv_compute(b, *pre_v[i])
try:
(gpm, _) = gp.predict(flux - model - med, time[masks[i]]) # depends on [control=['try'], data=[]]
except ValueError:
# Sometimes the model can have NaNs if
# `lambda` is a crazy value
return 1e+30 # depends on [control=['except'], data=[]]
fdet = (flux - model)[masks[i]] - gpm
scatter[i] = 1000000.0 * (1.4826 * np.nanmedian(np.abs(fdet / med - np.nanmedian(fdet / med))) / np.sqrt(len(masks[i]))) # depends on [control=['for'], data=['i']]
return np.max(scatter) |
def _write_error_batch_wait(future, batch, database, measurement,
measurements):
"""Invoked by the IOLoop, this method checks if the HTTP request future
created by :meth:`_write_error_batch` is done. If it's done it will
evaluate the result, logging any error and moving on to the next
measurement. If there are no measurements left in the `measurements`
argument, it will consider the batch complete.
:param tornado.concurrent.Future future: The AsyncHTTPClient request future
:param str batch: The batch ID
:param str database: The database name for the measurements
:param str measurement: The measurement the future is for
:param list measurements: The measurements that failed to write as a batch
"""
if not future.done():
ioloop.IOLoop.current().add_timeout(
ioloop.IOLoop.current().time() + 0.025,
_write_error_batch_wait, future, batch, database, measurement,
measurements)
return
error = future.exception()
if isinstance(error, httpclient.HTTPError):
if error.code == 400:
LOGGER.error('Error writing %s measurement from batch %s to '
'InfluxDB (%s): %s', database, batch, error.code,
error.response.body)
LOGGER.info('Bad %s measurement from batch %s: %s',
database, batch, measurement)
else:
LOGGER.error('Error submitting individual metric for %s from '
'batch %s to InfluxDB (%s): %s',
database, batch, error.code)
measurements = measurements + [measurement]
elif isinstance(error, (TimeoutError, OSError, socket.error,
select.error, ssl.socket_error)):
LOGGER.error('Error submitting individual metric for %s from batch '
'%s to InfluxDB (%s)', database, batch, error)
_write_error_batch(batch, database, measurements + [measurement])
measurements = measurements + [measurement]
if not measurements:
LOGGER.info('All %s measurements from batch %s processed',
database, batch)
return
# Continue writing measurements
_write_error_batch(batch, database, measurements) | def function[_write_error_batch_wait, parameter[future, batch, database, measurement, measurements]]:
constant[Invoked by the IOLoop, this method checks if the HTTP request future
created by :meth:`_write_error_batch` is done. If it's done it will
evaluate the result, logging any error and moving on to the next
measurement. If there are no measurements left in the `measurements`
argument, it will consider the batch complete.
:param tornado.concurrent.Future future: The AsyncHTTPClient request future
:param str batch: The batch ID
:param str database: The database name for the measurements
:param str measurement: The measurement the future is for
:param list measurements: The measurements that failed to write as a batch
]
if <ast.UnaryOp object at 0x7da1b246a710> begin[:]
call[call[name[ioloop].IOLoop.current, parameter[]].add_timeout, parameter[binary_operation[call[call[name[ioloop].IOLoop.current, parameter[]].time, parameter[]] + constant[0.025]], name[_write_error_batch_wait], name[future], name[batch], name[database], name[measurement], name[measurements]]]
return[None]
variable[error] assign[=] call[name[future].exception, parameter[]]
if call[name[isinstance], parameter[name[error], name[httpclient].HTTPError]] begin[:]
if compare[name[error].code equal[==] constant[400]] begin[:]
call[name[LOGGER].error, parameter[constant[Error writing %s measurement from batch %s to InfluxDB (%s): %s], name[database], name[batch], name[error].code, name[error].response.body]]
call[name[LOGGER].info, parameter[constant[Bad %s measurement from batch %s: %s], name[database], name[batch], name[measurement]]]
if <ast.UnaryOp object at 0x7da1b246a0e0> begin[:]
call[name[LOGGER].info, parameter[constant[All %s measurements from batch %s processed], name[database], name[batch]]]
return[None]
call[name[_write_error_batch], parameter[name[batch], name[database], name[measurements]]] | keyword[def] identifier[_write_error_batch_wait] ( identifier[future] , identifier[batch] , identifier[database] , identifier[measurement] ,
identifier[measurements] ):
literal[string]
keyword[if] keyword[not] identifier[future] . identifier[done] ():
identifier[ioloop] . identifier[IOLoop] . identifier[current] (). identifier[add_timeout] (
identifier[ioloop] . identifier[IOLoop] . identifier[current] (). identifier[time] ()+ literal[int] ,
identifier[_write_error_batch_wait] , identifier[future] , identifier[batch] , identifier[database] , identifier[measurement] ,
identifier[measurements] )
keyword[return]
identifier[error] = identifier[future] . identifier[exception] ()
keyword[if] identifier[isinstance] ( identifier[error] , identifier[httpclient] . identifier[HTTPError] ):
keyword[if] identifier[error] . identifier[code] == literal[int] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] , identifier[database] , identifier[batch] , identifier[error] . identifier[code] ,
identifier[error] . identifier[response] . identifier[body] )
identifier[LOGGER] . identifier[info] ( literal[string] ,
identifier[database] , identifier[batch] , identifier[measurement] )
keyword[else] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] ,
identifier[database] , identifier[batch] , identifier[error] . identifier[code] )
identifier[measurements] = identifier[measurements] +[ identifier[measurement] ]
keyword[elif] identifier[isinstance] ( identifier[error] ,( identifier[TimeoutError] , identifier[OSError] , identifier[socket] . identifier[error] ,
identifier[select] . identifier[error] , identifier[ssl] . identifier[socket_error] )):
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] , identifier[database] , identifier[batch] , identifier[error] )
identifier[_write_error_batch] ( identifier[batch] , identifier[database] , identifier[measurements] +[ identifier[measurement] ])
identifier[measurements] = identifier[measurements] +[ identifier[measurement] ]
keyword[if] keyword[not] identifier[measurements] :
identifier[LOGGER] . identifier[info] ( literal[string] ,
identifier[database] , identifier[batch] )
keyword[return]
identifier[_write_error_batch] ( identifier[batch] , identifier[database] , identifier[measurements] ) | def _write_error_batch_wait(future, batch, database, measurement, measurements):
"""Invoked by the IOLoop, this method checks if the HTTP request future
created by :meth:`_write_error_batch` is done. If it's done it will
evaluate the result, logging any error and moving on to the next
measurement. If there are no measurements left in the `measurements`
argument, it will consider the batch complete.
:param tornado.concurrent.Future future: The AsyncHTTPClient request future
:param str batch: The batch ID
:param str database: The database name for the measurements
:param str measurement: The measurement the future is for
:param list measurements: The measurements that failed to write as a batch
"""
if not future.done():
ioloop.IOLoop.current().add_timeout(ioloop.IOLoop.current().time() + 0.025, _write_error_batch_wait, future, batch, database, measurement, measurements)
return # depends on [control=['if'], data=[]]
error = future.exception()
if isinstance(error, httpclient.HTTPError):
if error.code == 400:
LOGGER.error('Error writing %s measurement from batch %s to InfluxDB (%s): %s', database, batch, error.code, error.response.body)
LOGGER.info('Bad %s measurement from batch %s: %s', database, batch, measurement) # depends on [control=['if'], data=[]]
else:
LOGGER.error('Error submitting individual metric for %s from batch %s to InfluxDB (%s): %s', database, batch, error.code)
measurements = measurements + [measurement] # depends on [control=['if'], data=[]]
elif isinstance(error, (TimeoutError, OSError, socket.error, select.error, ssl.socket_error)):
LOGGER.error('Error submitting individual metric for %s from batch %s to InfluxDB (%s)', database, batch, error)
_write_error_batch(batch, database, measurements + [measurement])
measurements = measurements + [measurement] # depends on [control=['if'], data=[]]
if not measurements:
LOGGER.info('All %s measurements from batch %s processed', database, batch)
return # depends on [control=['if'], data=[]]
# Continue writing measurements
_write_error_batch(batch, database, measurements) |
def _persist_metadata(self):
"""
Write all script meta-data, including the persistent script Store.
The Store instance might contain arbitrary user data, like function objects, OpenCL contexts, or whatever other
non-serializable objects, both as keys or values.
Try to serialize the data, and if it fails, fall back to checking the store and removing all non-serializable
data.
"""
serializable_data = self.get_serializable()
try:
self._try_persist_metadata(serializable_data)
except TypeError:
# The user added non-serializable data to the store, so skip all non-serializable keys or values.
cleaned_data = Script._remove_non_serializable_store_entries(serializable_data["store"])
self._try_persist_metadata(cleaned_data) | def function[_persist_metadata, parameter[self]]:
constant[
Write all script meta-data, including the persistent script Store.
The Store instance might contain arbitrary user data, like function objects, OpenCL contexts, or whatever other
non-serializable objects, both as keys or values.
Try to serialize the data, and if it fails, fall back to checking the store and removing all non-serializable
data.
]
variable[serializable_data] assign[=] call[name[self].get_serializable, parameter[]]
<ast.Try object at 0x7da18eb54130> | keyword[def] identifier[_persist_metadata] ( identifier[self] ):
literal[string]
identifier[serializable_data] = identifier[self] . identifier[get_serializable] ()
keyword[try] :
identifier[self] . identifier[_try_persist_metadata] ( identifier[serializable_data] )
keyword[except] identifier[TypeError] :
identifier[cleaned_data] = identifier[Script] . identifier[_remove_non_serializable_store_entries] ( identifier[serializable_data] [ literal[string] ])
identifier[self] . identifier[_try_persist_metadata] ( identifier[cleaned_data] ) | def _persist_metadata(self):
"""
Write all script meta-data, including the persistent script Store.
The Store instance might contain arbitrary user data, like function objects, OpenCL contexts, or whatever other
non-serializable objects, both as keys or values.
Try to serialize the data, and if it fails, fall back to checking the store and removing all non-serializable
data.
"""
serializable_data = self.get_serializable()
try:
self._try_persist_metadata(serializable_data) # depends on [control=['try'], data=[]]
except TypeError:
# The user added non-serializable data to the store, so skip all non-serializable keys or values.
cleaned_data = Script._remove_non_serializable_store_entries(serializable_data['store'])
self._try_persist_metadata(cleaned_data) # depends on [control=['except'], data=[]] |
def wrap_cell_expression(source, template='{expr}'):
"""
If a cell ends in an expression that could be displaying a HoloViews
object (as determined using the AST), wrap it with a given prefix
and suffix string.
If the cell doesn't end in an expression, return the source unchanged.
"""
cell_output_types = (ast.IfExp, ast.BoolOp, ast.BinOp, ast.Call,
ast.Name, ast.Attribute)
try:
node = ast.parse(comment_out_magics(source))
except SyntaxError:
return source
filtered = source.splitlines()
if node.body != []:
last_expr = node.body[-1]
if not isinstance(last_expr, ast.Expr):
pass # Not an expression
elif isinstance(last_expr.value, cell_output_types):
# CAREFUL WITH UTF8!
expr_end_slice = filtered[last_expr.lineno-1][:last_expr.col_offset]
expr_start_slice = filtered[last_expr.lineno-1][last_expr.col_offset:]
start = '\n'.join(filtered[:last_expr.lineno-1]
+ ([expr_end_slice] if expr_end_slice else []))
ending = '\n'.join(([expr_start_slice] if expr_start_slice else [])
+ filtered[last_expr.lineno:])
# BUG!! Adds newline for 'foo'; <expr>
return start + '\n' + template.format(expr=ending)
return source | def function[wrap_cell_expression, parameter[source, template]]:
constant[
If a cell ends in an expression that could be displaying a HoloViews
object (as determined using the AST), wrap it with a given prefix
and suffix string.
If the cell doesn't end in an expression, return the source unchanged.
]
variable[cell_output_types] assign[=] tuple[[<ast.Attribute object at 0x7da18bcc99c0>, <ast.Attribute object at 0x7da18bcc8ca0>, <ast.Attribute object at 0x7da18bcc8b20>, <ast.Attribute object at 0x7da18bccac50>, <ast.Attribute object at 0x7da18bccbfa0>, <ast.Attribute object at 0x7da18bccba00>]]
<ast.Try object at 0x7da18bccb190>
variable[filtered] assign[=] call[name[source].splitlines, parameter[]]
if compare[name[node].body not_equal[!=] list[[]]] begin[:]
variable[last_expr] assign[=] call[name[node].body][<ast.UnaryOp object at 0x7da18bcca740>]
if <ast.UnaryOp object at 0x7da18bccabc0> begin[:]
pass
return[name[source]] | keyword[def] identifier[wrap_cell_expression] ( identifier[source] , identifier[template] = literal[string] ):
literal[string]
identifier[cell_output_types] =( identifier[ast] . identifier[IfExp] , identifier[ast] . identifier[BoolOp] , identifier[ast] . identifier[BinOp] , identifier[ast] . identifier[Call] ,
identifier[ast] . identifier[Name] , identifier[ast] . identifier[Attribute] )
keyword[try] :
identifier[node] = identifier[ast] . identifier[parse] ( identifier[comment_out_magics] ( identifier[source] ))
keyword[except] identifier[SyntaxError] :
keyword[return] identifier[source]
identifier[filtered] = identifier[source] . identifier[splitlines] ()
keyword[if] identifier[node] . identifier[body] !=[]:
identifier[last_expr] = identifier[node] . identifier[body] [- literal[int] ]
keyword[if] keyword[not] identifier[isinstance] ( identifier[last_expr] , identifier[ast] . identifier[Expr] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[last_expr] . identifier[value] , identifier[cell_output_types] ):
identifier[expr_end_slice] = identifier[filtered] [ identifier[last_expr] . identifier[lineno] - literal[int] ][: identifier[last_expr] . identifier[col_offset] ]
identifier[expr_start_slice] = identifier[filtered] [ identifier[last_expr] . identifier[lineno] - literal[int] ][ identifier[last_expr] . identifier[col_offset] :]
identifier[start] = literal[string] . identifier[join] ( identifier[filtered] [: identifier[last_expr] . identifier[lineno] - literal[int] ]
+([ identifier[expr_end_slice] ] keyword[if] identifier[expr_end_slice] keyword[else] []))
identifier[ending] = literal[string] . identifier[join] (([ identifier[expr_start_slice] ] keyword[if] identifier[expr_start_slice] keyword[else] [])
+ identifier[filtered] [ identifier[last_expr] . identifier[lineno] :])
keyword[return] identifier[start] + literal[string] + identifier[template] . identifier[format] ( identifier[expr] = identifier[ending] )
keyword[return] identifier[source] | def wrap_cell_expression(source, template='{expr}'):
"""
If a cell ends in an expression that could be displaying a HoloViews
object (as determined using the AST), wrap it with a given prefix
and suffix string.
If the cell doesn't end in an expression, return the source unchanged.
"""
cell_output_types = (ast.IfExp, ast.BoolOp, ast.BinOp, ast.Call, ast.Name, ast.Attribute)
try:
node = ast.parse(comment_out_magics(source)) # depends on [control=['try'], data=[]]
except SyntaxError:
return source # depends on [control=['except'], data=[]]
filtered = source.splitlines()
if node.body != []:
last_expr = node.body[-1]
if not isinstance(last_expr, ast.Expr):
pass # Not an expression # depends on [control=['if'], data=[]]
elif isinstance(last_expr.value, cell_output_types):
# CAREFUL WITH UTF8!
expr_end_slice = filtered[last_expr.lineno - 1][:last_expr.col_offset]
expr_start_slice = filtered[last_expr.lineno - 1][last_expr.col_offset:]
start = '\n'.join(filtered[:last_expr.lineno - 1] + ([expr_end_slice] if expr_end_slice else []))
ending = '\n'.join(([expr_start_slice] if expr_start_slice else []) + filtered[last_expr.lineno:])
# BUG!! Adds newline for 'foo'; <expr>
return start + '\n' + template.format(expr=ending) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return source |
def encode_bytes(obj, nsprefix=None):
""" Encodes an OpenMath element into a string.
:param obj: Object to encode as string.
:type obj: OMAny
:rtype: bytes
"""
node = encode_xml(obj, nsprefix)
return etree.tostring(node) | def function[encode_bytes, parameter[obj, nsprefix]]:
constant[ Encodes an OpenMath element into a string.
:param obj: Object to encode as string.
:type obj: OMAny
:rtype: bytes
]
variable[node] assign[=] call[name[encode_xml], parameter[name[obj], name[nsprefix]]]
return[call[name[etree].tostring, parameter[name[node]]]] | keyword[def] identifier[encode_bytes] ( identifier[obj] , identifier[nsprefix] = keyword[None] ):
literal[string]
identifier[node] = identifier[encode_xml] ( identifier[obj] , identifier[nsprefix] )
keyword[return] identifier[etree] . identifier[tostring] ( identifier[node] ) | def encode_bytes(obj, nsprefix=None):
""" Encodes an OpenMath element into a string.
:param obj: Object to encode as string.
:type obj: OMAny
:rtype: bytes
"""
node = encode_xml(obj, nsprefix)
return etree.tostring(node) |
def funcNrlTcMotPred(idxPrc,
varPixX,
varPixY,
NrlMdlChunk,
varNumTP,
aryBoxCar, # aryCond
path,
varNumNrlMdls,
varNumMtDrctn,
varPar,
queOut):
"""
Function for creating neural time course models.
This function should be used to create neural models if different
predictors for every motion direction are included.
"""
# # if hd5 method is used: open file for reading
# filename = 'aryBoxCar' + str(idxPrc) + '.hdf5'
# hdf5_path = os.path.join(path, filename)
# fileH = tables.openFile(hdf5_path, mode='r')
# Output array with pRF model time courses at all modelled standard
# deviations for current pixel position:
aryOut = np.empty((len(NrlMdlChunk), varNumTP, varNumMtDrctn),
dtype='float32')
# Prepare status indicator if this is the first of the parallel processes:
if idxPrc == 1:
# We create a status indicator for the time consuming pRF model finding
# algorithm. Number of steps of the status indicator:
varStsStpSze = 20
# Number of pRF models to fit:
varNumLoops = varNumNrlMdls/varPar
# Vector with pRF values at which to give status feedback:
vecStatus = np.linspace(0,
varNumLoops,
num=(varStsStpSze+1),
endpoint=True)
vecStatus = np.ceil(vecStatus)
vecStatus = vecStatus.astype(int)
# Vector with corresponding percentage values at which to give status
# feedback:
vecStatusPrc = np.linspace(0,
100,
num=(varStsStpSze+1),
endpoint=True)
vecStatusPrc = np.ceil(vecStatusPrc)
vecStatusPrc = vecStatusPrc.astype(int)
# Counter for status indicator:
varCntSts01 = 0
varCntSts02 = 0
# Loop through all Gauss parameters that are in this chunk
for idx, NrlMdlTrpl in enumerate(NrlMdlChunk):
# Status indicator (only used in the first of the parallel
# processes):
if idxPrc == 1:
# Status indicator:
if varCntSts02 == vecStatus[varCntSts01]:
# Prepare status message:
strStsMsg = ('---------Progress: ' +
str(vecStatusPrc[varCntSts01]) +
' % --- ' +
str(vecStatus[varCntSts01]) +
' loops out of ' +
str(varNumLoops))
print(strStsMsg)
# Only increment counter if the last value has not been
# reached yet:
if varCntSts01 < varStsStpSze:
varCntSts01 = varCntSts01 + int(1)
# x pos of Gauss model: NrlMdlTrpl[0]
# y pos of Gauss model: NrlMdlTrpl[1]
# std of Gauss model: NrlMdlTrpl[2]
# index of tng crv model: NrlMdlTrpl[3]
varTmpX = int(np.around(NrlMdlTrpl[0], 0))
varTmpY = int(np.around(NrlMdlTrpl[1], 0))
# Create pRF model (2D):
aryGauss = funcGauss2D(varPixX,
varPixY,
varTmpX,
varTmpY,
NrlMdlTrpl[2])
# Multiply pixel-wise box car model with Gaussian pRF models:
aryNrlTcTmp = np.multiply(aryBoxCar, aryGauss[:, :, None, None])
# Calculate sum across x- and y-dimensions - the 'area under the
# Gaussian surface'. This is essentially an unscaled version of the
# neural time course model (i.e. not yet scaled for the size of
# the pRF).
aryNrlTcTmp = np.sum(aryNrlTcTmp, axis=(0, 1))
# Normalise the nrl time course model to the size of the pRF. This
# gives us the ratio of 'activation' of the pRF at each time point,
# or, in other words, the neural time course model.
aryNrlTcTmp = np.divide(aryNrlTcTmp,
np.sum(aryGauss, axis=(0, 1)))
# Put model time courses into the function's output array:
aryOut[idx, :, :] = aryNrlTcTmp
# Status indicator (only used in the first of the parallel
# processes):
if idxPrc == 1:
# Increment status indicator counter:
varCntSts02 = varCntSts02 + 1
# Output list:
lstOut = [idxPrc,
aryOut,
]
queOut.put(lstOut) | def function[funcNrlTcMotPred, parameter[idxPrc, varPixX, varPixY, NrlMdlChunk, varNumTP, aryBoxCar, path, varNumNrlMdls, varNumMtDrctn, varPar, queOut]]:
constant[
Function for creating neural time course models.
This function should be used to create neural models if different
predictors for every motion direction are included.
]
variable[aryOut] assign[=] call[name[np].empty, parameter[tuple[[<ast.Call object at 0x7da1b10f4fd0>, <ast.Name object at 0x7da1b10f5120>, <ast.Name object at 0x7da1b10f4fa0>]]]]
if compare[name[idxPrc] equal[==] constant[1]] begin[:]
variable[varStsStpSze] assign[=] constant[20]
variable[varNumLoops] assign[=] binary_operation[name[varNumNrlMdls] / name[varPar]]
variable[vecStatus] assign[=] call[name[np].linspace, parameter[constant[0], name[varNumLoops]]]
variable[vecStatus] assign[=] call[name[np].ceil, parameter[name[vecStatus]]]
variable[vecStatus] assign[=] call[name[vecStatus].astype, parameter[name[int]]]
variable[vecStatusPrc] assign[=] call[name[np].linspace, parameter[constant[0], constant[100]]]
variable[vecStatusPrc] assign[=] call[name[np].ceil, parameter[name[vecStatusPrc]]]
variable[vecStatusPrc] assign[=] call[name[vecStatusPrc].astype, parameter[name[int]]]
variable[varCntSts01] assign[=] constant[0]
variable[varCntSts02] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b0f131f0>, <ast.Name object at 0x7da1b0f11f90>]]] in starred[call[name[enumerate], parameter[name[NrlMdlChunk]]]] begin[:]
if compare[name[idxPrc] equal[==] constant[1]] begin[:]
if compare[name[varCntSts02] equal[==] call[name[vecStatus]][name[varCntSts01]]] begin[:]
variable[strStsMsg] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[---------Progress: ] + call[name[str], parameter[call[name[vecStatusPrc]][name[varCntSts01]]]]] + constant[ % --- ]] + call[name[str], parameter[call[name[vecStatus]][name[varCntSts01]]]]] + constant[ loops out of ]] + call[name[str], parameter[name[varNumLoops]]]]
call[name[print], parameter[name[strStsMsg]]]
if compare[name[varCntSts01] less[<] name[varStsStpSze]] begin[:]
variable[varCntSts01] assign[=] binary_operation[name[varCntSts01] + call[name[int], parameter[constant[1]]]]
variable[varTmpX] assign[=] call[name[int], parameter[call[name[np].around, parameter[call[name[NrlMdlTrpl]][constant[0]], constant[0]]]]]
variable[varTmpY] assign[=] call[name[int], parameter[call[name[np].around, parameter[call[name[NrlMdlTrpl]][constant[1]], constant[0]]]]]
variable[aryGauss] assign[=] call[name[funcGauss2D], parameter[name[varPixX], name[varPixY], name[varTmpX], name[varTmpY], call[name[NrlMdlTrpl]][constant[2]]]]
variable[aryNrlTcTmp] assign[=] call[name[np].multiply, parameter[name[aryBoxCar], call[name[aryGauss]][tuple[[<ast.Slice object at 0x7da1b0f29630>, <ast.Slice object at 0x7da1b0f2aa70>, <ast.Constant object at 0x7da1b0f2b1f0>, <ast.Constant object at 0x7da1b0f2b220>]]]]]
variable[aryNrlTcTmp] assign[=] call[name[np].sum, parameter[name[aryNrlTcTmp]]]
variable[aryNrlTcTmp] assign[=] call[name[np].divide, parameter[name[aryNrlTcTmp], call[name[np].sum, parameter[name[aryGauss]]]]]
call[name[aryOut]][tuple[[<ast.Name object at 0x7da1b0f2ac80>, <ast.Slice object at 0x7da1b0f2bc10>, <ast.Slice object at 0x7da1b0f2ac50>]]] assign[=] name[aryNrlTcTmp]
if compare[name[idxPrc] equal[==] constant[1]] begin[:]
variable[varCntSts02] assign[=] binary_operation[name[varCntSts02] + constant[1]]
variable[lstOut] assign[=] list[[<ast.Name object at 0x7da1b0f29690>, <ast.Name object at 0x7da1b0f281f0>]]
call[name[queOut].put, parameter[name[lstOut]]] | keyword[def] identifier[funcNrlTcMotPred] ( identifier[idxPrc] ,
identifier[varPixX] ,
identifier[varPixY] ,
identifier[NrlMdlChunk] ,
identifier[varNumTP] ,
identifier[aryBoxCar] ,
identifier[path] ,
identifier[varNumNrlMdls] ,
identifier[varNumMtDrctn] ,
identifier[varPar] ,
identifier[queOut] ):
literal[string]
identifier[aryOut] = identifier[np] . identifier[empty] (( identifier[len] ( identifier[NrlMdlChunk] ), identifier[varNumTP] , identifier[varNumMtDrctn] ),
identifier[dtype] = literal[string] )
keyword[if] identifier[idxPrc] == literal[int] :
identifier[varStsStpSze] = literal[int]
identifier[varNumLoops] = identifier[varNumNrlMdls] / identifier[varPar]
identifier[vecStatus] = identifier[np] . identifier[linspace] ( literal[int] ,
identifier[varNumLoops] ,
identifier[num] =( identifier[varStsStpSze] + literal[int] ),
identifier[endpoint] = keyword[True] )
identifier[vecStatus] = identifier[np] . identifier[ceil] ( identifier[vecStatus] )
identifier[vecStatus] = identifier[vecStatus] . identifier[astype] ( identifier[int] )
identifier[vecStatusPrc] = identifier[np] . identifier[linspace] ( literal[int] ,
literal[int] ,
identifier[num] =( identifier[varStsStpSze] + literal[int] ),
identifier[endpoint] = keyword[True] )
identifier[vecStatusPrc] = identifier[np] . identifier[ceil] ( identifier[vecStatusPrc] )
identifier[vecStatusPrc] = identifier[vecStatusPrc] . identifier[astype] ( identifier[int] )
identifier[varCntSts01] = literal[int]
identifier[varCntSts02] = literal[int]
keyword[for] identifier[idx] , identifier[NrlMdlTrpl] keyword[in] identifier[enumerate] ( identifier[NrlMdlChunk] ):
keyword[if] identifier[idxPrc] == literal[int] :
keyword[if] identifier[varCntSts02] == identifier[vecStatus] [ identifier[varCntSts01] ]:
identifier[strStsMsg] =( literal[string] +
identifier[str] ( identifier[vecStatusPrc] [ identifier[varCntSts01] ])+
literal[string] +
identifier[str] ( identifier[vecStatus] [ identifier[varCntSts01] ])+
literal[string] +
identifier[str] ( identifier[varNumLoops] ))
identifier[print] ( identifier[strStsMsg] )
keyword[if] identifier[varCntSts01] < identifier[varStsStpSze] :
identifier[varCntSts01] = identifier[varCntSts01] + identifier[int] ( literal[int] )
identifier[varTmpX] = identifier[int] ( identifier[np] . identifier[around] ( identifier[NrlMdlTrpl] [ literal[int] ], literal[int] ))
identifier[varTmpY] = identifier[int] ( identifier[np] . identifier[around] ( identifier[NrlMdlTrpl] [ literal[int] ], literal[int] ))
identifier[aryGauss] = identifier[funcGauss2D] ( identifier[varPixX] ,
identifier[varPixY] ,
identifier[varTmpX] ,
identifier[varTmpY] ,
identifier[NrlMdlTrpl] [ literal[int] ])
identifier[aryNrlTcTmp] = identifier[np] . identifier[multiply] ( identifier[aryBoxCar] , identifier[aryGauss] [:,:, keyword[None] , keyword[None] ])
identifier[aryNrlTcTmp] = identifier[np] . identifier[sum] ( identifier[aryNrlTcTmp] , identifier[axis] =( literal[int] , literal[int] ))
identifier[aryNrlTcTmp] = identifier[np] . identifier[divide] ( identifier[aryNrlTcTmp] ,
identifier[np] . identifier[sum] ( identifier[aryGauss] , identifier[axis] =( literal[int] , literal[int] )))
identifier[aryOut] [ identifier[idx] ,:,:]= identifier[aryNrlTcTmp]
keyword[if] identifier[idxPrc] == literal[int] :
identifier[varCntSts02] = identifier[varCntSts02] + literal[int]
identifier[lstOut] =[ identifier[idxPrc] ,
identifier[aryOut] ,
]
identifier[queOut] . identifier[put] ( identifier[lstOut] ) | def funcNrlTcMotPred(idxPrc, varPixX, varPixY, NrlMdlChunk, varNumTP, aryBoxCar, path, varNumNrlMdls, varNumMtDrctn, varPar, queOut): # aryCond
'\n Function for creating neural time course models.\n This function should be used to create neural models if different\n predictors for every motion direction are included.\n '
# # if hd5 method is used: open file for reading
# filename = 'aryBoxCar' + str(idxPrc) + '.hdf5'
# hdf5_path = os.path.join(path, filename)
# fileH = tables.openFile(hdf5_path, mode='r')
# Output array with pRF model time courses at all modelled standard
# deviations for current pixel position:
aryOut = np.empty((len(NrlMdlChunk), varNumTP, varNumMtDrctn), dtype='float32')
# Prepare status indicator if this is the first of the parallel processes:
if idxPrc == 1:
# We create a status indicator for the time consuming pRF model finding
# algorithm. Number of steps of the status indicator:
varStsStpSze = 20
# Number of pRF models to fit:
varNumLoops = varNumNrlMdls / varPar
# Vector with pRF values at which to give status feedback:
vecStatus = np.linspace(0, varNumLoops, num=varStsStpSze + 1, endpoint=True)
vecStatus = np.ceil(vecStatus)
vecStatus = vecStatus.astype(int)
# Vector with corresponding percentage values at which to give status
# feedback:
vecStatusPrc = np.linspace(0, 100, num=varStsStpSze + 1, endpoint=True)
vecStatusPrc = np.ceil(vecStatusPrc)
vecStatusPrc = vecStatusPrc.astype(int)
# Counter for status indicator:
varCntSts01 = 0
varCntSts02 = 0 # depends on [control=['if'], data=[]]
# Loop through all Gauss parameters that are in this chunk
for (idx, NrlMdlTrpl) in enumerate(NrlMdlChunk):
# Status indicator (only used in the first of the parallel
# processes):
if idxPrc == 1:
# Status indicator:
if varCntSts02 == vecStatus[varCntSts01]:
# Prepare status message:
strStsMsg = '---------Progress: ' + str(vecStatusPrc[varCntSts01]) + ' % --- ' + str(vecStatus[varCntSts01]) + ' loops out of ' + str(varNumLoops)
print(strStsMsg)
# Only increment counter if the last value has not been
# reached yet:
if varCntSts01 < varStsStpSze:
varCntSts01 = varCntSts01 + int(1) # depends on [control=['if'], data=['varCntSts01']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# x pos of Gauss model: NrlMdlTrpl[0]
# y pos of Gauss model: NrlMdlTrpl[1]
# std of Gauss model: NrlMdlTrpl[2]
# index of tng crv model: NrlMdlTrpl[3]
varTmpX = int(np.around(NrlMdlTrpl[0], 0))
varTmpY = int(np.around(NrlMdlTrpl[1], 0))
# Create pRF model (2D):
aryGauss = funcGauss2D(varPixX, varPixY, varTmpX, varTmpY, NrlMdlTrpl[2])
# Multiply pixel-wise box car model with Gaussian pRF models:
aryNrlTcTmp = np.multiply(aryBoxCar, aryGauss[:, :, None, None])
# Calculate sum across x- and y-dimensions - the 'area under the
# Gaussian surface'. This is essentially an unscaled version of the
# neural time course model (i.e. not yet scaled for the size of
# the pRF).
aryNrlTcTmp = np.sum(aryNrlTcTmp, axis=(0, 1))
# Normalise the nrl time course model to the size of the pRF. This
# gives us the ratio of 'activation' of the pRF at each time point,
# or, in other words, the neural time course model.
aryNrlTcTmp = np.divide(aryNrlTcTmp, np.sum(aryGauss, axis=(0, 1)))
# Put model time courses into the function's output array:
aryOut[idx, :, :] = aryNrlTcTmp
# Status indicator (only used in the first of the parallel
# processes):
if idxPrc == 1:
# Increment status indicator counter:
varCntSts02 = varCntSts02 + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Output list:
lstOut = [idxPrc, aryOut]
queOut.put(lstOut) |
def compute_batch(self, duplicate_manager=None, context_manager=None):
"""
Computes the elements of the batch sequentially by penalizing the acquisition.
"""
from ...acquisitions import AcquisitionLP
assert isinstance(self.acquisition, AcquisitionLP)
self.acquisition.update_batches(None,None,None)
# --- GET first element in the batch
X_batch = self.acquisition.optimize()[0]
k=1
if self.batch_size >1:
# ---------- Approximate the constants of the the method
L = estimate_L(self.acquisition.model.model,self.acquisition.space.get_bounds())
Min = self.acquisition.model.model.Y.min()
# --- GET the remaining elements
while k<self.batch_size:
self.acquisition.update_batches(X_batch,L,Min)
new_sample = self.acquisition.optimize()[0]
X_batch = np.vstack((X_batch,new_sample))
k +=1
# --- Back to the non-penalized acquisition
self.acquisition.update_batches(None,None,None)
return X_batch | def function[compute_batch, parameter[self, duplicate_manager, context_manager]]:
constant[
Computes the elements of the batch sequentially by penalizing the acquisition.
]
from relative_module[acquisitions] import module[AcquisitionLP]
assert[call[name[isinstance], parameter[name[self].acquisition, name[AcquisitionLP]]]]
call[name[self].acquisition.update_batches, parameter[constant[None], constant[None], constant[None]]]
variable[X_batch] assign[=] call[call[name[self].acquisition.optimize, parameter[]]][constant[0]]
variable[k] assign[=] constant[1]
if compare[name[self].batch_size greater[>] constant[1]] begin[:]
variable[L] assign[=] call[name[estimate_L], parameter[name[self].acquisition.model.model, call[name[self].acquisition.space.get_bounds, parameter[]]]]
variable[Min] assign[=] call[name[self].acquisition.model.model.Y.min, parameter[]]
while compare[name[k] less[<] name[self].batch_size] begin[:]
call[name[self].acquisition.update_batches, parameter[name[X_batch], name[L], name[Min]]]
variable[new_sample] assign[=] call[call[name[self].acquisition.optimize, parameter[]]][constant[0]]
variable[X_batch] assign[=] call[name[np].vstack, parameter[tuple[[<ast.Name object at 0x7da18bcc8a00>, <ast.Name object at 0x7da18bccbd60>]]]]
<ast.AugAssign object at 0x7da18bcc89d0>
call[name[self].acquisition.update_batches, parameter[constant[None], constant[None], constant[None]]]
return[name[X_batch]] | keyword[def] identifier[compute_batch] ( identifier[self] , identifier[duplicate_manager] = keyword[None] , identifier[context_manager] = keyword[None] ):
literal[string]
keyword[from] ... identifier[acquisitions] keyword[import] identifier[AcquisitionLP]
keyword[assert] identifier[isinstance] ( identifier[self] . identifier[acquisition] , identifier[AcquisitionLP] )
identifier[self] . identifier[acquisition] . identifier[update_batches] ( keyword[None] , keyword[None] , keyword[None] )
identifier[X_batch] = identifier[self] . identifier[acquisition] . identifier[optimize] ()[ literal[int] ]
identifier[k] = literal[int]
keyword[if] identifier[self] . identifier[batch_size] > literal[int] :
identifier[L] = identifier[estimate_L] ( identifier[self] . identifier[acquisition] . identifier[model] . identifier[model] , identifier[self] . identifier[acquisition] . identifier[space] . identifier[get_bounds] ())
identifier[Min] = identifier[self] . identifier[acquisition] . identifier[model] . identifier[model] . identifier[Y] . identifier[min] ()
keyword[while] identifier[k] < identifier[self] . identifier[batch_size] :
identifier[self] . identifier[acquisition] . identifier[update_batches] ( identifier[X_batch] , identifier[L] , identifier[Min] )
identifier[new_sample] = identifier[self] . identifier[acquisition] . identifier[optimize] ()[ literal[int] ]
identifier[X_batch] = identifier[np] . identifier[vstack] (( identifier[X_batch] , identifier[new_sample] ))
identifier[k] += literal[int]
identifier[self] . identifier[acquisition] . identifier[update_batches] ( keyword[None] , keyword[None] , keyword[None] )
keyword[return] identifier[X_batch] | def compute_batch(self, duplicate_manager=None, context_manager=None):
"""
Computes the elements of the batch sequentially by penalizing the acquisition.
"""
from ...acquisitions import AcquisitionLP
assert isinstance(self.acquisition, AcquisitionLP)
self.acquisition.update_batches(None, None, None)
# --- GET first element in the batch
X_batch = self.acquisition.optimize()[0]
k = 1
if self.batch_size > 1:
# ---------- Approximate the constants of the the method
L = estimate_L(self.acquisition.model.model, self.acquisition.space.get_bounds())
Min = self.acquisition.model.model.Y.min() # depends on [control=['if'], data=[]]
# --- GET the remaining elements
while k < self.batch_size:
self.acquisition.update_batches(X_batch, L, Min)
new_sample = self.acquisition.optimize()[0]
X_batch = np.vstack((X_batch, new_sample))
k += 1 # depends on [control=['while'], data=['k']]
# --- Back to the non-penalized acquisition
self.acquisition.update_batches(None, None, None)
return X_batch |
def pr_auc(fg_vals, bg_vals):
"""
Computes the Precision-Recall Area Under Curve (PR AUC)
Parameters
----------
fg_vals : array_like
list of values for positive set
bg_vals : array_like
list of values for negative set
Returns
-------
score : float
PR AUC score
"""
# Create y_labels
y_true, y_score = values_to_labels(fg_vals, bg_vals)
return average_precision_score(y_true, y_score) | def function[pr_auc, parameter[fg_vals, bg_vals]]:
constant[
Computes the Precision-Recall Area Under Curve (PR AUC)
Parameters
----------
fg_vals : array_like
list of values for positive set
bg_vals : array_like
list of values for negative set
Returns
-------
score : float
PR AUC score
]
<ast.Tuple object at 0x7da1b10a67d0> assign[=] call[name[values_to_labels], parameter[name[fg_vals], name[bg_vals]]]
return[call[name[average_precision_score], parameter[name[y_true], name[y_score]]]] | keyword[def] identifier[pr_auc] ( identifier[fg_vals] , identifier[bg_vals] ):
literal[string]
identifier[y_true] , identifier[y_score] = identifier[values_to_labels] ( identifier[fg_vals] , identifier[bg_vals] )
keyword[return] identifier[average_precision_score] ( identifier[y_true] , identifier[y_score] ) | def pr_auc(fg_vals, bg_vals):
"""
Computes the Precision-Recall Area Under Curve (PR AUC)
Parameters
----------
fg_vals : array_like
list of values for positive set
bg_vals : array_like
list of values for negative set
Returns
-------
score : float
PR AUC score
"""
# Create y_labels
(y_true, y_score) = values_to_labels(fg_vals, bg_vals)
return average_precision_score(y_true, y_score) |
def kill_chain_phase_names(instance):
"""Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
"""
if instance['type'] in enums.KILL_CHAIN_PHASE_USES and 'kill_chain_phases' in instance:
for phase in instance['kill_chain_phases']:
if 'kill_chain_name' not in phase:
# Since this field is required, schemas will already catch the error
return
chain_name = phase['kill_chain_name']
if not chain_name.islower() or '_' in chain_name or ' ' in chain_name:
yield JSONError("kill_chain_name '%s' should be all lowercase"
" and use hyphens instead of spaces or "
"underscores as word separators." % chain_name,
instance['id'], 'kill-chain-names')
phase_name = phase['phase_name']
if not phase_name.islower() or '_' in phase_name or ' ' in phase_name:
yield JSONError("phase_name '%s' should be all lowercase and "
"use hyphens instead of spaces or underscores "
"as word separators." % phase_name,
instance['id'], 'kill-chain-names') | def function[kill_chain_phase_names, parameter[instance]]:
constant[Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
]
if <ast.BoolOp object at 0x7da1b0fc45e0> begin[:]
for taget[name[phase]] in starred[call[name[instance]][constant[kill_chain_phases]]] begin[:]
if compare[constant[kill_chain_name] <ast.NotIn object at 0x7da2590d7190> name[phase]] begin[:]
return[None]
variable[chain_name] assign[=] call[name[phase]][constant[kill_chain_name]]
if <ast.BoolOp object at 0x7da1b0f5a800> begin[:]
<ast.Yield object at 0x7da1b0f59ae0>
variable[phase_name] assign[=] call[name[phase]][constant[phase_name]]
if <ast.BoolOp object at 0x7da1b0f599c0> begin[:]
<ast.Yield object at 0x7da1b0f5b790> | keyword[def] identifier[kill_chain_phase_names] ( identifier[instance] ):
literal[string]
keyword[if] identifier[instance] [ literal[string] ] keyword[in] identifier[enums] . identifier[KILL_CHAIN_PHASE_USES] keyword[and] literal[string] keyword[in] identifier[instance] :
keyword[for] identifier[phase] keyword[in] identifier[instance] [ literal[string] ]:
keyword[if] literal[string] keyword[not] keyword[in] identifier[phase] :
keyword[return]
identifier[chain_name] = identifier[phase] [ literal[string] ]
keyword[if] keyword[not] identifier[chain_name] . identifier[islower] () keyword[or] literal[string] keyword[in] identifier[chain_name] keyword[or] literal[string] keyword[in] identifier[chain_name] :
keyword[yield] identifier[JSONError] ( literal[string]
literal[string]
literal[string] % identifier[chain_name] ,
identifier[instance] [ literal[string] ], literal[string] )
identifier[phase_name] = identifier[phase] [ literal[string] ]
keyword[if] keyword[not] identifier[phase_name] . identifier[islower] () keyword[or] literal[string] keyword[in] identifier[phase_name] keyword[or] literal[string] keyword[in] identifier[phase_name] :
keyword[yield] identifier[JSONError] ( literal[string]
literal[string]
literal[string] % identifier[phase_name] ,
identifier[instance] [ literal[string] ], literal[string] ) | def kill_chain_phase_names(instance):
"""Ensure the `kill_chain_name` and `phase_name` properties of
`kill_chain_phase` objects follow naming style conventions.
"""
if instance['type'] in enums.KILL_CHAIN_PHASE_USES and 'kill_chain_phases' in instance:
for phase in instance['kill_chain_phases']:
if 'kill_chain_name' not in phase:
# Since this field is required, schemas will already catch the error
return # depends on [control=['if'], data=[]]
chain_name = phase['kill_chain_name']
if not chain_name.islower() or '_' in chain_name or ' ' in chain_name:
yield JSONError("kill_chain_name '%s' should be all lowercase and use hyphens instead of spaces or underscores as word separators." % chain_name, instance['id'], 'kill-chain-names') # depends on [control=['if'], data=[]]
phase_name = phase['phase_name']
if not phase_name.islower() or '_' in phase_name or ' ' in phase_name:
yield JSONError("phase_name '%s' should be all lowercase and use hyphens instead of spaces or underscores as word separators." % phase_name, instance['id'], 'kill-chain-names') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['phase']] # depends on [control=['if'], data=[]] |
def report_errors(audit, url):
"""
Args:
audit: results of `AxeCoreAudit.do_audit()`.
url: the url of the page being audited.
Raises: `AccessibilityError`
"""
errors = AxeCoreAudit.get_errors(audit)
if errors["total"] > 0:
msg = u"URL '{}' has {} errors:\n\n{}".format(
url,
errors["total"],
AxeCoreAudit.format_errors(errors["errors"])
)
raise AccessibilityError(msg) | def function[report_errors, parameter[audit, url]]:
constant[
Args:
audit: results of `AxeCoreAudit.do_audit()`.
url: the url of the page being audited.
Raises: `AccessibilityError`
]
variable[errors] assign[=] call[name[AxeCoreAudit].get_errors, parameter[name[audit]]]
if compare[call[name[errors]][constant[total]] greater[>] constant[0]] begin[:]
variable[msg] assign[=] call[constant[URL '{}' has {} errors:
{}].format, parameter[name[url], call[name[errors]][constant[total]], call[name[AxeCoreAudit].format_errors, parameter[call[name[errors]][constant[errors]]]]]]
<ast.Raise object at 0x7da18fe92fe0> | keyword[def] identifier[report_errors] ( identifier[audit] , identifier[url] ):
literal[string]
identifier[errors] = identifier[AxeCoreAudit] . identifier[get_errors] ( identifier[audit] )
keyword[if] identifier[errors] [ literal[string] ]> literal[int] :
identifier[msg] = literal[string] . identifier[format] (
identifier[url] ,
identifier[errors] [ literal[string] ],
identifier[AxeCoreAudit] . identifier[format_errors] ( identifier[errors] [ literal[string] ])
)
keyword[raise] identifier[AccessibilityError] ( identifier[msg] ) | def report_errors(audit, url):
"""
Args:
audit: results of `AxeCoreAudit.do_audit()`.
url: the url of the page being audited.
Raises: `AccessibilityError`
"""
errors = AxeCoreAudit.get_errors(audit)
if errors['total'] > 0:
msg = u"URL '{}' has {} errors:\n\n{}".format(url, errors['total'], AxeCoreAudit.format_errors(errors['errors']))
raise AccessibilityError(msg) # depends on [control=['if'], data=[]] |
def move(self, parent, index=None):
"""
Move this group to a new parent.
:param parent: The new parent group; if None will be root group.
:type parent: :class:`keepassdb.model.Group`
:param index: The 0-based index within the parent (defaults to appending
group to end of parent's children).
:type index: int
"""
return self.db.move_group(self, parent, index=index) | def function[move, parameter[self, parent, index]]:
constant[
Move this group to a new parent.
:param parent: The new parent group; if None will be root group.
:type parent: :class:`keepassdb.model.Group`
:param index: The 0-based index within the parent (defaults to appending
group to end of parent's children).
:type index: int
]
return[call[name[self].db.move_group, parameter[name[self], name[parent]]]] | keyword[def] identifier[move] ( identifier[self] , identifier[parent] , identifier[index] = keyword[None] ):
literal[string]
keyword[return] identifier[self] . identifier[db] . identifier[move_group] ( identifier[self] , identifier[parent] , identifier[index] = identifier[index] ) | def move(self, parent, index=None):
"""
Move this group to a new parent.
:param parent: The new parent group; if None will be root group.
:type parent: :class:`keepassdb.model.Group`
:param index: The 0-based index within the parent (defaults to appending
group to end of parent's children).
:type index: int
"""
return self.db.move_group(self, parent, index=index) |
def application(self, app_id):
"""Update the context to use the given application ID and stop the
application when done.
For example::
with cn.application(54):
# All commands in this block will use app_id=54.
# On leaving the block `cn.send_signal("stop", 54)` is
# automatically called.
"""
# Get a new context and add a method that will be called before the
# context is removed from the stack.
context = self(app_id=app_id)
context.before_close(lambda: self.send_signal("stop"))
return context | def function[application, parameter[self, app_id]]:
constant[Update the context to use the given application ID and stop the
application when done.
For example::
with cn.application(54):
# All commands in this block will use app_id=54.
# On leaving the block `cn.send_signal("stop", 54)` is
# automatically called.
]
variable[context] assign[=] call[name[self], parameter[]]
call[name[context].before_close, parameter[<ast.Lambda object at 0x7da1b19ca350>]]
return[name[context]] | keyword[def] identifier[application] ( identifier[self] , identifier[app_id] ):
literal[string]
identifier[context] = identifier[self] ( identifier[app_id] = identifier[app_id] )
identifier[context] . identifier[before_close] ( keyword[lambda] : identifier[self] . identifier[send_signal] ( literal[string] ))
keyword[return] identifier[context] | def application(self, app_id):
"""Update the context to use the given application ID and stop the
application when done.
For example::
with cn.application(54):
# All commands in this block will use app_id=54.
# On leaving the block `cn.send_signal("stop", 54)` is
# automatically called.
"""
# Get a new context and add a method that will be called before the
# context is removed from the stack.
context = self(app_id=app_id)
context.before_close(lambda : self.send_signal('stop'))
return context |
def validate(cls, mapper_spec):
"""Validates mapper spec and all mapper parameters.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError("Mapper input reader class mismatch")
params = _get_params(mapper_spec)
if cls.BLOB_KEY_PARAM not in params:
raise BadReaderParamsError("Must specify 'blob_key' for mapper input")
blob_key = params[cls.BLOB_KEY_PARAM]
blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
if not blob_info:
raise BadReaderParamsError("Could not find blobinfo for key %s" %
blob_key) | def function[validate, parameter[cls, mapper_spec]]:
constant[Validates mapper spec and all mapper parameters.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
]
if compare[call[name[mapper_spec].input_reader_class, parameter[]] not_equal[!=] name[cls]] begin[:]
<ast.Raise object at 0x7da18eb56620>
variable[params] assign[=] call[name[_get_params], parameter[name[mapper_spec]]]
if compare[name[cls].BLOB_KEY_PARAM <ast.NotIn object at 0x7da2590d7190> name[params]] begin[:]
<ast.Raise object at 0x7da18eb544c0>
variable[blob_key] assign[=] call[name[params]][name[cls].BLOB_KEY_PARAM]
variable[blob_info] assign[=] call[name[blobstore].BlobInfo.get, parameter[call[name[blobstore].BlobKey, parameter[name[blob_key]]]]]
if <ast.UnaryOp object at 0x7da18eb57340> begin[:]
<ast.Raise object at 0x7da18eb550f0> | keyword[def] identifier[validate] ( identifier[cls] , identifier[mapper_spec] ):
literal[string]
keyword[if] identifier[mapper_spec] . identifier[input_reader_class] ()!= identifier[cls] :
keyword[raise] identifier[BadReaderParamsError] ( literal[string] )
identifier[params] = identifier[_get_params] ( identifier[mapper_spec] )
keyword[if] identifier[cls] . identifier[BLOB_KEY_PARAM] keyword[not] keyword[in] identifier[params] :
keyword[raise] identifier[BadReaderParamsError] ( literal[string] )
identifier[blob_key] = identifier[params] [ identifier[cls] . identifier[BLOB_KEY_PARAM] ]
identifier[blob_info] = identifier[blobstore] . identifier[BlobInfo] . identifier[get] ( identifier[blobstore] . identifier[BlobKey] ( identifier[blob_key] ))
keyword[if] keyword[not] identifier[blob_info] :
keyword[raise] identifier[BadReaderParamsError] ( literal[string] %
identifier[blob_key] ) | def validate(cls, mapper_spec):
"""Validates mapper spec and all mapper parameters.
Args:
mapper_spec: The MapperSpec for this InputReader.
Raises:
BadReaderParamsError: required parameters are missing or invalid.
"""
if mapper_spec.input_reader_class() != cls:
raise BadReaderParamsError('Mapper input reader class mismatch') # depends on [control=['if'], data=[]]
params = _get_params(mapper_spec)
if cls.BLOB_KEY_PARAM not in params:
raise BadReaderParamsError("Must specify 'blob_key' for mapper input") # depends on [control=['if'], data=[]]
blob_key = params[cls.BLOB_KEY_PARAM]
blob_info = blobstore.BlobInfo.get(blobstore.BlobKey(blob_key))
if not blob_info:
raise BadReaderParamsError('Could not find blobinfo for key %s' % blob_key) # depends on [control=['if'], data=[]] |
def keep_path(self, path):
"""
Given a path, returns True if the path should be kept, False if it should be cut.
"""
if len(path.addr_trace) < 2:
return True
return self.should_take_exit(path.addr_trace[-2], path.addr_trace[-1]) | def function[keep_path, parameter[self, path]]:
constant[
Given a path, returns True if the path should be kept, False if it should be cut.
]
if compare[call[name[len], parameter[name[path].addr_trace]] less[<] constant[2]] begin[:]
return[constant[True]]
return[call[name[self].should_take_exit, parameter[call[name[path].addr_trace][<ast.UnaryOp object at 0x7da18bc70b20>], call[name[path].addr_trace][<ast.UnaryOp object at 0x7da18bc73a00>]]]] | keyword[def] identifier[keep_path] ( identifier[self] , identifier[path] ):
literal[string]
keyword[if] identifier[len] ( identifier[path] . identifier[addr_trace] )< literal[int] :
keyword[return] keyword[True]
keyword[return] identifier[self] . identifier[should_take_exit] ( identifier[path] . identifier[addr_trace] [- literal[int] ], identifier[path] . identifier[addr_trace] [- literal[int] ]) | def keep_path(self, path):
"""
Given a path, returns True if the path should be kept, False if it should be cut.
"""
if len(path.addr_trace) < 2:
return True # depends on [control=['if'], data=[]]
return self.should_take_exit(path.addr_trace[-2], path.addr_trace[-1]) |
def analyze(self, handle, filename):
"""Submit a file for analysis.
:type handle: File handle
:param handle: Handle to file to upload for analysis.
:type filename: str
:param filename: File name.
:rtype: str
:return: Task ID as a string
"""
# ensure the handle is at offset 0.
handle.seek(0)
try:
return self.jbx.submit_sample(handle)['webids'][0]
except (jbxapi.JoeException, KeyError, IndexError) as e:
raise sandboxapi.SandboxError("error in analyze: {e}".format(e=e)) | def function[analyze, parameter[self, handle, filename]]:
constant[Submit a file for analysis.
:type handle: File handle
:param handle: Handle to file to upload for analysis.
:type filename: str
:param filename: File name.
:rtype: str
:return: Task ID as a string
]
call[name[handle].seek, parameter[constant[0]]]
<ast.Try object at 0x7da18bccb9d0> | keyword[def] identifier[analyze] ( identifier[self] , identifier[handle] , identifier[filename] ):
literal[string]
identifier[handle] . identifier[seek] ( literal[int] )
keyword[try] :
keyword[return] identifier[self] . identifier[jbx] . identifier[submit_sample] ( identifier[handle] )[ literal[string] ][ literal[int] ]
keyword[except] ( identifier[jbxapi] . identifier[JoeException] , identifier[KeyError] , identifier[IndexError] ) keyword[as] identifier[e] :
keyword[raise] identifier[sandboxapi] . identifier[SandboxError] ( literal[string] . identifier[format] ( identifier[e] = identifier[e] )) | def analyze(self, handle, filename):
"""Submit a file for analysis.
:type handle: File handle
:param handle: Handle to file to upload for analysis.
:type filename: str
:param filename: File name.
:rtype: str
:return: Task ID as a string
"""
# ensure the handle is at offset 0.
handle.seek(0)
try:
return self.jbx.submit_sample(handle)['webids'][0] # depends on [control=['try'], data=[]]
except (jbxapi.JoeException, KeyError, IndexError) as e:
raise sandboxapi.SandboxError('error in analyze: {e}'.format(e=e)) # depends on [control=['except'], data=['e']] |
def format_call(self, api_version, api_call):
""" Return properly formatted QualysGuard API call according to api_version etiquette.
"""
# Remove possible starting slashes or trailing question marks in call.
api_call = api_call.lstrip('/')
api_call = api_call.rstrip('?')
logger.debug('api_call post strip =\n%s' % api_call)
# Make sure call always ends in slash for API v2 calls.
if (api_version == 2 and api_call[-1] != '/'):
# Add slash.
logger.debug('Adding "/" to api_call.')
api_call += '/'
if api_call in self.api_methods_with_trailing_slash[api_version]:
# Add slash.
logger.debug('Adding "/" to api_call.')
api_call += '/'
return api_call | def function[format_call, parameter[self, api_version, api_call]]:
constant[ Return properly formatted QualysGuard API call according to api_version etiquette.
]
variable[api_call] assign[=] call[name[api_call].lstrip, parameter[constant[/]]]
variable[api_call] assign[=] call[name[api_call].rstrip, parameter[constant[?]]]
call[name[logger].debug, parameter[binary_operation[constant[api_call post strip =
%s] <ast.Mod object at 0x7da2590d6920> name[api_call]]]]
if <ast.BoolOp object at 0x7da1b2346d10> begin[:]
call[name[logger].debug, parameter[constant[Adding "/" to api_call.]]]
<ast.AugAssign object at 0x7da1b2345ed0>
if compare[name[api_call] in call[name[self].api_methods_with_trailing_slash][name[api_version]]] begin[:]
call[name[logger].debug, parameter[constant[Adding "/" to api_call.]]]
<ast.AugAssign object at 0x7da1b2346050>
return[name[api_call]] | keyword[def] identifier[format_call] ( identifier[self] , identifier[api_version] , identifier[api_call] ):
literal[string]
identifier[api_call] = identifier[api_call] . identifier[lstrip] ( literal[string] )
identifier[api_call] = identifier[api_call] . identifier[rstrip] ( literal[string] )
identifier[logger] . identifier[debug] ( literal[string] % identifier[api_call] )
keyword[if] ( identifier[api_version] == literal[int] keyword[and] identifier[api_call] [- literal[int] ]!= literal[string] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[api_call] += literal[string]
keyword[if] identifier[api_call] keyword[in] identifier[self] . identifier[api_methods_with_trailing_slash] [ identifier[api_version] ]:
identifier[logger] . identifier[debug] ( literal[string] )
identifier[api_call] += literal[string]
keyword[return] identifier[api_call] | def format_call(self, api_version, api_call):
""" Return properly formatted QualysGuard API call according to api_version etiquette.
"""
# Remove possible starting slashes or trailing question marks in call.
api_call = api_call.lstrip('/')
api_call = api_call.rstrip('?')
logger.debug('api_call post strip =\n%s' % api_call)
# Make sure call always ends in slash for API v2 calls.
if api_version == 2 and api_call[-1] != '/':
# Add slash.
logger.debug('Adding "/" to api_call.')
api_call += '/' # depends on [control=['if'], data=[]]
if api_call in self.api_methods_with_trailing_slash[api_version]:
# Add slash.
logger.debug('Adding "/" to api_call.')
api_call += '/' # depends on [control=['if'], data=['api_call']]
return api_call |
def as_html(self):
"""
Return the OGC WKT which corresponds to the CRS as HTML.
For example::
>>> print(get(27700).as_html()) # doctest: +ELLIPSIS
<div class="syntax"><pre><span class="gh">PROJCS</span><span...
"""
url = '{prefix}{code}.html?download'.format(prefix=EPSG_IO_URL,
code=self.id)
return requests.get(url).text | def function[as_html, parameter[self]]:
constant[
Return the OGC WKT which corresponds to the CRS as HTML.
For example::
>>> print(get(27700).as_html()) # doctest: +ELLIPSIS
<div class="syntax"><pre><span class="gh">PROJCS</span><span...
]
variable[url] assign[=] call[constant[{prefix}{code}.html?download].format, parameter[]]
return[call[name[requests].get, parameter[name[url]]].text] | keyword[def] identifier[as_html] ( identifier[self] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[prefix] = identifier[EPSG_IO_URL] ,
identifier[code] = identifier[self] . identifier[id] )
keyword[return] identifier[requests] . identifier[get] ( identifier[url] ). identifier[text] | def as_html(self):
"""
Return the OGC WKT which corresponds to the CRS as HTML.
For example::
>>> print(get(27700).as_html()) # doctest: +ELLIPSIS
<div class="syntax"><pre><span class="gh">PROJCS</span><span...
"""
url = '{prefix}{code}.html?download'.format(prefix=EPSG_IO_URL, code=self.id)
return requests.get(url).text |
def parseDoc(cur):
"""parse an XML in-memory document and build a tree. """
ret = libxml2mod.xmlParseDoc(cur)
if ret is None:raise parserError('xmlParseDoc() failed')
return xmlDoc(_obj=ret) | def function[parseDoc, parameter[cur]]:
constant[parse an XML in-memory document and build a tree. ]
variable[ret] assign[=] call[name[libxml2mod].xmlParseDoc, parameter[name[cur]]]
if compare[name[ret] is constant[None]] begin[:]
<ast.Raise object at 0x7da1b1fa7340>
return[call[name[xmlDoc], parameter[]]] | keyword[def] identifier[parseDoc] ( identifier[cur] ):
literal[string]
identifier[ret] = identifier[libxml2mod] . identifier[xmlParseDoc] ( identifier[cur] )
keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[raise] identifier[parserError] ( literal[string] )
keyword[return] identifier[xmlDoc] ( identifier[_obj] = identifier[ret] ) | def parseDoc(cur):
"""parse an XML in-memory document and build a tree. """
ret = libxml2mod.xmlParseDoc(cur)
if ret is None:
raise parserError('xmlParseDoc() failed') # depends on [control=['if'], data=[]]
return xmlDoc(_obj=ret) |
def rprint(sep='\n', end='\n', file=sys.stdout, flush=False):
"""A coroutine sink which prints received items stdout
Args:
sep: Optional separator to be printed between received items.
end: Optional terminator to be printed after the last item.
file: Optional stream to which to print.
flush: Optional flag to force flushing after each item.
"""
try:
first_item = (yield)
file.write(str(first_item))
if flush:
file.flush()
while True:
item = (yield)
file.write(sep)
file.write(str(item))
if flush:
file.flush()
except GeneratorExit:
file.write(end)
if flush:
file.flush() | def function[rprint, parameter[sep, end, file, flush]]:
constant[A coroutine sink which prints received items stdout
Args:
sep: Optional separator to be printed between received items.
end: Optional terminator to be printed after the last item.
file: Optional stream to which to print.
flush: Optional flag to force flushing after each item.
]
<ast.Try object at 0x7da20c76c670> | keyword[def] identifier[rprint] ( identifier[sep] = literal[string] , identifier[end] = literal[string] , identifier[file] = identifier[sys] . identifier[stdout] , identifier[flush] = keyword[False] ):
literal[string]
keyword[try] :
identifier[first_item] =( keyword[yield] )
identifier[file] . identifier[write] ( identifier[str] ( identifier[first_item] ))
keyword[if] identifier[flush] :
identifier[file] . identifier[flush] ()
keyword[while] keyword[True] :
identifier[item] =( keyword[yield] )
identifier[file] . identifier[write] ( identifier[sep] )
identifier[file] . identifier[write] ( identifier[str] ( identifier[item] ))
keyword[if] identifier[flush] :
identifier[file] . identifier[flush] ()
keyword[except] identifier[GeneratorExit] :
identifier[file] . identifier[write] ( identifier[end] )
keyword[if] identifier[flush] :
identifier[file] . identifier[flush] () | def rprint(sep='\n', end='\n', file=sys.stdout, flush=False):
"""A coroutine sink which prints received items stdout
Args:
sep: Optional separator to be printed between received items.
end: Optional terminator to be printed after the last item.
file: Optional stream to which to print.
flush: Optional flag to force flushing after each item.
"""
try:
first_item = (yield)
file.write(str(first_item))
if flush:
file.flush() # depends on [control=['if'], data=[]]
while True:
item = (yield)
file.write(sep)
file.write(str(item))
if flush:
file.flush() # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]]
except GeneratorExit:
file.write(end)
if flush:
file.flush() # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] |
def save(self, filename=None, lineendings='default', encoding='latin-1'):
"""
Save the IDF as a text file with the optional filename passed, or with
the current idfname of the IDF.
Parameters
----------
filename : str, optional
Filepath to save the file. If None then use the IDF.idfname
parameter. Also accepts a file handle.
lineendings : str, optional
Line endings to use in the saved file. Options are 'default',
'windows' and 'unix' the default is 'default' which uses the line
endings for the current system.
encoding : str, optional
Encoding to use for the saved file. The default is 'latin-1' which
is compatible with the EnergyPlus IDFEditor.
"""
if filename is None:
filename = self.idfname
s = self.idfstr()
if lineendings == 'default':
system = platform.system()
s = '!- {} Line endings \n'.format(system) + s
slines = s.splitlines()
s = os.linesep.join(slines)
elif lineendings == 'windows':
s = '!- Windows Line endings \n' + s
slines = s.splitlines()
s = '\r\n'.join(slines)
elif lineendings == 'unix':
s = '!- Unix Line endings \n' + s
slines = s.splitlines()
s = '\n'.join(slines)
s = s.encode(encoding)
try:
with open(filename, 'wb') as idf_out:
idf_out.write(s)
except TypeError: # in the case that filename is a file handle
try:
filename.write(s)
except TypeError:
filename.write(s.decode(encoding)) | def function[save, parameter[self, filename, lineendings, encoding]]:
constant[
Save the IDF as a text file with the optional filename passed, or with
the current idfname of the IDF.
Parameters
----------
filename : str, optional
Filepath to save the file. If None then use the IDF.idfname
parameter. Also accepts a file handle.
lineendings : str, optional
Line endings to use in the saved file. Options are 'default',
'windows' and 'unix' the default is 'default' which uses the line
endings for the current system.
encoding : str, optional
Encoding to use for the saved file. The default is 'latin-1' which
is compatible with the EnergyPlus IDFEditor.
]
if compare[name[filename] is constant[None]] begin[:]
variable[filename] assign[=] name[self].idfname
variable[s] assign[=] call[name[self].idfstr, parameter[]]
if compare[name[lineendings] equal[==] constant[default]] begin[:]
variable[system] assign[=] call[name[platform].system, parameter[]]
variable[s] assign[=] binary_operation[call[constant[!- {} Line endings
].format, parameter[name[system]]] + name[s]]
variable[slines] assign[=] call[name[s].splitlines, parameter[]]
variable[s] assign[=] call[name[os].linesep.join, parameter[name[slines]]]
variable[s] assign[=] call[name[s].encode, parameter[name[encoding]]]
<ast.Try object at 0x7da2044c21d0> | keyword[def] identifier[save] ( identifier[self] , identifier[filename] = keyword[None] , identifier[lineendings] = literal[string] , identifier[encoding] = literal[string] ):
literal[string]
keyword[if] identifier[filename] keyword[is] keyword[None] :
identifier[filename] = identifier[self] . identifier[idfname]
identifier[s] = identifier[self] . identifier[idfstr] ()
keyword[if] identifier[lineendings] == literal[string] :
identifier[system] = identifier[platform] . identifier[system] ()
identifier[s] = literal[string] . identifier[format] ( identifier[system] )+ identifier[s]
identifier[slines] = identifier[s] . identifier[splitlines] ()
identifier[s] = identifier[os] . identifier[linesep] . identifier[join] ( identifier[slines] )
keyword[elif] identifier[lineendings] == literal[string] :
identifier[s] = literal[string] + identifier[s]
identifier[slines] = identifier[s] . identifier[splitlines] ()
identifier[s] = literal[string] . identifier[join] ( identifier[slines] )
keyword[elif] identifier[lineendings] == literal[string] :
identifier[s] = literal[string] + identifier[s]
identifier[slines] = identifier[s] . identifier[splitlines] ()
identifier[s] = literal[string] . identifier[join] ( identifier[slines] )
identifier[s] = identifier[s] . identifier[encode] ( identifier[encoding] )
keyword[try] :
keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[idf_out] :
identifier[idf_out] . identifier[write] ( identifier[s] )
keyword[except] identifier[TypeError] :
keyword[try] :
identifier[filename] . identifier[write] ( identifier[s] )
keyword[except] identifier[TypeError] :
identifier[filename] . identifier[write] ( identifier[s] . identifier[decode] ( identifier[encoding] )) | def save(self, filename=None, lineendings='default', encoding='latin-1'):
"""
Save the IDF as a text file with the optional filename passed, or with
the current idfname of the IDF.
Parameters
----------
filename : str, optional
Filepath to save the file. If None then use the IDF.idfname
parameter. Also accepts a file handle.
lineendings : str, optional
Line endings to use in the saved file. Options are 'default',
'windows' and 'unix' the default is 'default' which uses the line
endings for the current system.
encoding : str, optional
Encoding to use for the saved file. The default is 'latin-1' which
is compatible with the EnergyPlus IDFEditor.
"""
if filename is None:
filename = self.idfname # depends on [control=['if'], data=['filename']]
s = self.idfstr()
if lineendings == 'default':
system = platform.system()
s = '!- {} Line endings \n'.format(system) + s
slines = s.splitlines()
s = os.linesep.join(slines) # depends on [control=['if'], data=[]]
elif lineendings == 'windows':
s = '!- Windows Line endings \n' + s
slines = s.splitlines()
s = '\r\n'.join(slines) # depends on [control=['if'], data=[]]
elif lineendings == 'unix':
s = '!- Unix Line endings \n' + s
slines = s.splitlines()
s = '\n'.join(slines) # depends on [control=['if'], data=[]]
s = s.encode(encoding)
try:
with open(filename, 'wb') as idf_out:
idf_out.write(s) # depends on [control=['with'], data=['idf_out']] # depends on [control=['try'], data=[]]
except TypeError: # in the case that filename is a file handle
try:
filename.write(s) # depends on [control=['try'], data=[]]
except TypeError:
filename.write(s.decode(encoding)) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] |
def raw_filter(self, filters):
"""Sends all filters to the API.
No fancy, just a wrapper. Any advanced functionality shall be implemented as another method.
Args:
filters: List of filters (strings)
Returns: :py:class:`SearchResult`
"""
return SearchResult(self, self._api.get(self._href, **{"filter[]": filters})) | def function[raw_filter, parameter[self, filters]]:
constant[Sends all filters to the API.
No fancy, just a wrapper. Any advanced functionality shall be implemented as another method.
Args:
filters: List of filters (strings)
Returns: :py:class:`SearchResult`
]
return[call[name[SearchResult], parameter[name[self], call[name[self]._api.get, parameter[name[self]._href]]]]] | keyword[def] identifier[raw_filter] ( identifier[self] , identifier[filters] ):
literal[string]
keyword[return] identifier[SearchResult] ( identifier[self] , identifier[self] . identifier[_api] . identifier[get] ( identifier[self] . identifier[_href] ,**{ literal[string] : identifier[filters] })) | def raw_filter(self, filters):
"""Sends all filters to the API.
No fancy, just a wrapper. Any advanced functionality shall be implemented as another method.
Args:
filters: List of filters (strings)
Returns: :py:class:`SearchResult`
"""
return SearchResult(self, self._api.get(self._href, **{'filter[]': filters})) |
def replace_customer_by_id(cls, customer_id, customer, **kwargs):
"""Replace Customer
Replace all attributes of Customer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_customer_by_id(customer_id, customer, async=True)
>>> result = thread.get()
:param async bool
:param str customer_id: ID of customer to replace (required)
:param Customer customer: Attributes of customer to replace (required)
:return: Customer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_customer_by_id_with_http_info(customer_id, customer, **kwargs)
else:
(data) = cls._replace_customer_by_id_with_http_info(customer_id, customer, **kwargs)
return data | def function[replace_customer_by_id, parameter[cls, customer_id, customer]]:
constant[Replace Customer
Replace all attributes of Customer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_customer_by_id(customer_id, customer, async=True)
>>> result = thread.get()
:param async bool
:param str customer_id: ID of customer to replace (required)
:param Customer customer: Attributes of customer to replace (required)
:return: Customer
If the method is called asynchronously,
returns the request thread.
]
call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True]
if call[name[kwargs].get, parameter[constant[async]]] begin[:]
return[call[name[cls]._replace_customer_by_id_with_http_info, parameter[name[customer_id], name[customer]]]] | keyword[def] identifier[replace_customer_by_id] ( identifier[cls] , identifier[customer_id] , identifier[customer] ,** identifier[kwargs] ):
literal[string]
identifier[kwargs] [ literal[string] ]= keyword[True]
keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ):
keyword[return] identifier[cls] . identifier[_replace_customer_by_id_with_http_info] ( identifier[customer_id] , identifier[customer] ,** identifier[kwargs] )
keyword[else] :
( identifier[data] )= identifier[cls] . identifier[_replace_customer_by_id_with_http_info] ( identifier[customer_id] , identifier[customer] ,** identifier[kwargs] )
keyword[return] identifier[data] | def replace_customer_by_id(cls, customer_id, customer, **kwargs):
"""Replace Customer
Replace all attributes of Customer
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True
>>> thread = api.replace_customer_by_id(customer_id, customer, async=True)
>>> result = thread.get()
:param async bool
:param str customer_id: ID of customer to replace (required)
:param Customer customer: Attributes of customer to replace (required)
:return: Customer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async'):
return cls._replace_customer_by_id_with_http_info(customer_id, customer, **kwargs) # depends on [control=['if'], data=[]]
else:
data = cls._replace_customer_by_id_with_http_info(customer_id, customer, **kwargs)
return data |
def d_x(data, axis, boundary='forward-backward'):
'''
Calculates a second-order centered finite difference of data along the
specified axis.
Parameters
----------
data : ndarray
Data on which we are taking a derivative.
axis : int
Index of the data array on which to take the difference.
boundary : string, optional
Boundary condition. If 'periodic', assume periodic boundary condition
for centered difference. If 'forward-backward', take first-order
forward or backward derivatives at boundary.
Returns
-------
derivative : ndarray
Derivative of the data along the specified axis.
Raises
------
ValueError:
If an invalid boundary condition choice is given, if both dx and x are
specified, if axis is out of the valid range for the shape of the data,
or if x is specified and axis_x is out of the valid range for the shape
of x.
'''
if abs(axis) > len(data.shape):
raise ValueError('axis is out of bounds for the shape of data')
if boundary == 'periodic':
diff = np.roll(data, -1, axis) - np.roll(data, 1, axis)
elif boundary == 'forward-backward':
# We will take forward-backward differencing at edges
# need some fancy indexing to handle arbitrary derivative axis
# Initialize our index lists
front = [slice(s) for s in data.shape]
back = [slice(s) for s in data.shape]
target = [slice(s) for s in data.shape]
# Set our index values for the derivative axis
# front is the +1 index for derivative
front[axis] = np.array([1, -1])
# back is the -1 index for derivative
back[axis] = np.array([0, -2])
# target is the position where the derivative is being calculated
target[axis] = np.array([0, -1])
diff = (np.roll(data, -1, axis) - np.roll(data, 1, axis))/(2.)
diff[target] = (data[front]-data[back])
else: # invalid boundary condition was given
raise ValueError('Invalid option {} for boundary '
'condition.'.format(boundary))
return diff | def function[d_x, parameter[data, axis, boundary]]:
constant[
Calculates a second-order centered finite difference of data along the
specified axis.
Parameters
----------
data : ndarray
Data on which we are taking a derivative.
axis : int
Index of the data array on which to take the difference.
boundary : string, optional
Boundary condition. If 'periodic', assume periodic boundary condition
for centered difference. If 'forward-backward', take first-order
forward or backward derivatives at boundary.
Returns
-------
derivative : ndarray
Derivative of the data along the specified axis.
Raises
------
ValueError:
If an invalid boundary condition choice is given, if both dx and x are
specified, if axis is out of the valid range for the shape of the data,
or if x is specified and axis_x is out of the valid range for the shape
of x.
]
if compare[call[name[abs], parameter[name[axis]]] greater[>] call[name[len], parameter[name[data].shape]]] begin[:]
<ast.Raise object at 0x7da207f03370>
if compare[name[boundary] equal[==] constant[periodic]] begin[:]
variable[diff] assign[=] binary_operation[call[name[np].roll, parameter[name[data], <ast.UnaryOp object at 0x7da207f01060>, name[axis]]] - call[name[np].roll, parameter[name[data], constant[1], name[axis]]]]
return[name[diff]] | keyword[def] identifier[d_x] ( identifier[data] , identifier[axis] , identifier[boundary] = literal[string] ):
literal[string]
keyword[if] identifier[abs] ( identifier[axis] )> identifier[len] ( identifier[data] . identifier[shape] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[boundary] == literal[string] :
identifier[diff] = identifier[np] . identifier[roll] ( identifier[data] ,- literal[int] , identifier[axis] )- identifier[np] . identifier[roll] ( identifier[data] , literal[int] , identifier[axis] )
keyword[elif] identifier[boundary] == literal[string] :
identifier[front] =[ identifier[slice] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[data] . identifier[shape] ]
identifier[back] =[ identifier[slice] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[data] . identifier[shape] ]
identifier[target] =[ identifier[slice] ( identifier[s] ) keyword[for] identifier[s] keyword[in] identifier[data] . identifier[shape] ]
identifier[front] [ identifier[axis] ]= identifier[np] . identifier[array] ([ literal[int] ,- literal[int] ])
identifier[back] [ identifier[axis] ]= identifier[np] . identifier[array] ([ literal[int] ,- literal[int] ])
identifier[target] [ identifier[axis] ]= identifier[np] . identifier[array] ([ literal[int] ,- literal[int] ])
identifier[diff] =( identifier[np] . identifier[roll] ( identifier[data] ,- literal[int] , identifier[axis] )- identifier[np] . identifier[roll] ( identifier[data] , literal[int] , identifier[axis] ))/( literal[int] )
identifier[diff] [ identifier[target] ]=( identifier[data] [ identifier[front] ]- identifier[data] [ identifier[back] ])
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[boundary] ))
keyword[return] identifier[diff] | def d_x(data, axis, boundary='forward-backward'):
"""
Calculates a second-order centered finite difference of data along the
specified axis.
Parameters
----------
data : ndarray
Data on which we are taking a derivative.
axis : int
Index of the data array on which to take the difference.
boundary : string, optional
Boundary condition. If 'periodic', assume periodic boundary condition
for centered difference. If 'forward-backward', take first-order
forward or backward derivatives at boundary.
Returns
-------
derivative : ndarray
Derivative of the data along the specified axis.
Raises
------
ValueError:
If an invalid boundary condition choice is given, if both dx and x are
specified, if axis is out of the valid range for the shape of the data,
or if x is specified and axis_x is out of the valid range for the shape
of x.
"""
if abs(axis) > len(data.shape):
raise ValueError('axis is out of bounds for the shape of data') # depends on [control=['if'], data=[]]
if boundary == 'periodic':
diff = np.roll(data, -1, axis) - np.roll(data, 1, axis) # depends on [control=['if'], data=[]]
elif boundary == 'forward-backward':
# We will take forward-backward differencing at edges
# need some fancy indexing to handle arbitrary derivative axis
# Initialize our index lists
front = [slice(s) for s in data.shape]
back = [slice(s) for s in data.shape]
target = [slice(s) for s in data.shape]
# Set our index values for the derivative axis
# front is the +1 index for derivative
front[axis] = np.array([1, -1])
# back is the -1 index for derivative
back[axis] = np.array([0, -2])
# target is the position where the derivative is being calculated
target[axis] = np.array([0, -1])
diff = (np.roll(data, -1, axis) - np.roll(data, 1, axis)) / 2.0
diff[target] = data[front] - data[back] # depends on [control=['if'], data=[]]
else: # invalid boundary condition was given
raise ValueError('Invalid option {} for boundary condition.'.format(boundary))
return diff |
def create(cls, name, situation_context, attacker=None, target=None,
severity='information', situation_type=None, description=None,
comment=None):
"""
Create an inspection situation.
:param str name: name of the situation
:param InspectionSituationContext situation_context: The situation
context type used to define this situation. Identifies the proper
parameter that identifies how the situation is defined (i.e. regex, etc).
:param str attacker: Attacker information, used to identify last packet
the triggers attack and is only used for blacklisting. Values can
be packet_source, packet_destination, connection_source, or
connection_destination
:param str target: Target information, used to identify the last packet
that triggers the attack and is only used for blacklisting. Values
can be packet_source, packet_destination, connection_source, or
connection_destination
:param str severity: severity for this situation. Valid values are
critical, high, low, information
:param str description: optional description
:param str comment: optional comment
"""
try:
json = {
'name': name, 'comment': comment,
'description': description,
'situation_context_ref': situation_context.href,
'attacker': attacker, 'victim': target,
'severity': _severity_by_name(severity)}
element = ElementCreator(cls, json)
tag = situation_type or SituationTag('User Defined Situations')
tag.add_element(element)
return element
except ElementNotFound as e:
raise CreateElementFailed('{}. Inspection Situation Contexts require SMC '
'version 6.5 and above.'.format(str(e))) | def function[create, parameter[cls, name, situation_context, attacker, target, severity, situation_type, description, comment]]:
constant[
Create an inspection situation.
:param str name: name of the situation
:param InspectionSituationContext situation_context: The situation
context type used to define this situation. Identifies the proper
parameter that identifies how the situation is defined (i.e. regex, etc).
:param str attacker: Attacker information, used to identify last packet
the triggers attack and is only used for blacklisting. Values can
be packet_source, packet_destination, connection_source, or
connection_destination
:param str target: Target information, used to identify the last packet
that triggers the attack and is only used for blacklisting. Values
can be packet_source, packet_destination, connection_source, or
connection_destination
:param str severity: severity for this situation. Valid values are
critical, high, low, information
:param str description: optional description
:param str comment: optional comment
]
<ast.Try object at 0x7da1b1b17700> | keyword[def] identifier[create] ( identifier[cls] , identifier[name] , identifier[situation_context] , identifier[attacker] = keyword[None] , identifier[target] = keyword[None] ,
identifier[severity] = literal[string] , identifier[situation_type] = keyword[None] , identifier[description] = keyword[None] ,
identifier[comment] = keyword[None] ):
literal[string]
keyword[try] :
identifier[json] ={
literal[string] : identifier[name] , literal[string] : identifier[comment] ,
literal[string] : identifier[description] ,
literal[string] : identifier[situation_context] . identifier[href] ,
literal[string] : identifier[attacker] , literal[string] : identifier[target] ,
literal[string] : identifier[_severity_by_name] ( identifier[severity] )}
identifier[element] = identifier[ElementCreator] ( identifier[cls] , identifier[json] )
identifier[tag] = identifier[situation_type] keyword[or] identifier[SituationTag] ( literal[string] )
identifier[tag] . identifier[add_element] ( identifier[element] )
keyword[return] identifier[element]
keyword[except] identifier[ElementNotFound] keyword[as] identifier[e] :
keyword[raise] identifier[CreateElementFailed] ( literal[string]
literal[string] . identifier[format] ( identifier[str] ( identifier[e] ))) | def create(cls, name, situation_context, attacker=None, target=None, severity='information', situation_type=None, description=None, comment=None):
"""
Create an inspection situation.
:param str name: name of the situation
:param InspectionSituationContext situation_context: The situation
context type used to define this situation. Identifies the proper
parameter that identifies how the situation is defined (i.e. regex, etc).
:param str attacker: Attacker information, used to identify last packet
the triggers attack and is only used for blacklisting. Values can
be packet_source, packet_destination, connection_source, or
connection_destination
:param str target: Target information, used to identify the last packet
that triggers the attack and is only used for blacklisting. Values
can be packet_source, packet_destination, connection_source, or
connection_destination
:param str severity: severity for this situation. Valid values are
critical, high, low, information
:param str description: optional description
:param str comment: optional comment
"""
try:
json = {'name': name, 'comment': comment, 'description': description, 'situation_context_ref': situation_context.href, 'attacker': attacker, 'victim': target, 'severity': _severity_by_name(severity)}
element = ElementCreator(cls, json)
tag = situation_type or SituationTag('User Defined Situations')
tag.add_element(element)
return element # depends on [control=['try'], data=[]]
except ElementNotFound as e:
raise CreateElementFailed('{}. Inspection Situation Contexts require SMC version 6.5 and above.'.format(str(e))) # depends on [control=['except'], data=['e']] |
def calculate_dates(self, dt):
"""
Given a date, find that day's open and period end (open + offset).
"""
period_start, period_close = self.cal.open_and_close_for_session(
self.cal.minute_to_session_label(dt),
)
# Align the market open and close times here with the execution times
# used by the simulation clock. This ensures that scheduled functions
# trigger at the correct times.
self._period_start = self.cal.execution_time_from_open(period_start)
self._period_close = self.cal.execution_time_from_close(period_close)
self._period_end = self._period_start + self.offset - self._one_minute | def function[calculate_dates, parameter[self, dt]]:
constant[
Given a date, find that day's open and period end (open + offset).
]
<ast.Tuple object at 0x7da1b2046110> assign[=] call[name[self].cal.open_and_close_for_session, parameter[call[name[self].cal.minute_to_session_label, parameter[name[dt]]]]]
name[self]._period_start assign[=] call[name[self].cal.execution_time_from_open, parameter[name[period_start]]]
name[self]._period_close assign[=] call[name[self].cal.execution_time_from_close, parameter[name[period_close]]]
name[self]._period_end assign[=] binary_operation[binary_operation[name[self]._period_start + name[self].offset] - name[self]._one_minute] | keyword[def] identifier[calculate_dates] ( identifier[self] , identifier[dt] ):
literal[string]
identifier[period_start] , identifier[period_close] = identifier[self] . identifier[cal] . identifier[open_and_close_for_session] (
identifier[self] . identifier[cal] . identifier[minute_to_session_label] ( identifier[dt] ),
)
identifier[self] . identifier[_period_start] = identifier[self] . identifier[cal] . identifier[execution_time_from_open] ( identifier[period_start] )
identifier[self] . identifier[_period_close] = identifier[self] . identifier[cal] . identifier[execution_time_from_close] ( identifier[period_close] )
identifier[self] . identifier[_period_end] = identifier[self] . identifier[_period_start] + identifier[self] . identifier[offset] - identifier[self] . identifier[_one_minute] | def calculate_dates(self, dt):
"""
Given a date, find that day's open and period end (open + offset).
"""
(period_start, period_close) = self.cal.open_and_close_for_session(self.cal.minute_to_session_label(dt))
# Align the market open and close times here with the execution times
# used by the simulation clock. This ensures that scheduled functions
# trigger at the correct times.
self._period_start = self.cal.execution_time_from_open(period_start)
self._period_close = self.cal.execution_time_from_close(period_close)
self._period_end = self._period_start + self.offset - self._one_minute |
def runner_argspec(module=''):
'''
Return the argument specification of functions in Salt runner
modules.
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' sys.runner_argspec state
salt '*' sys.runner_argspec http
salt '*' sys.runner_argspec
Runner names can be specified as globs.
.. code-block:: bash
salt '*' sys.runner_argspec 'winrepo.*'
'''
run_ = salt.runner.Runner(__opts__)
return salt.utils.args.argspec_report(run_.functions, module) | def function[runner_argspec, parameter[module]]:
constant[
Return the argument specification of functions in Salt runner
modules.
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' sys.runner_argspec state
salt '*' sys.runner_argspec http
salt '*' sys.runner_argspec
Runner names can be specified as globs.
.. code-block:: bash
salt '*' sys.runner_argspec 'winrepo.*'
]
variable[run_] assign[=] call[name[salt].runner.Runner, parameter[name[__opts__]]]
return[call[name[salt].utils.args.argspec_report, parameter[name[run_].functions, name[module]]]] | keyword[def] identifier[runner_argspec] ( identifier[module] = literal[string] ):
literal[string]
identifier[run_] = identifier[salt] . identifier[runner] . identifier[Runner] ( identifier[__opts__] )
keyword[return] identifier[salt] . identifier[utils] . identifier[args] . identifier[argspec_report] ( identifier[run_] . identifier[functions] , identifier[module] ) | def runner_argspec(module=''):
"""
Return the argument specification of functions in Salt runner
modules.
.. versionadded:: 2015.5.0
CLI Example:
.. code-block:: bash
salt '*' sys.runner_argspec state
salt '*' sys.runner_argspec http
salt '*' sys.runner_argspec
Runner names can be specified as globs.
.. code-block:: bash
salt '*' sys.runner_argspec 'winrepo.*'
"""
run_ = salt.runner.Runner(__opts__)
return salt.utils.args.argspec_report(run_.functions, module) |
def match_deadline(self, start, end, match):
"""Matches assessments whose end time falls between the specified range inclusive.
arg: start (osid.calendaring.DateTime): start of range
arg: end (osid.calendaring.DateTime): end of range
arg: match (boolean): ``true`` for a positive match,
``false`` for a negative match
raise: InvalidArgument - ``end`` is less than ``start``
raise: NullArgument - ``start`` or ``end`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._match_minimum_date_time('deadline', start, match)
self._match_maximum_date_time('deadline', end, match) | def function[match_deadline, parameter[self, start, end, match]]:
constant[Matches assessments whose end time falls between the specified range inclusive.
arg: start (osid.calendaring.DateTime): start of range
arg: end (osid.calendaring.DateTime): end of range
arg: match (boolean): ``true`` for a positive match,
``false`` for a negative match
raise: InvalidArgument - ``end`` is less than ``start``
raise: NullArgument - ``start`` or ``end`` is ``null``
*compliance: mandatory -- This method must be implemented.*
]
call[name[self]._match_minimum_date_time, parameter[constant[deadline], name[start], name[match]]]
call[name[self]._match_maximum_date_time, parameter[constant[deadline], name[end], name[match]]] | keyword[def] identifier[match_deadline] ( identifier[self] , identifier[start] , identifier[end] , identifier[match] ):
literal[string]
identifier[self] . identifier[_match_minimum_date_time] ( literal[string] , identifier[start] , identifier[match] )
identifier[self] . identifier[_match_maximum_date_time] ( literal[string] , identifier[end] , identifier[match] ) | def match_deadline(self, start, end, match):
"""Matches assessments whose end time falls between the specified range inclusive.
arg: start (osid.calendaring.DateTime): start of range
arg: end (osid.calendaring.DateTime): end of range
arg: match (boolean): ``true`` for a positive match,
``false`` for a negative match
raise: InvalidArgument - ``end`` is less than ``start``
raise: NullArgument - ``start`` or ``end`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._match_minimum_date_time('deadline', start, match)
self._match_maximum_date_time('deadline', end, match) |
def version():
'''
Return server version from varnishd -V
CLI Example:
.. code-block:: bash
salt '*' varnish.version
'''
cmd = ['varnishd', '-V']
out = __salt__['cmd.run'](cmd, python_shell=False)
ret = re.search(r'\(varnish-([^\)]+)\)', out).group(1)
return ret | def function[version, parameter[]]:
constant[
Return server version from varnishd -V
CLI Example:
.. code-block:: bash
salt '*' varnish.version
]
variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b2169a80>, <ast.Constant object at 0x7da1b21686a0>]]
variable[out] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[cmd]]]
variable[ret] assign[=] call[call[name[re].search, parameter[constant[\(varnish-([^\)]+)\)], name[out]]].group, parameter[constant[1]]]
return[name[ret]] | keyword[def] identifier[version] ():
literal[string]
identifier[cmd] =[ literal[string] , literal[string] ]
identifier[out] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] )
identifier[ret] = identifier[re] . identifier[search] ( literal[string] , identifier[out] ). identifier[group] ( literal[int] )
keyword[return] identifier[ret] | def version():
"""
Return server version from varnishd -V
CLI Example:
.. code-block:: bash
salt '*' varnish.version
"""
cmd = ['varnishd', '-V']
out = __salt__['cmd.run'](cmd, python_shell=False)
ret = re.search('\\(varnish-([^\\)]+)\\)', out).group(1)
return ret |
def random_pos(self, context_iterable, num_permutations):
"""Obtains random positions w/ replacement which match sequence context.
Parameters
----------
context_iterable: iterable containing two element tuple
Records number of mutations in each context. context_iterable
should be something like [('AA', 5), ...].
num_permutations : int
Number of permutations used in the permutation test.
Returns
-------
position_list : list
Contains context string and the randomly chosen positions
for that context.
"""
position_list = []
for contxt, n in context_iterable:
pos_array = self.random_context_pos(n, num_permutations, contxt)
position_list.append([contxt, pos_array])
return position_list | def function[random_pos, parameter[self, context_iterable, num_permutations]]:
constant[Obtains random positions w/ replacement which match sequence context.
Parameters
----------
context_iterable: iterable containing two element tuple
Records number of mutations in each context. context_iterable
should be something like [('AA', 5), ...].
num_permutations : int
Number of permutations used in the permutation test.
Returns
-------
position_list : list
Contains context string and the randomly chosen positions
for that context.
]
variable[position_list] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da20c6c5480>, <ast.Name object at 0x7da20c6c7a30>]]] in starred[name[context_iterable]] begin[:]
variable[pos_array] assign[=] call[name[self].random_context_pos, parameter[name[n], name[num_permutations], name[contxt]]]
call[name[position_list].append, parameter[list[[<ast.Name object at 0x7da2047eabf0>, <ast.Name object at 0x7da2047eae60>]]]]
return[name[position_list]] | keyword[def] identifier[random_pos] ( identifier[self] , identifier[context_iterable] , identifier[num_permutations] ):
literal[string]
identifier[position_list] =[]
keyword[for] identifier[contxt] , identifier[n] keyword[in] identifier[context_iterable] :
identifier[pos_array] = identifier[self] . identifier[random_context_pos] ( identifier[n] , identifier[num_permutations] , identifier[contxt] )
identifier[position_list] . identifier[append] ([ identifier[contxt] , identifier[pos_array] ])
keyword[return] identifier[position_list] | def random_pos(self, context_iterable, num_permutations):
"""Obtains random positions w/ replacement which match sequence context.
Parameters
----------
context_iterable: iterable containing two element tuple
Records number of mutations in each context. context_iterable
should be something like [('AA', 5), ...].
num_permutations : int
Number of permutations used in the permutation test.
Returns
-------
position_list : list
Contains context string and the randomly chosen positions
for that context.
"""
position_list = []
for (contxt, n) in context_iterable:
pos_array = self.random_context_pos(n, num_permutations, contxt)
position_list.append([contxt, pos_array]) # depends on [control=['for'], data=[]]
return position_list |
def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances]
populate_missing = kwargs.get("populate_missing", True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations()
for instance in instances:
if (
issubclass(instance.__class__, ModelMixin)
and instance.pk in grouped_translations
):
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation)
if populate_missing:
instance.populate_missing_translations() | def function[prefetch_translations, parameter[instances]]:
constant[
Prefetches translations for the given instances.
Can be useful for a list of instances.
]
from relative_module[mixins] import module[ModelMixin]
if <ast.UnaryOp object at 0x7da20e9b1150> begin[:]
variable[instances] assign[=] list[[<ast.Name object at 0x7da1b27edd20>]]
variable[populate_missing] assign[=] call[name[kwargs].get, parameter[constant[populate_missing], constant[True]]]
variable[grouped_translations] assign[=] call[name[utils].get_grouped_translations, parameter[name[instances]]]
if <ast.BoolOp object at 0x7da1b27ecc10> begin[:]
for taget[name[instance]] in starred[name[instances]] begin[:]
call[name[instance].populate_missing_translations, parameter[]]
for taget[name[instance]] in starred[name[instances]] begin[:]
if <ast.BoolOp object at 0x7da1b27eee90> begin[:]
for taget[name[translation]] in starred[call[name[grouped_translations]][name[instance].pk]] begin[:]
call[name[instance]._linguist.set_cache, parameter[]]
if name[populate_missing] begin[:]
call[name[instance].populate_missing_translations, parameter[]] | keyword[def] identifier[prefetch_translations] ( identifier[instances] ,** identifier[kwargs] ):
literal[string]
keyword[from] . identifier[mixins] keyword[import] identifier[ModelMixin]
keyword[if] keyword[not] identifier[isinstance] ( identifier[instances] , identifier[collections] . identifier[Iterable] ):
identifier[instances] =[ identifier[instances] ]
identifier[populate_missing] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
identifier[grouped_translations] = identifier[utils] . identifier[get_grouped_translations] ( identifier[instances] ,** identifier[kwargs] )
keyword[if] keyword[not] identifier[grouped_translations] keyword[and] identifier[populate_missing] :
keyword[for] identifier[instance] keyword[in] identifier[instances] :
identifier[instance] . identifier[populate_missing_translations] ()
keyword[for] identifier[instance] keyword[in] identifier[instances] :
keyword[if] (
identifier[issubclass] ( identifier[instance] . identifier[__class__] , identifier[ModelMixin] )
keyword[and] identifier[instance] . identifier[pk] keyword[in] identifier[grouped_translations]
):
keyword[for] identifier[translation] keyword[in] identifier[grouped_translations] [ identifier[instance] . identifier[pk] ]:
identifier[instance] . identifier[_linguist] . identifier[set_cache] ( identifier[instance] = identifier[instance] , identifier[translation] = identifier[translation] )
keyword[if] identifier[populate_missing] :
identifier[instance] . identifier[populate_missing_translations] () | def prefetch_translations(instances, **kwargs):
"""
Prefetches translations for the given instances.
Can be useful for a list of instances.
"""
from .mixins import ModelMixin
if not isinstance(instances, collections.Iterable):
instances = [instances] # depends on [control=['if'], data=[]]
populate_missing = kwargs.get('populate_missing', True)
grouped_translations = utils.get_grouped_translations(instances, **kwargs)
# In the case of no translations objects
if not grouped_translations and populate_missing:
for instance in instances:
instance.populate_missing_translations() # depends on [control=['for'], data=['instance']] # depends on [control=['if'], data=[]]
for instance in instances:
if issubclass(instance.__class__, ModelMixin) and instance.pk in grouped_translations:
for translation in grouped_translations[instance.pk]:
instance._linguist.set_cache(instance=instance, translation=translation) # depends on [control=['for'], data=['translation']]
if populate_missing:
instance.populate_missing_translations() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['instance']] |
def sell(self, quantity, **kwargs):
""" Shortcut for ``instrument.order("SELL", ...)`` and accepts all of its
`optional parameters <#qtpylib.instrument.Instrument.order>`_
:Parameters:
quantity : int
Order quantity
"""
self.parent.order("SELL", self, quantity=quantity, **kwargs) | def function[sell, parameter[self, quantity]]:
constant[ Shortcut for ``instrument.order("SELL", ...)`` and accepts all of its
`optional parameters <#qtpylib.instrument.Instrument.order>`_
:Parameters:
quantity : int
Order quantity
]
call[name[self].parent.order, parameter[constant[SELL], name[self]]] | keyword[def] identifier[sell] ( identifier[self] , identifier[quantity] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[parent] . identifier[order] ( literal[string] , identifier[self] , identifier[quantity] = identifier[quantity] ,** identifier[kwargs] ) | def sell(self, quantity, **kwargs):
""" Shortcut for ``instrument.order("SELL", ...)`` and accepts all of its
`optional parameters <#qtpylib.instrument.Instrument.order>`_
:Parameters:
quantity : int
Order quantity
"""
self.parent.order('SELL', self, quantity=quantity, **kwargs) |
def designspace(self):
"""Get a designspace Document instance that links the masters together
and holds instance data.
"""
if self._designspace_is_complete:
return self._designspace
self._designspace_is_complete = True
list(self.masters) # Make sure that the UFOs are built
self.to_designspace_axes()
self.to_designspace_sources()
self.to_designspace_instances()
self.to_designspace_family_user_data()
if self.bracket_layers:
self._apply_bracket_layers()
# append base style shared by all masters to designspace file name
base_family = self.family_name or "Unnamed"
base_style = find_base_style(self.font.masters)
if base_style:
base_style = "-" + base_style
name = (base_family + base_style).replace(" ", "") + ".designspace"
self.designspace.filename = name
return self._designspace | def function[designspace, parameter[self]]:
constant[Get a designspace Document instance that links the masters together
and holds instance data.
]
if name[self]._designspace_is_complete begin[:]
return[name[self]._designspace]
name[self]._designspace_is_complete assign[=] constant[True]
call[name[list], parameter[name[self].masters]]
call[name[self].to_designspace_axes, parameter[]]
call[name[self].to_designspace_sources, parameter[]]
call[name[self].to_designspace_instances, parameter[]]
call[name[self].to_designspace_family_user_data, parameter[]]
if name[self].bracket_layers begin[:]
call[name[self]._apply_bracket_layers, parameter[]]
variable[base_family] assign[=] <ast.BoolOp object at 0x7da20c992560>
variable[base_style] assign[=] call[name[find_base_style], parameter[name[self].font.masters]]
if name[base_style] begin[:]
variable[base_style] assign[=] binary_operation[constant[-] + name[base_style]]
variable[name] assign[=] binary_operation[call[binary_operation[name[base_family] + name[base_style]].replace, parameter[constant[ ], constant[]]] + constant[.designspace]]
name[self].designspace.filename assign[=] name[name]
return[name[self]._designspace] | keyword[def] identifier[designspace] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_designspace_is_complete] :
keyword[return] identifier[self] . identifier[_designspace]
identifier[self] . identifier[_designspace_is_complete] = keyword[True]
identifier[list] ( identifier[self] . identifier[masters] )
identifier[self] . identifier[to_designspace_axes] ()
identifier[self] . identifier[to_designspace_sources] ()
identifier[self] . identifier[to_designspace_instances] ()
identifier[self] . identifier[to_designspace_family_user_data] ()
keyword[if] identifier[self] . identifier[bracket_layers] :
identifier[self] . identifier[_apply_bracket_layers] ()
identifier[base_family] = identifier[self] . identifier[family_name] keyword[or] literal[string]
identifier[base_style] = identifier[find_base_style] ( identifier[self] . identifier[font] . identifier[masters] )
keyword[if] identifier[base_style] :
identifier[base_style] = literal[string] + identifier[base_style]
identifier[name] =( identifier[base_family] + identifier[base_style] ). identifier[replace] ( literal[string] , literal[string] )+ literal[string]
identifier[self] . identifier[designspace] . identifier[filename] = identifier[name]
keyword[return] identifier[self] . identifier[_designspace] | def designspace(self):
"""Get a designspace Document instance that links the masters together
and holds instance data.
"""
if self._designspace_is_complete:
return self._designspace # depends on [control=['if'], data=[]]
self._designspace_is_complete = True
list(self.masters) # Make sure that the UFOs are built
self.to_designspace_axes()
self.to_designspace_sources()
self.to_designspace_instances()
self.to_designspace_family_user_data()
if self.bracket_layers:
self._apply_bracket_layers() # depends on [control=['if'], data=[]]
# append base style shared by all masters to designspace file name
base_family = self.family_name or 'Unnamed'
base_style = find_base_style(self.font.masters)
if base_style:
base_style = '-' + base_style # depends on [control=['if'], data=[]]
name = (base_family + base_style).replace(' ', '') + '.designspace'
self.designspace.filename = name
return self._designspace |
def setTimeout(self, time):
"""Set global timeout value, in seconds, for all DDE calls"""
self.conversation.SetDDETimeout(round(time))
return self.conversation.GetDDETimeout() | def function[setTimeout, parameter[self, time]]:
constant[Set global timeout value, in seconds, for all DDE calls]
call[name[self].conversation.SetDDETimeout, parameter[call[name[round], parameter[name[time]]]]]
return[call[name[self].conversation.GetDDETimeout, parameter[]]] | keyword[def] identifier[setTimeout] ( identifier[self] , identifier[time] ):
literal[string]
identifier[self] . identifier[conversation] . identifier[SetDDETimeout] ( identifier[round] ( identifier[time] ))
keyword[return] identifier[self] . identifier[conversation] . identifier[GetDDETimeout] () | def setTimeout(self, time):
"""Set global timeout value, in seconds, for all DDE calls"""
self.conversation.SetDDETimeout(round(time))
return self.conversation.GetDDETimeout() |
def _count_expected_bids(self, recipients):
'''
Count the expected number of bids (after receiving them we close the
contract. If the recipient type is broadcast return None which denotes
unknown number of bids (contract will be closed after timeout).
'''
count = 0
for recp in recipients:
if recp.type == RecipientType.broadcast:
return None
count += 1
return count | def function[_count_expected_bids, parameter[self, recipients]]:
constant[
Count the expected number of bids (after receiving them we close the
contract. If the recipient type is broadcast return None which denotes
unknown number of bids (contract will be closed after timeout).
]
variable[count] assign[=] constant[0]
for taget[name[recp]] in starred[name[recipients]] begin[:]
if compare[name[recp].type equal[==] name[RecipientType].broadcast] begin[:]
return[constant[None]]
<ast.AugAssign object at 0x7da20e955ab0>
return[name[count]] | keyword[def] identifier[_count_expected_bids] ( identifier[self] , identifier[recipients] ):
literal[string]
identifier[count] = literal[int]
keyword[for] identifier[recp] keyword[in] identifier[recipients] :
keyword[if] identifier[recp] . identifier[type] == identifier[RecipientType] . identifier[broadcast] :
keyword[return] keyword[None]
identifier[count] += literal[int]
keyword[return] identifier[count] | def _count_expected_bids(self, recipients):
"""
Count the expected number of bids (after receiving them we close the
contract. If the recipient type is broadcast return None which denotes
unknown number of bids (contract will be closed after timeout).
"""
count = 0
for recp in recipients:
if recp.type == RecipientType.broadcast:
return None # depends on [control=['if'], data=[]]
count += 1 # depends on [control=['for'], data=['recp']]
return count |
def sources(self):
"""
Returns a dictionary containing all defined sources (of any kind)
:return: collections.OrderedDict()
"""
sources = collections.OrderedDict()
for d in (self.point_sources, self.extended_sources, self.particle_sources):
sources.update(d)
return sources | def function[sources, parameter[self]]:
constant[
Returns a dictionary containing all defined sources (of any kind)
:return: collections.OrderedDict()
]
variable[sources] assign[=] call[name[collections].OrderedDict, parameter[]]
for taget[name[d]] in starred[tuple[[<ast.Attribute object at 0x7da1b0efa500>, <ast.Attribute object at 0x7da1b0efa980>, <ast.Attribute object at 0x7da1b0efa710>]]] begin[:]
call[name[sources].update, parameter[name[d]]]
return[name[sources]] | keyword[def] identifier[sources] ( identifier[self] ):
literal[string]
identifier[sources] = identifier[collections] . identifier[OrderedDict] ()
keyword[for] identifier[d] keyword[in] ( identifier[self] . identifier[point_sources] , identifier[self] . identifier[extended_sources] , identifier[self] . identifier[particle_sources] ):
identifier[sources] . identifier[update] ( identifier[d] )
keyword[return] identifier[sources] | def sources(self):
"""
Returns a dictionary containing all defined sources (of any kind)
:return: collections.OrderedDict()
"""
sources = collections.OrderedDict()
for d in (self.point_sources, self.extended_sources, self.particle_sources):
sources.update(d) # depends on [control=['for'], data=['d']]
return sources |
def saturate_color(color, amount):
"""Saturate a hex color."""
r, g, b = hex_to_rgb(color)
r, g, b = [x/255.0 for x in (r, g, b)]
h, l, s = colorsys.rgb_to_hls(r, g, b)
s = amount
r, g, b = colorsys.hls_to_rgb(h, l, s)
r, g, b = [x*255.0 for x in (r, g, b)]
return rgb_to_hex((int(r), int(g), int(b))) | def function[saturate_color, parameter[color, amount]]:
constant[Saturate a hex color.]
<ast.Tuple object at 0x7da1b18be680> assign[=] call[name[hex_to_rgb], parameter[name[color]]]
<ast.Tuple object at 0x7da1b18d1bd0> assign[=] <ast.ListComp object at 0x7da1b18d1450>
<ast.Tuple object at 0x7da1b1780730> assign[=] call[name[colorsys].rgb_to_hls, parameter[name[r], name[g], name[b]]]
variable[s] assign[=] name[amount]
<ast.Tuple object at 0x7da1b18bc5b0> assign[=] call[name[colorsys].hls_to_rgb, parameter[name[h], name[l], name[s]]]
<ast.Tuple object at 0x7da1b18bf6d0> assign[=] <ast.ListComp object at 0x7da1b18bd240>
return[call[name[rgb_to_hex], parameter[tuple[[<ast.Call object at 0x7da1b1831120>, <ast.Call object at 0x7da1b1833a30>, <ast.Call object at 0x7da1b18306a0>]]]]] | keyword[def] identifier[saturate_color] ( identifier[color] , identifier[amount] ):
literal[string]
identifier[r] , identifier[g] , identifier[b] = identifier[hex_to_rgb] ( identifier[color] )
identifier[r] , identifier[g] , identifier[b] =[ identifier[x] / literal[int] keyword[for] identifier[x] keyword[in] ( identifier[r] , identifier[g] , identifier[b] )]
identifier[h] , identifier[l] , identifier[s] = identifier[colorsys] . identifier[rgb_to_hls] ( identifier[r] , identifier[g] , identifier[b] )
identifier[s] = identifier[amount]
identifier[r] , identifier[g] , identifier[b] = identifier[colorsys] . identifier[hls_to_rgb] ( identifier[h] , identifier[l] , identifier[s] )
identifier[r] , identifier[g] , identifier[b] =[ identifier[x] * literal[int] keyword[for] identifier[x] keyword[in] ( identifier[r] , identifier[g] , identifier[b] )]
keyword[return] identifier[rgb_to_hex] (( identifier[int] ( identifier[r] ), identifier[int] ( identifier[g] ), identifier[int] ( identifier[b] ))) | def saturate_color(color, amount):
"""Saturate a hex color."""
(r, g, b) = hex_to_rgb(color)
(r, g, b) = [x / 255.0 for x in (r, g, b)]
(h, l, s) = colorsys.rgb_to_hls(r, g, b)
s = amount
(r, g, b) = colorsys.hls_to_rgb(h, l, s)
(r, g, b) = [x * 255.0 for x in (r, g, b)]
return rgb_to_hex((int(r), int(g), int(b))) |
def seqlogo(letter_heights, vocab="DNA", ax=None):
"""Make a logo plot
# Arguments
letter_heights: "motif length" x "vocabulary size" numpy array
Can also contain negative values.
vocab: str, Vocabulary name. Can be: DNA, RNA, AA, RNAStruct.
ax: matplotlib axis
"""
ax = ax or plt.gca()
assert letter_heights.shape[1] == len(VOCABS[vocab])
x_range = [1, letter_heights.shape[0]]
pos_heights = np.copy(letter_heights)
pos_heights[letter_heights < 0] = 0
neg_heights = np.copy(letter_heights)
neg_heights[letter_heights > 0] = 0
for x_pos, heights in enumerate(letter_heights):
letters_and_heights = sorted(zip(heights, list(VOCABS[vocab].keys())))
y_pos_pos = 0.0
y_neg_pos = 0.0
for height, letter in letters_and_heights:
color = VOCABS[vocab][letter]
polygons = letter_polygons[letter]
if height > 0:
add_letter_to_axis(ax, polygons, color, 0.5 + x_pos, y_pos_pos, height)
y_pos_pos += height
else:
add_letter_to_axis(ax, polygons, color, 0.5 + x_pos, y_neg_pos, height)
y_neg_pos += height
# if add_hline:
# ax.axhline(color="black", linewidth=1)
ax.set_xlim(x_range[0] - 1, x_range[1] + 1)
ax.grid(False)
ax.set_xticks(list(range(*x_range)) + [x_range[-1]])
ax.set_aspect(aspect='auto', adjustable='box')
ax.autoscale_view() | def function[seqlogo, parameter[letter_heights, vocab, ax]]:
constant[Make a logo plot
# Arguments
letter_heights: "motif length" x "vocabulary size" numpy array
Can also contain negative values.
vocab: str, Vocabulary name. Can be: DNA, RNA, AA, RNAStruct.
ax: matplotlib axis
]
variable[ax] assign[=] <ast.BoolOp object at 0x7da204564b20>
assert[compare[call[name[letter_heights].shape][constant[1]] equal[==] call[name[len], parameter[call[name[VOCABS]][name[vocab]]]]]]
variable[x_range] assign[=] list[[<ast.Constant object at 0x7da2045662f0>, <ast.Subscript object at 0x7da204567910>]]
variable[pos_heights] assign[=] call[name[np].copy, parameter[name[letter_heights]]]
call[name[pos_heights]][compare[name[letter_heights] less[<] constant[0]]] assign[=] constant[0]
variable[neg_heights] assign[=] call[name[np].copy, parameter[name[letter_heights]]]
call[name[neg_heights]][compare[name[letter_heights] greater[>] constant[0]]] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da204565150>, <ast.Name object at 0x7da204565ae0>]]] in starred[call[name[enumerate], parameter[name[letter_heights]]]] begin[:]
variable[letters_and_heights] assign[=] call[name[sorted], parameter[call[name[zip], parameter[name[heights], call[name[list], parameter[call[call[name[VOCABS]][name[vocab]].keys, parameter[]]]]]]]]
variable[y_pos_pos] assign[=] constant[0.0]
variable[y_neg_pos] assign[=] constant[0.0]
for taget[tuple[[<ast.Name object at 0x7da1b031c160>, <ast.Name object at 0x7da1b031d4e0>]]] in starred[name[letters_and_heights]] begin[:]
variable[color] assign[=] call[call[name[VOCABS]][name[vocab]]][name[letter]]
variable[polygons] assign[=] call[name[letter_polygons]][name[letter]]
if compare[name[height] greater[>] constant[0]] begin[:]
call[name[add_letter_to_axis], parameter[name[ax], name[polygons], name[color], binary_operation[constant[0.5] + name[x_pos]], name[y_pos_pos], name[height]]]
<ast.AugAssign object at 0x7da1b031e1a0>
call[name[ax].set_xlim, parameter[binary_operation[call[name[x_range]][constant[0]] - constant[1]], binary_operation[call[name[x_range]][constant[1]] + constant[1]]]]
call[name[ax].grid, parameter[constant[False]]]
call[name[ax].set_xticks, parameter[binary_operation[call[name[list], parameter[call[name[range], parameter[<ast.Starred object at 0x7da2046216f0>]]]] + list[[<ast.Subscript object at 0x7da204620670>]]]]]
call[name[ax].set_aspect, parameter[]]
call[name[ax].autoscale_view, parameter[]] | keyword[def] identifier[seqlogo] ( identifier[letter_heights] , identifier[vocab] = literal[string] , identifier[ax] = keyword[None] ):
literal[string]
identifier[ax] = identifier[ax] keyword[or] identifier[plt] . identifier[gca] ()
keyword[assert] identifier[letter_heights] . identifier[shape] [ literal[int] ]== identifier[len] ( identifier[VOCABS] [ identifier[vocab] ])
identifier[x_range] =[ literal[int] , identifier[letter_heights] . identifier[shape] [ literal[int] ]]
identifier[pos_heights] = identifier[np] . identifier[copy] ( identifier[letter_heights] )
identifier[pos_heights] [ identifier[letter_heights] < literal[int] ]= literal[int]
identifier[neg_heights] = identifier[np] . identifier[copy] ( identifier[letter_heights] )
identifier[neg_heights] [ identifier[letter_heights] > literal[int] ]= literal[int]
keyword[for] identifier[x_pos] , identifier[heights] keyword[in] identifier[enumerate] ( identifier[letter_heights] ):
identifier[letters_and_heights] = identifier[sorted] ( identifier[zip] ( identifier[heights] , identifier[list] ( identifier[VOCABS] [ identifier[vocab] ]. identifier[keys] ())))
identifier[y_pos_pos] = literal[int]
identifier[y_neg_pos] = literal[int]
keyword[for] identifier[height] , identifier[letter] keyword[in] identifier[letters_and_heights] :
identifier[color] = identifier[VOCABS] [ identifier[vocab] ][ identifier[letter] ]
identifier[polygons] = identifier[letter_polygons] [ identifier[letter] ]
keyword[if] identifier[height] > literal[int] :
identifier[add_letter_to_axis] ( identifier[ax] , identifier[polygons] , identifier[color] , literal[int] + identifier[x_pos] , identifier[y_pos_pos] , identifier[height] )
identifier[y_pos_pos] += identifier[height]
keyword[else] :
identifier[add_letter_to_axis] ( identifier[ax] , identifier[polygons] , identifier[color] , literal[int] + identifier[x_pos] , identifier[y_neg_pos] , identifier[height] )
identifier[y_neg_pos] += identifier[height]
identifier[ax] . identifier[set_xlim] ( identifier[x_range] [ literal[int] ]- literal[int] , identifier[x_range] [ literal[int] ]+ literal[int] )
identifier[ax] . identifier[grid] ( keyword[False] )
identifier[ax] . identifier[set_xticks] ( identifier[list] ( identifier[range] (* identifier[x_range] ))+[ identifier[x_range] [- literal[int] ]])
identifier[ax] . identifier[set_aspect] ( identifier[aspect] = literal[string] , identifier[adjustable] = literal[string] )
identifier[ax] . identifier[autoscale_view] () | def seqlogo(letter_heights, vocab='DNA', ax=None):
"""Make a logo plot
# Arguments
letter_heights: "motif length" x "vocabulary size" numpy array
Can also contain negative values.
vocab: str, Vocabulary name. Can be: DNA, RNA, AA, RNAStruct.
ax: matplotlib axis
"""
ax = ax or plt.gca()
assert letter_heights.shape[1] == len(VOCABS[vocab])
x_range = [1, letter_heights.shape[0]]
pos_heights = np.copy(letter_heights)
pos_heights[letter_heights < 0] = 0
neg_heights = np.copy(letter_heights)
neg_heights[letter_heights > 0] = 0
for (x_pos, heights) in enumerate(letter_heights):
letters_and_heights = sorted(zip(heights, list(VOCABS[vocab].keys())))
y_pos_pos = 0.0
y_neg_pos = 0.0
for (height, letter) in letters_and_heights:
color = VOCABS[vocab][letter]
polygons = letter_polygons[letter]
if height > 0:
add_letter_to_axis(ax, polygons, color, 0.5 + x_pos, y_pos_pos, height)
y_pos_pos += height # depends on [control=['if'], data=['height']]
else:
add_letter_to_axis(ax, polygons, color, 0.5 + x_pos, y_neg_pos, height)
y_neg_pos += height # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]]
# if add_hline:
# ax.axhline(color="black", linewidth=1)
ax.set_xlim(x_range[0] - 1, x_range[1] + 1)
ax.grid(False)
ax.set_xticks(list(range(*x_range)) + [x_range[-1]])
ax.set_aspect(aspect='auto', adjustable='box')
ax.autoscale_view() |
def _generic_matrix_calc(fn, trees, normalise, min_overlap=4, overlap_fail_value=0, show_progress=True):
"""(fn, trees, normalise)
Calculates all pairwise distances between trees given in the parameter 'trees'.
Distance functions:
eucdist_matrix
geodist_matrix
rfdist_matrix
wrfdist_matrix
These wrap the leafset-checking functions. If the faster non-leafset-checking functions are needed, do this:
scipy.spatial.distance(['getDistance'(t1.phylotree, t2.phylotree, normalise)
for (t1, t2) in itertools.combinations(trees, 2)])
for your choice of 'getDistance' out of:
getEuclideanDistance
getGeodesicDistance
getRobinsonFouldsDistance
getWeightedRobinsonFouldsDistance
:param trees: list or tuple, or some other iterable container type containing Tree objects
:param normalise: boolean
:param min_overlap: int
:return: numpy.array
"""
jobs = itertools.combinations(trees, 2)
results = []
if show_progress:
pbar = setup_progressbar('Calculating tree distances', 0.5 * len(trees) * (len(trees) - 1))
pbar.start()
for i, (t1, t2) in enumerate(jobs):
results.append(_generic_distance_calc(fn, t1, t2, normalise, min_overlap, overlap_fail_value))
if show_progress:
pbar.update(i)
if show_progress:
pbar.finish()
return scipy.spatial.distance.squareform(results) | def function[_generic_matrix_calc, parameter[fn, trees, normalise, min_overlap, overlap_fail_value, show_progress]]:
constant[(fn, trees, normalise)
Calculates all pairwise distances between trees given in the parameter 'trees'.
Distance functions:
eucdist_matrix
geodist_matrix
rfdist_matrix
wrfdist_matrix
These wrap the leafset-checking functions. If the faster non-leafset-checking functions are needed, do this:
scipy.spatial.distance(['getDistance'(t1.phylotree, t2.phylotree, normalise)
for (t1, t2) in itertools.combinations(trees, 2)])
for your choice of 'getDistance' out of:
getEuclideanDistance
getGeodesicDistance
getRobinsonFouldsDistance
getWeightedRobinsonFouldsDistance
:param trees: list or tuple, or some other iterable container type containing Tree objects
:param normalise: boolean
:param min_overlap: int
:return: numpy.array
]
variable[jobs] assign[=] call[name[itertools].combinations, parameter[name[trees], constant[2]]]
variable[results] assign[=] list[[]]
if name[show_progress] begin[:]
variable[pbar] assign[=] call[name[setup_progressbar], parameter[constant[Calculating tree distances], binary_operation[binary_operation[constant[0.5] * call[name[len], parameter[name[trees]]]] * binary_operation[call[name[len], parameter[name[trees]]] - constant[1]]]]]
call[name[pbar].start, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18ede5f90>, <ast.Tuple object at 0x7da18ede7610>]]] in starred[call[name[enumerate], parameter[name[jobs]]]] begin[:]
call[name[results].append, parameter[call[name[_generic_distance_calc], parameter[name[fn], name[t1], name[t2], name[normalise], name[min_overlap], name[overlap_fail_value]]]]]
if name[show_progress] begin[:]
call[name[pbar].update, parameter[name[i]]]
if name[show_progress] begin[:]
call[name[pbar].finish, parameter[]]
return[call[name[scipy].spatial.distance.squareform, parameter[name[results]]]] | keyword[def] identifier[_generic_matrix_calc] ( identifier[fn] , identifier[trees] , identifier[normalise] , identifier[min_overlap] = literal[int] , identifier[overlap_fail_value] = literal[int] , identifier[show_progress] = keyword[True] ):
literal[string]
identifier[jobs] = identifier[itertools] . identifier[combinations] ( identifier[trees] , literal[int] )
identifier[results] =[]
keyword[if] identifier[show_progress] :
identifier[pbar] = identifier[setup_progressbar] ( literal[string] , literal[int] * identifier[len] ( identifier[trees] )*( identifier[len] ( identifier[trees] )- literal[int] ))
identifier[pbar] . identifier[start] ()
keyword[for] identifier[i] ,( identifier[t1] , identifier[t2] ) keyword[in] identifier[enumerate] ( identifier[jobs] ):
identifier[results] . identifier[append] ( identifier[_generic_distance_calc] ( identifier[fn] , identifier[t1] , identifier[t2] , identifier[normalise] , identifier[min_overlap] , identifier[overlap_fail_value] ))
keyword[if] identifier[show_progress] :
identifier[pbar] . identifier[update] ( identifier[i] )
keyword[if] identifier[show_progress] :
identifier[pbar] . identifier[finish] ()
keyword[return] identifier[scipy] . identifier[spatial] . identifier[distance] . identifier[squareform] ( identifier[results] ) | def _generic_matrix_calc(fn, trees, normalise, min_overlap=4, overlap_fail_value=0, show_progress=True):
"""(fn, trees, normalise)
Calculates all pairwise distances between trees given in the parameter 'trees'.
Distance functions:
eucdist_matrix
geodist_matrix
rfdist_matrix
wrfdist_matrix
These wrap the leafset-checking functions. If the faster non-leafset-checking functions are needed, do this:
scipy.spatial.distance(['getDistance'(t1.phylotree, t2.phylotree, normalise)
for (t1, t2) in itertools.combinations(trees, 2)])
for your choice of 'getDistance' out of:
getEuclideanDistance
getGeodesicDistance
getRobinsonFouldsDistance
getWeightedRobinsonFouldsDistance
:param trees: list or tuple, or some other iterable container type containing Tree objects
:param normalise: boolean
:param min_overlap: int
:return: numpy.array
"""
jobs = itertools.combinations(trees, 2)
results = []
if show_progress:
pbar = setup_progressbar('Calculating tree distances', 0.5 * len(trees) * (len(trees) - 1))
pbar.start() # depends on [control=['if'], data=[]]
for (i, (t1, t2)) in enumerate(jobs):
results.append(_generic_distance_calc(fn, t1, t2, normalise, min_overlap, overlap_fail_value))
if show_progress:
pbar.update(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
if show_progress:
pbar.finish() # depends on [control=['if'], data=[]]
return scipy.spatial.distance.squareform(results) |
def process_context_token(self, context_token):
"""
Provides a way to pass an asynchronous token to the security context, outside of the normal
context-establishment token passing flow. This method is not normally used, but some
example uses are:
* when the initiator's context is established successfully but the acceptor's context isn't
and the acceptor needs to signal to the initiator that the context shouldn't be used.
* if :meth:`delete` on one peer's context returns a final token that can be passed to the
other peer to indicate the other peer's context should be torn down as well (though it's
recommended that :meth:`delete` should return nothing, i.e. this method should not be
used by GSSAPI mechanisms).
:param context_token: The context token to pass to the security context
:type context_token: bytes
:raises: :exc:`~gssapi.error.DefectiveToken` if consistency checks on the token failed.
:exc:`~gssapi.error.NoContext` if this context is invalid.
:exc:`~gssapi.error.GSSException` for any other GSSAPI errors.
"""
minor_status = ffi.new('OM_uint32[1]')
context_token_buffer = ffi.new('gss_buffer_desc[1]')
context_token_buffer[0].length = len(context_token)
c_str_context_token = ffi.new('char[]', context_token)
context_token_buffer[0].value = c_str_context_token
retval = C.gss_process_context_token(
minor_status,
self._ctx[0],
context_token_buffer
)
if GSS_ERROR(retval):
if minor_status[0] and self.mech_type:
raise _exception_for_status(retval, minor_status[0], self.mech_type)
else:
raise _exception_for_status(retval, minor_status[0]) | def function[process_context_token, parameter[self, context_token]]:
constant[
Provides a way to pass an asynchronous token to the security context, outside of the normal
context-establishment token passing flow. This method is not normally used, but some
example uses are:
* when the initiator's context is established successfully but the acceptor's context isn't
and the acceptor needs to signal to the initiator that the context shouldn't be used.
* if :meth:`delete` on one peer's context returns a final token that can be passed to the
other peer to indicate the other peer's context should be torn down as well (though it's
recommended that :meth:`delete` should return nothing, i.e. this method should not be
used by GSSAPI mechanisms).
:param context_token: The context token to pass to the security context
:type context_token: bytes
:raises: :exc:`~gssapi.error.DefectiveToken` if consistency checks on the token failed.
:exc:`~gssapi.error.NoContext` if this context is invalid.
:exc:`~gssapi.error.GSSException` for any other GSSAPI errors.
]
variable[minor_status] assign[=] call[name[ffi].new, parameter[constant[OM_uint32[1]]]]
variable[context_token_buffer] assign[=] call[name[ffi].new, parameter[constant[gss_buffer_desc[1]]]]
call[name[context_token_buffer]][constant[0]].length assign[=] call[name[len], parameter[name[context_token]]]
variable[c_str_context_token] assign[=] call[name[ffi].new, parameter[constant[char[]], name[context_token]]]
call[name[context_token_buffer]][constant[0]].value assign[=] name[c_str_context_token]
variable[retval] assign[=] call[name[C].gss_process_context_token, parameter[name[minor_status], call[name[self]._ctx][constant[0]], name[context_token_buffer]]]
if call[name[GSS_ERROR], parameter[name[retval]]] begin[:]
if <ast.BoolOp object at 0x7da1b023cd60> begin[:]
<ast.Raise object at 0x7da1b023c070> | keyword[def] identifier[process_context_token] ( identifier[self] , identifier[context_token] ):
literal[string]
identifier[minor_status] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[context_token_buffer] = identifier[ffi] . identifier[new] ( literal[string] )
identifier[context_token_buffer] [ literal[int] ]. identifier[length] = identifier[len] ( identifier[context_token] )
identifier[c_str_context_token] = identifier[ffi] . identifier[new] ( literal[string] , identifier[context_token] )
identifier[context_token_buffer] [ literal[int] ]. identifier[value] = identifier[c_str_context_token]
identifier[retval] = identifier[C] . identifier[gss_process_context_token] (
identifier[minor_status] ,
identifier[self] . identifier[_ctx] [ literal[int] ],
identifier[context_token_buffer]
)
keyword[if] identifier[GSS_ERROR] ( identifier[retval] ):
keyword[if] identifier[minor_status] [ literal[int] ] keyword[and] identifier[self] . identifier[mech_type] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ], identifier[self] . identifier[mech_type] )
keyword[else] :
keyword[raise] identifier[_exception_for_status] ( identifier[retval] , identifier[minor_status] [ literal[int] ]) | def process_context_token(self, context_token):
"""
Provides a way to pass an asynchronous token to the security context, outside of the normal
context-establishment token passing flow. This method is not normally used, but some
example uses are:
* when the initiator's context is established successfully but the acceptor's context isn't
and the acceptor needs to signal to the initiator that the context shouldn't be used.
* if :meth:`delete` on one peer's context returns a final token that can be passed to the
other peer to indicate the other peer's context should be torn down as well (though it's
recommended that :meth:`delete` should return nothing, i.e. this method should not be
used by GSSAPI mechanisms).
:param context_token: The context token to pass to the security context
:type context_token: bytes
:raises: :exc:`~gssapi.error.DefectiveToken` if consistency checks on the token failed.
:exc:`~gssapi.error.NoContext` if this context is invalid.
:exc:`~gssapi.error.GSSException` for any other GSSAPI errors.
"""
minor_status = ffi.new('OM_uint32[1]')
context_token_buffer = ffi.new('gss_buffer_desc[1]')
context_token_buffer[0].length = len(context_token)
c_str_context_token = ffi.new('char[]', context_token)
context_token_buffer[0].value = c_str_context_token
retval = C.gss_process_context_token(minor_status, self._ctx[0], context_token_buffer)
if GSS_ERROR(retval):
if minor_status[0] and self.mech_type:
raise _exception_for_status(retval, minor_status[0], self.mech_type) # depends on [control=['if'], data=[]]
else:
raise _exception_for_status(retval, minor_status[0]) # depends on [control=['if'], data=[]] |
def cdf(self, y, f, n):
r"""
Cumulative density function of the likelihood.
Parameters
----------
y: ndarray
query quantiles, i.e.\ :math:`P(Y \leq y)`.
f: ndarray
latent function from the GLM prior (:math:`\mathbf{f} =
\boldsymbol\Phi \mathbf{w}`)
n: ndarray
the total number of observations
Returns
-------
cdf: ndarray
Cumulative density function evaluated at y.
"""
return binom.cdf(y, n=n, p=expit(f)) | def function[cdf, parameter[self, y, f, n]]:
constant[
Cumulative density function of the likelihood.
Parameters
----------
y: ndarray
query quantiles, i.e.\ :math:`P(Y \leq y)`.
f: ndarray
latent function from the GLM prior (:math:`\mathbf{f} =
\boldsymbol\Phi \mathbf{w}`)
n: ndarray
the total number of observations
Returns
-------
cdf: ndarray
Cumulative density function evaluated at y.
]
return[call[name[binom].cdf, parameter[name[y]]]] | keyword[def] identifier[cdf] ( identifier[self] , identifier[y] , identifier[f] , identifier[n] ):
literal[string]
keyword[return] identifier[binom] . identifier[cdf] ( identifier[y] , identifier[n] = identifier[n] , identifier[p] = identifier[expit] ( identifier[f] )) | def cdf(self, y, f, n):
"""
Cumulative density function of the likelihood.
Parameters
----------
y: ndarray
query quantiles, i.e.\\ :math:`P(Y \\leq y)`.
f: ndarray
latent function from the GLM prior (:math:`\\mathbf{f} =
\\boldsymbol\\Phi \\mathbf{w}`)
n: ndarray
the total number of observations
Returns
-------
cdf: ndarray
Cumulative density function evaluated at y.
"""
return binom.cdf(y, n=n, p=expit(f)) |
def eventgroups(ctx, sport):
""" [bookie] List event groups for a sport
:param str sport: Sports id
"""
sport = Sport(sport, peerplays_instance=ctx.peerplays)
click.echo(pretty_print(sport.eventgroups, ctx=ctx)) | def function[eventgroups, parameter[ctx, sport]]:
constant[ [bookie] List event groups for a sport
:param str sport: Sports id
]
variable[sport] assign[=] call[name[Sport], parameter[name[sport]]]
call[name[click].echo, parameter[call[name[pretty_print], parameter[name[sport].eventgroups]]]] | keyword[def] identifier[eventgroups] ( identifier[ctx] , identifier[sport] ):
literal[string]
identifier[sport] = identifier[Sport] ( identifier[sport] , identifier[peerplays_instance] = identifier[ctx] . identifier[peerplays] )
identifier[click] . identifier[echo] ( identifier[pretty_print] ( identifier[sport] . identifier[eventgroups] , identifier[ctx] = identifier[ctx] )) | def eventgroups(ctx, sport):
""" [bookie] List event groups for a sport
:param str sport: Sports id
"""
sport = Sport(sport, peerplays_instance=ctx.peerplays)
click.echo(pretty_print(sport.eventgroups, ctx=ctx)) |
def reduce_sorted_to_intersect(ar1, ar2):
"""
Takes two sorted arrays and return the intersection ar1 in ar2, ar2 in ar1.
Parameters
----------
ar1 : (M,) array_like
Input array.
ar2 : array_like
Input array.
Returns
-------
ar1, ar1 : ndarray, ndarray
The intersection values.
"""
# Ravel both arrays, behavior for the first array could be different
ar1 = np.asarray(ar1).ravel()
ar2 = np.asarray(ar2).ravel()
# get min max values of the arrays
ar1_biggest_value = ar1[-1]
ar1_smallest_value = ar1[0]
ar2_biggest_value = ar2[-1]
ar2_smallest_value = ar2[0]
if ar1_biggest_value < ar2_smallest_value or ar1_smallest_value > ar2_biggest_value: # special case, no intersection at all
return ar1[0:0], ar2[0:0]
# get min/max indices with values that are also in the other array
min_index_ar1 = np.argmin(ar1 < ar2_smallest_value)
max_index_ar1 = np.argmax(ar1 > ar2_biggest_value)
min_index_ar2 = np.argmin(ar2 < ar1_smallest_value)
max_index_ar2 = np.argmax(ar2 > ar1_biggest_value)
if min_index_ar1 < 0:
min_index_ar1 = 0
if min_index_ar2 < 0:
min_index_ar2 = 0
if max_index_ar1 == 0 or max_index_ar1 > ar1.shape[0]:
max_index_ar1 = ar1.shape[0]
if max_index_ar2 == 0 or max_index_ar2 > ar2.shape[0]:
max_index_ar2 = ar2.shape[0]
# reduce the data
return ar1[min_index_ar1:max_index_ar1], ar2[min_index_ar2:max_index_ar2] | def function[reduce_sorted_to_intersect, parameter[ar1, ar2]]:
constant[
Takes two sorted arrays and return the intersection ar1 in ar2, ar2 in ar1.
Parameters
----------
ar1 : (M,) array_like
Input array.
ar2 : array_like
Input array.
Returns
-------
ar1, ar1 : ndarray, ndarray
The intersection values.
]
variable[ar1] assign[=] call[call[name[np].asarray, parameter[name[ar1]]].ravel, parameter[]]
variable[ar2] assign[=] call[call[name[np].asarray, parameter[name[ar2]]].ravel, parameter[]]
variable[ar1_biggest_value] assign[=] call[name[ar1]][<ast.UnaryOp object at 0x7da1b11a5420>]
variable[ar1_smallest_value] assign[=] call[name[ar1]][constant[0]]
variable[ar2_biggest_value] assign[=] call[name[ar2]][<ast.UnaryOp object at 0x7da1b11a41c0>]
variable[ar2_smallest_value] assign[=] call[name[ar2]][constant[0]]
if <ast.BoolOp object at 0x7da1b11a4580> begin[:]
return[tuple[[<ast.Subscript object at 0x7da1b11a5c60>, <ast.Subscript object at 0x7da1b11a6410>]]]
variable[min_index_ar1] assign[=] call[name[np].argmin, parameter[compare[name[ar1] less[<] name[ar2_smallest_value]]]]
variable[max_index_ar1] assign[=] call[name[np].argmax, parameter[compare[name[ar1] greater[>] name[ar2_biggest_value]]]]
variable[min_index_ar2] assign[=] call[name[np].argmin, parameter[compare[name[ar2] less[<] name[ar1_smallest_value]]]]
variable[max_index_ar2] assign[=] call[name[np].argmax, parameter[compare[name[ar2] greater[>] name[ar1_biggest_value]]]]
if compare[name[min_index_ar1] less[<] constant[0]] begin[:]
variable[min_index_ar1] assign[=] constant[0]
if compare[name[min_index_ar2] less[<] constant[0]] begin[:]
variable[min_index_ar2] assign[=] constant[0]
if <ast.BoolOp object at 0x7da1b11e1540> begin[:]
variable[max_index_ar1] assign[=] call[name[ar1].shape][constant[0]]
if <ast.BoolOp object at 0x7da1b11e0580> begin[:]
variable[max_index_ar2] assign[=] call[name[ar2].shape][constant[0]]
return[tuple[[<ast.Subscript object at 0x7da1b11e3a90>, <ast.Subscript object at 0x7da1b11e38e0>]]] | keyword[def] identifier[reduce_sorted_to_intersect] ( identifier[ar1] , identifier[ar2] ):
literal[string]
identifier[ar1] = identifier[np] . identifier[asarray] ( identifier[ar1] ). identifier[ravel] ()
identifier[ar2] = identifier[np] . identifier[asarray] ( identifier[ar2] ). identifier[ravel] ()
identifier[ar1_biggest_value] = identifier[ar1] [- literal[int] ]
identifier[ar1_smallest_value] = identifier[ar1] [ literal[int] ]
identifier[ar2_biggest_value] = identifier[ar2] [- literal[int] ]
identifier[ar2_smallest_value] = identifier[ar2] [ literal[int] ]
keyword[if] identifier[ar1_biggest_value] < identifier[ar2_smallest_value] keyword[or] identifier[ar1_smallest_value] > identifier[ar2_biggest_value] :
keyword[return] identifier[ar1] [ literal[int] : literal[int] ], identifier[ar2] [ literal[int] : literal[int] ]
identifier[min_index_ar1] = identifier[np] . identifier[argmin] ( identifier[ar1] < identifier[ar2_smallest_value] )
identifier[max_index_ar1] = identifier[np] . identifier[argmax] ( identifier[ar1] > identifier[ar2_biggest_value] )
identifier[min_index_ar2] = identifier[np] . identifier[argmin] ( identifier[ar2] < identifier[ar1_smallest_value] )
identifier[max_index_ar2] = identifier[np] . identifier[argmax] ( identifier[ar2] > identifier[ar1_biggest_value] )
keyword[if] identifier[min_index_ar1] < literal[int] :
identifier[min_index_ar1] = literal[int]
keyword[if] identifier[min_index_ar2] < literal[int] :
identifier[min_index_ar2] = literal[int]
keyword[if] identifier[max_index_ar1] == literal[int] keyword[or] identifier[max_index_ar1] > identifier[ar1] . identifier[shape] [ literal[int] ]:
identifier[max_index_ar1] = identifier[ar1] . identifier[shape] [ literal[int] ]
keyword[if] identifier[max_index_ar2] == literal[int] keyword[or] identifier[max_index_ar2] > identifier[ar2] . identifier[shape] [ literal[int] ]:
identifier[max_index_ar2] = identifier[ar2] . identifier[shape] [ literal[int] ]
keyword[return] identifier[ar1] [ identifier[min_index_ar1] : identifier[max_index_ar1] ], identifier[ar2] [ identifier[min_index_ar2] : identifier[max_index_ar2] ] | def reduce_sorted_to_intersect(ar1, ar2):
"""
Takes two sorted arrays and return the intersection ar1 in ar2, ar2 in ar1.
Parameters
----------
ar1 : (M,) array_like
Input array.
ar2 : array_like
Input array.
Returns
-------
ar1, ar1 : ndarray, ndarray
The intersection values.
"""
# Ravel both arrays, behavior for the first array could be different
ar1 = np.asarray(ar1).ravel()
ar2 = np.asarray(ar2).ravel()
# get min max values of the arrays
ar1_biggest_value = ar1[-1]
ar1_smallest_value = ar1[0]
ar2_biggest_value = ar2[-1]
ar2_smallest_value = ar2[0]
if ar1_biggest_value < ar2_smallest_value or ar1_smallest_value > ar2_biggest_value: # special case, no intersection at all
return (ar1[0:0], ar2[0:0]) # depends on [control=['if'], data=[]]
# get min/max indices with values that are also in the other array
min_index_ar1 = np.argmin(ar1 < ar2_smallest_value)
max_index_ar1 = np.argmax(ar1 > ar2_biggest_value)
min_index_ar2 = np.argmin(ar2 < ar1_smallest_value)
max_index_ar2 = np.argmax(ar2 > ar1_biggest_value)
if min_index_ar1 < 0:
min_index_ar1 = 0 # depends on [control=['if'], data=['min_index_ar1']]
if min_index_ar2 < 0:
min_index_ar2 = 0 # depends on [control=['if'], data=['min_index_ar2']]
if max_index_ar1 == 0 or max_index_ar1 > ar1.shape[0]:
max_index_ar1 = ar1.shape[0] # depends on [control=['if'], data=[]]
if max_index_ar2 == 0 or max_index_ar2 > ar2.shape[0]:
max_index_ar2 = ar2.shape[0] # depends on [control=['if'], data=[]]
# reduce the data
return (ar1[min_index_ar1:max_index_ar1], ar2[min_index_ar2:max_index_ar2]) |
def get_file_to_stream(self, stream, share_name, directory_name, file_name, **kwargs):
"""
Download a file from Azure File Share.
:param stream: A filehandle to store the file to.
:type stream: file-like object
:param share_name: Name of the share.
:type share_name: str
:param directory_name: Name of the directory.
:type directory_name: str
:param file_name: Name of the file.
:type file_name: str
:param kwargs: Optional keyword arguments that
`FileService.get_file_to_stream()` takes.
:type kwargs: object
"""
self.connection.get_file_to_stream(share_name, directory_name,
file_name, stream, **kwargs) | def function[get_file_to_stream, parameter[self, stream, share_name, directory_name, file_name]]:
constant[
Download a file from Azure File Share.
:param stream: A filehandle to store the file to.
:type stream: file-like object
:param share_name: Name of the share.
:type share_name: str
:param directory_name: Name of the directory.
:type directory_name: str
:param file_name: Name of the file.
:type file_name: str
:param kwargs: Optional keyword arguments that
`FileService.get_file_to_stream()` takes.
:type kwargs: object
]
call[name[self].connection.get_file_to_stream, parameter[name[share_name], name[directory_name], name[file_name], name[stream]]] | keyword[def] identifier[get_file_to_stream] ( identifier[self] , identifier[stream] , identifier[share_name] , identifier[directory_name] , identifier[file_name] ,** identifier[kwargs] ):
literal[string]
identifier[self] . identifier[connection] . identifier[get_file_to_stream] ( identifier[share_name] , identifier[directory_name] ,
identifier[file_name] , identifier[stream] ,** identifier[kwargs] ) | def get_file_to_stream(self, stream, share_name, directory_name, file_name, **kwargs):
"""
Download a file from Azure File Share.
:param stream: A filehandle to store the file to.
:type stream: file-like object
:param share_name: Name of the share.
:type share_name: str
:param directory_name: Name of the directory.
:type directory_name: str
:param file_name: Name of the file.
:type file_name: str
:param kwargs: Optional keyword arguments that
`FileService.get_file_to_stream()` takes.
:type kwargs: object
"""
self.connection.get_file_to_stream(share_name, directory_name, file_name, stream, **kwargs) |
def __fetch_crate_version_downloads(self, crate_id):
"""Get crate version downloads"""
raw_version_downloads = self.client.crate_attribute(crate_id, "downloads")
version_downloads = json.loads(raw_version_downloads)
return version_downloads | def function[__fetch_crate_version_downloads, parameter[self, crate_id]]:
constant[Get crate version downloads]
variable[raw_version_downloads] assign[=] call[name[self].client.crate_attribute, parameter[name[crate_id], constant[downloads]]]
variable[version_downloads] assign[=] call[name[json].loads, parameter[name[raw_version_downloads]]]
return[name[version_downloads]] | keyword[def] identifier[__fetch_crate_version_downloads] ( identifier[self] , identifier[crate_id] ):
literal[string]
identifier[raw_version_downloads] = identifier[self] . identifier[client] . identifier[crate_attribute] ( identifier[crate_id] , literal[string] )
identifier[version_downloads] = identifier[json] . identifier[loads] ( identifier[raw_version_downloads] )
keyword[return] identifier[version_downloads] | def __fetch_crate_version_downloads(self, crate_id):
"""Get crate version downloads"""
raw_version_downloads = self.client.crate_attribute(crate_id, 'downloads')
version_downloads = json.loads(raw_version_downloads)
return version_downloads |
def tb_c_file_creation(target, source, env):
"""Compile tilebus file into a .h/.c pair for compilation into an ARM object"""
files = [str(x) for x in source]
try:
desc = TBDescriptor(files)
except pyparsing.ParseException as e:
raise BuildError("Could not parse tilebus file", parsing_exception=e)
block = desc.get_block()
block.render_template(block.CommandFileTemplate, out_path=str(target[0]))
block.render_template(block.CommandHeaderTemplate, out_path=str(target[1]))
block.render_template(block.ConfigFileTemplate, out_path=str(target[2]))
block.render_template(block.ConfigHeaderTemplate, out_path=str(target[3])) | def function[tb_c_file_creation, parameter[target, source, env]]:
constant[Compile tilebus file into a .h/.c pair for compilation into an ARM object]
variable[files] assign[=] <ast.ListComp object at 0x7da2045668c0>
<ast.Try object at 0x7da204566830>
variable[block] assign[=] call[name[desc].get_block, parameter[]]
call[name[block].render_template, parameter[name[block].CommandFileTemplate]]
call[name[block].render_template, parameter[name[block].CommandHeaderTemplate]]
call[name[block].render_template, parameter[name[block].ConfigFileTemplate]]
call[name[block].render_template, parameter[name[block].ConfigHeaderTemplate]] | keyword[def] identifier[tb_c_file_creation] ( identifier[target] , identifier[source] , identifier[env] ):
literal[string]
identifier[files] =[ identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[source] ]
keyword[try] :
identifier[desc] = identifier[TBDescriptor] ( identifier[files] )
keyword[except] identifier[pyparsing] . identifier[ParseException] keyword[as] identifier[e] :
keyword[raise] identifier[BuildError] ( literal[string] , identifier[parsing_exception] = identifier[e] )
identifier[block] = identifier[desc] . identifier[get_block] ()
identifier[block] . identifier[render_template] ( identifier[block] . identifier[CommandFileTemplate] , identifier[out_path] = identifier[str] ( identifier[target] [ literal[int] ]))
identifier[block] . identifier[render_template] ( identifier[block] . identifier[CommandHeaderTemplate] , identifier[out_path] = identifier[str] ( identifier[target] [ literal[int] ]))
identifier[block] . identifier[render_template] ( identifier[block] . identifier[ConfigFileTemplate] , identifier[out_path] = identifier[str] ( identifier[target] [ literal[int] ]))
identifier[block] . identifier[render_template] ( identifier[block] . identifier[ConfigHeaderTemplate] , identifier[out_path] = identifier[str] ( identifier[target] [ literal[int] ])) | def tb_c_file_creation(target, source, env):
"""Compile tilebus file into a .h/.c pair for compilation into an ARM object"""
files = [str(x) for x in source]
try:
desc = TBDescriptor(files) # depends on [control=['try'], data=[]]
except pyparsing.ParseException as e:
raise BuildError('Could not parse tilebus file', parsing_exception=e) # depends on [control=['except'], data=['e']]
block = desc.get_block()
block.render_template(block.CommandFileTemplate, out_path=str(target[0]))
block.render_template(block.CommandHeaderTemplate, out_path=str(target[1]))
block.render_template(block.ConfigFileTemplate, out_path=str(target[2]))
block.render_template(block.ConfigHeaderTemplate, out_path=str(target[3])) |
def is_course_run_upgradeable(course_run):
"""
Return true if the course run has a verified seat with an unexpired upgrade deadline, false otherwise.
"""
now = datetime.datetime.now(pytz.UTC)
for seat in course_run.get('seats', []):
if seat.get('type') == 'verified':
upgrade_deadline = parse_datetime_handle_invalid(seat.get('upgrade_deadline'))
return not upgrade_deadline or upgrade_deadline > now
return False | def function[is_course_run_upgradeable, parameter[course_run]]:
constant[
Return true if the course run has a verified seat with an unexpired upgrade deadline, false otherwise.
]
variable[now] assign[=] call[name[datetime].datetime.now, parameter[name[pytz].UTC]]
for taget[name[seat]] in starred[call[name[course_run].get, parameter[constant[seats], list[[]]]]] begin[:]
if compare[call[name[seat].get, parameter[constant[type]]] equal[==] constant[verified]] begin[:]
variable[upgrade_deadline] assign[=] call[name[parse_datetime_handle_invalid], parameter[call[name[seat].get, parameter[constant[upgrade_deadline]]]]]
return[<ast.BoolOp object at 0x7da18f09eef0>]
return[constant[False]] | keyword[def] identifier[is_course_run_upgradeable] ( identifier[course_run] ):
literal[string]
identifier[now] = identifier[datetime] . identifier[datetime] . identifier[now] ( identifier[pytz] . identifier[UTC] )
keyword[for] identifier[seat] keyword[in] identifier[course_run] . identifier[get] ( literal[string] ,[]):
keyword[if] identifier[seat] . identifier[get] ( literal[string] )== literal[string] :
identifier[upgrade_deadline] = identifier[parse_datetime_handle_invalid] ( identifier[seat] . identifier[get] ( literal[string] ))
keyword[return] keyword[not] identifier[upgrade_deadline] keyword[or] identifier[upgrade_deadline] > identifier[now]
keyword[return] keyword[False] | def is_course_run_upgradeable(course_run):
"""
Return true if the course run has a verified seat with an unexpired upgrade deadline, false otherwise.
"""
now = datetime.datetime.now(pytz.UTC)
for seat in course_run.get('seats', []):
if seat.get('type') == 'verified':
upgrade_deadline = parse_datetime_handle_invalid(seat.get('upgrade_deadline'))
return not upgrade_deadline or upgrade_deadline > now # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['seat']]
return False |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.