code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
async def encode_jwt(self, identity: SessionIdentity) -> str:
""" 将identity编码为JWT """
assert identity
payload = {
"sub": identity.identity,
"user_id": identity.user_id,
"exp": int(time.time() + self._max_age) # seconds from 1970-1-1 UTC
}
if identity.client_id:
payload['aud'] = identity.client_id
token = jwt.encode({'alg': 'HS256'}, payload, self._secure_key)
return token.decode('ascii') | <ast.AsyncFunctionDef object at 0x7da20c993b80> | keyword[async] keyword[def] identifier[encode_jwt] ( identifier[self] , identifier[identity] : identifier[SessionIdentity] )-> identifier[str] :
literal[string]
keyword[assert] identifier[identity]
identifier[payload] ={
literal[string] : identifier[identity] . identifier[identity] ,
literal[string] : identifier[identity] . identifier[user_id] ,
literal[string] : identifier[int] ( identifier[time] . identifier[time] ()+ identifier[self] . identifier[_max_age] )
}
keyword[if] identifier[identity] . identifier[client_id] :
identifier[payload] [ literal[string] ]= identifier[identity] . identifier[client_id]
identifier[token] = identifier[jwt] . identifier[encode] ({ literal[string] : literal[string] }, identifier[payload] , identifier[self] . identifier[_secure_key] )
keyword[return] identifier[token] . identifier[decode] ( literal[string] ) | async def encode_jwt(self, identity: SessionIdentity) -> str:
""" 将identity编码为JWT """
assert identity # seconds from 1970-1-1 UTC
payload = {'sub': identity.identity, 'user_id': identity.user_id, 'exp': int(time.time() + self._max_age)}
if identity.client_id:
payload['aud'] = identity.client_id # depends on [control=['if'], data=[]]
token = jwt.encode({'alg': 'HS256'}, payload, self._secure_key)
return token.decode('ascii') |
def _update_system_file(system_file, name, new_kvs):
"""Update the bcbio_system.yaml file with new resource information.
"""
if os.path.exists(system_file):
bak_file = system_file + ".bak%s" % datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")
shutil.copyfile(system_file, bak_file)
with open(system_file) as in_handle:
config = yaml.safe_load(in_handle)
else:
utils.safe_makedir(os.path.dirname(system_file))
config = {}
new_rs = {}
added = False
for rname, r_kvs in config.get("resources", {}).items():
if rname == name:
for k, v in new_kvs.items():
r_kvs[k] = v
added = True
new_rs[rname] = r_kvs
if not added:
new_rs[name] = new_kvs
config["resources"] = new_rs
with open(system_file, "w") as out_handle:
yaml.safe_dump(config, out_handle, default_flow_style=False, allow_unicode=False) | def function[_update_system_file, parameter[system_file, name, new_kvs]]:
constant[Update the bcbio_system.yaml file with new resource information.
]
if call[name[os].path.exists, parameter[name[system_file]]] begin[:]
variable[bak_file] assign[=] binary_operation[name[system_file] + binary_operation[constant[.bak%s] <ast.Mod object at 0x7da2590d6920> call[call[name[datetime].datetime.now, parameter[]].strftime, parameter[constant[%Y-%m-%d-%H-%M-%S]]]]]
call[name[shutil].copyfile, parameter[name[system_file], name[bak_file]]]
with call[name[open], parameter[name[system_file]]] begin[:]
variable[config] assign[=] call[name[yaml].safe_load, parameter[name[in_handle]]]
variable[new_rs] assign[=] dictionary[[], []]
variable[added] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b19b8550>, <ast.Name object at 0x7da1b19b9660>]]] in starred[call[call[name[config].get, parameter[constant[resources], dictionary[[], []]]].items, parameter[]]] begin[:]
if compare[name[rname] equal[==] name[name]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b19b9b70>, <ast.Name object at 0x7da1b19b8ca0>]]] in starred[call[name[new_kvs].items, parameter[]]] begin[:]
call[name[r_kvs]][name[k]] assign[=] name[v]
variable[added] assign[=] constant[True]
call[name[new_rs]][name[rname]] assign[=] name[r_kvs]
if <ast.UnaryOp object at 0x7da1b19d9d50> begin[:]
call[name[new_rs]][name[name]] assign[=] name[new_kvs]
call[name[config]][constant[resources]] assign[=] name[new_rs]
with call[name[open], parameter[name[system_file], constant[w]]] begin[:]
call[name[yaml].safe_dump, parameter[name[config], name[out_handle]]] | keyword[def] identifier[_update_system_file] ( identifier[system_file] , identifier[name] , identifier[new_kvs] ):
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[system_file] ):
identifier[bak_file] = identifier[system_file] + literal[string] % identifier[datetime] . identifier[datetime] . identifier[now] (). identifier[strftime] ( literal[string] )
identifier[shutil] . identifier[copyfile] ( identifier[system_file] , identifier[bak_file] )
keyword[with] identifier[open] ( identifier[system_file] ) keyword[as] identifier[in_handle] :
identifier[config] = identifier[yaml] . identifier[safe_load] ( identifier[in_handle] )
keyword[else] :
identifier[utils] . identifier[safe_makedir] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[system_file] ))
identifier[config] ={}
identifier[new_rs] ={}
identifier[added] = keyword[False]
keyword[for] identifier[rname] , identifier[r_kvs] keyword[in] identifier[config] . identifier[get] ( literal[string] ,{}). identifier[items] ():
keyword[if] identifier[rname] == identifier[name] :
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[new_kvs] . identifier[items] ():
identifier[r_kvs] [ identifier[k] ]= identifier[v]
identifier[added] = keyword[True]
identifier[new_rs] [ identifier[rname] ]= identifier[r_kvs]
keyword[if] keyword[not] identifier[added] :
identifier[new_rs] [ identifier[name] ]= identifier[new_kvs]
identifier[config] [ literal[string] ]= identifier[new_rs]
keyword[with] identifier[open] ( identifier[system_file] , literal[string] ) keyword[as] identifier[out_handle] :
identifier[yaml] . identifier[safe_dump] ( identifier[config] , identifier[out_handle] , identifier[default_flow_style] = keyword[False] , identifier[allow_unicode] = keyword[False] ) | def _update_system_file(system_file, name, new_kvs):
"""Update the bcbio_system.yaml file with new resource information.
"""
if os.path.exists(system_file):
bak_file = system_file + '.bak%s' % datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
shutil.copyfile(system_file, bak_file)
with open(system_file) as in_handle:
config = yaml.safe_load(in_handle) # depends on [control=['with'], data=['in_handle']] # depends on [control=['if'], data=[]]
else:
utils.safe_makedir(os.path.dirname(system_file))
config = {}
new_rs = {}
added = False
for (rname, r_kvs) in config.get('resources', {}).items():
if rname == name:
for (k, v) in new_kvs.items():
r_kvs[k] = v # depends on [control=['for'], data=[]]
added = True # depends on [control=['if'], data=[]]
new_rs[rname] = r_kvs # depends on [control=['for'], data=[]]
if not added:
new_rs[name] = new_kvs # depends on [control=['if'], data=[]]
config['resources'] = new_rs
with open(system_file, 'w') as out_handle:
yaml.safe_dump(config, out_handle, default_flow_style=False, allow_unicode=False) # depends on [control=['with'], data=['out_handle']] |
def parse_simpleexprsp(self, tup_tree):
# pylint: disable=unused-argument
"""
This Function not implemented. This response is for export senders
(indication senders) so it is not implemented in the pywbem
client.
"""
raise CIMXMLParseError(
_format("Internal Error: Parsing support for element {0!A} is not "
"implemented", name(tup_tree)),
conn_id=self.conn_id) | def function[parse_simpleexprsp, parameter[self, tup_tree]]:
constant[
This Function not implemented. This response is for export senders
(indication senders) so it is not implemented in the pywbem
client.
]
<ast.Raise object at 0x7da18bcc9630> | keyword[def] identifier[parse_simpleexprsp] ( identifier[self] , identifier[tup_tree] ):
literal[string]
keyword[raise] identifier[CIMXMLParseError] (
identifier[_format] ( literal[string]
literal[string] , identifier[name] ( identifier[tup_tree] )),
identifier[conn_id] = identifier[self] . identifier[conn_id] ) | def parse_simpleexprsp(self, tup_tree):
# pylint: disable=unused-argument
'\n This Function not implemented. This response is for export senders\n (indication senders) so it is not implemented in the pywbem\n client.\n '
raise CIMXMLParseError(_format('Internal Error: Parsing support for element {0!A} is not implemented', name(tup_tree)), conn_id=self.conn_id) |
def get_column_info(connection, table_name):
"""
Return an in order list of (name, type) tuples describing the
columns in the given table.
"""
cursor = connection.cursor()
cursor.execute("SELECT sql FROM sqlite_master WHERE type == 'table' AND name == ?", (table_name,))
statement, = cursor.fetchone()
coldefs = re.match(_sql_create_table_pattern, statement).groupdict()["coldefs"]
return [(coldef.groupdict()["name"], coldef.groupdict()["type"]) for coldef in re.finditer(_sql_coldef_pattern, coldefs) if coldef.groupdict()["name"].upper() not in ("PRIMARY", "UNIQUE", "CHECK")] | def function[get_column_info, parameter[connection, table_name]]:
constant[
Return an in order list of (name, type) tuples describing the
columns in the given table.
]
variable[cursor] assign[=] call[name[connection].cursor, parameter[]]
call[name[cursor].execute, parameter[constant[SELECT sql FROM sqlite_master WHERE type == 'table' AND name == ?], tuple[[<ast.Name object at 0x7da1b0b118d0>]]]]
<ast.Tuple object at 0x7da1b0b13340> assign[=] call[name[cursor].fetchone, parameter[]]
variable[coldefs] assign[=] call[call[call[name[re].match, parameter[name[_sql_create_table_pattern], name[statement]]].groupdict, parameter[]]][constant[coldefs]]
return[<ast.ListComp object at 0x7da1b0a6bf40>] | keyword[def] identifier[get_column_info] ( identifier[connection] , identifier[table_name] ):
literal[string]
identifier[cursor] = identifier[connection] . identifier[cursor] ()
identifier[cursor] . identifier[execute] ( literal[string] ,( identifier[table_name] ,))
identifier[statement] ,= identifier[cursor] . identifier[fetchone] ()
identifier[coldefs] = identifier[re] . identifier[match] ( identifier[_sql_create_table_pattern] , identifier[statement] ). identifier[groupdict] ()[ literal[string] ]
keyword[return] [( identifier[coldef] . identifier[groupdict] ()[ literal[string] ], identifier[coldef] . identifier[groupdict] ()[ literal[string] ]) keyword[for] identifier[coldef] keyword[in] identifier[re] . identifier[finditer] ( identifier[_sql_coldef_pattern] , identifier[coldefs] ) keyword[if] identifier[coldef] . identifier[groupdict] ()[ literal[string] ]. identifier[upper] () keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] )] | def get_column_info(connection, table_name):
"""
Return an in order list of (name, type) tuples describing the
columns in the given table.
"""
cursor = connection.cursor()
cursor.execute("SELECT sql FROM sqlite_master WHERE type == 'table' AND name == ?", (table_name,))
(statement,) = cursor.fetchone()
coldefs = re.match(_sql_create_table_pattern, statement).groupdict()['coldefs']
return [(coldef.groupdict()['name'], coldef.groupdict()['type']) for coldef in re.finditer(_sql_coldef_pattern, coldefs) if coldef.groupdict()['name'].upper() not in ('PRIMARY', 'UNIQUE', 'CHECK')] |
def get_cantera_composition_string(self, species_conversion=None):
"""Get the composition in a string format suitable for input to Cantera.
Returns a formatted string no matter the type of composition. As such, this method
is not recommended for end users; instead, prefer the `get_cantera_mole_fraction`
or `get_cantera_mass_fraction` methods.
Arguments:
species_conversion (`dict`, optional): Mapping of species identifier to a
species name. This argument should be supplied when the name of the
species in the ChemKED YAML file does not match the name of the same
species in a chemical kinetic mechanism. The species identifier (the key
of the mapping) can be the name, InChI, or SMILES provided in the ChemKED
file, while the value associated with a key should be the desired name in
the Cantera format output string.
Returns:
`str`: String in the ``SPEC:AMT, SPEC:AMT`` format
Raises:
`ValueError`: If the composition type of the `DataPoint` is not one of
``'mass fraction'``, ``'mole fraction'``, or ``'mole percent'``
"""
if self.composition_type in ['mole fraction', 'mass fraction']:
factor = 1.0
elif self.composition_type == 'mole percent':
factor = 100.0
else:
raise ValueError('Unknown composition type: {}'.format(self.composition_type))
if species_conversion is None:
comps = ['{!s}:{:.4e}'.format(c.species_name,
c.amount.magnitude/factor) for c in self.composition.values()]
else:
comps = []
for c in self.composition.values():
amount = c.amount.magnitude/factor
idents = [getattr(c, s, False) for s in ['species_name', 'InChI', 'SMILES']]
present = [i in species_conversion for i in idents]
if not any(present):
comps.append('{!s}:{:.4e}'.format(c.species_name, amount))
else:
if len([i for i in present if i]) > 1:
raise ValueError('More than one conversion present for species {}'.format(
c.species_name))
ident = idents[present.index(True)]
species_replacement_name = species_conversion.pop(ident)
comps.append('{!s}:{:.4e}'.format(species_replacement_name, amount))
if len(species_conversion) > 0:
raise ValueError('Unknown species in conversion: {}'.format(species_conversion))
return ', '.join(comps) | def function[get_cantera_composition_string, parameter[self, species_conversion]]:
constant[Get the composition in a string format suitable for input to Cantera.
Returns a formatted string no matter the type of composition. As such, this method
is not recommended for end users; instead, prefer the `get_cantera_mole_fraction`
or `get_cantera_mass_fraction` methods.
Arguments:
species_conversion (`dict`, optional): Mapping of species identifier to a
species name. This argument should be supplied when the name of the
species in the ChemKED YAML file does not match the name of the same
species in a chemical kinetic mechanism. The species identifier (the key
of the mapping) can be the name, InChI, or SMILES provided in the ChemKED
file, while the value associated with a key should be the desired name in
the Cantera format output string.
Returns:
`str`: String in the ``SPEC:AMT, SPEC:AMT`` format
Raises:
`ValueError`: If the composition type of the `DataPoint` is not one of
``'mass fraction'``, ``'mole fraction'``, or ``'mole percent'``
]
if compare[name[self].composition_type in list[[<ast.Constant object at 0x7da1b2558a90>, <ast.Constant object at 0x7da1b255bc70>]]] begin[:]
variable[factor] assign[=] constant[1.0]
if compare[name[species_conversion] is constant[None]] begin[:]
variable[comps] assign[=] <ast.ListComp object at 0x7da1b2559f00>
return[call[constant[, ].join, parameter[name[comps]]]] | keyword[def] identifier[get_cantera_composition_string] ( identifier[self] , identifier[species_conversion] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[composition_type] keyword[in] [ literal[string] , literal[string] ]:
identifier[factor] = literal[int]
keyword[elif] identifier[self] . identifier[composition_type] == literal[string] :
identifier[factor] = literal[int]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[composition_type] ))
keyword[if] identifier[species_conversion] keyword[is] keyword[None] :
identifier[comps] =[ literal[string] . identifier[format] ( identifier[c] . identifier[species_name] ,
identifier[c] . identifier[amount] . identifier[magnitude] / identifier[factor] ) keyword[for] identifier[c] keyword[in] identifier[self] . identifier[composition] . identifier[values] ()]
keyword[else] :
identifier[comps] =[]
keyword[for] identifier[c] keyword[in] identifier[self] . identifier[composition] . identifier[values] ():
identifier[amount] = identifier[c] . identifier[amount] . identifier[magnitude] / identifier[factor]
identifier[idents] =[ identifier[getattr] ( identifier[c] , identifier[s] , keyword[False] ) keyword[for] identifier[s] keyword[in] [ literal[string] , literal[string] , literal[string] ]]
identifier[present] =[ identifier[i] keyword[in] identifier[species_conversion] keyword[for] identifier[i] keyword[in] identifier[idents] ]
keyword[if] keyword[not] identifier[any] ( identifier[present] ):
identifier[comps] . identifier[append] ( literal[string] . identifier[format] ( identifier[c] . identifier[species_name] , identifier[amount] ))
keyword[else] :
keyword[if] identifier[len] ([ identifier[i] keyword[for] identifier[i] keyword[in] identifier[present] keyword[if] identifier[i] ])> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
identifier[c] . identifier[species_name] ))
identifier[ident] = identifier[idents] [ identifier[present] . identifier[index] ( keyword[True] )]
identifier[species_replacement_name] = identifier[species_conversion] . identifier[pop] ( identifier[ident] )
identifier[comps] . identifier[append] ( literal[string] . identifier[format] ( identifier[species_replacement_name] , identifier[amount] ))
keyword[if] identifier[len] ( identifier[species_conversion] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[species_conversion] ))
keyword[return] literal[string] . identifier[join] ( identifier[comps] ) | def get_cantera_composition_string(self, species_conversion=None):
"""Get the composition in a string format suitable for input to Cantera.
Returns a formatted string no matter the type of composition. As such, this method
is not recommended for end users; instead, prefer the `get_cantera_mole_fraction`
or `get_cantera_mass_fraction` methods.
Arguments:
species_conversion (`dict`, optional): Mapping of species identifier to a
species name. This argument should be supplied when the name of the
species in the ChemKED YAML file does not match the name of the same
species in a chemical kinetic mechanism. The species identifier (the key
of the mapping) can be the name, InChI, or SMILES provided in the ChemKED
file, while the value associated with a key should be the desired name in
the Cantera format output string.
Returns:
`str`: String in the ``SPEC:AMT, SPEC:AMT`` format
Raises:
`ValueError`: If the composition type of the `DataPoint` is not one of
``'mass fraction'``, ``'mole fraction'``, or ``'mole percent'``
"""
if self.composition_type in ['mole fraction', 'mass fraction']:
factor = 1.0 # depends on [control=['if'], data=[]]
elif self.composition_type == 'mole percent':
factor = 100.0 # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown composition type: {}'.format(self.composition_type))
if species_conversion is None:
comps = ['{!s}:{:.4e}'.format(c.species_name, c.amount.magnitude / factor) for c in self.composition.values()] # depends on [control=['if'], data=[]]
else:
comps = []
for c in self.composition.values():
amount = c.amount.magnitude / factor
idents = [getattr(c, s, False) for s in ['species_name', 'InChI', 'SMILES']]
present = [i in species_conversion for i in idents]
if not any(present):
comps.append('{!s}:{:.4e}'.format(c.species_name, amount)) # depends on [control=['if'], data=[]]
else:
if len([i for i in present if i]) > 1:
raise ValueError('More than one conversion present for species {}'.format(c.species_name)) # depends on [control=['if'], data=[]]
ident = idents[present.index(True)]
species_replacement_name = species_conversion.pop(ident)
comps.append('{!s}:{:.4e}'.format(species_replacement_name, amount)) # depends on [control=['for'], data=['c']]
if len(species_conversion) > 0:
raise ValueError('Unknown species in conversion: {}'.format(species_conversion)) # depends on [control=['if'], data=[]]
return ', '.join(comps) |
def remove_hotspot(self, hotspot, xy):
"""
Remove the hotspot at ``(x, y)``: Any previously rendered image where
the hotspot was placed is erased from the backing image, and will be
"undrawn" the next time the virtual device is refreshed. If the
specified hotspot is not found for ``(x, y)``, a ``ValueError`` is
raised.
"""
self._hotspots.remove((hotspot, xy))
eraser = Image.new(self.mode, hotspot.size)
self._backing_image.paste(eraser, xy) | def function[remove_hotspot, parameter[self, hotspot, xy]]:
constant[
Remove the hotspot at ``(x, y)``: Any previously rendered image where
the hotspot was placed is erased from the backing image, and will be
"undrawn" the next time the virtual device is refreshed. If the
specified hotspot is not found for ``(x, y)``, a ``ValueError`` is
raised.
]
call[name[self]._hotspots.remove, parameter[tuple[[<ast.Name object at 0x7da1b06d32e0>, <ast.Name object at 0x7da1b06d00d0>]]]]
variable[eraser] assign[=] call[name[Image].new, parameter[name[self].mode, name[hotspot].size]]
call[name[self]._backing_image.paste, parameter[name[eraser], name[xy]]] | keyword[def] identifier[remove_hotspot] ( identifier[self] , identifier[hotspot] , identifier[xy] ):
literal[string]
identifier[self] . identifier[_hotspots] . identifier[remove] (( identifier[hotspot] , identifier[xy] ))
identifier[eraser] = identifier[Image] . identifier[new] ( identifier[self] . identifier[mode] , identifier[hotspot] . identifier[size] )
identifier[self] . identifier[_backing_image] . identifier[paste] ( identifier[eraser] , identifier[xy] ) | def remove_hotspot(self, hotspot, xy):
"""
Remove the hotspot at ``(x, y)``: Any previously rendered image where
the hotspot was placed is erased from the backing image, and will be
"undrawn" the next time the virtual device is refreshed. If the
specified hotspot is not found for ``(x, y)``, a ``ValueError`` is
raised.
"""
self._hotspots.remove((hotspot, xy))
eraser = Image.new(self.mode, hotspot.size)
self._backing_image.paste(eraser, xy) |
def visual_at(self, pos):
"""Return the visual at a given position
Parameters
----------
pos : tuple
The position in logical coordinates to query.
Returns
-------
visual : instance of Visual | None
The visual at the position, if it exists.
"""
tr = self.transforms.get_transform('canvas', 'framebuffer')
fbpos = tr.map(pos)[:2]
try:
id_ = self._render_picking(region=(fbpos[0], fbpos[1],
1, 1))
vis = VisualNode._visual_ids.get(id_[0, 0], None)
except RuntimeError:
# Don't have read_pixels() support for IPython. Fall back to
# bounds checking.
return self._visual_bounds_at(pos)
return vis | def function[visual_at, parameter[self, pos]]:
constant[Return the visual at a given position
Parameters
----------
pos : tuple
The position in logical coordinates to query.
Returns
-------
visual : instance of Visual | None
The visual at the position, if it exists.
]
variable[tr] assign[=] call[name[self].transforms.get_transform, parameter[constant[canvas], constant[framebuffer]]]
variable[fbpos] assign[=] call[call[name[tr].map, parameter[name[pos]]]][<ast.Slice object at 0x7da1b0e71420>]
<ast.Try object at 0x7da1b0e73f70>
return[name[vis]] | keyword[def] identifier[visual_at] ( identifier[self] , identifier[pos] ):
literal[string]
identifier[tr] = identifier[self] . identifier[transforms] . identifier[get_transform] ( literal[string] , literal[string] )
identifier[fbpos] = identifier[tr] . identifier[map] ( identifier[pos] )[: literal[int] ]
keyword[try] :
identifier[id_] = identifier[self] . identifier[_render_picking] ( identifier[region] =( identifier[fbpos] [ literal[int] ], identifier[fbpos] [ literal[int] ],
literal[int] , literal[int] ))
identifier[vis] = identifier[VisualNode] . identifier[_visual_ids] . identifier[get] ( identifier[id_] [ literal[int] , literal[int] ], keyword[None] )
keyword[except] identifier[RuntimeError] :
keyword[return] identifier[self] . identifier[_visual_bounds_at] ( identifier[pos] )
keyword[return] identifier[vis] | def visual_at(self, pos):
"""Return the visual at a given position
Parameters
----------
pos : tuple
The position in logical coordinates to query.
Returns
-------
visual : instance of Visual | None
The visual at the position, if it exists.
"""
tr = self.transforms.get_transform('canvas', 'framebuffer')
fbpos = tr.map(pos)[:2]
try:
id_ = self._render_picking(region=(fbpos[0], fbpos[1], 1, 1))
vis = VisualNode._visual_ids.get(id_[0, 0], None) # depends on [control=['try'], data=[]]
except RuntimeError:
# Don't have read_pixels() support for IPython. Fall back to
# bounds checking.
return self._visual_bounds_at(pos) # depends on [control=['except'], data=[]]
return vis |
def create(cls, name, address, base_dn, bind_user_id=None, bind_password=None,
port=389, protocol='ldap', tls_profile=None, tls_identity=None,
domain_controller=None, supported_method=None, timeout=10, max_search_result=0,
page_size=0, internet_auth_service_enabled=False, **kwargs):
"""
Create an AD server element using basic settings. You can also provide additional
kwargs documented in the class description::
ActiveDirectoryServer.create(name='somedirectory',
address='10.10.10.10',
base_dn='dc=domain,dc=net',
bind_user_id='cn=admin,cn=users,dc=domain,dc=net',
bind_password='somecrazypassword')
Configure NPS along with Active Directory::
ActiveDirectoryServer.create(name='somedirectory5',
address='10.10.10.10',
base_dn='dc=lepages,dc=net',
internet_auth_service_enabled=True,
retries=3,
auth_ipaddress='10.10.10.15',
auth_port=1900,
shared_secret='123456')
:param str name: name of AD element for display
:param str address: address of AD server
:param str base_dn: base DN for which to retrieve users, format is 'dc=domain,dc=com'
:param str bind_user_id: bind user ID credentials, fully qualified. Format is
'cn=admin,cn=users,dc=domain,dc=com'. If not provided, anonymous bind is used
:param str bind_password: bind password, required if bind_user_id set
:param int port: LDAP bind port, (default: 389)
:param str protocol: Which LDAP protocol to use, options 'ldap/ldaps/ldap_tls'. If
ldaps or ldap_tls is used, you must provide a tls_profile element (default: ldap)
:param str,TLSProfile tls_profile by element of str href. Used when protocol is set
to ldaps or ldap_tls
:param str,TLSIdentity tls_identity: check server identity when establishing TLS connection
:param list(DomainController) domain_controller: list of domain controller objects to
add an additional domain controllers for AD communication
:param list(AuthenticationMethod) supported_method: authentication services allowed
for this resource
:param int timeout: The time (in seconds) that components wait for the server to reply
:param int max_search_result: The maximum number of LDAP entries that are returned in
an LDAP response (default: 0 for no limit)
:param int page_size: The maximum number of LDAP entries that are returned on each page
of the LDAP response. (default: 0 for no limit)
:param bool internet_auth_service_enabled: whether to attach an NPS service to this
AD controller (default: False). If setting to true, provide kwargs values for
auth_ipaddress, auth_port and shared_secret
:raises CreateElementFailed: failed creating element
:rtype: ActiveDirectoryServer
"""
json={'name': name, 'address': address, 'base_dn': base_dn,
'bind_user_id': bind_user_id, 'bind_password': bind_password,
'port': port, 'protocol': protocol, 'timeout': timeout,
'domain_controller': domain_controller or [],
'max_search_result': max_search_result, 'page_size': page_size,
'internet_auth_service_enabled': internet_auth_service_enabled,
'supported_method': element_resolver(supported_method) or []}
for obj_class in ('group_object_class', 'user_object_class'):
json[obj_class] = kwargs.pop(obj_class, [])
if protocol in ('ldaps', 'ldap_tls'):
if not tls_profile:
raise CreateElementFailed('You must provide a TLS Profile when TLS '
'connections are configured to the AD controller.')
json.update(tls_profile_ref=element_resolver(tls_profile),
tls_identity=tls_identity)
if internet_auth_service_enabled:
ias = {'auth_port': kwargs.pop('auth_port', 1812),
'auth_ipaddress': kwargs.pop('auth_ipaddress', ''),
'shared_secret': kwargs.pop('shared_secret'),
'retries': kwargs.pop('retries', 2)}
json.update(ias)
json.update(kwargs)
return ElementCreator(cls, json) | def function[create, parameter[cls, name, address, base_dn, bind_user_id, bind_password, port, protocol, tls_profile, tls_identity, domain_controller, supported_method, timeout, max_search_result, page_size, internet_auth_service_enabled]]:
constant[
Create an AD server element using basic settings. You can also provide additional
kwargs documented in the class description::
ActiveDirectoryServer.create(name='somedirectory',
address='10.10.10.10',
base_dn='dc=domain,dc=net',
bind_user_id='cn=admin,cn=users,dc=domain,dc=net',
bind_password='somecrazypassword')
Configure NPS along with Active Directory::
ActiveDirectoryServer.create(name='somedirectory5',
address='10.10.10.10',
base_dn='dc=lepages,dc=net',
internet_auth_service_enabled=True,
retries=3,
auth_ipaddress='10.10.10.15',
auth_port=1900,
shared_secret='123456')
:param str name: name of AD element for display
:param str address: address of AD server
:param str base_dn: base DN for which to retrieve users, format is 'dc=domain,dc=com'
:param str bind_user_id: bind user ID credentials, fully qualified. Format is
'cn=admin,cn=users,dc=domain,dc=com'. If not provided, anonymous bind is used
:param str bind_password: bind password, required if bind_user_id set
:param int port: LDAP bind port, (default: 389)
:param str protocol: Which LDAP protocol to use, options 'ldap/ldaps/ldap_tls'. If
ldaps or ldap_tls is used, you must provide a tls_profile element (default: ldap)
:param str,TLSProfile tls_profile by element of str href. Used when protocol is set
to ldaps or ldap_tls
:param str,TLSIdentity tls_identity: check server identity when establishing TLS connection
:param list(DomainController) domain_controller: list of domain controller objects to
add an additional domain controllers for AD communication
:param list(AuthenticationMethod) supported_method: authentication services allowed
for this resource
:param int timeout: The time (in seconds) that components wait for the server to reply
:param int max_search_result: The maximum number of LDAP entries that are returned in
an LDAP response (default: 0 for no limit)
:param int page_size: The maximum number of LDAP entries that are returned on each page
of the LDAP response. (default: 0 for no limit)
:param bool internet_auth_service_enabled: whether to attach an NPS service to this
AD controller (default: False). If setting to true, provide kwargs values for
auth_ipaddress, auth_port and shared_secret
:raises CreateElementFailed: failed creating element
:rtype: ActiveDirectoryServer
]
variable[json] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b16da0>, <ast.Constant object at 0x7da1b1b17df0>, <ast.Constant object at 0x7da1b1b169b0>, <ast.Constant object at 0x7da1b1b14a30>, <ast.Constant object at 0x7da1b1b16a10>, <ast.Constant object at 0x7da1b1b16d40>, <ast.Constant object at 0x7da1b1b17670>, <ast.Constant object at 0x7da1b1b149a0>, <ast.Constant object at 0x7da1b1b17550>, <ast.Constant object at 0x7da1b1b16140>, <ast.Constant object at 0x7da1b1b17d90>, <ast.Constant object at 0x7da1b1b16170>, <ast.Constant object at 0x7da1b1b16110>], [<ast.Name object at 0x7da1b1b16050>, <ast.Name object at 0x7da1b1b16410>, <ast.Name object at 0x7da1b1b17fa0>, <ast.Name object at 0x7da1b1b17c70>, <ast.Name object at 0x7da1b1b362f0>, <ast.Name object at 0x7da1b1b361d0>, <ast.Name object at 0x7da1b1b34040>, <ast.Name object at 0x7da1b1b37250>, <ast.BoolOp object at 0x7da1b1b36140>, <ast.Name object at 0x7da1b1b34d60>, <ast.Name object at 0x7da1b1b363e0>, <ast.Name object at 0x7da1b1b37d00>, <ast.BoolOp object at 0x7da1b1b359f0>]]
for taget[name[obj_class]] in starred[tuple[[<ast.Constant object at 0x7da1b1b36f80>, <ast.Constant object at 0x7da1b1b367d0>]]] begin[:]
call[name[json]][name[obj_class]] assign[=] call[name[kwargs].pop, parameter[name[obj_class], list[[]]]]
if compare[name[protocol] in tuple[[<ast.Constant object at 0x7da1b1b35690>, <ast.Constant object at 0x7da1b1b36cb0>]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1b34430> begin[:]
<ast.Raise object at 0x7da1b1b35f00>
call[name[json].update, parameter[]]
if name[internet_auth_service_enabled] begin[:]
variable[ias] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b35b70>, <ast.Constant object at 0x7da1b1b37550>, <ast.Constant object at 0x7da1b1b34220>, <ast.Constant object at 0x7da1b1b36e90>], [<ast.Call object at 0x7da1b1b35e70>, <ast.Call object at 0x7da1b1b37370>, <ast.Call object at 0x7da1b1b34160>, <ast.Call object at 0x7da1b1b379a0>]]
call[name[json].update, parameter[name[ias]]]
call[name[json].update, parameter[name[kwargs]]]
return[call[name[ElementCreator], parameter[name[cls], name[json]]]] | keyword[def] identifier[create] ( identifier[cls] , identifier[name] , identifier[address] , identifier[base_dn] , identifier[bind_user_id] = keyword[None] , identifier[bind_password] = keyword[None] ,
identifier[port] = literal[int] , identifier[protocol] = literal[string] , identifier[tls_profile] = keyword[None] , identifier[tls_identity] = keyword[None] ,
identifier[domain_controller] = keyword[None] , identifier[supported_method] = keyword[None] , identifier[timeout] = literal[int] , identifier[max_search_result] = literal[int] ,
identifier[page_size] = literal[int] , identifier[internet_auth_service_enabled] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[json] ={ literal[string] : identifier[name] , literal[string] : identifier[address] , literal[string] : identifier[base_dn] ,
literal[string] : identifier[bind_user_id] , literal[string] : identifier[bind_password] ,
literal[string] : identifier[port] , literal[string] : identifier[protocol] , literal[string] : identifier[timeout] ,
literal[string] : identifier[domain_controller] keyword[or] [],
literal[string] : identifier[max_search_result] , literal[string] : identifier[page_size] ,
literal[string] : identifier[internet_auth_service_enabled] ,
literal[string] : identifier[element_resolver] ( identifier[supported_method] ) keyword[or] []}
keyword[for] identifier[obj_class] keyword[in] ( literal[string] , literal[string] ):
identifier[json] [ identifier[obj_class] ]= identifier[kwargs] . identifier[pop] ( identifier[obj_class] ,[])
keyword[if] identifier[protocol] keyword[in] ( literal[string] , literal[string] ):
keyword[if] keyword[not] identifier[tls_profile] :
keyword[raise] identifier[CreateElementFailed] ( literal[string]
literal[string] )
identifier[json] . identifier[update] ( identifier[tls_profile_ref] = identifier[element_resolver] ( identifier[tls_profile] ),
identifier[tls_identity] = identifier[tls_identity] )
keyword[if] identifier[internet_auth_service_enabled] :
identifier[ias] ={ literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] ),
literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] ),
literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] ),
literal[string] : identifier[kwargs] . identifier[pop] ( literal[string] , literal[int] )}
identifier[json] . identifier[update] ( identifier[ias] )
identifier[json] . identifier[update] ( identifier[kwargs] )
keyword[return] identifier[ElementCreator] ( identifier[cls] , identifier[json] ) | def create(cls, name, address, base_dn, bind_user_id=None, bind_password=None, port=389, protocol='ldap', tls_profile=None, tls_identity=None, domain_controller=None, supported_method=None, timeout=10, max_search_result=0, page_size=0, internet_auth_service_enabled=False, **kwargs):
"""
Create an AD server element using basic settings. You can also provide additional
kwargs documented in the class description::
ActiveDirectoryServer.create(name='somedirectory',
address='10.10.10.10',
base_dn='dc=domain,dc=net',
bind_user_id='cn=admin,cn=users,dc=domain,dc=net',
bind_password='somecrazypassword')
Configure NPS along with Active Directory::
ActiveDirectoryServer.create(name='somedirectory5',
address='10.10.10.10',
base_dn='dc=lepages,dc=net',
internet_auth_service_enabled=True,
retries=3,
auth_ipaddress='10.10.10.15',
auth_port=1900,
shared_secret='123456')
:param str name: name of AD element for display
:param str address: address of AD server
:param str base_dn: base DN for which to retrieve users, format is 'dc=domain,dc=com'
:param str bind_user_id: bind user ID credentials, fully qualified. Format is
'cn=admin,cn=users,dc=domain,dc=com'. If not provided, anonymous bind is used
:param str bind_password: bind password, required if bind_user_id set
:param int port: LDAP bind port, (default: 389)
:param str protocol: Which LDAP protocol to use, options 'ldap/ldaps/ldap_tls'. If
ldaps or ldap_tls is used, you must provide a tls_profile element (default: ldap)
:param str,TLSProfile tls_profile by element of str href. Used when protocol is set
to ldaps or ldap_tls
:param str,TLSIdentity tls_identity: check server identity when establishing TLS connection
:param list(DomainController) domain_controller: list of domain controller objects to
add an additional domain controllers for AD communication
:param list(AuthenticationMethod) supported_method: authentication services allowed
for this resource
:param int timeout: The time (in seconds) that components wait for the server to reply
:param int max_search_result: The maximum number of LDAP entries that are returned in
an LDAP response (default: 0 for no limit)
:param int page_size: The maximum number of LDAP entries that are returned on each page
of the LDAP response. (default: 0 for no limit)
:param bool internet_auth_service_enabled: whether to attach an NPS service to this
AD controller (default: False). If setting to true, provide kwargs values for
auth_ipaddress, auth_port and shared_secret
:raises CreateElementFailed: failed creating element
:rtype: ActiveDirectoryServer
"""
json = {'name': name, 'address': address, 'base_dn': base_dn, 'bind_user_id': bind_user_id, 'bind_password': bind_password, 'port': port, 'protocol': protocol, 'timeout': timeout, 'domain_controller': domain_controller or [], 'max_search_result': max_search_result, 'page_size': page_size, 'internet_auth_service_enabled': internet_auth_service_enabled, 'supported_method': element_resolver(supported_method) or []}
for obj_class in ('group_object_class', 'user_object_class'):
json[obj_class] = kwargs.pop(obj_class, []) # depends on [control=['for'], data=['obj_class']]
if protocol in ('ldaps', 'ldap_tls'):
if not tls_profile:
raise CreateElementFailed('You must provide a TLS Profile when TLS connections are configured to the AD controller.') # depends on [control=['if'], data=[]]
json.update(tls_profile_ref=element_resolver(tls_profile), tls_identity=tls_identity) # depends on [control=['if'], data=[]]
if internet_auth_service_enabled:
ias = {'auth_port': kwargs.pop('auth_port', 1812), 'auth_ipaddress': kwargs.pop('auth_ipaddress', ''), 'shared_secret': kwargs.pop('shared_secret'), 'retries': kwargs.pop('retries', 2)}
json.update(ias) # depends on [control=['if'], data=[]]
json.update(kwargs)
return ElementCreator(cls, json) |
def unmasked_sparse_to_sparse_from_mask_and_pixel_centres(mask, unmasked_sparse_grid_pixel_centres,
total_sparse_pixels):
"""Determine the mapping between every pixelization-grid pixel and masked pixelization-grid pixel. This is
performed by checking whether each pixelization-grid pixel is within the regular-masks, and mapping the indexes.
Pixelization pixels are paired with the next masked pixel index. This may mean that a pixel is not paired with a
pixel near it, if the next pixel is on the next row of the grid. This is not a problem, as it is only
unmasked pixels that are referened when computing image_to_pix, which is what this array is used for.
Parameters
-----------
total_sparse_pixels : int
The total number of pixels in the pixelization grid which fall within the regular-masks.
mask : ccd.masks.Mask
The regular-masks within which pixelization pixels must be inside
unmasked_sparse_grid_pixel_centres : ndarray
The centres of the unmasked pixelization grid pixels.
"""
total_unmasked_sparse_pixels = unmasked_sparse_grid_pixel_centres.shape[0]
unmasked_sparse_to_sparse = np.zeros(total_unmasked_sparse_pixels)
pixel_index = 0
for unmasked_sparse_pixel_index in range(total_unmasked_sparse_pixels):
y = unmasked_sparse_grid_pixel_centres[unmasked_sparse_pixel_index, 0]
x = unmasked_sparse_grid_pixel_centres[unmasked_sparse_pixel_index, 1]
unmasked_sparse_to_sparse[unmasked_sparse_pixel_index] = pixel_index
if not mask[y, x]:
if pixel_index < total_sparse_pixels - 1:
pixel_index += 1
return unmasked_sparse_to_sparse | def function[unmasked_sparse_to_sparse_from_mask_and_pixel_centres, parameter[mask, unmasked_sparse_grid_pixel_centres, total_sparse_pixels]]:
constant[Determine the mapping between every pixelization-grid pixel and masked pixelization-grid pixel. This is
performed by checking whether each pixelization-grid pixel is within the regular-masks, and mapping the indexes.
Pixelization pixels are paired with the next masked pixel index. This may mean that a pixel is not paired with a
pixel near it, if the next pixel is on the next row of the grid. This is not a problem, as it is only
unmasked pixels that are referened when computing image_to_pix, which is what this array is used for.
Parameters
-----------
total_sparse_pixels : int
The total number of pixels in the pixelization grid which fall within the regular-masks.
mask : ccd.masks.Mask
The regular-masks within which pixelization pixels must be inside
unmasked_sparse_grid_pixel_centres : ndarray
The centres of the unmasked pixelization grid pixels.
]
variable[total_unmasked_sparse_pixels] assign[=] call[name[unmasked_sparse_grid_pixel_centres].shape][constant[0]]
variable[unmasked_sparse_to_sparse] assign[=] call[name[np].zeros, parameter[name[total_unmasked_sparse_pixels]]]
variable[pixel_index] assign[=] constant[0]
for taget[name[unmasked_sparse_pixel_index]] in starred[call[name[range], parameter[name[total_unmasked_sparse_pixels]]]] begin[:]
variable[y] assign[=] call[name[unmasked_sparse_grid_pixel_centres]][tuple[[<ast.Name object at 0x7da18f00d060>, <ast.Constant object at 0x7da18f00df00>]]]
variable[x] assign[=] call[name[unmasked_sparse_grid_pixel_centres]][tuple[[<ast.Name object at 0x7da18f00e140>, <ast.Constant object at 0x7da18f00e890>]]]
call[name[unmasked_sparse_to_sparse]][name[unmasked_sparse_pixel_index]] assign[=] name[pixel_index]
if <ast.UnaryOp object at 0x7da18f00f4c0> begin[:]
if compare[name[pixel_index] less[<] binary_operation[name[total_sparse_pixels] - constant[1]]] begin[:]
<ast.AugAssign object at 0x7da18f00d3f0>
return[name[unmasked_sparse_to_sparse]] | keyword[def] identifier[unmasked_sparse_to_sparse_from_mask_and_pixel_centres] ( identifier[mask] , identifier[unmasked_sparse_grid_pixel_centres] ,
identifier[total_sparse_pixels] ):
literal[string]
identifier[total_unmasked_sparse_pixels] = identifier[unmasked_sparse_grid_pixel_centres] . identifier[shape] [ literal[int] ]
identifier[unmasked_sparse_to_sparse] = identifier[np] . identifier[zeros] ( identifier[total_unmasked_sparse_pixels] )
identifier[pixel_index] = literal[int]
keyword[for] identifier[unmasked_sparse_pixel_index] keyword[in] identifier[range] ( identifier[total_unmasked_sparse_pixels] ):
identifier[y] = identifier[unmasked_sparse_grid_pixel_centres] [ identifier[unmasked_sparse_pixel_index] , literal[int] ]
identifier[x] = identifier[unmasked_sparse_grid_pixel_centres] [ identifier[unmasked_sparse_pixel_index] , literal[int] ]
identifier[unmasked_sparse_to_sparse] [ identifier[unmasked_sparse_pixel_index] ]= identifier[pixel_index]
keyword[if] keyword[not] identifier[mask] [ identifier[y] , identifier[x] ]:
keyword[if] identifier[pixel_index] < identifier[total_sparse_pixels] - literal[int] :
identifier[pixel_index] += literal[int]
keyword[return] identifier[unmasked_sparse_to_sparse] | def unmasked_sparse_to_sparse_from_mask_and_pixel_centres(mask, unmasked_sparse_grid_pixel_centres, total_sparse_pixels):
"""Determine the mapping between every pixelization-grid pixel and masked pixelization-grid pixel. This is
performed by checking whether each pixelization-grid pixel is within the regular-masks, and mapping the indexes.
Pixelization pixels are paired with the next masked pixel index. This may mean that a pixel is not paired with a
pixel near it, if the next pixel is on the next row of the grid. This is not a problem, as it is only
unmasked pixels that are referened when computing image_to_pix, which is what this array is used for.
Parameters
-----------
total_sparse_pixels : int
The total number of pixels in the pixelization grid which fall within the regular-masks.
mask : ccd.masks.Mask
The regular-masks within which pixelization pixels must be inside
unmasked_sparse_grid_pixel_centres : ndarray
The centres of the unmasked pixelization grid pixels.
"""
total_unmasked_sparse_pixels = unmasked_sparse_grid_pixel_centres.shape[0]
unmasked_sparse_to_sparse = np.zeros(total_unmasked_sparse_pixels)
pixel_index = 0
for unmasked_sparse_pixel_index in range(total_unmasked_sparse_pixels):
y = unmasked_sparse_grid_pixel_centres[unmasked_sparse_pixel_index, 0]
x = unmasked_sparse_grid_pixel_centres[unmasked_sparse_pixel_index, 1]
unmasked_sparse_to_sparse[unmasked_sparse_pixel_index] = pixel_index
if not mask[y, x]:
if pixel_index < total_sparse_pixels - 1:
pixel_index += 1 # depends on [control=['if'], data=['pixel_index']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['unmasked_sparse_pixel_index']]
return unmasked_sparse_to_sparse |
def get_admins(self):
"""Check verification for all admins."""
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return
with self.db.session_scope() as session:
for a in session.query(orm.Permissions).all():
if not a.registered:
self.update_authstatus(a.nick) | def function[get_admins, parameter[self]]:
constant[Check verification for all admins.]
if <ast.UnaryOp object at 0x7da1b209dcc0> begin[:]
return[None]
with call[name[self].db.session_scope, parameter[]] begin[:]
for taget[name[a]] in starred[call[call[name[session].query, parameter[name[orm].Permissions]].all, parameter[]]] begin[:]
if <ast.UnaryOp object at 0x7da18f09f910> begin[:]
call[name[self].update_authstatus, parameter[name[a].nick]] | keyword[def] identifier[get_admins] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[config] [ literal[string] ]. identifier[getboolean] ( literal[string] ):
keyword[return]
keyword[with] identifier[self] . identifier[db] . identifier[session_scope] () keyword[as] identifier[session] :
keyword[for] identifier[a] keyword[in] identifier[session] . identifier[query] ( identifier[orm] . identifier[Permissions] ). identifier[all] ():
keyword[if] keyword[not] identifier[a] . identifier[registered] :
identifier[self] . identifier[update_authstatus] ( identifier[a] . identifier[nick] ) | def get_admins(self):
"""Check verification for all admins."""
# no nickserv support, assume people are who they say they are.
if not self.config['feature'].getboolean('nickserv'):
return # depends on [control=['if'], data=[]]
with self.db.session_scope() as session:
for a in session.query(orm.Permissions).all():
if not a.registered:
self.update_authstatus(a.nick) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['a']] # depends on [control=['with'], data=['session']] |
def cancelled(self):
"""Return whether this future was successfully cancelled."""
return self._state == self.S_EXCEPTION and isinstance(self._result, Cancelled) | def function[cancelled, parameter[self]]:
constant[Return whether this future was successfully cancelled.]
return[<ast.BoolOp object at 0x7da1b033afe0>] | keyword[def] identifier[cancelled] ( identifier[self] ):
literal[string]
keyword[return] identifier[self] . identifier[_state] == identifier[self] . identifier[S_EXCEPTION] keyword[and] identifier[isinstance] ( identifier[self] . identifier[_result] , identifier[Cancelled] ) | def cancelled(self):
"""Return whether this future was successfully cancelled."""
return self._state == self.S_EXCEPTION and isinstance(self._result, Cancelled) |
def set_figure(self, figure):
"""Call this with the matplotlib Figure() object."""
self.figure = figure
ax = self.figure.add_axes((0, 0, 1, 1), frame_on=False,
#viewer=self,
#projection='ginga'
)
#ax = fig.add_subplot(111)
self.ax_img = ax
# We don't want the axes cleared every time plot() is called
if MPL_V1:
# older versions of matplotlib
ax.hold(False)
# TODO: is this needed, since frame_on == False?
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
#ax.patch.set_alpha(0.0)
ax.patch.set_visible(False)
#ax.autoscale(enable=True, tight=True)
ax.autoscale(enable=False)
# Add an overlapped axis for drawing graphics
newax = self.figure.add_axes(self.ax_img.get_position(),
sharex=ax, sharey=ax,
frameon=False,
#viewer=self,
#projection='ginga'
)
if MPL_V1:
newax.hold(True)
newax.autoscale(enable=False)
newax.get_xaxis().set_visible(False)
newax.get_yaxis().set_visible(False)
self.ax_util = newax
# Create timers
self._msg_timer = None
self._defer_timer = None
if hasattr(figure.canvas, 'new_timer'):
self._msg_timer = Timer(mplcanvas=figure.canvas)
self._msg_timer.add_callback('expired',
lambda timer: self.onscreen_message(None))
self._defer_timer = Timer(mplcanvas=figure.canvas)
self._defer_timer.add_callback('expired',
lambda timer: self.delayed_redraw())
canvas = figure.canvas
if hasattr(canvas, 'viewer'):
canvas.set_viewer(self)
else:
canvas.mpl_connect("resize_event", self._resize_cb)
# Because we don't know if resize callback works with all backends
left, bottom, wd, ht = self.ax_img.bbox.bounds
self.configure_window(wd, ht) | def function[set_figure, parameter[self, figure]]:
constant[Call this with the matplotlib Figure() object.]
name[self].figure assign[=] name[figure]
variable[ax] assign[=] call[name[self].figure.add_axes, parameter[tuple[[<ast.Constant object at 0x7da2041d83d0>, <ast.Constant object at 0x7da2041da2c0>, <ast.Constant object at 0x7da2041d9090>, <ast.Constant object at 0x7da2041db370>]]]]
name[self].ax_img assign[=] name[ax]
if name[MPL_V1] begin[:]
call[name[ax].hold, parameter[constant[False]]]
call[call[name[ax].get_xaxis, parameter[]].set_visible, parameter[constant[False]]]
call[call[name[ax].get_yaxis, parameter[]].set_visible, parameter[constant[False]]]
call[name[ax].patch.set_visible, parameter[constant[False]]]
call[name[ax].autoscale, parameter[]]
variable[newax] assign[=] call[name[self].figure.add_axes, parameter[call[name[self].ax_img.get_position, parameter[]]]]
if name[MPL_V1] begin[:]
call[name[newax].hold, parameter[constant[True]]]
call[name[newax].autoscale, parameter[]]
call[call[name[newax].get_xaxis, parameter[]].set_visible, parameter[constant[False]]]
call[call[name[newax].get_yaxis, parameter[]].set_visible, parameter[constant[False]]]
name[self].ax_util assign[=] name[newax]
name[self]._msg_timer assign[=] constant[None]
name[self]._defer_timer assign[=] constant[None]
if call[name[hasattr], parameter[name[figure].canvas, constant[new_timer]]] begin[:]
name[self]._msg_timer assign[=] call[name[Timer], parameter[]]
call[name[self]._msg_timer.add_callback, parameter[constant[expired], <ast.Lambda object at 0x7da18dc07910>]]
name[self]._defer_timer assign[=] call[name[Timer], parameter[]]
call[name[self]._defer_timer.add_callback, parameter[constant[expired], <ast.Lambda object at 0x7da18dc07940>]]
variable[canvas] assign[=] name[figure].canvas
if call[name[hasattr], parameter[name[canvas], constant[viewer]]] begin[:]
call[name[canvas].set_viewer, parameter[name[self]]]
<ast.Tuple object at 0x7da1b0c25cf0> assign[=] name[self].ax_img.bbox.bounds
call[name[self].configure_window, parameter[name[wd], name[ht]]] | keyword[def] identifier[set_figure] ( identifier[self] , identifier[figure] ):
literal[string]
identifier[self] . identifier[figure] = identifier[figure]
identifier[ax] = identifier[self] . identifier[figure] . identifier[add_axes] (( literal[int] , literal[int] , literal[int] , literal[int] ), identifier[frame_on] = keyword[False] ,
)
identifier[self] . identifier[ax_img] = identifier[ax]
keyword[if] identifier[MPL_V1] :
identifier[ax] . identifier[hold] ( keyword[False] )
identifier[ax] . identifier[get_xaxis] (). identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[get_yaxis] (). identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[patch] . identifier[set_visible] ( keyword[False] )
identifier[ax] . identifier[autoscale] ( identifier[enable] = keyword[False] )
identifier[newax] = identifier[self] . identifier[figure] . identifier[add_axes] ( identifier[self] . identifier[ax_img] . identifier[get_position] (),
identifier[sharex] = identifier[ax] , identifier[sharey] = identifier[ax] ,
identifier[frameon] = keyword[False] ,
)
keyword[if] identifier[MPL_V1] :
identifier[newax] . identifier[hold] ( keyword[True] )
identifier[newax] . identifier[autoscale] ( identifier[enable] = keyword[False] )
identifier[newax] . identifier[get_xaxis] (). identifier[set_visible] ( keyword[False] )
identifier[newax] . identifier[get_yaxis] (). identifier[set_visible] ( keyword[False] )
identifier[self] . identifier[ax_util] = identifier[newax]
identifier[self] . identifier[_msg_timer] = keyword[None]
identifier[self] . identifier[_defer_timer] = keyword[None]
keyword[if] identifier[hasattr] ( identifier[figure] . identifier[canvas] , literal[string] ):
identifier[self] . identifier[_msg_timer] = identifier[Timer] ( identifier[mplcanvas] = identifier[figure] . identifier[canvas] )
identifier[self] . identifier[_msg_timer] . identifier[add_callback] ( literal[string] ,
keyword[lambda] identifier[timer] : identifier[self] . identifier[onscreen_message] ( keyword[None] ))
identifier[self] . identifier[_defer_timer] = identifier[Timer] ( identifier[mplcanvas] = identifier[figure] . identifier[canvas] )
identifier[self] . identifier[_defer_timer] . identifier[add_callback] ( literal[string] ,
keyword[lambda] identifier[timer] : identifier[self] . identifier[delayed_redraw] ())
identifier[canvas] = identifier[figure] . identifier[canvas]
keyword[if] identifier[hasattr] ( identifier[canvas] , literal[string] ):
identifier[canvas] . identifier[set_viewer] ( identifier[self] )
keyword[else] :
identifier[canvas] . identifier[mpl_connect] ( literal[string] , identifier[self] . identifier[_resize_cb] )
identifier[left] , identifier[bottom] , identifier[wd] , identifier[ht] = identifier[self] . identifier[ax_img] . identifier[bbox] . identifier[bounds]
identifier[self] . identifier[configure_window] ( identifier[wd] , identifier[ht] ) | def set_figure(self, figure):
"""Call this with the matplotlib Figure() object."""
self.figure = figure
#viewer=self,
#projection='ginga'
ax = self.figure.add_axes((0, 0, 1, 1), frame_on=False)
#ax = fig.add_subplot(111)
self.ax_img = ax
# We don't want the axes cleared every time plot() is called
if MPL_V1:
# older versions of matplotlib
ax.hold(False) # depends on [control=['if'], data=[]]
# TODO: is this needed, since frame_on == False?
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
#ax.patch.set_alpha(0.0)
ax.patch.set_visible(False)
#ax.autoscale(enable=True, tight=True)
ax.autoscale(enable=False)
# Add an overlapped axis for drawing graphics
#viewer=self,
#projection='ginga'
newax = self.figure.add_axes(self.ax_img.get_position(), sharex=ax, sharey=ax, frameon=False)
if MPL_V1:
newax.hold(True) # depends on [control=['if'], data=[]]
newax.autoscale(enable=False)
newax.get_xaxis().set_visible(False)
newax.get_yaxis().set_visible(False)
self.ax_util = newax
# Create timers
self._msg_timer = None
self._defer_timer = None
if hasattr(figure.canvas, 'new_timer'):
self._msg_timer = Timer(mplcanvas=figure.canvas)
self._msg_timer.add_callback('expired', lambda timer: self.onscreen_message(None))
self._defer_timer = Timer(mplcanvas=figure.canvas)
self._defer_timer.add_callback('expired', lambda timer: self.delayed_redraw()) # depends on [control=['if'], data=[]]
canvas = figure.canvas
if hasattr(canvas, 'viewer'):
canvas.set_viewer(self) # depends on [control=['if'], data=[]]
else:
canvas.mpl_connect('resize_event', self._resize_cb)
# Because we don't know if resize callback works with all backends
(left, bottom, wd, ht) = self.ax_img.bbox.bounds
self.configure_window(wd, ht) |
def revoke_all_tokens(self):
"""
Implementation of :meth:`twitcher.api.ITokenManager.revoke_all_tokens`.
"""
try:
self.store.clear_tokens()
except Exception:
LOGGER.exception('Failed to remove tokens.')
return False
else:
return True | def function[revoke_all_tokens, parameter[self]]:
constant[
Implementation of :meth:`twitcher.api.ITokenManager.revoke_all_tokens`.
]
<ast.Try object at 0x7da1affc18a0> | keyword[def] identifier[revoke_all_tokens] ( identifier[self] ):
literal[string]
keyword[try] :
identifier[self] . identifier[store] . identifier[clear_tokens] ()
keyword[except] identifier[Exception] :
identifier[LOGGER] . identifier[exception] ( literal[string] )
keyword[return] keyword[False]
keyword[else] :
keyword[return] keyword[True] | def revoke_all_tokens(self):
"""
Implementation of :meth:`twitcher.api.ITokenManager.revoke_all_tokens`.
"""
try:
self.store.clear_tokens() # depends on [control=['try'], data=[]]
except Exception:
LOGGER.exception('Failed to remove tokens.')
return False # depends on [control=['except'], data=[]]
else:
return True |
def serializable(wrapped):
"""
If a keyword argument 'serialize' with a True value is passed to the
Wrapped function, the return of the wrapped function will be serialized.
Nothing happens if the argument is not passed or the value is not True
"""
@wraps(wrapped)
def wrapper(*args, **kwargs):
should_serialize = kwargs.pop('serialize', False)
result = wrapped(*args, **kwargs)
return serialize(result) if should_serialize else result
if hasattr(wrapped, 'decorators'):
wrapper.decorators = wrapped.decorators
wrapper.decorators.append('serializable')
else:
wrapper.decorators = ['serializable']
return wrapper | def function[serializable, parameter[wrapped]]:
constant[
If a keyword argument 'serialize' with a True value is passed to the
Wrapped function, the return of the wrapped function will be serialized.
Nothing happens if the argument is not passed or the value is not True
]
def function[wrapper, parameter[]]:
variable[should_serialize] assign[=] call[name[kwargs].pop, parameter[constant[serialize], constant[False]]]
variable[result] assign[=] call[name[wrapped], parameter[<ast.Starred object at 0x7da20c7969b0>]]
return[<ast.IfExp object at 0x7da20c7968f0>]
if call[name[hasattr], parameter[name[wrapped], constant[decorators]]] begin[:]
name[wrapper].decorators assign[=] name[wrapped].decorators
call[name[wrapper].decorators.append, parameter[constant[serializable]]]
return[name[wrapper]] | keyword[def] identifier[serializable] ( identifier[wrapped] ):
literal[string]
@ identifier[wraps] ( identifier[wrapped] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
identifier[should_serialize] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] )
identifier[result] = identifier[wrapped] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[serialize] ( identifier[result] ) keyword[if] identifier[should_serialize] keyword[else] identifier[result]
keyword[if] identifier[hasattr] ( identifier[wrapped] , literal[string] ):
identifier[wrapper] . identifier[decorators] = identifier[wrapped] . identifier[decorators]
identifier[wrapper] . identifier[decorators] . identifier[append] ( literal[string] )
keyword[else] :
identifier[wrapper] . identifier[decorators] =[ literal[string] ]
keyword[return] identifier[wrapper] | def serializable(wrapped):
"""
If a keyword argument 'serialize' with a True value is passed to the
Wrapped function, the return of the wrapped function will be serialized.
Nothing happens if the argument is not passed or the value is not True
"""
@wraps(wrapped)
def wrapper(*args, **kwargs):
should_serialize = kwargs.pop('serialize', False)
result = wrapped(*args, **kwargs)
return serialize(result) if should_serialize else result
if hasattr(wrapped, 'decorators'):
wrapper.decorators = wrapped.decorators
wrapper.decorators.append('serializable') # depends on [control=['if'], data=[]]
else:
wrapper.decorators = ['serializable']
return wrapper |
def dafgsr(handle, recno, begin, end):
"""
Read a portion of the contents of (words in) a summary record in a DAF file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgsr_c.html
:param handle: Handle of DAF.
:type handle: int
:param recno: Record number; word indices are 1-based, 1 to 128 inclusive.
:type recno: int
:param begin: Index of first word to read from record, will be clamped > 0.
:type begin: int
:param end: Index of last word to read, wll be clamped < 129
:type end: int
:return: Contents of request sub-record
:rtype: float numpy.ndarray
"""
handle = ctypes.c_int(handle)
recno = ctypes.c_int(recno)
begin = ctypes.c_int(begin)
end = ctypes.c_int(end)
# dafgsr_c will retrieve no more than 128 words
data = stypes.emptyDoubleVector(1 + min([128,end.value]) - max([begin.value,1]))
found = ctypes.c_int()
libspice.dafgsr_c(handle, recno, begin, end, data, ctypes.byref(found))
return stypes.cVectorToPython(data), bool(found.value) | def function[dafgsr, parameter[handle, recno, begin, end]]:
constant[
Read a portion of the contents of (words in) a summary record in a DAF file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgsr_c.html
:param handle: Handle of DAF.
:type handle: int
:param recno: Record number; word indices are 1-based, 1 to 128 inclusive.
:type recno: int
:param begin: Index of first word to read from record, will be clamped > 0.
:type begin: int
:param end: Index of last word to read, wll be clamped < 129
:type end: int
:return: Contents of request sub-record
:rtype: float numpy.ndarray
]
variable[handle] assign[=] call[name[ctypes].c_int, parameter[name[handle]]]
variable[recno] assign[=] call[name[ctypes].c_int, parameter[name[recno]]]
variable[begin] assign[=] call[name[ctypes].c_int, parameter[name[begin]]]
variable[end] assign[=] call[name[ctypes].c_int, parameter[name[end]]]
variable[data] assign[=] call[name[stypes].emptyDoubleVector, parameter[binary_operation[binary_operation[constant[1] + call[name[min], parameter[list[[<ast.Constant object at 0x7da18f09c160>, <ast.Attribute object at 0x7da18f09ebf0>]]]]] - call[name[max], parameter[list[[<ast.Attribute object at 0x7da18f09e0e0>, <ast.Constant object at 0x7da18f09c670>]]]]]]]
variable[found] assign[=] call[name[ctypes].c_int, parameter[]]
call[name[libspice].dafgsr_c, parameter[name[handle], name[recno], name[begin], name[end], name[data], call[name[ctypes].byref, parameter[name[found]]]]]
return[tuple[[<ast.Call object at 0x7da18f09db40>, <ast.Call object at 0x7da18f09d8d0>]]] | keyword[def] identifier[dafgsr] ( identifier[handle] , identifier[recno] , identifier[begin] , identifier[end] ):
literal[string]
identifier[handle] = identifier[ctypes] . identifier[c_int] ( identifier[handle] )
identifier[recno] = identifier[ctypes] . identifier[c_int] ( identifier[recno] )
identifier[begin] = identifier[ctypes] . identifier[c_int] ( identifier[begin] )
identifier[end] = identifier[ctypes] . identifier[c_int] ( identifier[end] )
identifier[data] = identifier[stypes] . identifier[emptyDoubleVector] ( literal[int] + identifier[min] ([ literal[int] , identifier[end] . identifier[value] ])- identifier[max] ([ identifier[begin] . identifier[value] , literal[int] ]))
identifier[found] = identifier[ctypes] . identifier[c_int] ()
identifier[libspice] . identifier[dafgsr_c] ( identifier[handle] , identifier[recno] , identifier[begin] , identifier[end] , identifier[data] , identifier[ctypes] . identifier[byref] ( identifier[found] ))
keyword[return] identifier[stypes] . identifier[cVectorToPython] ( identifier[data] ), identifier[bool] ( identifier[found] . identifier[value] ) | def dafgsr(handle, recno, begin, end):
"""
Read a portion of the contents of (words in) a summary record in a DAF file.
http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/dafgsr_c.html
:param handle: Handle of DAF.
:type handle: int
:param recno: Record number; word indices are 1-based, 1 to 128 inclusive.
:type recno: int
:param begin: Index of first word to read from record, will be clamped > 0.
:type begin: int
:param end: Index of last word to read, wll be clamped < 129
:type end: int
:return: Contents of request sub-record
:rtype: float numpy.ndarray
"""
handle = ctypes.c_int(handle)
recno = ctypes.c_int(recno)
begin = ctypes.c_int(begin)
end = ctypes.c_int(end)
# dafgsr_c will retrieve no more than 128 words
data = stypes.emptyDoubleVector(1 + min([128, end.value]) - max([begin.value, 1]))
found = ctypes.c_int()
libspice.dafgsr_c(handle, recno, begin, end, data, ctypes.byref(found))
return (stypes.cVectorToPython(data), bool(found.value)) |
def sphere_average(dset,x,y,z,radius=1):
    '''returns a list of average values (one for each subbrick/time point) within the coordinate ``(x,y,z)`` (in RAI order) using a sphere of radius ``radius`` in ``dset``'''
    # accept either a single dataset name or a list of them
    if isinstance(dset, basestring):
        dset = [dset]
    averages = []
    for dataset in dset:
        # 3dmaskave -q prints one average per sub-brick; parse them as floats
        cmd = ['3dmaskave', '-q', '-dball', str(x), str(y), str(z), str(radius), dataset]
        output = subprocess.check_output(cmd, stderr=subprocess.PIPE)
        averages.extend(float(value) for value in output.split())
    return averages
constant[returns a list of average values (one for each subbrick/time point) within the coordinate ``(x,y,z)`` (in RAI order) using a sphere of radius ``radius`` in ``dset``]
variable[return_list] assign[=] list[[]]
if call[name[isinstance], parameter[name[dset], name[basestring]]] begin[:]
variable[dset] assign[=] list[[<ast.Name object at 0x7da207f03070>]]
for taget[name[d]] in starred[name[dset]] begin[:]
<ast.AugAssign object at 0x7da20c794310>
return[name[return_list]] | keyword[def] identifier[sphere_average] ( identifier[dset] , identifier[x] , identifier[y] , identifier[z] , identifier[radius] = literal[int] ):
literal[string]
identifier[return_list] =[]
keyword[if] identifier[isinstance] ( identifier[dset] , identifier[basestring] ):
identifier[dset] =[ identifier[dset] ]
keyword[for] identifier[d] keyword[in] identifier[dset] :
identifier[return_list] +=[ identifier[float] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] , identifier[str] ( identifier[x] ), identifier[str] ( identifier[y] ), identifier[str] ( identifier[z] ), identifier[str] ( identifier[radius] ), identifier[d] ], identifier[stderr] = identifier[subprocess] . identifier[PIPE] ). identifier[split] ()]
keyword[return] identifier[return_list] | def sphere_average(dset, x, y, z, radius=1):
"""returns a list of average values (one for each subbrick/time point) within the coordinate ``(x,y,z)`` (in RAI order) using a sphere of radius ``radius`` in ``dset``"""
return_list = []
if isinstance(dset, basestring):
dset = [dset] # depends on [control=['if'], data=[]]
for d in dset:
return_list += [float(a) for a in subprocess.check_output(['3dmaskave', '-q', '-dball', str(x), str(y), str(z), str(radius), d], stderr=subprocess.PIPE).split()] # depends on [control=['for'], data=['d']]
return return_list |
def ensure_exists(self):
    """Ensure that cache directory exists."""
    # nothing to do when the cache is already in place
    if cache.exists(self.config):
        logger.debug(f"Cache {self.config.cache_path} exists")
        return
    logger.debug("Wily cache not found, creating.")
    cache.create(self.config)
    logger.debug("Created wily cache")
constant[Ensure that cache directory exists.]
if <ast.UnaryOp object at 0x7da20c7943a0> begin[:]
call[name[logger].debug, parameter[constant[Wily cache not found, creating.]]]
call[name[cache].create, parameter[name[self].config]]
call[name[logger].debug, parameter[constant[Created wily cache]]] | keyword[def] identifier[ensure_exists] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[cache] . identifier[exists] ( identifier[self] . identifier[config] ):
identifier[logger] . identifier[debug] ( literal[string] )
identifier[cache] . identifier[create] ( identifier[self] . identifier[config] )
identifier[logger] . identifier[debug] ( literal[string] )
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string] ) | def ensure_exists(self):
"""Ensure that cache directory exists."""
if not cache.exists(self.config):
logger.debug('Wily cache not found, creating.')
cache.create(self.config)
logger.debug('Created wily cache') # depends on [control=['if'], data=[]]
else:
logger.debug(f'Cache {self.config.cache_path} exists') |
def create_attach_volumes(name, kwargs, call=None):
    '''
    .. versionadded:: 2017.7.0

    Create and attach multiple volumes to a node. Both the 'volumes' and
    'node' arguments are required; 'node' is a libcloud node and 'volumes'
    is a list of maps, each of which contains:

    size
        The size of the new disk in GB. Required.
    type
        The disk type, either pd-standard or pd-ssd. Optional, defaults to pd-standard.
    image
        An image to use for this new disk. Optional.
    snapshot
        A snapshot to use for this new disk. Optional.
    auto_delete
        An option(bool) to keep or remove the disk upon instance deletion.
        Optional, defaults to False.

    Volumes are attached in the order given, so on a fresh node the first
    volume becomes /dev/sdb, the second /dev/sdc, and so on.
    '''
    if call != 'action':
        raise SaltCloudSystemExit(
            'The create_attach_volumes action must be called with '
            '-a or --action.'
        )

    volumes = literal_eval(kwargs['volumes'])
    node = kwargs['node']
    conn = get_conn()
    node_data = _expand_node(conn.ex_get_node(node))
    zone = node_data['extra']['zone']['name']

    for idx, volume in enumerate(volumes):
        # first attached disk gets suffix 'b' (sdb), the next 'c', ...
        device_suffix = chr(ord('a') + 1 + idx)
        volume_name = '{0}-sd{1}'.format(name, device_suffix)
        volume_dict = {
            'disk_name': volume_name,
            'location': zone,
            'size': volume['size'],
            'type': volume.get('type', 'pd-standard'),
            'image': volume.get('image', None),
            'snapshot': volume.get('snapshot', None),
            'auto_delete': volume.get('auto_delete', False)
        }
        create_disk(volume_dict, 'function')
        attach_disk(name, volume_dict, 'action')
constant[
.. versionadded:: 2017.7.0
Create and attach multiple volumes to a node. The 'volumes' and 'node'
arguments are required, where 'node' is a libcloud node, and 'volumes'
is a list of maps, where each map contains:
size
The size of the new disk in GB. Required.
type
The disk type, either pd-standard or pd-ssd. Optional, defaults to pd-standard.
image
An image to use for this new disk. Optional.
snapshot
A snapshot to use for this new disk. Optional.
auto_delete
An option(bool) to keep or remove the disk upon instance deletion.
Optional, defaults to False.
Volumes are attached in the order in which they are given, thus on a new
node the first volume will be /dev/sdb, the second /dev/sdc, and so on.
]
if compare[name[call] not_equal[!=] constant[action]] begin[:]
<ast.Raise object at 0x7da20c7caf50>
variable[volumes] assign[=] call[name[literal_eval], parameter[call[name[kwargs]][constant[volumes]]]]
variable[node] assign[=] call[name[kwargs]][constant[node]]
variable[conn] assign[=] call[name[get_conn], parameter[]]
variable[node_data] assign[=] call[name[_expand_node], parameter[call[name[conn].ex_get_node, parameter[name[node]]]]]
variable[letter] assign[=] binary_operation[call[name[ord], parameter[constant[a]]] - constant[1]]
for taget[tuple[[<ast.Name object at 0x7da1b26ac6a0>, <ast.Name object at 0x7da1b26acf10>]]] in starred[call[name[enumerate], parameter[name[volumes]]]] begin[:]
variable[volume_name] assign[=] call[constant[{0}-sd{1}].format, parameter[name[name], call[name[chr], parameter[binary_operation[binary_operation[name[letter] + constant[2]] + name[idx]]]]]]
variable[volume_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b26af460>, <ast.Constant object at 0x7da1b26ae5c0>, <ast.Constant object at 0x7da1b26ae1a0>, <ast.Constant object at 0x7da1b26adf90>, <ast.Constant object at 0x7da1b26acd90>, <ast.Constant object at 0x7da1b26af190>, <ast.Constant object at 0x7da1b26afac0>], [<ast.Name object at 0x7da1b26aefe0>, <ast.Subscript object at 0x7da1b26ae4d0>, <ast.Subscript object at 0x7da1b26afaf0>, <ast.Call object at 0x7da1b26ad600>, <ast.Call object at 0x7da1b26adb40>, <ast.Call object at 0x7da1b26af490>, <ast.Call object at 0x7da1b26ac850>]]
call[name[create_disk], parameter[name[volume_dict], constant[function]]]
call[name[attach_disk], parameter[name[name], name[volume_dict], constant[action]]] | keyword[def] identifier[create_attach_volumes] ( identifier[name] , identifier[kwargs] , identifier[call] = keyword[None] ):
literal[string]
keyword[if] identifier[call] != literal[string] :
keyword[raise] identifier[SaltCloudSystemExit] (
literal[string]
literal[string]
)
identifier[volumes] = identifier[literal_eval] ( identifier[kwargs] [ literal[string] ])
identifier[node] = identifier[kwargs] [ literal[string] ]
identifier[conn] = identifier[get_conn] ()
identifier[node_data] = identifier[_expand_node] ( identifier[conn] . identifier[ex_get_node] ( identifier[node] ))
identifier[letter] = identifier[ord] ( literal[string] )- literal[int]
keyword[for] identifier[idx] , identifier[volume] keyword[in] identifier[enumerate] ( identifier[volumes] ):
identifier[volume_name] = literal[string] . identifier[format] ( identifier[name] , identifier[chr] ( identifier[letter] + literal[int] + identifier[idx] ))
identifier[volume_dict] ={
literal[string] : identifier[volume_name] ,
literal[string] : identifier[node_data] [ literal[string] ][ literal[string] ][ literal[string] ],
literal[string] : identifier[volume] [ literal[string] ],
literal[string] : identifier[volume] . identifier[get] ( literal[string] , literal[string] ),
literal[string] : identifier[volume] . identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[volume] . identifier[get] ( literal[string] , keyword[None] ),
literal[string] : identifier[volume] . identifier[get] ( literal[string] , keyword[False] )
}
identifier[create_disk] ( identifier[volume_dict] , literal[string] )
identifier[attach_disk] ( identifier[name] , identifier[volume_dict] , literal[string] ) | def create_attach_volumes(name, kwargs, call=None):
"""
.. versionadded:: 2017.7.0
Create and attach multiple volumes to a node. The 'volumes' and 'node'
arguments are required, where 'node' is a libcloud node, and 'volumes'
is a list of maps, where each map contains:
size
The size of the new disk in GB. Required.
type
The disk type, either pd-standard or pd-ssd. Optional, defaults to pd-standard.
image
An image to use for this new disk. Optional.
snapshot
A snapshot to use for this new disk. Optional.
auto_delete
An option(bool) to keep or remove the disk upon instance deletion.
Optional, defaults to False.
Volumes are attached in the order in which they are given, thus on a new
node the first volume will be /dev/sdb, the second /dev/sdc, and so on.
"""
if call != 'action':
raise SaltCloudSystemExit('The create_attach_volumes action must be called with -a or --action.') # depends on [control=['if'], data=[]]
volumes = literal_eval(kwargs['volumes'])
node = kwargs['node']
conn = get_conn()
node_data = _expand_node(conn.ex_get_node(node))
letter = ord('a') - 1
for (idx, volume) in enumerate(volumes):
volume_name = '{0}-sd{1}'.format(name, chr(letter + 2 + idx))
volume_dict = {'disk_name': volume_name, 'location': node_data['extra']['zone']['name'], 'size': volume['size'], 'type': volume.get('type', 'pd-standard'), 'image': volume.get('image', None), 'snapshot': volume.get('snapshot', None), 'auto_delete': volume.get('auto_delete', False)}
create_disk(volume_dict, 'function')
attach_disk(name, volume_dict, 'action') # depends on [control=['for'], data=[]] |
async def reset(self, von_wallet: Wallet, seed: str = None) -> Wallet:
    """
    Close and delete (open) VON anchor wallet and then create, open, and return
    replacement on current link secret.

    Note that this operation effectively destroys private keys for keyed data
    structures such as credential offers or credential definitions.

    Raise WalletState if the wallet is closed.

    :param von_wallet: open wallet
    :param seed: seed to use for new wallet (default random)
    :return: replacement wallet
    """
    LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet)

    if not von_wallet.handle:
        LOGGER.debug('WalletManager.reset <!< Wallet %s is closed', von_wallet.name)
        raise WalletState('Wallet {} is closed'.format(von_wallet.name))

    # carry the old wallet's configuration over to its replacement;
    # the wallet is being reset, so mutating its config in place is fine
    config = von_wallet.config
    config['did'] = von_wallet.did
    config['seed'] = seed
    config['auto_create'] = von_wallet.auto_create  # in case both auto_remove+auto_create set (create every open)
    config['auto_remove'] = von_wallet.auto_remove

    label = await von_wallet.get_link_secret_label()
    if label:
        config['link_secret_label'] = label

    await von_wallet.close()
    if not von_wallet.auto_remove:
        # wallet does not clean itself up on close: remove it explicitly
        await self.remove(von_wallet)

    replacement = await self.create(config, von_wallet.access)
    await replacement.open()

    LOGGER.debug('WalletManager.reset <<< %s', replacement)
    return replacement
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[von_wallet] )
keyword[if] keyword[not] identifier[von_wallet] . identifier[handle] :
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[von_wallet] . identifier[name] )
keyword[raise] identifier[WalletState] ( literal[string] . identifier[format] ( identifier[von_wallet] . identifier[name] ))
identifier[w_config] = identifier[von_wallet] . identifier[config]
identifier[w_config] [ literal[string] ]= identifier[von_wallet] . identifier[did]
identifier[w_config] [ literal[string] ]= identifier[seed]
identifier[w_config] [ literal[string] ]= identifier[von_wallet] . identifier[auto_create]
identifier[w_config] [ literal[string] ]= identifier[von_wallet] . identifier[auto_remove]
identifier[label] = keyword[await] identifier[von_wallet] . identifier[get_link_secret_label] ()
keyword[if] identifier[label] :
identifier[w_config] [ literal[string] ]= identifier[label]
keyword[await] identifier[von_wallet] . identifier[close] ()
keyword[if] keyword[not] identifier[von_wallet] . identifier[auto_remove] :
keyword[await] identifier[self] . identifier[remove] ( identifier[von_wallet] )
identifier[rv] = keyword[await] identifier[self] . identifier[create] ( identifier[w_config] , identifier[von_wallet] . identifier[access] )
keyword[await] identifier[rv] . identifier[open] ()
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[rv] )
keyword[return] identifier[rv] | async def reset(self, von_wallet: Wallet, seed: str=None) -> Wallet:
"""
Close and delete (open) VON anchor wallet and then create, open, and return
replacement on current link secret.
Note that this operation effectively destroys private keys for keyed data
structures such as credential offers or credential definitions.
Raise WalletState if the wallet is closed.
:param von_wallet: open wallet
:param seed: seed to use for new wallet (default random)
:return: replacement wallet
"""
LOGGER.debug('WalletManager.reset >>> von_wallet %s', von_wallet)
if not von_wallet.handle:
LOGGER.debug('WalletManager.reset <!< Wallet %s is closed', von_wallet.name)
raise WalletState('Wallet {} is closed'.format(von_wallet.name)) # depends on [control=['if'], data=[]]
w_config = von_wallet.config # wallet under reset, no need to make copy
w_config['did'] = von_wallet.did
w_config['seed'] = seed
w_config['auto_create'] = von_wallet.auto_create # in case both auto_remove+auto_create set (create every open)
w_config['auto_remove'] = von_wallet.auto_remove
label = await von_wallet.get_link_secret_label()
if label:
w_config['link_secret_label'] = label # depends on [control=['if'], data=[]]
await von_wallet.close()
if not von_wallet.auto_remove:
await self.remove(von_wallet) # depends on [control=['if'], data=[]]
rv = await self.create(w_config, von_wallet.access)
await rv.open()
LOGGER.debug('WalletManager.reset <<< %s', rv)
return rv |
def match_keyword(token, keywords):
    """
    Checks if the given token represents one of the given keywords
    """
    # a missing or non-keyword token can never match
    if token and token.is_keyword:
        return token.value.upper() in keywords
    return False
constant[
Checks if the given token represents one of the given keywords
]
if <ast.UnaryOp object at 0x7da18f720be0> begin[:]
return[constant[False]]
if <ast.UnaryOp object at 0x7da18f723460> begin[:]
return[constant[False]]
return[compare[call[name[token].value.upper, parameter[]] in name[keywords]]] | keyword[def] identifier[match_keyword] ( identifier[token] , identifier[keywords] ):
literal[string]
keyword[if] keyword[not] identifier[token] :
keyword[return] keyword[False]
keyword[if] keyword[not] identifier[token] . identifier[is_keyword] :
keyword[return] keyword[False]
keyword[return] identifier[token] . identifier[value] . identifier[upper] () keyword[in] identifier[keywords] | def match_keyword(token, keywords):
"""
Checks if the given token represents one of the given keywords
"""
if not token:
return False # depends on [control=['if'], data=[]]
if not token.is_keyword:
return False # depends on [control=['if'], data=[]]
return token.value.upper() in keywords |
def optimize_filter_dict(filter_dict, trgm=True):
    """Improve query speed for a Django queryset `filter` or `exclude` kwargs dict

    WARNING: Wtthout `trgm`, this only improves the speed of exclude filters by 0.4%

    Arguments:
      filter_dict (dict): kwargs for Django ORM queryset `filter` and `exclude` queries
      trgm (bool): whether to assume the Django ORM trigram (djorm-trgm) extension is available

    Examples:
      >>> # This is nothing different than what Django already does:
      >>> optimize_filter_dict({'name__in': ['Smith', 'Jones', 'Smith']}) == {'name__in': set(('Smith', 'Jones'))}
      True
      >>> # This is an optimization that Django doesn't do yet, probably because it actually slows `filter` queries down by 0.4%!:
      >>> # However, it does speed up an `objects.exclude` query by about 1%:
      >>> optimize_filter_dict({'name__in': ['Smith']}) == {'name': 'Smith'}
      True
      >>> # This is the only optimization that actually does significant good, but it requires `djorm-trgm`
      >>> optimize_filter_dict({'name__contains': 'ith'}) == {'name__similar': 'ith', 'name__contains': 'ith'}
      True
    """
    optimized = {}
    # BUG FIX: dict.iteritems() is Python-2-only and raises AttributeError on
    # Python 3; dict.items() behaves identically on both interpreters.
    for k, v in filter_dict.items():
        if k.endswith('__in'):
            # deduplicate the candidate values
            v = set(v)
            if len(v) == 1:
                # a single-element `__in` collapses to an exact-match lookup
                optimized[k[:-4]] = tuple(v)[0]
            else:
                optimized[k] = v
        else:
            optimized[k] = v
    # This is the only optimization that actually does some good:
    # add a trigram `__similar` lookup alongside each (i)contains filter
    if trgm:
        # snapshot the items so new keys can be added while iterating
        for k, v in list(optimized.items()):
            if k.endswith('__contains'):
                optimized[k[:-10] + '__similar'] = v
            elif k.endswith('__icontains'):
                optimized[k[:-11] + '__similar'] = v
    return optimized
constant[Improve query speed for a Django queryset `filter` or `exclude` kwargs dict
WARNING: Wtthout `trgm`, this only improves the speed of exclude filters by 0.4%
Arguments:
filter_dict (dict): kwargs for Django ORM queryset `filter` and `exclude` queries
trgm (bool): whether to assume the Django ORM trigram (djorm-trgm) extension is available
Examples:
>>> # This is nothing different than what Django already does:
>>> optimize_filter_dict({'name__in': ['Smith', 'Jones', 'Smith']}) == {'name__in': set(('Smith', 'Jones'))}
True
>>> # This is an optimjization that Django doesn't do yet, probably because it actually slows `filter` queries down by 0.4%!:
>>> # However, it does speed up an `objects.exclude` query by about 1%:
>>> optimize_filter_dict({'name__in': ['Smith']}) == {'name': 'Smith'}
True
>>> # This is the only optimization that actually does significant good, but it requires `djorm-trgm`
>>> optimize_filter_dict({'name__contains': 'ith'}) == {'name__similar': 'ith', 'name__contains': 'ith'}
True
]
variable[optimized] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2041d9600>, <ast.Name object at 0x7da2041d9150>]]] in starred[call[name[filter_dict].iteritems, parameter[]]] begin[:]
if call[name[k].endswith, parameter[constant[__in]]] begin[:]
variable[v] assign[=] call[name[set], parameter[name[v]]]
if compare[call[name[len], parameter[name[v]]] equal[==] constant[1]] begin[:]
call[name[optimized]][call[name[k]][<ast.Slice object at 0x7da20c6abd90>]] assign[=] call[call[name[tuple], parameter[name[v]]]][constant[0]]
if name[trgm] begin[:]
variable[optimized_copy] assign[=] call[name[dict], parameter[name[optimized]]]
for taget[tuple[[<ast.Name object at 0x7da20c6abf40>, <ast.Name object at 0x7da20c6a81f0>]]] in starred[call[name[optimized_copy].iteritems, parameter[]]] begin[:]
if call[name[k].endswith, parameter[constant[__contains]]] begin[:]
call[name[optimized]][binary_operation[call[name[k]][<ast.Slice object at 0x7da20c6aa3e0>] + constant[__similar]]] assign[=] name[v]
return[name[optimized]] | keyword[def] identifier[optimize_filter_dict] ( identifier[filter_dict] , identifier[trgm] = keyword[True] ):
literal[string]
identifier[optimized] ={}
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[filter_dict] . identifier[iteritems] ():
keyword[if] identifier[k] . identifier[endswith] ( literal[string] ):
identifier[v] = identifier[set] ( identifier[v] )
keyword[if] identifier[len] ( identifier[v] )== literal[int] :
identifier[optimized] [ identifier[k] [:- literal[int] ]]= identifier[tuple] ( identifier[v] )[ literal[int] ]
keyword[else] :
identifier[optimized] [ identifier[k] ]= identifier[v]
keyword[else] :
identifier[optimized] [ identifier[k] ]= identifier[v]
keyword[if] identifier[trgm] :
identifier[optimized_copy] = identifier[dict] ( identifier[optimized] )
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[optimized_copy] . identifier[iteritems] ():
keyword[if] identifier[k] . identifier[endswith] ( literal[string] ):
identifier[optimized] [ identifier[k] [:- literal[int] ]+ literal[string] ]= identifier[v]
keyword[elif] identifier[k] . identifier[endswith] ( literal[string] ):
identifier[optimized] [ identifier[k] [:- literal[int] ]+ literal[string] ]= identifier[v]
keyword[return] identifier[optimized] | def optimize_filter_dict(filter_dict, trgm=True):
"""Improve query speed for a Django queryset `filter` or `exclude` kwargs dict
WARNING: Wtthout `trgm`, this only improves the speed of exclude filters by 0.4%
Arguments:
filter_dict (dict): kwargs for Django ORM queryset `filter` and `exclude` queries
trgm (bool): whether to assume the Django ORM trigram (djorm-trgm) extension is available
Examples:
>>> # This is nothing different than what Django already does:
>>> optimize_filter_dict({'name__in': ['Smith', 'Jones', 'Smith']}) == {'name__in': set(('Smith', 'Jones'))}
True
>>> # This is an optimjization that Django doesn't do yet, probably because it actually slows `filter` queries down by 0.4%!:
>>> # However, it does speed up an `objects.exclude` query by about 1%:
>>> optimize_filter_dict({'name__in': ['Smith']}) == {'name': 'Smith'}
True
>>> # This is the only optimization that actually does significant good, but it requires `djorm-trgm`
>>> optimize_filter_dict({'name__contains': 'ith'}) == {'name__similar': 'ith', 'name__contains': 'ith'}
True
"""
optimized = {}
for (k, v) in filter_dict.iteritems():
if k.endswith('__in'):
v = set(v)
if len(v) == 1:
optimized[k[:-4]] = tuple(v)[0] # depends on [control=['if'], data=[]]
else:
optimized[k] = v # depends on [control=['if'], data=[]]
else:
optimized[k] = v # depends on [control=['for'], data=[]]
# This is the only optimization that actuall does some good
if trgm:
optimized_copy = dict(optimized)
for (k, v) in optimized_copy.iteritems():
if k.endswith('__contains'):
optimized[k[:-10] + '__similar'] = v # depends on [control=['if'], data=[]]
elif k.endswith('__icontains'):
optimized[k[:-11] + '__similar'] = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
return optimized |
def info(
    self,
    path="/",  # type: Text
    namespaces=None,  # type: Optional[Collection[Text]]
    **kwargs  # type: Any
):
    # type: (...) -> Iterator[Tuple[Text, Info]]
    """Walk a filesystem, yielding path and `Info` of resources.

    This method invokes `Walker.info` with the bound `FS` object.

    Arguments:
        path (str): A path to a directory.
        namespaces (list, optional): A list of namespaces to include
            in the resource information, e.g. ``['basic', 'access']``
            (defaults to ``['basic']``).

    Keyword Arguments:
        ignore_errors (bool): If `True`, any errors reading a
            directory will be ignored, otherwise exceptions will be
            raised.
        on_error (callable): If ``ignore_errors`` is `False`, then
            this callable will be invoked with a path and the exception
            object. It should return `True` to ignore the error, or
            `False` to re-raise it.
        search (str): If ``'breadth'`` then the directory will be
            walked *top down*. Set to ``'depth'`` to walk *bottom up*.
        filter (list): If supplied, this parameter should be a list
            of file name patterns, e.g. ``['*.py']``. Files will only be
            returned if the final component matches one of the
            patterns.
        exclude (list, optional): If supplied, this parameter should be
            a list of filename patterns, e.g. ``['~*', '.*']``. Files matching
            any of these patterns will be removed from the walk.
        filter_dirs (list, optional): A list of patterns that will be used
            to match directories paths. The walk will only open directories
            that match at least one of these patterns.
        exclude_dirs (list): A list of patterns that will be used
            to filter out directories from the walk, e.g. ``['*.svn',
            '*.git']``.
        max_depth (int, optional): Maximum directory depth to walk.

    Returns:
        ~collections.Iterable: an iterable yielding tuples of
        ``(<absolute path>, <resource info>)``.

    """
    # build a walker from the keyword options and delegate to it
    return self._make_walker(**kwargs).info(self.fs, path=path, namespaces=namespaces)
constant[Walk a filesystem, yielding path and `Info` of resources.
Arguments:
path (str): A path to a directory.
namespaces (list, optional): A list of namespaces to include
in the resource information, e.g. ``['basic', 'access']``
(defaults to ``['basic']``).
Keyword Arguments:
ignore_errors (bool): If `True`, any errors reading a
directory will be ignored, otherwise exceptions will be
raised.
on_error (callable): If ``ignore_errors`` is `False`, then
this callable will be invoked with a path and the exception
object. It should return `True` to ignore the error, or
`False` to re-raise it.
search (str): If ``'breadth'`` then the directory will be
walked *top down*. Set to ``'depth'`` to walk *bottom up*.
filter (list): If supplied, this parameter should be a list
of file name patterns, e.g. ``['*.py']``. Files will only be
returned if the final component matches one of the
patterns.
exclude (list, optional): If supplied, this parameter should be
a list of filename patterns, e.g. ``['~*', '.*']``. Files matching
any of these patterns will be removed from the walk.
filter_dirs (list, optional): A list of patterns that will be used
to match directories paths. The walk will only open directories
that match at least one of these patterns.
exclude_dirs (list): A list of patterns that will be used
to filter out directories from the walk, e.g. ``['*.svn',
'*.git']``.
max_depth (int, optional): Maximum directory depth to walk.
Returns:
~collections.Iterable: an iterable yielding tuples of
``(<absolute path>, <resource info>)``.
This method invokes `Walker.info` with the bound `FS` object.
]
variable[walker] assign[=] call[name[self]._make_walker, parameter[]]
return[call[name[walker].info, parameter[name[self].fs]]] | keyword[def] identifier[info] (
identifier[self] ,
identifier[path] = literal[string] ,
identifier[namespaces] = keyword[None] ,
** identifier[kwargs]
):
literal[string]
identifier[walker] = identifier[self] . identifier[_make_walker] (** identifier[kwargs] )
keyword[return] identifier[walker] . identifier[info] ( identifier[self] . identifier[fs] , identifier[path] = identifier[path] , identifier[namespaces] = identifier[namespaces] ) | def info(self, path='/', namespaces=None, **kwargs): # type: Text
# type: Optional[Collection[Text]]
# type: Any
# type: (...) -> Iterator[Tuple[Text, Info]]
"Walk a filesystem, yielding path and `Info` of resources.\n\n Arguments:\n path (str): A path to a directory.\n namespaces (list, optional): A list of namespaces to include\n in the resource information, e.g. ``['basic', 'access']``\n (defaults to ``['basic']``).\n\n Keyword Arguments:\n ignore_errors (bool): If `True`, any errors reading a\n directory will be ignored, otherwise exceptions will be\n raised.\n on_error (callable): If ``ignore_errors`` is `False`, then\n this callable will be invoked with a path and the exception\n object. It should return `True` to ignore the error, or\n `False` to re-raise it.\n search (str): If ``'breadth'`` then the directory will be\n walked *top down*. Set to ``'depth'`` to walk *bottom up*.\n filter (list): If supplied, this parameter should be a list\n of file name patterns, e.g. ``['*.py']``. Files will only be\n returned if the final component matches one of the\n patterns.\n exclude (list, optional): If supplied, this parameter should be\n a list of filename patterns, e.g. ``['~*', '.*']``. Files matching\n any of these patterns will be removed from the walk.\n filter_dirs (list, optional): A list of patterns that will be used\n to match directories paths. The walk will only open directories\n that match at least one of these patterns.\n exclude_dirs (list): A list of patterns that will be used\n to filter out directories from the walk, e.g. ``['*.svn',\n '*.git']``.\n max_depth (int, optional): Maximum directory depth to walk.\n\n Returns:\n ~collections.Iterable: an iterable yielding tuples of\n ``(<absolute path>, <resource info>)``.\n\n This method invokes `Walker.info` with the bound `FS` object.\n\n "
walker = self._make_walker(**kwargs)
return walker.info(self.fs, path=path, namespaces=namespaces) |
def contains_entry(self, key, value):
    """
    Returns whether the multimap contains an entry with the value.

    :param key: (object), the specified key.
    :param value: (object), the specified value.
    :return: (bool), ``true`` if this multimap contains the key-value tuple.
    """
    # both halves of the entry must be present
    check_not_none(key, "key can't be None")
    check_not_none(value, "value can't be None")
    serialized_key = self._to_data(key)
    serialized_value = self._to_data(value)
    # route the request to the member owning the key's partition
    return self._encode_invoke_on_key(
        multi_map_contains_entry_codec,
        serialized_key,
        key=serialized_key,
        value=serialized_value,
        thread_id=thread_id(),
    )
value=value_data, thread_id=thread_id()) | def function[contains_entry, parameter[self, key, value]]:
constant[
Returns whether the multimap contains an entry with the value.
:param key: (object), the specified key.
:param value: (object), the specified value.
:return: (bool), ``true`` if this multimap contains the key-value tuple.
]
call[name[check_not_none], parameter[name[key], constant[key can't be None]]]
call[name[check_not_none], parameter[name[value], constant[value can't be None]]]
variable[key_data] assign[=] call[name[self]._to_data, parameter[name[key]]]
variable[value_data] assign[=] call[name[self]._to_data, parameter[name[value]]]
return[call[name[self]._encode_invoke_on_key, parameter[name[multi_map_contains_entry_codec], name[key_data]]]] | keyword[def] identifier[contains_entry] ( identifier[self] , identifier[key] , identifier[value] ):
literal[string]
identifier[check_not_none] ( identifier[key] , literal[string] )
identifier[check_not_none] ( identifier[value] , literal[string] )
identifier[key_data] = identifier[self] . identifier[_to_data] ( identifier[key] )
identifier[value_data] = identifier[self] . identifier[_to_data] ( identifier[value] )
keyword[return] identifier[self] . identifier[_encode_invoke_on_key] ( identifier[multi_map_contains_entry_codec] , identifier[key_data] , identifier[key] = identifier[key_data] ,
identifier[value] = identifier[value_data] , identifier[thread_id] = identifier[thread_id] ()) | def contains_entry(self, key, value):
"""
Returns whether the multimap contains an entry with the value.
:param key: (object), the specified key.
:param value: (object), the specified value.
:return: (bool), ``true`` if this multimap contains the key-value tuple.
"""
check_not_none(key, "key can't be None")
check_not_none(value, "value can't be None")
key_data = self._to_data(key)
value_data = self._to_data(value)
return self._encode_invoke_on_key(multi_map_contains_entry_codec, key_data, key=key_data, value=value_data, thread_id=thread_id()) |
def idx_num_to_name(L):
"""
Switch from index-by-number to index-by-name.
:param dict L: Metadata
:return dict L: Metadata
"""
logger_jsons.info("enter idx_num_to_name")
try:
if "paleoData" in L:
L["paleoData"] = _import_data(L["paleoData"], "paleo")
if "chronData" in L:
L["chronData"] = _import_data(L["chronData"], "chron")
except Exception as e:
logger_jsons.error("idx_num_to_name: {}".format(e))
print("Error: idx_name_to_num: {}".format(e))
logger_jsons.info("exit idx_num_to_name")
return L | def function[idx_num_to_name, parameter[L]]:
constant[
Switch from index-by-number to index-by-name.
:param dict L: Metadata
:return dict L: Metadata
]
call[name[logger_jsons].info, parameter[constant[enter idx_num_to_name]]]
<ast.Try object at 0x7da2044c1690>
call[name[logger_jsons].info, parameter[constant[exit idx_num_to_name]]]
return[name[L]] | keyword[def] identifier[idx_num_to_name] ( identifier[L] ):
literal[string]
identifier[logger_jsons] . identifier[info] ( literal[string] )
keyword[try] :
keyword[if] literal[string] keyword[in] identifier[L] :
identifier[L] [ literal[string] ]= identifier[_import_data] ( identifier[L] [ literal[string] ], literal[string] )
keyword[if] literal[string] keyword[in] identifier[L] :
identifier[L] [ literal[string] ]= identifier[_import_data] ( identifier[L] [ literal[string] ], literal[string] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[logger_jsons] . identifier[error] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[print] ( literal[string] . identifier[format] ( identifier[e] ))
identifier[logger_jsons] . identifier[info] ( literal[string] )
keyword[return] identifier[L] | def idx_num_to_name(L):
"""
Switch from index-by-number to index-by-name.
:param dict L: Metadata
:return dict L: Metadata
"""
logger_jsons.info('enter idx_num_to_name')
try:
if 'paleoData' in L:
L['paleoData'] = _import_data(L['paleoData'], 'paleo') # depends on [control=['if'], data=['L']]
if 'chronData' in L:
L['chronData'] = _import_data(L['chronData'], 'chron') # depends on [control=['if'], data=['L']] # depends on [control=['try'], data=[]]
except Exception as e:
logger_jsons.error('idx_num_to_name: {}'.format(e))
print('Error: idx_name_to_num: {}'.format(e)) # depends on [control=['except'], data=['e']]
logger_jsons.info('exit idx_num_to_name')
return L |
def available_for_protocol(self, protocol):
"""Check if the current function can be executed from a request defining the given protocol"""
if self.protocol == ALL or protocol == ALL:
return True
return protocol in ensure_sequence(self.protocol) | def function[available_for_protocol, parameter[self, protocol]]:
constant[Check if the current function can be executed from a request defining the given protocol]
if <ast.BoolOp object at 0x7da1b04f4b80> begin[:]
return[constant[True]]
return[compare[name[protocol] in call[name[ensure_sequence], parameter[name[self].protocol]]]] | keyword[def] identifier[available_for_protocol] ( identifier[self] , identifier[protocol] ):
literal[string]
keyword[if] identifier[self] . identifier[protocol] == identifier[ALL] keyword[or] identifier[protocol] == identifier[ALL] :
keyword[return] keyword[True]
keyword[return] identifier[protocol] keyword[in] identifier[ensure_sequence] ( identifier[self] . identifier[protocol] ) | def available_for_protocol(self, protocol):
"""Check if the current function can be executed from a request defining the given protocol"""
if self.protocol == ALL or protocol == ALL:
return True # depends on [control=['if'], data=[]]
return protocol in ensure_sequence(self.protocol) |
def run():
"""Command line entrypoint for the ``refresh-lsst-bib`` program.
"""
args = parse_args()
if args.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
logging.basicConfig(
level=log_level,
format='%(asctime)s %(levelname)s %(name)s: %(message)s')
if not args.verbose:
# Manage third-party loggers
req_logger = logging.getLogger('requests')
req_logger.setLevel(logging.WARNING)
logger = logging.getLogger(__name__)
logger.info('refresh-lsst-bib version {}'.format(__version__))
error_count = process_bib_files(args.dir)
sys.exit(error_count) | def function[run, parameter[]]:
constant[Command line entrypoint for the ``refresh-lsst-bib`` program.
]
variable[args] assign[=] call[name[parse_args], parameter[]]
if name[args].verbose begin[:]
variable[log_level] assign[=] name[logging].DEBUG
call[name[logging].basicConfig, parameter[]]
if <ast.UnaryOp object at 0x7da1b2393700> begin[:]
variable[req_logger] assign[=] call[name[logging].getLogger, parameter[constant[requests]]]
call[name[req_logger].setLevel, parameter[name[logging].WARNING]]
variable[logger] assign[=] call[name[logging].getLogger, parameter[name[__name__]]]
call[name[logger].info, parameter[call[constant[refresh-lsst-bib version {}].format, parameter[name[__version__]]]]]
variable[error_count] assign[=] call[name[process_bib_files], parameter[name[args].dir]]
call[name[sys].exit, parameter[name[error_count]]] | keyword[def] identifier[run] ():
literal[string]
identifier[args] = identifier[parse_args] ()
keyword[if] identifier[args] . identifier[verbose] :
identifier[log_level] = identifier[logging] . identifier[DEBUG]
keyword[else] :
identifier[log_level] = identifier[logging] . identifier[INFO]
identifier[logging] . identifier[basicConfig] (
identifier[level] = identifier[log_level] ,
identifier[format] = literal[string] )
keyword[if] keyword[not] identifier[args] . identifier[verbose] :
identifier[req_logger] = identifier[logging] . identifier[getLogger] ( literal[string] )
identifier[req_logger] . identifier[setLevel] ( identifier[logging] . identifier[WARNING] )
identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] )
identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[__version__] ))
identifier[error_count] = identifier[process_bib_files] ( identifier[args] . identifier[dir] )
identifier[sys] . identifier[exit] ( identifier[error_count] ) | def run():
"""Command line entrypoint for the ``refresh-lsst-bib`` program.
"""
args = parse_args()
if args.verbose:
log_level = logging.DEBUG # depends on [control=['if'], data=[]]
else:
log_level = logging.INFO
logging.basicConfig(level=log_level, format='%(asctime)s %(levelname)s %(name)s: %(message)s')
if not args.verbose:
# Manage third-party loggers
req_logger = logging.getLogger('requests')
req_logger.setLevel(logging.WARNING) # depends on [control=['if'], data=[]]
logger = logging.getLogger(__name__)
logger.info('refresh-lsst-bib version {}'.format(__version__))
error_count = process_bib_files(args.dir)
sys.exit(error_count) |
def addComponentEditor(self):
"""Adds a new component to the model, and an editor for this component to this editor"""
row = self._model.rowCount()
comp_stack_editor = ExploreComponentEditor()
self.ui.trackStack.addWidget(comp_stack_editor)
idx_button = IndexButton(row)
idx_button.pickMe.connect(self.ui.trackStack.setCurrentIndex)
self.trackBtnGroup.addButton(idx_button)
self.ui.trackBtnLayout.addWidget(idx_button)
self.ui.trackStack.setCurrentIndex(row)
comp_stack_editor.closePlease.connect(self.removeComponentEditor)
delay = Silence()
comp_stack_editor.delaySpnbx.setValue(delay.duration())
self._model.insertComponent(delay, row,0)
self._allComponents.append([x() for x in self.stimuli_types if x.explore])
for stim in self._allComponents[row]:
editor = wrapComponent(stim).showEditor()
comp_stack_editor.addWidget(editor, stim.name)
exvocal = comp_stack_editor.widgetForName("Vocalization")
if exvocal is not None:
exvocal.filelistView.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
initcomp = self._allComponents[row][0]
self._model.insertComponent(initcomp, row, 1)
self.buttons.append(idx_button)
comp_stack_editor.exploreStimTypeCmbbx.currentIndexChanged.connect(lambda x : self.setStimIndex(row, x))
comp_stack_editor.delaySpnbx.valueChanged.connect(lambda x : self.setDelay(row, x))
comp_stack_editor.valueChanged.connect(self.valueChanged.emit)
return comp_stack_editor | def function[addComponentEditor, parameter[self]]:
constant[Adds a new component to the model, and an editor for this component to this editor]
variable[row] assign[=] call[name[self]._model.rowCount, parameter[]]
variable[comp_stack_editor] assign[=] call[name[ExploreComponentEditor], parameter[]]
call[name[self].ui.trackStack.addWidget, parameter[name[comp_stack_editor]]]
variable[idx_button] assign[=] call[name[IndexButton], parameter[name[row]]]
call[name[idx_button].pickMe.connect, parameter[name[self].ui.trackStack.setCurrentIndex]]
call[name[self].trackBtnGroup.addButton, parameter[name[idx_button]]]
call[name[self].ui.trackBtnLayout.addWidget, parameter[name[idx_button]]]
call[name[self].ui.trackStack.setCurrentIndex, parameter[name[row]]]
call[name[comp_stack_editor].closePlease.connect, parameter[name[self].removeComponentEditor]]
variable[delay] assign[=] call[name[Silence], parameter[]]
call[name[comp_stack_editor].delaySpnbx.setValue, parameter[call[name[delay].duration, parameter[]]]]
call[name[self]._model.insertComponent, parameter[name[delay], name[row], constant[0]]]
call[name[self]._allComponents.append, parameter[<ast.ListComp object at 0x7da1b1eec3d0>]]
for taget[name[stim]] in starred[call[name[self]._allComponents][name[row]]] begin[:]
variable[editor] assign[=] call[call[name[wrapComponent], parameter[name[stim]]].showEditor, parameter[]]
call[name[comp_stack_editor].addWidget, parameter[name[editor], name[stim].name]]
variable[exvocal] assign[=] call[name[comp_stack_editor].widgetForName, parameter[constant[Vocalization]]]
if compare[name[exvocal] is_not constant[None]] begin[:]
call[name[exvocal].filelistView.setSelectionMode, parameter[name[QtGui].QAbstractItemView.SingleSelection]]
variable[initcomp] assign[=] call[call[name[self]._allComponents][name[row]]][constant[0]]
call[name[self]._model.insertComponent, parameter[name[initcomp], name[row], constant[1]]]
call[name[self].buttons.append, parameter[name[idx_button]]]
call[name[comp_stack_editor].exploreStimTypeCmbbx.currentIndexChanged.connect, parameter[<ast.Lambda object at 0x7da1b1eecc40>]]
call[name[comp_stack_editor].delaySpnbx.valueChanged.connect, parameter[<ast.Lambda object at 0x7da1b1eee2f0>]]
call[name[comp_stack_editor].valueChanged.connect, parameter[name[self].valueChanged.emit]]
return[name[comp_stack_editor]] | keyword[def] identifier[addComponentEditor] ( identifier[self] ):
literal[string]
identifier[row] = identifier[self] . identifier[_model] . identifier[rowCount] ()
identifier[comp_stack_editor] = identifier[ExploreComponentEditor] ()
identifier[self] . identifier[ui] . identifier[trackStack] . identifier[addWidget] ( identifier[comp_stack_editor] )
identifier[idx_button] = identifier[IndexButton] ( identifier[row] )
identifier[idx_button] . identifier[pickMe] . identifier[connect] ( identifier[self] . identifier[ui] . identifier[trackStack] . identifier[setCurrentIndex] )
identifier[self] . identifier[trackBtnGroup] . identifier[addButton] ( identifier[idx_button] )
identifier[self] . identifier[ui] . identifier[trackBtnLayout] . identifier[addWidget] ( identifier[idx_button] )
identifier[self] . identifier[ui] . identifier[trackStack] . identifier[setCurrentIndex] ( identifier[row] )
identifier[comp_stack_editor] . identifier[closePlease] . identifier[connect] ( identifier[self] . identifier[removeComponentEditor] )
identifier[delay] = identifier[Silence] ()
identifier[comp_stack_editor] . identifier[delaySpnbx] . identifier[setValue] ( identifier[delay] . identifier[duration] ())
identifier[self] . identifier[_model] . identifier[insertComponent] ( identifier[delay] , identifier[row] , literal[int] )
identifier[self] . identifier[_allComponents] . identifier[append] ([ identifier[x] () keyword[for] identifier[x] keyword[in] identifier[self] . identifier[stimuli_types] keyword[if] identifier[x] . identifier[explore] ])
keyword[for] identifier[stim] keyword[in] identifier[self] . identifier[_allComponents] [ identifier[row] ]:
identifier[editor] = identifier[wrapComponent] ( identifier[stim] ). identifier[showEditor] ()
identifier[comp_stack_editor] . identifier[addWidget] ( identifier[editor] , identifier[stim] . identifier[name] )
identifier[exvocal] = identifier[comp_stack_editor] . identifier[widgetForName] ( literal[string] )
keyword[if] identifier[exvocal] keyword[is] keyword[not] keyword[None] :
identifier[exvocal] . identifier[filelistView] . identifier[setSelectionMode] ( identifier[QtGui] . identifier[QAbstractItemView] . identifier[SingleSelection] )
identifier[initcomp] = identifier[self] . identifier[_allComponents] [ identifier[row] ][ literal[int] ]
identifier[self] . identifier[_model] . identifier[insertComponent] ( identifier[initcomp] , identifier[row] , literal[int] )
identifier[self] . identifier[buttons] . identifier[append] ( identifier[idx_button] )
identifier[comp_stack_editor] . identifier[exploreStimTypeCmbbx] . identifier[currentIndexChanged] . identifier[connect] ( keyword[lambda] identifier[x] : identifier[self] . identifier[setStimIndex] ( identifier[row] , identifier[x] ))
identifier[comp_stack_editor] . identifier[delaySpnbx] . identifier[valueChanged] . identifier[connect] ( keyword[lambda] identifier[x] : identifier[self] . identifier[setDelay] ( identifier[row] , identifier[x] ))
identifier[comp_stack_editor] . identifier[valueChanged] . identifier[connect] ( identifier[self] . identifier[valueChanged] . identifier[emit] )
keyword[return] identifier[comp_stack_editor] | def addComponentEditor(self):
"""Adds a new component to the model, and an editor for this component to this editor"""
row = self._model.rowCount()
comp_stack_editor = ExploreComponentEditor()
self.ui.trackStack.addWidget(comp_stack_editor)
idx_button = IndexButton(row)
idx_button.pickMe.connect(self.ui.trackStack.setCurrentIndex)
self.trackBtnGroup.addButton(idx_button)
self.ui.trackBtnLayout.addWidget(idx_button)
self.ui.trackStack.setCurrentIndex(row)
comp_stack_editor.closePlease.connect(self.removeComponentEditor)
delay = Silence()
comp_stack_editor.delaySpnbx.setValue(delay.duration())
self._model.insertComponent(delay, row, 0)
self._allComponents.append([x() for x in self.stimuli_types if x.explore])
for stim in self._allComponents[row]:
editor = wrapComponent(stim).showEditor()
comp_stack_editor.addWidget(editor, stim.name) # depends on [control=['for'], data=['stim']]
exvocal = comp_stack_editor.widgetForName('Vocalization')
if exvocal is not None:
exvocal.filelistView.setSelectionMode(QtGui.QAbstractItemView.SingleSelection) # depends on [control=['if'], data=['exvocal']]
initcomp = self._allComponents[row][0]
self._model.insertComponent(initcomp, row, 1)
self.buttons.append(idx_button)
comp_stack_editor.exploreStimTypeCmbbx.currentIndexChanged.connect(lambda x: self.setStimIndex(row, x))
comp_stack_editor.delaySpnbx.valueChanged.connect(lambda x: self.setDelay(row, x))
comp_stack_editor.valueChanged.connect(self.valueChanged.emit)
return comp_stack_editor |
def to_map_with_default(value, default_value):
"""
Converts JSON string into map object or returns default value when conversion is not possible.
:param value: the JSON string to convert.
:param default_value: the default value.
:return: Map object value or default when conversion is not supported.
"""
result = JsonConverter.to_nullable_map(value)
return result if result != None else default_value | def function[to_map_with_default, parameter[value, default_value]]:
constant[
Converts JSON string into map object or returns default value when conversion is not possible.
:param value: the JSON string to convert.
:param default_value: the default value.
:return: Map object value or default when conversion is not supported.
]
variable[result] assign[=] call[name[JsonConverter].to_nullable_map, parameter[name[value]]]
return[<ast.IfExp object at 0x7da1b1470b50>] | keyword[def] identifier[to_map_with_default] ( identifier[value] , identifier[default_value] ):
literal[string]
identifier[result] = identifier[JsonConverter] . identifier[to_nullable_map] ( identifier[value] )
keyword[return] identifier[result] keyword[if] identifier[result] != keyword[None] keyword[else] identifier[default_value] | def to_map_with_default(value, default_value):
"""
Converts JSON string into map object or returns default value when conversion is not possible.
:param value: the JSON string to convert.
:param default_value: the default value.
:return: Map object value or default when conversion is not supported.
"""
result = JsonConverter.to_nullable_map(value)
return result if result != None else default_value |
def cumulative_max(self):
"""
Return the cumulative maximum value of the elements in the SArray.
Returns an SArray where each element in the output corresponds to the
maximum value of all the elements preceding and including it. The
SArray is expected to be of numeric type (int, float).
Returns
-------
out : SArray[int, float]
Notes
-----
- Missing values are ignored while performing the cumulative
aggregate operation.
Examples
--------
>>> sa = SArray([1, 0, 3, 4, 2])
>>> sa.cumulative_max()
dtype: int
rows: 3
[1, 1, 3, 4, 4]
"""
from .. import extensions
agg_op = "__builtin__cum_max__"
return SArray(_proxy = self.__proxy__.builtin_cumulative_aggregate(agg_op)) | def function[cumulative_max, parameter[self]]:
constant[
Return the cumulative maximum value of the elements in the SArray.
Returns an SArray where each element in the output corresponds to the
maximum value of all the elements preceding and including it. The
SArray is expected to be of numeric type (int, float).
Returns
-------
out : SArray[int, float]
Notes
-----
- Missing values are ignored while performing the cumulative
aggregate operation.
Examples
--------
>>> sa = SArray([1, 0, 3, 4, 2])
>>> sa.cumulative_max()
dtype: int
rows: 3
[1, 1, 3, 4, 4]
]
from relative_module[None] import module[extensions]
variable[agg_op] assign[=] constant[__builtin__cum_max__]
return[call[name[SArray], parameter[]]] | keyword[def] identifier[cumulative_max] ( identifier[self] ):
literal[string]
keyword[from] .. keyword[import] identifier[extensions]
identifier[agg_op] = literal[string]
keyword[return] identifier[SArray] ( identifier[_proxy] = identifier[self] . identifier[__proxy__] . identifier[builtin_cumulative_aggregate] ( identifier[agg_op] )) | def cumulative_max(self):
"""
Return the cumulative maximum value of the elements in the SArray.
Returns an SArray where each element in the output corresponds to the
maximum value of all the elements preceding and including it. The
SArray is expected to be of numeric type (int, float).
Returns
-------
out : SArray[int, float]
Notes
-----
- Missing values are ignored while performing the cumulative
aggregate operation.
Examples
--------
>>> sa = SArray([1, 0, 3, 4, 2])
>>> sa.cumulative_max()
dtype: int
rows: 3
[1, 1, 3, 4, 4]
"""
from .. import extensions
agg_op = '__builtin__cum_max__'
return SArray(_proxy=self.__proxy__.builtin_cumulative_aggregate(agg_op)) |
def remap_index_fn(ref_file):
"""Map sequence references to snap reference directory, using standard layout.
"""
snap_dir = os.path.join(os.path.dirname(ref_file), os.pardir, "snap")
assert os.path.exists(snap_dir) and os.path.isdir(snap_dir), snap_dir
return snap_dir | def function[remap_index_fn, parameter[ref_file]]:
constant[Map sequence references to snap reference directory, using standard layout.
]
variable[snap_dir] assign[=] call[name[os].path.join, parameter[call[name[os].path.dirname, parameter[name[ref_file]]], name[os].pardir, constant[snap]]]
assert[<ast.BoolOp object at 0x7da20c6e6200>]
return[name[snap_dir]] | keyword[def] identifier[remap_index_fn] ( identifier[ref_file] ):
literal[string]
identifier[snap_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[ref_file] ), identifier[os] . identifier[pardir] , literal[string] )
keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[snap_dir] ) keyword[and] identifier[os] . identifier[path] . identifier[isdir] ( identifier[snap_dir] ), identifier[snap_dir]
keyword[return] identifier[snap_dir] | def remap_index_fn(ref_file):
"""Map sequence references to snap reference directory, using standard layout.
"""
snap_dir = os.path.join(os.path.dirname(ref_file), os.pardir, 'snap')
assert os.path.exists(snap_dir) and os.path.isdir(snap_dir), snap_dir
return snap_dir |
def aN(a, dim=3, dtype='int'):
"""
Convert an integer or iterable list to numpy array of length dim. This func
is used to allow other methods to take both scalars non-numpy arrays with
flexibility.
Parameters
----------
a : number, iterable, array-like
The object to convert to numpy array
dim : integer
The length of the resulting array
dtype : string or np.dtype
Type which the resulting array should be, e.g. 'float', np.int8
Returns
-------
arr : numpy array
Resulting numpy array of length ``dim`` and type ``dtype``
Examples
--------
>>> aN(1, dim=2, dtype='float')
array([ 1., 1.])
>>> aN(1, dtype='int')
array([1, 1, 1])
>>> aN(np.array([1,2,3]), dtype='float')
array([ 1., 2., 3.])
"""
if not hasattr(a, '__iter__'):
return np.array([a]*dim, dtype=dtype)
return np.array(a).astype(dtype) | def function[aN, parameter[a, dim, dtype]]:
constant[
Convert an integer or iterable list to numpy array of length dim. This func
is used to allow other methods to take both scalars non-numpy arrays with
flexibility.
Parameters
----------
a : number, iterable, array-like
The object to convert to numpy array
dim : integer
The length of the resulting array
dtype : string or np.dtype
Type which the resulting array should be, e.g. 'float', np.int8
Returns
-------
arr : numpy array
Resulting numpy array of length ``dim`` and type ``dtype``
Examples
--------
>>> aN(1, dim=2, dtype='float')
array([ 1., 1.])
>>> aN(1, dtype='int')
array([1, 1, 1])
>>> aN(np.array([1,2,3]), dtype='float')
array([ 1., 2., 3.])
]
if <ast.UnaryOp object at 0x7da18f09fb20> begin[:]
return[call[name[np].array, parameter[binary_operation[list[[<ast.Name object at 0x7da18f09cc40>]] * name[dim]]]]]
return[call[call[name[np].array, parameter[name[a]]].astype, parameter[name[dtype]]]] | keyword[def] identifier[aN] ( identifier[a] , identifier[dim] = literal[int] , identifier[dtype] = literal[string] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[a] , literal[string] ):
keyword[return] identifier[np] . identifier[array] ([ identifier[a] ]* identifier[dim] , identifier[dtype] = identifier[dtype] )
keyword[return] identifier[np] . identifier[array] ( identifier[a] ). identifier[astype] ( identifier[dtype] ) | def aN(a, dim=3, dtype='int'):
"""
Convert an integer or iterable list to numpy array of length dim. This func
is used to allow other methods to take both scalars non-numpy arrays with
flexibility.
Parameters
----------
a : number, iterable, array-like
The object to convert to numpy array
dim : integer
The length of the resulting array
dtype : string or np.dtype
Type which the resulting array should be, e.g. 'float', np.int8
Returns
-------
arr : numpy array
Resulting numpy array of length ``dim`` and type ``dtype``
Examples
--------
>>> aN(1, dim=2, dtype='float')
array([ 1., 1.])
>>> aN(1, dtype='int')
array([1, 1, 1])
>>> aN(np.array([1,2,3]), dtype='float')
array([ 1., 2., 3.])
"""
if not hasattr(a, '__iter__'):
return np.array([a] * dim, dtype=dtype) # depends on [control=['if'], data=[]]
return np.array(a).astype(dtype) |
def get_phase(self, nintp=None, rintp=None,
delta_offset_x=0, delta_offset_y=0):
"""Interpolate from the border fields to new coordinates
Parameters
----------
nintp: float or None
Refractive index of the sphere
rintp: float or None
Radius of sphere [m]
delta_offset_x: float
Offset in x-direction [px]
delta_offset_y: float
Offset in y-direction [px]
Returns
-------
phase_intp: 2D real-valued np.ndarray
Interpolated phase at the given parameters
Notes
-----
Not all combinations are poosible, e.g.
- One of nintp or rintp must be None
- The current interpolation range must include the values
for rintp and nintp
"""
if nintp is None:
nintp = self.sphere_index
if rintp is None:
rintp = self.radius
assert (rintp == self.radius or nintp ==
self.sphere_index), "Only r or n can be changed at a time."
assert rintp >= self.radius - self.dr
assert rintp <= self.radius + self.dr
assert nintp >= self.sphere_index - \
self.dn, "Out of range: {} !> {}".format(
nintp, self.sphere_index - self.dn)
assert nintp <= self.sphere_index + self.dn
left = self.get_border_phase(0, 0)
if rintp == self.radius:
dist = nintp - self.sphere_index
dmax = self.dn
if dist < 0:
righ = self.get_border_phase(-1, 0)
else:
righ = self.get_border_phase(1, 0)
else:
dist = rintp - self.radius
dmax = self.dr
if dist < 0:
righ = self.get_border_phase(0, -1)
else:
righ = self.get_border_phase(0, 1)
# make dist positive so that we are interpolating from left to right
dist = np.abs(dist)
# perform linear interpolation of data.
phas = left + (righ - left) * dist / dmax
# interpolation of lateral movement
ti = time.time()
ipphas = spintp.RectBivariateSpline(np.arange(phas.shape[0]),
np.arange(phas.shape[1]),
phas)
if delta_offset_x != 0 or delta_offset_y != 0:
# Shift the image. The offset values used here
# are not self.posx_offset and self.posy_offset!
# The offset values are added to the fields computed
# with self.posx_offset and self.posy_offset.
newx = np.arange(phas.shape[0]) + delta_offset_x
newy = np.arange(phas.shape[1]) + delta_offset_y
phas = ipphas(newx, newy)
if self.verbose > 2:
print("Interpolation time for {}: {}".format(
self.model, time.time() - ti))
return phas + self.pha_offset | def function[get_phase, parameter[self, nintp, rintp, delta_offset_x, delta_offset_y]]:
constant[Interpolate from the border fields to new coordinates
Parameters
----------
nintp: float or None
Refractive index of the sphere
rintp: float or None
Radius of sphere [m]
delta_offset_x: float
Offset in x-direction [px]
delta_offset_y: float
Offset in y-direction [px]
Returns
-------
phase_intp: 2D real-valued np.ndarray
Interpolated phase at the given parameters
Notes
-----
Not all combinations are poosible, e.g.
- One of nintp or rintp must be None
- The current interpolation range must include the values
for rintp and nintp
]
if compare[name[nintp] is constant[None]] begin[:]
variable[nintp] assign[=] name[self].sphere_index
if compare[name[rintp] is constant[None]] begin[:]
variable[rintp] assign[=] name[self].radius
assert[<ast.BoolOp object at 0x7da2044c32b0>]
assert[compare[name[rintp] greater_or_equal[>=] binary_operation[name[self].radius - name[self].dr]]]
assert[compare[name[rintp] less_or_equal[<=] binary_operation[name[self].radius + name[self].dr]]]
assert[compare[name[nintp] greater_or_equal[>=] binary_operation[name[self].sphere_index - name[self].dn]]]
assert[compare[name[nintp] less_or_equal[<=] binary_operation[name[self].sphere_index + name[self].dn]]]
variable[left] assign[=] call[name[self].get_border_phase, parameter[constant[0], constant[0]]]
if compare[name[rintp] equal[==] name[self].radius] begin[:]
variable[dist] assign[=] binary_operation[name[nintp] - name[self].sphere_index]
variable[dmax] assign[=] name[self].dn
if compare[name[dist] less[<] constant[0]] begin[:]
variable[righ] assign[=] call[name[self].get_border_phase, parameter[<ast.UnaryOp object at 0x7da2044c1a80>, constant[0]]]
variable[dist] assign[=] call[name[np].abs, parameter[name[dist]]]
variable[phas] assign[=] binary_operation[name[left] + binary_operation[binary_operation[binary_operation[name[righ] - name[left]] * name[dist]] / name[dmax]]]
variable[ti] assign[=] call[name[time].time, parameter[]]
variable[ipphas] assign[=] call[name[spintp].RectBivariateSpline, parameter[call[name[np].arange, parameter[call[name[phas].shape][constant[0]]]], call[name[np].arange, parameter[call[name[phas].shape][constant[1]]]], name[phas]]]
if <ast.BoolOp object at 0x7da20c6abeb0> begin[:]
variable[newx] assign[=] binary_operation[call[name[np].arange, parameter[call[name[phas].shape][constant[0]]]] + name[delta_offset_x]]
variable[newy] assign[=] binary_operation[call[name[np].arange, parameter[call[name[phas].shape][constant[1]]]] + name[delta_offset_y]]
variable[phas] assign[=] call[name[ipphas], parameter[name[newx], name[newy]]]
if compare[name[self].verbose greater[>] constant[2]] begin[:]
call[name[print], parameter[call[constant[Interpolation time for {}: {}].format, parameter[name[self].model, binary_operation[call[name[time].time, parameter[]] - name[ti]]]]]]
return[binary_operation[name[phas] + name[self].pha_offset]] | keyword[def] identifier[get_phase] ( identifier[self] , identifier[nintp] = keyword[None] , identifier[rintp] = keyword[None] ,
identifier[delta_offset_x] = literal[int] , identifier[delta_offset_y] = literal[int] ):
literal[string]
keyword[if] identifier[nintp] keyword[is] keyword[None] :
identifier[nintp] = identifier[self] . identifier[sphere_index]
keyword[if] identifier[rintp] keyword[is] keyword[None] :
identifier[rintp] = identifier[self] . identifier[radius]
keyword[assert] ( identifier[rintp] == identifier[self] . identifier[radius] keyword[or] identifier[nintp] ==
identifier[self] . identifier[sphere_index] ), literal[string]
keyword[assert] identifier[rintp] >= identifier[self] . identifier[radius] - identifier[self] . identifier[dr]
keyword[assert] identifier[rintp] <= identifier[self] . identifier[radius] + identifier[self] . identifier[dr]
keyword[assert] identifier[nintp] >= identifier[self] . identifier[sphere_index] - identifier[self] . identifier[dn] , literal[string] . identifier[format] (
identifier[nintp] , identifier[self] . identifier[sphere_index] - identifier[self] . identifier[dn] )
keyword[assert] identifier[nintp] <= identifier[self] . identifier[sphere_index] + identifier[self] . identifier[dn]
identifier[left] = identifier[self] . identifier[get_border_phase] ( literal[int] , literal[int] )
keyword[if] identifier[rintp] == identifier[self] . identifier[radius] :
identifier[dist] = identifier[nintp] - identifier[self] . identifier[sphere_index]
identifier[dmax] = identifier[self] . identifier[dn]
keyword[if] identifier[dist] < literal[int] :
identifier[righ] = identifier[self] . identifier[get_border_phase] (- literal[int] , literal[int] )
keyword[else] :
identifier[righ] = identifier[self] . identifier[get_border_phase] ( literal[int] , literal[int] )
keyword[else] :
identifier[dist] = identifier[rintp] - identifier[self] . identifier[radius]
identifier[dmax] = identifier[self] . identifier[dr]
keyword[if] identifier[dist] < literal[int] :
identifier[righ] = identifier[self] . identifier[get_border_phase] ( literal[int] ,- literal[int] )
keyword[else] :
identifier[righ] = identifier[self] . identifier[get_border_phase] ( literal[int] , literal[int] )
identifier[dist] = identifier[np] . identifier[abs] ( identifier[dist] )
identifier[phas] = identifier[left] +( identifier[righ] - identifier[left] )* identifier[dist] / identifier[dmax]
identifier[ti] = identifier[time] . identifier[time] ()
identifier[ipphas] = identifier[spintp] . identifier[RectBivariateSpline] ( identifier[np] . identifier[arange] ( identifier[phas] . identifier[shape] [ literal[int] ]),
identifier[np] . identifier[arange] ( identifier[phas] . identifier[shape] [ literal[int] ]),
identifier[phas] )
keyword[if] identifier[delta_offset_x] != literal[int] keyword[or] identifier[delta_offset_y] != literal[int] :
identifier[newx] = identifier[np] . identifier[arange] ( identifier[phas] . identifier[shape] [ literal[int] ])+ identifier[delta_offset_x]
identifier[newy] = identifier[np] . identifier[arange] ( identifier[phas] . identifier[shape] [ literal[int] ])+ identifier[delta_offset_y]
identifier[phas] = identifier[ipphas] ( identifier[newx] , identifier[newy] )
keyword[if] identifier[self] . identifier[verbose] > literal[int] :
identifier[print] ( literal[string] . identifier[format] (
identifier[self] . identifier[model] , identifier[time] . identifier[time] ()- identifier[ti] ))
keyword[return] identifier[phas] + identifier[self] . identifier[pha_offset] | def get_phase(self, nintp=None, rintp=None, delta_offset_x=0, delta_offset_y=0):
"""Interpolate from the border fields to new coordinates
Parameters
----------
nintp: float or None
Refractive index of the sphere
rintp: float or None
Radius of sphere [m]
delta_offset_x: float
Offset in x-direction [px]
delta_offset_y: float
Offset in y-direction [px]
Returns
-------
phase_intp: 2D real-valued np.ndarray
Interpolated phase at the given parameters
Notes
-----
Not all combinations are poosible, e.g.
- One of nintp or rintp must be None
- The current interpolation range must include the values
for rintp and nintp
"""
if nintp is None:
nintp = self.sphere_index # depends on [control=['if'], data=['nintp']]
if rintp is None:
rintp = self.radius # depends on [control=['if'], data=['rintp']]
assert rintp == self.radius or nintp == self.sphere_index, 'Only r or n can be changed at a time.'
assert rintp >= self.radius - self.dr
assert rintp <= self.radius + self.dr
assert nintp >= self.sphere_index - self.dn, 'Out of range: {} !> {}'.format(nintp, self.sphere_index - self.dn)
assert nintp <= self.sphere_index + self.dn
left = self.get_border_phase(0, 0)
if rintp == self.radius:
dist = nintp - self.sphere_index
dmax = self.dn
if dist < 0:
righ = self.get_border_phase(-1, 0) # depends on [control=['if'], data=[]]
else:
righ = self.get_border_phase(1, 0) # depends on [control=['if'], data=[]]
else:
dist = rintp - self.radius
dmax = self.dr
if dist < 0:
righ = self.get_border_phase(0, -1) # depends on [control=['if'], data=[]]
else:
righ = self.get_border_phase(0, 1)
# make dist positive so that we are interpolating from left to right
dist = np.abs(dist)
# perform linear interpolation of data.
phas = left + (righ - left) * dist / dmax
# interpolation of lateral movement
ti = time.time()
ipphas = spintp.RectBivariateSpline(np.arange(phas.shape[0]), np.arange(phas.shape[1]), phas)
if delta_offset_x != 0 or delta_offset_y != 0:
# Shift the image. The offset values used here
# are not self.posx_offset and self.posy_offset!
# The offset values are added to the fields computed
# with self.posx_offset and self.posy_offset.
newx = np.arange(phas.shape[0]) + delta_offset_x
newy = np.arange(phas.shape[1]) + delta_offset_y
phas = ipphas(newx, newy) # depends on [control=['if'], data=[]]
if self.verbose > 2:
print('Interpolation time for {}: {}'.format(self.model, time.time() - ti)) # depends on [control=['if'], data=[]]
return phas + self.pha_offset |
def _save_notebook(self, os_path, nb):
    """Save a notebook to an os_path.

    The on-disk format is chosen by file-type detection on the target
    path: 'notebook' files are written as native notebook JSON via
    nbformat, while 'markdown' files are produced by serializing the
    notebook to JSON and converting that to markdown. Paths of any
    other detected type are written as an empty file (no branch runs).

    :param os_path: filesystem path to write to.
    :param nb: the notebook node to save.
    """
    # Detect the format once; the original called ftdetect() twice,
    # which is redundant work for a path-invariant result.
    fmt = ftdetect(os_path)
    with self.atomic_writing(os_path, encoding='utf-8') as f:
        if fmt == 'notebook':
            nbformat.write(nb, f, version=nbformat.NO_CONVERT)
        elif fmt == 'markdown':
            nbjson = nbformat.writes(nb, version=nbformat.NO_CONVERT)
            markdown = convert(nbjson,
                               informat='notebook',
                               outformat='markdown',
                               strip_outputs=self.strip_outputs)
            f.write(markdown)
constant[Save a notebook to an os_path.]
with call[name[self].atomic_writing, parameter[name[os_path]]] begin[:]
if compare[call[name[ftdetect], parameter[name[os_path]]] equal[==] constant[notebook]] begin[:]
call[name[nbformat].write, parameter[name[nb], name[f]]] | keyword[def] identifier[_save_notebook] ( identifier[self] , identifier[os_path] , identifier[nb] ):
literal[string]
keyword[with] identifier[self] . identifier[atomic_writing] ( identifier[os_path] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
keyword[if] identifier[ftdetect] ( identifier[os_path] )== literal[string] :
identifier[nbformat] . identifier[write] ( identifier[nb] , identifier[f] , identifier[version] = identifier[nbformat] . identifier[NO_CONVERT] )
keyword[elif] identifier[ftdetect] ( identifier[os_path] )== literal[string] :
identifier[nbjson] = identifier[nbformat] . identifier[writes] ( identifier[nb] , identifier[version] = identifier[nbformat] . identifier[NO_CONVERT] )
identifier[markdown] = identifier[convert] ( identifier[nbjson] ,
identifier[informat] = literal[string] ,
identifier[outformat] = literal[string] ,
identifier[strip_outputs] = identifier[self] . identifier[strip_outputs] )
identifier[f] . identifier[write] ( identifier[markdown] ) | def _save_notebook(self, os_path, nb):
"""Save a notebook to an os_path."""
with self.atomic_writing(os_path, encoding='utf-8') as f:
if ftdetect(os_path) == 'notebook':
nbformat.write(nb, f, version=nbformat.NO_CONVERT) # depends on [control=['if'], data=[]]
elif ftdetect(os_path) == 'markdown':
nbjson = nbformat.writes(nb, version=nbformat.NO_CONVERT)
markdown = convert(nbjson, informat='notebook', outformat='markdown', strip_outputs=self.strip_outputs)
f.write(markdown) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] |
def _create_validator(self):
    # type: () -> Generator[Text, None, None]
    """
    Creates a generator that does all the work.

    Yields one human-readable error message per problem detected:
    first bundle-hash / index / balance errors for every transaction,
    then (only when ``self._errors`` is empty at the time this runs)
    signature-related errors for input transaction groups. A consumer
    that receives no values may treat these checks as passed.
    """
    # Group transactions by address to make it easier to iterate
    # over inputs.
    grouped_transactions = self.bundle.group_transactions()
    # Define a few expected values.
    bundle_hash = self.bundle.hash
    last_index = len(self.bundle) - 1
    # Track a few others as we go along.
    balance = 0
    # Check indices and balance first.
    # Note that we use a counter to keep track of the current index,
    # since at this point we can't trust that the transactions have
    # correct ``current_index`` values.
    counter = 0
    for group in grouped_transactions:
        for txn in group:
            balance += txn.value
            if txn.bundle_hash != bundle_hash:
                yield 'Transaction {i} has invalid bundle hash.'.format(
                    i=counter,
                )
            if txn.current_index != counter:
                yield (
                    'Transaction {i} has invalid current index value '
                    '(expected {i}, actual {actual}).'.format(
                        actual=txn.current_index,
                        i=counter,
                    )
                )
            if txn.last_index != last_index:
                yield (
                    'Transaction {i} has invalid last index value '
                    '(expected {expected}, actual {actual}).'.format(
                        actual=txn.last_index,
                        expected=last_index,
                        i=counter,
                    )
                )
            counter += 1
    # Bundle must be balanced (spends must match inputs).
    if balance != 0:
        yield (
            'Bundle has invalid balance '
            '(expected 0, actual {actual}).'.format(
                actual=balance,
            )
        )
    # Signature validation is only meaningful if the transactions
    # are otherwise valid.
    if not self._errors:
        signature_validation_queue = []  # type: List[List[Transaction]]
        for group in grouped_transactions:
            # Signature validation only applies to inputs.
            if group[0].value >= 0:
                continue
            validate_group_signature = True
            for j, txn in enumerate(group):
                if (j > 0) and (txn.value != 0):
                    # Input is malformed; signature fragments after
                    # the first should have zero value.
                    yield (
                        'Transaction {i} has invalid value '
                        '(expected 0, actual {actual}).'.format(
                            actual=txn.value,
                            # If we get to this point, we know that
                            # the ``current_index`` value for each
                            # transaction can be trusted.
                            i=txn.current_index,
                        )
                    )
                    # We won't be able to validate the signature,
                    # but continue anyway, so that we can check that
                    # the other transactions in the group have the
                    # correct ``value``.
                    validate_group_signature = False
                    continue
            # After collecting the signature fragment from each
            # transaction in the group, queue them up to run through
            # the validator.
            #
            # We have to perform signature validation separately so
            # that we can try different algorithms (for
            # backwards-compatibility).
            #
            # References:
            #
            # - https://github.com/iotaledger/kerl#kerl-integration-in-iota
            if validate_group_signature:
                signature_validation_queue.append(group)
        # Once we've finished checking the attributes from each
        # transaction in the bundle, go back and validate
        # signatures.
        if signature_validation_queue:
            # ``yield from`` is an option here, but for
            # compatibility with Python 2 clients, we will do it the
            # old-fashioned way.
            for error in self._get_bundle_signature_errors(
                    signature_validation_queue
            ):
                yield error
constant[
Creates a generator that does all the work.
]
variable[grouped_transactions] assign[=] call[name[self].bundle.group_transactions, parameter[]]
variable[bundle_hash] assign[=] name[self].bundle.hash
variable[last_index] assign[=] binary_operation[call[name[len], parameter[name[self].bundle]] - constant[1]]
variable[balance] assign[=] constant[0]
variable[counter] assign[=] constant[0]
for taget[name[group]] in starred[name[grouped_transactions]] begin[:]
for taget[name[txn]] in starred[name[group]] begin[:]
<ast.AugAssign object at 0x7da18f721240>
if compare[name[txn].bundle_hash not_equal[!=] name[bundle_hash]] begin[:]
<ast.Yield object at 0x7da18f721390>
if compare[name[txn].current_index not_equal[!=] name[counter]] begin[:]
<ast.Yield object at 0x7da18f7204c0>
if compare[name[txn].last_index not_equal[!=] name[last_index]] begin[:]
<ast.Yield object at 0x7da18f723f40>
<ast.AugAssign object at 0x7da18f722890>
if compare[name[balance] not_equal[!=] constant[0]] begin[:]
<ast.Yield object at 0x7da18f7219f0>
if <ast.UnaryOp object at 0x7da18f720a00> begin[:]
variable[signature_validation_queue] assign[=] list[[]]
for taget[name[group]] in starred[name[grouped_transactions]] begin[:]
if compare[call[name[group]][constant[0]].value greater_or_equal[>=] constant[0]] begin[:]
continue
variable[validate_group_signature] assign[=] constant[True]
for taget[tuple[[<ast.Name object at 0x7da2041db5e0>, <ast.Name object at 0x7da2041da2f0>]]] in starred[call[name[enumerate], parameter[name[group]]]] begin[:]
if <ast.BoolOp object at 0x7da2041dbf70> begin[:]
<ast.Yield object at 0x7da2041d9e40>
variable[validate_group_signature] assign[=] constant[False]
continue
if name[validate_group_signature] begin[:]
call[name[signature_validation_queue].append, parameter[name[group]]]
if name[signature_validation_queue] begin[:]
for taget[name[error]] in starred[call[name[self]._get_bundle_signature_errors, parameter[name[signature_validation_queue]]]] begin[:]
<ast.Yield object at 0x7da2041d8400> | keyword[def] identifier[_create_validator] ( identifier[self] ):
literal[string]
identifier[grouped_transactions] = identifier[self] . identifier[bundle] . identifier[group_transactions] ()
identifier[bundle_hash] = identifier[self] . identifier[bundle] . identifier[hash]
identifier[last_index] = identifier[len] ( identifier[self] . identifier[bundle] )- literal[int]
identifier[balance] = literal[int]
identifier[counter] = literal[int]
keyword[for] identifier[group] keyword[in] identifier[grouped_transactions] :
keyword[for] identifier[txn] keyword[in] identifier[group] :
identifier[balance] += identifier[txn] . identifier[value]
keyword[if] identifier[txn] . identifier[bundle_hash] != identifier[bundle_hash] :
keyword[yield] literal[string] . identifier[format] (
identifier[i] = identifier[counter] ,
)
keyword[if] identifier[txn] . identifier[current_index] != identifier[counter] :
keyword[yield] (
literal[string]
literal[string] . identifier[format] (
identifier[actual] = identifier[txn] . identifier[current_index] ,
identifier[i] = identifier[counter] ,
)
)
keyword[if] identifier[txn] . identifier[last_index] != identifier[last_index] :
keyword[yield] (
literal[string]
literal[string] . identifier[format] (
identifier[actual] = identifier[txn] . identifier[last_index] ,
identifier[expected] = identifier[last_index] ,
identifier[i] = identifier[counter] ,
)
)
identifier[counter] += literal[int]
keyword[if] identifier[balance] != literal[int] :
keyword[yield] (
literal[string]
literal[string] . identifier[format] (
identifier[actual] = identifier[balance] ,
)
)
keyword[if] keyword[not] identifier[self] . identifier[_errors] :
identifier[signature_validation_queue] =[]
keyword[for] identifier[group] keyword[in] identifier[grouped_transactions] :
keyword[if] identifier[group] [ literal[int] ]. identifier[value] >= literal[int] :
keyword[continue]
identifier[validate_group_signature] = keyword[True]
keyword[for] identifier[j] , identifier[txn] keyword[in] identifier[enumerate] ( identifier[group] ):
keyword[if] ( identifier[j] > literal[int] ) keyword[and] ( identifier[txn] . identifier[value] != literal[int] ):
keyword[yield] (
literal[string]
literal[string] . identifier[format] (
identifier[actual] = identifier[txn] . identifier[value] ,
identifier[i] = identifier[txn] . identifier[current_index] ,
)
)
identifier[validate_group_signature] = keyword[False]
keyword[continue]
keyword[if] identifier[validate_group_signature] :
identifier[signature_validation_queue] . identifier[append] ( identifier[group] )
keyword[if] identifier[signature_validation_queue] :
keyword[for] identifier[error] keyword[in] identifier[self] . identifier[_get_bundle_signature_errors] (
identifier[signature_validation_queue]
):
keyword[yield] identifier[error] | def _create_validator(self):
# type: () -> Generator[Text, None, None]
'\n Creates a generator that does all the work.\n '
# Group transactions by address to make it easier to iterate
# over inputs.
grouped_transactions = self.bundle.group_transactions()
# Define a few expected values.
bundle_hash = self.bundle.hash
last_index = len(self.bundle) - 1
# Track a few others as we go along.
balance = 0
# Check indices and balance first.
# Note that we use a counter to keep track of the current index,
# since at this point we can't trust that the transactions have
# correct ``current_index`` values.
counter = 0
for group in grouped_transactions:
for txn in group:
balance += txn.value
if txn.bundle_hash != bundle_hash:
yield 'Transaction {i} has invalid bundle hash.'.format(i=counter) # depends on [control=['if'], data=[]]
if txn.current_index != counter:
yield 'Transaction {i} has invalid current index value (expected {i}, actual {actual}).'.format(actual=txn.current_index, i=counter) # depends on [control=['if'], data=['counter']]
if txn.last_index != last_index:
yield 'Transaction {i} has invalid last index value (expected {expected}, actual {actual}).'.format(actual=txn.last_index, expected=last_index, i=counter) # depends on [control=['if'], data=['last_index']]
counter += 1 # depends on [control=['for'], data=['txn']] # depends on [control=['for'], data=['group']]
# Bundle must be balanced (spends must match inputs).
if balance != 0:
yield 'Bundle has invalid balance (expected 0, actual {actual}).'.format(actual=balance) # depends on [control=['if'], data=['balance']]
# Signature validation is only meaningful if the transactions
# are otherwise valid.
if not self._errors:
signature_validation_queue = [] # type: List[List[Transaction]]
for group in grouped_transactions:
# Signature validation only applies to inputs.
if group[0].value >= 0:
continue # depends on [control=['if'], data=[]]
validate_group_signature = True
for (j, txn) in enumerate(group):
if j > 0 and txn.value != 0:
# Input is malformed; signature fragments after
# the first should have zero value.
# If we get to this point, we know that
# the ``current_index`` value for each
# transaction can be trusted.
yield 'Transaction {i} has invalid value (expected 0, actual {actual}).'.format(actual=txn.value, i=txn.current_index)
# We won't be able to validate the signature,
# but continue anyway, so that we can check that
# the other transactions in the group have the
# correct ``value``.
validate_group_signature = False
continue # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# After collecting the signature fragment from each
# transaction in the group, queue them up to run through
# the validator.
#
# We have to perform signature validation separately so
# that we can try different algorithms (for
# backwards-compatibility).
#
# References:
#
# - https://github.com/iotaledger/kerl#kerl-integration-in-iota
if validate_group_signature:
signature_validation_queue.append(group) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['group']]
# Once we've finished checking the attributes from each
# transaction in the bundle, go back and validate
# signatures.
if signature_validation_queue:
# ``yield from`` is an option here, but for
# compatibility with Python 2 clients, we will do it the
# old-fashioned way.
for error in self._get_bundle_signature_errors(signature_validation_queue):
yield error # depends on [control=['for'], data=['error']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def _updateInternals(self):
    """Update internal attributes related to likelihood.

    Should be called any time branch lengths or model parameters
    are changed.

    Recomputes ``self.siteloglik`` (per-site log likelihoods, with
    the underflow rescaling factor added back) and ``self.loglik``
    (their sum plus the model's log prior). When
    ``self.dparamscurrent`` is true, also fills ``self._dloglik``
    with the derivative of the log likelihood for each free model
    parameter; when ``self.dtcurrent`` is true, fills
    ``self._dloglik_dt`` with derivatives with respect to branch
    lengths (same shape as ``self.t``).
    """
    # The root is the highest-numbered node in the partial-likelihood
    # arrays indexed below.
    rootnode = self.nnodes - 1
    if self._distributionmodel:
        catweights = self.model.catweights
    else:
        # Single implicit category with unit weight.
        catweights = scipy.ones(1, dtype='float')
    # When there are multiple categories, it is acceptable
    # for some (but not all) of them to have underflow at
    # any given site. Note that we still include a check for
    # Underflow by ensuring that none of the site likelihoods is
    # zero.
    undererrstate = 'ignore' if len(catweights) > 1 else 'raise'
    with scipy.errstate(over='raise', under=undererrstate,
                        divide='raise', invalid='raise'):
        self.underflowlogscale.fill(0.0)
        self._computePartialLikelihoods()
        sitelik = scipy.zeros(self.nsites, dtype='float')
        assert (self.L[rootnode] >= 0).all(), str(self.L[rootnode])
        # Site likelihood: weighted sum over categories of the
        # stationary-state-weighted root partial likelihoods.
        for k in self._catindices:
            sitelik += scipy.sum(self._stationarystate(k) *
                self.L[rootnode][k], axis=1) * catweights[k]
        assert (sitelik > 0).all(), "Underflow:\n{0}\n{1}".format(
            sitelik, self.underflowlogscale)
        self.siteloglik = scipy.log(sitelik) + self.underflowlogscale
        self.loglik = scipy.sum(self.siteloglik) + self.model.logprior
        if self.dparamscurrent:
            # Derivatives with respect to each free model parameter.
            self._dloglik = {}
            for param in self.model.freeparams:
                if self._distributionmodel and (param in
                        self.model.distributionparams):
                    # Distribution parameters act through the
                    # distributed parameter, weighted by the chain rule.
                    name = self.model.distributedparam
                    weighted_dk = (self.model.d_distributionparams[param]
                            * catweights)
                else:
                    name = param
                    weighted_dk = catweights
                dsiteloglik = 0
                # Product rule: d(stationary)*L + dL*stationary,
                # summed over states, weighted per category.
                for k in self._catindices:
                    dsiteloglik += (scipy.sum(
                        self._dstationarystate(k, name) *
                        self.L[rootnode][k] + self.dL[name][rootnode][k] *
                        self._stationarystate(k), axis=-1) *
                        weighted_dk[k])
                dsiteloglik /= sitelik
                self._dloglik[param] = (scipy.sum(dsiteloglik, axis=-1)
                    + self.model.dlogprior(param))
        if self.dtcurrent:
            # Derivatives with respect to branch lengths, stacked over
            # the (sorted) branch keys of self.dL_dt.
            self._dloglik_dt = 0
            dLnroot_dt = scipy.array([self.dL_dt[n2][rootnode] for
                n2 in sorted(self.dL_dt.keys())])
            for k in self._catindices:
                if isinstance(k, int):
                    # Select the single category k from the stacked array.
                    dLnrootk_dt = dLnroot_dt.swapaxes(0, 1)[k]
                else:
                    # k is the all-categories slice (single-category model).
                    assert k == slice(None)
                    dLnrootk_dt = dLnroot_dt
                self._dloglik_dt += catweights[k] * scipy.sum(
                    self._stationarystate(k) *
                    dLnrootk_dt, axis=-1)
            self._dloglik_dt /= sitelik
            self._dloglik_dt = scipy.sum(self._dloglik_dt, axis=-1)
            assert self._dloglik_dt.shape == self.t.shape
constant[Update internal attributes related to likelihood.
Should be called any time branch lengths or model parameters
are changed.
]
variable[rootnode] assign[=] binary_operation[name[self].nnodes - constant[1]]
if name[self]._distributionmodel begin[:]
variable[catweights] assign[=] name[self].model.catweights
variable[undererrstate] assign[=] <ast.IfExp object at 0x7da18f813f40>
with call[name[scipy].errstate, parameter[]] begin[:]
call[name[self].underflowlogscale.fill, parameter[constant[0.0]]]
call[name[self]._computePartialLikelihoods, parameter[]]
variable[sitelik] assign[=] call[name[scipy].zeros, parameter[name[self].nsites]]
assert[call[compare[call[name[self].L][name[rootnode]] greater_or_equal[>=] constant[0]].all, parameter[]]]
for taget[name[k]] in starred[name[self]._catindices] begin[:]
<ast.AugAssign object at 0x7da18f812a10>
assert[call[compare[name[sitelik] greater[>] constant[0]].all, parameter[]]]
name[self].siteloglik assign[=] binary_operation[call[name[scipy].log, parameter[name[sitelik]]] + name[self].underflowlogscale]
name[self].loglik assign[=] binary_operation[call[name[scipy].sum, parameter[name[self].siteloglik]] + name[self].model.logprior]
if name[self].dparamscurrent begin[:]
name[self]._dloglik assign[=] dictionary[[], []]
for taget[name[param]] in starred[name[self].model.freeparams] begin[:]
if <ast.BoolOp object at 0x7da18f811ea0> begin[:]
variable[name] assign[=] name[self].model.distributedparam
variable[weighted_dk] assign[=] binary_operation[call[name[self].model.d_distributionparams][name[param]] * name[catweights]]
variable[dsiteloglik] assign[=] constant[0]
for taget[name[k]] in starred[name[self]._catindices] begin[:]
<ast.AugAssign object at 0x7da18f811c60>
<ast.AugAssign object at 0x7da18f811f30>
call[name[self]._dloglik][name[param]] assign[=] binary_operation[call[name[scipy].sum, parameter[name[dsiteloglik]]] + call[name[self].model.dlogprior, parameter[name[param]]]]
if name[self].dtcurrent begin[:]
name[self]._dloglik_dt assign[=] constant[0]
variable[dLnroot_dt] assign[=] call[name[scipy].array, parameter[<ast.ListComp object at 0x7da18f8113f0>]]
for taget[name[k]] in starred[name[self]._catindices] begin[:]
if call[name[isinstance], parameter[name[k], name[int]]] begin[:]
variable[dLnrootk_dt] assign[=] call[call[name[dLnroot_dt].swapaxes, parameter[constant[0], constant[1]]]][name[k]]
<ast.AugAssign object at 0x7da18f8100a0>
<ast.AugAssign object at 0x7da1b0bdbe50>
name[self]._dloglik_dt assign[=] call[name[scipy].sum, parameter[name[self]._dloglik_dt]]
assert[compare[name[self]._dloglik_dt.shape equal[==] name[self].t.shape]] | keyword[def] identifier[_updateInternals] ( identifier[self] ):
literal[string]
identifier[rootnode] = identifier[self] . identifier[nnodes] - literal[int]
keyword[if] identifier[self] . identifier[_distributionmodel] :
identifier[catweights] = identifier[self] . identifier[model] . identifier[catweights]
keyword[else] :
identifier[catweights] = identifier[scipy] . identifier[ones] ( literal[int] , identifier[dtype] = literal[string] )
identifier[undererrstate] = literal[string] keyword[if] identifier[len] ( identifier[catweights] )> literal[int] keyword[else] literal[string]
keyword[with] identifier[scipy] . identifier[errstate] ( identifier[over] = literal[string] , identifier[under] = identifier[undererrstate] ,
identifier[divide] = literal[string] , identifier[invalid] = literal[string] ):
identifier[self] . identifier[underflowlogscale] . identifier[fill] ( literal[int] )
identifier[self] . identifier[_computePartialLikelihoods] ()
identifier[sitelik] = identifier[scipy] . identifier[zeros] ( identifier[self] . identifier[nsites] , identifier[dtype] = literal[string] )
keyword[assert] ( identifier[self] . identifier[L] [ identifier[rootnode] ]>= literal[int] ). identifier[all] (), identifier[str] ( identifier[self] . identifier[L] [ identifier[rootnode] ])
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_catindices] :
identifier[sitelik] += identifier[scipy] . identifier[sum] ( identifier[self] . identifier[_stationarystate] ( identifier[k] )*
identifier[self] . identifier[L] [ identifier[rootnode] ][ identifier[k] ], identifier[axis] = literal[int] )* identifier[catweights] [ identifier[k] ]
keyword[assert] ( identifier[sitelik] > literal[int] ). identifier[all] (), literal[string] . identifier[format] (
identifier[sitelik] , identifier[self] . identifier[underflowlogscale] )
identifier[self] . identifier[siteloglik] = identifier[scipy] . identifier[log] ( identifier[sitelik] )+ identifier[self] . identifier[underflowlogscale]
identifier[self] . identifier[loglik] = identifier[scipy] . identifier[sum] ( identifier[self] . identifier[siteloglik] )+ identifier[self] . identifier[model] . identifier[logprior]
keyword[if] identifier[self] . identifier[dparamscurrent] :
identifier[self] . identifier[_dloglik] ={}
keyword[for] identifier[param] keyword[in] identifier[self] . identifier[model] . identifier[freeparams] :
keyword[if] identifier[self] . identifier[_distributionmodel] keyword[and] ( identifier[param] keyword[in]
identifier[self] . identifier[model] . identifier[distributionparams] ):
identifier[name] = identifier[self] . identifier[model] . identifier[distributedparam]
identifier[weighted_dk] =( identifier[self] . identifier[model] . identifier[d_distributionparams] [ identifier[param] ]
* identifier[catweights] )
keyword[else] :
identifier[name] = identifier[param]
identifier[weighted_dk] = identifier[catweights]
identifier[dsiteloglik] = literal[int]
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_catindices] :
identifier[dsiteloglik] +=( identifier[scipy] . identifier[sum] (
identifier[self] . identifier[_dstationarystate] ( identifier[k] , identifier[name] )*
identifier[self] . identifier[L] [ identifier[rootnode] ][ identifier[k] ]+ identifier[self] . identifier[dL] [ identifier[name] ][ identifier[rootnode] ][ identifier[k] ]*
identifier[self] . identifier[_stationarystate] ( identifier[k] ), identifier[axis] =- literal[int] )*
identifier[weighted_dk] [ identifier[k] ])
identifier[dsiteloglik] /= identifier[sitelik]
identifier[self] . identifier[_dloglik] [ identifier[param] ]=( identifier[scipy] . identifier[sum] ( identifier[dsiteloglik] , identifier[axis] =- literal[int] )
+ identifier[self] . identifier[model] . identifier[dlogprior] ( identifier[param] ))
keyword[if] identifier[self] . identifier[dtcurrent] :
identifier[self] . identifier[_dloglik_dt] = literal[int]
identifier[dLnroot_dt] = identifier[scipy] . identifier[array] ([ identifier[self] . identifier[dL_dt] [ identifier[n2] ][ identifier[rootnode] ] keyword[for]
identifier[n2] keyword[in] identifier[sorted] ( identifier[self] . identifier[dL_dt] . identifier[keys] ())])
keyword[for] identifier[k] keyword[in] identifier[self] . identifier[_catindices] :
keyword[if] identifier[isinstance] ( identifier[k] , identifier[int] ):
identifier[dLnrootk_dt] = identifier[dLnroot_dt] . identifier[swapaxes] ( literal[int] , literal[int] )[ identifier[k] ]
keyword[else] :
keyword[assert] identifier[k] == identifier[slice] ( keyword[None] )
identifier[dLnrootk_dt] = identifier[dLnroot_dt]
identifier[self] . identifier[_dloglik_dt] += identifier[catweights] [ identifier[k] ]* identifier[scipy] . identifier[sum] (
identifier[self] . identifier[_stationarystate] ( identifier[k] )*
identifier[dLnrootk_dt] , identifier[axis] =- literal[int] )
identifier[self] . identifier[_dloglik_dt] /= identifier[sitelik]
identifier[self] . identifier[_dloglik_dt] = identifier[scipy] . identifier[sum] ( identifier[self] . identifier[_dloglik_dt] , identifier[axis] =- literal[int] )
keyword[assert] identifier[self] . identifier[_dloglik_dt] . identifier[shape] == identifier[self] . identifier[t] . identifier[shape] | def _updateInternals(self):
"""Update internal attributes related to likelihood.
Should be called any time branch lengths or model parameters
are changed.
"""
rootnode = self.nnodes - 1
if self._distributionmodel:
catweights = self.model.catweights # depends on [control=['if'], data=[]]
else:
catweights = scipy.ones(1, dtype='float')
# When there are multiple categories, it is acceptable
# for some (but not all) of them to have underflow at
# any given site. Note that we still include a check for
# Underflow by ensuring that none of the site likelihoods is
# zero.
undererrstate = 'ignore' if len(catweights) > 1 else 'raise'
with scipy.errstate(over='raise', under=undererrstate, divide='raise', invalid='raise'):
self.underflowlogscale.fill(0.0)
self._computePartialLikelihoods()
sitelik = scipy.zeros(self.nsites, dtype='float')
assert (self.L[rootnode] >= 0).all(), str(self.L[rootnode])
for k in self._catindices:
sitelik += scipy.sum(self._stationarystate(k) * self.L[rootnode][k], axis=1) * catweights[k] # depends on [control=['for'], data=['k']]
assert (sitelik > 0).all(), 'Underflow:\n{0}\n{1}'.format(sitelik, self.underflowlogscale)
self.siteloglik = scipy.log(sitelik) + self.underflowlogscale
self.loglik = scipy.sum(self.siteloglik) + self.model.logprior
if self.dparamscurrent:
self._dloglik = {}
for param in self.model.freeparams:
if self._distributionmodel and param in self.model.distributionparams:
name = self.model.distributedparam
weighted_dk = self.model.d_distributionparams[param] * catweights # depends on [control=['if'], data=[]]
else:
name = param
weighted_dk = catweights
dsiteloglik = 0
for k in self._catindices:
dsiteloglik += scipy.sum(self._dstationarystate(k, name) * self.L[rootnode][k] + self.dL[name][rootnode][k] * self._stationarystate(k), axis=-1) * weighted_dk[k] # depends on [control=['for'], data=['k']]
dsiteloglik /= sitelik
self._dloglik[param] = scipy.sum(dsiteloglik, axis=-1) + self.model.dlogprior(param) # depends on [control=['for'], data=['param']] # depends on [control=['if'], data=[]]
if self.dtcurrent:
self._dloglik_dt = 0
dLnroot_dt = scipy.array([self.dL_dt[n2][rootnode] for n2 in sorted(self.dL_dt.keys())])
for k in self._catindices:
if isinstance(k, int):
dLnrootk_dt = dLnroot_dt.swapaxes(0, 1)[k] # depends on [control=['if'], data=[]]
else:
assert k == slice(None)
dLnrootk_dt = dLnroot_dt
self._dloglik_dt += catweights[k] * scipy.sum(self._stationarystate(k) * dLnrootk_dt, axis=-1) # depends on [control=['for'], data=['k']]
self._dloglik_dt /= sitelik
self._dloglik_dt = scipy.sum(self._dloglik_dt, axis=-1)
assert self._dloglik_dt.shape == self.t.shape # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
def attach_(dev=None):
    '''
    Attach a backing device to a cache set.
    If no dev is given, all backing devices will be attached.
    CLI example:
    .. code-block:: bash
        salt '*' bcache.attach sdc
        salt '*' bcache.attach /dev/bcache1
    :return: bool or None if nuttin' happened
    '''
    cache = uuid()
    if not cache:
        log.error('No cache to attach %s to', dev)
        return False
    if dev is None:
        # No device given: recurse over every backing device that
        # reports a cache, collecting per-device results.
        attached = {}
        for bdev, bdata in status(alldevs=True).items():
            if 'cache' in bdata:
                attached[bdev] = attach_(bdev)
        # Empty result dict means nothing happened.
        return attached or None
    current = uuid(dev)
    if current == cache:
        log.info('%s is already attached to bcache %s, doing nothing', dev, cache)
        return None
    # Attached to a different cache set: detach first.
    if current and not detach(dev):
        return False
    log.debug('Attaching %s to bcache %s', dev, cache)
    if not _bcsys(dev, 'attach', cache,
                  'error', 'Error attaching {0} to bcache {1}'.format(dev, cache)):
        return False
    return _wait(lambda: uuid(dev) == cache,
                 'error', '{0} received attach to bcache {1}, but did not comply'.format(dev, cache))
constant[
Attach a backing devices to a cache set
If no dev is given, all backing devices will be attached.
CLI example:
.. code-block:: bash
salt '*' bcache.attach sdc
salt '*' bcache.attach /dev/bcache1
:return: bool or None if nuttin' happened
]
variable[cache] assign[=] call[name[uuid], parameter[]]
if <ast.UnaryOp object at 0x7da204566b30> begin[:]
call[name[log].error, parameter[constant[No cache to attach %s to], name[dev]]]
return[constant[False]]
if compare[name[dev] is constant[None]] begin[:]
variable[res] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da204564850>, <ast.Name object at 0x7da204564460>]]] in starred[call[call[name[status], parameter[]].items, parameter[]]] begin[:]
if compare[constant[cache] in name[data]] begin[:]
call[name[res]][name[dev]] assign[=] call[name[attach_], parameter[name[dev]]]
return[<ast.IfExp object at 0x7da204564fa0>]
variable[bcache] assign[=] call[name[uuid], parameter[name[dev]]]
if name[bcache] begin[:]
if compare[name[bcache] equal[==] name[cache]] begin[:]
call[name[log].info, parameter[constant[%s is already attached to bcache %s, doing nothing], name[dev], name[cache]]]
return[constant[None]]
call[name[log].debug, parameter[constant[Attaching %s to bcache %s], name[dev], name[cache]]]
if <ast.UnaryOp object at 0x7da204565090> begin[:]
return[constant[False]]
return[call[name[_wait], parameter[<ast.Lambda object at 0x7da2046236d0>, constant[error], call[constant[{0} received attach to bcache {1}, but did not comply].format, parameter[name[dev], name[cache]]]]]] | keyword[def] identifier[attach_] ( identifier[dev] = keyword[None] ):
literal[string]
identifier[cache] = identifier[uuid] ()
keyword[if] keyword[not] identifier[cache] :
identifier[log] . identifier[error] ( literal[string] , identifier[dev] )
keyword[return] keyword[False]
keyword[if] identifier[dev] keyword[is] keyword[None] :
identifier[res] ={}
keyword[for] identifier[dev] , identifier[data] keyword[in] identifier[status] ( identifier[alldevs] = keyword[True] ). identifier[items] ():
keyword[if] literal[string] keyword[in] identifier[data] :
identifier[res] [ identifier[dev] ]= identifier[attach_] ( identifier[dev] )
keyword[return] identifier[res] keyword[if] identifier[res] keyword[else] keyword[None]
identifier[bcache] = identifier[uuid] ( identifier[dev] )
keyword[if] identifier[bcache] :
keyword[if] identifier[bcache] == identifier[cache] :
identifier[log] . identifier[info] ( literal[string] , identifier[dev] , identifier[cache] )
keyword[return] keyword[None]
keyword[elif] keyword[not] identifier[detach] ( identifier[dev] ):
keyword[return] keyword[False]
identifier[log] . identifier[debug] ( literal[string] , identifier[dev] , identifier[cache] )
keyword[if] keyword[not] identifier[_bcsys] ( identifier[dev] , literal[string] , identifier[cache] ,
literal[string] , literal[string] . identifier[format] ( identifier[dev] , identifier[cache] )):
keyword[return] keyword[False]
keyword[return] identifier[_wait] ( keyword[lambda] : identifier[uuid] ( identifier[dev] )== identifier[cache] ,
literal[string] , literal[string] . identifier[format] ( identifier[dev] , identifier[cache] )) | def attach_(dev=None):
"""
Attach a backing devices to a cache set
If no dev is given, all backing devices will be attached.
CLI example:
.. code-block:: bash
salt '*' bcache.attach sdc
salt '*' bcache.attach /dev/bcache1
:return: bool or None if nuttin' happened
"""
cache = uuid()
if not cache:
log.error('No cache to attach %s to', dev)
return False # depends on [control=['if'], data=[]]
if dev is None:
res = {}
for (dev, data) in status(alldevs=True).items():
if 'cache' in data:
res[dev] = attach_(dev) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return res if res else None # depends on [control=['if'], data=['dev']]
bcache = uuid(dev)
if bcache:
if bcache == cache:
log.info('%s is already attached to bcache %s, doing nothing', dev, cache)
return None # depends on [control=['if'], data=['cache']]
elif not detach(dev):
return False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
log.debug('Attaching %s to bcache %s', dev, cache)
if not _bcsys(dev, 'attach', cache, 'error', 'Error attaching {0} to bcache {1}'.format(dev, cache)):
return False # depends on [control=['if'], data=[]]
return _wait(lambda : uuid(dev) == cache, 'error', '{0} received attach to bcache {1}, but did not comply'.format(dev, cache)) |
def flush(self, parser):
""" Flush all current commands to the GLIR interpreter.
"""
if self._verbose:
show = self._verbose if isinstance(self._verbose, str) else None
self.show(show)
parser.parse(self._filter(self.clear(), parser)) | def function[flush, parameter[self, parser]]:
constant[ Flush all current commands to the GLIR interpreter.
]
if name[self]._verbose begin[:]
variable[show] assign[=] <ast.IfExp object at 0x7da1b0fb01f0>
call[name[self].show, parameter[name[show]]]
call[name[parser].parse, parameter[call[name[self]._filter, parameter[call[name[self].clear, parameter[]], name[parser]]]]] | keyword[def] identifier[flush] ( identifier[self] , identifier[parser] ):
literal[string]
keyword[if] identifier[self] . identifier[_verbose] :
identifier[show] = identifier[self] . identifier[_verbose] keyword[if] identifier[isinstance] ( identifier[self] . identifier[_verbose] , identifier[str] ) keyword[else] keyword[None]
identifier[self] . identifier[show] ( identifier[show] )
identifier[parser] . identifier[parse] ( identifier[self] . identifier[_filter] ( identifier[self] . identifier[clear] (), identifier[parser] )) | def flush(self, parser):
""" Flush all current commands to the GLIR interpreter.
"""
if self._verbose:
show = self._verbose if isinstance(self._verbose, str) else None
self.show(show) # depends on [control=['if'], data=[]]
parser.parse(self._filter(self.clear(), parser)) |
def find_by_id(self, _id, **kwargs):
"""
Pass me anything that looks like an _id : str, ObjectId, {"_id": str}, {"_id": ObjectId}
"""
if type(_id) == dict and _id.get("_id"):
return self.find_one({"_id": ObjectId(_id["_id"])}, **kwargs)
return self.find_one({"_id": ObjectId(_id)}, **kwargs) | def function[find_by_id, parameter[self, _id]]:
constant[
Pass me anything that looks like an _id : str, ObjectId, {"_id": str}, {"_id": ObjectId}
]
if <ast.BoolOp object at 0x7da1b264a020> begin[:]
return[call[name[self].find_one, parameter[dictionary[[<ast.Constant object at 0x7da1b265fca0>], [<ast.Call object at 0x7da1b265ecb0>]]]]]
return[call[name[self].find_one, parameter[dictionary[[<ast.Constant object at 0x7da1b265e470>], [<ast.Call object at 0x7da1b265e1a0>]]]]] | keyword[def] identifier[find_by_id] ( identifier[self] , identifier[_id] ,** identifier[kwargs] ):
literal[string]
keyword[if] identifier[type] ( identifier[_id] )== identifier[dict] keyword[and] identifier[_id] . identifier[get] ( literal[string] ):
keyword[return] identifier[self] . identifier[find_one] ({ literal[string] : identifier[ObjectId] ( identifier[_id] [ literal[string] ])},** identifier[kwargs] )
keyword[return] identifier[self] . identifier[find_one] ({ literal[string] : identifier[ObjectId] ( identifier[_id] )},** identifier[kwargs] ) | def find_by_id(self, _id, **kwargs):
"""
Pass me anything that looks like an _id : str, ObjectId, {"_id": str}, {"_id": ObjectId}
"""
if type(_id) == dict and _id.get('_id'):
return self.find_one({'_id': ObjectId(_id['_id'])}, **kwargs) # depends on [control=['if'], data=[]]
return self.find_one({'_id': ObjectId(_id)}, **kwargs) |
def _on_decisions_event(self, event=None, **kwargs):
"""Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel.
"""
if not self.ran_ready_function:
logger.warning('ignoring decision from {} before when_all_players_ready: {}'.format(event.participant.code, event.value))
return
with track('_on_decisions_event'):
self.group_decisions[event.participant.code] = event.value
self._group_decisions_updated = True
self.save(update_fields=['group_decisions', '_group_decisions_updated'])
if not self.num_subperiods() and not self.rate_limit():
self.send('group_decisions', self.group_decisions) | def function[_on_decisions_event, parameter[self, event]]:
constant[Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel.
]
if <ast.UnaryOp object at 0x7da20c76f010> begin[:]
call[name[logger].warning, parameter[call[constant[ignoring decision from {} before when_all_players_ready: {}].format, parameter[name[event].participant.code, name[event].value]]]]
return[None]
with call[name[track], parameter[constant[_on_decisions_event]]] begin[:]
call[name[self].group_decisions][name[event].participant.code] assign[=] name[event].value
name[self]._group_decisions_updated assign[=] constant[True]
call[name[self].save, parameter[]]
if <ast.BoolOp object at 0x7da20c76cb20> begin[:]
call[name[self].send, parameter[constant[group_decisions], name[self].group_decisions]] | keyword[def] identifier[_on_decisions_event] ( identifier[self] , identifier[event] = keyword[None] ,** identifier[kwargs] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[ran_ready_function] :
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[event] . identifier[participant] . identifier[code] , identifier[event] . identifier[value] ))
keyword[return]
keyword[with] identifier[track] ( literal[string] ):
identifier[self] . identifier[group_decisions] [ identifier[event] . identifier[participant] . identifier[code] ]= identifier[event] . identifier[value]
identifier[self] . identifier[_group_decisions_updated] = keyword[True]
identifier[self] . identifier[save] ( identifier[update_fields] =[ literal[string] , literal[string] ])
keyword[if] keyword[not] identifier[self] . identifier[num_subperiods] () keyword[and] keyword[not] identifier[self] . identifier[rate_limit] ():
identifier[self] . identifier[send] ( literal[string] , identifier[self] . identifier[group_decisions] ) | def _on_decisions_event(self, event=None, **kwargs):
"""Called when an Event is received on the decisions channel. Saves
the value in group_decisions. If num_subperiods is None, immediately
broadcasts the event back out on the group_decisions channel.
"""
if not self.ran_ready_function:
logger.warning('ignoring decision from {} before when_all_players_ready: {}'.format(event.participant.code, event.value))
return # depends on [control=['if'], data=[]]
with track('_on_decisions_event'):
self.group_decisions[event.participant.code] = event.value
self._group_decisions_updated = True
self.save(update_fields=['group_decisions', '_group_decisions_updated'])
if not self.num_subperiods() and (not self.rate_limit()):
self.send('group_decisions', self.group_decisions) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] |
async def set_digital_latch(self, pin, threshold_value, cb=None,
cb_type=None):
"""
This method "arms" a digital pin for its data to be latched and
saved in the latching table
If a callback method is provided, when latching criteria is achieved,
the callback function is called with latching data notification.
Data returned in the callback list has the pin number as the
first element,
:param pin: Digital pin number
:param threshold_value: 0 or 1
:param cb: callback function
:param cb_type: Constants.CB_TYPE_DIRECT = direct call or
Constants.CB_TYPE_ASYNCIO = asyncio coroutine
:returns: True if successful, False if parameter data is invalid
"""
if 0 <= threshold_value <= 1:
key = 'D' + str(pin)
self.latch_map[key] = [Constants.LATCH_ARMED, Constants.LATCH_EQ,
threshold_value, 0, 0, cb, cb_type]
return True
else:
return False | <ast.AsyncFunctionDef object at 0x7da18dc98910> | keyword[async] keyword[def] identifier[set_digital_latch] ( identifier[self] , identifier[pin] , identifier[threshold_value] , identifier[cb] = keyword[None] ,
identifier[cb_type] = keyword[None] ):
literal[string]
keyword[if] literal[int] <= identifier[threshold_value] <= literal[int] :
identifier[key] = literal[string] + identifier[str] ( identifier[pin] )
identifier[self] . identifier[latch_map] [ identifier[key] ]=[ identifier[Constants] . identifier[LATCH_ARMED] , identifier[Constants] . identifier[LATCH_EQ] ,
identifier[threshold_value] , literal[int] , literal[int] , identifier[cb] , identifier[cb_type] ]
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | async def set_digital_latch(self, pin, threshold_value, cb=None, cb_type=None):
"""
This method "arms" a digital pin for its data to be latched and
saved in the latching table
If a callback method is provided, when latching criteria is achieved,
the callback function is called with latching data notification.
Data returned in the callback list has the pin number as the
first element,
:param pin: Digital pin number
:param threshold_value: 0 or 1
:param cb: callback function
:param cb_type: Constants.CB_TYPE_DIRECT = direct call or
Constants.CB_TYPE_ASYNCIO = asyncio coroutine
:returns: True if successful, False if parameter data is invalid
"""
if 0 <= threshold_value <= 1:
key = 'D' + str(pin)
self.latch_map[key] = [Constants.LATCH_ARMED, Constants.LATCH_EQ, threshold_value, 0, 0, cb, cb_type]
return True # depends on [control=['if'], data=['threshold_value']]
else:
return False |
def fmt_ac_sia(ac_sia):
"""Format a AcSystemIrreducibilityAnalysis."""
body = (
'{ALPHA} = {alpha}\n'
'direction: {ac_sia.direction}\n'
'transition: {ac_sia.transition}\n'
'before state: {ac_sia.before_state}\n'
'after state: {ac_sia.after_state}\n'
'cut:\n{ac_sia.cut}\n'
'{account}\n'
'{partitioned_account}'.format(
ALPHA=ALPHA,
alpha=round(ac_sia.alpha, 4),
ac_sia=ac_sia,
account=fmt_account(
ac_sia.account, 'Account'),
partitioned_account=fmt_account(
ac_sia.partitioned_account, 'Partitioned Account')))
return box(header('AcSystemIrreducibilityAnalysis',
body,
under_char=HORIZONTAL_BAR)) | def function[fmt_ac_sia, parameter[ac_sia]]:
constant[Format a AcSystemIrreducibilityAnalysis.]
variable[body] assign[=] call[constant[{ALPHA} = {alpha}
direction: {ac_sia.direction}
transition: {ac_sia.transition}
before state: {ac_sia.before_state}
after state: {ac_sia.after_state}
cut:
{ac_sia.cut}
{account}
{partitioned_account}].format, parameter[]]
return[call[name[box], parameter[call[name[header], parameter[constant[AcSystemIrreducibilityAnalysis], name[body]]]]]] | keyword[def] identifier[fmt_ac_sia] ( identifier[ac_sia] ):
literal[string]
identifier[body] =(
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string]
literal[string] . identifier[format] (
identifier[ALPHA] = identifier[ALPHA] ,
identifier[alpha] = identifier[round] ( identifier[ac_sia] . identifier[alpha] , literal[int] ),
identifier[ac_sia] = identifier[ac_sia] ,
identifier[account] = identifier[fmt_account] (
identifier[ac_sia] . identifier[account] , literal[string] ),
identifier[partitioned_account] = identifier[fmt_account] (
identifier[ac_sia] . identifier[partitioned_account] , literal[string] )))
keyword[return] identifier[box] ( identifier[header] ( literal[string] ,
identifier[body] ,
identifier[under_char] = identifier[HORIZONTAL_BAR] )) | def fmt_ac_sia(ac_sia):
"""Format a AcSystemIrreducibilityAnalysis."""
body = '{ALPHA} = {alpha}\ndirection: {ac_sia.direction}\ntransition: {ac_sia.transition}\nbefore state: {ac_sia.before_state}\nafter state: {ac_sia.after_state}\ncut:\n{ac_sia.cut}\n{account}\n{partitioned_account}'.format(ALPHA=ALPHA, alpha=round(ac_sia.alpha, 4), ac_sia=ac_sia, account=fmt_account(ac_sia.account, 'Account'), partitioned_account=fmt_account(ac_sia.partitioned_account, 'Partitioned Account'))
return box(header('AcSystemIrreducibilityAnalysis', body, under_char=HORIZONTAL_BAR)) |
def _names(lexer):
"""Return a tuple of names."""
first = _expect_token(lexer, {NameToken}).value
rest = _zom_name(lexer)
rnames = (first, ) + rest
return rnames[::-1] | def function[_names, parameter[lexer]]:
constant[Return a tuple of names.]
variable[first] assign[=] call[name[_expect_token], parameter[name[lexer], <ast.Set object at 0x7da1b0c92260>]].value
variable[rest] assign[=] call[name[_zom_name], parameter[name[lexer]]]
variable[rnames] assign[=] binary_operation[tuple[[<ast.Name object at 0x7da1b0c91780>]] + name[rest]]
return[call[name[rnames]][<ast.Slice object at 0x7da1b0c91bd0>]] | keyword[def] identifier[_names] ( identifier[lexer] ):
literal[string]
identifier[first] = identifier[_expect_token] ( identifier[lexer] ,{ identifier[NameToken] }). identifier[value]
identifier[rest] = identifier[_zom_name] ( identifier[lexer] )
identifier[rnames] =( identifier[first] ,)+ identifier[rest]
keyword[return] identifier[rnames] [::- literal[int] ] | def _names(lexer):
"""Return a tuple of names."""
first = _expect_token(lexer, {NameToken}).value
rest = _zom_name(lexer)
rnames = (first,) + rest
return rnames[::-1] |
def start(self):
"""
Start the connection. This should be called after all
listeners have been registered. If this method is not called,
no frames will be received by the connection.
"""
self.running = True
self.attempt_connection()
receiver_thread = self.create_thread_fc(self.__receiver_loop)
receiver_thread.name = "StompReceiver%s" % getattr(receiver_thread, "name", "Thread")
self.notify('connecting') | def function[start, parameter[self]]:
constant[
Start the connection. This should be called after all
listeners have been registered. If this method is not called,
no frames will be received by the connection.
]
name[self].running assign[=] constant[True]
call[name[self].attempt_connection, parameter[]]
variable[receiver_thread] assign[=] call[name[self].create_thread_fc, parameter[name[self].__receiver_loop]]
name[receiver_thread].name assign[=] binary_operation[constant[StompReceiver%s] <ast.Mod object at 0x7da2590d6920> call[name[getattr], parameter[name[receiver_thread], constant[name], constant[Thread]]]]
call[name[self].notify, parameter[constant[connecting]]] | keyword[def] identifier[start] ( identifier[self] ):
literal[string]
identifier[self] . identifier[running] = keyword[True]
identifier[self] . identifier[attempt_connection] ()
identifier[receiver_thread] = identifier[self] . identifier[create_thread_fc] ( identifier[self] . identifier[__receiver_loop] )
identifier[receiver_thread] . identifier[name] = literal[string] % identifier[getattr] ( identifier[receiver_thread] , literal[string] , literal[string] )
identifier[self] . identifier[notify] ( literal[string] ) | def start(self):
"""
Start the connection. This should be called after all
listeners have been registered. If this method is not called,
no frames will be received by the connection.
"""
self.running = True
self.attempt_connection()
receiver_thread = self.create_thread_fc(self.__receiver_loop)
receiver_thread.name = 'StompReceiver%s' % getattr(receiver_thread, 'name', 'Thread')
self.notify('connecting') |
def alias(self, person, identity, path=KISSmetrics.ALIAS_PATH):
"""Map `person` to `identity`; actions done by one resolve to other.
:param person: consider as same individual ``identity``; the
source of the alias operation
:type person: str or unicode
:param identity: consider as an alias of ``person``; the target
of the alias operation
:type identity: str or unicode
:param path: HTTP endpoint to use; defaults to
``KISSmetrics.ALIAS_PATH``
:returns: an HTTP response for the request
:rtype: `urllib3.response.HTTPResponse`
Note the direction of the mapping is ``person`` to ``identity``
(so "``person`` is also known as ``identity``" or "``person`` =>
``identity``" when looking at it as "``<source>`` => ``<target>``")
When consulting the Aliasing documentation, `person` corresponds
to ``query_string.PERSON_PARAM`` and `identity` corresponds to
``query_string.ALIAS_PARAM``.
Aliasing is not a reversible operation. When aliasing to an
identity, take care not to use a session identifier or any other
value that is not relatively stable (a value that will not
change per request or per session).
For more information see the API Specifications on `Aliasing
<http://support.kissmetrics.com/apis/specifications.html#aliasing-users>`_.
"""
this_request = request.alias(self.key, person, identity,
scheme=self.trk_scheme,
host=self.trk_host, path=path)
return self._request(this_request) | def function[alias, parameter[self, person, identity, path]]:
constant[Map `person` to `identity`; actions done by one resolve to other.
:param person: consider as same individual ``identity``; the
source of the alias operation
:type person: str or unicode
:param identity: consider as an alias of ``person``; the target
of the alias operation
:type identity: str or unicode
:param path: HTTP endpoint to use; defaults to
``KISSmetrics.ALIAS_PATH``
:returns: an HTTP response for the request
:rtype: `urllib3.response.HTTPResponse`
Note the direction of the mapping is ``person`` to ``identity``
(so "``person`` is also known as ``identity``" or "``person`` =>
``identity``" when looking at it as "``<source>`` => ``<target>``")
When consulting the Aliasing documentation, `person` corresponds
to ``query_string.PERSON_PARAM`` and `identity` corresponds to
``query_string.ALIAS_PARAM``.
Aliasing is not a reversible operation. When aliasing to an
identity, take care not to use a session identifier or any other
value that is not relatively stable (a value that will not
change per request or per session).
For more information see the API Specifications on `Aliasing
<http://support.kissmetrics.com/apis/specifications.html#aliasing-users>`_.
]
variable[this_request] assign[=] call[name[request].alias, parameter[name[self].key, name[person], name[identity]]]
return[call[name[self]._request, parameter[name[this_request]]]] | keyword[def] identifier[alias] ( identifier[self] , identifier[person] , identifier[identity] , identifier[path] = identifier[KISSmetrics] . identifier[ALIAS_PATH] ):
literal[string]
identifier[this_request] = identifier[request] . identifier[alias] ( identifier[self] . identifier[key] , identifier[person] , identifier[identity] ,
identifier[scheme] = identifier[self] . identifier[trk_scheme] ,
identifier[host] = identifier[self] . identifier[trk_host] , identifier[path] = identifier[path] )
keyword[return] identifier[self] . identifier[_request] ( identifier[this_request] ) | def alias(self, person, identity, path=KISSmetrics.ALIAS_PATH):
"""Map `person` to `identity`; actions done by one resolve to other.
:param person: consider as same individual ``identity``; the
source of the alias operation
:type person: str or unicode
:param identity: consider as an alias of ``person``; the target
of the alias operation
:type identity: str or unicode
:param path: HTTP endpoint to use; defaults to
``KISSmetrics.ALIAS_PATH``
:returns: an HTTP response for the request
:rtype: `urllib3.response.HTTPResponse`
Note the direction of the mapping is ``person`` to ``identity``
(so "``person`` is also known as ``identity``" or "``person`` =>
``identity``" when looking at it as "``<source>`` => ``<target>``")
When consulting the Aliasing documentation, `person` corresponds
to ``query_string.PERSON_PARAM`` and `identity` corresponds to
``query_string.ALIAS_PARAM``.
Aliasing is not a reversible operation. When aliasing to an
identity, take care not to use a session identifier or any other
value that is not relatively stable (a value that will not
change per request or per session).
For more information see the API Specifications on `Aliasing
<http://support.kissmetrics.com/apis/specifications.html#aliasing-users>`_.
"""
this_request = request.alias(self.key, person, identity, scheme=self.trk_scheme, host=self.trk_host, path=path)
return self._request(this_request) |
def GetFormatStringAttributeNames(self):
"""Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
"""
if self._format_string_attribute_names is None:
self._format_string_attribute_names = (
self._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall(
self.FORMAT_STRING))
return set(self._format_string_attribute_names) | def function[GetFormatStringAttributeNames, parameter[self]]:
constant[Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
]
if compare[name[self]._format_string_attribute_names is constant[None]] begin[:]
name[self]._format_string_attribute_names assign[=] call[name[self]._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall, parameter[name[self].FORMAT_STRING]]
return[call[name[set], parameter[name[self]._format_string_attribute_names]]] | keyword[def] identifier[GetFormatStringAttributeNames] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[_format_string_attribute_names] keyword[is] keyword[None] :
identifier[self] . identifier[_format_string_attribute_names] =(
identifier[self] . identifier[_FORMAT_STRING_ATTRIBUTE_NAME_RE] . identifier[findall] (
identifier[self] . identifier[FORMAT_STRING] ))
keyword[return] identifier[set] ( identifier[self] . identifier[_format_string_attribute_names] ) | def GetFormatStringAttributeNames(self):
"""Retrieves the attribute names in the format string.
Returns:
set(str): attribute names.
"""
if self._format_string_attribute_names is None:
self._format_string_attribute_names = self._FORMAT_STRING_ATTRIBUTE_NAME_RE.findall(self.FORMAT_STRING) # depends on [control=['if'], data=[]]
return set(self._format_string_attribute_names) |
def amount(self):
"""
Determine the sum of mole amounts of all the compounds.
:returns: Amount. [kmol]
"""
return sum(self.get_compound_amount(c) for c in self.material.compounds) | def function[amount, parameter[self]]:
constant[
Determine the sum of mole amounts of all the compounds.
:returns: Amount. [kmol]
]
return[call[name[sum], parameter[<ast.GeneratorExp object at 0x7da18f8115d0>]]] | keyword[def] identifier[amount] ( identifier[self] ):
literal[string]
keyword[return] identifier[sum] ( identifier[self] . identifier[get_compound_amount] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[self] . identifier[material] . identifier[compounds] ) | def amount(self):
"""
Determine the sum of mole amounts of all the compounds.
:returns: Amount. [kmol]
"""
return sum((self.get_compound_amount(c) for c in self.material.compounds)) |
def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=None):
"""Return the list of hdf5 trigger files outputs"""
if tags is None:
tags = []
#FIXME, make me not needed
logging.info('convert single inspiral trigger files to hdf5')
make_analysis_dir(out_dir)
trig_files = FileList()
for ifo, insp_group in zip(*xml_trigger_files.categorize_by_attr('ifo')):
trig2hdf_exe = PyCBCTrig2HDFExecutable(workflow.cp, 'trig2hdf',
ifos=ifo, out_dir=out_dir, tags=tags)
_, insp_bundles = insp_group.categorize_by_attr('segment')
for insps in insp_bundles:
trig2hdf_node = trig2hdf_exe.create_node(insps, hdfbank[0])
workflow.add_node(trig2hdf_node)
trig_files += trig2hdf_node.output_files
return trig_files | def function[convert_trig_to_hdf, parameter[workflow, hdfbank, xml_trigger_files, out_dir, tags]]:
constant[Return the list of hdf5 trigger files outputs]
if compare[name[tags] is constant[None]] begin[:]
variable[tags] assign[=] list[[]]
call[name[logging].info, parameter[constant[convert single inspiral trigger files to hdf5]]]
call[name[make_analysis_dir], parameter[name[out_dir]]]
variable[trig_files] assign[=] call[name[FileList], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da2041d8be0>, <ast.Name object at 0x7da2041da9b0>]]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da2041d9030>]]] begin[:]
variable[trig2hdf_exe] assign[=] call[name[PyCBCTrig2HDFExecutable], parameter[name[workflow].cp, constant[trig2hdf]]]
<ast.Tuple object at 0x7da2041d8c70> assign[=] call[name[insp_group].categorize_by_attr, parameter[constant[segment]]]
for taget[name[insps]] in starred[name[insp_bundles]] begin[:]
variable[trig2hdf_node] assign[=] call[name[trig2hdf_exe].create_node, parameter[name[insps], call[name[hdfbank]][constant[0]]]]
call[name[workflow].add_node, parameter[name[trig2hdf_node]]]
<ast.AugAssign object at 0x7da2041dbd00>
return[name[trig_files]] | keyword[def] identifier[convert_trig_to_hdf] ( identifier[workflow] , identifier[hdfbank] , identifier[xml_trigger_files] , identifier[out_dir] , identifier[tags] = keyword[None] ):
literal[string]
keyword[if] identifier[tags] keyword[is] keyword[None] :
identifier[tags] =[]
identifier[logging] . identifier[info] ( literal[string] )
identifier[make_analysis_dir] ( identifier[out_dir] )
identifier[trig_files] = identifier[FileList] ()
keyword[for] identifier[ifo] , identifier[insp_group] keyword[in] identifier[zip] (* identifier[xml_trigger_files] . identifier[categorize_by_attr] ( literal[string] )):
identifier[trig2hdf_exe] = identifier[PyCBCTrig2HDFExecutable] ( identifier[workflow] . identifier[cp] , literal[string] ,
identifier[ifos] = identifier[ifo] , identifier[out_dir] = identifier[out_dir] , identifier[tags] = identifier[tags] )
identifier[_] , identifier[insp_bundles] = identifier[insp_group] . identifier[categorize_by_attr] ( literal[string] )
keyword[for] identifier[insps] keyword[in] identifier[insp_bundles] :
identifier[trig2hdf_node] = identifier[trig2hdf_exe] . identifier[create_node] ( identifier[insps] , identifier[hdfbank] [ literal[int] ])
identifier[workflow] . identifier[add_node] ( identifier[trig2hdf_node] )
identifier[trig_files] += identifier[trig2hdf_node] . identifier[output_files]
keyword[return] identifier[trig_files] | def convert_trig_to_hdf(workflow, hdfbank, xml_trigger_files, out_dir, tags=None):
"""Return the list of hdf5 trigger files outputs"""
if tags is None:
tags = [] # depends on [control=['if'], data=['tags']]
#FIXME, make me not needed
logging.info('convert single inspiral trigger files to hdf5')
make_analysis_dir(out_dir)
trig_files = FileList()
for (ifo, insp_group) in zip(*xml_trigger_files.categorize_by_attr('ifo')):
trig2hdf_exe = PyCBCTrig2HDFExecutable(workflow.cp, 'trig2hdf', ifos=ifo, out_dir=out_dir, tags=tags)
(_, insp_bundles) = insp_group.categorize_by_attr('segment')
for insps in insp_bundles:
trig2hdf_node = trig2hdf_exe.create_node(insps, hdfbank[0])
workflow.add_node(trig2hdf_node)
trig_files += trig2hdf_node.output_files # depends on [control=['for'], data=['insps']] # depends on [control=['for'], data=[]]
return trig_files |
def rescale_mid(x, to=(0, 1), _from=None, mid=0):
"""
Rescale numeric vector to have specified minimum, midpoint,
and maximum.
Parameters
----------
x : array_like | numeric
1D vector of values to manipulate.
to : tuple
output range (numeric vector of length two)
_from : tuple
input range (numeric vector of length two).
If not given, is calculated from the range of x
mid : numeric
mid-point of input range
Returns
-------
out : array_like
Rescaled values
Examples
--------
>>> rescale_mid([1, 2, 3], mid=1)
array([0.5 , 0.75, 1. ])
>>> rescale_mid([1, 2, 3], mid=2)
array([0. , 0.5, 1. ])
"""
array_like = True
try:
len(x)
except TypeError:
array_like = False
x = [x]
if not hasattr(x, 'dtype'):
x = np.asarray(x)
if _from is None:
_from = np.array([np.min(x), np.max(x)])
else:
_from = np.asarray(_from)
if (zero_range(_from) or zero_range(to)):
out = np.repeat(np.mean(to), len(x))
else:
extent = 2 * np.max(np.abs(_from - mid))
out = (x - mid) / extent * np.diff(to) + np.mean(to)
if not array_like:
out = out[0]
return out | def function[rescale_mid, parameter[x, to, _from, mid]]:
constant[
Rescale numeric vector to have specified minimum, midpoint,
and maximum.
Parameters
----------
x : array_like | numeric
1D vector of values to manipulate.
to : tuple
output range (numeric vector of length two)
_from : tuple
input range (numeric vector of length two).
If not given, is calculated from the range of x
mid : numeric
mid-point of input range
Returns
-------
out : array_like
Rescaled values
Examples
--------
>>> rescale_mid([1, 2, 3], mid=1)
array([0.5 , 0.75, 1. ])
>>> rescale_mid([1, 2, 3], mid=2)
array([0. , 0.5, 1. ])
]
variable[array_like] assign[=] constant[True]
<ast.Try object at 0x7da18dc05f90>
if <ast.UnaryOp object at 0x7da18dc05c00> begin[:]
variable[x] assign[=] call[name[np].asarray, parameter[name[x]]]
if compare[name[_from] is constant[None]] begin[:]
variable[_from] assign[=] call[name[np].array, parameter[list[[<ast.Call object at 0x7da18dc06560>, <ast.Call object at 0x7da18dc05fc0>]]]]
if <ast.BoolOp object at 0x7da18dc04700> begin[:]
variable[out] assign[=] call[name[np].repeat, parameter[call[name[np].mean, parameter[name[to]]], call[name[len], parameter[name[x]]]]]
if <ast.UnaryOp object at 0x7da18dc07c70> begin[:]
variable[out] assign[=] call[name[out]][constant[0]]
return[name[out]] | keyword[def] identifier[rescale_mid] ( identifier[x] , identifier[to] =( literal[int] , literal[int] ), identifier[_from] = keyword[None] , identifier[mid] = literal[int] ):
literal[string]
identifier[array_like] = keyword[True]
keyword[try] :
identifier[len] ( identifier[x] )
keyword[except] identifier[TypeError] :
identifier[array_like] = keyword[False]
identifier[x] =[ identifier[x] ]
keyword[if] keyword[not] identifier[hasattr] ( identifier[x] , literal[string] ):
identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] )
keyword[if] identifier[_from] keyword[is] keyword[None] :
identifier[_from] = identifier[np] . identifier[array] ([ identifier[np] . identifier[min] ( identifier[x] ), identifier[np] . identifier[max] ( identifier[x] )])
keyword[else] :
identifier[_from] = identifier[np] . identifier[asarray] ( identifier[_from] )
keyword[if] ( identifier[zero_range] ( identifier[_from] ) keyword[or] identifier[zero_range] ( identifier[to] )):
identifier[out] = identifier[np] . identifier[repeat] ( identifier[np] . identifier[mean] ( identifier[to] ), identifier[len] ( identifier[x] ))
keyword[else] :
identifier[extent] = literal[int] * identifier[np] . identifier[max] ( identifier[np] . identifier[abs] ( identifier[_from] - identifier[mid] ))
identifier[out] =( identifier[x] - identifier[mid] )/ identifier[extent] * identifier[np] . identifier[diff] ( identifier[to] )+ identifier[np] . identifier[mean] ( identifier[to] )
keyword[if] keyword[not] identifier[array_like] :
identifier[out] = identifier[out] [ literal[int] ]
keyword[return] identifier[out] | def rescale_mid(x, to=(0, 1), _from=None, mid=0):
"""
Rescale numeric vector to have specified minimum, midpoint,
and maximum.
Parameters
----------
x : array_like | numeric
1D vector of values to manipulate.
to : tuple
output range (numeric vector of length two)
_from : tuple
input range (numeric vector of length two).
If not given, is calculated from the range of x
mid : numeric
mid-point of input range
Returns
-------
out : array_like
Rescaled values
Examples
--------
>>> rescale_mid([1, 2, 3], mid=1)
array([0.5 , 0.75, 1. ])
>>> rescale_mid([1, 2, 3], mid=2)
array([0. , 0.5, 1. ])
"""
array_like = True
try:
len(x) # depends on [control=['try'], data=[]]
except TypeError:
array_like = False
x = [x] # depends on [control=['except'], data=[]]
if not hasattr(x, 'dtype'):
x = np.asarray(x) # depends on [control=['if'], data=[]]
if _from is None:
_from = np.array([np.min(x), np.max(x)]) # depends on [control=['if'], data=['_from']]
else:
_from = np.asarray(_from)
if zero_range(_from) or zero_range(to):
out = np.repeat(np.mean(to), len(x)) # depends on [control=['if'], data=[]]
else:
extent = 2 * np.max(np.abs(_from - mid))
out = (x - mid) / extent * np.diff(to) + np.mean(to)
if not array_like:
out = out[0] # depends on [control=['if'], data=[]]
return out |
def define_plugin_entries(groups):
    """
    helper to all groups for plugins

    Builds a mapping of entry-point group name -> list of plugin entry
    definitions produced by ``define_plugin_entry`` (a sibling helper
    defined elsewhere in this module).

    :param groups: iterable of ``(group, modules)`` pairs, where ``modules``
        is itself an iterable of ``(module_name, names)`` pairs.
    :return: dict mapping each group name to a flat list of entries.
    """
    result = dict()
    for group, modules in groups:
        tempo = []
        for module_name, names in modules:
            # One entry per exported name within the module.
            tempo.extend([define_plugin_entry(name, module_name)
                          for name in names])
        result[group] = tempo
    return result | def function[define_plugin_entries, parameter[groups]]:
constant[
helper to all groups for plugins
]
variable[result] assign[=] call[name[dict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18f8107f0>, <ast.Name object at 0x7da18f812d40>]]] in starred[name[groups]] begin[:]
variable[tempo] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f811390>, <ast.Name object at 0x7da18f810c40>]]] in starred[name[modules]] begin[:]
call[name[tempo].extend, parameter[<ast.ListComp object at 0x7da18f812380>]]
call[name[result]][name[group]] assign[=] name[tempo]
return[name[result]] | keyword[def] identifier[define_plugin_entries] ( identifier[groups] ):
literal[string]
identifier[result] = identifier[dict] ()
keyword[for] identifier[group] , identifier[modules] keyword[in] identifier[groups] :
identifier[tempo] =[]
keyword[for] identifier[module_name] , identifier[names] keyword[in] identifier[modules] :
identifier[tempo] . identifier[extend] ([ identifier[define_plugin_entry] ( identifier[name] , identifier[module_name] )
keyword[for] identifier[name] keyword[in] identifier[names] ])
identifier[result] [ identifier[group] ]= identifier[tempo]
keyword[return] identifier[result] | def define_plugin_entries(groups):
"""
helper to all groups for plugins
"""
result = dict()
for (group, modules) in groups:
tempo = []
for (module_name, names) in modules:
tempo.extend([define_plugin_entry(name, module_name) for name in names]) # depends on [control=['for'], data=[]]
result[group] = tempo # depends on [control=['for'], data=[]]
return result |
def _update_dprx(self):
    """Update `dprx`, accounting for dependence of `phi` on `beta`."""
    # Start from the base-class derivatives, then add the chain-rule
    # correction for the beta-dependence of the nucleotide frequencies phi.
    super(ExpCM_empirical_phi, self)._update_dprx()
    if 'beta' in self.freeparams:
        # Per-codon sum of (d phi / d beta) / phi over the three codon
        # positions; CODON_NT_INDEX[j] presumably maps each codon to its
        # nucleotide index at position j -- TODO confirm against the
        # module-level constant's definition.
        dphi_over_phi = scipy.zeros(N_CODON, dtype='float')
        for j in range(3):
            dphi_over_phi += (self.dphi_dbeta / self.phi)[CODON_NT_INDEX[j]]
        for r in range(self.nsites):
            # Subtracting the prx-weighted mean centers the correction;
            # assuming prx[r] sums to 1, each row of dprx['beta'] keeps a
            # zero sum -- TODO confirm prx normalization.
            self.dprx['beta'][r] += self.prx[r] * (dphi_over_phi
                    - scipy.dot(dphi_over_phi, self.prx[r])) | def function[_update_dprx, parameter[self]]:
constant[Update `dprx`, accounting for dependence of `phi` on `beta`.]
call[call[name[super], parameter[name[ExpCM_empirical_phi], name[self]]]._update_dprx, parameter[]]
if compare[constant[beta] in name[self].freeparams] begin[:]
variable[dphi_over_phi] assign[=] call[name[scipy].zeros, parameter[name[N_CODON]]]
for taget[name[j]] in starred[call[name[range], parameter[constant[3]]]] begin[:]
<ast.AugAssign object at 0x7da20c76eaa0>
for taget[name[r]] in starred[call[name[range], parameter[name[self].nsites]]] begin[:]
<ast.AugAssign object at 0x7da20c76d990> | keyword[def] identifier[_update_dprx] ( identifier[self] ):
literal[string]
identifier[super] ( identifier[ExpCM_empirical_phi] , identifier[self] ). identifier[_update_dprx] ()
keyword[if] literal[string] keyword[in] identifier[self] . identifier[freeparams] :
identifier[dphi_over_phi] = identifier[scipy] . identifier[zeros] ( identifier[N_CODON] , identifier[dtype] = literal[string] )
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ):
identifier[dphi_over_phi] +=( identifier[self] . identifier[dphi_dbeta] / identifier[self] . identifier[phi] )[ identifier[CODON_NT_INDEX] [ identifier[j] ]]
keyword[for] identifier[r] keyword[in] identifier[range] ( identifier[self] . identifier[nsites] ):
identifier[self] . identifier[dprx] [ literal[string] ][ identifier[r] ]+= identifier[self] . identifier[prx] [ identifier[r] ]*( identifier[dphi_over_phi]
- identifier[scipy] . identifier[dot] ( identifier[dphi_over_phi] , identifier[self] . identifier[prx] [ identifier[r] ])) | def _update_dprx(self):
"""Update `dprx`, accounting for dependence of `phi` on `beta`."""
super(ExpCM_empirical_phi, self)._update_dprx()
if 'beta' in self.freeparams:
dphi_over_phi = scipy.zeros(N_CODON, dtype='float')
for j in range(3):
dphi_over_phi += (self.dphi_dbeta / self.phi)[CODON_NT_INDEX[j]] # depends on [control=['for'], data=['j']]
for r in range(self.nsites):
self.dprx['beta'][r] += self.prx[r] * (dphi_over_phi - scipy.dot(dphi_over_phi, self.prx[r])) # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]] |
def to_string(self, ast_obj=None, fmt: str = "medium") -> str:
    """Convert AST object to string
    Args:
        fmt (str): short, medium, long formatted BEL statements
            short = short function and short relation format
            medium = short function and long relation format
            long = long function and long relation format
        canonicalize
    Returns:
        str: string version of BEL AST
    """
    # NOTE(review): ``ast_obj`` is bound but never read below -- every
    # access goes through ``self``; the parameter looks vestigial.
    if not ast_obj:
        ast_obj = self
    bel_relation = None
    # Map the relation through the spec's short/long lookup tables,
    # falling back to the stored relation string if no mapping exists.
    if self.bel_relation and fmt == "short":
        bel_relation = self.spec["relations"]["to_short"].get(
            self.bel_relation, self.bel_relation
        )
    elif self.bel_relation:
        bel_relation = self.spec["relations"]["to_long"].get(
            self.bel_relation, self.bel_relation
        )
    if self.bel_subject and bel_relation and self.bel_object:
        # Nested statement: an object that is itself a BELAst is
        # parenthesized in the rendered output.
        if isinstance(self.bel_object, BELAst):
            return "{} {} ({})".format(
                self.bel_subject.to_string(fmt=fmt),
                bel_relation,
                self.bel_object.to_string(fmt=fmt),
            )
        else:
            return "{} {} {}".format(
                self.bel_subject.to_string(fmt=fmt),
                bel_relation,
                self.bel_object.to_string(fmt=fmt),
            )
    elif self.bel_subject:
        # Subject-only statement (no relation/object).
        return "{}".format(self.bel_subject.to_string(fmt=fmt))
    else:
        return "" | def function[to_string, parameter[self, ast_obj, fmt]]:
constant[Convert AST object to string
Args:
fmt (str): short, medium, long formatted BEL statements
short = short function and short relation format
medium = short function and long relation format
long = long function and long relation format
canonicalize
Returns:
str: string version of BEL AST
]
if <ast.UnaryOp object at 0x7da1b19cc700> begin[:]
variable[ast_obj] assign[=] name[self]
variable[bel_relation] assign[=] constant[None]
if <ast.BoolOp object at 0x7da1b19cc2e0> begin[:]
variable[bel_relation] assign[=] call[call[call[name[self].spec][constant[relations]]][constant[to_short]].get, parameter[name[self].bel_relation, name[self].bel_relation]]
if <ast.BoolOp object at 0x7da1b19ccdf0> begin[:]
if call[name[isinstance], parameter[name[self].bel_object, name[BELAst]]] begin[:]
return[call[constant[{} {} ({})].format, parameter[call[name[self].bel_subject.to_string, parameter[]], name[bel_relation], call[name[self].bel_object.to_string, parameter[]]]]] | keyword[def] identifier[to_string] ( identifier[self] , identifier[ast_obj] = keyword[None] , identifier[fmt] : identifier[str] = literal[string] )-> identifier[str] :
literal[string]
keyword[if] keyword[not] identifier[ast_obj] :
identifier[ast_obj] = identifier[self]
identifier[bel_relation] = keyword[None]
keyword[if] identifier[self] . identifier[bel_relation] keyword[and] identifier[fmt] == literal[string] :
identifier[bel_relation] = identifier[self] . identifier[spec] [ literal[string] ][ literal[string] ]. identifier[get] (
identifier[self] . identifier[bel_relation] , identifier[self] . identifier[bel_relation]
)
keyword[elif] identifier[self] . identifier[bel_relation] :
identifier[bel_relation] = identifier[self] . identifier[spec] [ literal[string] ][ literal[string] ]. identifier[get] (
identifier[self] . identifier[bel_relation] , identifier[self] . identifier[bel_relation]
)
keyword[if] identifier[self] . identifier[bel_subject] keyword[and] identifier[bel_relation] keyword[and] identifier[self] . identifier[bel_object] :
keyword[if] identifier[isinstance] ( identifier[self] . identifier[bel_object] , identifier[BELAst] ):
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[bel_subject] . identifier[to_string] ( identifier[fmt] = identifier[fmt] ),
identifier[bel_relation] ,
identifier[self] . identifier[bel_object] . identifier[to_string] ( identifier[fmt] = identifier[fmt] ),
)
keyword[else] :
keyword[return] literal[string] . identifier[format] (
identifier[self] . identifier[bel_subject] . identifier[to_string] ( identifier[fmt] = identifier[fmt] ),
identifier[bel_relation] ,
identifier[self] . identifier[bel_object] . identifier[to_string] ( identifier[fmt] = identifier[fmt] ),
)
keyword[elif] identifier[self] . identifier[bel_subject] :
keyword[return] literal[string] . identifier[format] ( identifier[self] . identifier[bel_subject] . identifier[to_string] ( identifier[fmt] = identifier[fmt] ))
keyword[else] :
keyword[return] literal[string] | def to_string(self, ast_obj=None, fmt: str='medium') -> str:
"""Convert AST object to string
Args:
fmt (str): short, medium, long formatted BEL statements
short = short function and short relation format
medium = short function and long relation format
long = long function and long relation format
canonicalize
Returns:
str: string version of BEL AST
"""
if not ast_obj:
ast_obj = self # depends on [control=['if'], data=[]]
bel_relation = None
if self.bel_relation and fmt == 'short':
bel_relation = self.spec['relations']['to_short'].get(self.bel_relation, self.bel_relation) # depends on [control=['if'], data=[]]
elif self.bel_relation:
bel_relation = self.spec['relations']['to_long'].get(self.bel_relation, self.bel_relation) # depends on [control=['if'], data=[]]
if self.bel_subject and bel_relation and self.bel_object:
if isinstance(self.bel_object, BELAst):
return '{} {} ({})'.format(self.bel_subject.to_string(fmt=fmt), bel_relation, self.bel_object.to_string(fmt=fmt)) # depends on [control=['if'], data=[]]
else:
return '{} {} {}'.format(self.bel_subject.to_string(fmt=fmt), bel_relation, self.bel_object.to_string(fmt=fmt)) # depends on [control=['if'], data=[]]
elif self.bel_subject:
return '{}'.format(self.bel_subject.to_string(fmt=fmt)) # depends on [control=['if'], data=[]]
else:
return '' |
def SetModel(self, loader):
    """Set our overall model (a loader object) and populate sub-controls"""
    self.loader = loader
    # RootNode() yields the display adapter, the root tree node, and a
    # mapping of row records (only the values are fed to the list control).
    self.adapter, tree, rows = self.RootNode()
    self.listControl.integrateRecords(rows.values())
    self.activated_node = tree
    self.squareMap.SetModel(tree, self.adapter)
    # Record this model change in the navigation history.
    self.RecordHistory() | def function[SetModel, parameter[self, loader]]:
constant[Set our overall model (a loader object) and populate sub-controls]
name[self].loader assign[=] name[loader]
<ast.Tuple object at 0x7da18f09cfd0> assign[=] call[name[self].RootNode, parameter[]]
call[name[self].listControl.integrateRecords, parameter[call[name[rows].values, parameter[]]]]
name[self].activated_node assign[=] name[tree]
call[name[self].squareMap.SetModel, parameter[name[tree], name[self].adapter]]
call[name[self].RecordHistory, parameter[]] | keyword[def] identifier[SetModel] ( identifier[self] , identifier[loader] ):
literal[string]
identifier[self] . identifier[loader] = identifier[loader]
identifier[self] . identifier[adapter] , identifier[tree] , identifier[rows] = identifier[self] . identifier[RootNode] ()
identifier[self] . identifier[listControl] . identifier[integrateRecords] ( identifier[rows] . identifier[values] ())
identifier[self] . identifier[activated_node] = identifier[tree]
identifier[self] . identifier[squareMap] . identifier[SetModel] ( identifier[tree] , identifier[self] . identifier[adapter] )
identifier[self] . identifier[RecordHistory] () | def SetModel(self, loader):
"""Set our overall model (a loader object) and populate sub-controls"""
self.loader = loader
(self.adapter, tree, rows) = self.RootNode()
self.listControl.integrateRecords(rows.values())
self.activated_node = tree
self.squareMap.SetModel(tree, self.adapter)
self.RecordHistory() |
def ping(self):
    """
    Ping the broker.
    Send a MQTT `PINGREQ <http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718081>`_ message for response.
    This method is a *coroutine*.
    """
    # Generator-based coroutine (pre async/await style); presumably wrapped
    # with @asyncio.coroutine at the definition site -- decorator not
    # visible here, confirm before changing.
    if self.session.transitions.is_connected():
        yield from self._handler.mqtt_ping()
    else:
        # A PINGREQ only makes sense on a live connection; log and do nothing.
        self.logger.warning("MQTT PING request incompatible with current session state '%s'" %
                            self.session.transitions.state) | def function[ping, parameter[self]]:
constant[
Ping the broker.
Send a MQTT `PINGREQ <http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718081>`_ message for response.
This method is a *coroutine*.
]
if call[name[self].session.transitions.is_connected, parameter[]] begin[:]
<ast.YieldFrom object at 0x7da18ede5d20> | keyword[def] identifier[ping] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[session] . identifier[transitions] . identifier[is_connected] ():
keyword[yield] keyword[from] identifier[self] . identifier[_handler] . identifier[mqtt_ping] ()
keyword[else] :
identifier[self] . identifier[logger] . identifier[warning] ( literal[string] %
identifier[self] . identifier[session] . identifier[transitions] . identifier[state] ) | def ping(self):
"""
Ping the broker.
Send a MQTT `PINGREQ <http://docs.oasis-open.org/mqtt/mqtt/v3.1.1/os/mqtt-v3.1.1-os.html#_Toc398718081>`_ message for response.
This method is a *coroutine*.
"""
if self.session.transitions.is_connected():
yield from self._handler.mqtt_ping() # depends on [control=['if'], data=[]]
else:
self.logger.warning("MQTT PING request incompatible with current session state '%s'" % self.session.transitions.state) |
def sample_string(self, individual=-1):
    """Returns the VCF entry as it appears in the vcf file"""
    # str(self) renders the fixed VCF columns for this entry.
    base = str(self)
    # get_sample_info presumably returns a mapping of FORMAT field name ->
    # per-sample value lists -- TODO confirm against its definition.
    extra = self.get_sample_info(individual=individual)
    # zip(*values) regroups the per-field lists into per-sample tuples;
    # each sample's values are then colon-joined (VCF sample-column syntax).
    extra = [':'.join([str(j) for j in i]) for i in zip(*extra.values())]
    return '\t'.join([base, '\t'.join(extra)]) | def function[sample_string, parameter[self, individual]]:
constant[Returns the VCF entry as it appears in the vcf file]
variable[base] assign[=] call[name[str], parameter[name[self]]]
variable[extra] assign[=] call[name[self].get_sample_info, parameter[]]
variable[extra] assign[=] <ast.ListComp object at 0x7da1b19b7d60>
return[call[constant[ ].join, parameter[list[[<ast.Name object at 0x7da1b19b6ef0>, <ast.Call object at 0x7da1b19b4dc0>]]]]] | keyword[def] identifier[sample_string] ( identifier[self] , identifier[individual] =- literal[int] ):
literal[string]
identifier[base] = identifier[str] ( identifier[self] )
identifier[extra] = identifier[self] . identifier[get_sample_info] ( identifier[individual] = identifier[individual] )
identifier[extra] =[ literal[string] . identifier[join] ([ identifier[str] ( identifier[j] ) keyword[for] identifier[j] keyword[in] identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[zip] (* identifier[extra] . identifier[values] ())]
keyword[return] literal[string] . identifier[join] ([ identifier[base] , literal[string] . identifier[join] ( identifier[extra] )]) | def sample_string(self, individual=-1):
"""Returns the VCF entry as it appears in the vcf file"""
base = str(self)
extra = self.get_sample_info(individual=individual)
extra = [':'.join([str(j) for j in i]) for i in zip(*extra.values())]
return '\t'.join([base, '\t'.join(extra)]) |
def extend3(filename, key, array, **attrs):
    """
    Extend an HDF5 file dataset with the given array

    Opens ``filename``, appends ``array`` to the dataset at ``key``
    (creating it with an unlimited first dimension if it does not exist),
    sets any ``attrs`` as HDF5 attributes on the dataset, and returns the
    length reported by ``extend`` (helper defined elsewhere in this module).
    """
    with h5py.File(filename) as h5:
        try:
            dset = h5[key]
        except KeyError:
            # Dataset does not exist yet: build a resizable shape whose
            # first axis is unlimited (None).
            if array.dtype.name == 'object':  # vlen array
                shape = (None,) + preshape(array[0])
            else:
                shape = (None,) + array.shape[1:]
            dset = create(h5, key, array.dtype, shape)
        length = extend(dset, array)
        # NOTE(review): this loop rebinds the ``key`` parameter; harmless
        # here since ``key`` is not read afterwards, but easy to trip over.
        for key, val in attrs.items():
            dset.attrs[key] = val
        h5.flush()
    return length | def function[extend3, parameter[filename, key, array]]:
constant[
Extend an HDF5 file dataset with the given array
]
with call[name[h5py].File, parameter[name[filename]]] begin[:]
<ast.Try object at 0x7da18f00f520>
variable[length] assign[=] call[name[extend], parameter[name[dset], name[array]]]
for taget[tuple[[<ast.Name object at 0x7da18f00ca90>, <ast.Name object at 0x7da18f00e260>]]] in starred[call[name[attrs].items, parameter[]]] begin[:]
call[name[dset].attrs][name[key]] assign[=] name[val]
call[name[h5].flush, parameter[]]
return[name[length]] | keyword[def] identifier[extend3] ( identifier[filename] , identifier[key] , identifier[array] ,** identifier[attrs] ):
literal[string]
keyword[with] identifier[h5py] . identifier[File] ( identifier[filename] ) keyword[as] identifier[h5] :
keyword[try] :
identifier[dset] = identifier[h5] [ identifier[key] ]
keyword[except] identifier[KeyError] :
keyword[if] identifier[array] . identifier[dtype] . identifier[name] == literal[string] :
identifier[shape] =( keyword[None] ,)+ identifier[preshape] ( identifier[array] [ literal[int] ])
keyword[else] :
identifier[shape] =( keyword[None] ,)+ identifier[array] . identifier[shape] [ literal[int] :]
identifier[dset] = identifier[create] ( identifier[h5] , identifier[key] , identifier[array] . identifier[dtype] , identifier[shape] )
identifier[length] = identifier[extend] ( identifier[dset] , identifier[array] )
keyword[for] identifier[key] , identifier[val] keyword[in] identifier[attrs] . identifier[items] ():
identifier[dset] . identifier[attrs] [ identifier[key] ]= identifier[val]
identifier[h5] . identifier[flush] ()
keyword[return] identifier[length] | def extend3(filename, key, array, **attrs):
"""
Extend an HDF5 file dataset with the given array
"""
with h5py.File(filename) as h5:
try:
dset = h5[key] # depends on [control=['try'], data=[]]
except KeyError:
if array.dtype.name == 'object': # vlen array
shape = (None,) + preshape(array[0]) # depends on [control=['if'], data=[]]
else:
shape = (None,) + array.shape[1:]
dset = create(h5, key, array.dtype, shape) # depends on [control=['except'], data=[]]
length = extend(dset, array)
for (key, val) in attrs.items():
dset.attrs[key] = val # depends on [control=['for'], data=[]]
h5.flush() # depends on [control=['with'], data=['h5']]
return length |
def get_jwt_data(token, token_type):
    """
    Decodes encoded JWT token by using extension setting and validates token type
    :param token: The encoded JWT string to decode
    :param token_type: JWT type for type validation (access or refresh)
    :return: Dictionary containing contents of the JWT
    :raises WrongTokenError: if the decoded token's ``type`` claim does not
        match ``token_type``
    """
    # NOTE(review): the algorithm is hard-coded to HS256 rather than read
    # from configuration -- confirm this matches the token issuer's setting.
    jwt_data = decode_jwt(
        encoded_token=token,
        secret=current_app.config['JWT_SECRET_KEY'],
        algorithm='HS256',
        identity_claim_key=current_app.config['JWT_IDENTITY_CLAIM'],
        user_claims_key=current_app.config['JWT_USER_CLAIMS']
    )
    # token type verification: reject e.g. a refresh token presented where
    # an access token is required.
    if jwt_data['type'] != token_type:
        raise WrongTokenError('Only {} tokens are allowed'.format(token_type))
    return jwt_data | def function[get_jwt_data, parameter[token, token_type]]:
constant[
Decodes encoded JWT token by using extension setting and validates token type
:param token: The encoded JWT string to decode
:param token_type: JWT type for type validation (access or refresh)
:return: Dictionary containing contents of the JWT
]
variable[jwt_data] assign[=] call[name[decode_jwt], parameter[]]
if compare[call[name[jwt_data]][constant[type]] not_equal[!=] name[token_type]] begin[:]
<ast.Raise object at 0x7da1b1909780>
return[name[jwt_data]] | keyword[def] identifier[get_jwt_data] ( identifier[token] , identifier[token_type] ):
literal[string]
identifier[jwt_data] = identifier[decode_jwt] (
identifier[encoded_token] = identifier[token] ,
identifier[secret] = identifier[current_app] . identifier[config] [ literal[string] ],
identifier[algorithm] = literal[string] ,
identifier[identity_claim_key] = identifier[current_app] . identifier[config] [ literal[string] ],
identifier[user_claims_key] = identifier[current_app] . identifier[config] [ literal[string] ]
)
keyword[if] identifier[jwt_data] [ literal[string] ]!= identifier[token_type] :
keyword[raise] identifier[WrongTokenError] ( literal[string] . identifier[format] ( identifier[token_type] ))
keyword[return] identifier[jwt_data] | def get_jwt_data(token, token_type):
"""
Decodes encoded JWT token by using extension setting and validates token type
:param token: The encoded JWT string to decode
:param token_type: JWT type for type validation (access or refresh)
:return: Dictionary containing contents of the JWT
"""
jwt_data = decode_jwt(encoded_token=token, secret=current_app.config['JWT_SECRET_KEY'], algorithm='HS256', identity_claim_key=current_app.config['JWT_IDENTITY_CLAIM'], user_claims_key=current_app.config['JWT_USER_CLAIMS'])
# token type verification
if jwt_data['type'] != token_type:
raise WrongTokenError('Only {} tokens are allowed'.format(token_type)) # depends on [control=['if'], data=['token_type']]
return jwt_data |
def fromPostArgs(cls, args):
    """Construct a Message containing a set of POST arguments.

    Keys of the form ``openid.<rest>`` are collected and handed to
    ``_fromOpenIDArgs``; every other key is stored under ``BARE_NS``.

    :raises TypeError: if any value in ``args`` is a list (each key must
        map to exactly one value).
    """
    # NOTE(review): presumably decorated as a classmethod at the
    # definition site (takes ``cls``) -- decorator not visible here.
    self = cls()
    # Partition into "openid." args and bare args
    openid_args = {}
    for key, value in args.items():
        if isinstance(value, list):
            raise TypeError("query dict must have one value for each key, "
                            "not lists of values. Query is %r" % (args,))
        try:
            prefix, rest = key.split('.', 1)
        except ValueError:
            # No '.' in the key: treat the whole key as a bare argument.
            prefix = None
        if prefix != 'openid':
            self.args[(BARE_NS, key)] = value
        else:
            openid_args[rest] = value
    self._fromOpenIDArgs(openid_args)
    return self | def function[fromPostArgs, parameter[cls, args]]:
constant[Construct a Message containing a set of POST arguments.
]
variable[self] assign[=] call[name[cls], parameter[]]
variable[openid_args] assign[=] dictionary[[], []]
for taget[tuple[[<ast.Name object at 0x7da2054a7a30>, <ast.Name object at 0x7da2054a64a0>]]] in starred[call[name[args].items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[value], name[list]]] begin[:]
<ast.Raise object at 0x7da2054a7520>
<ast.Try object at 0x7da2054a48b0>
if compare[name[prefix] not_equal[!=] constant[openid]] begin[:]
call[name[self].args][tuple[[<ast.Name object at 0x7da18eb55210>, <ast.Name object at 0x7da18eb55c00>]]] assign[=] name[value]
call[name[self]._fromOpenIDArgs, parameter[name[openid_args]]]
return[name[self]] | keyword[def] identifier[fromPostArgs] ( identifier[cls] , identifier[args] ):
literal[string]
identifier[self] = identifier[cls] ()
identifier[openid_args] ={}
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[args] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[value] , identifier[list] ):
keyword[raise] identifier[TypeError] ( literal[string]
literal[string] %( identifier[args] ,))
keyword[try] :
identifier[prefix] , identifier[rest] = identifier[key] . identifier[split] ( literal[string] , literal[int] )
keyword[except] identifier[ValueError] :
identifier[prefix] = keyword[None]
keyword[if] identifier[prefix] != literal[string] :
identifier[self] . identifier[args] [( identifier[BARE_NS] , identifier[key] )]= identifier[value]
keyword[else] :
identifier[openid_args] [ identifier[rest] ]= identifier[value]
identifier[self] . identifier[_fromOpenIDArgs] ( identifier[openid_args] )
keyword[return] identifier[self] | def fromPostArgs(cls, args):
"""Construct a Message containing a set of POST arguments.
"""
self = cls()
# Partition into "openid." args and bare args
openid_args = {}
for (key, value) in args.items():
if isinstance(value, list):
raise TypeError('query dict must have one value for each key, not lists of values. Query is %r' % (args,)) # depends on [control=['if'], data=[]]
try:
(prefix, rest) = key.split('.', 1) # depends on [control=['try'], data=[]]
except ValueError:
prefix = None # depends on [control=['except'], data=[]]
if prefix != 'openid':
self.args[BARE_NS, key] = value # depends on [control=['if'], data=[]]
else:
openid_args[rest] = value # depends on [control=['for'], data=[]]
self._fromOpenIDArgs(openid_args)
return self |
def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=None,
               chunk_store=None):
    """Open a group using file-mode-like semantics.
    Parameters
    ----------
    store : MutableMapping or string, optional
        Store or path to directory in file system or name of zip file.
    mode : {'r', 'r+', 'a', 'w', 'w-'}, optional
        Persistence mode: 'r' means read only (must exist); 'r+' means
        read/write (must exist); 'a' means read/write (create if doesn't
        exist); 'w' means create (overwrite if exists); 'w-' means create
        (fail if exists).
    cache_attrs : bool, optional
        If True (default), user attributes will be cached for attribute read
        operations. If False, user attributes are reloaded from the store prior
        to all attribute read operations.
    synchronizer : object, optional
        Array synchronizer.
    path : string, optional
        Group path within store.
    chunk_store : MutableMapping or string, optional
        Store or path to directory in file system or name of zip file.
    Returns
    -------
    g : zarr.hierarchy.Group
    Examples
    --------
    >>> import zarr
    >>> root = zarr.open_group('data/example.zarr', mode='w')
    >>> foo = root.create_group('foo')
    >>> bar = root.create_group('bar')
    >>> root
    <zarr.hierarchy.Group '/'>
    >>> root2 = zarr.open_group('data/example.zarr', mode='a')
    >>> root2
    <zarr.hierarchy.Group '/'>
    >>> root == root2
    True
    """
    # handle polymorphic store arg (string paths become store objects)
    store = _normalize_store_arg(store)
    if chunk_store is not None:
        chunk_store = _normalize_store_arg(chunk_store)
    path = normalize_storage_path(path)
    # ensure store is initialized
    if mode in ['r', 'r+']:
        # read modes: a group (not an array) must already exist at ``path``
        if contains_array(store, path=path):
            err_contains_array(path)
        elif not contains_group(store, path=path):
            err_group_not_found(path)
    elif mode == 'w':
        # overwrite: (re)create the group unconditionally
        init_group(store, overwrite=True, path=path, chunk_store=chunk_store)
    elif mode == 'a':
        # append: create only if nothing usable is there yet; an existing
        # array at the same path is an error
        if contains_array(store, path=path):
            err_contains_array(path)
        if not contains_group(store, path=path):
            init_group(store, path=path, chunk_store=chunk_store)
    elif mode in ['w-', 'x']:
        # exclusive create: fail if anything already exists at ``path``
        if contains_array(store, path=path):
            err_contains_array(path)
        elif contains_group(store, path=path):
            err_contains_group(path)
        else:
            init_group(store, path=path, chunk_store=chunk_store)
    # determine read only status
    read_only = mode == 'r'
    return Group(store, read_only=read_only, cache_attrs=cache_attrs,
                 synchronizer=synchronizer, path=path, chunk_store=chunk_store) | def function[open_group, parameter[store, mode, cache_attrs, synchronizer, path, chunk_store]]:
constant[Open a group using file-mode-like semantics.
Parameters
----------
store : MutableMapping or string, optional
Store or path to directory in file system or name of zip file.
mode : {'r', 'r+', 'a', 'w', 'w-'}, optional
Persistence mode: 'r' means read only (must exist); 'r+' means
read/write (must exist); 'a' means read/write (create if doesn't
exist); 'w' means create (overwrite if exists); 'w-' means create
(fail if exists).
cache_attrs : bool, optional
If True (default), user attributes will be cached for attribute read
operations. If False, user attributes are reloaded from the store prior
to all attribute read operations.
synchronizer : object, optional
Array synchronizer.
path : string, optional
Group path within store.
chunk_store : MutableMapping or string, optional
Store or path to directory in file system or name of zip file.
Returns
-------
g : zarr.hierarchy.Group
Examples
--------
>>> import zarr
>>> root = zarr.open_group('data/example.zarr', mode='w')
>>> foo = root.create_group('foo')
>>> bar = root.create_group('bar')
>>> root
<zarr.hierarchy.Group '/'>
>>> root2 = zarr.open_group('data/example.zarr', mode='a')
>>> root2
<zarr.hierarchy.Group '/'>
>>> root == root2
True
]
variable[store] assign[=] call[name[_normalize_store_arg], parameter[name[store]]]
if compare[name[chunk_store] is_not constant[None]] begin[:]
variable[chunk_store] assign[=] call[name[_normalize_store_arg], parameter[name[chunk_store]]]
variable[path] assign[=] call[name[normalize_storage_path], parameter[name[path]]]
if compare[name[mode] in list[[<ast.Constant object at 0x7da1b1982ce0>, <ast.Constant object at 0x7da1b1981030>]]] begin[:]
if call[name[contains_array], parameter[name[store]]] begin[:]
call[name[err_contains_array], parameter[name[path]]]
variable[read_only] assign[=] compare[name[mode] equal[==] constant[r]]
return[call[name[Group], parameter[name[store]]]] | keyword[def] identifier[open_group] ( identifier[store] = keyword[None] , identifier[mode] = literal[string] , identifier[cache_attrs] = keyword[True] , identifier[synchronizer] = keyword[None] , identifier[path] = keyword[None] ,
identifier[chunk_store] = keyword[None] ):
literal[string]
identifier[store] = identifier[_normalize_store_arg] ( identifier[store] )
keyword[if] identifier[chunk_store] keyword[is] keyword[not] keyword[None] :
identifier[chunk_store] = identifier[_normalize_store_arg] ( identifier[chunk_store] )
identifier[path] = identifier[normalize_storage_path] ( identifier[path] )
keyword[if] identifier[mode] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[contains_array] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[err_contains_array] ( identifier[path] )
keyword[elif] keyword[not] identifier[contains_group] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[err_group_not_found] ( identifier[path] )
keyword[elif] identifier[mode] == literal[string] :
identifier[init_group] ( identifier[store] , identifier[overwrite] = keyword[True] , identifier[path] = identifier[path] , identifier[chunk_store] = identifier[chunk_store] )
keyword[elif] identifier[mode] == literal[string] :
keyword[if] identifier[contains_array] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[err_contains_array] ( identifier[path] )
keyword[if] keyword[not] identifier[contains_group] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[init_group] ( identifier[store] , identifier[path] = identifier[path] , identifier[chunk_store] = identifier[chunk_store] )
keyword[elif] identifier[mode] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[contains_array] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[err_contains_array] ( identifier[path] )
keyword[elif] identifier[contains_group] ( identifier[store] , identifier[path] = identifier[path] ):
identifier[err_contains_group] ( identifier[path] )
keyword[else] :
identifier[init_group] ( identifier[store] , identifier[path] = identifier[path] , identifier[chunk_store] = identifier[chunk_store] )
identifier[read_only] = identifier[mode] == literal[string]
keyword[return] identifier[Group] ( identifier[store] , identifier[read_only] = identifier[read_only] , identifier[cache_attrs] = identifier[cache_attrs] ,
identifier[synchronizer] = identifier[synchronizer] , identifier[path] = identifier[path] , identifier[chunk_store] = identifier[chunk_store] ) | def open_group(store=None, mode='a', cache_attrs=True, synchronizer=None, path=None, chunk_store=None):
"""Open a group using file-mode-like semantics.
Parameters
----------
store : MutableMapping or string, optional
Store or path to directory in file system or name of zip file.
mode : {'r', 'r+', 'a', 'w', 'w-'}, optional
Persistence mode: 'r' means read only (must exist); 'r+' means
read/write (must exist); 'a' means read/write (create if doesn't
exist); 'w' means create (overwrite if exists); 'w-' means create
(fail if exists).
cache_attrs : bool, optional
If True (default), user attributes will be cached for attribute read
operations. If False, user attributes are reloaded from the store prior
to all attribute read operations.
synchronizer : object, optional
Array synchronizer.
path : string, optional
Group path within store.
chunk_store : MutableMapping or string, optional
Store or path to directory in file system or name of zip file.
Returns
-------
g : zarr.hierarchy.Group
Examples
--------
>>> import zarr
>>> root = zarr.open_group('data/example.zarr', mode='w')
>>> foo = root.create_group('foo')
>>> bar = root.create_group('bar')
>>> root
<zarr.hierarchy.Group '/'>
>>> root2 = zarr.open_group('data/example.zarr', mode='a')
>>> root2
<zarr.hierarchy.Group '/'>
>>> root == root2
True
"""
# handle polymorphic store arg
store = _normalize_store_arg(store)
if chunk_store is not None:
chunk_store = _normalize_store_arg(chunk_store) # depends on [control=['if'], data=['chunk_store']]
path = normalize_storage_path(path)
# ensure store is initialized
if mode in ['r', 'r+']:
if contains_array(store, path=path):
err_contains_array(path) # depends on [control=['if'], data=[]]
elif not contains_group(store, path=path):
err_group_not_found(path) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif mode == 'w':
init_group(store, overwrite=True, path=path, chunk_store=chunk_store) # depends on [control=['if'], data=[]]
elif mode == 'a':
if contains_array(store, path=path):
err_contains_array(path) # depends on [control=['if'], data=[]]
if not contains_group(store, path=path):
init_group(store, path=path, chunk_store=chunk_store) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif mode in ['w-', 'x']:
if contains_array(store, path=path):
err_contains_array(path) # depends on [control=['if'], data=[]]
elif contains_group(store, path=path):
err_contains_group(path) # depends on [control=['if'], data=[]]
else:
init_group(store, path=path, chunk_store=chunk_store) # depends on [control=['if'], data=[]]
# determine read only status
read_only = mode == 'r'
return Group(store, read_only=read_only, cache_attrs=cache_attrs, synchronizer=synchronizer, path=path, chunk_store=chunk_store) |
def cli(ctx, id_number, new_value):
    """Update a status name

    Renames the status record identified by ``id_number`` to
    ``new_value`` via the API client carried on ``ctx.gi``.

    Output:

        an empty dictionary
    """
    return ctx.gi.status.update_status(id_number, new_value)
constant[Update a status name
Output:
an empty dictionary
]
return[call[name[ctx].gi.status.update_status, parameter[name[id_number], name[new_value]]]] | keyword[def] identifier[cli] ( identifier[ctx] , identifier[id_number] , identifier[new_value] ):
literal[string]
keyword[return] identifier[ctx] . identifier[gi] . identifier[status] . identifier[update_status] ( identifier[id_number] , identifier[new_value] ) | def cli(ctx, id_number, new_value):
"""Update a status name
Output:
an empty dictionary
"""
return ctx.gi.status.update_status(id_number, new_value) |
def validate_instance_password(self, password):
    '''Validate an instance password.

    A password is valid when it is 8-41 characters long and consists
    only of word characters or hyphens (``[A-Za-z0-9_-]``).

    Returns ``True`` when valid, otherwise an error-message string.
    NOTE: the error string is truthy, so callers must compare the
    result against ``True`` rather than rely on truthiness.
    '''
    # raw string avoids the invalid-escape DeprecationWarning for \w
    if re.match(r'[\w-]+$', password) is not None:
        # chained comparison for the 8-41 length window
        if 8 <= len(password) <= 41:
            return True
    return '*** Error: Passwords must be 8-41 alphanumeric characters'
constant[ Validate instance passwords ]
if compare[call[name[re].match, parameter[constant[[\w-]+$], name[password]]] is_not constant[None]] begin[:]
if <ast.BoolOp object at 0x7da1b08f8f70> begin[:]
return[constant[True]]
return[constant[*** Error: Passwords must be 8-41 alphanumeric characters]] | keyword[def] identifier[validate_instance_password] ( identifier[self] , identifier[password] ):
literal[string]
keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[password] ) keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[len] ( identifier[password] )<= literal[int] keyword[and] identifier[len] ( identifier[password] )>= literal[int] :
keyword[return] keyword[True]
keyword[return] literal[string] | def validate_instance_password(self, password):
""" Validate instance passwords """
# 1-16 alphanumeric characters - first character must be a letter -
# cannot be a reserved MySQL word
if re.match('[\\w-]+$', password) is not None:
if len(password) <= 41 and len(password) >= 8:
return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return '*** Error: Passwords must be 8-41 alphanumeric characters' |
def print_page_cb(self, print_op, print_context, keep_refs={}):
    """
    Called for printing operation by Gtk

    Renders ``self.img`` onto the print context's cairo surface:
    rotates the image 90 degrees when the image and page orientations
    differ, scales it to fit the printable area, then paints it.

    NOTE(review): the mutable default ``keep_refs={}`` is shared across
    calls; it looks intentional here (keeps cairo surfaces referenced
    until the print operation finishes) -- confirm before changing.
    """
    ORIENTATION_PORTRAIT = 0
    ORIENTATION_LANDSCAPE = 1
    scaling = 2.0
    img = self.img
    (width, height) = img.size
    # take care of rotating the image if required
    if print_context.get_width() <= print_context.get_height():
        print_orientation = ORIENTATION_PORTRAIT
    else:
        print_orientation = ORIENTATION_LANDSCAPE
    if width <= height:
        img_orientation = ORIENTATION_PORTRAIT
    else:
        img_orientation = ORIENTATION_LANDSCAPE
    if print_orientation != img_orientation:
        logger.info("Rotating the page ...")
        img = img.rotate(90, expand=True)
        (width, height) = img.size
    # scale the image down
    # XXX(Jflesch): beware that we get floats for the page size ...
    scaling = min(
        print_context.get_width() / width,
        print_context.get_height() / height
    )
    logger.info("DPI: %fx%f" % (print_context.get_dpi_x(),
                                print_context.get_dpi_y()))
    surface = image2surface(img)
    # keep a reference so the surface is not garbage-collected before
    # the print operation actually renders it
    keep_refs['surface_cache_' + str(self.page_nb)] = surface
    # .. and print !
    cairo_context = print_context.get_cairo_context()
    cairo_context.scale(scaling, scaling)
    cairo_context.set_source_surface(surface, 0, 0)
    cairo_context.paint()
constant[
Called for printing operation by Gtk
]
variable[ORIENTATION_PORTRAIT] assign[=] constant[0]
variable[ORIENTATION_LANDSCAPE] assign[=] constant[1]
variable[scaling] assign[=] constant[2.0]
variable[img] assign[=] name[self].img
<ast.Tuple object at 0x7da18ede4670> assign[=] name[img].size
if compare[call[name[print_context].get_width, parameter[]] less_or_equal[<=] call[name[print_context].get_height, parameter[]]] begin[:]
variable[print_orientation] assign[=] name[ORIENTATION_PORTRAIT]
if compare[name[width] less_or_equal[<=] name[height]] begin[:]
variable[img_orientation] assign[=] name[ORIENTATION_PORTRAIT]
if compare[name[print_orientation] not_equal[!=] name[img_orientation]] begin[:]
call[name[logger].info, parameter[constant[Rotating the page ...]]]
variable[img] assign[=] call[name[img].rotate, parameter[constant[90]]]
<ast.Tuple object at 0x7da20e9b3b80> assign[=] name[img].size
variable[scaling] assign[=] call[name[min], parameter[binary_operation[call[name[print_context].get_width, parameter[]] / name[width]], binary_operation[call[name[print_context].get_height, parameter[]] / name[height]]]]
call[name[logger].info, parameter[binary_operation[constant[DPI: %fx%f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da20e9b11e0>, <ast.Call object at 0x7da20e9b35e0>]]]]]
variable[surface] assign[=] call[name[image2surface], parameter[name[img]]]
call[name[keep_refs]][binary_operation[constant[surface_cache_] + call[name[str], parameter[name[self].page_nb]]]] assign[=] name[surface]
variable[cairo_context] assign[=] call[name[print_context].get_cairo_context, parameter[]]
call[name[cairo_context].scale, parameter[name[scaling], name[scaling]]]
call[name[cairo_context].set_source_surface, parameter[name[surface], constant[0], constant[0]]]
call[name[cairo_context].paint, parameter[]] | keyword[def] identifier[print_page_cb] ( identifier[self] , identifier[print_op] , identifier[print_context] , identifier[keep_refs] ={}):
literal[string]
identifier[ORIENTATION_PORTRAIT] = literal[int]
identifier[ORIENTATION_LANDSCAPE] = literal[int]
identifier[scaling] = literal[int]
identifier[img] = identifier[self] . identifier[img]
( identifier[width] , identifier[height] )= identifier[img] . identifier[size]
keyword[if] identifier[print_context] . identifier[get_width] ()<= identifier[print_context] . identifier[get_height] ():
identifier[print_orientation] = identifier[ORIENTATION_PORTRAIT]
keyword[else] :
identifier[print_orientation] = identifier[ORIENTATION_LANDSCAPE]
keyword[if] identifier[width] <= identifier[height] :
identifier[img_orientation] = identifier[ORIENTATION_PORTRAIT]
keyword[else] :
identifier[img_orientation] = identifier[ORIENTATION_LANDSCAPE]
keyword[if] identifier[print_orientation] != identifier[img_orientation] :
identifier[logger] . identifier[info] ( literal[string] )
identifier[img] = identifier[img] . identifier[rotate] ( literal[int] , identifier[expand] = keyword[True] )
( identifier[width] , identifier[height] )= identifier[img] . identifier[size]
identifier[scaling] = identifier[min] (
identifier[print_context] . identifier[get_width] ()/ identifier[width] ,
identifier[print_context] . identifier[get_height] ()/ identifier[height]
)
identifier[logger] . identifier[info] ( literal[string] %( identifier[print_context] . identifier[get_dpi_x] (),
identifier[print_context] . identifier[get_dpi_y] ()))
identifier[surface] = identifier[image2surface] ( identifier[img] )
identifier[keep_refs] [ literal[string] + identifier[str] ( identifier[self] . identifier[page_nb] )]= identifier[surface]
identifier[cairo_context] = identifier[print_context] . identifier[get_cairo_context] ()
identifier[cairo_context] . identifier[scale] ( identifier[scaling] , identifier[scaling] )
identifier[cairo_context] . identifier[set_source_surface] ( identifier[surface] , literal[int] , literal[int] )
identifier[cairo_context] . identifier[paint] () | def print_page_cb(self, print_op, print_context, keep_refs={}):
"""
Called for printing operation by Gtk
"""
ORIENTATION_PORTRAIT = 0
ORIENTATION_LANDSCAPE = 1
scaling = 2.0
img = self.img
(width, height) = img.size
# take care of rotating the image if required
if print_context.get_width() <= print_context.get_height():
print_orientation = ORIENTATION_PORTRAIT # depends on [control=['if'], data=[]]
else:
print_orientation = ORIENTATION_LANDSCAPE
if width <= height:
img_orientation = ORIENTATION_PORTRAIT # depends on [control=['if'], data=[]]
else:
img_orientation = ORIENTATION_LANDSCAPE
if print_orientation != img_orientation:
logger.info('Rotating the page ...')
img = img.rotate(90, expand=True) # depends on [control=['if'], data=[]]
(width, height) = img.size
# scale the image down
# XXX(Jflesch): beware that we get floats for the page size ...
scaling = min(print_context.get_width() / width, print_context.get_height() / height)
logger.info('DPI: %fx%f' % (print_context.get_dpi_x(), print_context.get_dpi_y()))
surface = image2surface(img)
keep_refs['surface_cache_' + str(self.page_nb)] = surface
# .. and print !
cairo_context = print_context.get_cairo_context()
cairo_context.scale(scaling, scaling)
cairo_context.set_source_surface(surface, 0, 0)
cairo_context.paint() |
def get_connection(service, module=None, region=None, key=None, keyid=None,
                   profile=None):
    '''
    Return a boto connection for the service.

    Connections are cached in ``__context__`` under a per-profile key,
    so repeated calls with the same credentials reuse the same object.
    ``module`` defaults to ``service`` and names the ``boto.<module>``
    sub-module to connect through.

    .. code-block:: python

        conn = __utils__['boto.get_connection']('ec2', profile='custom_profile')
    '''
    # future lint: disable=blacklisted-function
    module = str(module or service)
    module, submodule = (str('boto.') + module).rsplit(str('.'), 1)
    # future lint: enable=blacklisted-function
    # dynamically import e.g. boto.ec2 and grab the sub-module object
    svc_mod = getattr(__import__(module, fromlist=[submodule]), submodule)
    cxkey, region, key, keyid = _get_profile(service, region, key,
                                             keyid, profile)
    cxkey = cxkey + ':conn'
    # reuse a cached connection for this profile when one exists
    if cxkey in __context__:
        return __context__[cxkey]
    try:
        conn = svc_mod.connect_to_region(region, aws_access_key_id=keyid,
                                         aws_secret_access_key=key)
        if conn is None:
            # boto returns None for unknown regions instead of raising
            raise SaltInvocationError('Region "{0}" is not '
                                      'valid.'.format(region))
    except boto.exception.NoAuthHandlerFound:
        raise SaltInvocationError('No authentication credentials found when '
                                  'attempting to make boto {0} connection to '
                                  'region "{1}".'.format(service, region))
    __context__[cxkey] = conn
    return conn
return conn | def function[get_connection, parameter[service, module, region, key, keyid, profile]]:
constant[
Return a boto connection for the service.
.. code-block:: python
conn = __utils__['boto.get_connection']('ec2', profile='custom_profile')
]
variable[module] assign[=] call[name[str], parameter[<ast.BoolOp object at 0x7da18dc07c70>]]
<ast.Tuple object at 0x7da18dc05e70> assign[=] call[binary_operation[call[name[str], parameter[constant[boto.]]] + name[module]].rsplit, parameter[call[name[str], parameter[constant[.]]], constant[1]]]
variable[svc_mod] assign[=] call[name[getattr], parameter[call[name[__import__], parameter[name[module]]], name[submodule]]]
<ast.Tuple object at 0x7da18dc046d0> assign[=] call[name[_get_profile], parameter[name[service], name[region], name[key], name[keyid], name[profile]]]
variable[cxkey] assign[=] binary_operation[name[cxkey] + constant[:conn]]
if compare[name[cxkey] in name[__context__]] begin[:]
return[call[name[__context__]][name[cxkey]]]
<ast.Try object at 0x7da1b208a410>
call[name[__context__]][name[cxkey]] assign[=] name[conn]
return[name[conn]] | keyword[def] identifier[get_connection] ( identifier[service] , identifier[module] = keyword[None] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] ,
identifier[profile] = keyword[None] ):
literal[string]
identifier[module] = identifier[str] ( identifier[module] keyword[or] identifier[service] )
identifier[module] , identifier[submodule] =( identifier[str] ( literal[string] )+ identifier[module] ). identifier[rsplit] ( identifier[str] ( literal[string] ), literal[int] )
identifier[svc_mod] = identifier[getattr] ( identifier[__import__] ( identifier[module] , identifier[fromlist] =[ identifier[submodule] ]), identifier[submodule] )
identifier[cxkey] , identifier[region] , identifier[key] , identifier[keyid] = identifier[_get_profile] ( identifier[service] , identifier[region] , identifier[key] ,
identifier[keyid] , identifier[profile] )
identifier[cxkey] = identifier[cxkey] + literal[string]
keyword[if] identifier[cxkey] keyword[in] identifier[__context__] :
keyword[return] identifier[__context__] [ identifier[cxkey] ]
keyword[try] :
identifier[conn] = identifier[svc_mod] . identifier[connect_to_region] ( identifier[region] , identifier[aws_access_key_id] = identifier[keyid] ,
identifier[aws_secret_access_key] = identifier[key] )
keyword[if] identifier[conn] keyword[is] keyword[None] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string] . identifier[format] ( identifier[region] ))
keyword[except] identifier[boto] . identifier[exception] . identifier[NoAuthHandlerFound] :
keyword[raise] identifier[SaltInvocationError] ( literal[string]
literal[string]
literal[string] . identifier[format] ( identifier[service] , identifier[region] ))
identifier[__context__] [ identifier[cxkey] ]= identifier[conn]
keyword[return] identifier[conn] | def get_connection(service, module=None, region=None, key=None, keyid=None, profile=None):
"""
Return a boto connection for the service.
.. code-block:: python
conn = __utils__['boto.get_connection']('ec2', profile='custom_profile')
"""
# future lint: disable=blacklisted-function
module = str(module or service)
(module, submodule) = (str('boto.') + module).rsplit(str('.'), 1)
# future lint: enable=blacklisted-function
svc_mod = getattr(__import__(module, fromlist=[submodule]), submodule)
(cxkey, region, key, keyid) = _get_profile(service, region, key, keyid, profile)
cxkey = cxkey + ':conn'
if cxkey in __context__:
return __context__[cxkey] # depends on [control=['if'], data=['cxkey', '__context__']]
try:
conn = svc_mod.connect_to_region(region, aws_access_key_id=keyid, aws_secret_access_key=key)
if conn is None:
raise SaltInvocationError('Region "{0}" is not valid.'.format(region)) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except boto.exception.NoAuthHandlerFound:
raise SaltInvocationError('No authentication credentials found when attempting to make boto {0} connection to region "{1}".'.format(service, region)) # depends on [control=['except'], data=[]]
__context__[cxkey] = conn
return conn |
def show_slug_with_level(context, page, lang=None, fallback=True):
    """Display slug with level by language."""
    # fall back to the language stored in the template context
    lang = lang or context.get('lang', pages_settings.PAGE_DEFAULT_LANGUAGE)
    target = get_page_from_string_or_id(page, lang)
    if target:
        return {'content': target.slug_with_level(lang)}
    return ''
constant[Display slug with level by language.]
if <ast.UnaryOp object at 0x7da204962500> begin[:]
variable[lang] assign[=] call[name[context].get, parameter[constant[lang], name[pages_settings].PAGE_DEFAULT_LANGUAGE]]
variable[page] assign[=] call[name[get_page_from_string_or_id], parameter[name[page], name[lang]]]
if <ast.UnaryOp object at 0x7da2049604f0> begin[:]
return[constant[]]
return[dictionary[[<ast.Constant object at 0x7da204960640>], [<ast.Call object at 0x7da204960310>]]] | keyword[def] identifier[show_slug_with_level] ( identifier[context] , identifier[page] , identifier[lang] = keyword[None] , identifier[fallback] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[lang] :
identifier[lang] = identifier[context] . identifier[get] ( literal[string] , identifier[pages_settings] . identifier[PAGE_DEFAULT_LANGUAGE] )
identifier[page] = identifier[get_page_from_string_or_id] ( identifier[page] , identifier[lang] )
keyword[if] keyword[not] identifier[page] :
keyword[return] literal[string]
keyword[return] { literal[string] : identifier[page] . identifier[slug_with_level] ( identifier[lang] )} | def show_slug_with_level(context, page, lang=None, fallback=True):
"""Display slug with level by language."""
if not lang:
lang = context.get('lang', pages_settings.PAGE_DEFAULT_LANGUAGE) # depends on [control=['if'], data=[]]
page = get_page_from_string_or_id(page, lang)
if not page:
return '' # depends on [control=['if'], data=[]]
return {'content': page.slug_with_level(lang)} |
def instance_variables(self):
    """
    Return all instance variables in the class, sorted
    alphabetically, as `pydoc.Variable` objects filtered through the
    module's doc filter.  Instance variables are attributes of `self`
    defined in a class's `__init__` method.
    """
    def _wanted(obj):
        # keep only Variable objects accepted by the module's filter
        return isinstance(obj, Variable) and self.module._docfilter(obj)
    return filter(_wanted, self.doc_init.values())
constant[
Returns all instance variables in the class, sorted
alphabetically as a list of `pydoc.Variable`. Instance variables
are attributes of `self` defined in a class's `__init__`
method.
]
variable[p] assign[=] <ast.Lambda object at 0x7da1b140a1a0>
return[call[name[filter], parameter[name[p], call[name[self].doc_init.values, parameter[]]]]] | keyword[def] identifier[instance_variables] ( identifier[self] ):
literal[string]
identifier[p] = keyword[lambda] identifier[o] : identifier[isinstance] ( identifier[o] , identifier[Variable] ) keyword[and] identifier[self] . identifier[module] . identifier[_docfilter] ( identifier[o] )
keyword[return] identifier[filter] ( identifier[p] , identifier[self] . identifier[doc_init] . identifier[values] ()) | def instance_variables(self):
"""
Returns all instance variables in the class, sorted
alphabetically as a list of `pydoc.Variable`. Instance variables
are attributes of `self` defined in a class's `__init__`
method.
"""
p = lambda o: isinstance(o, Variable) and self.module._docfilter(o)
return filter(p, self.doc_init.values()) |
def _thread_init(cls):
"""Ensure thread local is initialized."""
if not hasattr(cls._local, '_in_order_futures'):
cls._local._in_order_futures = set()
cls._local._activated = False | def function[_thread_init, parameter[cls]]:
constant[Ensure thread local is initialized.]
if <ast.UnaryOp object at 0x7da18eb54580> begin[:]
name[cls]._local._in_order_futures assign[=] call[name[set], parameter[]]
name[cls]._local._activated assign[=] constant[False] | keyword[def] identifier[_thread_init] ( identifier[cls] ):
literal[string]
keyword[if] keyword[not] identifier[hasattr] ( identifier[cls] . identifier[_local] , literal[string] ):
identifier[cls] . identifier[_local] . identifier[_in_order_futures] = identifier[set] ()
identifier[cls] . identifier[_local] . identifier[_activated] = keyword[False] | def _thread_init(cls):
"""Ensure thread local is initialized."""
if not hasattr(cls._local, '_in_order_futures'):
cls._local._in_order_futures = set()
cls._local._activated = False # depends on [control=['if'], data=[]] |
def _word_to_score(self, ids, scores):
"""Return a map from each word to its score.
:param (np.array) ids: a vector of word ids
:param (np.array) scores: a vector of scores
:return (dict[unicode, float]): a map from each word (unicode) to its score (float)
"""
# should be 1-D vectors
assert len(ids.shape) == 1
assert ids.shape == scores.shape
w2s = {}
for i in range(len(ids)):
w2s[self.vocab.index2word(ids[i])] = scores[i]
return w2s | def function[_word_to_score, parameter[self, ids, scores]]:
constant[Return a map from each word to its score.
:param (np.array) ids: a vector of word ids
:param (np.array) scores: a vector of scores
:return (dict[unicode, float]): a map from each word (unicode) to its score (float)
]
assert[compare[call[name[len], parameter[name[ids].shape]] equal[==] constant[1]]]
assert[compare[name[ids].shape equal[==] name[scores].shape]]
variable[w2s] assign[=] dictionary[[], []]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[ids]]]]]] begin[:]
call[name[w2s]][call[name[self].vocab.index2word, parameter[call[name[ids]][name[i]]]]] assign[=] call[name[scores]][name[i]]
return[name[w2s]] | keyword[def] identifier[_word_to_score] ( identifier[self] , identifier[ids] , identifier[scores] ):
literal[string]
keyword[assert] identifier[len] ( identifier[ids] . identifier[shape] )== literal[int]
keyword[assert] identifier[ids] . identifier[shape] == identifier[scores] . identifier[shape]
identifier[w2s] ={}
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[ids] )):
identifier[w2s] [ identifier[self] . identifier[vocab] . identifier[index2word] ( identifier[ids] [ identifier[i] ])]= identifier[scores] [ identifier[i] ]
keyword[return] identifier[w2s] | def _word_to_score(self, ids, scores):
"""Return a map from each word to its score.
:param (np.array) ids: a vector of word ids
:param (np.array) scores: a vector of scores
:return (dict[unicode, float]): a map from each word (unicode) to its score (float)
"""
# should be 1-D vectors
assert len(ids.shape) == 1
assert ids.shape == scores.shape
w2s = {}
for i in range(len(ids)):
w2s[self.vocab.index2word(ids[i])] = scores[i] # depends on [control=['for'], data=['i']]
return w2s |
def fit(self, x, y, dcoef='none'):
    '''
    Perform the least-squares fit.

    Parameters
    ----------
    x, y : list
        Matching data arrays that define a numerical function y(x);
        this is the data to be fitted.
    dcoef : list or string
        A different initial guess for the coefficients, or the string
        'none' to use the initial guess stored in self.coef.  The
        default is 'none'.

    Returns
    -------
    ierr
        Integer status flag from scipy.optimize.leastsq; values
        between 1 and 4 signal success.

    Notes
    -----
    self.fcoef contains the fitted coefficients afterwards.
    '''
    self.x = x
    self.y = y
    # equality, not identity: ``dcoef is not 'none'`` compared object
    # identity with a str literal, which is implementation-dependent
    # and a SyntaxWarning on modern CPython
    if dcoef != 'none':
        coef = dcoef
    else:
        coef = self.coef
    fcoef = optimize.leastsq(self.residual, coef, args=(y, self.func, x))
    self.fcoef = fcoef[0].tolist()
    return fcoef[1]
return fcoef[1] | def function[fit, parameter[self, x, y, dcoef]]:
constant[
performs the fit
x, y : list
Matching data arrays that define a numerical function y(x),
this is the data to be fitted.
dcoef : list or string
You can provide a different guess for the coefficients, or
provide the string 'none' to use the inital guess. The
default is 'none'.
Returns
-------
ierr
Values between 1 and 4 signal success.
Notes
-----
self.fcoef, contains the fitted coefficients.
]
name[self].x assign[=] name[x]
name[self].y assign[=] name[y]
if compare[name[dcoef] is_not constant[none]] begin[:]
variable[coef] assign[=] name[dcoef]
variable[fcoef] assign[=] call[name[optimize].leastsq, parameter[name[self].residual, name[coef]]]
name[self].fcoef assign[=] call[call[name[fcoef]][constant[0]].tolist, parameter[]]
return[call[name[fcoef]][constant[1]]] | keyword[def] identifier[fit] ( identifier[self] , identifier[x] , identifier[y] , identifier[dcoef] = literal[string] ):
literal[string]
identifier[self] . identifier[x] = identifier[x]
identifier[self] . identifier[y] = identifier[y]
keyword[if] identifier[dcoef] keyword[is] keyword[not] literal[string] :
identifier[coef] = identifier[dcoef]
keyword[else] :
identifier[coef] = identifier[self] . identifier[coef]
identifier[fcoef] = identifier[optimize] . identifier[leastsq] ( identifier[self] . identifier[residual] , identifier[coef] , identifier[args] =( identifier[y] , identifier[self] . identifier[func] , identifier[x] ))
identifier[self] . identifier[fcoef] = identifier[fcoef] [ literal[int] ]. identifier[tolist] ()
keyword[return] identifier[fcoef] [ literal[int] ] | def fit(self, x, y, dcoef='none'):
"""
performs the fit
x, y : list
Matching data arrays that define a numerical function y(x),
this is the data to be fitted.
dcoef : list or string
You can provide a different guess for the coefficients, or
provide the string 'none' to use the inital guess. The
default is 'none'.
Returns
-------
ierr
Values between 1 and 4 signal success.
Notes
-----
self.fcoef, contains the fitted coefficients.
"""
self.x = x
self.y = y
if dcoef is not 'none':
coef = dcoef # depends on [control=['if'], data=['dcoef']]
else:
coef = self.coef
fcoef = optimize.leastsq(self.residual, coef, args=(y, self.func, x))
self.fcoef = fcoef[0].tolist()
return fcoef[1] |
def pca(x, subtract_mean=False, normalize=False, sort_components=True,
        reducedim=None, algorithm=pca_eig):
    """Calculate principal component analysis (PCA).

    Parameters
    ----------
    x : ndarray, shape (trials, channels, samples) or (channels, samples)
        Input data.
    subtract_mean : bool, optional
        Subtract the sample mean from x before decomposing.
    normalize : bool, optional
        Normalize channel variances before applying PCA.
    sort_components : bool, optional
        Sort principal components by decreasing eigenvalue.  Forced to
        True whenever `reducedim` is set.
    reducedim : float or int or None, optional
        A value below 1 is the fraction of variance to retain: all
        components accounting for less than ``1 - reducedim`` of the
        variance are dropped.  A value of 1 or greater is the number of
        (sorted) components to keep.  None keeps every component.
    algorithm : func, optional
        Eigendecomposition backend
        (:func:`pca_eig` or :func:`pca_svd`).

    Returns
    -------
    w : ndarray, shape (channels, components)
        PCA transformation matrix.
    v : ndarray, shape (components, channels)
        Inverse PCA transformation matrix.
    """
    data = np.asarray(x)
    if data.ndim == 3:
        data = cat_trials(data)
    if reducedim:
        sort_components = True
    if subtract_mean:
        data = data - np.mean(data, axis=1, keepdims=True)
    whiten, unwhiten = None, None
    if normalize:
        stds = np.std(data, axis=1, ddof=1)
        whiten = np.diag(1.0 / stds)
        unwhiten = np.diag(stds)
        data = np.dot(whiten, data)
    w, latent = algorithm(data)
    # PCA is a pure rotation, so the inverse transform is the transpose
    v = w.T
    if normalize:
        w = np.dot(whiten, w)
        v = np.dot(v, unwhiten)
    latent /= sum(latent)
    if sort_components:
        order = np.argsort(latent)[::-1]
        w, v, latent = w[:, order], v[order, :], latent[order]
    if reducedim is None:
        return w, v
    if reducedim < 1:
        # keep components until the retained variance reaches reducedim
        selected = np.nonzero(np.cumsum(latent) < reducedim)[0]
        try:
            selected = np.concatenate([selected, [selected[-1] + 1]])
        except IndexError:
            selected = [0]
        if selected[-1] >= w.shape[1]:
            selected = selected[0:-1]
        w, v = w[:, selected], v[selected, :]
    else:
        w, v = w[:, :reducedim], v[:reducedim, :]
    return w, v
constant[Calculate principal component analysis (PCA).
Parameters
----------
x : ndarray, shape (trials, channels, samples) or (channels, samples)
Input data.
subtract_mean : bool, optional
Subtract sample mean from x.
normalize : bool, optional
Normalize variances before applying PCA.
sort_components : bool, optional
Sort principal components in order of decreasing eigenvalues.
reducedim : float or int or None, optional
A value less than 1 is interpreted as the fraction of variance that
should be retained in the data. All components that account for less
than `1 - reducedim` of the variance are removed.
An integer value of 1 or greater is interpreted as the number of
(sorted) components to retain.
If None, do not reduce dimensionality (i.e. keep all components).
algorithm : func, optional
Function to use for eigenvalue decomposition
(:func:`pca_eig` or :func:`pca_svd`).
Returns
-------
w : ndarray, shape (channels, components)
PCA transformation matrix.
v : ndarray, shape (components, channels)
Inverse PCA transformation matrix.
]
variable[x] assign[=] call[name[np].asarray, parameter[name[x]]]
if compare[name[x].ndim equal[==] constant[3]] begin[:]
variable[x] assign[=] call[name[cat_trials], parameter[name[x]]]
if name[reducedim] begin[:]
variable[sort_components] assign[=] constant[True]
if name[subtract_mean] begin[:]
variable[x] assign[=] binary_operation[name[x] - call[name[np].mean, parameter[name[x]]]]
<ast.Tuple object at 0x7da1b26a3490> assign[=] tuple[[<ast.Constant object at 0x7da1b26a3100>, <ast.Constant object at 0x7da1b26a3130>]]
if name[normalize] begin[:]
variable[l] assign[=] call[name[np].std, parameter[name[x]]]
variable[k] assign[=] call[name[np].diag, parameter[binary_operation[constant[1.0] / name[l]]]]
variable[l] assign[=] call[name[np].diag, parameter[name[l]]]
variable[x] assign[=] call[name[np].dot, parameter[name[k], name[x]]]
<ast.Tuple object at 0x7da1b26cba90> assign[=] call[name[algorithm], parameter[name[x]]]
variable[v] assign[=] name[w].T
if name[normalize] begin[:]
variable[w] assign[=] call[name[np].dot, parameter[name[k], name[w]]]
variable[v] assign[=] call[name[np].dot, parameter[name[v], name[l]]]
<ast.AugAssign object at 0x7da1b26cb610>
if name[sort_components] begin[:]
variable[order] assign[=] call[call[name[np].argsort, parameter[name[latent]]]][<ast.Slice object at 0x7da1b26cbb20>]
variable[w] assign[=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b26ca920>, <ast.Name object at 0x7da1b26ca8c0>]]]
variable[v] assign[=] call[name[v]][tuple[[<ast.Name object at 0x7da1b26c9810>, <ast.Slice object at 0x7da1b26c83d0>]]]
variable[latent] assign[=] call[name[latent]][name[order]]
if compare[name[reducedim] is_not constant[None]] begin[:]
if compare[name[reducedim] less[<] constant[1]] begin[:]
variable[selected] assign[=] call[call[name[np].nonzero, parameter[compare[call[name[np].cumsum, parameter[name[latent]]] less[<] name[reducedim]]]]][constant[0]]
<ast.Try object at 0x7da1b26c9a50>
if compare[call[name[selected]][<ast.UnaryOp object at 0x7da1b2604430>] greater_or_equal[>=] call[name[w].shape][constant[1]]] begin[:]
variable[selected] assign[=] call[name[selected]][<ast.Slice object at 0x7da1b2604550>]
variable[w] assign[=] call[name[w]][tuple[[<ast.Slice object at 0x7da1b2607820>, <ast.Name object at 0x7da1b2605960>]]]
variable[v] assign[=] call[name[v]][tuple[[<ast.Name object at 0x7da1b26042e0>, <ast.Slice object at 0x7da1b2604370>]]]
return[tuple[[<ast.Name object at 0x7da1b26042b0>, <ast.Name object at 0x7da1b2607d30>]]] | keyword[def] identifier[pca] ( identifier[x] , identifier[subtract_mean] = keyword[False] , identifier[normalize] = keyword[False] , identifier[sort_components] = keyword[True] ,
identifier[reducedim] = keyword[None] , identifier[algorithm] = identifier[pca_eig] ):
literal[string]
identifier[x] = identifier[np] . identifier[asarray] ( identifier[x] )
keyword[if] identifier[x] . identifier[ndim] == literal[int] :
identifier[x] = identifier[cat_trials] ( identifier[x] )
keyword[if] identifier[reducedim] :
identifier[sort_components] = keyword[True]
keyword[if] identifier[subtract_mean] :
identifier[x] = identifier[x] - identifier[np] . identifier[mean] ( identifier[x] , identifier[axis] = literal[int] , identifier[keepdims] = keyword[True] )
identifier[k] , identifier[l] = keyword[None] , keyword[None]
keyword[if] identifier[normalize] :
identifier[l] = identifier[np] . identifier[std] ( identifier[x] , identifier[axis] = literal[int] , identifier[ddof] = literal[int] )
identifier[k] = identifier[np] . identifier[diag] ( literal[int] / identifier[l] )
identifier[l] = identifier[np] . identifier[diag] ( identifier[l] )
identifier[x] = identifier[np] . identifier[dot] ( identifier[k] , identifier[x] )
identifier[w] , identifier[latent] = identifier[algorithm] ( identifier[x] )
identifier[v] = identifier[w] . identifier[T]
keyword[if] identifier[normalize] :
identifier[w] = identifier[np] . identifier[dot] ( identifier[k] , identifier[w] )
identifier[v] = identifier[np] . identifier[dot] ( identifier[v] , identifier[l] )
identifier[latent] /= identifier[sum] ( identifier[latent] )
keyword[if] identifier[sort_components] :
identifier[order] = identifier[np] . identifier[argsort] ( identifier[latent] )[::- literal[int] ]
identifier[w] = identifier[w] [:, identifier[order] ]
identifier[v] = identifier[v] [ identifier[order] ,:]
identifier[latent] = identifier[latent] [ identifier[order] ]
keyword[if] identifier[reducedim] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[reducedim] < literal[int] :
identifier[selected] = identifier[np] . identifier[nonzero] ( identifier[np] . identifier[cumsum] ( identifier[latent] )< identifier[reducedim] )[ literal[int] ]
keyword[try] :
identifier[selected] = identifier[np] . identifier[concatenate] ([ identifier[selected] ,[ identifier[selected] [- literal[int] ]+ literal[int] ]])
keyword[except] identifier[IndexError] :
identifier[selected] =[ literal[int] ]
keyword[if] identifier[selected] [- literal[int] ]>= identifier[w] . identifier[shape] [ literal[int] ]:
identifier[selected] = identifier[selected] [ literal[int] :- literal[int] ]
identifier[w] = identifier[w] [:, identifier[selected] ]
identifier[v] = identifier[v] [ identifier[selected] ,:]
keyword[else] :
identifier[w] = identifier[w] [:,: identifier[reducedim] ]
identifier[v] = identifier[v] [: identifier[reducedim] ,:]
keyword[return] identifier[w] , identifier[v] | def pca(x, subtract_mean=False, normalize=False, sort_components=True, reducedim=None, algorithm=pca_eig):
"""Calculate principal component analysis (PCA).
Parameters
----------
x : ndarray, shape (trials, channels, samples) or (channels, samples)
Input data.
subtract_mean : bool, optional
Subtract sample mean from x.
normalize : bool, optional
Normalize variances before applying PCA.
sort_components : bool, optional
Sort principal components in order of decreasing eigenvalues.
reducedim : float or int or None, optional
A value less than 1 is interpreted as the fraction of variance that
should be retained in the data. All components that account for less
than `1 - reducedim` of the variance are removed.
An integer value of 1 or greater is interpreted as the number of
(sorted) components to retain.
If None, do not reduce dimensionality (i.e. keep all components).
algorithm : func, optional
Function to use for eigenvalue decomposition
(:func:`pca_eig` or :func:`pca_svd`).
Returns
-------
w : ndarray, shape (channels, components)
PCA transformation matrix.
v : ndarray, shape (components, channels)
Inverse PCA transformation matrix.
"""
x = np.asarray(x)
if x.ndim == 3:
x = cat_trials(x) # depends on [control=['if'], data=[]]
if reducedim:
sort_components = True # depends on [control=['if'], data=[]]
if subtract_mean:
x = x - np.mean(x, axis=1, keepdims=True) # depends on [control=['if'], data=[]]
(k, l) = (None, None)
if normalize:
l = np.std(x, axis=1, ddof=1)
k = np.diag(1.0 / l)
l = np.diag(l)
x = np.dot(k, x) # depends on [control=['if'], data=[]]
(w, latent) = algorithm(x)
# PCA is just a rotation, so inverse is equal to transpose
v = w.T
if normalize:
w = np.dot(k, w)
v = np.dot(v, l) # depends on [control=['if'], data=[]]
latent /= sum(latent)
if sort_components:
order = np.argsort(latent)[::-1]
w = w[:, order]
v = v[order, :]
latent = latent[order] # depends on [control=['if'], data=[]]
if reducedim is not None:
if reducedim < 1:
selected = np.nonzero(np.cumsum(latent) < reducedim)[0]
try:
selected = np.concatenate([selected, [selected[-1] + 1]]) # depends on [control=['try'], data=[]]
except IndexError:
selected = [0] # depends on [control=['except'], data=[]]
if selected[-1] >= w.shape[1]:
selected = selected[0:-1] # depends on [control=['if'], data=[]]
w = w[:, selected]
v = v[selected, :] # depends on [control=['if'], data=['reducedim']]
else:
w = w[:, :reducedim]
v = v[:reducedim, :] # depends on [control=['if'], data=['reducedim']]
return (w, v) |
def list_properties(type):
    """Return the GParamSpecs for every property of `type`.

    :param type: a Python GObject instance or type that the signal is associated with
    :type type: :obj:`GObject.Object`
    :returns: a list of :obj:`GObject.ParamSpec`
    :rtype: [:obj:`GObject.ParamSpec`]

    Accepts a GObject/GInterface subclass or a GType and lists the
    GParamSpecs of all its properties.
    """
    # A GType wrapper carries its Python class in `pytype`.
    if isinstance(type, PGType):
        type = type.pytype

    from pgi.obj import Object, InterfaceBase

    if not issubclass(type, (Object, InterfaceBase)):
        raise TypeError("Must be a subclass of %s or %s" %
                        (Object.__name__, InterfaceBase.__name__))

    props = type.props
    return [getattr(props, name) for name in dir(props)
            if not name.startswith("_")]
constant[
:param type: a Python GObject instance or type that the signal is associated with
:type type: :obj:`GObject.Object`
:returns: a list of :obj:`GObject.ParamSpec`
:rtype: [:obj:`GObject.ParamSpec`]
Takes a GObject/GInterface subclass or a GType and returns a list of
GParamSpecs for all properties of `type`.
]
if call[name[isinstance], parameter[name[type], name[PGType]]] begin[:]
variable[type] assign[=] name[type].pytype
from relative_module[pgi.obj] import module[Object], module[InterfaceBase]
if <ast.UnaryOp object at 0x7da1b0fae320> begin[:]
<ast.Raise object at 0x7da1b0fadb40>
variable[gparams] assign[=] list[[]]
for taget[name[key]] in starred[call[name[dir], parameter[name[type].props]]] begin[:]
if <ast.UnaryOp object at 0x7da1b0fad030> begin[:]
call[name[gparams].append, parameter[call[name[getattr], parameter[name[type].props, name[key]]]]]
return[name[gparams]] | keyword[def] identifier[list_properties] ( identifier[type] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[type] , identifier[PGType] ):
identifier[type] = identifier[type] . identifier[pytype]
keyword[from] identifier[pgi] . identifier[obj] keyword[import] identifier[Object] , identifier[InterfaceBase]
keyword[if] keyword[not] identifier[issubclass] ( identifier[type] ,( identifier[Object] , identifier[InterfaceBase] )):
keyword[raise] identifier[TypeError] ( literal[string] %
( identifier[Object] . identifier[__name__] , identifier[InterfaceBase] . identifier[__name__] ))
identifier[gparams] =[]
keyword[for] identifier[key] keyword[in] identifier[dir] ( identifier[type] . identifier[props] ):
keyword[if] keyword[not] identifier[key] . identifier[startswith] ( literal[string] ):
identifier[gparams] . identifier[append] ( identifier[getattr] ( identifier[type] . identifier[props] , identifier[key] ))
keyword[return] identifier[gparams] | def list_properties(type):
"""
:param type: a Python GObject instance or type that the signal is associated with
:type type: :obj:`GObject.Object`
:returns: a list of :obj:`GObject.ParamSpec`
:rtype: [:obj:`GObject.ParamSpec`]
Takes a GObject/GInterface subclass or a GType and returns a list of
GParamSpecs for all properties of `type`.
"""
if isinstance(type, PGType):
type = type.pytype # depends on [control=['if'], data=[]]
from pgi.obj import Object, InterfaceBase
if not issubclass(type, (Object, InterfaceBase)):
raise TypeError('Must be a subclass of %s or %s' % (Object.__name__, InterfaceBase.__name__)) # depends on [control=['if'], data=[]]
gparams = []
for key in dir(type.props):
if not key.startswith('_'):
gparams.append(getattr(type.props, key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return gparams |
def enforce_duration(self, duration_thresh):
    """
    This method takes a quantized pitch contour and filters out
    those time sections where the contour is not long enough, as specified
    by duration threshold (given in milliseconds).

    All transactions assume data in cent scale.

    :param duration_thresh: minimum duration (milliseconds) a flat
        (constant-pitch) region must last to be kept.

    Modifies ``self.pitch`` in place: flat runs shorter than the
    threshold are replaced with the silence marker -10000, and samples
    that do not begin a flat run are silenced individually.
    """
    n = len(self.pitch)
    i = 1
    while i < n - 1:
        if self.pitch[i] == -10000:  # already silenced: skip
            i += 1
            continue
        if self.pitch[i] - self.pitch[i - 1] != 0 and self.pitch[i + 1] - self.pitch[i] == 0:
            # `i` begins a flat run; advance to the run's last sample.
            start = i
            # BUGFIX: bound by n - 1 (not n) so pitch[i + 1] never
            # indexes past the end of the array when the contour ends
            # in a flat run (the original `i < len(self.pitch)` bound
            # raised IndexError in that case).
            while i < n - 1 and self.pitch[i + 1] - self.pitch[i] == 0:
                i += 1
            if (self.timestamps[i] - self.timestamps[start]) * 1000 < duration_thresh:
                # Run too short: silence every sample in it.
                self.pitch[start:i + 1] = np.zeros(i + 1 - start) - 10000
        else:
            # Sample does not begin a flat run: silence it.
            self.pitch[i] = -10000
        i += 1
constant[
This method takes a quantized pitch contour and filters out
those time sections where the contour is not long enough, as specified
by duration threshold (given in milliseconds).
All transactions assume data in cent scale.
]
variable[i] assign[=] constant[1]
while compare[name[i] less[<] binary_operation[call[name[len], parameter[name[self].pitch]] - constant[1]]] begin[:]
if compare[call[name[self].pitch][name[i]] equal[==] <ast.UnaryOp object at 0x7da18f58e290>] begin[:]
<ast.AugAssign object at 0x7da18f58e0e0>
continue
if <ast.BoolOp object at 0x7da18f58d180> begin[:]
variable[start] assign[=] name[i]
while <ast.BoolOp object at 0x7da20c76cf10> begin[:]
<ast.AugAssign object at 0x7da18eb57670>
if compare[binary_operation[binary_operation[call[name[self].timestamps][name[i]] - call[name[self].timestamps][name[start]]] * constant[1000]] less[<] name[duration_thresh]] begin[:]
call[name[self].pitch][<ast.Slice object at 0x7da18eb547f0>] assign[=] binary_operation[call[name[np].zeros, parameter[binary_operation[binary_operation[name[i] + constant[1]] - name[start]]]] - constant[10000]] | keyword[def] identifier[enforce_duration] ( identifier[self] , identifier[duration_thresh] ):
literal[string]
identifier[i] = literal[int]
keyword[while] identifier[i] < identifier[len] ( identifier[self] . identifier[pitch] )- literal[int] :
keyword[if] identifier[self] . identifier[pitch] [ identifier[i] ]==- literal[int] :
identifier[i] += literal[int]
keyword[continue]
keyword[if] identifier[self] . identifier[pitch] [ identifier[i] ]- identifier[self] . identifier[pitch] [ identifier[i] - literal[int] ]!= literal[int] keyword[and] identifier[self] . identifier[pitch] [ identifier[i] + literal[int] ]- identifier[self] . identifier[pitch] [ identifier[i] ]== literal[int] :
identifier[start] = identifier[i]
keyword[while] identifier[i] < identifier[len] ( identifier[self] . identifier[pitch] ) keyword[and] identifier[self] . identifier[pitch] [ identifier[i] + literal[int] ]- identifier[self] . identifier[pitch] [ identifier[i] ]== literal[int] :
identifier[i] += literal[int]
keyword[if] ( identifier[self] . identifier[timestamps] [ identifier[i] ]- identifier[self] . identifier[timestamps] [ identifier[start] ])* literal[int] < identifier[duration_thresh] :
identifier[self] . identifier[pitch] [ identifier[start] : identifier[i] + literal[int] ]= identifier[np] . identifier[zeros] ( identifier[i] + literal[int] - identifier[start] )- literal[int]
keyword[else] :
identifier[self] . identifier[pitch] [ identifier[i] ]=- literal[int]
identifier[i] += literal[int] | def enforce_duration(self, duration_thresh):
"""
This method takes a quantized pitch contour and filters out
those time sections where the contour is not long enough, as specified
by duration threshold (given in milliseconds).
All transactions assume data in cent scale.
"""
i = 1
while i < len(self.pitch) - 1:
if self.pitch[i] == -10000:
i += 1
continue # depends on [control=['if'], data=[]]
if self.pitch[i] - self.pitch[i - 1] != 0 and self.pitch[i + 1] - self.pitch[i] == 0:
start = i
while i < len(self.pitch) and self.pitch[i + 1] - self.pitch[i] == 0:
i += 1 # depends on [control=['while'], data=[]]
if (self.timestamps[i] - self.timestamps[start]) * 1000 < duration_thresh:
self.pitch[start:i + 1] = np.zeros(i + 1 - start) - 10000 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
self.pitch[i] = -10000
i += 1 # depends on [control=['while'], data=['i']] |
def element_text_color_should_be(self, locator, expected):
    """Verifies the element identified by `locator` has the expected
    text color (it verifies the CSS attribute color). Color should be in
    RGBA format.

    Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA)

    | *Argument* | *Description* | *Example* |
    | locator | Selenium 2 element locator | id=my_id |
    | expected | expected color | rgba(0, 128, 0, 1) |"""
    message = "Verifying element '%s' has text color '%s'" % (locator, expected)
    self._info(message)
    # Delegate the actual CSS comparison to the shared helper.
    self._check_element_css_value(locator, 'color', expected)
constant[Verifies the element identified by `locator` has the expected
text color (it verifies the CSS attribute color). Color should be in
RGBA format.
Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA)
| *Argument* | *Description* | *Example* |
| locator | Selenium 2 element locator | id=my_id |
| expected | expected color | rgba(0, 128, 0, 1) |]
call[name[self]._info, parameter[binary_operation[constant[Verifying element '%s' has text color '%s'] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e9b1c90>, <ast.Name object at 0x7da20e9b1810>]]]]]
call[name[self]._check_element_css_value, parameter[name[locator], constant[color], name[expected]]] | keyword[def] identifier[element_text_color_should_be] ( identifier[self] , identifier[locator] , identifier[expected] ):
literal[string]
identifier[self] . identifier[_info] ( literal[string] %( identifier[locator] , identifier[expected] ))
identifier[self] . identifier[_check_element_css_value] ( identifier[locator] , literal[string] , identifier[expected] ) | def element_text_color_should_be(self, locator, expected):
"""Verifies the element identified by `locator` has the expected
text color (it verifies the CSS attribute color). Color should be in
RGBA format.
Example of rgba format: rgba(RED, GREEN, BLUE, ALPHA)
| *Argument* | *Description* | *Example* |
| locator | Selenium 2 element locator | id=my_id |
| expected | expected color | rgba(0, 128, 0, 1) |"""
self._info("Verifying element '%s' has text color '%s'" % (locator, expected))
self._check_element_css_value(locator, 'color', expected) |
def dry_run(self):
    """ print information about the jenkins job """
    details = "Job Info: {name} -> {host}".format(
        name=self.name,
        host=self.jenkins_host.baseurl,
    )
    LOGGER.info(details)
constant[ print information about the jenkins job ]
call[name[LOGGER].info, parameter[call[constant[Job Info: {name} -> {host}].format, parameter[]]]] | keyword[def] identifier[dry_run] ( identifier[self] ):
literal[string]
identifier[LOGGER] . identifier[info] ( literal[string] . identifier[format] (
identifier[name] = identifier[self] . identifier[name] ,
identifier[host] = identifier[self] . identifier[jenkins_host] . identifier[baseurl]
)) | def dry_run(self):
""" print information about the jenkins job """
LOGGER.info('Job Info: {name} -> {host}'.format(name=self.name, host=self.jenkins_host.baseurl)) |
def add_experiences(self, curr_info: AllBrainInfo, next_info: AllBrainInfo,
                    take_action_outputs):
    """
    Adds experiences to each agent's experience history.
    :param curr_info: Current AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
    :param next_info: Next AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
    :param take_action_outputs: The outputs of the take action method.
    """
    # Used to collect information about student performance.
    info_student = curr_info[self.brain_name]
    next_info_student = next_info[self.brain_name]

    # Remember the BrainInfo each agent was last observed in.
    for agent_id in info_student.agents:
        self.evaluation_buffer[agent_id].last_brain_info = info_student

    for agent_id in next_info_student.agents:
        if self.evaluation_buffer[agent_id].last_brain_info is None:
            # Agent has no recorded previous step yet; nothing to accumulate.
            continue
        next_idx = next_info_student.agents.index(agent_id)
        self.cumulative_rewards.setdefault(agent_id, 0)
        self.cumulative_rewards[agent_id] += next_info_student.rewards[next_idx]
        # Only count a step while the episode is still running.
        if not next_info_student.local_done[next_idx]:
            self.episode_steps.setdefault(agent_id, 0)
            self.episode_steps[agent_id] += 1
constant[
Adds experiences to each agent's experience history.
:param curr_info: Current AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
:param next_info: Next AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
:param take_action_outputs: The outputs of the take action method.
]
variable[info_student] assign[=] call[name[curr_info]][name[self].brain_name]
variable[next_info_student] assign[=] call[name[next_info]][name[self].brain_name]
for taget[name[agent_id]] in starred[name[info_student].agents] begin[:]
call[name[self].evaluation_buffer][name[agent_id]].last_brain_info assign[=] name[info_student]
for taget[name[agent_id]] in starred[name[next_info_student].agents] begin[:]
variable[stored_info_student] assign[=] call[name[self].evaluation_buffer][name[agent_id]].last_brain_info
if compare[name[stored_info_student] is constant[None]] begin[:]
continue | keyword[def] identifier[add_experiences] ( identifier[self] , identifier[curr_info] : identifier[AllBrainInfo] , identifier[next_info] : identifier[AllBrainInfo] ,
identifier[take_action_outputs] ):
literal[string]
identifier[info_student] = identifier[curr_info] [ identifier[self] . identifier[brain_name] ]
identifier[next_info_student] = identifier[next_info] [ identifier[self] . identifier[brain_name] ]
keyword[for] identifier[agent_id] keyword[in] identifier[info_student] . identifier[agents] :
identifier[self] . identifier[evaluation_buffer] [ identifier[agent_id] ]. identifier[last_brain_info] = identifier[info_student]
keyword[for] identifier[agent_id] keyword[in] identifier[next_info_student] . identifier[agents] :
identifier[stored_info_student] = identifier[self] . identifier[evaluation_buffer] [ identifier[agent_id] ]. identifier[last_brain_info]
keyword[if] identifier[stored_info_student] keyword[is] keyword[None] :
keyword[continue]
keyword[else] :
identifier[next_idx] = identifier[next_info_student] . identifier[agents] . identifier[index] ( identifier[agent_id] )
keyword[if] identifier[agent_id] keyword[not] keyword[in] identifier[self] . identifier[cumulative_rewards] :
identifier[self] . identifier[cumulative_rewards] [ identifier[agent_id] ]= literal[int]
identifier[self] . identifier[cumulative_rewards] [ identifier[agent_id] ]+= identifier[next_info_student] . identifier[rewards] [ identifier[next_idx] ]
keyword[if] keyword[not] identifier[next_info_student] . identifier[local_done] [ identifier[next_idx] ]:
keyword[if] identifier[agent_id] keyword[not] keyword[in] identifier[self] . identifier[episode_steps] :
identifier[self] . identifier[episode_steps] [ identifier[agent_id] ]= literal[int]
identifier[self] . identifier[episode_steps] [ identifier[agent_id] ]+= literal[int] | def add_experiences(self, curr_info: AllBrainInfo, next_info: AllBrainInfo, take_action_outputs):
"""
Adds experiences to each agent's experience history.
:param curr_info: Current AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
:param next_info: Next AllBrainInfo (Dictionary of all current brains and corresponding BrainInfo).
:param take_action_outputs: The outputs of the take action method.
"""
# Used to collect information about student performance.
info_student = curr_info[self.brain_name]
next_info_student = next_info[self.brain_name]
for agent_id in info_student.agents:
self.evaluation_buffer[agent_id].last_brain_info = info_student # depends on [control=['for'], data=['agent_id']]
for agent_id in next_info_student.agents:
stored_info_student = self.evaluation_buffer[agent_id].last_brain_info
if stored_info_student is None:
continue # depends on [control=['if'], data=[]]
else:
next_idx = next_info_student.agents.index(agent_id)
if agent_id not in self.cumulative_rewards:
self.cumulative_rewards[agent_id] = 0 # depends on [control=['if'], data=['agent_id']]
self.cumulative_rewards[agent_id] += next_info_student.rewards[next_idx]
if not next_info_student.local_done[next_idx]:
if agent_id not in self.episode_steps:
self.episode_steps[agent_id] = 0 # depends on [control=['if'], data=['agent_id']]
self.episode_steps[agent_id] += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['agent_id']] |
def _to_torch(Z, dtype=None):
"""Converts a None, list, np.ndarray, or torch.Tensor to torch.Tensor;
also handles converting sparse input to dense."""
if Z is None:
return None
elif issparse(Z):
Z = torch.from_numpy(Z.toarray())
elif isinstance(Z, torch.Tensor):
pass
elif isinstance(Z, list):
Z = torch.from_numpy(np.array(Z))
elif isinstance(Z, np.ndarray):
Z = torch.from_numpy(Z)
else:
msg = (
f"Expected list, numpy.ndarray or torch.Tensor, "
f"got {type(Z)} instead."
)
raise Exception(msg)
return Z.type(dtype) if dtype else Z | def function[_to_torch, parameter[Z, dtype]]:
constant[Converts a None, list, np.ndarray, or torch.Tensor to torch.Tensor;
also handles converting sparse input to dense.]
if compare[name[Z] is constant[None]] begin[:]
return[constant[None]]
return[<ast.IfExp object at 0x7da1b1b65030>] | keyword[def] identifier[_to_torch] ( identifier[Z] , identifier[dtype] = keyword[None] ):
literal[string]
keyword[if] identifier[Z] keyword[is] keyword[None] :
keyword[return] keyword[None]
keyword[elif] identifier[issparse] ( identifier[Z] ):
identifier[Z] = identifier[torch] . identifier[from_numpy] ( identifier[Z] . identifier[toarray] ())
keyword[elif] identifier[isinstance] ( identifier[Z] , identifier[torch] . identifier[Tensor] ):
keyword[pass]
keyword[elif] identifier[isinstance] ( identifier[Z] , identifier[list] ):
identifier[Z] = identifier[torch] . identifier[from_numpy] ( identifier[np] . identifier[array] ( identifier[Z] ))
keyword[elif] identifier[isinstance] ( identifier[Z] , identifier[np] . identifier[ndarray] ):
identifier[Z] = identifier[torch] . identifier[from_numpy] ( identifier[Z] )
keyword[else] :
identifier[msg] =(
literal[string]
literal[string]
)
keyword[raise] identifier[Exception] ( identifier[msg] )
keyword[return] identifier[Z] . identifier[type] ( identifier[dtype] ) keyword[if] identifier[dtype] keyword[else] identifier[Z] | def _to_torch(Z, dtype=None):
"""Converts a None, list, np.ndarray, or torch.Tensor to torch.Tensor;
also handles converting sparse input to dense."""
if Z is None:
return None # depends on [control=['if'], data=[]]
elif issparse(Z):
Z = torch.from_numpy(Z.toarray()) # depends on [control=['if'], data=[]]
elif isinstance(Z, torch.Tensor):
pass # depends on [control=['if'], data=[]]
elif isinstance(Z, list):
Z = torch.from_numpy(np.array(Z)) # depends on [control=['if'], data=[]]
elif isinstance(Z, np.ndarray):
Z = torch.from_numpy(Z) # depends on [control=['if'], data=[]]
else:
msg = f'Expected list, numpy.ndarray or torch.Tensor, got {type(Z)} instead.'
raise Exception(msg)
return Z.type(dtype) if dtype else Z |
def determine_num_chunks(chunk_size, file_size):
    """
    Figure out how many pieces we are sending the file in.

    NOTE: duke-data-service requires an empty chunk to be uploaded for empty files.

    :param chunk_size: int: size in bytes of each upload chunk (must be > 0)
    :param file_size: int: total size in bytes of the file being uploaded
    :return: int: number of chunks to upload; always at least 1
    """
    if file_size == 0:
        # Empty files are still uploaded as a single (empty) chunk.
        return 1
    # Exact integer ceiling division. The previous
    # int(math.ceil(float(file_size) / float(chunk_size))) went through
    # float and could return a wrong count for sizes above 2**53 bytes.
    return -(-file_size // chunk_size)
constant[
Figure out how many pieces we are sending the file in.
NOTE: duke-data-service requires an empty chunk to be uploaded for empty files.
]
if compare[name[file_size] equal[==] constant[0]] begin[:]
return[constant[1]]
return[call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[call[name[float], parameter[name[file_size]]] / call[name[float], parameter[name[chunk_size]]]]]]]]] | keyword[def] identifier[determine_num_chunks] ( identifier[chunk_size] , identifier[file_size] ):
literal[string]
keyword[if] identifier[file_size] == literal[int] :
keyword[return] literal[int]
keyword[return] identifier[int] ( identifier[math] . identifier[ceil] ( identifier[float] ( identifier[file_size] )/ identifier[float] ( identifier[chunk_size] ))) | def determine_num_chunks(chunk_size, file_size):
"""
Figure out how many pieces we are sending the file in.
NOTE: duke-data-service requires an empty chunk to be uploaded for empty files.
"""
if file_size == 0:
return 1 # depends on [control=['if'], data=[]]
return int(math.ceil(float(file_size) / float(chunk_size))) |
def hash_from_algo(algo):
    """
    Return a :mod:`hashlib` hash given the :xep:`300` `algo`.

    :param algo: The algorithm identifier as defined in :xep:`300`.
    :type algo: :class:`str`
    :raises NotImplementedError: if the hash algortihm is not supported by
        :mod:`hashlib`.
    :raises ValueError: if the hash algorithm MUST NOT be supported.
    :return: A hash object from :mod:`hashlib` or compatible.

    If the `algo` is not supported by the :mod:`hashlib` module,
    :class:`NotImplementedError` is raised.
    """
    # Look up the XEP-0300 identifier in the registry.
    try:
        entry = _HASH_ALGO_MAP[algo]
    except KeyError:
        raise NotImplementedError(
            "hash algorithm {!r} unknown".format(algo)
        ) from None
    enabled, (fun_name, fun_args, fun_kwargs) = entry

    # Some algorithms are explicitly forbidden for XMPP use.
    if not enabled:
        raise ValueError(
            "support of {} in XMPP is forbidden".format(algo)
        )

    # Map the registry entry to the corresponding hashlib constructor.
    try:
        constructor = getattr(hashlib, fun_name)
    except AttributeError as exc:
        raise NotImplementedError(
            "{} not supported by hashlib".format(algo)
        ) from exc
    return constructor(*fun_args, **fun_kwargs)
constant[
Return a :mod:`hashlib` hash given the :xep:`300` `algo`.
:param algo: The algorithm identifier as defined in :xep:`300`.
:type algo: :class:`str`
:raises NotImplementedError: if the hash algortihm is not supported by
:mod:`hashlib`.
:raises ValueError: if the hash algorithm MUST NOT be supported.
:return: A hash object from :mod:`hashlib` or compatible.
If the `algo` is not supported by the :mod:`hashlib` module,
:class:`NotImplementedError` is raised.
]
<ast.Try object at 0x7da20c6e7940>
if <ast.UnaryOp object at 0x7da18ede5e70> begin[:]
<ast.Raise object at 0x7da18ede4850>
<ast.Try object at 0x7da18ede70d0>
return[call[name[fun], parameter[<ast.Starred object at 0x7da18ede4070>]]] | keyword[def] identifier[hash_from_algo] ( identifier[algo] ):
literal[string]
keyword[try] :
identifier[enabled] ,( identifier[fun_name] , identifier[fun_args] , identifier[fun_kwargs] )= identifier[_HASH_ALGO_MAP] [ identifier[algo] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[NotImplementedError] (
literal[string] . identifier[format] ( identifier[algo] )
) keyword[from] keyword[None]
keyword[if] keyword[not] identifier[enabled] :
keyword[raise] identifier[ValueError] (
literal[string] . identifier[format] ( identifier[algo] )
)
keyword[try] :
identifier[fun] = identifier[getattr] ( identifier[hashlib] , identifier[fun_name] )
keyword[except] identifier[AttributeError] keyword[as] identifier[exc] :
keyword[raise] identifier[NotImplementedError] (
literal[string] . identifier[format] ( identifier[algo] )
) keyword[from] identifier[exc]
keyword[return] identifier[fun] (* identifier[fun_args] ,** identifier[fun_kwargs] ) | def hash_from_algo(algo):
"""
Return a :mod:`hashlib` hash given the :xep:`300` `algo`.
:param algo: The algorithm identifier as defined in :xep:`300`.
:type algo: :class:`str`
:raises NotImplementedError: if the hash algortihm is not supported by
:mod:`hashlib`.
:raises ValueError: if the hash algorithm MUST NOT be supported.
:return: A hash object from :mod:`hashlib` or compatible.
If the `algo` is not supported by the :mod:`hashlib` module,
:class:`NotImplementedError` is raised.
"""
try:
(enabled, (fun_name, fun_args, fun_kwargs)) = _HASH_ALGO_MAP[algo] # depends on [control=['try'], data=[]]
except KeyError:
raise NotImplementedError('hash algorithm {!r} unknown'.format(algo)) from None # depends on [control=['except'], data=[]]
if not enabled:
raise ValueError('support of {} in XMPP is forbidden'.format(algo)) # depends on [control=['if'], data=[]]
try:
fun = getattr(hashlib, fun_name) # depends on [control=['try'], data=[]]
except AttributeError as exc:
raise NotImplementedError('{} not supported by hashlib'.format(algo)) from exc # depends on [control=['except'], data=['exc']]
return fun(*fun_args, **fun_kwargs) |
def remove_ctx(properties):
    """
    Transform a dictionary of {name: [(elt, value)+]} into a dictionary of
    {name: [value+]}.

    :param dict properties: properties from where get only values.
    :return: dictionary of parameter values by names.
    :rtype: dict
    """
    # Drop the element half of every (elt, value) pair, keeping order.
    return {
        name: [value for _elt, value in elt_properties]
        for name, elt_properties in properties.items()
    }
constant[
Transform a dictionary of {name: [(elt, value)+]} into a dictionary of
{name: [value+]}.
:param dict properties: properties from where get only values.
:return: dictionary of parameter values by names.
:rtype: dict
]
variable[result] assign[=] dictionary[[], []]
for taget[name[name]] in starred[name[properties]] begin[:]
variable[elt_properties] assign[=] call[name[properties]][name[name]]
call[name[result]][name[name]] assign[=] list[[]]
for taget[tuple[[<ast.Name object at 0x7da18f720580>, <ast.Name object at 0x7da18f723790>]]] in starred[name[elt_properties]] begin[:]
call[call[name[result]][name[name]].append, parameter[name[value]]]
return[name[result]] | keyword[def] identifier[remove_ctx] ( identifier[properties] ):
literal[string]
identifier[result] ={}
keyword[for] identifier[name] keyword[in] identifier[properties] :
identifier[elt_properties] = identifier[properties] [ identifier[name] ]
identifier[result] [ identifier[name] ]=[]
keyword[for] identifier[_] , identifier[value] keyword[in] identifier[elt_properties] :
identifier[result] [ identifier[name] ]. identifier[append] ( identifier[value] )
keyword[return] identifier[result] | def remove_ctx(properties):
"""
Transform a dictionary of {name: [(elt, value)+]} into a dictionary of
{name: [value+]}.
:param dict properties: properties from where get only values.
:return: dictionary of parameter values by names.
:rtype: dict
"""
result = {}
for name in properties:
elt_properties = properties[name]
result[name] = []
for (_, value) in elt_properties:
result[name].append(value) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['name']]
return result |
def filter_list(lst, takeout, case_sensitive=True):
    """Return a modified list removing items specified.

    Args:
        lst: Original list of values
        takeout: Object or objects to remove from lst
        case_sensitive: if the search should be case sensitive

    Returns:
        list: Filtered list of values
    """
    takeout = force_list(takeout)
    if not case_sensitive:
        # NOTE(review): with case_sensitive=False the returned values are
        # lowercased copies of the originals, not the originals themselves.
        lst = [item.lower() for item in lst]
        takeout = [item.lower() for item in takeout]
    return [item for item in lst if item not in takeout]
constant[Return a modified list removing items specified.
Args:
lst: Original list of values
takeout: Object or objects to remove from lst
case_sensitive: if the search should be case sensitive
Returns:
list: Filtered list of values
]
variable[takeout] assign[=] call[name[force_list], parameter[name[takeout]]]
if <ast.UnaryOp object at 0x7da18f09ce80> begin[:]
variable[lst] assign[=] <ast.ListComp object at 0x7da18eb54e20>
variable[takeout] assign[=] <ast.ListComp object at 0x7da18eb57310>
return[<ast.ListComp object at 0x7da18eb574f0>] | keyword[def] identifier[filter_list] ( identifier[lst] , identifier[takeout] , identifier[case_sensitive] = keyword[True] ):
literal[string]
identifier[takeout] = identifier[force_list] ( identifier[takeout] )
keyword[if] keyword[not] identifier[case_sensitive] :
identifier[lst] =[ identifier[x] . identifier[lower] () keyword[for] identifier[x] keyword[in] identifier[lst] ]
identifier[takeout] =[ identifier[y] . identifier[lower] () keyword[for] identifier[y] keyword[in] identifier[takeout] ]
keyword[return] [ identifier[x] keyword[for] identifier[x] keyword[in] identifier[lst] keyword[if] identifier[x] keyword[not] keyword[in] identifier[takeout] ] | def filter_list(lst, takeout, case_sensitive=True):
"""Return a modified list removing items specified.
Args:
lst: Original list of values
takeout: Object or objects to remove from lst
case_sensitive: if the search should be case sensitive
Returns:
list: Filtered list of values
"""
takeout = force_list(takeout)
if not case_sensitive:
lst = [x.lower() for x in lst]
takeout = [y.lower() for y in takeout] # depends on [control=['if'], data=[]]
return [x for x in lst if x not in takeout] |
def validate_annotation_content(experiment_config, spec_key, builtin_name):
    '''
    Valid whether useAnnotation and searchSpacePath is coexist
    spec_key: 'advisor' or 'tuner'
    builtin_name: 'builtinAdvisorName' or 'builtinTunerName'
    '''
    if experiment_config.get('useAnnotation'):
        # Annotation mode generates the search space itself, so an
        # explicit searchSpacePath is a configuration conflict.
        if experiment_config.get('searchSpacePath'):
            print_error('If you set useAnnotation=true, please leave searchSpacePath empty')
            exit(1)
        return

    # validate searchSpaceFile
    spec = experiment_config[spec_key]
    if spec.get(builtin_name) == 'NetworkMorphism':
        # NetworkMorphism does not use a search space file.
        return
    if spec.get(builtin_name):
        if experiment_config.get('searchSpacePath') is None:
            print_error('Please set searchSpacePath!')
            exit(1)
        validate_search_space_content(experiment_config)
constant[
Valid whether useAnnotation and searchSpacePath is coexist
spec_key: 'advisor' or 'tuner'
builtin_name: 'builtinAdvisorName' or 'builtinTunerName'
]
if call[name[experiment_config].get, parameter[constant[useAnnotation]]] begin[:]
if call[name[experiment_config].get, parameter[constant[searchSpacePath]]] begin[:]
call[name[print_error], parameter[constant[If you set useAnnotation=true, please leave searchSpacePath empty]]]
call[name[exit], parameter[constant[1]]] | keyword[def] identifier[validate_annotation_content] ( identifier[experiment_config] , identifier[spec_key] , identifier[builtin_name] ):
literal[string]
keyword[if] identifier[experiment_config] . identifier[get] ( literal[string] ):
keyword[if] identifier[experiment_config] . identifier[get] ( literal[string] ):
identifier[print_error] ( literal[string] )
identifier[exit] ( literal[int] )
keyword[else] :
keyword[if] identifier[experiment_config] [ identifier[spec_key] ]. identifier[get] ( identifier[builtin_name] )== literal[string] :
keyword[return]
keyword[if] identifier[experiment_config] [ identifier[spec_key] ]. identifier[get] ( identifier[builtin_name] ):
keyword[if] identifier[experiment_config] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
identifier[print_error] ( literal[string] )
identifier[exit] ( literal[int] )
identifier[validate_search_space_content] ( identifier[experiment_config] ) | def validate_annotation_content(experiment_config, spec_key, builtin_name):
"""
Valid whether useAnnotation and searchSpacePath is coexist
spec_key: 'advisor' or 'tuner'
builtin_name: 'builtinAdvisorName' or 'builtinTunerName'
"""
if experiment_config.get('useAnnotation'):
if experiment_config.get('searchSpacePath'):
print_error('If you set useAnnotation=true, please leave searchSpacePath empty')
exit(1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
# validate searchSpaceFile
if experiment_config[spec_key].get(builtin_name) == 'NetworkMorphism':
return # depends on [control=['if'], data=[]]
if experiment_config[spec_key].get(builtin_name):
if experiment_config.get('searchSpacePath') is None:
print_error('Please set searchSpacePath!')
exit(1) # depends on [control=['if'], data=[]]
validate_search_space_content(experiment_config) # depends on [control=['if'], data=[]] |
def openTypeHheaCaretSlopeRiseFallback(info):
"""
Fallback to *openTypeHheaCaretSlopeRise*. If the italicAngle is zero,
return 1. If italicAngle is non-zero, compute the slope rise from the
complementary openTypeHheaCaretSlopeRun, if the latter is defined.
Else, default to an arbitrary fixed reference point (1000).
"""
italicAngle = getAttrWithFallback(info, "italicAngle")
if italicAngle != 0:
if (hasattr(info, "openTypeHheaCaretSlopeRun") and
info.openTypeHheaCaretSlopeRun is not None):
slopeRun = info.openTypeHheaCaretSlopeRun
return otRound(slopeRun / math.tan(math.radians(-italicAngle)))
else:
return 1000 # just an arbitrary non-zero reference point
return 1 | def function[openTypeHheaCaretSlopeRiseFallback, parameter[info]]:
constant[
Fallback to *openTypeHheaCaretSlopeRise*. If the italicAngle is zero,
return 1. If italicAngle is non-zero, compute the slope rise from the
complementary openTypeHheaCaretSlopeRun, if the latter is defined.
Else, default to an arbitrary fixed reference point (1000).
]
variable[italicAngle] assign[=] call[name[getAttrWithFallback], parameter[name[info], constant[italicAngle]]]
if compare[name[italicAngle] not_equal[!=] constant[0]] begin[:]
if <ast.BoolOp object at 0x7da20c9904f0> begin[:]
variable[slopeRun] assign[=] name[info].openTypeHheaCaretSlopeRun
return[call[name[otRound], parameter[binary_operation[name[slopeRun] / call[name[math].tan, parameter[call[name[math].radians, parameter[<ast.UnaryOp object at 0x7da20c992320>]]]]]]]]
return[constant[1]] | keyword[def] identifier[openTypeHheaCaretSlopeRiseFallback] ( identifier[info] ):
literal[string]
identifier[italicAngle] = identifier[getAttrWithFallback] ( identifier[info] , literal[string] )
keyword[if] identifier[italicAngle] != literal[int] :
keyword[if] ( identifier[hasattr] ( identifier[info] , literal[string] ) keyword[and]
identifier[info] . identifier[openTypeHheaCaretSlopeRun] keyword[is] keyword[not] keyword[None] ):
identifier[slopeRun] = identifier[info] . identifier[openTypeHheaCaretSlopeRun]
keyword[return] identifier[otRound] ( identifier[slopeRun] / identifier[math] . identifier[tan] ( identifier[math] . identifier[radians] (- identifier[italicAngle] )))
keyword[else] :
keyword[return] literal[int]
keyword[return] literal[int] | def openTypeHheaCaretSlopeRiseFallback(info):
"""
Fallback to *openTypeHheaCaretSlopeRise*. If the italicAngle is zero,
return 1. If italicAngle is non-zero, compute the slope rise from the
complementary openTypeHheaCaretSlopeRun, if the latter is defined.
Else, default to an arbitrary fixed reference point (1000).
"""
italicAngle = getAttrWithFallback(info, 'italicAngle')
if italicAngle != 0:
if hasattr(info, 'openTypeHheaCaretSlopeRun') and info.openTypeHheaCaretSlopeRun is not None:
slopeRun = info.openTypeHheaCaretSlopeRun
return otRound(slopeRun / math.tan(math.radians(-italicAngle))) # depends on [control=['if'], data=[]]
else:
return 1000 # just an arbitrary non-zero reference point # depends on [control=['if'], data=['italicAngle']]
return 1 |
def encode(self, x):
"""
Given an input array `x` it returns its associated encoding `y(x)`, that is,
a stable configuration (local energy minimum) of the hidden units
while the visible units are clampled to `x`.
Note that NO learning takes place.
"""
E = self.energy
y_min = self.find_energy_minimum(E, x)
return y_min | def function[encode, parameter[self, x]]:
constant[
Given an input array `x` it returns its associated encoding `y(x)`, that is,
a stable configuration (local energy minimum) of the hidden units
while the visible units are clampled to `x`.
Note that NO learning takes place.
]
variable[E] assign[=] name[self].energy
variable[y_min] assign[=] call[name[self].find_energy_minimum, parameter[name[E], name[x]]]
return[name[y_min]] | keyword[def] identifier[encode] ( identifier[self] , identifier[x] ):
literal[string]
identifier[E] = identifier[self] . identifier[energy]
identifier[y_min] = identifier[self] . identifier[find_energy_minimum] ( identifier[E] , identifier[x] )
keyword[return] identifier[y_min] | def encode(self, x):
"""
Given an input array `x` it returns its associated encoding `y(x)`, that is,
a stable configuration (local energy minimum) of the hidden units
while the visible units are clampled to `x`.
Note that NO learning takes place.
"""
E = self.energy
y_min = self.find_energy_minimum(E, x)
return y_min |
def storage_resolveBasedOnKey(self, *args, **kwargs):
"""
Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return:
"""
global Gd_internalvar
d_msg = {
'action': 'internalctl',
'meta': {
'var': 'key2address',
'compute': '<key>'
}
}
str_key = ""
b_status = False
for k,v in kwargs.items():
if k == 'key': str_key = v
d_msg['meta']['key'] = str_key
#
d_ret = self.pullPath_core(d_msg = d_msg)
return {
'status': b_status,
'path': str_internalLocation
} | def function[storage_resolveBasedOnKey, parameter[self]]:
constant[
Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return:
]
<ast.Global object at 0x7da1b2348490>
variable[d_msg] assign[=] dictionary[[<ast.Constant object at 0x7da1b234ab90>, <ast.Constant object at 0x7da1b2349db0>], [<ast.Constant object at 0x7da1b234a0e0>, <ast.Dict object at 0x7da1b2348550>]]
variable[str_key] assign[=] constant[]
variable[b_status] assign[=] constant[False]
for taget[tuple[[<ast.Name object at 0x7da1b234aa70>, <ast.Name object at 0x7da1b2349a20>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:]
if compare[name[k] equal[==] constant[key]] begin[:]
variable[str_key] assign[=] name[v]
call[call[name[d_msg]][constant[meta]]][constant[key]] assign[=] name[str_key]
variable[d_ret] assign[=] call[name[self].pullPath_core, parameter[]]
return[dictionary[[<ast.Constant object at 0x7da204345360>, <ast.Constant object at 0x7da204344e50>], [<ast.Name object at 0x7da204344df0>, <ast.Name object at 0x7da204344fd0>]]] | keyword[def] identifier[storage_resolveBasedOnKey] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
keyword[global] identifier[Gd_internalvar]
identifier[d_msg] ={
literal[string] : literal[string] ,
literal[string] :{
literal[string] : literal[string] ,
literal[string] : literal[string]
}
}
identifier[str_key] = literal[string]
identifier[b_status] = keyword[False]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] ():
keyword[if] identifier[k] == literal[string] : identifier[str_key] = identifier[v]
identifier[d_msg] [ literal[string] ][ literal[string] ]= identifier[str_key]
identifier[d_ret] = identifier[self] . identifier[pullPath_core] ( identifier[d_msg] = identifier[d_msg] )
keyword[return] {
literal[string] : identifier[b_status] ,
literal[string] : identifier[str_internalLocation]
} | def storage_resolveBasedOnKey(self, *args, **kwargs):
"""
Call the remote service and ask for the storage location based on the key.
:param args:
:param kwargs:
:return:
"""
global Gd_internalvar
d_msg = {'action': 'internalctl', 'meta': {'var': 'key2address', 'compute': '<key>'}}
str_key = ''
b_status = False
for (k, v) in kwargs.items():
if k == 'key':
str_key = v # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
d_msg['meta']['key'] = str_key #
d_ret = self.pullPath_core(d_msg=d_msg)
return {'status': b_status, 'path': str_internalLocation} |
def writeNSdict(self, nsdict):
'''Write a namespace dictionary, taking care to not clobber the
standard (or reserved by us) prefixes.
'''
for k,v in nsdict.items():
if (k,v) in _standard_ns: continue
rv = _reserved_ns.get(k)
if rv:
if rv != v:
raise KeyError("Reserved namespace " + str((k,v)) + " used")
continue
if k:
self.dom.setNamespaceAttribute(k, v)
else:
self.dom.setNamespaceAttribute('xmlns', v) | def function[writeNSdict, parameter[self, nsdict]]:
constant[Write a namespace dictionary, taking care to not clobber the
standard (or reserved by us) prefixes.
]
for taget[tuple[[<ast.Name object at 0x7da18c4cf0a0>, <ast.Name object at 0x7da18c4cc610>]]] in starred[call[name[nsdict].items, parameter[]]] begin[:]
if compare[tuple[[<ast.Name object at 0x7da18c4cc1c0>, <ast.Name object at 0x7da18c4cd9f0>]] in name[_standard_ns]] begin[:]
continue
variable[rv] assign[=] call[name[_reserved_ns].get, parameter[name[k]]]
if name[rv] begin[:]
if compare[name[rv] not_equal[!=] name[v]] begin[:]
<ast.Raise object at 0x7da18c4cec50>
continue
if name[k] begin[:]
call[name[self].dom.setNamespaceAttribute, parameter[name[k], name[v]]] | keyword[def] identifier[writeNSdict] ( identifier[self] , identifier[nsdict] ):
literal[string]
keyword[for] identifier[k] , identifier[v] keyword[in] identifier[nsdict] . identifier[items] ():
keyword[if] ( identifier[k] , identifier[v] ) keyword[in] identifier[_standard_ns] : keyword[continue]
identifier[rv] = identifier[_reserved_ns] . identifier[get] ( identifier[k] )
keyword[if] identifier[rv] :
keyword[if] identifier[rv] != identifier[v] :
keyword[raise] identifier[KeyError] ( literal[string] + identifier[str] (( identifier[k] , identifier[v] ))+ literal[string] )
keyword[continue]
keyword[if] identifier[k] :
identifier[self] . identifier[dom] . identifier[setNamespaceAttribute] ( identifier[k] , identifier[v] )
keyword[else] :
identifier[self] . identifier[dom] . identifier[setNamespaceAttribute] ( literal[string] , identifier[v] ) | def writeNSdict(self, nsdict):
"""Write a namespace dictionary, taking care to not clobber the
standard (or reserved by us) prefixes.
"""
for (k, v) in nsdict.items():
if (k, v) in _standard_ns:
continue # depends on [control=['if'], data=[]]
rv = _reserved_ns.get(k)
if rv:
if rv != v:
raise KeyError('Reserved namespace ' + str((k, v)) + ' used') # depends on [control=['if'], data=['v']]
continue # depends on [control=['if'], data=[]]
if k:
self.dom.setNamespaceAttribute(k, v) # depends on [control=['if'], data=[]]
else:
self.dom.setNamespaceAttribute('xmlns', v) # depends on [control=['for'], data=[]] |
def _release(level):
"""TODO: we should make sure that we are on master release"""
version, comment = _new_version(level)
if version is not None:
run(['git',
'commit',
str(VER_PATH.relative_to(BASE_PATH)),
str(CHANGES_PATH.relative_to(BASE_PATH)),
'--amend',
'--no-edit',
])
run(['git',
'tag',
'-a',
'v' + version,
'-m',
'"' + comment + '"',
])
run(['git',
'push',
'origin',
'--tags',
])
run(['git',
'push',
'origin',
'master',
'-f',
]) | def function[_release, parameter[level]]:
constant[TODO: we should make sure that we are on master release]
<ast.Tuple object at 0x7da18bcc9810> assign[=] call[name[_new_version], parameter[name[level]]]
if compare[name[version] is_not constant[None]] begin[:]
call[name[run], parameter[list[[<ast.Constant object at 0x7da18bcca410>, <ast.Constant object at 0x7da18bccba90>, <ast.Call object at 0x7da18bccbfa0>, <ast.Call object at 0x7da18bcca230>, <ast.Constant object at 0x7da18bcca980>, <ast.Constant object at 0x7da18bcc8d00>]]]]
call[name[run], parameter[list[[<ast.Constant object at 0x7da18bcc9750>, <ast.Constant object at 0x7da18bcca4d0>, <ast.Constant object at 0x7da18bccb970>, <ast.BinOp object at 0x7da18bcc80a0>, <ast.Constant object at 0x7da18bcca7a0>, <ast.BinOp object at 0x7da18bccad70>]]]]
call[name[run], parameter[list[[<ast.Constant object at 0x7da18bcc9a50>, <ast.Constant object at 0x7da18bccbca0>, <ast.Constant object at 0x7da18bccb6d0>, <ast.Constant object at 0x7da18bcc8280>]]]]
call[name[run], parameter[list[[<ast.Constant object at 0x7da18bccbdc0>, <ast.Constant object at 0x7da18bcc9420>, <ast.Constant object at 0x7da18bcc8940>, <ast.Constant object at 0x7da18bcc85b0>, <ast.Constant object at 0x7da18bcca200>]]]] | keyword[def] identifier[_release] ( identifier[level] ):
literal[string]
identifier[version] , identifier[comment] = identifier[_new_version] ( identifier[level] )
keyword[if] identifier[version] keyword[is] keyword[not] keyword[None] :
identifier[run] ([ literal[string] ,
literal[string] ,
identifier[str] ( identifier[VER_PATH] . identifier[relative_to] ( identifier[BASE_PATH] )),
identifier[str] ( identifier[CHANGES_PATH] . identifier[relative_to] ( identifier[BASE_PATH] )),
literal[string] ,
literal[string] ,
])
identifier[run] ([ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] + identifier[version] ,
literal[string] ,
literal[string] + identifier[comment] + literal[string] ,
])
identifier[run] ([ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
])
identifier[run] ([ literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
]) | def _release(level):
"""TODO: we should make sure that we are on master release"""
(version, comment) = _new_version(level)
if version is not None:
run(['git', 'commit', str(VER_PATH.relative_to(BASE_PATH)), str(CHANGES_PATH.relative_to(BASE_PATH)), '--amend', '--no-edit'])
run(['git', 'tag', '-a', 'v' + version, '-m', '"' + comment + '"'])
run(['git', 'push', 'origin', '--tags'])
run(['git', 'push', 'origin', 'master', '-f']) # depends on [control=['if'], data=['version']] |
def _difference(self, original_keys, updated_keys, name, item_index):
"""Calculate difference between the original and updated sets of keys.
Removed items will be removed from item_index, new items should have
been added by the discovery process. (?help or ?sensor-list)
This method is for use in inspect_requests and inspect_sensors only.
Returns
-------
(added, removed)
added : set of str
Names of the keys that were added
removed : set of str
Names of the keys that were removed
"""
original_keys = set(original_keys)
updated_keys = set(updated_keys)
added_keys = updated_keys.difference(original_keys)
removed_keys = set()
if name is None:
removed_keys = original_keys.difference(updated_keys)
elif name not in updated_keys and name in original_keys:
removed_keys = set([name])
for key in removed_keys:
if key in item_index:
del(item_index[key])
# Check the keys that was not added now or not lined up for removal,
# and see if they changed.
for key in updated_keys.difference(added_keys.union(removed_keys)):
if item_index[key].get('_changed'):
item_index[key]['_changed'] = False
removed_keys.add(key)
added_keys.add(key)
return added_keys, removed_keys | def function[_difference, parameter[self, original_keys, updated_keys, name, item_index]]:
constant[Calculate difference between the original and updated sets of keys.
Removed items will be removed from item_index, new items should have
been added by the discovery process. (?help or ?sensor-list)
This method is for use in inspect_requests and inspect_sensors only.
Returns
-------
(added, removed)
added : set of str
Names of the keys that were added
removed : set of str
Names of the keys that were removed
]
variable[original_keys] assign[=] call[name[set], parameter[name[original_keys]]]
variable[updated_keys] assign[=] call[name[set], parameter[name[updated_keys]]]
variable[added_keys] assign[=] call[name[updated_keys].difference, parameter[name[original_keys]]]
variable[removed_keys] assign[=] call[name[set], parameter[]]
if compare[name[name] is constant[None]] begin[:]
variable[removed_keys] assign[=] call[name[original_keys].difference, parameter[name[updated_keys]]]
for taget[name[key]] in starred[name[removed_keys]] begin[:]
if compare[name[key] in name[item_index]] begin[:]
<ast.Delete object at 0x7da1b055ab90>
for taget[name[key]] in starred[call[name[updated_keys].difference, parameter[call[name[added_keys].union, parameter[name[removed_keys]]]]]] begin[:]
if call[call[name[item_index]][name[key]].get, parameter[constant[_changed]]] begin[:]
call[call[name[item_index]][name[key]]][constant[_changed]] assign[=] constant[False]
call[name[removed_keys].add, parameter[name[key]]]
call[name[added_keys].add, parameter[name[key]]]
return[tuple[[<ast.Name object at 0x7da1b055b6d0>, <ast.Name object at 0x7da1b0558190>]]] | keyword[def] identifier[_difference] ( identifier[self] , identifier[original_keys] , identifier[updated_keys] , identifier[name] , identifier[item_index] ):
literal[string]
identifier[original_keys] = identifier[set] ( identifier[original_keys] )
identifier[updated_keys] = identifier[set] ( identifier[updated_keys] )
identifier[added_keys] = identifier[updated_keys] . identifier[difference] ( identifier[original_keys] )
identifier[removed_keys] = identifier[set] ()
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[removed_keys] = identifier[original_keys] . identifier[difference] ( identifier[updated_keys] )
keyword[elif] identifier[name] keyword[not] keyword[in] identifier[updated_keys] keyword[and] identifier[name] keyword[in] identifier[original_keys] :
identifier[removed_keys] = identifier[set] ([ identifier[name] ])
keyword[for] identifier[key] keyword[in] identifier[removed_keys] :
keyword[if] identifier[key] keyword[in] identifier[item_index] :
keyword[del] ( identifier[item_index] [ identifier[key] ])
keyword[for] identifier[key] keyword[in] identifier[updated_keys] . identifier[difference] ( identifier[added_keys] . identifier[union] ( identifier[removed_keys] )):
keyword[if] identifier[item_index] [ identifier[key] ]. identifier[get] ( literal[string] ):
identifier[item_index] [ identifier[key] ][ literal[string] ]= keyword[False]
identifier[removed_keys] . identifier[add] ( identifier[key] )
identifier[added_keys] . identifier[add] ( identifier[key] )
keyword[return] identifier[added_keys] , identifier[removed_keys] | def _difference(self, original_keys, updated_keys, name, item_index):
"""Calculate difference between the original and updated sets of keys.
Removed items will be removed from item_index, new items should have
been added by the discovery process. (?help or ?sensor-list)
This method is for use in inspect_requests and inspect_sensors only.
Returns
-------
(added, removed)
added : set of str
Names of the keys that were added
removed : set of str
Names of the keys that were removed
"""
original_keys = set(original_keys)
updated_keys = set(updated_keys)
added_keys = updated_keys.difference(original_keys)
removed_keys = set()
if name is None:
removed_keys = original_keys.difference(updated_keys) # depends on [control=['if'], data=[]]
elif name not in updated_keys and name in original_keys:
removed_keys = set([name]) # depends on [control=['if'], data=[]]
for key in removed_keys:
if key in item_index:
del item_index[key] # depends on [control=['if'], data=['key', 'item_index']] # depends on [control=['for'], data=['key']]
# Check the keys that was not added now or not lined up for removal,
# and see if they changed.
for key in updated_keys.difference(added_keys.union(removed_keys)):
if item_index[key].get('_changed'):
item_index[key]['_changed'] = False
removed_keys.add(key)
added_keys.add(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return (added_keys, removed_keys) |
def source_decode(sourcecode, verbose=0):
"""Decode operator source and import operator class.
Parameters
----------
sourcecode: string
a string of operator source (e.g 'sklearn.feature_selection.RFE')
verbose: int, optional (default: 0)
How much information TPOT communicates while it's running.
0 = none, 1 = minimal, 2 = high, 3 = all.
if verbose > 2 then ImportError will rasie during initialization
Returns
-------
import_str: string
a string of operator class source (e.g. 'sklearn.feature_selection')
op_str: string
a string of operator class (e.g. 'RFE')
op_obj: object
operator class (e.g. RFE)
"""
tmp_path = sourcecode.split('.')
op_str = tmp_path.pop()
import_str = '.'.join(tmp_path)
try:
if sourcecode.startswith('tpot.'):
exec('from {} import {}'.format(import_str[4:], op_str))
else:
exec('from {} import {}'.format(import_str, op_str))
op_obj = eval(op_str)
except Exception as e:
if verbose > 2:
raise ImportError('Error: could not import {}.\n{}'.format(sourcecode, e))
else:
print('Warning: {} is not available and will not be used by TPOT.'.format(sourcecode))
op_obj = None
return import_str, op_str, op_obj | def function[source_decode, parameter[sourcecode, verbose]]:
constant[Decode operator source and import operator class.
Parameters
----------
sourcecode: string
a string of operator source (e.g 'sklearn.feature_selection.RFE')
verbose: int, optional (default: 0)
How much information TPOT communicates while it's running.
0 = none, 1 = minimal, 2 = high, 3 = all.
if verbose > 2 then ImportError will rasie during initialization
Returns
-------
import_str: string
a string of operator class source (e.g. 'sklearn.feature_selection')
op_str: string
a string of operator class (e.g. 'RFE')
op_obj: object
operator class (e.g. RFE)
]
variable[tmp_path] assign[=] call[name[sourcecode].split, parameter[constant[.]]]
variable[op_str] assign[=] call[name[tmp_path].pop, parameter[]]
variable[import_str] assign[=] call[constant[.].join, parameter[name[tmp_path]]]
<ast.Try object at 0x7da20e955ff0>
return[tuple[[<ast.Name object at 0x7da204567f10>, <ast.Name object at 0x7da2045677c0>, <ast.Name object at 0x7da204566ef0>]]] | keyword[def] identifier[source_decode] ( identifier[sourcecode] , identifier[verbose] = literal[int] ):
literal[string]
identifier[tmp_path] = identifier[sourcecode] . identifier[split] ( literal[string] )
identifier[op_str] = identifier[tmp_path] . identifier[pop] ()
identifier[import_str] = literal[string] . identifier[join] ( identifier[tmp_path] )
keyword[try] :
keyword[if] identifier[sourcecode] . identifier[startswith] ( literal[string] ):
identifier[exec] ( literal[string] . identifier[format] ( identifier[import_str] [ literal[int] :], identifier[op_str] ))
keyword[else] :
identifier[exec] ( literal[string] . identifier[format] ( identifier[import_str] , identifier[op_str] ))
identifier[op_obj] = identifier[eval] ( identifier[op_str] )
keyword[except] identifier[Exception] keyword[as] identifier[e] :
keyword[if] identifier[verbose] > literal[int] :
keyword[raise] identifier[ImportError] ( literal[string] . identifier[format] ( identifier[sourcecode] , identifier[e] ))
keyword[else] :
identifier[print] ( literal[string] . identifier[format] ( identifier[sourcecode] ))
identifier[op_obj] = keyword[None]
keyword[return] identifier[import_str] , identifier[op_str] , identifier[op_obj] | def source_decode(sourcecode, verbose=0):
"""Decode operator source and import operator class.
Parameters
----------
sourcecode: string
a string of operator source (e.g 'sklearn.feature_selection.RFE')
verbose: int, optional (default: 0)
How much information TPOT communicates while it's running.
0 = none, 1 = minimal, 2 = high, 3 = all.
if verbose > 2 then ImportError will rasie during initialization
Returns
-------
import_str: string
a string of operator class source (e.g. 'sklearn.feature_selection')
op_str: string
a string of operator class (e.g. 'RFE')
op_obj: object
operator class (e.g. RFE)
"""
tmp_path = sourcecode.split('.')
op_str = tmp_path.pop()
import_str = '.'.join(tmp_path)
try:
if sourcecode.startswith('tpot.'):
exec('from {} import {}'.format(import_str[4:], op_str)) # depends on [control=['if'], data=[]]
else:
exec('from {} import {}'.format(import_str, op_str))
op_obj = eval(op_str) # depends on [control=['try'], data=[]]
except Exception as e:
if verbose > 2:
raise ImportError('Error: could not import {}.\n{}'.format(sourcecode, e)) # depends on [control=['if'], data=[]]
else:
print('Warning: {} is not available and will not be used by TPOT.'.format(sourcecode))
op_obj = None # depends on [control=['except'], data=['e']]
return (import_str, op_str, op_obj) |
def normalize_url(url):
"""
Returns the given URL with all query keys properly escaped.
Args:
url (str): The URL to normalize.
Returns:
str: The normalized URL.
"""
uri = urlparse(url)
query = uri.query or ""
pairs = parse_qsl(query)
decoded_pairs = [(unquote(key), value) for key, value in pairs]
encoded_pairs = [(quote(key), value) for key, value in decoded_pairs]
normalized_query = urlencode(encoded_pairs)
return ParseResult(
scheme=uri.scheme,
netloc=uri.netloc,
path=uri.path,
params=uri.params,
query=normalized_query,
fragment=uri.fragment).geturl() | def function[normalize_url, parameter[url]]:
constant[
Returns the given URL with all query keys properly escaped.
Args:
url (str): The URL to normalize.
Returns:
str: The normalized URL.
]
variable[uri] assign[=] call[name[urlparse], parameter[name[url]]]
variable[query] assign[=] <ast.BoolOp object at 0x7da1b02105b0>
variable[pairs] assign[=] call[name[parse_qsl], parameter[name[query]]]
variable[decoded_pairs] assign[=] <ast.ListComp object at 0x7da1b0211f30>
variable[encoded_pairs] assign[=] <ast.ListComp object at 0x7da1b0211f60>
variable[normalized_query] assign[=] call[name[urlencode], parameter[name[encoded_pairs]]]
return[call[call[name[ParseResult], parameter[]].geturl, parameter[]]] | keyword[def] identifier[normalize_url] ( identifier[url] ):
literal[string]
identifier[uri] = identifier[urlparse] ( identifier[url] )
identifier[query] = identifier[uri] . identifier[query] keyword[or] literal[string]
identifier[pairs] = identifier[parse_qsl] ( identifier[query] )
identifier[decoded_pairs] =[( identifier[unquote] ( identifier[key] ), identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[pairs] ]
identifier[encoded_pairs] =[( identifier[quote] ( identifier[key] ), identifier[value] ) keyword[for] identifier[key] , identifier[value] keyword[in] identifier[decoded_pairs] ]
identifier[normalized_query] = identifier[urlencode] ( identifier[encoded_pairs] )
keyword[return] identifier[ParseResult] (
identifier[scheme] = identifier[uri] . identifier[scheme] ,
identifier[netloc] = identifier[uri] . identifier[netloc] ,
identifier[path] = identifier[uri] . identifier[path] ,
identifier[params] = identifier[uri] . identifier[params] ,
identifier[query] = identifier[normalized_query] ,
identifier[fragment] = identifier[uri] . identifier[fragment] ). identifier[geturl] () | def normalize_url(url):
"""
Returns the given URL with all query keys properly escaped.
Args:
url (str): The URL to normalize.
Returns:
str: The normalized URL.
"""
uri = urlparse(url)
query = uri.query or ''
pairs = parse_qsl(query)
decoded_pairs = [(unquote(key), value) for (key, value) in pairs]
encoded_pairs = [(quote(key), value) for (key, value) in decoded_pairs]
normalized_query = urlencode(encoded_pairs)
return ParseResult(scheme=uri.scheme, netloc=uri.netloc, path=uri.path, params=uri.params, query=normalized_query, fragment=uri.fragment).geturl() |
def remove_child_bin(self, bin_id, child_id):
"""Removes a child from a bin.
arg: bin_id (osid.id.Id): the ``Id`` of a bin
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: NotFound - ``bin_id`` not a parent of ``child_id``
raise: NullArgument - ``bin_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.remove_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.remove_child_catalog(catalog_id=bin_id, child_id=child_id)
return self._hierarchy_session.remove_child(id_=bin_id, child_id=child_id) | def function[remove_child_bin, parameter[self, bin_id, child_id]]:
constant[Removes a child from a bin.
arg: bin_id (osid.id.Id): the ``Id`` of a bin
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: NotFound - ``bin_id`` not a parent of ``child_id``
raise: NullArgument - ``bin_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
if compare[name[self]._catalog_session is_not constant[None]] begin[:]
return[call[name[self]._catalog_session.remove_child_catalog, parameter[]]]
return[call[name[self]._hierarchy_session.remove_child, parameter[]]] | keyword[def] identifier[remove_child_bin] ( identifier[self] , identifier[bin_id] , identifier[child_id] ):
literal[string]
keyword[if] identifier[self] . identifier[_catalog_session] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[self] . identifier[_catalog_session] . identifier[remove_child_catalog] ( identifier[catalog_id] = identifier[bin_id] , identifier[child_id] = identifier[child_id] )
keyword[return] identifier[self] . identifier[_hierarchy_session] . identifier[remove_child] ( identifier[id_] = identifier[bin_id] , identifier[child_id] = identifier[child_id] ) | def remove_child_bin(self, bin_id, child_id):
"""Removes a child from a bin.
arg: bin_id (osid.id.Id): the ``Id`` of a bin
arg: child_id (osid.id.Id): the ``Id`` of the new child
raise: NotFound - ``bin_id`` not a parent of ``child_id``
raise: NullArgument - ``bin_id`` or ``child_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
# Implemented from template for
# osid.resource.BinHierarchyDesignSession.remove_child_bin_template
if self._catalog_session is not None:
return self._catalog_session.remove_child_catalog(catalog_id=bin_id, child_id=child_id) # depends on [control=['if'], data=[]]
return self._hierarchy_session.remove_child(id_=bin_id, child_id=child_id) |
def do_summary(self, params):
"""
\x1b[1mNAME\x1b[0m
summary - Prints summarized details of a path's children
\x1b[1mSYNOPSIS\x1b[0m
summary [path] [top]
\x1b[1mDESCRIPTION\x1b[0m
The results are sorted by name.
\x1b[1mOPTIONS\x1b[0m
* path: the path (default: cwd)
* top: number of results to be displayed (0 is all) (default: 0)
\x1b[1mEXAMPLES\x1b[0m
> summary /services/registrations
Created Last modified Owner Name
Thu Oct 11 09:14:39 2014 Thu Oct 11 09:14:39 2014 - bar
Thu Oct 16 18:54:39 2014 Thu Oct 16 18:54:39 2014 - foo
Thu Oct 12 10:04:01 2014 Thu Oct 12 10:04:01 2014 0x14911e869aa0dc1 member_0000001
"""
self.show_output("%s%s%s%s",
"Created".ljust(32),
"Last modified".ljust(32),
"Owner".ljust(23),
"Name")
results = sorted(self._zk.stat_map(params.path))
# what slice do we want?
if params.top == 0:
start, end = 0, len(results)
elif params.top > 0:
start, end = 0, params.top if params.top < len(results) else len(results)
else:
start = len(results) + params.top if abs(params.top) < len(results) else 0
end = len(results)
offs = 1 if params.path == "/" else len(params.path) + 1
for i in range(start, end):
path, stat = results[i]
self.show_output(
"%s%s%s%s",
time.ctime(stat.created).ljust(32),
time.ctime(stat.last_modified).ljust(32),
("0x%x" % stat.ephemeralOwner).ljust(23),
path[offs:]
) | def function[do_summary, parameter[self, params]]:
constant[
[1mNAME[0m
summary - Prints summarized details of a path's children
[1mSYNOPSIS[0m
summary [path] [top]
[1mDESCRIPTION[0m
The results are sorted by name.
[1mOPTIONS[0m
* path: the path (default: cwd)
* top: number of results to be displayed (0 is all) (default: 0)
[1mEXAMPLES[0m
> summary /services/registrations
Created Last modified Owner Name
Thu Oct 11 09:14:39 2014 Thu Oct 11 09:14:39 2014 - bar
Thu Oct 16 18:54:39 2014 Thu Oct 16 18:54:39 2014 - foo
Thu Oct 12 10:04:01 2014 Thu Oct 12 10:04:01 2014 0x14911e869aa0dc1 member_0000001
]
call[name[self].show_output, parameter[constant[%s%s%s%s], call[constant[Created].ljust, parameter[constant[32]]], call[constant[Last modified].ljust, parameter[constant[32]]], call[constant[Owner].ljust, parameter[constant[23]]], constant[Name]]]
variable[results] assign[=] call[name[sorted], parameter[call[name[self]._zk.stat_map, parameter[name[params].path]]]]
if compare[name[params].top equal[==] constant[0]] begin[:]
<ast.Tuple object at 0x7da18f00c400> assign[=] tuple[[<ast.Constant object at 0x7da18f00d690>, <ast.Call object at 0x7da18f00e5c0>]]
variable[offs] assign[=] <ast.IfExp object at 0x7da18f00efe0>
for taget[name[i]] in starred[call[name[range], parameter[name[start], name[end]]]] begin[:]
<ast.Tuple object at 0x7da1b26ac850> assign[=] call[name[results]][name[i]]
call[name[self].show_output, parameter[constant[%s%s%s%s], call[call[name[time].ctime, parameter[name[stat].created]].ljust, parameter[constant[32]]], call[call[name[time].ctime, parameter[name[stat].last_modified]].ljust, parameter[constant[32]]], call[binary_operation[constant[0x%x] <ast.Mod object at 0x7da2590d6920> name[stat].ephemeralOwner].ljust, parameter[constant[23]]], call[name[path]][<ast.Slice object at 0x7da1b26af790>]]] | keyword[def] identifier[do_summary] ( identifier[self] , identifier[params] ):
literal[string]
identifier[self] . identifier[show_output] ( literal[string] ,
literal[string] . identifier[ljust] ( literal[int] ),
literal[string] . identifier[ljust] ( literal[int] ),
literal[string] . identifier[ljust] ( literal[int] ),
literal[string] )
identifier[results] = identifier[sorted] ( identifier[self] . identifier[_zk] . identifier[stat_map] ( identifier[params] . identifier[path] ))
keyword[if] identifier[params] . identifier[top] == literal[int] :
identifier[start] , identifier[end] = literal[int] , identifier[len] ( identifier[results] )
keyword[elif] identifier[params] . identifier[top] > literal[int] :
identifier[start] , identifier[end] = literal[int] , identifier[params] . identifier[top] keyword[if] identifier[params] . identifier[top] < identifier[len] ( identifier[results] ) keyword[else] identifier[len] ( identifier[results] )
keyword[else] :
identifier[start] = identifier[len] ( identifier[results] )+ identifier[params] . identifier[top] keyword[if] identifier[abs] ( identifier[params] . identifier[top] )< identifier[len] ( identifier[results] ) keyword[else] literal[int]
identifier[end] = identifier[len] ( identifier[results] )
identifier[offs] = literal[int] keyword[if] identifier[params] . identifier[path] == literal[string] keyword[else] identifier[len] ( identifier[params] . identifier[path] )+ literal[int]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[end] ):
identifier[path] , identifier[stat] = identifier[results] [ identifier[i] ]
identifier[self] . identifier[show_output] (
literal[string] ,
identifier[time] . identifier[ctime] ( identifier[stat] . identifier[created] ). identifier[ljust] ( literal[int] ),
identifier[time] . identifier[ctime] ( identifier[stat] . identifier[last_modified] ). identifier[ljust] ( literal[int] ),
( literal[string] % identifier[stat] . identifier[ephemeralOwner] ). identifier[ljust] ( literal[int] ),
identifier[path] [ identifier[offs] :]
) | def do_summary(self, params):
"""
\x1b[1mNAME\x1b[0m
summary - Prints summarized details of a path's children
\x1b[1mSYNOPSIS\x1b[0m
summary [path] [top]
\x1b[1mDESCRIPTION\x1b[0m
The results are sorted by name.
\x1b[1mOPTIONS\x1b[0m
* path: the path (default: cwd)
* top: number of results to be displayed (0 is all) (default: 0)
\x1b[1mEXAMPLES\x1b[0m
> summary /services/registrations
Created Last modified Owner Name
Thu Oct 11 09:14:39 2014 Thu Oct 11 09:14:39 2014 - bar
Thu Oct 16 18:54:39 2014 Thu Oct 16 18:54:39 2014 - foo
Thu Oct 12 10:04:01 2014 Thu Oct 12 10:04:01 2014 0x14911e869aa0dc1 member_0000001
"""
self.show_output('%s%s%s%s', 'Created'.ljust(32), 'Last modified'.ljust(32), 'Owner'.ljust(23), 'Name')
results = sorted(self._zk.stat_map(params.path))
# what slice do we want?
if params.top == 0:
(start, end) = (0, len(results)) # depends on [control=['if'], data=[]]
elif params.top > 0:
(start, end) = (0, params.top if params.top < len(results) else len(results)) # depends on [control=['if'], data=[]]
else:
start = len(results) + params.top if abs(params.top) < len(results) else 0
end = len(results)
offs = 1 if params.path == '/' else len(params.path) + 1
for i in range(start, end):
(path, stat) = results[i]
self.show_output('%s%s%s%s', time.ctime(stat.created).ljust(32), time.ctime(stat.last_modified).ljust(32), ('0x%x' % stat.ephemeralOwner).ljust(23), path[offs:]) # depends on [control=['for'], data=['i']] |
def decorate_func(self, func, *decorator_args, **decorator_kwargs):
"""override this in a child class with your own logic, it must return a
function that calls self.func
:param func: callback -- the function being decorated
:param decorator_args: tuple -- the arguments passed into the decorator (eg, @dec(1, 2))
:param decorator_kwargs: dict -- the named args passed into the decorator (eg, @dec(foo=1))
:returns: the wrapped func with our decorator func
"""
raise RuntimeError("decorator {} does not support function decoration".format(self.__class__.__name__))
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper | def function[decorate_func, parameter[self, func]]:
constant[override this in a child class with your own logic, it must return a
function that calls self.func
:param func: callback -- the function being decorated
:param decorator_args: tuple -- the arguments passed into the decorator (eg, @dec(1, 2))
:param decorator_kwargs: dict -- the named args passed into the decorator (eg, @dec(foo=1))
:returns: the wrapped func with our decorator func
]
<ast.Raise object at 0x7da18f09dae0>
def function[wrapper, parameter[]]:
return[call[name[func], parameter[<ast.Starred object at 0x7da18f09c6d0>]]]
return[name[wrapper]] | keyword[def] identifier[decorate_func] ( identifier[self] , identifier[func] ,* identifier[decorator_args] ,** identifier[decorator_kwargs] ):
literal[string]
keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] ))
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[return] identifier[wrapper] | def decorate_func(self, func, *decorator_args, **decorator_kwargs):
"""override this in a child class with your own logic, it must return a
function that calls self.func
:param func: callback -- the function being decorated
:param decorator_args: tuple -- the arguments passed into the decorator (eg, @dec(1, 2))
:param decorator_kwargs: dict -- the named args passed into the decorator (eg, @dec(foo=1))
:returns: the wrapped func with our decorator func
"""
raise RuntimeError('decorator {} does not support function decoration'.format(self.__class__.__name__))
def wrapper(*args, **kwargs):
return func(*args, **kwargs)
return wrapper |
def parse(self, response):
'''
根据对 ``start_urls`` 中提供链接的请求响应包内容,解析生成具体文章链接请求
:param Response response: 由 ``Scrapy`` 调用并传入的请求响应对象
'''
content_raw = response.body.decode()
self.logger.debug('响应body原始数据:{}'.format(content_raw))
content = json.loads(content_raw, encoding='UTF-8')
self.logger.debug(content)
# 文章发布日期
date = datetime.datetime.strptime(content['date'], '%Y%m%d')
strftime = date.strftime("%Y-%m-%d")
self.logger.info('日期:{}'.format(strftime))
# 处理头条文章列表,将其 `top` 标记到相应 __story__ 中
if 'top_stories' in content:
self.logger.info('处理头条文章')
for item in content['top_stories']:
for story in content['stories']:
if item['id'] == story['id']:
story['top'] = 1
break
self.logger.debug(item)
# 处理今日文章,并抛出具体文章请求
post_num = len(content['stories'])
self.logger.info('处理今日文章,共{:>2}篇'.format(post_num))
for item in content['stories']:
self.logger.info(item)
post_num = 0 if post_num < 0 else post_num
pub_time = date + datetime.timedelta(minutes=post_num)
post_num -= 1
url = 'http://news-at.zhihu.com/api/4/news/{}'.format(item['id'])
request = scrapy.Request(url, callback=self.parse_post)
post_dict = {
'spider': ZhihuDailySpider.name,
'date': pub_time.strftime("%Y-%m-%d %H:%M:%S"),
'meta': {
'spider.zhihu_daily.id': str(item.get('id', ''))
}
}
if item.get('top'):
post_dict['meta']['spider.zhihu_daily.top'] = \
str(item.get('top', 0))
request.meta['post'] = post_dict
self.item_list.append(post_dict)
yield request | def function[parse, parameter[self, response]]:
constant[
根据对 ``start_urls`` 中提供链接的请求响应包内容,解析生成具体文章链接请求
:param Response response: 由 ``Scrapy`` 调用并传入的请求响应对象
]
variable[content_raw] assign[=] call[name[response].body.decode, parameter[]]
call[name[self].logger.debug, parameter[call[constant[响应body原始数据:{}].format, parameter[name[content_raw]]]]]
variable[content] assign[=] call[name[json].loads, parameter[name[content_raw]]]
call[name[self].logger.debug, parameter[name[content]]]
variable[date] assign[=] call[name[datetime].datetime.strptime, parameter[call[name[content]][constant[date]], constant[%Y%m%d]]]
variable[strftime] assign[=] call[name[date].strftime, parameter[constant[%Y-%m-%d]]]
call[name[self].logger.info, parameter[call[constant[日期:{}].format, parameter[name[strftime]]]]]
if compare[constant[top_stories] in name[content]] begin[:]
call[name[self].logger.info, parameter[constant[处理头条文章]]]
for taget[name[item]] in starred[call[name[content]][constant[top_stories]]] begin[:]
for taget[name[story]] in starred[call[name[content]][constant[stories]]] begin[:]
if compare[call[name[item]][constant[id]] equal[==] call[name[story]][constant[id]]] begin[:]
call[name[story]][constant[top]] assign[=] constant[1]
break
call[name[self].logger.debug, parameter[name[item]]]
variable[post_num] assign[=] call[name[len], parameter[call[name[content]][constant[stories]]]]
call[name[self].logger.info, parameter[call[constant[处理今日文章,共{:>2}篇].format, parameter[name[post_num]]]]]
for taget[name[item]] in starred[call[name[content]][constant[stories]]] begin[:]
call[name[self].logger.info, parameter[name[item]]]
variable[post_num] assign[=] <ast.IfExp object at 0x7da204564880>
variable[pub_time] assign[=] binary_operation[name[date] + call[name[datetime].timedelta, parameter[]]]
<ast.AugAssign object at 0x7da20c991b10>
variable[url] assign[=] call[constant[http://news-at.zhihu.com/api/4/news/{}].format, parameter[call[name[item]][constant[id]]]]
variable[request] assign[=] call[name[scrapy].Request, parameter[name[url]]]
variable[post_dict] assign[=] dictionary[[<ast.Constant object at 0x7da20c991510>, <ast.Constant object at 0x7da20c990a00>, <ast.Constant object at 0x7da20c993d60>], [<ast.Attribute object at 0x7da20c9926e0>, <ast.Call object at 0x7da20c990820>, <ast.Dict object at 0x7da20c990370>]]
if call[name[item].get, parameter[constant[top]]] begin[:]
call[call[name[post_dict]][constant[meta]]][constant[spider.zhihu_daily.top]] assign[=] call[name[str], parameter[call[name[item].get, parameter[constant[top], constant[0]]]]]
call[name[request].meta][constant[post]] assign[=] name[post_dict]
call[name[self].item_list.append, parameter[name[post_dict]]]
<ast.Yield object at 0x7da20c9936d0> | keyword[def] identifier[parse] ( identifier[self] , identifier[response] ):
literal[string]
identifier[content_raw] = identifier[response] . identifier[body] . identifier[decode] ()
identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[content_raw] ))
identifier[content] = identifier[json] . identifier[loads] ( identifier[content_raw] , identifier[encoding] = literal[string] )
identifier[self] . identifier[logger] . identifier[debug] ( identifier[content] )
identifier[date] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[content] [ literal[string] ], literal[string] )
identifier[strftime] = identifier[date] . identifier[strftime] ( literal[string] )
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[strftime] ))
keyword[if] literal[string] keyword[in] identifier[content] :
identifier[self] . identifier[logger] . identifier[info] ( literal[string] )
keyword[for] identifier[item] keyword[in] identifier[content] [ literal[string] ]:
keyword[for] identifier[story] keyword[in] identifier[content] [ literal[string] ]:
keyword[if] identifier[item] [ literal[string] ]== identifier[story] [ literal[string] ]:
identifier[story] [ literal[string] ]= literal[int]
keyword[break]
identifier[self] . identifier[logger] . identifier[debug] ( identifier[item] )
identifier[post_num] = identifier[len] ( identifier[content] [ literal[string] ])
identifier[self] . identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[post_num] ))
keyword[for] identifier[item] keyword[in] identifier[content] [ literal[string] ]:
identifier[self] . identifier[logger] . identifier[info] ( identifier[item] )
identifier[post_num] = literal[int] keyword[if] identifier[post_num] < literal[int] keyword[else] identifier[post_num]
identifier[pub_time] = identifier[date] + identifier[datetime] . identifier[timedelta] ( identifier[minutes] = identifier[post_num] )
identifier[post_num] -= literal[int]
identifier[url] = literal[string] . identifier[format] ( identifier[item] [ literal[string] ])
identifier[request] = identifier[scrapy] . identifier[Request] ( identifier[url] , identifier[callback] = identifier[self] . identifier[parse_post] )
identifier[post_dict] ={
literal[string] : identifier[ZhihuDailySpider] . identifier[name] ,
literal[string] : identifier[pub_time] . identifier[strftime] ( literal[string] ),
literal[string] :{
literal[string] : identifier[str] ( identifier[item] . identifier[get] ( literal[string] , literal[string] ))
}
}
keyword[if] identifier[item] . identifier[get] ( literal[string] ):
identifier[post_dict] [ literal[string] ][ literal[string] ]= identifier[str] ( identifier[item] . identifier[get] ( literal[string] , literal[int] ))
identifier[request] . identifier[meta] [ literal[string] ]= identifier[post_dict]
identifier[self] . identifier[item_list] . identifier[append] ( identifier[post_dict] )
keyword[yield] identifier[request] | def parse(self, response):
"""
根据对 ``start_urls`` 中提供链接的请求响应包内容,解析生成具体文章链接请求
:param Response response: 由 ``Scrapy`` 调用并传入的请求响应对象
"""
content_raw = response.body.decode()
self.logger.debug('响应body原始数据:{}'.format(content_raw))
content = json.loads(content_raw, encoding='UTF-8')
self.logger.debug(content)
# 文章发布日期
date = datetime.datetime.strptime(content['date'], '%Y%m%d')
strftime = date.strftime('%Y-%m-%d')
self.logger.info('日期:{}'.format(strftime))
# 处理头条文章列表,将其 `top` 标记到相应 __story__ 中
if 'top_stories' in content:
self.logger.info('处理头条文章')
for item in content['top_stories']:
for story in content['stories']:
if item['id'] == story['id']:
story['top'] = 1
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['story']]
self.logger.debug(item) # depends on [control=['for'], data=['item']] # depends on [control=['if'], data=['content']]
# 处理今日文章,并抛出具体文章请求
post_num = len(content['stories'])
self.logger.info('处理今日文章,共{:>2}篇'.format(post_num))
for item in content['stories']:
self.logger.info(item)
post_num = 0 if post_num < 0 else post_num
pub_time = date + datetime.timedelta(minutes=post_num)
post_num -= 1
url = 'http://news-at.zhihu.com/api/4/news/{}'.format(item['id'])
request = scrapy.Request(url, callback=self.parse_post)
post_dict = {'spider': ZhihuDailySpider.name, 'date': pub_time.strftime('%Y-%m-%d %H:%M:%S'), 'meta': {'spider.zhihu_daily.id': str(item.get('id', ''))}}
if item.get('top'):
post_dict['meta']['spider.zhihu_daily.top'] = str(item.get('top', 0)) # depends on [control=['if'], data=[]]
request.meta['post'] = post_dict
self.item_list.append(post_dict)
yield request # depends on [control=['for'], data=['item']] |
def _expired(self):
"""Hold timer expired event handler.
"""
LOG.info('Negotiated hold time %s expired.', self._holdtime)
code = BGP_ERROR_HOLD_TIMER_EXPIRED
subcode = BGP_ERROR_SUB_HOLD_TIMER_EXPIRED
self.send_notification(code, subcode)
self.connection_lost('Negotiated hold time %s expired.' %
self._holdtime)
self.stop() | def function[_expired, parameter[self]]:
constant[Hold timer expired event handler.
]
call[name[LOG].info, parameter[constant[Negotiated hold time %s expired.], name[self]._holdtime]]
variable[code] assign[=] name[BGP_ERROR_HOLD_TIMER_EXPIRED]
variable[subcode] assign[=] name[BGP_ERROR_SUB_HOLD_TIMER_EXPIRED]
call[name[self].send_notification, parameter[name[code], name[subcode]]]
call[name[self].connection_lost, parameter[binary_operation[constant[Negotiated hold time %s expired.] <ast.Mod object at 0x7da2590d6920> name[self]._holdtime]]]
call[name[self].stop, parameter[]] | keyword[def] identifier[_expired] ( identifier[self] ):
literal[string]
identifier[LOG] . identifier[info] ( literal[string] , identifier[self] . identifier[_holdtime] )
identifier[code] = identifier[BGP_ERROR_HOLD_TIMER_EXPIRED]
identifier[subcode] = identifier[BGP_ERROR_SUB_HOLD_TIMER_EXPIRED]
identifier[self] . identifier[send_notification] ( identifier[code] , identifier[subcode] )
identifier[self] . identifier[connection_lost] ( literal[string] %
identifier[self] . identifier[_holdtime] )
identifier[self] . identifier[stop] () | def _expired(self):
"""Hold timer expired event handler.
"""
LOG.info('Negotiated hold time %s expired.', self._holdtime)
code = BGP_ERROR_HOLD_TIMER_EXPIRED
subcode = BGP_ERROR_SUB_HOLD_TIMER_EXPIRED
self.send_notification(code, subcode)
self.connection_lost('Negotiated hold time %s expired.' % self._holdtime)
self.stop() |
def organization_requests(self, id, **kwargs):
"https://developer.zendesk.com/rest_api/docs/core/requests#list-requests"
api_path = "/api/v2/organizations/{id}/requests.json"
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) | def function[organization_requests, parameter[self, id]]:
constant[https://developer.zendesk.com/rest_api/docs/core/requests#list-requests]
variable[api_path] assign[=] constant[/api/v2/organizations/{id}/requests.json]
variable[api_path] assign[=] call[name[api_path].format, parameter[]]
return[call[name[self].call, parameter[name[api_path]]]] | keyword[def] identifier[organization_requests] ( identifier[self] , identifier[id] ,** identifier[kwargs] ):
literal[string]
identifier[api_path] = literal[string]
identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[id] = identifier[id] )
keyword[return] identifier[self] . identifier[call] ( identifier[api_path] ,** identifier[kwargs] ) | def organization_requests(self, id, **kwargs):
"""https://developer.zendesk.com/rest_api/docs/core/requests#list-requests"""
api_path = '/api/v2/organizations/{id}/requests.json'
api_path = api_path.format(id=id)
return self.call(api_path, **kwargs) |
def autobuild_shiparchive(src_file):
"""Create a ship file archive containing a yaml_file and its dependencies.
If yaml_file depends on any build products as external files, it must
be a jinja2 template that references the file using the find_product
filter so that we can figure out where those build products are going
and create the right dependency graph.
Args:
src_file (str): The path to the input yaml file template. This
file path must end .yaml.tpl and is rendered into a .yaml
file and then packaged into a .ship file along with any
products that are referenced in it.
"""
if not src_file.endswith('.tpl'):
raise BuildError("You must pass a .tpl file to autobuild_shiparchive", src_file=src_file)
env = Environment(tools=[])
family = ArchitectureGroup('module_settings.json')
target = family.platform_independent_target()
resolver = ProductResolver.Create()
#Parse through build_step products to see what needs to imported
custom_steps = []
for build_step in family.tile.find_products('build_step'):
full_file_name = build_step.split(":")[0]
basename = os.path.splitext(os.path.basename(full_file_name))[0]
folder = os.path.dirname(full_file_name)
fileobj, pathname, description = imp.find_module(basename, [folder])
mod = imp.load_module(basename, fileobj, pathname, description)
full_file_name, class_name = build_step.split(":")
custom_steps.append((class_name, getattr(mod, class_name)))
env['CUSTOM_STEPS'] = custom_steps
env["RESOLVER"] = resolver
base_name, tpl_name = _find_basename(src_file)
yaml_name = tpl_name[:-4]
ship_name = yaml_name[:-5] + ".ship"
output_dir = target.build_dirs()['output']
build_dir = os.path.join(target.build_dirs()['build'], base_name)
tpl_path = os.path.join(build_dir, tpl_name)
yaml_path = os.path.join(build_dir, yaml_name)
ship_path = os.path.join(build_dir, ship_name)
output_path = os.path.join(output_dir, ship_name)
# We want to build up all related files in
# <build_dir>/<ship archive_folder>/
# - First copy the template yaml over
# - Then render the template yaml
# - Then find all products referenced in the template yaml and copy them
# - over
# - Then build a .ship archive
# - Then copy that archive into output_dir
ship_deps = [yaml_path]
env.Command([tpl_path], [src_file], Copy("$TARGET", "$SOURCE"))
prod_deps = _find_product_dependencies(src_file, resolver)
env.Command([yaml_path], [tpl_path], action=Action(template_shipfile_action, "Rendering $TARGET"))
for prod in prod_deps:
dest_file = os.path.join(build_dir, prod.short_name)
ship_deps.append(dest_file)
env.Command([dest_file], [prod.full_path], Copy("$TARGET", "$SOURCE"))
env.Command([ship_path], [ship_deps], action=Action(create_shipfile, "Archiving Ship Recipe $TARGET"))
env.Command([output_path], [ship_path], Copy("$TARGET", "$SOURCE")) | def function[autobuild_shiparchive, parameter[src_file]]:
constant[Create a ship file archive containing a yaml_file and its dependencies.
If yaml_file depends on any build products as external files, it must
be a jinja2 template that references the file using the find_product
filter so that we can figure out where those build products are going
and create the right dependency graph.
Args:
src_file (str): The path to the input yaml file template. This
file path must end .yaml.tpl and is rendered into a .yaml
file and then packaged into a .ship file along with any
products that are referenced in it.
]
if <ast.UnaryOp object at 0x7da1b0c01990> begin[:]
<ast.Raise object at 0x7da1b0c01420>
variable[env] assign[=] call[name[Environment], parameter[]]
variable[family] assign[=] call[name[ArchitectureGroup], parameter[constant[module_settings.json]]]
variable[target] assign[=] call[name[family].platform_independent_target, parameter[]]
variable[resolver] assign[=] call[name[ProductResolver].Create, parameter[]]
variable[custom_steps] assign[=] list[[]]
for taget[name[build_step]] in starred[call[name[family].tile.find_products, parameter[constant[build_step]]]] begin[:]
variable[full_file_name] assign[=] call[call[name[build_step].split, parameter[constant[:]]]][constant[0]]
variable[basename] assign[=] call[call[name[os].path.splitext, parameter[call[name[os].path.basename, parameter[name[full_file_name]]]]]][constant[0]]
variable[folder] assign[=] call[name[os].path.dirname, parameter[name[full_file_name]]]
<ast.Tuple object at 0x7da20e9b0760> assign[=] call[name[imp].find_module, parameter[name[basename], list[[<ast.Name object at 0x7da20e9b3eb0>]]]]
variable[mod] assign[=] call[name[imp].load_module, parameter[name[basename], name[fileobj], name[pathname], name[description]]]
<ast.Tuple object at 0x7da20e9b0cd0> assign[=] call[name[build_step].split, parameter[constant[:]]]
call[name[custom_steps].append, parameter[tuple[[<ast.Name object at 0x7da20e9b0490>, <ast.Call object at 0x7da20e9b0550>]]]]
call[name[env]][constant[CUSTOM_STEPS]] assign[=] name[custom_steps]
call[name[env]][constant[RESOLVER]] assign[=] name[resolver]
<ast.Tuple object at 0x7da20e9b0790> assign[=] call[name[_find_basename], parameter[name[src_file]]]
variable[yaml_name] assign[=] call[name[tpl_name]][<ast.Slice object at 0x7da20e9b1600>]
variable[ship_name] assign[=] binary_operation[call[name[yaml_name]][<ast.Slice object at 0x7da20e9b04f0>] + constant[.ship]]
variable[output_dir] assign[=] call[call[name[target].build_dirs, parameter[]]][constant[output]]
variable[build_dir] assign[=] call[name[os].path.join, parameter[call[call[name[target].build_dirs, parameter[]]][constant[build]], name[base_name]]]
variable[tpl_path] assign[=] call[name[os].path.join, parameter[name[build_dir], name[tpl_name]]]
variable[yaml_path] assign[=] call[name[os].path.join, parameter[name[build_dir], name[yaml_name]]]
variable[ship_path] assign[=] call[name[os].path.join, parameter[name[build_dir], name[ship_name]]]
variable[output_path] assign[=] call[name[os].path.join, parameter[name[output_dir], name[ship_name]]]
variable[ship_deps] assign[=] list[[<ast.Name object at 0x7da20e9b3790>]]
call[name[env].Command, parameter[list[[<ast.Name object at 0x7da20e9b2890>]], list[[<ast.Name object at 0x7da20e9b0e20>]], call[name[Copy], parameter[constant[$TARGET], constant[$SOURCE]]]]]
variable[prod_deps] assign[=] call[name[_find_product_dependencies], parameter[name[src_file], name[resolver]]]
call[name[env].Command, parameter[list[[<ast.Name object at 0x7da20e9b2e90>]], list[[<ast.Name object at 0x7da20e9b3b80>]]]]
for taget[name[prod]] in starred[name[prod_deps]] begin[:]
variable[dest_file] assign[=] call[name[os].path.join, parameter[name[build_dir], name[prod].short_name]]
call[name[ship_deps].append, parameter[name[dest_file]]]
call[name[env].Command, parameter[list[[<ast.Name object at 0x7da18f8113c0>]], list[[<ast.Attribute object at 0x7da18f813310>]], call[name[Copy], parameter[constant[$TARGET], constant[$SOURCE]]]]]
call[name[env].Command, parameter[list[[<ast.Name object at 0x7da18f8103d0>]], list[[<ast.Name object at 0x7da18f810f70>]]]]
call[name[env].Command, parameter[list[[<ast.Name object at 0x7da18f8131c0>]], list[[<ast.Name object at 0x7da18f813100>]], call[name[Copy], parameter[constant[$TARGET], constant[$SOURCE]]]]] | keyword[def] identifier[autobuild_shiparchive] ( identifier[src_file] ):
literal[string]
keyword[if] keyword[not] identifier[src_file] . identifier[endswith] ( literal[string] ):
keyword[raise] identifier[BuildError] ( literal[string] , identifier[src_file] = identifier[src_file] )
identifier[env] = identifier[Environment] ( identifier[tools] =[])
identifier[family] = identifier[ArchitectureGroup] ( literal[string] )
identifier[target] = identifier[family] . identifier[platform_independent_target] ()
identifier[resolver] = identifier[ProductResolver] . identifier[Create] ()
identifier[custom_steps] =[]
keyword[for] identifier[build_step] keyword[in] identifier[family] . identifier[tile] . identifier[find_products] ( literal[string] ):
identifier[full_file_name] = identifier[build_step] . identifier[split] ( literal[string] )[ literal[int] ]
identifier[basename] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[os] . identifier[path] . identifier[basename] ( identifier[full_file_name] ))[ literal[int] ]
identifier[folder] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[full_file_name] )
identifier[fileobj] , identifier[pathname] , identifier[description] = identifier[imp] . identifier[find_module] ( identifier[basename] ,[ identifier[folder] ])
identifier[mod] = identifier[imp] . identifier[load_module] ( identifier[basename] , identifier[fileobj] , identifier[pathname] , identifier[description] )
identifier[full_file_name] , identifier[class_name] = identifier[build_step] . identifier[split] ( literal[string] )
identifier[custom_steps] . identifier[append] (( identifier[class_name] , identifier[getattr] ( identifier[mod] , identifier[class_name] )))
identifier[env] [ literal[string] ]= identifier[custom_steps]
identifier[env] [ literal[string] ]= identifier[resolver]
identifier[base_name] , identifier[tpl_name] = identifier[_find_basename] ( identifier[src_file] )
identifier[yaml_name] = identifier[tpl_name] [:- literal[int] ]
identifier[ship_name] = identifier[yaml_name] [:- literal[int] ]+ literal[string]
identifier[output_dir] = identifier[target] . identifier[build_dirs] ()[ literal[string] ]
identifier[build_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[target] . identifier[build_dirs] ()[ literal[string] ], identifier[base_name] )
identifier[tpl_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_dir] , identifier[tpl_name] )
identifier[yaml_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_dir] , identifier[yaml_name] )
identifier[ship_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_dir] , identifier[ship_name] )
identifier[output_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[output_dir] , identifier[ship_name] )
identifier[ship_deps] =[ identifier[yaml_path] ]
identifier[env] . identifier[Command] ([ identifier[tpl_path] ],[ identifier[src_file] ], identifier[Copy] ( literal[string] , literal[string] ))
identifier[prod_deps] = identifier[_find_product_dependencies] ( identifier[src_file] , identifier[resolver] )
identifier[env] . identifier[Command] ([ identifier[yaml_path] ],[ identifier[tpl_path] ], identifier[action] = identifier[Action] ( identifier[template_shipfile_action] , literal[string] ))
keyword[for] identifier[prod] keyword[in] identifier[prod_deps] :
identifier[dest_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[build_dir] , identifier[prod] . identifier[short_name] )
identifier[ship_deps] . identifier[append] ( identifier[dest_file] )
identifier[env] . identifier[Command] ([ identifier[dest_file] ],[ identifier[prod] . identifier[full_path] ], identifier[Copy] ( literal[string] , literal[string] ))
identifier[env] . identifier[Command] ([ identifier[ship_path] ],[ identifier[ship_deps] ], identifier[action] = identifier[Action] ( identifier[create_shipfile] , literal[string] ))
identifier[env] . identifier[Command] ([ identifier[output_path] ],[ identifier[ship_path] ], identifier[Copy] ( literal[string] , literal[string] )) | def autobuild_shiparchive(src_file):
"""Create a ship file archive containing a yaml_file and its dependencies.
If yaml_file depends on any build products as external files, it must
be a jinja2 template that references the file using the find_product
filter so that we can figure out where those build products are going
and create the right dependency graph.
Args:
src_file (str): The path to the input yaml file template. This
file path must end .yaml.tpl and is rendered into a .yaml
file and then packaged into a .ship file along with any
products that are referenced in it.
"""
if not src_file.endswith('.tpl'):
raise BuildError('You must pass a .tpl file to autobuild_shiparchive', src_file=src_file) # depends on [control=['if'], data=[]]
env = Environment(tools=[])
family = ArchitectureGroup('module_settings.json')
target = family.platform_independent_target()
resolver = ProductResolver.Create()
#Parse through build_step products to see what needs to imported
custom_steps = []
for build_step in family.tile.find_products('build_step'):
full_file_name = build_step.split(':')[0]
basename = os.path.splitext(os.path.basename(full_file_name))[0]
folder = os.path.dirname(full_file_name)
(fileobj, pathname, description) = imp.find_module(basename, [folder])
mod = imp.load_module(basename, fileobj, pathname, description)
(full_file_name, class_name) = build_step.split(':')
custom_steps.append((class_name, getattr(mod, class_name))) # depends on [control=['for'], data=['build_step']]
env['CUSTOM_STEPS'] = custom_steps
env['RESOLVER'] = resolver
(base_name, tpl_name) = _find_basename(src_file)
yaml_name = tpl_name[:-4]
ship_name = yaml_name[:-5] + '.ship'
output_dir = target.build_dirs()['output']
build_dir = os.path.join(target.build_dirs()['build'], base_name)
tpl_path = os.path.join(build_dir, tpl_name)
yaml_path = os.path.join(build_dir, yaml_name)
ship_path = os.path.join(build_dir, ship_name)
output_path = os.path.join(output_dir, ship_name)
# We want to build up all related files in
# <build_dir>/<ship archive_folder>/
# - First copy the template yaml over
# - Then render the template yaml
# - Then find all products referenced in the template yaml and copy them
# - over
# - Then build a .ship archive
# - Then copy that archive into output_dir
ship_deps = [yaml_path]
env.Command([tpl_path], [src_file], Copy('$TARGET', '$SOURCE'))
prod_deps = _find_product_dependencies(src_file, resolver)
env.Command([yaml_path], [tpl_path], action=Action(template_shipfile_action, 'Rendering $TARGET'))
for prod in prod_deps:
dest_file = os.path.join(build_dir, prod.short_name)
ship_deps.append(dest_file)
env.Command([dest_file], [prod.full_path], Copy('$TARGET', '$SOURCE')) # depends on [control=['for'], data=['prod']]
env.Command([ship_path], [ship_deps], action=Action(create_shipfile, 'Archiving Ship Recipe $TARGET'))
env.Command([output_path], [ship_path], Copy('$TARGET', '$SOURCE')) |
def checkArgs(args):
    """Checks the arguments and options.

    :param args: a :py:class:`Namespace` object containing the options of the
                 program.

    :type args: :py:class:`argparse.Namespace`

    :returns: ``True`` if everything was OK.

    If there is a problem with an option, an exception is raised using the
    :py:class:`ProgramError` class, a message is printed
    to the :class:`sys.stderr` and the program exists with code 1.
    """
    # Both binary PLINK file sets (source and gold standard) must exist on disk
    for prefix in [args.bfile, args.gold_bfile]:
        if prefix is None:
            msg = "no input file"
            raise ProgramError(msg)
        for fileName in [prefix + i for i in [".bed", ".bim", ".fam"]]:
            if not os.path.isfile(fileName):
                msg = "{}: no such file".format(fileName)
                raise ProgramError(msg)

    # Check for the same sample file
    if not os.path.isfile(args.same_samples):
        msg = "{}: no such file".format(args.same_samples)
        raise ProgramError(msg)

    # Exactly one of --source-manifest / --source-alleles must be provided
    if (args.source_manifest is None) and (args.source_alleles is None):
        # BUGFIX: the parenthesis opened before "either" was never closed in
        # the user-facing message
        msg = ("need an allele file (either an Illumina manifest "
               "[--source-manifest] or a file containing alleles for each "
               "marker [--source-alleles])")
        raise ProgramError(msg)
    if ((args.source_manifest is not None) and
            (args.source_alleles is not None)):
        msg = "use either --source-manifest or --source-alleles, not both"
        raise ProgramError(msg)

    # Whichever allele source was given must exist on disk
    if args.source_manifest is not None:
        if not os.path.isfile(args.source_manifest):
            msg = "{}: no such file".format(args.source_manifest)
            raise ProgramError(msg)
    if args.source_alleles is not None:
        if not os.path.isfile(args.source_alleles):
            msg = "{}: no such file".format(args.source_alleles)
            raise ProgramError(msg)

    return True
constant[Checks the arguments and options.
:param args: a :py:class:`Namespace` object containing the options of the
program.
:type args: :py:class:`argparse.Namespace`
:returns: ``True`` if everything was OK.
If there is a problem with an option, an exception is raised using the
:py:class:`ProgramError` class, a message is printed
to the :class:`sys.stderr` and the program exists with code 1.
]
for taget[name[prefix]] in starred[list[[<ast.Attribute object at 0x7da1b0a72050>, <ast.Attribute object at 0x7da1b0949c30>]]] begin[:]
if compare[name[prefix] is constant[None]] begin[:]
variable[msg] assign[=] constant[no input file]
<ast.Raise object at 0x7da1b094b160>
for taget[name[fileName]] in starred[<ast.ListComp object at 0x7da1b094a410>] begin[:]
if <ast.UnaryOp object at 0x7da1b0a7a9b0> begin[:]
variable[msg] assign[=] call[constant[{}: no such file].format, parameter[name[fileName]]]
<ast.Raise object at 0x7da1b0a79750>
if <ast.UnaryOp object at 0x7da1b0a782b0> begin[:]
variable[msg] assign[=] call[constant[{}: no such file].format, parameter[name[args].same_samples]]
<ast.Raise object at 0x7da1b0a7a440>
if <ast.BoolOp object at 0x7da1b0a7a770> begin[:]
variable[msg] assign[=] constant[need an allele file (either an Illumina manifest [--source-manifest] or a file containing alleles for each marker [--source-alleles]]
<ast.Raise object at 0x7da1b0a7a590>
if <ast.BoolOp object at 0x7da1b0a78e20> begin[:]
variable[msg] assign[=] constant[use either --source-manifest or --source-alleles, not both]
<ast.Raise object at 0x7da1b0a7a800>
if compare[name[args].source_manifest is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b0a7a8c0> begin[:]
variable[msg] assign[=] call[constant[{}: no such file].format, parameter[name[args].source_manifest]]
<ast.Raise object at 0x7da1b0a78970>
if compare[name[args].source_alleles is_not constant[None]] begin[:]
if <ast.UnaryOp object at 0x7da1b0a787c0> begin[:]
variable[msg] assign[=] call[constant[{}: no such file].format, parameter[name[args].source_alleles]]
<ast.Raise object at 0x7da1b0a439a0>
return[constant[True]] | keyword[def] identifier[checkArgs] ( identifier[args] ):
literal[string]
keyword[for] identifier[prefix] keyword[in] [ identifier[args] . identifier[bfile] , identifier[args] . identifier[gold_bfile] ]:
keyword[if] identifier[prefix] keyword[is] keyword[None] :
identifier[msg] = literal[string]
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[for] identifier[fileName] keyword[in] [ identifier[prefix] + identifier[i] keyword[for] identifier[i] keyword[in] [ literal[string] , literal[string] , literal[string] ]]:
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[fileName] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[fileName] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[same_samples] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[args] . identifier[same_samples] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[if] ( identifier[args] . identifier[source_manifest] keyword[is] keyword[None] ) keyword[and] ( identifier[args] . identifier[source_alleles] keyword[is] keyword[None] ):
identifier[msg] =( literal[string]
literal[string]
literal[string] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[if] (( identifier[args] . identifier[source_manifest] keyword[is] keyword[not] keyword[None] ) keyword[and]
( identifier[args] . identifier[source_alleles] keyword[is] keyword[not] keyword[None] )):
identifier[msg] =( literal[string] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[if] identifier[args] . identifier[source_manifest] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[source_manifest] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[args] . identifier[source_manifest] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[if] identifier[args] . identifier[source_alleles] keyword[is] keyword[not] keyword[None] :
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[args] . identifier[source_alleles] ):
identifier[msg] = literal[string] . identifier[format] ( identifier[args] . identifier[source_alleles] )
keyword[raise] identifier[ProgramError] ( identifier[msg] )
keyword[return] keyword[True] | def checkArgs(args):
"""Checks the arguments and options.
:param args: a :py:class:`Namespace` object containing the options of the
program.
:type args: :py:class:`argparse.Namespace`
:returns: ``True`` if everything was OK.
If there is a problem with an option, an exception is raised using the
:py:class:`ProgramError` class, a message is printed
to the :class:`sys.stderr` and the program exists with code 1.
"""
# Check if we have the binary files
for prefix in [args.bfile, args.gold_bfile]:
if prefix is None:
msg = 'no input file'
raise ProgramError(msg) # depends on [control=['if'], data=[]]
for fileName in [prefix + i for i in ['.bed', '.bim', '.fam']]:
if not os.path.isfile(fileName):
msg = '{}: no such file'.format(fileName)
raise ProgramError(msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['fileName']] # depends on [control=['for'], data=['prefix']]
# Check for the same sample file
if not os.path.isfile(args.same_samples):
msg = '{}: no such file'.format(args.same_samples)
raise ProgramError(msg) # depends on [control=['if'], data=[]]
# Check we have either a manifest or a allele file
if args.source_manifest is None and args.source_alleles is None:
msg = 'need an allele file (either an Illumina manifest [--source-manifest] or a file containing alleles for each marker [--source-alleles]'
raise ProgramError(msg) # depends on [control=['if'], data=[]]
if args.source_manifest is not None and args.source_alleles is not None:
msg = 'use either --source-manifest or --source-alleles, not both'
raise ProgramError(msg) # depends on [control=['if'], data=[]]
# Check for the manifests
if args.source_manifest is not None:
if not os.path.isfile(args.source_manifest):
msg = '{}: no such file'.format(args.source_manifest)
raise ProgramError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if args.source_alleles is not None:
if not os.path.isfile(args.source_alleles):
msg = '{}: no such file'.format(args.source_alleles)
raise ProgramError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
return True |
def bootstrap_executive_office(self, election):
    """
    For executive offices, create page content for the office.
    For the president, create pages for each state result.
    """
    office = election.race.office
    division = office.jurisdiction.division
    day = election.election_day

    # Page content for the executive office itself
    PageContent.objects.get_or_create(
        content_type=ContentType.objects.get_for_model(office),
        object_id=office.pk,
        election_day=day,
        division=division,
    )

    if division.level == self.NATIONAL_LEVEL:
        # Presidential race: one page per state result
        self.bootstrap_executive_office_states(election)
    else:
        # State-level executive (e.g. governor): page type keyed on the office model
        office_page_type, _ = PageType.objects.get_or_create(
            model_type=ContentType.objects.get(
                app_label="government", model="office"
            ),
            election_day=day,
            division_level=self.STATE_LEVEL,
        )
        PageContent.objects.get_or_create(
            content_type=ContentType.objects.get_for_model(office_page_type),
            object_id=office_page_type.pk,
            election_day=day,
        )

        # Generic page type for the state division model
        state_page_type, _ = PageType.objects.get_or_create(
            model_type=ContentType.objects.get(
                app_label="geography", model="division"
            ),
            election_day=day,
            division_level=DivisionLevel.objects.get(
                name=DivisionLevel.STATE
            ),
        )
        PageContent.objects.get_or_create(
            content_type=ContentType.objects.get_for_model(state_page_type),
            object_id=state_page_type.pk,
            election_day=day,
        )

        # Page content for the division itself (regular, non-special election)
        PageContent.objects.get_or_create(
            content_type=ContentType.objects.get_for_model(division),
            object_id=division.pk,
            election_day=day,
            special_election=False,
        )
constant[
For executive offices, create page content for the office.
For the president, create pages for each state result.
]
variable[division] assign[=] name[election].race.office.jurisdiction.division
variable[content_type] assign[=] call[name[ContentType].objects.get_for_model, parameter[name[election].race.office]]
call[name[PageContent].objects.get_or_create, parameter[]]
if compare[name[division].level equal[==] name[self].NATIONAL_LEVEL] begin[:]
call[name[self].bootstrap_executive_office_states, parameter[name[election]]] | keyword[def] identifier[bootstrap_executive_office] ( identifier[self] , identifier[election] ):
literal[string]
identifier[division] = identifier[election] . identifier[race] . identifier[office] . identifier[jurisdiction] . identifier[division]
identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[election] . identifier[race] . identifier[office] )
identifier[PageContent] . identifier[objects] . identifier[get_or_create] (
identifier[content_type] = identifier[content_type] ,
identifier[object_id] = identifier[election] . identifier[race] . identifier[office] . identifier[pk] ,
identifier[election_day] = identifier[election] . identifier[election_day] ,
identifier[division] = identifier[division] ,
)
keyword[if] identifier[division] . identifier[level] == identifier[self] . identifier[NATIONAL_LEVEL] :
identifier[self] . identifier[bootstrap_executive_office_states] ( identifier[election] )
keyword[else] :
identifier[page_type] , identifier[created] = identifier[PageType] . identifier[objects] . identifier[get_or_create] (
identifier[model_type] = identifier[ContentType] . identifier[objects] . identifier[get] (
identifier[app_label] = literal[string] , identifier[model] = literal[string]
),
identifier[election_day] = identifier[election] . identifier[election_day] ,
identifier[division_level] = identifier[self] . identifier[STATE_LEVEL] ,
)
identifier[PageContent] . identifier[objects] . identifier[get_or_create] (
identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[page_type] ),
identifier[object_id] = identifier[page_type] . identifier[pk] ,
identifier[election_day] = identifier[election] . identifier[election_day] ,
)
identifier[generic_state_page_type] , identifier[created] = identifier[PageType] . identifier[objects] . identifier[get_or_create] (
identifier[model_type] = identifier[ContentType] . identifier[objects] . identifier[get] (
identifier[app_label] = literal[string] , identifier[model] = literal[string]
),
identifier[election_day] = identifier[election] . identifier[election_day] ,
identifier[division_level] = identifier[DivisionLevel] . identifier[objects] . identifier[get] (
identifier[name] = identifier[DivisionLevel] . identifier[STATE]
),
)
identifier[PageContent] . identifier[objects] . identifier[get_or_create] (
identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] (
identifier[generic_state_page_type]
),
identifier[object_id] = identifier[generic_state_page_type] . identifier[pk] ,
identifier[election_day] = identifier[election] . identifier[election_day] ,
)
identifier[PageContent] . identifier[objects] . identifier[get_or_create] (
identifier[content_type] = identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[division] ),
identifier[object_id] = identifier[division] . identifier[pk] ,
identifier[election_day] = identifier[election] . identifier[election_day] ,
identifier[special_election] = keyword[False] ,
) | def bootstrap_executive_office(self, election):
"""
For executive offices, create page content for the office.
For the president, create pages for each state result.
"""
division = election.race.office.jurisdiction.division
content_type = ContentType.objects.get_for_model(election.race.office)
PageContent.objects.get_or_create(content_type=content_type, object_id=election.race.office.pk, election_day=election.election_day, division=division)
if division.level == self.NATIONAL_LEVEL:
self.bootstrap_executive_office_states(election) # depends on [control=['if'], data=[]]
else:
# Create state governor page type
(page_type, created) = PageType.objects.get_or_create(model_type=ContentType.objects.get(app_label='government', model='office'), election_day=election.election_day, division_level=self.STATE_LEVEL)
PageContent.objects.get_or_create(content_type=ContentType.objects.get_for_model(page_type), object_id=page_type.pk, election_day=election.election_day)
(generic_state_page_type, created) = PageType.objects.get_or_create(model_type=ContentType.objects.get(app_label='geography', model='division'), election_day=election.election_day, division_level=DivisionLevel.objects.get(name=DivisionLevel.STATE))
PageContent.objects.get_or_create(content_type=ContentType.objects.get_for_model(generic_state_page_type), object_id=generic_state_page_type.pk, election_day=election.election_day)
PageContent.objects.get_or_create(content_type=ContentType.objects.get_for_model(division), object_id=division.pk, election_day=election.election_day, special_election=False) |
def resolve_upload_path(self, filename=None):
    """Resolve upload path for use with the executor.

    :param filename: Filename to resolve
    :return: Resolved filename, which can be used to access the
        given uploaded file in programs executed using this
        executor
    """
    upload_dir = settings.FLOW_EXECUTOR['UPLOAD_DIR']
    # No filename given: return the upload directory itself
    if filename is None:
        return upload_dir
    return os.path.join(upload_dir, filename)
constant[Resolve upload path for use with the executor.
:param filename: Filename to resolve
:return: Resolved filename, which can be used to access the
given uploaded file in programs executed using this
executor
]
if compare[name[filename] is constant[None]] begin[:]
return[call[name[settings].FLOW_EXECUTOR][constant[UPLOAD_DIR]]]
return[call[name[os].path.join, parameter[call[name[settings].FLOW_EXECUTOR][constant[UPLOAD_DIR]], name[filename]]]] | keyword[def] identifier[resolve_upload_path] ( identifier[self] , identifier[filename] = keyword[None] ):
literal[string]
keyword[if] identifier[filename] keyword[is] keyword[None] :
keyword[return] identifier[settings] . identifier[FLOW_EXECUTOR] [ literal[string] ]
keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[settings] . identifier[FLOW_EXECUTOR] [ literal[string] ], identifier[filename] ) | def resolve_upload_path(self, filename=None):
"""Resolve upload path for use with the executor.
:param filename: Filename to resolve
:return: Resolved filename, which can be used to access the
given uploaded file in programs executed using this
executor
"""
if filename is None:
return settings.FLOW_EXECUTOR['UPLOAD_DIR'] # depends on [control=['if'], data=[]]
return os.path.join(settings.FLOW_EXECUTOR['UPLOAD_DIR'], filename) |
def update_tab_label(self, state_m):
    """Update all tab labels

    :param rafcon.state_machine.states.state.State state_m: State model who's tab label is to be updated
    """
    state_identifier = self.get_state_identifier(state_m)
    # Look the state up in the open tabs first, then among the closed ones
    tab_info = self.tabs.get(state_identifier)
    if tab_info is None:
        tab_info = self.closed_tabs.get(state_identifier)
    if tab_info is None:
        # State has no tab at all -- nothing to relabel
        return
    set_tab_label_texts(tab_info['page'].title_label, state_m,
                        tab_info['source_code_view_is_dirty'])
constant[Update all tab labels
:param rafcon.state_machine.states.state.State state_m: State model who's tab label is to be updated
]
variable[state_identifier] assign[=] call[name[self].get_state_identifier, parameter[name[state_m]]]
if <ast.BoolOp object at 0x7da18bc73790> begin[:]
return[None]
variable[tab_info] assign[=] <ast.IfExp object at 0x7da18bc709a0>
variable[page] assign[=] call[name[tab_info]][constant[page]]
call[name[set_tab_label_texts], parameter[name[page].title_label, name[state_m], call[name[tab_info]][constant[source_code_view_is_dirty]]]] | keyword[def] identifier[update_tab_label] ( identifier[self] , identifier[state_m] ):
literal[string]
identifier[state_identifier] = identifier[self] . identifier[get_state_identifier] ( identifier[state_m] )
keyword[if] identifier[state_identifier] keyword[not] keyword[in] identifier[self] . identifier[tabs] keyword[and] identifier[state_identifier] keyword[not] keyword[in] identifier[self] . identifier[closed_tabs] :
keyword[return]
identifier[tab_info] = identifier[self] . identifier[tabs] [ identifier[state_identifier] ] keyword[if] identifier[state_identifier] keyword[in] identifier[self] . identifier[tabs] keyword[else] identifier[self] . identifier[closed_tabs] [ identifier[state_identifier] ]
identifier[page] = identifier[tab_info] [ literal[string] ]
identifier[set_tab_label_texts] ( identifier[page] . identifier[title_label] , identifier[state_m] , identifier[tab_info] [ literal[string] ]) | def update_tab_label(self, state_m):
"""Update all tab labels
:param rafcon.state_machine.states.state.State state_m: State model who's tab label is to be updated
"""
state_identifier = self.get_state_identifier(state_m)
if state_identifier not in self.tabs and state_identifier not in self.closed_tabs:
return # depends on [control=['if'], data=[]]
tab_info = self.tabs[state_identifier] if state_identifier in self.tabs else self.closed_tabs[state_identifier]
page = tab_info['page']
set_tab_label_texts(page.title_label, state_m, tab_info['source_code_view_is_dirty']) |
def __update(self):
    """
    This is called each time an attribute is asked, to be sure every params are updated, because of callbacks.
    """
    # Write through the base-class __setattr__ so our own property setters are
    # not re-triggered; "size" is a property, hence width and height are stored
    # separately.
    base_setattr = super(BaseWidget, self).__setattr__
    width, height = self.size
    base_setattr("width", width)
    base_setattr("height", height)
    base_setattr(self.anchor, self.pos)
constant[
This is called each time an attribute is asked, to be sure every params are updated, beceause of callbacks.
]
<ast.Tuple object at 0x7da18f09dea0> assign[=] name[self].size
call[call[name[super], parameter[name[BaseWidget], name[self]]].__setattr__, parameter[constant[width], name[width]]]
call[call[name[super], parameter[name[BaseWidget], name[self]]].__setattr__, parameter[constant[height], name[height]]]
call[call[name[super], parameter[name[BaseWidget], name[self]]].__setattr__, parameter[name[self].anchor, name[self].pos]] | keyword[def] identifier[__update] ( identifier[self] ):
literal[string]
identifier[width] , identifier[height] = identifier[self] . identifier[size]
identifier[super] ( identifier[BaseWidget] , identifier[self] ). identifier[__setattr__] ( literal[string] , identifier[width] )
identifier[super] ( identifier[BaseWidget] , identifier[self] ). identifier[__setattr__] ( literal[string] , identifier[height] )
identifier[super] ( identifier[BaseWidget] , identifier[self] ). identifier[__setattr__] ( identifier[self] . identifier[anchor] , identifier[self] . identifier[pos] ) | def __update(self):
"""
This is called each time an attribute is asked, to be sure every params are updated, beceause of callbacks.
"""
# I can not set the size attr because it is my property, so I set the width and height separately
(width, height) = self.size
super(BaseWidget, self).__setattr__('width', width)
super(BaseWidget, self).__setattr__('height', height)
super(BaseWidget, self).__setattr__(self.anchor, self.pos) |
def knot_refinement(degree, knotvector, ctrlpts, **kwargs):
    """ Computes the knot vector and the control points of the rational/non-rational spline after knot refinement.

    Implementation of Algorithm A5.4 of The NURBS Book by Piegl & Tiller, 2nd Edition.

    The algorithm automatically finds the knots to be refined, i.e. the middle knots in the knot vector, and their
    multiplicities, i.e. number of same knots in the knot vector. This is the basis of the knot refinement algorithm.
    This operation can be overridden by providing a list of knots via ``knot_list`` argument. In addition, users can
    provide a list of additional knots to be inserted in the knot vector via ``add_knot_list`` argument.

    Moreover, a numerical ``density`` argument can be used to automate extra knot insertions. If ``density`` is bigger
    than 1, then the algorithm finds the middle knots in each internal knot span to increase the number of knots to be
    refined.

    **Example**: Let the knot vector to be refined is ``[0, 2, 4]`` with the superfluous knots from the start and end
    are removed:

    * If ``density`` is 1, knot vector to be refined is ``[0, 2, 4]``
    * If ``density`` is 2, knot vector to be refined is ``[0, 1, 2, 3, 4]``
    * If ``density`` is 3, knot vector to be refined is ``[0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4]``

    Keyword Arguments:
        * ``knot_list``: knot list to be refined. *Default: list of internal knots*
        * ``add_knot_list``: additional list of knots to be refined. *Default: []*
        * ``density``: Density of the knots. *Default: 1*

    :param degree: degree
    :type degree: int
    :param knotvector: knot vector
    :type knotvector: list, tuple
    :param ctrlpts: control points
    :type ctrlpts: list
    :return: updated control points and knot vector
    :rtype: tuple
    """
    # Get keyword arguments
    tol = kwargs.get('tol', 10e-8)  # tolerance value for zero equality checking
    check_num = kwargs.get('check_num', True)  # enables/disables input validity checking
    knot_list = kwargs.get('knot_list', knotvector[degree:-degree])
    add_knot_list = kwargs.get('add_knot_list', list())
    density = kwargs.get('density', 1)

    # Input validity checking
    if check_num:
        if not isinstance(density, int):
            raise GeomdlException("Density value must be an integer", data=dict(density=density))

        if density < 1:
            raise GeomdlException("Density value cannot be less than 1", data=dict(density=density))

    # Add additional knots to be refined
    if add_knot_list:
        knot_list += list(add_knot_list)

    # Sort the list and convert to a set to make sure that the values are unique
    knot_list = sorted(set(knot_list))

    # Increase knot density by inserting span midpoints (density - 1 passes).
    # BUGFIX: the previous implementation also appended ``knot_list[i + 1]``
    # inside the inner loop, so every interior knot was duplicated on each
    # pass. The duplicates made the insertion list ``X`` below request more
    # than (degree - multiplicity) insertions for those knots, exceeding the
    # maximum legal knot multiplicity and contradicting the documented result
    # (e.g. density=2 on [0, 2, 4] must give [0, 1, 2, 3, 4], not
    # [0, 1, 2, 2, 3, 4]). Append only the left knot and the midpoint per
    # span, then the final knot once after the loop.
    for _ in range(0, density - 1):
        rknots = []
        for i in range(len(knot_list) - 1):
            knot_tmp = knot_list[i] + ((knot_list[i + 1] - knot_list[i]) / 2.0)
            rknots.append(knot_list[i])
            rknots.append(knot_tmp)
        rknots.append(knot_list[-1])
        knot_list = rknots

    # Find how many knot insertions are necessary: each knot is inserted
    # (degree - current multiplicity) times
    X = []
    for mk in knot_list:
        s = find_multiplicity(mk, knotvector)
        r = degree - s
        X += [mk for _ in range(r)]

    # Check if the knot refinement is possible
    if not X:
        raise GeomdlException("Cannot refine knot vector on this parametric dimension")

    # Initialize common variables
    r = len(X) - 1
    n = len(ctrlpts) - 1
    m = n + degree + 1
    a = find_span_linear(degree, knotvector, n, X[0])
    b = find_span_linear(degree, knotvector, n, X[r]) + 1

    # Initialize new control points array
    # NOTE(review): the else branch assumes each control point entry is itself
    # a list of point rows (surface/volume case) -- confirm against callers
    if isinstance(ctrlpts[0][0], float):
        new_ctrlpts = [[] for _ in range(n + r + 2)]
    else:
        new_ctrlpts = [[[] for _ in range(len(ctrlpts[0]))] for _ in range(n + r + 2)]

    # Fill unchanged control points (outside the affected span [a, b])
    for j in range(0, a - degree + 1):
        new_ctrlpts[j] = ctrlpts[j]
    for j in range(b - 1, n + 1):
        new_ctrlpts[j + r + 1] = ctrlpts[j]

    # Initialize new knot vector array
    new_kv = [0.0 for _ in range(m + r + 2)]

    # Fill unchanged knots
    for j in range(0, a + 1):
        new_kv[j] = knotvector[j]
    for j in range(b + degree, m + 1):
        new_kv[j + r + 1] = knotvector[j]

    # Initialize variables for knot refinement
    i = b + degree - 1
    k = b + degree + r
    j = r

    # Apply knot refinement (core loop of Algorithm A5.4)
    while j >= 0:
        while X[j] <= knotvector[i] and i > a:
            new_ctrlpts[k - degree - 1] = ctrlpts[i - degree - 1]
            new_kv[k] = knotvector[i]
            k -= 1
            i -= 1
        new_ctrlpts[k - degree - 1] = deepcopy(new_ctrlpts[k - degree])
        for l in range(1, degree + 1):
            idx = k - degree + l
            alpha = new_kv[k + l] - X[j]
            if abs(alpha) < tol:
                # Coincident knots: copy the control point unchanged
                new_ctrlpts[idx - 1] = deepcopy(new_ctrlpts[idx])
            else:
                alpha = alpha / (new_kv[k + l] - knotvector[i - degree + l])
                if isinstance(ctrlpts[0][0], float):
                    new_ctrlpts[idx - 1] = [alpha * p1 + (1.0 - alpha) * p2 for p1, p2 in
                                            zip(new_ctrlpts[idx - 1], new_ctrlpts[idx])]
                else:
                    for idx2 in range(len(ctrlpts[0])):
                        new_ctrlpts[idx - 1][idx2] = [alpha * p1 + (1.0 - alpha) * p2 for p1, p2 in
                                                      zip(new_ctrlpts[idx - 1][idx2], new_ctrlpts[idx][idx2])]
        new_kv[k] = X[j]
        k = k - 1
        j -= 1

    # Return control points and knot vector after refinement
    return new_ctrlpts, new_kv
constant[ Computes the knot vector and the control points of the rational/non-rational spline after knot refinement.
Implementation of Algorithm A5.4 of The NURBS Book by Piegl & Tiller, 2nd Edition.
The algorithm automatically find the knots to be refined, i.e. the middle knots in the knot vector, and their
multiplicities, i.e. number of same knots in the knot vector. This is the basis of knot refinement algorithm.
This operation can be overridden by providing a list of knots via ``knot_list`` argument. In addition, users can
provide a list of additional knots to be inserted in the knot vector via ``add_knot_list`` argument.
Moreover, a numerical ``density`` argument can be used to automate extra knot insertions. If ``density`` is bigger
than 1, then the algorithm finds the middle knots in each internal knot span to increase the number of knots to be
refined.
**Example**: Let the knot vector to be refined is ``[0, 2, 4]`` with the superfluous knots from the start and end
are removed:
* If ``density`` is 1, knot vector to be refined is ``[0, 2, 4]``
* If ``density`` is 2, knot vector to be refined is ``[0, 1, 2, 3, 4]``
* If ``density`` is 3, knot vector to be refined is ``[0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4]``
Keyword Arguments:
* ``knot_list``: knot list to be refined. *Default: list of internal knots*
* ``add_knot_list``: additional list of knots to be refined. *Default: []*
* ``density``: Density of the knots. *Default: 1*
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: list, tuple
:param ctrlpts: control points
:return: updated control points and knot vector
:rtype: tuple
]
variable[tol] assign[=] call[name[kwargs].get, parameter[constant[tol], constant[1e-07]]]
variable[check_num] assign[=] call[name[kwargs].get, parameter[constant[check_num], constant[True]]]
variable[knot_list] assign[=] call[name[kwargs].get, parameter[constant[knot_list], call[name[knotvector]][<ast.Slice object at 0x7da1b17f8be0>]]]
variable[add_knot_list] assign[=] call[name[kwargs].get, parameter[constant[add_knot_list], call[name[list], parameter[]]]]
variable[density] assign[=] call[name[kwargs].get, parameter[constant[density], constant[1]]]
if name[check_num] begin[:]
if <ast.UnaryOp object at 0x7da1b17f8130> begin[:]
<ast.Raise object at 0x7da1b17f82b0>
if compare[name[density] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b17f9210>
if name[add_knot_list] begin[:]
<ast.AugAssign object at 0x7da1b16a72b0>
variable[knot_list] assign[=] call[name[sorted], parameter[call[name[set], parameter[name[knot_list]]]]]
for taget[name[d]] in starred[call[name[range], parameter[constant[0], binary_operation[name[density] - constant[1]]]]] begin[:]
variable[rknots] assign[=] list[[]]
for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[knot_list]]] - constant[1]]]]] begin[:]
variable[knot_tmp] assign[=] binary_operation[call[name[knot_list]][name[i]] + binary_operation[binary_operation[call[name[knot_list]][binary_operation[name[i] + constant[1]]] - call[name[knot_list]][name[i]]] / constant[2.0]]]
call[name[rknots].append, parameter[call[name[knot_list]][name[i]]]]
call[name[rknots].append, parameter[name[knot_tmp]]]
call[name[rknots].append, parameter[call[name[knot_list]][binary_operation[name[i] + constant[1]]]]]
variable[knot_list] assign[=] name[rknots]
variable[X] assign[=] list[[]]
for taget[name[mk]] in starred[name[knot_list]] begin[:]
variable[s] assign[=] call[name[find_multiplicity], parameter[name[mk], name[knotvector]]]
variable[r] assign[=] binary_operation[name[degree] - name[s]]
<ast.AugAssign object at 0x7da1b16ab820>
if <ast.UnaryOp object at 0x7da1b16a8610> begin[:]
<ast.Raise object at 0x7da1b16a8af0>
variable[r] assign[=] binary_operation[call[name[len], parameter[name[X]]] - constant[1]]
variable[n] assign[=] binary_operation[call[name[len], parameter[name[ctrlpts]]] - constant[1]]
variable[m] assign[=] binary_operation[binary_operation[name[n] + name[degree]] + constant[1]]
variable[a] assign[=] call[name[find_span_linear], parameter[name[degree], name[knotvector], name[n], call[name[X]][constant[0]]]]
variable[b] assign[=] binary_operation[call[name[find_span_linear], parameter[name[degree], name[knotvector], name[n], call[name[X]][name[r]]]] + constant[1]]
if call[name[isinstance], parameter[call[call[name[ctrlpts]][constant[0]]][constant[0]], name[float]]] begin[:]
variable[new_ctrlpts] assign[=] <ast.ListComp object at 0x7da1b16ab670>
for taget[name[j]] in starred[call[name[range], parameter[constant[0], binary_operation[binary_operation[name[a] - name[degree]] + constant[1]]]]] begin[:]
call[name[new_ctrlpts]][name[j]] assign[=] call[name[ctrlpts]][name[j]]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[b] - constant[1]], binary_operation[name[n] + constant[1]]]]] begin[:]
call[name[new_ctrlpts]][binary_operation[binary_operation[name[j] + name[r]] + constant[1]]] assign[=] call[name[ctrlpts]][name[j]]
variable[new_kv] assign[=] <ast.ListComp object at 0x7da1b16a9420>
for taget[name[j]] in starred[call[name[range], parameter[constant[0], binary_operation[name[a] + constant[1]]]]] begin[:]
call[name[new_kv]][name[j]] assign[=] call[name[knotvector]][name[j]]
for taget[name[j]] in starred[call[name[range], parameter[binary_operation[name[b] + name[degree]], binary_operation[name[m] + constant[1]]]]] begin[:]
call[name[new_kv]][binary_operation[binary_operation[name[j] + name[r]] + constant[1]]] assign[=] call[name[knotvector]][name[j]]
variable[i] assign[=] binary_operation[binary_operation[name[b] + name[degree]] - constant[1]]
variable[k] assign[=] binary_operation[binary_operation[name[b] + name[degree]] + name[r]]
variable[j] assign[=] name[r]
while compare[name[j] greater_or_equal[>=] constant[0]] begin[:]
while <ast.BoolOp object at 0x7da1b16aba30> begin[:]
call[name[new_ctrlpts]][binary_operation[binary_operation[name[k] - name[degree]] - constant[1]]] assign[=] call[name[ctrlpts]][binary_operation[binary_operation[name[i] - name[degree]] - constant[1]]]
call[name[new_kv]][name[k]] assign[=] call[name[knotvector]][name[i]]
<ast.AugAssign object at 0x7da1b16c4cd0>
<ast.AugAssign object at 0x7da1b16c4ca0>
call[name[new_ctrlpts]][binary_operation[binary_operation[name[k] - name[degree]] - constant[1]]] assign[=] call[name[deepcopy], parameter[call[name[new_ctrlpts]][binary_operation[name[k] - name[degree]]]]]
for taget[name[l]] in starred[call[name[range], parameter[constant[1], binary_operation[name[degree] + constant[1]]]]] begin[:]
variable[idx] assign[=] binary_operation[binary_operation[name[k] - name[degree]] + name[l]]
variable[alpha] assign[=] binary_operation[call[name[new_kv]][binary_operation[name[k] + name[l]]] - call[name[X]][name[j]]]
if compare[call[name[abs], parameter[name[alpha]]] less[<] name[tol]] begin[:]
call[name[new_ctrlpts]][binary_operation[name[idx] - constant[1]]] assign[=] call[name[deepcopy], parameter[call[name[new_ctrlpts]][name[idx]]]]
call[name[new_kv]][name[k]] assign[=] call[name[X]][name[j]]
variable[k] assign[=] binary_operation[name[k] - constant[1]]
<ast.AugAssign object at 0x7da1b16c5630>
return[tuple[[<ast.Name object at 0x7da1b16c5750>, <ast.Name object at 0x7da1b16c5450>]]] | keyword[def] identifier[knot_refinement] ( identifier[degree] , identifier[knotvector] , identifier[ctrlpts] ,** identifier[kwargs] ):
literal[string]
identifier[tol] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
identifier[check_num] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] )
identifier[knot_list] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[knotvector] [ identifier[degree] :- identifier[degree] ])
identifier[add_knot_list] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[list] ())
identifier[density] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] )
keyword[if] identifier[check_num] :
keyword[if] keyword[not] identifier[isinstance] ( identifier[density] , identifier[int] ):
keyword[raise] identifier[GeomdlException] ( literal[string] , identifier[data] = identifier[dict] ( identifier[density] = identifier[density] ))
keyword[if] identifier[density] < literal[int] :
keyword[raise] identifier[GeomdlException] ( literal[string] , identifier[data] = identifier[dict] ( identifier[density] = identifier[density] ))
keyword[if] identifier[add_knot_list] :
identifier[knot_list] += identifier[list] ( identifier[add_knot_list] )
identifier[knot_list] = identifier[sorted] ( identifier[set] ( identifier[knot_list] ))
keyword[for] identifier[d] keyword[in] identifier[range] ( literal[int] , identifier[density] - literal[int] ):
identifier[rknots] =[]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[knot_list] )- literal[int] ):
identifier[knot_tmp] = identifier[knot_list] [ identifier[i] ]+(( identifier[knot_list] [ identifier[i] + literal[int] ]- identifier[knot_list] [ identifier[i] ])/ literal[int] )
identifier[rknots] . identifier[append] ( identifier[knot_list] [ identifier[i] ])
identifier[rknots] . identifier[append] ( identifier[knot_tmp] )
identifier[rknots] . identifier[append] ( identifier[knot_list] [ identifier[i] + literal[int] ])
identifier[knot_list] = identifier[rknots]
identifier[X] =[]
keyword[for] identifier[mk] keyword[in] identifier[knot_list] :
identifier[s] = identifier[find_multiplicity] ( identifier[mk] , identifier[knotvector] )
identifier[r] = identifier[degree] - identifier[s]
identifier[X] +=[ identifier[mk] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[r] )]
keyword[if] keyword[not] identifier[X] :
keyword[raise] identifier[GeomdlException] ( literal[string] )
identifier[r] = identifier[len] ( identifier[X] )- literal[int]
identifier[n] = identifier[len] ( identifier[ctrlpts] )- literal[int]
identifier[m] = identifier[n] + identifier[degree] + literal[int]
identifier[a] = identifier[find_span_linear] ( identifier[degree] , identifier[knotvector] , identifier[n] , identifier[X] [ literal[int] ])
identifier[b] = identifier[find_span_linear] ( identifier[degree] , identifier[knotvector] , identifier[n] , identifier[X] [ identifier[r] ])+ literal[int]
keyword[if] identifier[isinstance] ( identifier[ctrlpts] [ literal[int] ][ literal[int] ], identifier[float] ):
identifier[new_ctrlpts] =[[] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[n] + identifier[r] + literal[int] )]
keyword[else] :
identifier[new_ctrlpts] =[[[] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[len] ( identifier[ctrlpts] [ literal[int] ]))] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[n] + identifier[r] + literal[int] )]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[a] - identifier[degree] + literal[int] ):
identifier[new_ctrlpts] [ identifier[j] ]= identifier[ctrlpts] [ identifier[j] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[b] - literal[int] , identifier[n] + literal[int] ):
identifier[new_ctrlpts] [ identifier[j] + identifier[r] + literal[int] ]= identifier[ctrlpts] [ identifier[j] ]
identifier[new_kv] =[ literal[int] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[m] + identifier[r] + literal[int] )]
keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[a] + literal[int] ):
identifier[new_kv] [ identifier[j] ]= identifier[knotvector] [ identifier[j] ]
keyword[for] identifier[j] keyword[in] identifier[range] ( identifier[b] + identifier[degree] , identifier[m] + literal[int] ):
identifier[new_kv] [ identifier[j] + identifier[r] + literal[int] ]= identifier[knotvector] [ identifier[j] ]
identifier[i] = identifier[b] + identifier[degree] - literal[int]
identifier[k] = identifier[b] + identifier[degree] + identifier[r]
identifier[j] = identifier[r]
keyword[while] identifier[j] >= literal[int] :
keyword[while] identifier[X] [ identifier[j] ]<= identifier[knotvector] [ identifier[i] ] keyword[and] identifier[i] > identifier[a] :
identifier[new_ctrlpts] [ identifier[k] - identifier[degree] - literal[int] ]= identifier[ctrlpts] [ identifier[i] - identifier[degree] - literal[int] ]
identifier[new_kv] [ identifier[k] ]= identifier[knotvector] [ identifier[i] ]
identifier[k] -= literal[int]
identifier[i] -= literal[int]
identifier[new_ctrlpts] [ identifier[k] - identifier[degree] - literal[int] ]= identifier[deepcopy] ( identifier[new_ctrlpts] [ identifier[k] - identifier[degree] ])
keyword[for] identifier[l] keyword[in] identifier[range] ( literal[int] , identifier[degree] + literal[int] ):
identifier[idx] = identifier[k] - identifier[degree] + identifier[l]
identifier[alpha] = identifier[new_kv] [ identifier[k] + identifier[l] ]- identifier[X] [ identifier[j] ]
keyword[if] identifier[abs] ( identifier[alpha] )< identifier[tol] :
identifier[new_ctrlpts] [ identifier[idx] - literal[int] ]= identifier[deepcopy] ( identifier[new_ctrlpts] [ identifier[idx] ])
keyword[else] :
identifier[alpha] = identifier[alpha] /( identifier[new_kv] [ identifier[k] + identifier[l] ]- identifier[knotvector] [ identifier[i] - identifier[degree] + identifier[l] ])
keyword[if] identifier[isinstance] ( identifier[ctrlpts] [ literal[int] ][ literal[int] ], identifier[float] ):
identifier[new_ctrlpts] [ identifier[idx] - literal[int] ]=[ identifier[alpha] * identifier[p1] +( literal[int] - identifier[alpha] )* identifier[p2] keyword[for] identifier[p1] , identifier[p2] keyword[in]
identifier[zip] ( identifier[new_ctrlpts] [ identifier[idx] - literal[int] ], identifier[new_ctrlpts] [ identifier[idx] ])]
keyword[else] :
keyword[for] identifier[idx2] keyword[in] identifier[range] ( identifier[len] ( identifier[ctrlpts] [ literal[int] ])):
identifier[new_ctrlpts] [ identifier[idx] - literal[int] ][ identifier[idx2] ]=[ identifier[alpha] * identifier[p1] +( literal[int] - identifier[alpha] )* identifier[p2] keyword[for] identifier[p1] , identifier[p2] keyword[in]
identifier[zip] ( identifier[new_ctrlpts] [ identifier[idx] - literal[int] ][ identifier[idx2] ], identifier[new_ctrlpts] [ identifier[idx] ][ identifier[idx2] ])]
identifier[new_kv] [ identifier[k] ]= identifier[X] [ identifier[j] ]
identifier[k] = identifier[k] - literal[int]
identifier[j] -= literal[int]
keyword[return] identifier[new_ctrlpts] , identifier[new_kv] | def knot_refinement(degree, knotvector, ctrlpts, **kwargs):
""" Computes the knot vector and the control points of the rational/non-rational spline after knot refinement.
Implementation of Algorithm A5.4 of The NURBS Book by Piegl & Tiller, 2nd Edition.
The algorithm automatically find the knots to be refined, i.e. the middle knots in the knot vector, and their
multiplicities, i.e. number of same knots in the knot vector. This is the basis of knot refinement algorithm.
This operation can be overridden by providing a list of knots via ``knot_list`` argument. In addition, users can
provide a list of additional knots to be inserted in the knot vector via ``add_knot_list`` argument.
Moreover, a numerical ``density`` argument can be used to automate extra knot insertions. If ``density`` is bigger
than 1, then the algorithm finds the middle knots in each internal knot span to increase the number of knots to be
refined.
**Example**: Let the knot vector to be refined is ``[0, 2, 4]`` with the superfluous knots from the start and end
are removed:
* If ``density`` is 1, knot vector to be refined is ``[0, 2, 4]``
* If ``density`` is 2, knot vector to be refined is ``[0, 1, 2, 3, 4]``
* If ``density`` is 3, knot vector to be refined is ``[0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5, 4]``
Keyword Arguments:
* ``knot_list``: knot list to be refined. *Default: list of internal knots*
* ``add_knot_list``: additional list of knots to be refined. *Default: []*
* ``density``: Density of the knots. *Default: 1*
:param degree: degree
:type degree: int
:param knotvector: knot vector
:type knotvector: list, tuple
:param ctrlpts: control points
:return: updated control points and knot vector
:rtype: tuple
"""
# Get keyword arguments
tol = kwargs.get('tol', 1e-07) # tolerance value for zero equality checking
check_num = kwargs.get('check_num', True) # enables/disables input validity checking
knot_list = kwargs.get('knot_list', knotvector[degree:-degree])
add_knot_list = kwargs.get('add_knot_list', list())
density = kwargs.get('density', 1)
# Input validity checking
if check_num:
if not isinstance(density, int):
raise GeomdlException('Density value must be an integer', data=dict(density=density)) # depends on [control=['if'], data=[]]
if density < 1:
raise GeomdlException('Density value cannot be less than 1', data=dict(density=density)) # depends on [control=['if'], data=['density']] # depends on [control=['if'], data=[]]
# Add additional knots to be refined
if add_knot_list:
knot_list += list(add_knot_list) # depends on [control=['if'], data=[]]
# Sort the list and convert to a set to make sure that the values are unique
knot_list = sorted(set(knot_list))
# Increase knot density
for d in range(0, density - 1):
rknots = []
for i in range(len(knot_list) - 1):
knot_tmp = knot_list[i] + (knot_list[i + 1] - knot_list[i]) / 2.0
rknots.append(knot_list[i])
rknots.append(knot_tmp) # depends on [control=['for'], data=['i']]
rknots.append(knot_list[i + 1])
knot_list = rknots # depends on [control=['for'], data=[]]
# Find how many knot insertions are necessary
X = []
for mk in knot_list:
s = find_multiplicity(mk, knotvector)
r = degree - s
X += [mk for _ in range(r)] # depends on [control=['for'], data=['mk']]
# Check if the knot refinement is possible
if not X:
raise GeomdlException('Cannot refine knot vector on this parametric dimension') # depends on [control=['if'], data=[]]
# Initialize common variables
r = len(X) - 1
n = len(ctrlpts) - 1
m = n + degree + 1
a = find_span_linear(degree, knotvector, n, X[0])
b = find_span_linear(degree, knotvector, n, X[r]) + 1
# Initialize new control points array
if isinstance(ctrlpts[0][0], float):
new_ctrlpts = [[] for _ in range(n + r + 2)] # depends on [control=['if'], data=[]]
else:
new_ctrlpts = [[[] for _ in range(len(ctrlpts[0]))] for _ in range(n + r + 2)]
# Fill unchanged control points
for j in range(0, a - degree + 1):
new_ctrlpts[j] = ctrlpts[j] # depends on [control=['for'], data=['j']]
for j in range(b - 1, n + 1):
new_ctrlpts[j + r + 1] = ctrlpts[j] # depends on [control=['for'], data=['j']]
# Initialize new knot vector array
new_kv = [0.0 for _ in range(m + r + 2)]
# Fill unchanged knots
for j in range(0, a + 1):
new_kv[j] = knotvector[j] # depends on [control=['for'], data=['j']]
for j in range(b + degree, m + 1):
new_kv[j + r + 1] = knotvector[j] # depends on [control=['for'], data=['j']]
# Initialize variables for knot refinement
i = b + degree - 1
k = b + degree + r
j = r
# Apply knot refinement
while j >= 0:
while X[j] <= knotvector[i] and i > a:
new_ctrlpts[k - degree - 1] = ctrlpts[i - degree - 1]
new_kv[k] = knotvector[i]
k -= 1
i -= 1 # depends on [control=['while'], data=[]]
new_ctrlpts[k - degree - 1] = deepcopy(new_ctrlpts[k - degree])
for l in range(1, degree + 1):
idx = k - degree + l
alpha = new_kv[k + l] - X[j]
if abs(alpha) < tol:
new_ctrlpts[idx - 1] = deepcopy(new_ctrlpts[idx]) # depends on [control=['if'], data=[]]
else:
alpha = alpha / (new_kv[k + l] - knotvector[i - degree + l])
if isinstance(ctrlpts[0][0], float):
new_ctrlpts[idx - 1] = [alpha * p1 + (1.0 - alpha) * p2 for (p1, p2) in zip(new_ctrlpts[idx - 1], new_ctrlpts[idx])] # depends on [control=['if'], data=[]]
else:
for idx2 in range(len(ctrlpts[0])):
new_ctrlpts[idx - 1][idx2] = [alpha * p1 + (1.0 - alpha) * p2 for (p1, p2) in zip(new_ctrlpts[idx - 1][idx2], new_ctrlpts[idx][idx2])] # depends on [control=['for'], data=['idx2']] # depends on [control=['for'], data=['l']]
new_kv[k] = X[j]
k = k - 1
j -= 1 # depends on [control=['while'], data=['j']]
# Return control points and knot vector after refinement
return (new_ctrlpts, new_kv) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.