code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def get_sdb_id_by_path(self, sdb_path):
        """Return the ID of the safe deposit box stored at the given path."""
        # Normalize the lookup key so a path missing its trailing slash
        # still matches the canonical form returned by the service.
        wanted = self._add_slash(sdb_path)
        match = next((box for box in self.get_sdbs() if box['path'] == wanted), None)
        if match is None:
            # Exhausted the listing without a hit.
            raise CerberusClientException("'%s' not found" % sdb_path)
        return str(match['id'])
constant[ Given the path, return the ID for the given safe deposit box.]
variable[json_resp] assign[=] call[name[self].get_sdbs, parameter[]]
variable[path] assign[=] call[name[self]._add_slash, parameter[name[sdb_path]]]
for taget[name[r]] in starred[name[json_resp]] begin[:]
if compare[call[name[r]][constant[path]] equal[==] name[path]] begin[:]
return[call[name[str], parameter[call[name[r]][constant[id]]]]]
<ast.Raise object at 0x7da1b04a76a0> | keyword[def] identifier[get_sdb_id_by_path] ( identifier[self] , identifier[sdb_path] ):
literal[string]
identifier[json_resp] = identifier[self] . identifier[get_sdbs] ()
identifier[path] = identifier[self] . identifier[_add_slash] ( identifier[sdb_path] )
keyword[for] identifier[r] keyword[in] identifier[json_resp] :
keyword[if] identifier[r] [ literal[string] ]== identifier[path] :
keyword[return] identifier[str] ( identifier[r] [ literal[string] ])
keyword[raise] identifier[CerberusClientException] ( literal[string] % identifier[sdb_path] ) | def get_sdb_id_by_path(self, sdb_path):
""" Given the path, return the ID for the given safe deposit box."""
json_resp = self.get_sdbs()
# Deal with the supplied path possibly missing an ending slash
path = self._add_slash(sdb_path)
for r in json_resp:
if r['path'] == path:
return str(r['id']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']]
# If we haven't returned yet then we didn't find what we were
# looking for.
raise CerberusClientException("'%s' not found" % sdb_path) |
def register_node_path(self, node):
        """
        Registers given Node path in the **file_system_events_manager**.

        :param node: Node.
        :type node: FileNode or DirectoryNode or ProjectNode
        :return: Method success.
        :rtype: bool
        """

        # File-backed nodes expose "file"; directory / project nodes only "path".
        try:
            location = node.file
        except AttributeError:
            location = node.path
        location = foundations.strings.to_string(location)
        if not foundations.common.path_exists(location):
            return False
        return self.register_file(location)
constant[
Registers given Node path in the **file_system_events_manager**.
:param node: Node.
:type node: FileNode or DirectoryNode or ProjectNode
:return: Method success.
:rtype: bool
]
variable[path] assign[=] <ast.IfExp object at 0x7da1b09d3f70>
variable[path] assign[=] call[name[foundations].strings.to_string, parameter[name[path]]]
if <ast.UnaryOp object at 0x7da1b09d0610> begin[:]
return[constant[False]]
return[call[name[self].register_file, parameter[name[path]]]] | keyword[def] identifier[register_node_path] ( identifier[self] , identifier[node] ):
literal[string]
identifier[path] = identifier[node] . identifier[file] keyword[if] identifier[hasattr] ( identifier[node] , literal[string] ) keyword[else] identifier[node] . identifier[path]
identifier[path] = identifier[foundations] . identifier[strings] . identifier[to_string] ( identifier[path] )
keyword[if] keyword[not] identifier[foundations] . identifier[common] . identifier[path_exists] ( identifier[path] ):
keyword[return] keyword[False]
keyword[return] identifier[self] . identifier[register_file] ( identifier[path] ) | def register_node_path(self, node):
"""
Registers given Node path in the **file_system_events_manager**.
:param node: Node.
:type node: FileNode or DirectoryNode or ProjectNode
:return: Method success.
:rtype: bool
"""
path = node.file if hasattr(node, 'file') else node.path
path = foundations.strings.to_string(path)
if not foundations.common.path_exists(path):
return False # depends on [control=['if'], data=[]]
return self.register_file(path) |
def easeOutBounce(n):
    """A bouncing tween function that hits the destination and then bounces to rest.

    Args:
      n (float): The time progress, starting at 0.0 and ending at 1.0.

    Returns:
      (float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
    """
    _checkRange(n)
    if n < (1/2.75):
        return 7.5625 * n * n
    elif n < (2/2.75):
        n -= (1.5/2.75)
        return 7.5625 * n * n + 0.75
    elif n < (2.5/2.75):
        n -= (2.25/2.75)
        return 7.5625 * n * n + 0.9375
    else:
        # Penner's canonical last-bounce offset is 2.625/2.75.  The previous
        # constant (2.65/2.75) made easeOutBounce(1.0) return 0.994375 instead
        # of 1.0 and introduced a discontinuity at n == 2.5/2.75, violating the
        # documented "ending at 1.0" contract.
        n -= (2.625/2.75)
        return 7.5625 * n * n + 0.984375
constant[A bouncing tween function that hits the destination and then bounces to rest.
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
]
call[name[_checkRange], parameter[name[n]]]
if compare[name[n] less[<] binary_operation[constant[1] / constant[2.75]]] begin[:]
return[binary_operation[binary_operation[constant[7.5625] * name[n]] * name[n]]] | keyword[def] identifier[easeOutBounce] ( identifier[n] ):
literal[string]
identifier[_checkRange] ( identifier[n] )
keyword[if] identifier[n] <( literal[int] / literal[int] ):
keyword[return] literal[int] * identifier[n] * identifier[n]
keyword[elif] identifier[n] <( literal[int] / literal[int] ):
identifier[n] -=( literal[int] / literal[int] )
keyword[return] literal[int] * identifier[n] * identifier[n] + literal[int]
keyword[elif] identifier[n] <( literal[int] / literal[int] ):
identifier[n] -=( literal[int] / literal[int] )
keyword[return] literal[int] * identifier[n] * identifier[n] + literal[int]
keyword[else] :
identifier[n] -=( literal[int] / literal[int] )
keyword[return] literal[int] * identifier[n] * identifier[n] + literal[int] | def easeOutBounce(n):
"""A bouncing tween function that hits the destination and then bounces to rest.
Args:
n (float): The time progress, starting at 0.0 and ending at 1.0.
Returns:
(float) The line progress, starting at 0.0 and ending at 1.0. Suitable for passing to getPointOnLine().
"""
_checkRange(n)
if n < 1 / 2.75:
return 7.5625 * n * n # depends on [control=['if'], data=['n']]
elif n < 2 / 2.75:
n -= 1.5 / 2.75
return 7.5625 * n * n + 0.75 # depends on [control=['if'], data=['n']]
elif n < 2.5 / 2.75:
n -= 2.25 / 2.75
return 7.5625 * n * n + 0.9375 # depends on [control=['if'], data=['n']]
else:
n -= 2.65 / 2.75
return 7.5625 * n * n + 0.984375 |
def max_cation_removal(self):
        """
        Maximum number of cation A that can be removed while maintaining charge-balance.

        Returns:
            integer amount of cation. Depends on cell size (this is an 'extrinsic' function!)
        """
        # Total extra positive charge the redox-active metals can still absorb
        # by oxidizing up to their maximum oxidation states.
        spare_charge = 0
        for spec in self.comp:
            element = Element(spec.symbol)
            if is_redox_active_intercalation(element):
                spare_charge += (element.max_oxidation_state - spec.oxi_state) * self.comp[spec]
        # Removal is capped both by the oxidation capacity and by how many
        # cations are actually present in the structure.
        charge_limited = spare_charge / self.cation_charge
        cations_present = self.comp[Specie(self.cation.symbol, self.cation_charge)]
        return min(charge_limited, cations_present)
constant[
Maximum number of cation A that can be removed while maintaining charge-balance.
Returns:
integer amount of cation. Depends on cell size (this is an 'extrinsic' function!)
]
variable[oxid_pot] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da20c796ad0>]]
variable[oxid_limit] assign[=] binary_operation[name[oxid_pot] / name[self].cation_charge]
variable[num_cation] assign[=] call[name[self].comp][call[name[Specie], parameter[name[self].cation.symbol, name[self].cation_charge]]]
return[call[name[min], parameter[name[oxid_limit], name[num_cation]]]] | keyword[def] identifier[max_cation_removal] ( identifier[self] ):
literal[string]
identifier[oxid_pot] = identifier[sum] (
[( identifier[Element] ( identifier[spec] . identifier[symbol] ). identifier[max_oxidation_state] - identifier[spec] . identifier[oxi_state] )* identifier[self] . identifier[comp] [ identifier[spec] ] keyword[for] identifier[spec]
keyword[in] identifier[self] . identifier[comp] keyword[if] identifier[is_redox_active_intercalation] ( identifier[Element] ( identifier[spec] . identifier[symbol] ))])
identifier[oxid_limit] = identifier[oxid_pot] / identifier[self] . identifier[cation_charge]
identifier[num_cation] = identifier[self] . identifier[comp] [ identifier[Specie] ( identifier[self] . identifier[cation] . identifier[symbol] , identifier[self] . identifier[cation_charge] )]
keyword[return] identifier[min] ( identifier[oxid_limit] , identifier[num_cation] ) | def max_cation_removal(self):
"""
Maximum number of cation A that can be removed while maintaining charge-balance.
Returns:
integer amount of cation. Depends on cell size (this is an 'extrinsic' function!)
"""
# how much 'spare charge' is left in the redox metals for oxidation?
oxid_pot = sum([(Element(spec.symbol).max_oxidation_state - spec.oxi_state) * self.comp[spec] for spec in self.comp if is_redox_active_intercalation(Element(spec.symbol))])
oxid_limit = oxid_pot / self.cation_charge
#the number of A that exist in the structure for removal
num_cation = self.comp[Specie(self.cation.symbol, self.cation_charge)]
return min(oxid_limit, num_cation) |
def migrate(self, app='', migration='', site=None, fake=0, ignore_errors=None, skip_databases=None, database=None, migrate_apps='', delete_ghosts=1):
        """
        Runs the standard South migrate command for one or more sites.

        :param app: name of a single app to migrate; appended to ``migrate_apps``.
        :param migration: specific migration identifier to run (default: latest).
        :param site: site to migrate; defaults to ALL sites.
        :param fake: truthy to pass ``--fake`` (record migrations without running them).
        :param ignore_errors: truthy to continue past failing migrations; defaults
            to the role-level ``ignore_migration_errors`` setting.
        :param skip_databases: comma-delimited string (or list) of database names.
            NOTE(review): parsed below but not referenced again in this method --
            confirm whether it is still honored elsewhere.
        :param database: single database name, passed via ``--database=``.
        :param migrate_apps: comma-delimited string of app names to migrate.
        :param delete_ghosts: truthy to pass South's ``--delete-ghost-migrations``
            (only pre-1.7; forced off on versions >= 1.9).
        """
        # Note, to pass a comma-delimited list in a fab command, escape the comma with a back slash.
        #
        # e.g.
        #
        # fab staging dj.migrate:migrate_apps=oneapp\,twoapp\,threeapp
        r = self.local_renderer
        # Fall back to the role-level setting when the caller gave no explicit value.
        ignore_errors = int(r.env.ignore_migration_errors if ignore_errors is None else ignore_errors)
        delete_ghosts = int(delete_ghosts)
        # post_south: version >= 1.7 (presumably the Django version -- confirm),
        # where South's --merge/--delete-ghost-migrations options no longer exist.
        post_south = self.version_tuple >= (1, 7, 0)
        if self.version_tuple >= (1, 9, 0):
            delete_ghosts = 0
        skip_databases = (skip_databases or '')
        if isinstance(skip_databases, six.string_types):
            # Accept a comma-delimited string and normalize it to a clean list.
            skip_databases = [_.strip() for _ in skip_databases.split(',') if _.strip()]
        # Normalize migrate_apps to a list of bare app labels (last dotted part).
        migrate_apps = migrate_apps or ''
        migrate_apps = [
            _.strip().split('.')[-1]
            for _ in migrate_apps.strip().split(',')
            if _.strip()
        ]
        if app:
            migrate_apps.append(app)
        # Stash command fragments on the renderer env; the template string at the
        # bottom interpolates them per app/site iteration.
        r.env.migrate_migration = migration or ''
        r.env.migrate_fake_str = '--fake' if int(fake) else ''
        r.env.migrate_database = '--database=%s' % database if database else ''
        r.env.migrate_merge = '--merge' if not post_south else ''
        r.env.delete_ghosts = '--delete-ghost-migrations' if delete_ghosts and not post_south else ''
        self.vprint('project_dir0:', r.env.project_dir, r.genv.get('dj_project_dir'), r.genv.get('project_dir'))
        self.vprint('migrate_apps:', migrate_apps)
        if self.is_local:
            r.env.project_dir = r.env.local_project_dir
        # CS 2017-3-29 Don't bypass the iterator. That causes reversion to the global env that could corrupt the generated commands.
        #databases = list(self.iter_unique_databases(site=site))#TODO:remove
        # CS 2017-4-24 Don't specify a single site as the default when none is supplied. Otherwise all other sites will be ignored.
        #site = site or self.genv.SITE
        site = site or ALL
        databases = self.iter_unique_databases(site=site)
        for _site, site_data in databases:
            self.vprint('-'*80, file=sys.stderr)
            self.vprint('site:', _site, file=sys.stderr)
            if self.env.available_sites_by_host:
                # Skip sites that are not served by the host currently targeted.
                hostname = self.current_hostname
                sites_on_host = self.env.available_sites_by_host.get(hostname, [])
                if sites_on_host and _site not in sites_on_host:
                    self.vprint('skipping site:', _site, sites_on_host, file=sys.stderr)
                    continue
            if not migrate_apps:
                # A single blank entry makes the inner loop run exactly once with
                # no app filter, i.e. "migrate everything".
                migrate_apps.append(' ')
            for _app in migrate_apps:
                # In cases where we're migrating built-in apps or apps with dotted names
                # e.g. django.contrib.auth, extract the name used for the migrate command.
                r.env.migrate_app = _app.split('.')[-1]
                self.vprint('project_dir1:', r.env.project_dir, r.genv.get('dj_project_dir'), r.genv.get('project_dir'))
                r.env.SITE = _site
                with self.settings(warn_only=ignore_errors):
                    r.run_or_local(
                        'export SITE={SITE}; export ROLE={ROLE}; {migrate_pre_command} cd {project_dir}; '
                        '{manage_cmd} migrate --noinput {migrate_merge} --traceback '
                        '{migrate_database} {delete_ghosts} {migrate_app} {migrate_migration} '
'{migrate_fake_str}') | def function[migrate, parameter[self, app, migration, site, fake, ignore_errors, skip_databases, database, migrate_apps, delete_ghosts]]:
constant[
Runs the standard South migrate command for one or more sites.
]
variable[r] assign[=] name[self].local_renderer
variable[ignore_errors] assign[=] call[name[int], parameter[<ast.IfExp object at 0x7da1b0061a20>]]
variable[delete_ghosts] assign[=] call[name[int], parameter[name[delete_ghosts]]]
variable[post_south] assign[=] compare[name[self].version_tuple greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b00610f0>, <ast.Constant object at 0x7da1b00620b0>, <ast.Constant object at 0x7da1b0062b30>]]]
if compare[name[self].version_tuple greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b0060a00>, <ast.Constant object at 0x7da1b0060460>, <ast.Constant object at 0x7da1b00630a0>]]] begin[:]
variable[delete_ghosts] assign[=] constant[0]
variable[skip_databases] assign[=] <ast.BoolOp object at 0x7da1b0062a10>
if call[name[isinstance], parameter[name[skip_databases], name[six].string_types]] begin[:]
variable[skip_databases] assign[=] <ast.ListComp object at 0x7da1b00632e0>
variable[migrate_apps] assign[=] <ast.BoolOp object at 0x7da1b0061450>
variable[migrate_apps] assign[=] <ast.ListComp object at 0x7da1b00636d0>
if name[app] begin[:]
call[name[migrate_apps].append, parameter[name[app]]]
name[r].env.migrate_migration assign[=] <ast.BoolOp object at 0x7da1b0061ed0>
name[r].env.migrate_fake_str assign[=] <ast.IfExp object at 0x7da1b0062770>
name[r].env.migrate_database assign[=] <ast.IfExp object at 0x7da1b00636a0>
name[r].env.migrate_merge assign[=] <ast.IfExp object at 0x7da1b0060580>
name[r].env.delete_ghosts assign[=] <ast.IfExp object at 0x7da1b0063250>
call[name[self].vprint, parameter[constant[project_dir0:], name[r].env.project_dir, call[name[r].genv.get, parameter[constant[dj_project_dir]]], call[name[r].genv.get, parameter[constant[project_dir]]]]]
call[name[self].vprint, parameter[constant[migrate_apps:], name[migrate_apps]]]
if name[self].is_local begin[:]
name[r].env.project_dir assign[=] name[r].env.local_project_dir
variable[site] assign[=] <ast.BoolOp object at 0x7da1b0063460>
variable[databases] assign[=] call[name[self].iter_unique_databases, parameter[]]
for taget[tuple[[<ast.Name object at 0x7da1b0062e90>, <ast.Name object at 0x7da1b0063430>]]] in starred[name[databases]] begin[:]
call[name[self].vprint, parameter[binary_operation[constant[-] * constant[80]]]]
call[name[self].vprint, parameter[constant[site:], name[_site]]]
if name[self].env.available_sites_by_host begin[:]
variable[hostname] assign[=] name[self].current_hostname
variable[sites_on_host] assign[=] call[name[self].env.available_sites_by_host.get, parameter[name[hostname], list[[]]]]
if <ast.BoolOp object at 0x7da1b00637f0> begin[:]
call[name[self].vprint, parameter[constant[skipping site:], name[_site], name[sites_on_host]]]
continue
if <ast.UnaryOp object at 0x7da1b00621a0> begin[:]
call[name[migrate_apps].append, parameter[constant[ ]]]
for taget[name[_app]] in starred[name[migrate_apps]] begin[:]
name[r].env.migrate_app assign[=] call[call[name[_app].split, parameter[constant[.]]]][<ast.UnaryOp object at 0x7da1b00638e0>]
call[name[self].vprint, parameter[constant[project_dir1:], name[r].env.project_dir, call[name[r].genv.get, parameter[constant[dj_project_dir]]], call[name[r].genv.get, parameter[constant[project_dir]]]]]
name[r].env.SITE assign[=] name[_site]
with call[name[self].settings, parameter[]] begin[:]
call[name[r].run_or_local, parameter[constant[export SITE={SITE}; export ROLE={ROLE}; {migrate_pre_command} cd {project_dir}; {manage_cmd} migrate --noinput {migrate_merge} --traceback {migrate_database} {delete_ghosts} {migrate_app} {migrate_migration} {migrate_fake_str}]]] | keyword[def] identifier[migrate] ( identifier[self] , identifier[app] = literal[string] , identifier[migration] = literal[string] , identifier[site] = keyword[None] , identifier[fake] = literal[int] , identifier[ignore_errors] = keyword[None] , identifier[skip_databases] = keyword[None] , identifier[database] = keyword[None] , identifier[migrate_apps] = literal[string] , identifier[delete_ghosts] = literal[int] ):
literal[string]
identifier[r] = identifier[self] . identifier[local_renderer]
identifier[ignore_errors] = identifier[int] ( identifier[r] . identifier[env] . identifier[ignore_migration_errors] keyword[if] identifier[ignore_errors] keyword[is] keyword[None] keyword[else] identifier[ignore_errors] )
identifier[delete_ghosts] = identifier[int] ( identifier[delete_ghosts] )
identifier[post_south] = identifier[self] . identifier[version_tuple] >=( literal[int] , literal[int] , literal[int] )
keyword[if] identifier[self] . identifier[version_tuple] >=( literal[int] , literal[int] , literal[int] ):
identifier[delete_ghosts] = literal[int]
identifier[skip_databases] =( identifier[skip_databases] keyword[or] literal[string] )
keyword[if] identifier[isinstance] ( identifier[skip_databases] , identifier[six] . identifier[string_types] ):
identifier[skip_databases] =[ identifier[_] . identifier[strip] () keyword[for] identifier[_] keyword[in] identifier[skip_databases] . identifier[split] ( literal[string] ) keyword[if] identifier[_] . identifier[strip] ()]
identifier[migrate_apps] = identifier[migrate_apps] keyword[or] literal[string]
identifier[migrate_apps] =[
identifier[_] . identifier[strip] (). identifier[split] ( literal[string] )[- literal[int] ]
keyword[for] identifier[_] keyword[in] identifier[migrate_apps] . identifier[strip] (). identifier[split] ( literal[string] )
keyword[if] identifier[_] . identifier[strip] ()
]
keyword[if] identifier[app] :
identifier[migrate_apps] . identifier[append] ( identifier[app] )
identifier[r] . identifier[env] . identifier[migrate_migration] = identifier[migration] keyword[or] literal[string]
identifier[r] . identifier[env] . identifier[migrate_fake_str] = literal[string] keyword[if] identifier[int] ( identifier[fake] ) keyword[else] literal[string]
identifier[r] . identifier[env] . identifier[migrate_database] = literal[string] % identifier[database] keyword[if] identifier[database] keyword[else] literal[string]
identifier[r] . identifier[env] . identifier[migrate_merge] = literal[string] keyword[if] keyword[not] identifier[post_south] keyword[else] literal[string]
identifier[r] . identifier[env] . identifier[delete_ghosts] = literal[string] keyword[if] identifier[delete_ghosts] keyword[and] keyword[not] identifier[post_south] keyword[else] literal[string]
identifier[self] . identifier[vprint] ( literal[string] , identifier[r] . identifier[env] . identifier[project_dir] , identifier[r] . identifier[genv] . identifier[get] ( literal[string] ), identifier[r] . identifier[genv] . identifier[get] ( literal[string] ))
identifier[self] . identifier[vprint] ( literal[string] , identifier[migrate_apps] )
keyword[if] identifier[self] . identifier[is_local] :
identifier[r] . identifier[env] . identifier[project_dir] = identifier[r] . identifier[env] . identifier[local_project_dir]
identifier[site] = identifier[site] keyword[or] identifier[ALL]
identifier[databases] = identifier[self] . identifier[iter_unique_databases] ( identifier[site] = identifier[site] )
keyword[for] identifier[_site] , identifier[site_data] keyword[in] identifier[databases] :
identifier[self] . identifier[vprint] ( literal[string] * literal[int] , identifier[file] = identifier[sys] . identifier[stderr] )
identifier[self] . identifier[vprint] ( literal[string] , identifier[_site] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[if] identifier[self] . identifier[env] . identifier[available_sites_by_host] :
identifier[hostname] = identifier[self] . identifier[current_hostname]
identifier[sites_on_host] = identifier[self] . identifier[env] . identifier[available_sites_by_host] . identifier[get] ( identifier[hostname] ,[])
keyword[if] identifier[sites_on_host] keyword[and] identifier[_site] keyword[not] keyword[in] identifier[sites_on_host] :
identifier[self] . identifier[vprint] ( literal[string] , identifier[_site] , identifier[sites_on_host] , identifier[file] = identifier[sys] . identifier[stderr] )
keyword[continue]
keyword[if] keyword[not] identifier[migrate_apps] :
identifier[migrate_apps] . identifier[append] ( literal[string] )
keyword[for] identifier[_app] keyword[in] identifier[migrate_apps] :
identifier[r] . identifier[env] . identifier[migrate_app] = identifier[_app] . identifier[split] ( literal[string] )[- literal[int] ]
identifier[self] . identifier[vprint] ( literal[string] , identifier[r] . identifier[env] . identifier[project_dir] , identifier[r] . identifier[genv] . identifier[get] ( literal[string] ), identifier[r] . identifier[genv] . identifier[get] ( literal[string] ))
identifier[r] . identifier[env] . identifier[SITE] = identifier[_site]
keyword[with] identifier[self] . identifier[settings] ( identifier[warn_only] = identifier[ignore_errors] ):
identifier[r] . identifier[run_or_local] (
literal[string]
literal[string]
literal[string]
literal[string] ) | def migrate(self, app='', migration='', site=None, fake=0, ignore_errors=None, skip_databases=None, database=None, migrate_apps='', delete_ghosts=1):
"""
Runs the standard South migrate command for one or more sites.
"""
# Note, to pass a comma-delimted list in a fab command, escape the comma with a back slash.
#
# e.g.
#
# fab staging dj.migrate:migrate_apps=oneapp\,twoapp\,threeapp
r = self.local_renderer
ignore_errors = int(r.env.ignore_migration_errors if ignore_errors is None else ignore_errors)
delete_ghosts = int(delete_ghosts)
post_south = self.version_tuple >= (1, 7, 0)
if self.version_tuple >= (1, 9, 0):
delete_ghosts = 0 # depends on [control=['if'], data=[]]
skip_databases = skip_databases or ''
if isinstance(skip_databases, six.string_types):
skip_databases = [_.strip() for _ in skip_databases.split(',') if _.strip()] # depends on [control=['if'], data=[]]
migrate_apps = migrate_apps or ''
migrate_apps = [_.strip().split('.')[-1] for _ in migrate_apps.strip().split(',') if _.strip()]
if app:
migrate_apps.append(app) # depends on [control=['if'], data=[]]
r.env.migrate_migration = migration or ''
r.env.migrate_fake_str = '--fake' if int(fake) else ''
r.env.migrate_database = '--database=%s' % database if database else ''
r.env.migrate_merge = '--merge' if not post_south else ''
r.env.delete_ghosts = '--delete-ghost-migrations' if delete_ghosts and (not post_south) else ''
self.vprint('project_dir0:', r.env.project_dir, r.genv.get('dj_project_dir'), r.genv.get('project_dir'))
self.vprint('migrate_apps:', migrate_apps)
if self.is_local:
r.env.project_dir = r.env.local_project_dir # depends on [control=['if'], data=[]]
# CS 2017-3-29 Don't bypass the iterator. That causes reversion to the global env that could corrupt the generated commands.
#databases = list(self.iter_unique_databases(site=site))#TODO:remove
# CS 2017-4-24 Don't specify a single site as the default when none is supplied. Otherwise all other sites will be ignored.
#site = site or self.genv.SITE
site = site or ALL
databases = self.iter_unique_databases(site=site)
for (_site, site_data) in databases:
self.vprint('-' * 80, file=sys.stderr)
self.vprint('site:', _site, file=sys.stderr)
if self.env.available_sites_by_host:
hostname = self.current_hostname
sites_on_host = self.env.available_sites_by_host.get(hostname, [])
if sites_on_host and _site not in sites_on_host:
self.vprint('skipping site:', _site, sites_on_host, file=sys.stderr)
continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not migrate_apps:
migrate_apps.append(' ') # depends on [control=['if'], data=[]]
for _app in migrate_apps:
# In cases where we're migrating built-in apps or apps with dotted names
# e.g. django.contrib.auth, extract the name used for the migrate command.
r.env.migrate_app = _app.split('.')[-1]
self.vprint('project_dir1:', r.env.project_dir, r.genv.get('dj_project_dir'), r.genv.get('project_dir'))
r.env.SITE = _site
with self.settings(warn_only=ignore_errors):
r.run_or_local('export SITE={SITE}; export ROLE={ROLE}; {migrate_pre_command} cd {project_dir}; {manage_cmd} migrate --noinput {migrate_merge} --traceback {migrate_database} {delete_ghosts} {migrate_app} {migrate_migration} {migrate_fake_str}') # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['_app']] # depends on [control=['for'], data=[]] |
def submit(self, eEye, pTexture, pBounds=None, nSubmitFlags=Submit_Default):
        """
        Updated scene texture to display. If pBounds is NULL the entire texture will be used. If called from an OpenGL app, consider adding a glFlush after
        Submitting both frames to signal the driver to start processing, otherwise it may wait until the command buffer fills up, causing the app to miss frames.
        * OpenGL dirty state:
        glBindTexture
        * Return codes:
        - IsNotSceneApplication (make sure to call VR_Init with VRApplicaiton_Scene)
        - DoNotHaveFocus (some other app has taken focus)
        - TextureIsOnWrongDevice (application did not use proper AdapterIndex - see IVRSystem.GetDXGIOutputInfo)
        - SharedTexturesNotSupported (application needs to call CreateDXGIFactory1 or later before creating DX device)
        - TextureUsesUnsupportedFormat (scene textures must be compatible with DXGI sharing rules - e.g. uncompressed, no mips, etc.)
        - InvalidTexture (usually means bad arguments passed in)
        - AlreadySubmitted (app has submitted two left textures or two right textures in a single frame - i.e. before calling WaitGetPoses again)
        """
        # The texture struct is handed to the C API by reference; bounds may be NULL.
        submit_fn = self.function_table.submit
        return submit_fn(eEye, byref(pTexture), pBounds, nSubmitFlags)
constant[
Updated scene texture to display. If pBounds is NULL the entire texture will be used. If called from an OpenGL app, consider adding a glFlush after
Submitting both frames to signal the driver to start processing, otherwise it may wait until the command buffer fills up, causing the app to miss frames.
* OpenGL dirty state:
glBindTexture
* Return codes:
- IsNotSceneApplication (make sure to call VR_Init with VRApplicaiton_Scene)
- DoNotHaveFocus (some other app has taken focus)
- TextureIsOnWrongDevice (application did not use proper AdapterIndex - see IVRSystem.GetDXGIOutputInfo)
- SharedTexturesNotSupported (application needs to call CreateDXGIFactory1 or later before creating DX device)
- TextureUsesUnsupportedFormat (scene textures must be compatible with DXGI sharing rules - e.g. uncompressed, no mips, etc.)
- InvalidTexture (usually means bad arguments passed in)
- AlreadySubmitted (app has submitted two left textures or two right textures in a single frame - i.e. before calling WaitGetPoses again)
]
variable[fn] assign[=] name[self].function_table.submit
variable[result] assign[=] call[name[fn], parameter[name[eEye], call[name[byref], parameter[name[pTexture]]], name[pBounds], name[nSubmitFlags]]]
return[name[result]] | keyword[def] identifier[submit] ( identifier[self] , identifier[eEye] , identifier[pTexture] , identifier[pBounds] = keyword[None] , identifier[nSubmitFlags] = identifier[Submit_Default] ):
literal[string]
identifier[fn] = identifier[self] . identifier[function_table] . identifier[submit]
identifier[result] = identifier[fn] ( identifier[eEye] , identifier[byref] ( identifier[pTexture] ), identifier[pBounds] , identifier[nSubmitFlags] )
keyword[return] identifier[result] | def submit(self, eEye, pTexture, pBounds=None, nSubmitFlags=Submit_Default):
"""
Updated scene texture to display. If pBounds is NULL the entire texture will be used. If called from an OpenGL app, consider adding a glFlush after
Submitting both frames to signal the driver to start processing, otherwise it may wait until the command buffer fills up, causing the app to miss frames.
* OpenGL dirty state:
glBindTexture
* Return codes:
- IsNotSceneApplication (make sure to call VR_Init with VRApplicaiton_Scene)
- DoNotHaveFocus (some other app has taken focus)
- TextureIsOnWrongDevice (application did not use proper AdapterIndex - see IVRSystem.GetDXGIOutputInfo)
- SharedTexturesNotSupported (application needs to call CreateDXGIFactory1 or later before creating DX device)
- TextureUsesUnsupportedFormat (scene textures must be compatible with DXGI sharing rules - e.g. uncompressed, no mips, etc.)
- InvalidTexture (usually means bad arguments passed in)
- AlreadySubmitted (app has submitted two left textures or two right textures in a single frame - i.e. before calling WaitGetPoses again)
"""
fn = self.function_table.submit
result = fn(eEye, byref(pTexture), pBounds, nSubmitFlags)
return result |
def load_module(self, module_name, path=None):
        '''Import a module into this isolation context and return a proxy for it.'''
        self.ensure_started()
        # Default to the host interpreter's module search path when none is given.
        search_path = sys.path if path is None else path
        proxy = self.client.call(_load_module, module_name, search_path)
        # Tag the proxy so callers can recover the context that owns it.
        proxy.__isolation_context__ = self
        return proxy
constant[Import a module into this isolation context and return a proxy for it.]
call[name[self].ensure_started, parameter[]]
if compare[name[path] is constant[None]] begin[:]
variable[path] assign[=] name[sys].path
variable[mod] assign[=] call[name[self].client.call, parameter[name[_load_module], name[module_name], name[path]]]
name[mod].__isolation_context__ assign[=] name[self]
return[name[mod]] | keyword[def] identifier[load_module] ( identifier[self] , identifier[module_name] , identifier[path] = keyword[None] ):
literal[string]
identifier[self] . identifier[ensure_started] ()
keyword[if] identifier[path] keyword[is] keyword[None] :
identifier[path] = identifier[sys] . identifier[path]
identifier[mod] = identifier[self] . identifier[client] . identifier[call] ( identifier[_load_module] , identifier[module_name] , identifier[path] )
identifier[mod] . identifier[__isolation_context__] = identifier[self]
keyword[return] identifier[mod] | def load_module(self, module_name, path=None):
"""Import a module into this isolation context and return a proxy for it."""
self.ensure_started()
if path is None:
path = sys.path # depends on [control=['if'], data=['path']]
mod = self.client.call(_load_module, module_name, path)
mod.__isolation_context__ = self
return mod |
def _pc_decode(self, msg):
        """PC: PLC (lighting) change."""
        # Housecode occupies characters 4-6; the two digits after it are the level.
        code = msg[4:7]
        return {
            'housecode': code,
            'index': housecode_to_index(code),
            'light_level': int(msg[7:9]),
        }
constant[PC: PLC (lighting) change.]
variable[housecode] assign[=] call[name[msg]][<ast.Slice object at 0x7da18eb54130>]
return[dictionary[[<ast.Constant object at 0x7da18bc71ea0>, <ast.Constant object at 0x7da18bc70b80>, <ast.Constant object at 0x7da18bc715a0>], [<ast.Name object at 0x7da18bc72aa0>, <ast.Call object at 0x7da18bc703d0>, <ast.Call object at 0x7da18bc722f0>]]] | keyword[def] identifier[_pc_decode] ( identifier[self] , identifier[msg] ):
literal[string]
identifier[housecode] = identifier[msg] [ literal[int] : literal[int] ]
keyword[return] { literal[string] : identifier[housecode] , literal[string] : identifier[housecode_to_index] ( identifier[housecode] ),
literal[string] : identifier[int] ( identifier[msg] [ literal[int] : literal[int] ])} | def _pc_decode(self, msg):
"""PC: PLC (lighting) change."""
housecode = msg[4:7]
return {'housecode': housecode, 'index': housecode_to_index(housecode), 'light_level': int(msg[7:9])} |
def derivativeZ(self,x,y,z):
    '''Evaluate df/dz of the wrapped function at the given points.

    The first input is shifted by the state-dependent lower bound before
    the wrapped function's derivative is evaluated.

    Parameters
    ----------
    x : np.array
        First input values.
    y : np.array
        Second input values; same shape as x.
    z : np.array
        Third input values; same shape as x.

    Returns
    -------
    np.array
        First derivative with respect to the third input, evaluated at
        (x, y, z); same shape as the inputs.
    '''
    shifted_x = x - self.lowerBound(y)
    return self.func.derivativeZ(shifted_x, y, z)
constant[
Evaluate the first derivative with respect to z of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
dfdz_out : np.array
First derivative of function with respect to the third input,
evaluated at (x,y,z), of same shape as inputs.
]
variable[xShift] assign[=] call[name[self].lowerBound, parameter[name[y]]]
variable[dfdz_out] assign[=] call[name[self].func.derivativeZ, parameter[binary_operation[name[x] - name[xShift]], name[y], name[z]]]
return[name[dfdz_out]] | keyword[def] identifier[derivativeZ] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] ):
literal[string]
identifier[xShift] = identifier[self] . identifier[lowerBound] ( identifier[y] )
identifier[dfdz_out] = identifier[self] . identifier[func] . identifier[derivativeZ] ( identifier[x] - identifier[xShift] , identifier[y] , identifier[z] )
keyword[return] identifier[dfdz_out] | def derivativeZ(self, x, y, z):
"""
Evaluate the first derivative with respect to z of the function at given
state space points.
Parameters
----------
x : np.array
First input values.
y : np.array
Second input values; should be of same shape as x.
z : np.array
Third input values; should be of same shape as x.
Returns
-------
dfdz_out : np.array
First derivative of function with respect to the third input,
evaluated at (x,y,z), of same shape as inputs.
"""
xShift = self.lowerBound(y)
dfdz_out = self.func.derivativeZ(x - xShift, y, z)
return dfdz_out |
def i2le_script(number):
    '''Convert int to signed little endian (l.e.) hex for scripts
    Args:
        number (int): int value to convert to bytes in l.e. format
    Returns:
        (str): the hex-encoded signed LE number
    '''
    if number == 0:
        return '00'
    # Minimal two's-complement width: a value n needs magnitude.bit_length()+1
    # bits, where magnitude is n for n >= 0 and (-n - 1) for n < 0 (e.g. -128
    # fits in one byte).  Rounded up to whole bytes that is bl // 8 + 1.
    # This replaces the old trial loop over lengths 0..79 with a blanket
    # `except Exception: continue`, which also silently returned None for
    # integers wider than 79 bytes.
    magnitude = number if number >= 0 else -number - 1
    length = magnitude.bit_length() // 8 + 1
    return number.to_bytes(length=length, byteorder='little', signed=True).hex()
constant[Convert int to signed little endian (l.e.) hex for scripts
Args:
number (int): int value to convert to bytes in l.e. format
Returns:
(str): the hex-encoded signed LE number
]
if compare[name[number] equal[==] constant[0]] begin[:]
return[constant[00]]
for taget[name[i]] in starred[call[name[range], parameter[constant[80]]]] begin[:]
<ast.Try object at 0x7da1b0516350> | keyword[def] identifier[i2le_script] ( identifier[number] ):
literal[string]
keyword[if] identifier[number] == literal[int] :
keyword[return] literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ):
keyword[try] :
keyword[return] identifier[number] . identifier[to_bytes] (
identifier[length] = identifier[i] ,
identifier[byteorder] = literal[string] ,
identifier[signed] = keyword[True] ). identifier[hex] ()
keyword[except] identifier[Exception] :
keyword[continue] | def i2le_script(number):
"""Convert int to signed little endian (l.e.) hex for scripts
Args:
number (int): int value to convert to bytes in l.e. format
Returns:
(str): the hex-encoded signed LE number
"""
if number == 0:
return '00' # depends on [control=['if'], data=[]]
for i in range(80):
try: # minimal bytes lol
return number.to_bytes(length=i, byteorder='little', signed=True).hex() # depends on [control=['try'], data=[]]
except Exception:
continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['i']] |
def paths(self):
    """All the entries of sys.path, possibly restricted by --path"""
    if not self.select_paths:
        return sys.path
    result = []
    matched = set()
    # Normalize each selector once, instead of once per sys.path entry.
    selectors = [(sel, os.path.normcase(os.path.abspath(sel)))
                 for sel in self.select_paths]
    for path in sys.path:
        path = os.path.normcase(os.path.abspath(path))
        for sel, norm_sel in selectors:
            if '*' in norm_sel:
                if re.search(fnmatch.translate(norm_sel + '*'), path):
                    result.append(path)
                    matched.add(sel)
                    break
            else:
                if path.startswith(norm_sel):
                    result.append(path)
                    matched.add(sel)
                    break
        else:
            logger.debug("Skipping path %s because it doesn't match %s"
                         % (path, ', '.join(self.select_paths)))
    # Selectors that matched nothing are added verbatim (wildcards excluded).
    # Membership is tracked on the *raw* selector: the old code stored the
    # normalized form but tested the raw one, so a relative --path that did
    # match sys.path was re-added here anyway.
    for sel, norm_sel in selectors:
        if sel not in matched and '*' not in sel:
            result.append(sel)
            logger.debug("Adding path %s because it doesn't match anything already on sys.path"
                         % sel)
    return result
constant[All the entries of sys.path, possibly restricted by --path]
if <ast.UnaryOp object at 0x7da20c991630> begin[:]
return[name[sys].path]
variable[result] assign[=] list[[]]
variable[match_any] assign[=] call[name[set], parameter[]]
for taget[name[path]] in starred[name[sys].path] begin[:]
variable[path] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[path]]]]]
for taget[name[match]] in starred[name[self].select_paths] begin[:]
variable[match] assign[=] call[name[os].path.normcase, parameter[call[name[os].path.abspath, parameter[name[match]]]]]
if compare[constant[*] in name[match]] begin[:]
if call[name[re].search, parameter[call[name[fnmatch].translate, parameter[binary_operation[name[match] + constant[*]]]], name[path]]] begin[:]
call[name[result].append, parameter[name[path]]]
call[name[match_any].add, parameter[name[match]]]
break
for taget[name[match]] in starred[name[self].select_paths] begin[:]
if <ast.BoolOp object at 0x7da18bccab60> begin[:]
call[name[result].append, parameter[name[match]]]
call[name[logger].debug, parameter[binary_operation[constant[Adding path %s because it doesn't match anything already on sys.path] <ast.Mod object at 0x7da2590d6920> name[match]]]]
return[name[result]] | keyword[def] identifier[paths] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[select_paths] :
keyword[return] identifier[sys] . identifier[path]
identifier[result] =[]
identifier[match_any] = identifier[set] ()
keyword[for] identifier[path] keyword[in] identifier[sys] . identifier[path] :
identifier[path] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[path] ))
keyword[for] identifier[match] keyword[in] identifier[self] . identifier[select_paths] :
identifier[match] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[match] ))
keyword[if] literal[string] keyword[in] identifier[match] :
keyword[if] identifier[re] . identifier[search] ( identifier[fnmatch] . identifier[translate] ( identifier[match] + literal[string] ), identifier[path] ):
identifier[result] . identifier[append] ( identifier[path] )
identifier[match_any] . identifier[add] ( identifier[match] )
keyword[break]
keyword[else] :
keyword[if] identifier[path] . identifier[startswith] ( identifier[match] ):
identifier[result] . identifier[append] ( identifier[path] )
identifier[match_any] . identifier[add] ( identifier[match] )
keyword[break]
keyword[else] :
identifier[logger] . identifier[debug] ( literal[string]
%( identifier[path] , literal[string] . identifier[join] ( identifier[self] . identifier[select_paths] )))
keyword[for] identifier[match] keyword[in] identifier[self] . identifier[select_paths] :
keyword[if] identifier[match] keyword[not] keyword[in] identifier[match_any] keyword[and] literal[string] keyword[not] keyword[in] identifier[match] :
identifier[result] . identifier[append] ( identifier[match] )
identifier[logger] . identifier[debug] ( literal[string]
% identifier[match] )
keyword[return] identifier[result] | def paths(self):
"""All the entries of sys.path, possibly restricted by --path"""
if not self.select_paths:
return sys.path # depends on [control=['if'], data=[]]
result = []
match_any = set()
for path in sys.path:
path = os.path.normcase(os.path.abspath(path))
for match in self.select_paths:
match = os.path.normcase(os.path.abspath(match))
if '*' in match:
if re.search(fnmatch.translate(match + '*'), path):
result.append(path)
match_any.add(match)
break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['match']]
elif path.startswith(match):
result.append(path)
match_any.add(match)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['match']]
else:
logger.debug("Skipping path %s because it doesn't match %s" % (path, ', '.join(self.select_paths))) # depends on [control=['for'], data=['path']]
for match in self.select_paths:
if match not in match_any and '*' not in match:
result.append(match)
logger.debug("Adding path %s because it doesn't match anything already on sys.path" % match) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['match']]
return result |
def initialize_res(residue):
    '''Creates a new structure containing a single amino acid. The type and
    geometry of the amino acid are determined by the argument, which has to be
    either a geometry object or a single-letter amino acid code.
    The amino acid will be placed into chain A of model 0.'''
    if isinstance(residue, Geo):
        geo = residue
    else:
        geo = geometry(residue)

    segID = 1
    AA = geo.residue_name
    CA_N_length = geo.CA_N_length
    CA_C_length = geo.CA_C_length
    N_CA_C_angle = geo.N_CA_C_angle

    # Backbone geometry: CA at the origin, C along +x, N in the xy-plane.
    CA_coord = numpy.array([0., 0., 0.])
    C_coord = numpy.array([CA_C_length, 0, 0])
    N_coord = numpy.array([CA_N_length * math.cos(N_CA_C_angle * (math.pi / 180.0)),
                           CA_N_length * math.sin(N_CA_C_angle * (math.pi / 180.0)),
                           0])

    N = Atom("N", N_coord, 0.0, 1.0, " ", " N", 0, "N")
    CA = Atom("CA", CA_coord, 0.0, 1.0, " ", " CA", 0, "C")
    C = Atom("C", C_coord, 0.0, 1.0, " ", " C", 0, "C")

    ##Create Carbonyl atom (to be moved later)
    carbonyl = calculateCoordinates(N, CA, C, geo.C_O_length, geo.CA_C_O_angle,
                                    geo.N_CA_C_O_diangle)
    O = Atom("O", carbonyl, 0.0, 1.0, " ", " O", 0, "O")

    # Dispatch table replaces the 20-branch if/elif chain; unknown one-letter
    # codes fall back to glycine, matching the original behavior.
    makers = {
        'G': makeGly, 'A': makeAla, 'S': makeSer, 'C': makeCys, 'V': makeVal,
        'I': makeIle, 'L': makeLeu, 'T': makeThr, 'R': makeArg, 'K': makeLys,
        'D': makeAsp, 'E': makeGlu, 'N': makeAsn, 'Q': makeGln, 'M': makeMet,
        'H': makeHis, 'P': makePro, 'F': makePhe, 'Y': makeTyr, 'W': makeTrp,
    }
    res = makers.get(AA, makeGly)(segID, N, CA, C, O, geo)

    # Wrap the residue in chain A of model 0 inside a new structure.
    cha = Chain('A')
    cha.add(res)
    mod = Model(0)
    mod.add(cha)
    struc = Structure('X')
    struc.add(mod)
    return struc
constant[Creates a new structure containing a single amino acid. The type and
geometry of the amino acid are determined by the argument, which has to be
either a geometry object or a single-letter amino acid code.
The amino acid will be placed into chain A of model 0.]
if call[name[isinstance], parameter[name[residue], name[Geo]]] begin[:]
variable[geo] assign[=] name[residue]
variable[segID] assign[=] constant[1]
variable[AA] assign[=] name[geo].residue_name
variable[CA_N_length] assign[=] name[geo].CA_N_length
variable[CA_C_length] assign[=] name[geo].CA_C_length
variable[N_CA_C_angle] assign[=] name[geo].N_CA_C_angle
variable[CA_coord] assign[=] call[name[numpy].array, parameter[list[[<ast.Constant object at 0x7da20c6c7eb0>, <ast.Constant object at 0x7da20c6c7250>, <ast.Constant object at 0x7da20c6c7100>]]]]
variable[C_coord] assign[=] call[name[numpy].array, parameter[list[[<ast.Name object at 0x7da20c6c6770>, <ast.Constant object at 0x7da20c6c4070>, <ast.Constant object at 0x7da20c6c61d0>]]]]
variable[N_coord] assign[=] call[name[numpy].array, parameter[list[[<ast.BinOp object at 0x7da20c6c5720>, <ast.BinOp object at 0x7da20c6c7a60>, <ast.Constant object at 0x7da20c6c5d80>]]]]
variable[N] assign[=] call[name[Atom], parameter[constant[N], name[N_coord], constant[0.0], constant[1.0], constant[ ], constant[ N], constant[0], constant[N]]]
variable[CA] assign[=] call[name[Atom], parameter[constant[CA], name[CA_coord], constant[0.0], constant[1.0], constant[ ], constant[ CA], constant[0], constant[C]]]
variable[C] assign[=] call[name[Atom], parameter[constant[C], name[C_coord], constant[0.0], constant[1.0], constant[ ], constant[ C], constant[0], constant[C]]]
variable[C_O_length] assign[=] name[geo].C_O_length
variable[CA_C_O_angle] assign[=] name[geo].CA_C_O_angle
variable[N_CA_C_O_diangle] assign[=] name[geo].N_CA_C_O_diangle
variable[carbonyl] assign[=] call[name[calculateCoordinates], parameter[name[N], name[CA], name[C], name[C_O_length], name[CA_C_O_angle], name[N_CA_C_O_diangle]]]
variable[O] assign[=] call[name[Atom], parameter[constant[O], name[carbonyl], constant[0.0], constant[1.0], constant[ ], constant[ O], constant[0], constant[O]]]
if compare[name[AA] equal[==] constant[G]] begin[:]
variable[res] assign[=] call[name[makeGly], parameter[name[segID], name[N], name[CA], name[C], name[O], name[geo]]]
variable[cha] assign[=] call[name[Chain], parameter[constant[A]]]
call[name[cha].add, parameter[name[res]]]
variable[mod] assign[=] call[name[Model], parameter[constant[0]]]
call[name[mod].add, parameter[name[cha]]]
variable[struc] assign[=] call[name[Structure], parameter[constant[X]]]
call[name[struc].add, parameter[name[mod]]]
return[name[struc]] | keyword[def] identifier[initialize_res] ( identifier[residue] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[residue] , identifier[Geo] ):
identifier[geo] = identifier[residue]
keyword[else] :
identifier[geo] = identifier[geometry] ( identifier[residue] )
identifier[segID] = literal[int]
identifier[AA] = identifier[geo] . identifier[residue_name]
identifier[CA_N_length] = identifier[geo] . identifier[CA_N_length]
identifier[CA_C_length] = identifier[geo] . identifier[CA_C_length]
identifier[N_CA_C_angle] = identifier[geo] . identifier[N_CA_C_angle]
identifier[CA_coord] = identifier[numpy] . identifier[array] ([ literal[int] , literal[int] , literal[int] ])
identifier[C_coord] = identifier[numpy] . identifier[array] ([ identifier[CA_C_length] , literal[int] , literal[int] ])
identifier[N_coord] = identifier[numpy] . identifier[array] ([ identifier[CA_N_length] * identifier[math] . identifier[cos] ( identifier[N_CA_C_angle] *( identifier[math] . identifier[pi] / literal[int] )), identifier[CA_N_length] * identifier[math] . identifier[sin] ( identifier[N_CA_C_angle] *( identifier[math] . identifier[pi] / literal[int] )), literal[int] ])
identifier[N] = identifier[Atom] ( literal[string] , identifier[N_coord] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
identifier[CA] = identifier[Atom] ( literal[string] , identifier[CA_coord] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
identifier[C] = identifier[Atom] ( literal[string] , identifier[C_coord] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
identifier[C_O_length] = identifier[geo] . identifier[C_O_length]
identifier[CA_C_O_angle] = identifier[geo] . identifier[CA_C_O_angle]
identifier[N_CA_C_O_diangle] = identifier[geo] . identifier[N_CA_C_O_diangle]
identifier[carbonyl] = identifier[calculateCoordinates] ( identifier[N] , identifier[CA] , identifier[C] , identifier[C_O_length] , identifier[CA_C_O_angle] , identifier[N_CA_C_O_diangle] )
identifier[O] = identifier[Atom] ( literal[string] , identifier[carbonyl] , literal[int] , literal[int] , literal[string] , literal[string] , literal[int] , literal[string] )
keyword[if] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeGly] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeAla] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeSer] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeCys] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeVal] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeIle] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeLeu] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeThr] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeArg] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeLys] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeAsp] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeGlu] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeAsn] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeGln] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeMet] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeHis] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makePro] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makePhe] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeTyr] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[elif] ( identifier[AA] == literal[string] ):
identifier[res] = identifier[makeTrp] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
keyword[else] :
identifier[res] = identifier[makeGly] ( identifier[segID] , identifier[N] , identifier[CA] , identifier[C] , identifier[O] , identifier[geo] )
identifier[cha] = identifier[Chain] ( literal[string] )
identifier[cha] . identifier[add] ( identifier[res] )
identifier[mod] = identifier[Model] ( literal[int] )
identifier[mod] . identifier[add] ( identifier[cha] )
identifier[struc] = identifier[Structure] ( literal[string] )
identifier[struc] . identifier[add] ( identifier[mod] )
keyword[return] identifier[struc] | def initialize_res(residue):
"""Creates a new structure containing a single amino acid. The type and
geometry of the amino acid are determined by the argument, which has to be
either a geometry object or a single-letter amino acid code.
The amino acid will be placed into chain A of model 0."""
if isinstance(residue, Geo):
geo = residue # depends on [control=['if'], data=[]]
else:
geo = geometry(residue)
segID = 1
AA = geo.residue_name
CA_N_length = geo.CA_N_length
CA_C_length = geo.CA_C_length
N_CA_C_angle = geo.N_CA_C_angle
CA_coord = numpy.array([0.0, 0.0, 0.0])
C_coord = numpy.array([CA_C_length, 0, 0])
N_coord = numpy.array([CA_N_length * math.cos(N_CA_C_angle * (math.pi / 180.0)), CA_N_length * math.sin(N_CA_C_angle * (math.pi / 180.0)), 0])
N = Atom('N', N_coord, 0.0, 1.0, ' ', ' N', 0, 'N')
CA = Atom('CA', CA_coord, 0.0, 1.0, ' ', ' CA', 0, 'C')
C = Atom('C', C_coord, 0.0, 1.0, ' ', ' C', 0, 'C')
##Create Carbonyl atom (to be moved later)
C_O_length = geo.C_O_length
CA_C_O_angle = geo.CA_C_O_angle
N_CA_C_O_diangle = geo.N_CA_C_O_diangle
carbonyl = calculateCoordinates(N, CA, C, C_O_length, CA_C_O_angle, N_CA_C_O_diangle)
O = Atom('O', carbonyl, 0.0, 1.0, ' ', ' O', 0, 'O')
if AA == 'G':
res = makeGly(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'A':
res = makeAla(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'S':
res = makeSer(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'C':
res = makeCys(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'V':
res = makeVal(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'I':
res = makeIle(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'L':
res = makeLeu(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'T':
res = makeThr(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'R':
res = makeArg(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'K':
res = makeLys(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'D':
res = makeAsp(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'E':
res = makeGlu(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'N':
res = makeAsn(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'Q':
res = makeGln(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'M':
res = makeMet(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'H':
res = makeHis(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'P':
res = makePro(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'F':
res = makePhe(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'Y':
res = makeTyr(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
elif AA == 'W':
res = makeTrp(segID, N, CA, C, O, geo) # depends on [control=['if'], data=[]]
else:
res = makeGly(segID, N, CA, C, O, geo)
cha = Chain('A')
cha.add(res)
mod = Model(0)
mod.add(cha)
struc = Structure('X')
struc.add(mod)
return struc |
def init(self):
    """! @brief Reads TPIU capabilities.
    Currently this method simply checks whether the TPIU supports SWO in asynchronous
    UART mode. The result of this check is available via the has_swo_uart property.
    """
    device_id = self.ap.read32(self.address + TPIU.DEVID)
    # A non-zero NRZ field in DEVID indicates asynchronous UART SWO support.
    self._has_swo_uart = bool(device_id & TPIU.DEVID_NRZ_MASK)
constant[! @brief Reads TPIU capabilities.
Currently this method simply checks whether the TPIU supports SWO in asynchronous
UART mode. The result of this check is available via the has_swo_uart property.
]
variable[devid] assign[=] call[name[self].ap.read32, parameter[binary_operation[name[self].address + name[TPIU].DEVID]]]
name[self]._has_swo_uart assign[=] compare[binary_operation[name[devid] <ast.BitAnd object at 0x7da2590d6b60> name[TPIU].DEVID_NRZ_MASK] not_equal[!=] constant[0]] | keyword[def] identifier[init] ( identifier[self] ):
literal[string]
identifier[devid] = identifier[self] . identifier[ap] . identifier[read32] ( identifier[self] . identifier[address] + identifier[TPIU] . identifier[DEVID] )
identifier[self] . identifier[_has_swo_uart] =( identifier[devid] & identifier[TPIU] . identifier[DEVID_NRZ_MASK] )!= literal[int] | def init(self):
"""! @brief Reads TPIU capabilities.
Currently this method simply checks whether the TPIU supports SWO in asynchronous
UART mode. The result of this check is available via the has_swo_uart property.
"""
devid = self.ap.read32(self.address + TPIU.DEVID)
self._has_swo_uart = devid & TPIU.DEVID_NRZ_MASK != 0 |
def _load(self, **kwargs):
    '''Populate this Service object from the device's JSON response.

    :params kwargs: keyword arguments passed through to the REST session
    :returns: populated Service object
    '''
    # Strip parameters that must never be sent on a load request.
    for disallowed in self._meta_data['disallowed_load_parameters']:
        kwargs.pop(disallowed, None)
    self._check_load_parameters(**kwargs)
    name = kwargs.pop('name', '')
    partition = kwargs.pop('partition', '')
    session = self._meta_data['icr_session']
    container_uri = self._meta_data['container']._meta_data['uri']
    service_uri = self._build_service_uri(container_uri, partition, name)
    response = session.get(service_uri, uri_as_parts=False, **kwargs)
    return self._produce_instance(response)
constant[Load python Service object with response JSON from BIG-IP®.
:params kwargs: keyword arguments for talking to the device
:returns: populated Service object
]
for taget[name[key]] in starred[call[name[self]._meta_data][constant[disallowed_load_parameters]]] begin[:]
if compare[name[key] in name[kwargs]] begin[:]
call[name[kwargs].pop, parameter[name[key]]]
call[name[self]._check_load_parameters, parameter[]]
variable[name] assign[=] call[name[kwargs].pop, parameter[constant[name], constant[]]]
variable[partition] assign[=] call[name[kwargs].pop, parameter[constant[partition], constant[]]]
variable[read_session] assign[=] call[name[self]._meta_data][constant[icr_session]]
variable[base_uri] assign[=] call[call[name[self]._meta_data][constant[container]]._meta_data][constant[uri]]
variable[load_uri] assign[=] call[name[self]._build_service_uri, parameter[name[base_uri], name[partition], name[name]]]
variable[response] assign[=] call[name[read_session].get, parameter[name[load_uri]]]
return[call[name[self]._produce_instance, parameter[name[response]]]] | keyword[def] identifier[_load] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
keyword[for] identifier[key] keyword[in] identifier[self] . identifier[_meta_data] [ literal[string] ]:
keyword[if] identifier[key] keyword[in] identifier[kwargs] :
identifier[kwargs] . identifier[pop] ( identifier[key] )
identifier[self] . identifier[_check_load_parameters] (** identifier[kwargs] )
identifier[name] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[partition] = identifier[kwargs] . identifier[pop] ( literal[string] , literal[string] )
identifier[read_session] = identifier[self] . identifier[_meta_data] [ literal[string] ]
identifier[base_uri] = identifier[self] . identifier[_meta_data] [ literal[string] ]. identifier[_meta_data] [ literal[string] ]
identifier[load_uri] = identifier[self] . identifier[_build_service_uri] ( identifier[base_uri] , identifier[partition] , identifier[name] )
identifier[response] = identifier[read_session] . identifier[get] ( identifier[load_uri] , identifier[uri_as_parts] = keyword[False] ,** identifier[kwargs] )
keyword[return] identifier[self] . identifier[_produce_instance] ( identifier[response] ) | def _load(self, **kwargs):
"""Load python Service object with response JSON from BIG-IP®.
:params kwargs: keyword arguments for talking to the device
:returns: populated Service object
"""
# Some kwargs should be popped before we do a load
for key in self._meta_data['disallowed_load_parameters']:
if key in kwargs:
kwargs.pop(key) # depends on [control=['if'], data=['key', 'kwargs']] # depends on [control=['for'], data=['key']]
self._check_load_parameters(**kwargs)
name = kwargs.pop('name', '')
partition = kwargs.pop('partition', '')
read_session = self._meta_data['icr_session']
base_uri = self._meta_data['container']._meta_data['uri']
load_uri = self._build_service_uri(base_uri, partition, name)
response = read_session.get(load_uri, uri_as_parts=False, **kwargs)
return self._produce_instance(response) |
def infinity_norm(A):
    """Infinity norm of a matrix (maximum absolute row sum).
    Parameters
    ----------
    A : csr_matrix, csc_matrix, sparse, or numpy matrix
        Sparse or dense matrix
    Returns
    -------
    n : float
        Infinity norm of the matrix
    Notes
    -----
    - This serves as an upper bound on spectral radius.
    - csr and csc avoid a deep copy
    - dense calls scipy.linalg.norm
    See Also
    --------
    scipy.linalg.norm : dense matrix norms
    Examples
    --------
    >>> import numpy as np
    >>> from scipy.sparse import spdiags
    >>> from pyamg.util.linalg import infinity_norm
    >>> n = 10
    >>> e = np.ones((n,1)).ravel()
    >>> data = [ -1*e, 2*e, -1*e ]
    >>> A = spdiags(data,[-1,0,1],n,n)
    >>> print(infinity_norm(A))
    4.0
    """
    if sparse.isspmatrix_csr(A) or sparse.isspmatrix_csc(A):
        # Rebuild in-place from |data| while sharing the index and indptr
        # arrays, so only the data array is copied.
        abs_A = A.__class__((np.abs(A.data), A.indices, A.indptr),
                            shape=A.shape)
        # Row sums of |A| via a matvec with the all-ones vector.
        return (abs_A * np.ones(A.shape[1], dtype=A.dtype)).max()
    elif sparse.isspmatrix(A):
        return (abs(A) * np.ones(A.shape[1], dtype=A.dtype)).max()
    else:
        return np.dot(np.abs(A), np.ones((A.shape[1],),
                                         dtype=A.dtype)).max()
constant[Infinity norm of a matrix (maximum absolute row sum).
Parameters
----------
A : csr_matrix, csc_matrix, sparse, or numpy matrix
Sparse or dense matrix
Returns
-------
n : float
Infinity norm of the matrix
Notes
-----
- This serves as an upper bound on spectral radius.
- csr and csc avoid a deep copy
- dense calls scipy.linalg.norm
See Also
--------
scipy.linalg.norm : dense matrix norms
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import spdiags
>>> from pyamg.util.linalg import infinity_norm
>>> n=10
>>> e = np.ones((n,1)).ravel()
>>> data = [ -1*e, 2*e, -1*e ]
>>> A = spdiags(data,[-1,0,1],n,n)
>>> print infinity_norm(A)
4.0
]
if <ast.BoolOp object at 0x7da204623940> begin[:]
variable[abs_A] assign[=] call[name[A].__class__, parameter[tuple[[<ast.Call object at 0x7da204622aa0>, <ast.Attribute object at 0x7da204621ae0>, <ast.Attribute object at 0x7da204623f70>]]]]
return[call[binary_operation[name[abs_A] * call[name[np].ones, parameter[call[name[A].shape][constant[1]]]]].max, parameter[]]] | keyword[def] identifier[infinity_norm] ( identifier[A] ):
literal[string]
keyword[if] identifier[sparse] . identifier[isspmatrix_csr] ( identifier[A] ) keyword[or] identifier[sparse] . identifier[isspmatrix_csc] ( identifier[A] ):
identifier[abs_A] = identifier[A] . identifier[__class__] (( identifier[np] . identifier[abs] ( identifier[A] . identifier[data] ), identifier[A] . identifier[indices] , identifier[A] . identifier[indptr] ),
identifier[shape] = identifier[A] . identifier[shape] )
keyword[return] ( identifier[abs_A] * identifier[np] . identifier[ones] (( identifier[A] . identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[A] . identifier[dtype] )). identifier[max] ()
keyword[elif] identifier[sparse] . identifier[isspmatrix] ( identifier[A] ):
keyword[return] ( identifier[abs] ( identifier[A] )* identifier[np] . identifier[ones] (( identifier[A] . identifier[shape] [ literal[int] ]), identifier[dtype] = identifier[A] . identifier[dtype] )). identifier[max] ()
keyword[else] :
keyword[return] identifier[np] . identifier[dot] ( identifier[np] . identifier[abs] ( identifier[A] ), identifier[np] . identifier[ones] (( identifier[A] . identifier[shape] [ literal[int] ],),
identifier[dtype] = identifier[A] . identifier[dtype] )). identifier[max] () | def infinity_norm(A):
"""Infinity norm of a matrix (maximum absolute row sum).
Parameters
----------
A : csr_matrix, csc_matrix, sparse, or numpy matrix
Sparse or dense matrix
Returns
-------
n : float
Infinity norm of the matrix
Notes
-----
- This serves as an upper bound on spectral radius.
- csr and csc avoid a deep copy
- dense calls scipy.linalg.norm
See Also
--------
scipy.linalg.norm : dense matrix norms
Examples
--------
>>> import numpy as np
>>> from scipy.sparse import spdiags
>>> from pyamg.util.linalg import infinity_norm
>>> n=10
>>> e = np.ones((n,1)).ravel()
>>> data = [ -1*e, 2*e, -1*e ]
>>> A = spdiags(data,[-1,0,1],n,n)
>>> print infinity_norm(A)
4.0
"""
if sparse.isspmatrix_csr(A) or sparse.isspmatrix_csc(A):
# avoid copying index and ptr arrays
abs_A = A.__class__((np.abs(A.data), A.indices, A.indptr), shape=A.shape)
return (abs_A * np.ones(A.shape[1], dtype=A.dtype)).max() # depends on [control=['if'], data=[]]
elif sparse.isspmatrix(A):
return (abs(A) * np.ones(A.shape[1], dtype=A.dtype)).max() # depends on [control=['if'], data=[]]
else:
return np.dot(np.abs(A), np.ones((A.shape[1],), dtype=A.dtype)).max() |
def show_multi_exposure(self):
"""Show InaSAFE Multi Exposure."""
from safe.gui.tools.multi_exposure_dialog import MultiExposureDialog
dialog = MultiExposureDialog(
self.iface.mainWindow(), self.iface)
dialog.exec_() | def function[show_multi_exposure, parameter[self]]:
constant[Show InaSAFE Multi Exposure.]
from relative_module[safe.gui.tools.multi_exposure_dialog] import module[MultiExposureDialog]
variable[dialog] assign[=] call[name[MultiExposureDialog], parameter[call[name[self].iface.mainWindow, parameter[]], name[self].iface]]
call[name[dialog].exec_, parameter[]] | keyword[def] identifier[show_multi_exposure] ( identifier[self] ):
literal[string]
keyword[from] identifier[safe] . identifier[gui] . identifier[tools] . identifier[multi_exposure_dialog] keyword[import] identifier[MultiExposureDialog]
identifier[dialog] = identifier[MultiExposureDialog] (
identifier[self] . identifier[iface] . identifier[mainWindow] (), identifier[self] . identifier[iface] )
identifier[dialog] . identifier[exec_] () | def show_multi_exposure(self):
"""Show InaSAFE Multi Exposure."""
from safe.gui.tools.multi_exposure_dialog import MultiExposureDialog
dialog = MultiExposureDialog(self.iface.mainWindow(), self.iface)
dialog.exec_() |
def turn_left(self, angle_degrees, rate=RATE):
"""
Turn to the left, staying on the spot
:param angle_degrees: How far to turn (degrees)
:param rate: The trurning speed (degrees/second)
:return:
"""
flight_time = angle_degrees / rate
self.start_turn_left(rate)
time.sleep(flight_time)
self.stop() | def function[turn_left, parameter[self, angle_degrees, rate]]:
constant[
Turn to the left, staying on the spot
:param angle_degrees: How far to turn (degrees)
:param rate: The trurning speed (degrees/second)
:return:
]
variable[flight_time] assign[=] binary_operation[name[angle_degrees] / name[rate]]
call[name[self].start_turn_left, parameter[name[rate]]]
call[name[time].sleep, parameter[name[flight_time]]]
call[name[self].stop, parameter[]] | keyword[def] identifier[turn_left] ( identifier[self] , identifier[angle_degrees] , identifier[rate] = identifier[RATE] ):
literal[string]
identifier[flight_time] = identifier[angle_degrees] / identifier[rate]
identifier[self] . identifier[start_turn_left] ( identifier[rate] )
identifier[time] . identifier[sleep] ( identifier[flight_time] )
identifier[self] . identifier[stop] () | def turn_left(self, angle_degrees, rate=RATE):
"""
Turn to the left, staying on the spot
:param angle_degrees: How far to turn (degrees)
:param rate: The trurning speed (degrees/second)
:return:
"""
flight_time = angle_degrees / rate
self.start_turn_left(rate)
time.sleep(flight_time)
self.stop() |
def unpacktar(tarfile, destdir):
""" Unpack given tarball into the specified dir """
nullfd = open(os.devnull, "w")
tarfile = cygpath(os.path.abspath(tarfile))
log.debug("unpack tar %s into %s", tarfile, destdir)
try:
check_call([TAR, '-xzf', tarfile], cwd=destdir,
stdout=nullfd, preexec_fn=_noumask)
except Exception:
log.exception("Error unpacking tar file %s to %s", tarfile, destdir)
raise
nullfd.close() | def function[unpacktar, parameter[tarfile, destdir]]:
constant[ Unpack given tarball into the specified dir ]
variable[nullfd] assign[=] call[name[open], parameter[name[os].devnull, constant[w]]]
variable[tarfile] assign[=] call[name[cygpath], parameter[call[name[os].path.abspath, parameter[name[tarfile]]]]]
call[name[log].debug, parameter[constant[unpack tar %s into %s], name[tarfile], name[destdir]]]
<ast.Try object at 0x7da1b0b811e0>
call[name[nullfd].close, parameter[]] | keyword[def] identifier[unpacktar] ( identifier[tarfile] , identifier[destdir] ):
literal[string]
identifier[nullfd] = identifier[open] ( identifier[os] . identifier[devnull] , literal[string] )
identifier[tarfile] = identifier[cygpath] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[tarfile] ))
identifier[log] . identifier[debug] ( literal[string] , identifier[tarfile] , identifier[destdir] )
keyword[try] :
identifier[check_call] ([ identifier[TAR] , literal[string] , identifier[tarfile] ], identifier[cwd] = identifier[destdir] ,
identifier[stdout] = identifier[nullfd] , identifier[preexec_fn] = identifier[_noumask] )
keyword[except] identifier[Exception] :
identifier[log] . identifier[exception] ( literal[string] , identifier[tarfile] , identifier[destdir] )
keyword[raise]
identifier[nullfd] . identifier[close] () | def unpacktar(tarfile, destdir):
""" Unpack given tarball into the specified dir """
nullfd = open(os.devnull, 'w')
tarfile = cygpath(os.path.abspath(tarfile))
log.debug('unpack tar %s into %s', tarfile, destdir)
try:
check_call([TAR, '-xzf', tarfile], cwd=destdir, stdout=nullfd, preexec_fn=_noumask) # depends on [control=['try'], data=[]]
except Exception:
log.exception('Error unpacking tar file %s to %s', tarfile, destdir)
raise # depends on [control=['except'], data=[]]
nullfd.close() |
def _collapse_device(self, node, flat):
"""Collapse device hierarchy into a flat folder."""
items = [item
for branch in node.branches
for item in self._collapse_device(branch, flat)
if item]
show_all = not flat or self._quickmenu_actions == 'all'
methods = node.methods if show_all else [
method
for method in node.methods
if method.method in self._quickmenu_actions
]
if flat:
items.extend(methods)
else:
items.append(MenuSection(None, methods))
return items | def function[_collapse_device, parameter[self, node, flat]]:
constant[Collapse device hierarchy into a flat folder.]
variable[items] assign[=] <ast.ListComp object at 0x7da18c4cdab0>
variable[show_all] assign[=] <ast.BoolOp object at 0x7da207f000d0>
variable[methods] assign[=] <ast.IfExp object at 0x7da207f01db0>
if name[flat] begin[:]
call[name[items].extend, parameter[name[methods]]]
return[name[items]] | keyword[def] identifier[_collapse_device] ( identifier[self] , identifier[node] , identifier[flat] ):
literal[string]
identifier[items] =[ identifier[item]
keyword[for] identifier[branch] keyword[in] identifier[node] . identifier[branches]
keyword[for] identifier[item] keyword[in] identifier[self] . identifier[_collapse_device] ( identifier[branch] , identifier[flat] )
keyword[if] identifier[item] ]
identifier[show_all] = keyword[not] identifier[flat] keyword[or] identifier[self] . identifier[_quickmenu_actions] == literal[string]
identifier[methods] = identifier[node] . identifier[methods] keyword[if] identifier[show_all] keyword[else] [
identifier[method]
keyword[for] identifier[method] keyword[in] identifier[node] . identifier[methods]
keyword[if] identifier[method] . identifier[method] keyword[in] identifier[self] . identifier[_quickmenu_actions]
]
keyword[if] identifier[flat] :
identifier[items] . identifier[extend] ( identifier[methods] )
keyword[else] :
identifier[items] . identifier[append] ( identifier[MenuSection] ( keyword[None] , identifier[methods] ))
keyword[return] identifier[items] | def _collapse_device(self, node, flat):
"""Collapse device hierarchy into a flat folder."""
items = [item for branch in node.branches for item in self._collapse_device(branch, flat) if item]
show_all = not flat or self._quickmenu_actions == 'all'
methods = node.methods if show_all else [method for method in node.methods if method.method in self._quickmenu_actions]
if flat:
items.extend(methods) # depends on [control=['if'], data=[]]
else:
items.append(MenuSection(None, methods))
return items |
def train_episode(agent, envs, preprocessors, t_max, render):
"""Complete an episode's worth of training for each environment."""
num_envs = len(envs)
# Buffers to hold trajectories, e.g. `env_xs[i]` will hold the observations
# for environment `i`.
env_xs, env_as = _2d_list(num_envs), _2d_list(num_envs)
env_rs, env_vs = _2d_list(num_envs), _2d_list(num_envs)
episode_rs = np.zeros(num_envs, dtype=np.float)
for p in preprocessors:
p.reset()
observations = [p.preprocess(e.reset())
for p, e in zip(preprocessors, envs)]
done = np.array([False for _ in range(num_envs)])
all_done = False
t = 1
while not all_done:
if render:
envs[0].render()
# NOTE(reed): Reshape to set the data shape.
agent.model.reshape([('data', (num_envs, preprocessors[0].obs_size))])
step_xs = np.vstack([o.ravel() for o in observations])
# Get actions and values for all environments in a single forward pass.
step_xs_nd = mx.nd.array(step_xs, ctx=agent.ctx)
data_batch = mx.io.DataBatch(data=[step_xs_nd], label=None)
agent.model.forward(data_batch, is_train=False)
_, step_vs, _, step_ps = agent.model.get_outputs()
step_ps = step_ps.asnumpy()
step_vs = step_vs.asnumpy()
step_as = agent.act(step_ps)
# Step each environment whose episode has not completed.
for i, env in enumerate(envs):
if not done[i]:
obs, r, done[i], _ = env.step(step_as[i])
# Record the observation, action, value, and reward in the
# buffers.
env_xs[i].append(step_xs[i].ravel())
env_as[i].append(step_as[i])
env_vs[i].append(step_vs[i][0])
env_rs[i].append(r)
episode_rs[i] += r
# Add 0 as the state value when done.
if done[i]:
env_vs[i].append(0.0)
else:
observations[i] = preprocessors[i].preprocess(obs)
# Perform an update every `t_max` steps.
if t == t_max:
# If the episode has not finished, add current state's value. This
# will be used to 'bootstrap' the final return (see Algorithm S3
# in A3C paper).
step_xs = np.vstack([o.ravel() for o in observations])
step_xs_nd = mx.nd.array(step_xs, ctx=agent.ctx)
data_batch = mx.io.DataBatch(data=[step_xs_nd], label=None)
agent.model.forward(data_batch, is_train=False)
_, extra_vs, _, _ = agent.model.get_outputs()
extra_vs = extra_vs.asnumpy()
for i in range(num_envs):
if not done[i]:
env_vs[i].append(extra_vs[i][0])
# Perform update and clear buffers.
env_xs = np.vstack(list(chain.from_iterable(env_xs)))
agent.train_step(env_xs, env_as, env_rs, env_vs)
env_xs, env_as = _2d_list(num_envs), _2d_list(num_envs)
env_rs, env_vs = _2d_list(num_envs), _2d_list(num_envs)
t = 0
all_done = np.all(done)
t += 1
return episode_rs | def function[train_episode, parameter[agent, envs, preprocessors, t_max, render]]:
constant[Complete an episode's worth of training for each environment.]
variable[num_envs] assign[=] call[name[len], parameter[name[envs]]]
<ast.Tuple object at 0x7da20c76cd90> assign[=] tuple[[<ast.Call object at 0x7da20c76c9a0>, <ast.Call object at 0x7da20c76c4c0>]]
<ast.Tuple object at 0x7da20c76d5d0> assign[=] tuple[[<ast.Call object at 0x7da20c76ebf0>, <ast.Call object at 0x7da20c76e050>]]
variable[episode_rs] assign[=] call[name[np].zeros, parameter[name[num_envs]]]
for taget[name[p]] in starred[name[preprocessors]] begin[:]
call[name[p].reset, parameter[]]
variable[observations] assign[=] <ast.ListComp object at 0x7da1b1f96740>
variable[done] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da1b1f968c0>]]
variable[all_done] assign[=] constant[False]
variable[t] assign[=] constant[1]
while <ast.UnaryOp object at 0x7da1b1ef1b40> begin[:]
if name[render] begin[:]
call[call[name[envs]][constant[0]].render, parameter[]]
call[name[agent].model.reshape, parameter[list[[<ast.Tuple object at 0x7da1b1ef1450>]]]]
variable[step_xs] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da1b1ef0f70>]]
variable[step_xs_nd] assign[=] call[name[mx].nd.array, parameter[name[step_xs]]]
variable[data_batch] assign[=] call[name[mx].io.DataBatch, parameter[]]
call[name[agent].model.forward, parameter[name[data_batch]]]
<ast.Tuple object at 0x7da1b1e77a00> assign[=] call[name[agent].model.get_outputs, parameter[]]
variable[step_ps] assign[=] call[name[step_ps].asnumpy, parameter[]]
variable[step_vs] assign[=] call[name[step_vs].asnumpy, parameter[]]
variable[step_as] assign[=] call[name[agent].act, parameter[name[step_ps]]]
for taget[tuple[[<ast.Name object at 0x7da1b1e774f0>, <ast.Name object at 0x7da1b1e774c0>]]] in starred[call[name[enumerate], parameter[name[envs]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1e773d0> begin[:]
<ast.Tuple object at 0x7da1b1e772e0> assign[=] call[name[env].step, parameter[call[name[step_as]][name[i]]]]
call[call[name[env_xs]][name[i]].append, parameter[call[call[name[step_xs]][name[i]].ravel, parameter[]]]]
call[call[name[env_as]][name[i]].append, parameter[call[name[step_as]][name[i]]]]
call[call[name[env_vs]][name[i]].append, parameter[call[call[name[step_vs]][name[i]]][constant[0]]]]
call[call[name[env_rs]][name[i]].append, parameter[name[r]]]
<ast.AugAssign object at 0x7da1b1e76890>
if call[name[done]][name[i]] begin[:]
call[call[name[env_vs]][name[i]].append, parameter[constant[0.0]]]
if compare[name[t] equal[==] name[t_max]] begin[:]
variable[step_xs] assign[=] call[name[np].vstack, parameter[<ast.ListComp object at 0x7da1b1e761d0>]]
variable[step_xs_nd] assign[=] call[name[mx].nd.array, parameter[name[step_xs]]]
variable[data_batch] assign[=] call[name[mx].io.DataBatch, parameter[]]
call[name[agent].model.forward, parameter[name[data_batch]]]
<ast.Tuple object at 0x7da1b1e75a80> assign[=] call[name[agent].model.get_outputs, parameter[]]
variable[extra_vs] assign[=] call[name[extra_vs].asnumpy, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[name[num_envs]]]] begin[:]
if <ast.UnaryOp object at 0x7da1b1e756c0> begin[:]
call[call[name[env_vs]][name[i]].append, parameter[call[call[name[extra_vs]][name[i]]][constant[0]]]]
variable[env_xs] assign[=] call[name[np].vstack, parameter[call[name[list], parameter[call[name[chain].from_iterable, parameter[name[env_xs]]]]]]]
call[name[agent].train_step, parameter[name[env_xs], name[env_as], name[env_rs], name[env_vs]]]
<ast.Tuple object at 0x7da1b1e74fd0> assign[=] tuple[[<ast.Call object at 0x7da1b1e74f10>, <ast.Call object at 0x7da1b1e74e80>]]
<ast.Tuple object at 0x7da1b1e74dc0> assign[=] tuple[[<ast.Call object at 0x7da2054a6c80>, <ast.Call object at 0x7da2054a54e0>]]
variable[t] assign[=] constant[0]
variable[all_done] assign[=] call[name[np].all, parameter[name[done]]]
<ast.AugAssign object at 0x7da2054a52a0>
return[name[episode_rs]] | keyword[def] identifier[train_episode] ( identifier[agent] , identifier[envs] , identifier[preprocessors] , identifier[t_max] , identifier[render] ):
literal[string]
identifier[num_envs] = identifier[len] ( identifier[envs] )
identifier[env_xs] , identifier[env_as] = identifier[_2d_list] ( identifier[num_envs] ), identifier[_2d_list] ( identifier[num_envs] )
identifier[env_rs] , identifier[env_vs] = identifier[_2d_list] ( identifier[num_envs] ), identifier[_2d_list] ( identifier[num_envs] )
identifier[episode_rs] = identifier[np] . identifier[zeros] ( identifier[num_envs] , identifier[dtype] = identifier[np] . identifier[float] )
keyword[for] identifier[p] keyword[in] identifier[preprocessors] :
identifier[p] . identifier[reset] ()
identifier[observations] =[ identifier[p] . identifier[preprocess] ( identifier[e] . identifier[reset] ())
keyword[for] identifier[p] , identifier[e] keyword[in] identifier[zip] ( identifier[preprocessors] , identifier[envs] )]
identifier[done] = identifier[np] . identifier[array] ([ keyword[False] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[num_envs] )])
identifier[all_done] = keyword[False]
identifier[t] = literal[int]
keyword[while] keyword[not] identifier[all_done] :
keyword[if] identifier[render] :
identifier[envs] [ literal[int] ]. identifier[render] ()
identifier[agent] . identifier[model] . identifier[reshape] ([( literal[string] ,( identifier[num_envs] , identifier[preprocessors] [ literal[int] ]. identifier[obs_size] ))])
identifier[step_xs] = identifier[np] . identifier[vstack] ([ identifier[o] . identifier[ravel] () keyword[for] identifier[o] keyword[in] identifier[observations] ])
identifier[step_xs_nd] = identifier[mx] . identifier[nd] . identifier[array] ( identifier[step_xs] , identifier[ctx] = identifier[agent] . identifier[ctx] )
identifier[data_batch] = identifier[mx] . identifier[io] . identifier[DataBatch] ( identifier[data] =[ identifier[step_xs_nd] ], identifier[label] = keyword[None] )
identifier[agent] . identifier[model] . identifier[forward] ( identifier[data_batch] , identifier[is_train] = keyword[False] )
identifier[_] , identifier[step_vs] , identifier[_] , identifier[step_ps] = identifier[agent] . identifier[model] . identifier[get_outputs] ()
identifier[step_ps] = identifier[step_ps] . identifier[asnumpy] ()
identifier[step_vs] = identifier[step_vs] . identifier[asnumpy] ()
identifier[step_as] = identifier[agent] . identifier[act] ( identifier[step_ps] )
keyword[for] identifier[i] , identifier[env] keyword[in] identifier[enumerate] ( identifier[envs] ):
keyword[if] keyword[not] identifier[done] [ identifier[i] ]:
identifier[obs] , identifier[r] , identifier[done] [ identifier[i] ], identifier[_] = identifier[env] . identifier[step] ( identifier[step_as] [ identifier[i] ])
identifier[env_xs] [ identifier[i] ]. identifier[append] ( identifier[step_xs] [ identifier[i] ]. identifier[ravel] ())
identifier[env_as] [ identifier[i] ]. identifier[append] ( identifier[step_as] [ identifier[i] ])
identifier[env_vs] [ identifier[i] ]. identifier[append] ( identifier[step_vs] [ identifier[i] ][ literal[int] ])
identifier[env_rs] [ identifier[i] ]. identifier[append] ( identifier[r] )
identifier[episode_rs] [ identifier[i] ]+= identifier[r]
keyword[if] identifier[done] [ identifier[i] ]:
identifier[env_vs] [ identifier[i] ]. identifier[append] ( literal[int] )
keyword[else] :
identifier[observations] [ identifier[i] ]= identifier[preprocessors] [ identifier[i] ]. identifier[preprocess] ( identifier[obs] )
keyword[if] identifier[t] == identifier[t_max] :
identifier[step_xs] = identifier[np] . identifier[vstack] ([ identifier[o] . identifier[ravel] () keyword[for] identifier[o] keyword[in] identifier[observations] ])
identifier[step_xs_nd] = identifier[mx] . identifier[nd] . identifier[array] ( identifier[step_xs] , identifier[ctx] = identifier[agent] . identifier[ctx] )
identifier[data_batch] = identifier[mx] . identifier[io] . identifier[DataBatch] ( identifier[data] =[ identifier[step_xs_nd] ], identifier[label] = keyword[None] )
identifier[agent] . identifier[model] . identifier[forward] ( identifier[data_batch] , identifier[is_train] = keyword[False] )
identifier[_] , identifier[extra_vs] , identifier[_] , identifier[_] = identifier[agent] . identifier[model] . identifier[get_outputs] ()
identifier[extra_vs] = identifier[extra_vs] . identifier[asnumpy] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_envs] ):
keyword[if] keyword[not] identifier[done] [ identifier[i] ]:
identifier[env_vs] [ identifier[i] ]. identifier[append] ( identifier[extra_vs] [ identifier[i] ][ literal[int] ])
identifier[env_xs] = identifier[np] . identifier[vstack] ( identifier[list] ( identifier[chain] . identifier[from_iterable] ( identifier[env_xs] )))
identifier[agent] . identifier[train_step] ( identifier[env_xs] , identifier[env_as] , identifier[env_rs] , identifier[env_vs] )
identifier[env_xs] , identifier[env_as] = identifier[_2d_list] ( identifier[num_envs] ), identifier[_2d_list] ( identifier[num_envs] )
identifier[env_rs] , identifier[env_vs] = identifier[_2d_list] ( identifier[num_envs] ), identifier[_2d_list] ( identifier[num_envs] )
identifier[t] = literal[int]
identifier[all_done] = identifier[np] . identifier[all] ( identifier[done] )
identifier[t] += literal[int]
keyword[return] identifier[episode_rs] | def train_episode(agent, envs, preprocessors, t_max, render):
"""Complete an episode's worth of training for each environment."""
num_envs = len(envs)
# Buffers to hold trajectories, e.g. `env_xs[i]` will hold the observations
# for environment `i`.
(env_xs, env_as) = (_2d_list(num_envs), _2d_list(num_envs))
(env_rs, env_vs) = (_2d_list(num_envs), _2d_list(num_envs))
episode_rs = np.zeros(num_envs, dtype=np.float)
for p in preprocessors:
p.reset() # depends on [control=['for'], data=['p']]
observations = [p.preprocess(e.reset()) for (p, e) in zip(preprocessors, envs)]
done = np.array([False for _ in range(num_envs)])
all_done = False
t = 1
while not all_done:
if render:
envs[0].render() # depends on [control=['if'], data=[]]
# NOTE(reed): Reshape to set the data shape.
agent.model.reshape([('data', (num_envs, preprocessors[0].obs_size))])
step_xs = np.vstack([o.ravel() for o in observations])
# Get actions and values for all environments in a single forward pass.
step_xs_nd = mx.nd.array(step_xs, ctx=agent.ctx)
data_batch = mx.io.DataBatch(data=[step_xs_nd], label=None)
agent.model.forward(data_batch, is_train=False)
(_, step_vs, _, step_ps) = agent.model.get_outputs()
step_ps = step_ps.asnumpy()
step_vs = step_vs.asnumpy()
step_as = agent.act(step_ps)
# Step each environment whose episode has not completed.
for (i, env) in enumerate(envs):
if not done[i]:
(obs, r, done[i], _) = env.step(step_as[i])
# Record the observation, action, value, and reward in the
# buffers.
env_xs[i].append(step_xs[i].ravel())
env_as[i].append(step_as[i])
env_vs[i].append(step_vs[i][0])
env_rs[i].append(r)
episode_rs[i] += r
# Add 0 as the state value when done.
if done[i]:
env_vs[i].append(0.0) # depends on [control=['if'], data=[]]
else:
observations[i] = preprocessors[i].preprocess(obs) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# Perform an update every `t_max` steps.
if t == t_max:
# If the episode has not finished, add current state's value. This
# will be used to 'bootstrap' the final return (see Algorithm S3
# in A3C paper).
step_xs = np.vstack([o.ravel() for o in observations])
step_xs_nd = mx.nd.array(step_xs, ctx=agent.ctx)
data_batch = mx.io.DataBatch(data=[step_xs_nd], label=None)
agent.model.forward(data_batch, is_train=False)
(_, extra_vs, _, _) = agent.model.get_outputs()
extra_vs = extra_vs.asnumpy()
for i in range(num_envs):
if not done[i]:
env_vs[i].append(extra_vs[i][0]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
# Perform update and clear buffers.
env_xs = np.vstack(list(chain.from_iterable(env_xs)))
agent.train_step(env_xs, env_as, env_rs, env_vs)
(env_xs, env_as) = (_2d_list(num_envs), _2d_list(num_envs))
(env_rs, env_vs) = (_2d_list(num_envs), _2d_list(num_envs))
t = 0 # depends on [control=['if'], data=['t']]
all_done = np.all(done)
t += 1 # depends on [control=['while'], data=[]]
return episode_rs |
def as_dict(self, replace_value_names=True):
"""Return the whole section as a dict"""
old_children = self.children
self.children = self.terms
d = super(SectionTerm, self).as_dict(replace_value_names)
self.children = old_children
return d | def function[as_dict, parameter[self, replace_value_names]]:
constant[Return the whole section as a dict]
variable[old_children] assign[=] name[self].children
name[self].children assign[=] name[self].terms
variable[d] assign[=] call[call[name[super], parameter[name[SectionTerm], name[self]]].as_dict, parameter[name[replace_value_names]]]
name[self].children assign[=] name[old_children]
return[name[d]] | keyword[def] identifier[as_dict] ( identifier[self] , identifier[replace_value_names] = keyword[True] ):
literal[string]
identifier[old_children] = identifier[self] . identifier[children]
identifier[self] . identifier[children] = identifier[self] . identifier[terms]
identifier[d] = identifier[super] ( identifier[SectionTerm] , identifier[self] ). identifier[as_dict] ( identifier[replace_value_names] )
identifier[self] . identifier[children] = identifier[old_children]
keyword[return] identifier[d] | def as_dict(self, replace_value_names=True):
"""Return the whole section as a dict"""
old_children = self.children
self.children = self.terms
d = super(SectionTerm, self).as_dict(replace_value_names)
self.children = old_children
return d |
def delete_colormap(self, name=None):
"""
Deletes the colormap with the specified name. None means use the internal
name (see get_name())
"""
if name == None: name = self.get_name()
if name == "" or not type(name)==str: return "Error: invalid name."
# assemble the path to the colormap
path = _os.path.join(_settings.path_home, 'colormaps', name+".cmap")
_os.unlink(path)
return self | def function[delete_colormap, parameter[self, name]]:
constant[
Deletes the colormap with the specified name. None means use the internal
name (see get_name())
]
if compare[name[name] equal[==] constant[None]] begin[:]
variable[name] assign[=] call[name[self].get_name, parameter[]]
if <ast.BoolOp object at 0x7da18dc98d90> begin[:]
return[constant[Error: invalid name.]]
variable[path] assign[=] call[name[_os].path.join, parameter[name[_settings].path_home, constant[colormaps], binary_operation[name[name] + constant[.cmap]]]]
call[name[_os].unlink, parameter[name[path]]]
return[name[self]] | keyword[def] identifier[delete_colormap] ( identifier[self] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[name] == keyword[None] : identifier[name] = identifier[self] . identifier[get_name] ()
keyword[if] identifier[name] == literal[string] keyword[or] keyword[not] identifier[type] ( identifier[name] )== identifier[str] : keyword[return] literal[string]
identifier[path] = identifier[_os] . identifier[path] . identifier[join] ( identifier[_settings] . identifier[path_home] , literal[string] , identifier[name] + literal[string] )
identifier[_os] . identifier[unlink] ( identifier[path] )
keyword[return] identifier[self] | def delete_colormap(self, name=None):
"""
Deletes the colormap with the specified name. None means use the internal
name (see get_name())
"""
if name == None:
name = self.get_name() # depends on [control=['if'], data=['name']]
if name == '' or not type(name) == str:
return 'Error: invalid name.' # depends on [control=['if'], data=[]]
# assemble the path to the colormap
path = _os.path.join(_settings.path_home, 'colormaps', name + '.cmap')
_os.unlink(path)
return self |
def looks_like_python(name):
# type: (str) -> bool
"""
Determine whether the supplied filename looks like a possible name of python.
:param str name: The name of the provided file.
:return: Whether the provided name looks like python.
:rtype: bool
"""
if not any(name.lower().startswith(py_name) for py_name in PYTHON_IMPLEMENTATIONS):
return False
match = RE_MATCHER.match(name)
if match:
return any(fnmatch(name, rule) for rule in MATCH_RULES)
return False | def function[looks_like_python, parameter[name]]:
constant[
Determine whether the supplied filename looks like a possible name of python.
:param str name: The name of the provided file.
:return: Whether the provided name looks like python.
:rtype: bool
]
if <ast.UnaryOp object at 0x7da2054a47c0> begin[:]
return[constant[False]]
variable[match] assign[=] call[name[RE_MATCHER].match, parameter[name[name]]]
if name[match] begin[:]
return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da18bcc92a0>]]]
return[constant[False]] | keyword[def] identifier[looks_like_python] ( identifier[name] ):
literal[string]
keyword[if] keyword[not] identifier[any] ( identifier[name] . identifier[lower] (). identifier[startswith] ( identifier[py_name] ) keyword[for] identifier[py_name] keyword[in] identifier[PYTHON_IMPLEMENTATIONS] ):
keyword[return] keyword[False]
identifier[match] = identifier[RE_MATCHER] . identifier[match] ( identifier[name] )
keyword[if] identifier[match] :
keyword[return] identifier[any] ( identifier[fnmatch] ( identifier[name] , identifier[rule] ) keyword[for] identifier[rule] keyword[in] identifier[MATCH_RULES] )
keyword[return] keyword[False] | def looks_like_python(name):
# type: (str) -> bool
'\n Determine whether the supplied filename looks like a possible name of python.\n\n :param str name: The name of the provided file.\n :return: Whether the provided name looks like python.\n :rtype: bool\n '
if not any((name.lower().startswith(py_name) for py_name in PYTHON_IMPLEMENTATIONS)):
return False # depends on [control=['if'], data=[]]
match = RE_MATCHER.match(name)
if match:
return any((fnmatch(name, rule) for rule in MATCH_RULES)) # depends on [control=['if'], data=[]]
return False |
def parameterize(string, separator='-'):
"""
Replace special characters in a string so that it may be used as part of a
'pretty' URL.
Example::
>>> parameterize(u"Donald E. Knuth")
'donald-e-knuth'
"""
string = transliterate(string)
# Turn unwanted chars into the separator
string = re.sub(r"(?i)[^a-z0-9\-_]+", separator, string)
if separator:
re_sep = re.escape(separator)
# No more than one of the separator in a row.
string = re.sub(r'%s{2,}' % re_sep, separator, string)
# Remove leading/trailing separator.
string = re.sub(r"(?i)^{sep}|{sep}$".format(sep=re_sep), '', string)
return string.lower() | def function[parameterize, parameter[string, separator]]:
constant[
Replace special characters in a string so that it may be used as part of a
'pretty' URL.
Example::
>>> parameterize(u"Donald E. Knuth")
'donald-e-knuth'
]
variable[string] assign[=] call[name[transliterate], parameter[name[string]]]
variable[string] assign[=] call[name[re].sub, parameter[constant[(?i)[^a-z0-9\-_]+], name[separator], name[string]]]
if name[separator] begin[:]
variable[re_sep] assign[=] call[name[re].escape, parameter[name[separator]]]
variable[string] assign[=] call[name[re].sub, parameter[binary_operation[constant[%s{2,}] <ast.Mod object at 0x7da2590d6920> name[re_sep]], name[separator], name[string]]]
variable[string] assign[=] call[name[re].sub, parameter[call[constant[(?i)^{sep}|{sep}$].format, parameter[]], constant[], name[string]]]
return[call[name[string].lower, parameter[]]] | keyword[def] identifier[parameterize] ( identifier[string] , identifier[separator] = literal[string] ):
literal[string]
identifier[string] = identifier[transliterate] ( identifier[string] )
identifier[string] = identifier[re] . identifier[sub] ( literal[string] , identifier[separator] , identifier[string] )
keyword[if] identifier[separator] :
identifier[re_sep] = identifier[re] . identifier[escape] ( identifier[separator] )
identifier[string] = identifier[re] . identifier[sub] ( literal[string] % identifier[re_sep] , identifier[separator] , identifier[string] )
identifier[string] = identifier[re] . identifier[sub] ( literal[string] . identifier[format] ( identifier[sep] = identifier[re_sep] ), literal[string] , identifier[string] )
keyword[return] identifier[string] . identifier[lower] () | def parameterize(string, separator='-'):
"""
Replace special characters in a string so that it may be used as part of a
'pretty' URL.
Example::
>>> parameterize(u"Donald E. Knuth")
'donald-e-knuth'
"""
string = transliterate(string)
# Turn unwanted chars into the separator
string = re.sub('(?i)[^a-z0-9\\-_]+', separator, string)
if separator:
re_sep = re.escape(separator)
# No more than one of the separator in a row.
string = re.sub('%s{2,}' % re_sep, separator, string)
# Remove leading/trailing separator.
string = re.sub('(?i)^{sep}|{sep}$'.format(sep=re_sep), '', string) # depends on [control=['if'], data=[]]
return string.lower() |
def pitch(self, n_semitones, quick=False):
    '''Pitch shift the audio without changing the tempo.

    This effect uses the WSOLA algorithm. The audio is chopped up into
    segments which are then shifted in the time domain and overlapped
    (cross-faded) at points where their waveforms are most similar as
    determined by measurement of least squares.

    Parameters
    ----------
    n_semitones : float
        The number of semitones to shift. Can be positive or negative.
    quick : bool, default=False
        If True, this effect will run faster but with lower sound quality.

    Returns
    -------
    self
        The transformer object, so that effects can be chained.

    Raises
    ------
    ValueError
        If n_semitones is not a number or quick is not a boolean.

    See Also
    --------
    bend, speed, tempo
    '''
    if not is_number(n_semitones):
        # Bug fix: negative shifts are valid (see the +/-12 range check
        # below), so the message must not claim the value is "positive".
        raise ValueError("n_semitones must be a number")
    if n_semitones < -12 or n_semitones > 12:
        logger.warning(
            "Using an extreme pitch shift. "
            "Quality of results will be poor"
        )
    if not isinstance(quick, bool):
        raise ValueError("quick must be a boolean.")

    effect_args = ['pitch']
    if quick:
        effect_args.append('-q')
    # SoX's pitch effect takes its shift in cents (1 semitone = 100 cents).
    effect_args.append('{:f}'.format(n_semitones * 100.))

    self.effects.extend(effect_args)
    self.effects_log.append('pitch')
    return self
constant[Pitch shift the audio without changing the tempo.
This effect uses the WSOLA algorithm. The audio is chopped up into
segments which are then shifted in the time domain and overlapped
(cross-faded) at points where their waveforms are most similar as
determined by measurement of least squares.
Parameters
----------
n_semitones : float
The number of semitones to shift. Can be positive or negative.
quick : bool, default=False
If True, this effect will run faster but with lower sound quality.
See Also
--------
bend, speed, tempo
]
if <ast.UnaryOp object at 0x7da1b013cd30> begin[:]
<ast.Raise object at 0x7da1b013d450>
if <ast.BoolOp object at 0x7da1b013cf70> begin[:]
call[name[logger].warning, parameter[constant[Using an extreme pitch shift. Quality of results will be poor]]]
if <ast.UnaryOp object at 0x7da1b013e740> begin[:]
<ast.Raise object at 0x7da1b013ea40>
variable[effect_args] assign[=] list[[<ast.Constant object at 0x7da1b013d900>]]
if name[quick] begin[:]
call[name[effect_args].append, parameter[constant[-q]]]
call[name[effect_args].append, parameter[call[constant[{:f}].format, parameter[binary_operation[name[n_semitones] * constant[100.0]]]]]]
call[name[self].effects.extend, parameter[name[effect_args]]]
call[name[self].effects_log.append, parameter[constant[pitch]]]
return[name[self]] | keyword[def] identifier[pitch] ( identifier[self] , identifier[n_semitones] , identifier[quick] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[is_number] ( identifier[n_semitones] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[n_semitones] <- literal[int] keyword[or] identifier[n_semitones] > literal[int] :
identifier[logger] . identifier[warning] (
literal[string]
literal[string]
)
keyword[if] keyword[not] identifier[isinstance] ( identifier[quick] , identifier[bool] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[effect_args] =[ literal[string] ]
keyword[if] identifier[quick] :
identifier[effect_args] . identifier[append] ( literal[string] )
identifier[effect_args] . identifier[append] ( literal[string] . identifier[format] ( identifier[n_semitones] * literal[int] ))
identifier[self] . identifier[effects] . identifier[extend] ( identifier[effect_args] )
identifier[self] . identifier[effects_log] . identifier[append] ( literal[string] )
keyword[return] identifier[self] | def pitch(self, n_semitones, quick=False):
"""Pitch shift the audio without changing the tempo.
This effect uses the WSOLA algorithm. The audio is chopped up into
segments which are then shifted in the time domain and overlapped
(cross-faded) at points where their waveforms are most similar as
determined by measurement of least squares.
Parameters
----------
n_semitones : float
The number of semitones to shift. Can be positive or negative.
quick : bool, default=False
If True, this effect will run faster but with lower sound quality.
See Also
--------
bend, speed, tempo
"""
if not is_number(n_semitones):
raise ValueError('n_semitones must be a positive number') # depends on [control=['if'], data=[]]
if n_semitones < -12 or n_semitones > 12:
logger.warning('Using an extreme pitch shift. Quality of results will be poor') # depends on [control=['if'], data=[]]
if not isinstance(quick, bool):
raise ValueError('quick must be a boolean.') # depends on [control=['if'], data=[]]
effect_args = ['pitch']
if quick:
effect_args.append('-q') # depends on [control=['if'], data=[]]
effect_args.append('{:f}'.format(n_semitones * 100.0))
self.effects.extend(effect_args)
self.effects_log.append('pitch')
return self |
def save(self, dolist=0):
    """Return the .par format representation of this parameter.

    Parameters
    ----------
    dolist : int, optional
        If true, return the fields as a list of strings. Default is to
        return a single comma-joined string appropriate for writing to
        a file.

    Returns
    -------
    list of str or str
        The serialized parameter fields.
    """
    quoted = not dolist
    ndim = len(self.shape)
    # Field layout: name, type, mode, ndim, one (dim, step) pair per axis,
    # then min/choice, max, prompt, and finally one field per value element.
    # (The previously computed ``array_size`` was never used; removed.)
    fields = (7 + 2 * ndim + len(self.value)) * [""]
    fields[0] = self.name
    fields[1] = self.type
    fields[2] = self.mode
    fields[3] = str(ndim)
    # ``idx`` rather than ``next`` to avoid shadowing the builtin.
    idx = 4
    for d in self.shape:
        fields[idx] = str(d)
        idx += 1
        fields[idx] = '1'
        idx += 1
    nvstart = 7 + 2 * ndim
    if self.choice is not None:
        # Encode the choice list as '|choice1|choice2|' in the min field.
        schoice = list(map(self.toString, self.choice))
        schoice.insert(0, '')
        schoice.append('')
        fields[nvstart - 3] = repr('|'.join(schoice))
    elif self.min not in [None, INDEF]:
        fields[nvstart - 3] = self.toString(self.min, quoted=quoted)
    # insert an escaped line break before min field
    if quoted:
        fields[nvstart - 3] = '\\\n' + fields[nvstart - 3]
    if self.max not in [None, INDEF]:
        fields[nvstart - 2] = self.toString(self.max, quoted=quoted)
    if self.prompt:
        if quoted:
            sprompt = repr(self.prompt)
        else:
            sprompt = self.prompt
        # prompt can have embedded newlines (which are printed)
        sprompt = sprompt.replace(r'\012', '\n')
        sprompt = sprompt.replace(r'\n', '\n')
        fields[nvstart - 1] = sprompt
    for i in range(len(self.value)):
        fields[nvstart + i] = self.toString(self.value[i], quoted=quoted)
    if dolist:
        return fields
    # insert an escaped line break before value fields
    fields[nvstart] = '\\\n' + fields[nvstart]
    return ','.join(fields)
constant[Return .par format string for this parameter
If dolist is set, returns fields as a list of strings. Default
is to return a single string appropriate for writing to a file.
]
variable[quoted] assign[=] <ast.UnaryOp object at 0x7da1b0fad4e0>
variable[array_size] assign[=] constant[1]
for taget[name[d]] in starred[name[self].shape] begin[:]
variable[array_size] assign[=] binary_operation[name[d] * name[array_size]]
variable[ndim] assign[=] call[name[len], parameter[name[self].shape]]
variable[fields] assign[=] binary_operation[binary_operation[binary_operation[constant[7] + binary_operation[constant[2] * name[ndim]]] + call[name[len], parameter[name[self].value]]] * list[[<ast.Constant object at 0x7da1b0faee90>]]]
call[name[fields]][constant[0]] assign[=] name[self].name
call[name[fields]][constant[1]] assign[=] name[self].type
call[name[fields]][constant[2]] assign[=] name[self].mode
call[name[fields]][constant[3]] assign[=] call[name[str], parameter[name[ndim]]]
variable[next] assign[=] constant[4]
for taget[name[d]] in starred[name[self].shape] begin[:]
call[name[fields]][name[next]] assign[=] call[name[str], parameter[name[d]]]
<ast.AugAssign object at 0x7da1b0facf70>
call[name[fields]][name[next]] assign[=] constant[1]
<ast.AugAssign object at 0x7da1b0fae200>
variable[nvstart] assign[=] binary_operation[constant[7] + binary_operation[constant[2] * name[ndim]]]
if compare[name[self].choice is_not constant[None]] begin[:]
variable[schoice] assign[=] call[name[list], parameter[call[name[map], parameter[name[self].toString, name[self].choice]]]]
call[name[schoice].insert, parameter[constant[0], constant[]]]
call[name[schoice].append, parameter[constant[]]]
call[name[fields]][binary_operation[name[nvstart] - constant[3]]] assign[=] call[name[repr], parameter[call[constant[|].join, parameter[name[schoice]]]]]
if name[quoted] begin[:]
call[name[fields]][binary_operation[name[nvstart] - constant[3]]] assign[=] binary_operation[constant[\
] + call[name[fields]][binary_operation[name[nvstart] - constant[3]]]]
if compare[name[self].max <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b0fada20>, <ast.Name object at 0x7da1b0fae5c0>]]] begin[:]
call[name[fields]][binary_operation[name[nvstart] - constant[2]]] assign[=] call[name[self].toString, parameter[name[self].max]]
if name[self].prompt begin[:]
if name[quoted] begin[:]
variable[sprompt] assign[=] call[name[repr], parameter[name[self].prompt]]
variable[sprompt] assign[=] call[name[sprompt].replace, parameter[constant[\012], constant[
]]]
variable[sprompt] assign[=] call[name[sprompt].replace, parameter[constant[\n], constant[
]]]
call[name[fields]][binary_operation[name[nvstart] - constant[1]]] assign[=] name[sprompt]
for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[self].value]]]]] begin[:]
call[name[fields]][binary_operation[name[nvstart] + name[i]]] assign[=] call[name[self].toString, parameter[call[name[self].value][name[i]]]]
if name[dolist] begin[:]
return[name[fields]] | keyword[def] identifier[save] ( identifier[self] , identifier[dolist] = literal[int] ):
literal[string]
identifier[quoted] = keyword[not] identifier[dolist]
identifier[array_size] = literal[int]
keyword[for] identifier[d] keyword[in] identifier[self] . identifier[shape] :
identifier[array_size] = identifier[d] * identifier[array_size]
identifier[ndim] = identifier[len] ( identifier[self] . identifier[shape] )
identifier[fields] =( literal[int] + literal[int] * identifier[ndim] + identifier[len] ( identifier[self] . identifier[value] ))*[ literal[string] ]
identifier[fields] [ literal[int] ]= identifier[self] . identifier[name]
identifier[fields] [ literal[int] ]= identifier[self] . identifier[type]
identifier[fields] [ literal[int] ]= identifier[self] . identifier[mode]
identifier[fields] [ literal[int] ]= identifier[str] ( identifier[ndim] )
identifier[next] = literal[int]
keyword[for] identifier[d] keyword[in] identifier[self] . identifier[shape] :
identifier[fields] [ identifier[next] ]= identifier[str] ( identifier[d] ); identifier[next] += literal[int]
identifier[fields] [ identifier[next] ]= literal[string] ; identifier[next] += literal[int]
identifier[nvstart] = literal[int] + literal[int] * identifier[ndim]
keyword[if] identifier[self] . identifier[choice] keyword[is] keyword[not] keyword[None] :
identifier[schoice] = identifier[list] ( identifier[map] ( identifier[self] . identifier[toString] , identifier[self] . identifier[choice] ))
identifier[schoice] . identifier[insert] ( literal[int] , literal[string] )
identifier[schoice] . identifier[append] ( literal[string] )
identifier[fields] [ identifier[nvstart] - literal[int] ]= identifier[repr] ( literal[string] . identifier[join] ( identifier[schoice] ))
keyword[elif] identifier[self] . identifier[min] keyword[not] keyword[in] [ keyword[None] , identifier[INDEF] ]:
identifier[fields] [ identifier[nvstart] - literal[int] ]= identifier[self] . identifier[toString] ( identifier[self] . identifier[min] , identifier[quoted] = identifier[quoted] )
keyword[if] identifier[quoted] :
identifier[fields] [ identifier[nvstart] - literal[int] ]= literal[string] + identifier[fields] [ identifier[nvstart] - literal[int] ]
keyword[if] identifier[self] . identifier[max] keyword[not] keyword[in] [ keyword[None] , identifier[INDEF] ]:
identifier[fields] [ identifier[nvstart] - literal[int] ]= identifier[self] . identifier[toString] ( identifier[self] . identifier[max] , identifier[quoted] = identifier[quoted] )
keyword[if] identifier[self] . identifier[prompt] :
keyword[if] identifier[quoted] :
identifier[sprompt] = identifier[repr] ( identifier[self] . identifier[prompt] )
keyword[else] :
identifier[sprompt] = identifier[self] . identifier[prompt]
identifier[sprompt] = identifier[sprompt] . identifier[replace] ( literal[string] , literal[string] )
identifier[sprompt] = identifier[sprompt] . identifier[replace] ( literal[string] , literal[string] )
identifier[fields] [ identifier[nvstart] - literal[int] ]= identifier[sprompt]
keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[value] )):
identifier[fields] [ identifier[nvstart] + identifier[i] ]= identifier[self] . identifier[toString] ( identifier[self] . identifier[value] [ identifier[i] ], identifier[quoted] = identifier[quoted] )
keyword[if] identifier[dolist] :
keyword[return] identifier[fields]
keyword[else] :
identifier[fields] [ identifier[nvstart] ]= literal[string] + identifier[fields] [ identifier[nvstart] ]
keyword[return] literal[string] . identifier[join] ( identifier[fields] ) | def save(self, dolist=0):
"""Return .par format string for this parameter
If dolist is set, returns fields as a list of strings. Default
is to return a single string appropriate for writing to a file.
"""
quoted = not dolist
array_size = 1
for d in self.shape:
array_size = d * array_size # depends on [control=['for'], data=['d']]
ndim = len(self.shape)
fields = (7 + 2 * ndim + len(self.value)) * ['']
fields[0] = self.name
fields[1] = self.type
fields[2] = self.mode
fields[3] = str(ndim)
next = 4
for d in self.shape:
fields[next] = str(d)
next += 1
fields[next] = '1'
next += 1 # depends on [control=['for'], data=['d']]
nvstart = 7 + 2 * ndim
if self.choice is not None:
schoice = list(map(self.toString, self.choice))
schoice.insert(0, '')
schoice.append('')
fields[nvstart - 3] = repr('|'.join(schoice)) # depends on [control=['if'], data=[]]
elif self.min not in [None, INDEF]:
fields[nvstart - 3] = self.toString(self.min, quoted=quoted) # depends on [control=['if'], data=[]]
# insert an escaped line break before min field
if quoted:
fields[nvstart - 3] = '\\\n' + fields[nvstart - 3] # depends on [control=['if'], data=[]]
if self.max not in [None, INDEF]:
fields[nvstart - 2] = self.toString(self.max, quoted=quoted) # depends on [control=['if'], data=[]]
if self.prompt:
if quoted:
sprompt = repr(self.prompt) # depends on [control=['if'], data=[]]
else:
sprompt = self.prompt
# prompt can have embedded newlines (which are printed)
sprompt = sprompt.replace('\\012', '\n')
sprompt = sprompt.replace('\\n', '\n')
fields[nvstart - 1] = sprompt # depends on [control=['if'], data=[]]
for i in range(len(self.value)):
fields[nvstart + i] = self.toString(self.value[i], quoted=quoted) # depends on [control=['for'], data=['i']]
# insert an escaped line break before value fields
if dolist:
return fields # depends on [control=['if'], data=[]]
else:
fields[nvstart] = '\\\n' + fields[nvstart]
return ','.join(fields) |
def diff(name, **kwargs):
    '''
    Gets the difference between the candidate and the current configuration.

    .. code-block:: yaml

            get the diff:
              junos:
                - diff
                - id: 10

    Parameters:
      Optional
        * id:
          The rollback id value [0-49]. (default = 0)
    '''
    # Standard salt state return structure; the execution module supplies
    # the actual diff under 'changes'.
    return {
        'name': name,
        'changes': __salt__['junos.diff'](**kwargs),
        'result': True,
        'comment': '',
    }
constant[
Gets the difference between the candidate and the current configuration.
.. code-block:: yaml
get the diff:
junos:
- diff
- id: 10
Parameters:
Optional
* id:
The rollback id value [0-49]. (default = 0)
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b216b040>, <ast.Constant object at 0x7da1b2169d80>, <ast.Constant object at 0x7da1b216b1c0>, <ast.Constant object at 0x7da1b2169e10>], [<ast.Name object at 0x7da1b2169e70>, <ast.Dict object at 0x7da1b2169060>, <ast.Constant object at 0x7da1b2168490>, <ast.Constant object at 0x7da1b21699f0>]]
call[name[ret]][constant[changes]] assign[=] call[call[name[__salt__]][constant[junos.diff]], parameter[]]
return[name[ret]] | keyword[def] identifier[diff] ( identifier[name] ,** identifier[kwargs] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] :{}, literal[string] : keyword[True] , literal[string] : literal[string] }
identifier[ret] [ literal[string] ]= identifier[__salt__] [ literal[string] ](** identifier[kwargs] )
keyword[return] identifier[ret] | def diff(name, **kwargs):
"""
Gets the difference between the candidate and the current configuration.
.. code-block:: yaml
get the diff:
junos:
- diff
- id: 10
Parameters:
Optional
* id:
The rollback id value [0-49]. (default = 0)
"""
ret = {'name': name, 'changes': {}, 'result': True, 'comment': ''}
ret['changes'] = __salt__['junos.diff'](**kwargs)
return ret |
def _sig_handler(self, signum, stack):
    '''
    Handle process INT signal.
    '''
    log_debug("Got SIGINT.")
    if signum != signal.SIGINT:
        return
    # Signal the receive loop to stop.
    LLNetReal.running = False
    if self._pktqueue.qsize() == 0:
        # put dummy pkt in queue to unblock a
        # possibly stuck user thread
        self._pktqueue.put((None, None, None))
constant[
Handle process INT signal.
]
call[name[log_debug], parameter[constant[Got SIGINT.]]]
if compare[name[signum] equal[==] name[signal].SIGINT] begin[:]
name[LLNetReal].running assign[=] constant[False]
if compare[call[name[self]._pktqueue.qsize, parameter[]] equal[==] constant[0]] begin[:]
call[name[self]._pktqueue.put, parameter[tuple[[<ast.Constant object at 0x7da18f7232e0>, <ast.Constant object at 0x7da18f721cf0>, <ast.Constant object at 0x7da18f722f80>]]]] | keyword[def] identifier[_sig_handler] ( identifier[self] , identifier[signum] , identifier[stack] ):
literal[string]
identifier[log_debug] ( literal[string] )
keyword[if] identifier[signum] == identifier[signal] . identifier[SIGINT] :
identifier[LLNetReal] . identifier[running] = keyword[False]
keyword[if] identifier[self] . identifier[_pktqueue] . identifier[qsize] ()== literal[int] :
identifier[self] . identifier[_pktqueue] . identifier[put] (( keyword[None] , keyword[None] , keyword[None] )) | def _sig_handler(self, signum, stack):
"""
Handle process INT signal.
"""
log_debug('Got SIGINT.')
if signum == signal.SIGINT:
LLNetReal.running = False
if self._pktqueue.qsize() == 0: # put dummy pkt in queue to unblock a
# possibly stuck user thread
self._pktqueue.put((None, None, None)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def get_certificate():
    ''' Read openvswitch certificate from disk '''
    if not os.path.exists(CERT_PATH):
        log('Certificate not found', level=WARNING)
        return None

    log('Reading ovs certificate from {}'.format(CERT_PATH))
    with open(CERT_PATH, 'r') as cert:
        raw = cert.read()

    begin_marker = "-----BEGIN CERTIFICATE-----"
    end_marker = "-----END CERTIFICATE-----"
    start = raw.find(begin_marker)
    stop = raw.rfind(end_marker)
    if start == -1 or stop == -1:
        raise RuntimeError("Certificate does not contain valid begin"
                           " and end markers.")
    # Trim everything outside the certificate markers.
    return raw[start:stop + len(end_marker)]
constant[ Read openvswitch certificate from disk ]
if call[name[os].path.exists, parameter[name[CERT_PATH]]] begin[:]
call[name[log], parameter[call[constant[Reading ovs certificate from {}].format, parameter[name[CERT_PATH]]]]]
with call[name[open], parameter[name[CERT_PATH], constant[r]]] begin[:]
variable[full_cert] assign[=] call[name[cert].read, parameter[]]
variable[begin_marker] assign[=] constant[-----BEGIN CERTIFICATE-----]
variable[end_marker] assign[=] constant[-----END CERTIFICATE-----]
variable[begin_index] assign[=] call[name[full_cert].find, parameter[name[begin_marker]]]
variable[end_index] assign[=] call[name[full_cert].rfind, parameter[name[end_marker]]]
if <ast.BoolOp object at 0x7da18dc9b6d0> begin[:]
<ast.Raise object at 0x7da18dc99f90>
variable[full_cert] assign[=] call[name[full_cert]][<ast.Slice object at 0x7da18dc9bb80>]
return[name[full_cert]] | keyword[def] identifier[get_certificate] ():
literal[string]
keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[CERT_PATH] ):
identifier[log] ( literal[string] . identifier[format] ( identifier[CERT_PATH] ))
keyword[with] identifier[open] ( identifier[CERT_PATH] , literal[string] ) keyword[as] identifier[cert] :
identifier[full_cert] = identifier[cert] . identifier[read] ()
identifier[begin_marker] = literal[string]
identifier[end_marker] = literal[string]
identifier[begin_index] = identifier[full_cert] . identifier[find] ( identifier[begin_marker] )
identifier[end_index] = identifier[full_cert] . identifier[rfind] ( identifier[end_marker] )
keyword[if] identifier[end_index] ==- literal[int] keyword[or] identifier[begin_index] ==- literal[int] :
keyword[raise] identifier[RuntimeError] ( literal[string]
literal[string] )
identifier[full_cert] = identifier[full_cert] [ identifier[begin_index] :( identifier[end_index] + identifier[len] ( identifier[end_marker] ))]
keyword[return] identifier[full_cert]
keyword[else] :
identifier[log] ( literal[string] , identifier[level] = identifier[WARNING] )
keyword[return] keyword[None] | def get_certificate():
""" Read openvswitch certificate from disk """
if os.path.exists(CERT_PATH):
log('Reading ovs certificate from {}'.format(CERT_PATH))
with open(CERT_PATH, 'r') as cert:
full_cert = cert.read()
begin_marker = '-----BEGIN CERTIFICATE-----'
end_marker = '-----END CERTIFICATE-----'
begin_index = full_cert.find(begin_marker)
end_index = full_cert.rfind(end_marker)
if end_index == -1 or begin_index == -1:
raise RuntimeError('Certificate does not contain valid begin and end markers.') # depends on [control=['if'], data=[]]
full_cert = full_cert[begin_index:end_index + len(end_marker)]
return full_cert # depends on [control=['with'], data=['cert']] # depends on [control=['if'], data=[]]
else:
log('Certificate not found', level=WARNING)
return None |
def get_shipping_cost(settings, country_code=None, name=None):
    """Return the shipping cost for a given country code and shipping option (shipping rate name)
    """
    rate_info = None
    if settings.default_shipping_enabled:
        rate_info = {
            "rate": settings.default_shipping_rate,
            "description": "Standard shipping to rest of world",
            "carrier": settings.default_shipping_carrier,
        }
    elif not country_code:
        raise InvalidShippingCountry

    if country_code:
        # A country-specific rate, when present, overrides the default.
        matches = models.ShippingRate.objects.filter(
            countries__in=[country_code], name=name)
        if matches.count() != 1:
            raise InvalidShippingRate()
        match = matches[0]
        rate_info = {
            "rate": match.rate,
            "description": match.description,
            "carrier": match.carrier,
        }
    return rate_info
constant[Return the shipping cost for a given country code and shipping option (shipping rate name)
]
variable[shipping_rate] assign[=] constant[None]
if name[settings].default_shipping_enabled begin[:]
variable[shipping_rate] assign[=] dictionary[[<ast.Constant object at 0x7da20c991750>, <ast.Constant object at 0x7da20c991b10>, <ast.Constant object at 0x7da20c993bb0>], [<ast.Attribute object at 0x7da20c9934f0>, <ast.Constant object at 0x7da20c9928f0>, <ast.Attribute object at 0x7da20c990e20>]]
if name[country_code] begin[:]
variable[qrs] assign[=] call[name[models].ShippingRate.objects.filter, parameter[]]
variable[count] assign[=] call[name[qrs].count, parameter[]]
if compare[name[count] equal[==] constant[1]] begin[:]
variable[shipping_rate_qrs] assign[=] call[name[qrs]][constant[0]]
variable[shipping_rate] assign[=] dictionary[[<ast.Constant object at 0x7da1b2347370>, <ast.Constant object at 0x7da1b2345060>, <ast.Constant object at 0x7da1b23465f0>], [<ast.Attribute object at 0x7da1b2346e60>, <ast.Attribute object at 0x7da1b2346bc0>, <ast.Attribute object at 0x7da1b23469b0>]]
return[name[shipping_rate]] | keyword[def] identifier[get_shipping_cost] ( identifier[settings] , identifier[country_code] = keyword[None] , identifier[name] = keyword[None] ):
literal[string]
identifier[shipping_rate] = keyword[None]
keyword[if] identifier[settings] . identifier[default_shipping_enabled] :
identifier[shipping_rate] ={
literal[string] : identifier[settings] . identifier[default_shipping_rate] ,
literal[string] : literal[string] ,
literal[string] : identifier[settings] . identifier[default_shipping_carrier]
}
keyword[elif] keyword[not] identifier[country_code] :
keyword[raise] identifier[InvalidShippingCountry]
keyword[if] identifier[country_code] :
identifier[qrs] = identifier[models] . identifier[ShippingRate] . identifier[objects] . identifier[filter] ( identifier[countries__in] =[ identifier[country_code] ], identifier[name] = identifier[name] )
identifier[count] = identifier[qrs] . identifier[count] ()
keyword[if] identifier[count] == literal[int] :
identifier[shipping_rate_qrs] = identifier[qrs] [ literal[int] ]
keyword[else] :
keyword[raise] identifier[InvalidShippingRate] ()
identifier[shipping_rate] ={
literal[string] : identifier[shipping_rate_qrs] . identifier[rate] ,
literal[string] : identifier[shipping_rate_qrs] . identifier[description] ,
literal[string] : identifier[shipping_rate_qrs] . identifier[carrier] }
keyword[return] identifier[shipping_rate] | def get_shipping_cost(settings, country_code=None, name=None):
"""Return the shipping cost for a given country code and shipping option (shipping rate name)
"""
shipping_rate = None
if settings.default_shipping_enabled:
shipping_rate = {'rate': settings.default_shipping_rate, 'description': 'Standard shipping to rest of world', 'carrier': settings.default_shipping_carrier} # depends on [control=['if'], data=[]]
elif not country_code:
raise InvalidShippingCountry # depends on [control=['if'], data=[]]
if country_code:
qrs = models.ShippingRate.objects.filter(countries__in=[country_code], name=name)
count = qrs.count()
if count == 1:
shipping_rate_qrs = qrs[0] # depends on [control=['if'], data=[]]
else:
raise InvalidShippingRate()
shipping_rate = {'rate': shipping_rate_qrs.rate, 'description': shipping_rate_qrs.description, 'carrier': shipping_rate_qrs.carrier} # depends on [control=['if'], data=[]]
return shipping_rate |
def wrap_xblock(self, block, view, frag, context):  # pylint: disable=W0613
    """
    Wrap the fragment in a div identifying the xblock, writing the
    json_init_args out into a script tag.

    If the subclass still defines a (deprecated) `wrap_child` method,
    delegate to it with a warning. Override this implementation if your
    runtime needs javascript in the wrapper.
    """
    if hasattr(self, 'wrap_child'):
        log.warning("wrap_child is deprecated in favor of wrap_xblock and wrap_aside %s", self.__class__)
        return self.wrap_child(block, view, frag, context)  # pylint: disable=no-member

    extra_data = {}
    if block.name:
        extra_data = {'name': block.name}
    return self._wrap_ele(block, view, frag, extra_data)
constant[
Creates a div which identifies the xblock and writes out the json_init_args into a script tag.
If there's a `wrap_child` method, it calls that with a deprecation warning.
The default implementation creates a frag to wraps frag w/ a div identifying the xblock. If you have
javascript, you'll need to override this impl
]
if call[name[hasattr], parameter[name[self], constant[wrap_child]]] begin[:]
call[name[log].warning, parameter[constant[wrap_child is deprecated in favor of wrap_xblock and wrap_aside %s], name[self].__class__]]
return[call[name[self].wrap_child, parameter[name[block], name[view], name[frag], name[context]]]]
variable[extra_data] assign[=] <ast.IfExp object at 0x7da18f7210c0>
return[call[name[self]._wrap_ele, parameter[name[block], name[view], name[frag], name[extra_data]]]] | keyword[def] identifier[wrap_xblock] ( identifier[self] , identifier[block] , identifier[view] , identifier[frag] , identifier[context] ):
literal[string]
keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ):
identifier[log] . identifier[warning] ( literal[string] , identifier[self] . identifier[__class__] )
keyword[return] identifier[self] . identifier[wrap_child] ( identifier[block] , identifier[view] , identifier[frag] , identifier[context] )
identifier[extra_data] ={ literal[string] : identifier[block] . identifier[name] } keyword[if] identifier[block] . identifier[name] keyword[else] {}
keyword[return] identifier[self] . identifier[_wrap_ele] ( identifier[block] , identifier[view] , identifier[frag] , identifier[extra_data] ) | def wrap_xblock(self, block, view, frag, context): # pylint: disable=W0613
"\n Creates a div which identifies the xblock and writes out the json_init_args into a script tag.\n\n If there's a `wrap_child` method, it calls that with a deprecation warning.\n\n The default implementation creates a frag to wraps frag w/ a div identifying the xblock. If you have\n javascript, you'll need to override this impl\n "
if hasattr(self, 'wrap_child'):
log.warning('wrap_child is deprecated in favor of wrap_xblock and wrap_aside %s', self.__class__)
return self.wrap_child(block, view, frag, context) # pylint: disable=no-member # depends on [control=['if'], data=[]]
extra_data = {'name': block.name} if block.name else {}
return self._wrap_ele(block, view, frag, extra_data) |
def sign_message(message, wif, hashfn=hashlib.sha256):
    """ Sign a message with an ECDSA private key given in WIF format.

        Produces a 65-byte "compact" recoverable signature: one recovery
        byte followed by the 64-byte ``r||s`` pair. Three backends are
        supported, selected by the module-level ``SECP256K1_MODULE``:
        ``secp256k1`` (libsecp256k1 bindings), ``cryptography``, and the
        pure-python ``ecdsa`` fallback. Each backend loops until a
        canonical (low-S, 32-byte r and s) signature is produced.

        :param message: Message to sign; ``str`` input is encoded as UTF-8
        :param str wif: Private key in WIF format
        :param hashfn: Digest function applied to the message
            (default: ``hashlib.sha256``)
        :return: compact recoverable signature
        :rtype: bytes
    """
    if not isinstance(message, bytes):
        message = bytes(message, "utf-8")
    digest = hashfn(message).digest()
    priv_key = PrivateKey(wif)
    p = bytes(priv_key)
    if SECP256K1_MODULE == "secp256k1":
        ndata = secp256k1.ffi.new("const int *ndata")
        ndata[0] = 0
        while True:
            # extra entropy counter: bump until a canonical sig comes out
            ndata[0] += 1
            privkey = secp256k1.PrivateKey(p, raw=True)
            sig = secp256k1.ffi.new("secp256k1_ecdsa_recoverable_signature *")
            signed = secp256k1.lib.secp256k1_ecdsa_sign_recoverable(
                privkey.ctx, sig, digest, privkey.private_key, secp256k1.ffi.NULL, ndata
            )
            if signed != 1:  # pragma: no cover
                raise AssertionError()
            signature, i = privkey.ecdsa_recoverable_serialize(sig)
            if _is_canonical(signature):
                i += 4  # compressed
                i += 27  # compact
                break
    elif SECP256K1_MODULE == "cryptography":
        cnt = 0
        private_key = ec.derive_private_key(
            int(repr(priv_key), 16), ec.SECP256K1(), default_backend()
        )
        public_key = private_key.public_key()
        while True:
            cnt += 1
            if not cnt % 20:  # pragma: no cover
                log.info(
                    "Still searching for a canonical signature. Tried %d times already!"
                    % cnt
                )
            order = ecdsa.SECP256k1.order
            sigder = private_key.sign(message, ec.ECDSA(hashes.SHA256()))
            r, s = decode_dss_signature(sigder)
            signature = ecdsa.util.sigencode_string(r, s, order)
            # Make sure signature is canonical!
            sigder = bytearray(sigder)
            lenR = sigder[3]
            lenS = sigder[5 + lenR]
            # BUGFIX: compare ints with ==, not "is" -- identity equality of
            # small ints is a CPython caching accident (SyntaxWarning on 3.8+)
            if lenR == 32 and lenS == 32:
                # Derive the recovery parameter
                i = recoverPubkeyParameter(message, digest, signature, public_key)
                i += 4  # compressed
                i += 27  # compact
                break
    else:  # pragma: no branch # pragma: no cover
        cnt = 0
        sk = ecdsa.SigningKey.from_string(p, curve=ecdsa.SECP256k1)
        while True:
            cnt += 1
            if not cnt % 20:  # pragma: no branch
                log.info(
                    "Still searching for a canonical signature. Tried %d times already!"
                    % cnt
                )
            # Deterministic k
            k = ecdsa.rfc6979.generate_k(
                sk.curve.generator.order(),
                sk.privkey.secret_multiplier,
                hashlib.sha256,
                hashlib.sha256(
                    digest
                    + struct.pack(
                        "d", time.time()
                    )  # use the local time to randomize the signature
                ).digest(),
            )
            # Sign message
            sigder = sk.sign_digest(digest, sigencode=ecdsa.util.sigencode_der, k=k)
            # Reformating of signature
            r, s = ecdsa.util.sigdecode_der(sigder, sk.curve.generator.order())
            signature = ecdsa.util.sigencode_string(r, s, sk.curve.generator.order())
            # Make sure signature is canonical!
            sigder = bytearray(sigder)
            lenR = sigder[3]
            lenS = sigder[5 + lenR]
            # BUGFIX: == instead of "is" (see note in the cryptography branch)
            if lenR == 32 and lenS == 32:
                # Derive the recovery parameter
                i = recoverPubkeyParameter(
                    message, digest, signature, sk.get_verifying_key()
                )
                i += 4  # compressed
                i += 27  # compact
                break
    # pack signature: recovery byte followed by r||s
    sigstr = struct.pack("<B", i)
    sigstr += signature
    return sigstr
constant[ Sign a digest with a wif key
:param str wif: Private key in
]
if <ast.UnaryOp object at 0x7da1b0127d90> begin[:]
variable[message] assign[=] call[name[bytes], parameter[name[message], constant[utf-8]]]
variable[digest] assign[=] call[call[name[hashfn], parameter[name[message]]].digest, parameter[]]
variable[priv_key] assign[=] call[name[PrivateKey], parameter[name[wif]]]
variable[p] assign[=] call[name[bytes], parameter[name[priv_key]]]
if compare[name[SECP256K1_MODULE] equal[==] constant[secp256k1]] begin[:]
variable[ndata] assign[=] call[name[secp256k1].ffi.new, parameter[constant[const int *ndata]]]
call[name[ndata]][constant[0]] assign[=] constant[0]
while constant[True] begin[:]
<ast.AugAssign object at 0x7da1b01274f0>
variable[privkey] assign[=] call[name[secp256k1].PrivateKey, parameter[name[p]]]
variable[sig] assign[=] call[name[secp256k1].ffi.new, parameter[constant[secp256k1_ecdsa_recoverable_signature *]]]
variable[signed] assign[=] call[name[secp256k1].lib.secp256k1_ecdsa_sign_recoverable, parameter[name[privkey].ctx, name[sig], name[digest], name[privkey].private_key, name[secp256k1].ffi.NULL, name[ndata]]]
if <ast.UnaryOp object at 0x7da1b0126dd0> begin[:]
<ast.Raise object at 0x7da1b0126d10>
<ast.Tuple object at 0x7da1b0126c50> assign[=] call[name[privkey].ecdsa_recoverable_serialize, parameter[name[sig]]]
if call[name[_is_canonical], parameter[name[signature]]] begin[:]
<ast.AugAssign object at 0x7da1b0125840>
<ast.AugAssign object at 0x7da1b01257b0>
break
variable[sigstr] assign[=] call[name[struct].pack, parameter[constant[<B], name[i]]]
<ast.AugAssign object at 0x7da20c990880>
return[name[sigstr]] | keyword[def] identifier[sign_message] ( identifier[message] , identifier[wif] , identifier[hashfn] = identifier[hashlib] . identifier[sha256] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[message] , identifier[bytes] ):
identifier[message] = identifier[bytes] ( identifier[message] , literal[string] )
identifier[digest] = identifier[hashfn] ( identifier[message] ). identifier[digest] ()
identifier[priv_key] = identifier[PrivateKey] ( identifier[wif] )
identifier[p] = identifier[bytes] ( identifier[priv_key] )
keyword[if] identifier[SECP256K1_MODULE] == literal[string] :
identifier[ndata] = identifier[secp256k1] . identifier[ffi] . identifier[new] ( literal[string] )
identifier[ndata] [ literal[int] ]= literal[int]
keyword[while] keyword[True] :
identifier[ndata] [ literal[int] ]+= literal[int]
identifier[privkey] = identifier[secp256k1] . identifier[PrivateKey] ( identifier[p] , identifier[raw] = keyword[True] )
identifier[sig] = identifier[secp256k1] . identifier[ffi] . identifier[new] ( literal[string] )
identifier[signed] = identifier[secp256k1] . identifier[lib] . identifier[secp256k1_ecdsa_sign_recoverable] (
identifier[privkey] . identifier[ctx] , identifier[sig] , identifier[digest] , identifier[privkey] . identifier[private_key] , identifier[secp256k1] . identifier[ffi] . identifier[NULL] , identifier[ndata]
)
keyword[if] keyword[not] identifier[signed] == literal[int] :
keyword[raise] identifier[AssertionError] ()
identifier[signature] , identifier[i] = identifier[privkey] . identifier[ecdsa_recoverable_serialize] ( identifier[sig] )
keyword[if] identifier[_is_canonical] ( identifier[signature] ):
identifier[i] += literal[int]
identifier[i] += literal[int]
keyword[break]
keyword[elif] identifier[SECP256K1_MODULE] == literal[string] :
identifier[cnt] = literal[int]
identifier[private_key] = identifier[ec] . identifier[derive_private_key] (
identifier[int] ( identifier[repr] ( identifier[priv_key] ), literal[int] ), identifier[ec] . identifier[SECP256K1] (), identifier[default_backend] ()
)
identifier[public_key] = identifier[private_key] . identifier[public_key] ()
keyword[while] keyword[True] :
identifier[cnt] += literal[int]
keyword[if] keyword[not] identifier[cnt] % literal[int] :
identifier[log] . identifier[info] (
literal[string]
% identifier[cnt]
)
identifier[order] = identifier[ecdsa] . identifier[SECP256k1] . identifier[order]
identifier[sigder] = identifier[private_key] . identifier[sign] ( identifier[message] , identifier[ec] . identifier[ECDSA] ( identifier[hashes] . identifier[SHA256] ()))
identifier[r] , identifier[s] = identifier[decode_dss_signature] ( identifier[sigder] )
identifier[signature] = identifier[ecdsa] . identifier[util] . identifier[sigencode_string] ( identifier[r] , identifier[s] , identifier[order] )
identifier[sigder] = identifier[bytearray] ( identifier[sigder] )
identifier[lenR] = identifier[sigder] [ literal[int] ]
identifier[lenS] = identifier[sigder] [ literal[int] + identifier[lenR] ]
keyword[if] identifier[lenR] keyword[is] literal[int] keyword[and] identifier[lenS] keyword[is] literal[int] :
identifier[i] = identifier[recoverPubkeyParameter] ( identifier[message] , identifier[digest] , identifier[signature] , identifier[public_key] )
identifier[i] += literal[int]
identifier[i] += literal[int]
keyword[break]
keyword[else] :
identifier[cnt] = literal[int]
identifier[sk] = identifier[ecdsa] . identifier[SigningKey] . identifier[from_string] ( identifier[p] , identifier[curve] = identifier[ecdsa] . identifier[SECP256k1] )
keyword[while] literal[int] :
identifier[cnt] += literal[int]
keyword[if] keyword[not] identifier[cnt] % literal[int] :
identifier[log] . identifier[info] (
literal[string]
% identifier[cnt]
)
identifier[k] = identifier[ecdsa] . identifier[rfc6979] . identifier[generate_k] (
identifier[sk] . identifier[curve] . identifier[generator] . identifier[order] (),
identifier[sk] . identifier[privkey] . identifier[secret_multiplier] ,
identifier[hashlib] . identifier[sha256] ,
identifier[hashlib] . identifier[sha256] (
identifier[digest]
+ identifier[struct] . identifier[pack] (
literal[string] , identifier[time] . identifier[time] ()
)
). identifier[digest] (),
)
identifier[sigder] = identifier[sk] . identifier[sign_digest] ( identifier[digest] , identifier[sigencode] = identifier[ecdsa] . identifier[util] . identifier[sigencode_der] , identifier[k] = identifier[k] )
identifier[r] , identifier[s] = identifier[ecdsa] . identifier[util] . identifier[sigdecode_der] ( identifier[sigder] , identifier[sk] . identifier[curve] . identifier[generator] . identifier[order] ())
identifier[signature] = identifier[ecdsa] . identifier[util] . identifier[sigencode_string] ( identifier[r] , identifier[s] , identifier[sk] . identifier[curve] . identifier[generator] . identifier[order] ())
identifier[sigder] = identifier[bytearray] ( identifier[sigder] )
identifier[lenR] = identifier[sigder] [ literal[int] ]
identifier[lenS] = identifier[sigder] [ literal[int] + identifier[lenR] ]
keyword[if] identifier[lenR] keyword[is] literal[int] keyword[and] identifier[lenS] keyword[is] literal[int] :
identifier[i] = identifier[recoverPubkeyParameter] (
identifier[message] , identifier[digest] , identifier[signature] , identifier[sk] . identifier[get_verifying_key] ()
)
identifier[i] += literal[int]
identifier[i] += literal[int]
keyword[break]
identifier[sigstr] = identifier[struct] . identifier[pack] ( literal[string] , identifier[i] )
identifier[sigstr] += identifier[signature]
keyword[return] identifier[sigstr] | def sign_message(message, wif, hashfn=hashlib.sha256):
""" Sign a digest with a wif key
:param str wif: Private key in
"""
if not isinstance(message, bytes):
message = bytes(message, 'utf-8') # depends on [control=['if'], data=[]]
digest = hashfn(message).digest()
priv_key = PrivateKey(wif)
p = bytes(priv_key)
if SECP256K1_MODULE == 'secp256k1':
ndata = secp256k1.ffi.new('const int *ndata')
ndata[0] = 0
while True:
ndata[0] += 1
privkey = secp256k1.PrivateKey(p, raw=True)
sig = secp256k1.ffi.new('secp256k1_ecdsa_recoverable_signature *')
signed = secp256k1.lib.secp256k1_ecdsa_sign_recoverable(privkey.ctx, sig, digest, privkey.private_key, secp256k1.ffi.NULL, ndata)
if not signed == 1: # pragma: no cover
raise AssertionError() # depends on [control=['if'], data=[]]
(signature, i) = privkey.ecdsa_recoverable_serialize(sig)
if _is_canonical(signature):
i += 4 # compressed
i += 27 # compact
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
elif SECP256K1_MODULE == 'cryptography':
cnt = 0
private_key = ec.derive_private_key(int(repr(priv_key), 16), ec.SECP256K1(), default_backend())
public_key = private_key.public_key()
while True:
cnt += 1
if not cnt % 20: # pragma: no cover
log.info('Still searching for a canonical signature. Tried %d times already!' % cnt) # depends on [control=['if'], data=[]]
order = ecdsa.SECP256k1.order
# signer = private_key.signer(ec.ECDSA(hashes.SHA256()))
# signer.update(message)
# sigder = signer.finalize()
sigder = private_key.sign(message, ec.ECDSA(hashes.SHA256()))
(r, s) = decode_dss_signature(sigder)
signature = ecdsa.util.sigencode_string(r, s, order)
# Make sure signature is canonical!
#
sigder = bytearray(sigder)
lenR = sigder[3]
lenS = sigder[5 + lenR]
if lenR is 32 and lenS is 32:
# Derive the recovery parameter
#
i = recoverPubkeyParameter(message, digest, signature, public_key)
i += 4 # compressed
i += 27 # compact
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]]
else: # pragma: no branch # pragma: no cover
cnt = 0
sk = ecdsa.SigningKey.from_string(p, curve=ecdsa.SECP256k1)
while 1:
cnt += 1
if not cnt % 20: # pragma: no branch
log.info('Still searching for a canonical signature. Tried %d times already!' % cnt) # depends on [control=['if'], data=[]]
# Deterministic k
#
# use the local time to randomize the signature
k = ecdsa.rfc6979.generate_k(sk.curve.generator.order(), sk.privkey.secret_multiplier, hashlib.sha256, hashlib.sha256(digest + struct.pack('d', time.time())).digest())
# Sign message
#
sigder = sk.sign_digest(digest, sigencode=ecdsa.util.sigencode_der, k=k)
# Reformating of signature
#
(r, s) = ecdsa.util.sigdecode_der(sigder, sk.curve.generator.order())
signature = ecdsa.util.sigencode_string(r, s, sk.curve.generator.order())
# Make sure signature is canonical!
#
sigder = bytearray(sigder)
lenR = sigder[3]
lenS = sigder[5 + lenR]
if lenR is 32 and lenS is 32:
# Derive the recovery parameter
#
i = recoverPubkeyParameter(message, digest, signature, sk.get_verifying_key())
i += 4 # compressed
i += 27 # compact
break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
# pack signature
#
sigstr = struct.pack('<B', i)
sigstr += signature
return sigstr |
def subword(w):
    """
    Key Expansion helper: apply the S-box to each of the four bytes of the
    input word and return the transformed output word.
    """
    rows = w.reshape(4, 8)
    # Fold left-to-right so the combination order matches byte order.
    result = SBOX[rows[0]]
    for idx in (1, 2, 3):
        result = result + SBOX[rows[idx]]
    return result
constant[
Function used in the Key Expansion routine that takes a four-byte input word
and applies an S-box to each of the four bytes to produce an output word.
]
variable[w] assign[=] call[name[w].reshape, parameter[constant[4], constant[8]]]
return[binary_operation[binary_operation[binary_operation[call[name[SBOX]][call[name[w]][constant[0]]] + call[name[SBOX]][call[name[w]][constant[1]]]] + call[name[SBOX]][call[name[w]][constant[2]]]] + call[name[SBOX]][call[name[w]][constant[3]]]]] | keyword[def] identifier[subword] ( identifier[w] ):
literal[string]
identifier[w] = identifier[w] . identifier[reshape] ( literal[int] , literal[int] )
keyword[return] identifier[SBOX] [ identifier[w] [ literal[int] ]]+ identifier[SBOX] [ identifier[w] [ literal[int] ]]+ identifier[SBOX] [ identifier[w] [ literal[int] ]]+ identifier[SBOX] [ identifier[w] [ literal[int] ]] | def subword(w):
"""
Function used in the Key Expansion routine that takes a four-byte input word
and applies an S-box to each of the four bytes to produce an output word.
"""
w = w.reshape(4, 8)
return SBOX[w[0]] + SBOX[w[1]] + SBOX[w[2]] + SBOX[w[3]] |
def port_policy_absent(name, sel_type=None, protocol=None, port=None):
    '''
    .. versionadded:: 2019.2.0
    Ensure that no SELinux port policy exists for the given port, protocol
    and (optionally) SELinux context type.
    name
        The protocol and port spec. Can be formatted as ``(tcp|udp)/(port|port-range)``.
    sel_type
        The SELinux Type. Optional; can be used in determining if policy is present,
        ignored by ``semanage port --delete``.
    protocol
        The protocol for the port, ``tcp`` or ``udp``. Required if name is not formatted.
    port
        The port or port range. Required if name is not formatted.
    '''
    ret = {'name': name,
           'result': False,
           'changes': {},
           'comment': ''}
    old_state = __salt__['selinux.port_get_policy'](
        name=name, sel_type=sel_type, protocol=protocol, port=port)
    # Nothing to do when no matching policy exists.
    if not old_state:
        ret['result'] = True
        ret['comment'] = (
            'SELinux policy for "{0}" already absent '.format(name) +
            'with specified sel_type "{0}", protocol "{1}" and port "{2}".'.format(
                sel_type, protocol, port))
        return ret
    if __opts__['test']:
        # Dry run: report that a change would happen.
        ret['result'] = None
    else:
        delete_ret = __salt__['selinux.port_delete_policy'](
            name=name, protocol=protocol, port=port)
        if delete_ret['retcode'] != 0:
            ret['comment'] = 'Error deleting policy: {0}'.format(delete_ret)
        else:
            ret['result'] = True
    new_state = __salt__['selinux.port_get_policy'](
        name=name, sel_type=sel_type, protocol=protocol, port=port)
    ret['changes'].update({'old': old_state, 'new': new_state})
    return ret
constant[
.. versionadded:: 2019.2.0
Makes sure an SELinux port policy for a given port, protocol and SELinux context type is absent.
name
The protocol and port spec. Can be formatted as ``(tcp|udp)/(port|port-range)``.
sel_type
The SELinux Type. Optional; can be used in determining if policy is present,
ignored by ``semanage port --delete``.
protocol
The protocol for the port, ``tcp`` or ``udp``. Required if name is not formatted.
port
The port or port range. Required if name is not formatted.
]
variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da18f812b30>, <ast.Constant object at 0x7da18f812f20>, <ast.Constant object at 0x7da18f813a00>, <ast.Constant object at 0x7da18f810af0>], [<ast.Name object at 0x7da18f811ba0>, <ast.Constant object at 0x7da18f8116f0>, <ast.Dict object at 0x7da18f813670>, <ast.Constant object at 0x7da18f813730>]]
variable[old_state] assign[=] call[call[name[__salt__]][constant[selinux.port_get_policy]], parameter[]]
if <ast.UnaryOp object at 0x7da18f812ec0> begin[:]
call[name[ret].update, parameter[dictionary[[<ast.Constant object at 0x7da18f811f60>, <ast.Constant object at 0x7da18f810250>], [<ast.Constant object at 0x7da18f811d50>, <ast.BinOp object at 0x7da18f813f40>]]]]
return[name[ret]]
if call[name[__opts__]][constant[test]] begin[:]
call[name[ret].update, parameter[dictionary[[<ast.Constant object at 0x7da18f810d00>], [<ast.Constant object at 0x7da18f8132e0>]]]]
return[name[ret]] | keyword[def] identifier[port_policy_absent] ( identifier[name] , identifier[sel_type] = keyword[None] , identifier[protocol] = keyword[None] , identifier[port] = keyword[None] ):
literal[string]
identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[False] , literal[string] :{}, literal[string] : literal[string] }
identifier[old_state] = identifier[__salt__] [ literal[string] ](
identifier[name] = identifier[name] ,
identifier[sel_type] = identifier[sel_type] ,
identifier[protocol] = identifier[protocol] ,
identifier[port] = identifier[port] ,)
keyword[if] keyword[not] identifier[old_state] :
identifier[ret] . identifier[update] ({ literal[string] : keyword[True] ,
literal[string] : literal[string] . identifier[format] ( identifier[name] )+
literal[string] . identifier[format] (
identifier[sel_type] , identifier[protocol] , identifier[port] )})
keyword[return] identifier[ret]
keyword[if] identifier[__opts__] [ literal[string] ]:
identifier[ret] . identifier[update] ({ literal[string] : keyword[None] })
keyword[else] :
identifier[delete_ret] = identifier[__salt__] [ literal[string] ](
identifier[name] = identifier[name] ,
identifier[protocol] = identifier[protocol] ,
identifier[port] = identifier[port] ,)
keyword[if] identifier[delete_ret] [ literal[string] ]!= literal[int] :
identifier[ret] . identifier[update] ({ literal[string] : literal[string] . identifier[format] ( identifier[delete_ret] )})
keyword[else] :
identifier[ret] . identifier[update] ({ literal[string] : keyword[True] })
identifier[new_state] = identifier[__salt__] [ literal[string] ](
identifier[name] = identifier[name] ,
identifier[sel_type] = identifier[sel_type] ,
identifier[protocol] = identifier[protocol] ,
identifier[port] = identifier[port] ,)
identifier[ret] [ literal[string] ]. identifier[update] ({ literal[string] : identifier[old_state] , literal[string] : identifier[new_state] })
keyword[return] identifier[ret] | def port_policy_absent(name, sel_type=None, protocol=None, port=None):
"""
.. versionadded:: 2019.2.0
Makes sure an SELinux port policy for a given port, protocol and SELinux context type is absent.
name
The protocol and port spec. Can be formatted as ``(tcp|udp)/(port|port-range)``.
sel_type
The SELinux Type. Optional; can be used in determining if policy is present,
ignored by ``semanage port --delete``.
protocol
The protocol for the port, ``tcp`` or ``udp``. Required if name is not formatted.
port
The port or port range. Required if name is not formatted.
"""
ret = {'name': name, 'result': False, 'changes': {}, 'comment': ''}
old_state = __salt__['selinux.port_get_policy'](name=name, sel_type=sel_type, protocol=protocol, port=port)
if not old_state:
ret.update({'result': True, 'comment': 'SELinux policy for "{0}" already absent '.format(name) + 'with specified sel_type "{0}", protocol "{1}" and port "{2}".'.format(sel_type, protocol, port)})
return ret # depends on [control=['if'], data=[]]
if __opts__['test']:
ret.update({'result': None}) # depends on [control=['if'], data=[]]
else:
delete_ret = __salt__['selinux.port_delete_policy'](name=name, protocol=protocol, port=port)
if delete_ret['retcode'] != 0:
ret.update({'comment': 'Error deleting policy: {0}'.format(delete_ret)}) # depends on [control=['if'], data=[]]
else:
ret.update({'result': True})
new_state = __salt__['selinux.port_get_policy'](name=name, sel_type=sel_type, protocol=protocol, port=port)
ret['changes'].update({'old': old_state, 'new': new_state})
return ret |
def reinitialize_command(self, command, reinit_subcommands=0):
    """
    Supplement reinitialize_command to work around
    http://bugs.python.org/issue20819
    """
    cmd = self.distribution.reinitialize_command(command, reinit_subcommands)
    # The workaround: install/install_lib commands must not carry a stale
    # install_lib value, so reset it after reinitialization.
    if command == 'install' or command == 'install_lib':
        cmd.install_lib = None
    return cmd
constant[
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
]
variable[cmd] assign[=] call[name[self].distribution.reinitialize_command, parameter[name[command], name[reinit_subcommands]]]
if compare[name[command] in tuple[[<ast.Constant object at 0x7da1b1b873a0>, <ast.Constant object at 0x7da1b1b87580>]]] begin[:]
name[cmd].install_lib assign[=] constant[None]
return[name[cmd]] | keyword[def] identifier[reinitialize_command] ( identifier[self] , identifier[command] , identifier[reinit_subcommands] = literal[int] ):
literal[string]
identifier[cmd] = identifier[self] . identifier[distribution] . identifier[reinitialize_command] (
identifier[command] , identifier[reinit_subcommands] )
keyword[if] identifier[command] keyword[in] ( literal[string] , literal[string] ):
identifier[cmd] . identifier[install_lib] = keyword[None]
keyword[return] identifier[cmd] | def reinitialize_command(self, command, reinit_subcommands=0):
"""
Supplement reinitialize_command to work around
http://bugs.python.org/issue20819
"""
cmd = self.distribution.reinitialize_command(command, reinit_subcommands)
if command in ('install', 'install_lib'):
cmd.install_lib = None # depends on [control=['if'], data=[]]
return cmd |
def remaining(self):
    """Number of bytes left in the current context; at top level (depth 0)
    the stream-wide sentinel is returned instead."""
    if self.depth != 0:
        return self.limit - self.queue.position
    return _STREAM_REMAINING
constant[Determines how many bytes are remaining in the current context.]
if compare[name[self].depth equal[==] constant[0]] begin[:]
return[name[_STREAM_REMAINING]]
return[binary_operation[name[self].limit - name[self].queue.position]] | keyword[def] identifier[remaining] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[depth] == literal[int] :
keyword[return] identifier[_STREAM_REMAINING]
keyword[return] identifier[self] . identifier[limit] - identifier[self] . identifier[queue] . identifier[position] | def remaining(self):
"""Determines how many bytes are remaining in the current context."""
if self.depth == 0:
return _STREAM_REMAINING # depends on [control=['if'], data=[]]
return self.limit - self.queue.position |
def get_obj_doc0(obj, alt="(no doc)"):
    """Return the first line of obj.__doc__, or *alt* when there is no docstring."""
    doc = obj.__doc__
    if doc is None:
        return alt
    return doc.strip().split("\n")[0]
return ret | def function[get_obj_doc0, parameter[obj, alt]]:
constant[Returns first line of cls.__doc__, or alternative text]
variable[ret] assign[=] <ast.IfExp object at 0x7da18dc07d60>
return[name[ret]] | keyword[def] identifier[get_obj_doc0] ( identifier[obj] , identifier[alt] = literal[string] ):
literal[string]
identifier[ret] = identifier[obj] . identifier[__doc__] . identifier[strip] (). identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[obj] . identifier[__doc__] keyword[is] keyword[not] keyword[None] keyword[else] identifier[alt]
keyword[return] identifier[ret] | def get_obj_doc0(obj, alt='(no doc)'):
"""Returns first line of cls.__doc__, or alternative text"""
ret = obj.__doc__.strip().split('\n')[0] if obj.__doc__ is not None else alt
return ret |
def get_parent_folder_name(file_path):
    """Finds parent folder of file
    :param file_path: path
    :return: Name of folder container
    """
    parent_dir = os.path.dirname(os.path.abspath(file_path))
    return os.path.basename(parent_dir)
constant[Finds parent folder of file
:param file_path: path
:return: Name of folder container
]
return[call[call[name[os].path.split, parameter[call[call[name[os].path.split, parameter[call[name[os].path.abspath, parameter[name[file_path]]]]]][constant[0]]]]][<ast.UnaryOp object at 0x7da20c76c0d0>]] | keyword[def] identifier[get_parent_folder_name] ( identifier[file_path] ):
literal[string]
keyword[return] identifier[os] . identifier[path] . identifier[split] ( identifier[os] . identifier[path] . identifier[split] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[file_path] ))[ literal[int] ])[- literal[int] ] | def get_parent_folder_name(file_path):
"""Finds parent folder of file
:param file_path: path
:return: Name of folder container
"""
return os.path.split(os.path.split(os.path.abspath(file_path))[0])[-1] |
def get_resized_size(self):
    """
    Compute the (width, height) the already-cropped image should be scaled
    to so that it fits the target format dimensions. Returns None when the
    image already fits and the format does not request stretching.
    """
    fmt = self.fmt
    img_w, img_h = self.image.size
    fits_already = img_w <= self.fw and img_h <= self.fh
    if not fmt.stretch and fits_already:
        # Small image, no stretching requested: leave it alone.
        return None
    if self.image_ratio == self.format_ratio:
        # Aspect ratios match: scale straight to the format size.
        return (self.fw, self.fh)
    if self.image_ratio < self.format_ratio:
        # Image is taller than the format: match the height.
        return (self.fh * img_w / img_h, self.fh)
    # Image is wider than the format: match the width.
    return (self.fw, self.fw * img_h / img_w)
constant[
Get target size for the stretched or shirnked image to fit within the
target dimensions. Do not stretch images if not format.stretch.
Note that this method is designed to operate on already cropped image.
]
variable[f] assign[=] name[self].fmt
<ast.Tuple object at 0x7da20c6ab760> assign[=] name[self].image.size
if <ast.BoolOp object at 0x7da20c6a96f0> begin[:]
return[None]
if compare[name[self].image_ratio equal[==] name[self].format_ratio] begin[:]
return[tuple[[<ast.Attribute object at 0x7da20c6aa0e0>, <ast.Attribute object at 0x7da20c6a8790>]]] | keyword[def] identifier[get_resized_size] ( identifier[self] ):
literal[string]
identifier[f] = identifier[self] . identifier[fmt]
identifier[iw] , identifier[ih] = identifier[self] . identifier[image] . identifier[size]
keyword[if] keyword[not] identifier[f] . identifier[stretch] keyword[and] identifier[iw] <= identifier[self] . identifier[fw] keyword[and] identifier[ih] <= identifier[self] . identifier[fh] :
keyword[return]
keyword[if] identifier[self] . identifier[image_ratio] == identifier[self] . identifier[format_ratio] :
keyword[return] ( identifier[self] . identifier[fw] , identifier[self] . identifier[fh] )
keyword[elif] identifier[self] . identifier[image_ratio] < identifier[self] . identifier[format_ratio] :
keyword[return] ( identifier[self] . identifier[fh] * identifier[iw] / identifier[ih] , identifier[self] . identifier[fh] )
keyword[else] :
keyword[return] ( identifier[self] . identifier[fw] , identifier[self] . identifier[fw] * identifier[ih] / identifier[iw] ) | def get_resized_size(self):
"""
Get target size for the stretched or shirnked image to fit within the
target dimensions. Do not stretch images if not format.stretch.
Note that this method is designed to operate on already cropped image.
"""
f = self.fmt
(iw, ih) = self.image.size
if not f.stretch and iw <= self.fw and (ih <= self.fh):
return # depends on [control=['if'], data=[]]
if self.image_ratio == self.format_ratio:
# same ratio, just resize
return (self.fw, self.fh) # depends on [control=['if'], data=[]]
elif self.image_ratio < self.format_ratio:
# image taller than format
return (self.fh * iw / ih, self.fh) # depends on [control=['if'], data=[]]
else: # self.image_ratio > self.format_ratio
# image wider than format
return (self.fw, self.fw * ih / iw) |
def get_build_work_items_refs(self, project, build_id, top=None):
    """GetBuildWorkItemsRefs.
    Gets the work items associated with a build.
    :param str project: Project ID or project name
    :param int build_id: The ID of the build.
    :param int top: The maximum number of work items to return.
    :rtype: [ResourceRef]
    """
    serialize = self._serialize
    route_values = {}
    query_parameters = {}
    if project is not None:
        route_values['project'] = serialize.url('project', project, 'str')
    if build_id is not None:
        route_values['buildId'] = serialize.url('build_id', build_id, 'int')
    if top is not None:
        query_parameters['$top'] = serialize.query('top', top, 'int')
    response = self._send(
        http_method='GET',
        location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee',
        version='5.0',
        route_values=route_values,
        query_parameters=query_parameters,
    )
    collection = self._unwrap_collection(response)
    return self._deserialize('[ResourceRef]', collection)
constant[GetBuildWorkItemsRefs.
Gets the work items associated with a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return.
:rtype: [ResourceRef]
]
variable[route_values] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[route_values]][constant[project]] assign[=] call[name[self]._serialize.url, parameter[constant[project], name[project], constant[str]]]
if compare[name[build_id] is_not constant[None]] begin[:]
call[name[route_values]][constant[buildId]] assign[=] call[name[self]._serialize.url, parameter[constant[build_id], name[build_id], constant[int]]]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[top] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[$top]] assign[=] call[name[self]._serialize.query, parameter[constant[top], name[top], constant[int]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[[ResourceRef]], call[name[self]._unwrap_collection, parameter[name[response]]]]]] | keyword[def] identifier[get_build_work_items_refs] ( identifier[self] , identifier[project] , identifier[build_id] , identifier[top] = keyword[None] ):
literal[string]
identifier[route_values] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[project] , literal[string] )
keyword[if] identifier[build_id] keyword[is] keyword[not] keyword[None] :
identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[build_id] , literal[string] )
identifier[query_parameters] ={}
keyword[if] identifier[top] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[top] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[route_values] = identifier[route_values] ,
identifier[query_parameters] = identifier[query_parameters] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[self] . identifier[_unwrap_collection] ( identifier[response] )) | def get_build_work_items_refs(self, project, build_id, top=None):
"""GetBuildWorkItemsRefs.
Gets the work items associated with a build.
:param str project: Project ID or project name
:param int build_id: The ID of the build.
:param int top: The maximum number of work items to return.
:rtype: [ResourceRef]
"""
route_values = {}
if project is not None:
route_values['project'] = self._serialize.url('project', project, 'str') # depends on [control=['if'], data=['project']]
if build_id is not None:
route_values['buildId'] = self._serialize.url('build_id', build_id, 'int') # depends on [control=['if'], data=['build_id']]
query_parameters = {}
if top is not None:
query_parameters['$top'] = self._serialize.query('top', top, 'int') # depends on [control=['if'], data=['top']]
response = self._send(http_method='GET', location_id='5a21f5d2-5642-47e4-a0bd-1356e6731bee', version='5.0', route_values=route_values, query_parameters=query_parameters)
return self._deserialize('[ResourceRef]', self._unwrap_collection(response)) |
def _declarations_as_string(self, declarations):
"""
Returns a list of declarations as a formatted CSS string
:param declarations: The list of tinycss Declarations to format
:type declarations: list of tinycss.css21.Declaration
:returns: The CSS string for the declarations list
:rtype: str
"""
return ''.join('%s:%s%s;' % (
d.name,
d.value.as_css(),
' !' + d.priority if d.priority else '') for d in declarations) | def function[_declarations_as_string, parameter[self, declarations]]:
constant[
Returns a list of declarations as a formatted CSS string
:param declarations: The list of tinycss Declarations to format
:type declarations: list of tinycss.css21.Declaration
:returns: The CSS string for the declarations list
:rtype: str
]
return[call[constant[].join, parameter[<ast.GeneratorExp object at 0x7da20c993280>]]] | keyword[def] identifier[_declarations_as_string] ( identifier[self] , identifier[declarations] ):
literal[string]
keyword[return] literal[string] . identifier[join] ( literal[string] %(
identifier[d] . identifier[name] ,
identifier[d] . identifier[value] . identifier[as_css] (),
literal[string] + identifier[d] . identifier[priority] keyword[if] identifier[d] . identifier[priority] keyword[else] literal[string] ) keyword[for] identifier[d] keyword[in] identifier[declarations] ) | def _declarations_as_string(self, declarations):
"""
Returns a list of declarations as a formatted CSS string
:param declarations: The list of tinycss Declarations to format
:type declarations: list of tinycss.css21.Declaration
:returns: The CSS string for the declarations list
:rtype: str
"""
return ''.join(('%s:%s%s;' % (d.name, d.value.as_css(), ' !' + d.priority if d.priority else '') for d in declarations)) |
def es_get_class_defs(cls_def, cls_name):
    """
    Extract the Elasticsearch-specific definitions from a class definition.

    Every key starting with "kds_es" is removed from *cls_def* (mutated in
    place) and returned in a new dict.

    Args:
    -----
        cls_def: dict of class definitions; "kds_es*" entries are removed
        cls_name: name of the class (currently unused; kept for API
            compatibility with callers)

    Returns:
    --------
        dict holding the removed "kds_es"-prefixed entries
    """
    # Collect the keys first so cls_def is never mutated while iterating it,
    # then pop them in a single pass instead of building and re-deleting.
    es_keys = [key for key in cls_def if key.startswith("kds_es")]
    return {key: cls_def.pop(key) for key in es_keys}
constant[
Reads through the class defs and gets the related es class
defintions
Args:
-----
class_defs: RdfDataset of class definitions
]
variable[rtn_dict] assign[=] <ast.DictComp object at 0x7da20c990c40>
for taget[name[key]] in starred[name[rtn_dict]] begin[:]
<ast.Delete object at 0x7da20c9902b0>
return[name[rtn_dict]] | keyword[def] identifier[es_get_class_defs] ( identifier[cls_def] , identifier[cls_name] ):
literal[string]
identifier[rtn_dict] ={ identifier[key] : identifier[value] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[cls_def] . identifier[items] () keyword[if] identifier[key] . identifier[startswith] ( literal[string] )}
keyword[for] identifier[key] keyword[in] identifier[rtn_dict] :
keyword[del] identifier[cls_def] [ identifier[key] ]
keyword[return] identifier[rtn_dict] | def es_get_class_defs(cls_def, cls_name):
"""
Reads through the class defs and gets the related es class
defintions
Args:
-----
class_defs: RdfDataset of class definitions
"""
rtn_dict = {key: value for (key, value) in cls_def.items() if key.startswith('kds_es')}
for key in rtn_dict:
del cls_def[key] # depends on [control=['for'], data=['key']]
return rtn_dict |
def weekday(self):
    """Return the name of the day of the week on which the game occurred.

    :returns: String representation of the day of the week for the game.
    """
    # Index 0 is Monday, matching datetime.date.weekday().
    names = ('Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday',
             'Saturday', 'Sunday')
    return names[self.date().weekday()]
constant[Returns the day of the week on which the game occurred.
:returns: String representation of the day of the week for the game.
]
variable[days] assign[=] list[[<ast.Constant object at 0x7da1b01c3fd0>, <ast.Constant object at 0x7da1b01c1450>, <ast.Constant object at 0x7da1b01c3a30>, <ast.Constant object at 0x7da1b01c0dc0>, <ast.Constant object at 0x7da1b01c1480>, <ast.Constant object at 0x7da1b01c0610>, <ast.Constant object at 0x7da1b01c2bf0>]]
variable[date] assign[=] call[name[self].date, parameter[]]
variable[wd] assign[=] call[name[date].weekday, parameter[]]
return[call[name[days]][name[wd]]] | keyword[def] identifier[weekday] ( identifier[self] ):
literal[string]
identifier[days] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] ]
identifier[date] = identifier[self] . identifier[date] ()
identifier[wd] = identifier[date] . identifier[weekday] ()
keyword[return] identifier[days] [ identifier[wd] ] | def weekday(self):
"""Returns the day of the week on which the game occurred.
:returns: String representation of the day of the week for the game.
"""
days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
date = self.date()
wd = date.weekday()
return days[wd] |
def get_provider(self, id):
    '''
    Get a provider by id or :term:`uri`.

    :param str id: The identifier for the provider. This can either be the
        id with which it was registered or the :term:`uri` of the
        conceptscheme that the provider services.
    :returns: A :class:`skosprovider.providers.VocabularyProvider`
        or `False` if the id or uri is unknown.
    '''
    # Fast path: a directly registered provider id.
    try:
        return self.providers[id]
    except KeyError:
        pass
    # Fall back to resolving a conceptscheme URI to a registered id.
    if is_uri(id) and id in self.concept_scheme_uri_map:
        return self.providers.get(self.concept_scheme_uri_map[id], False)
    return False
constant[
Get a provider by id or :term:`uri`.
:param str id: The identifier for the provider. This can either be the
id with which it was registered or the :term:`uri` of the conceptscheme
that the provider services.
:returns: A :class:`skosprovider.providers.VocabularyProvider`
or `False` if the id or uri is unknown.
]
if compare[name[id] in name[self].providers] begin[:]
return[call[name[self].providers.get, parameter[name[id], constant[False]]]]
return[constant[False]] | keyword[def] identifier[get_provider] ( identifier[self] , identifier[id] ):
literal[string]
keyword[if] identifier[id] keyword[in] identifier[self] . identifier[providers] :
keyword[return] identifier[self] . identifier[providers] . identifier[get] ( identifier[id] , keyword[False] )
keyword[elif] identifier[is_uri] ( identifier[id] ) keyword[and] identifier[id] keyword[in] identifier[self] . identifier[concept_scheme_uri_map] :
keyword[return] identifier[self] . identifier[providers] . identifier[get] ( identifier[self] . identifier[concept_scheme_uri_map] [ identifier[id] ], keyword[False] )
keyword[return] keyword[False] | def get_provider(self, id):
"""
Get a provider by id or :term:`uri`.
:param str id: The identifier for the provider. This can either be the
id with which it was registered or the :term:`uri` of the conceptscheme
that the provider services.
:returns: A :class:`skosprovider.providers.VocabularyProvider`
or `False` if the id or uri is unknown.
"""
if id in self.providers:
return self.providers.get(id, False) # depends on [control=['if'], data=['id']]
elif is_uri(id) and id in self.concept_scheme_uri_map:
return self.providers.get(self.concept_scheme_uri_map[id], False) # depends on [control=['if'], data=[]]
return False |
def set_default_host(cls, value):
    """
    Default: "http://127.0.0.1:80"

    A string that will be automatically included at the beginning of the
    url generated for doing each http request. Passing ``None`` restores
    the default.
    """
    if value is not None:
        # Normalise whatever the caller supplied into scheme/host/port.
        scheme, host, port = get_hostname_parameters_from_url(value)
        cls.DEFAULT_HOST = "%s://%s:%s" % (scheme, host, port)
    else:
        cls.DEFAULT_HOST = "http://127.0.0.1:80"
constant[
Default: "http://127.0.0.1:80"
A string that will be automatically included at the beginning of the url generated for doing each http request.
]
if compare[name[value] is constant[None]] begin[:]
name[cls].DEFAULT_HOST assign[=] constant[http://127.0.0.1:80] | keyword[def] identifier[set_default_host] ( identifier[cls] , identifier[value] ):
literal[string]
keyword[if] identifier[value] keyword[is] keyword[None] :
identifier[cls] . identifier[DEFAULT_HOST] = literal[string]
keyword[else] :
identifier[scheme] , identifier[host] , identifier[port] = identifier[get_hostname_parameters_from_url] ( identifier[value] )
identifier[cls] . identifier[DEFAULT_HOST] = literal[string] %( identifier[scheme] , identifier[host] , identifier[port] ) | def set_default_host(cls, value):
"""
Default: "http://127.0.0.1:80"
A string that will be automatically included at the beginning of the url generated for doing each http request.
"""
if value is None:
cls.DEFAULT_HOST = 'http://127.0.0.1:80' # depends on [control=['if'], data=[]]
else:
(scheme, host, port) = get_hostname_parameters_from_url(value)
cls.DEFAULT_HOST = '%s://%s:%s' % (scheme, host, port) |
def read(self, len):
    """Refresh the content of the input buffer; the old data are
    considered consumed. This routine handles the I18N transcoding to
    internal UTF-8.

    NOTE: the parameter is named ``len`` (shadowing the builtin) to keep
    the public libxml2 binding signature unchanged.
    """
    return libxml2mod.xmlParserInputBufferRead(self._o, len)
constant[Refresh the content of the input buffer, the old data are
considered consumed This routine handle the I18N
transcoding to internal UTF-8 ]
variable[ret] assign[=] call[name[libxml2mod].xmlParserInputBufferRead, parameter[name[self]._o, name[len]]]
return[name[ret]] | keyword[def] identifier[read] ( identifier[self] , identifier[len] ):
literal[string]
identifier[ret] = identifier[libxml2mod] . identifier[xmlParserInputBufferRead] ( identifier[self] . identifier[_o] , identifier[len] )
keyword[return] identifier[ret] | def read(self, len):
"""Refresh the content of the input buffer, the old data are
considered consumed This routine handle the I18N
transcoding to internal UTF-8 """
ret = libxml2mod.xmlParserInputBufferRead(self._o, len)
return ret |
def get_attribute_cardinality(attribute):
    """
    Return the cardinality of the given resource attribute.

    :returns: One of the constants defined in
      :class:`evererst.constants.CARDINALITY_CONSTANTS`.
    :raises ValueError: If the given attribute is not a relation attribute
      (i.e., if it is a terminal attribute).
    """
    kind = attribute.kind
    if kind == RESOURCE_ATTRIBUTE_KINDS.MEMBER:
        return CARDINALITY_CONSTANTS.ONE
    if kind == RESOURCE_ATTRIBUTE_KINDS.COLLECTION:
        return CARDINALITY_CONSTANTS.MANY
    raise ValueError('Can not determine cardinality for non-terminal '
                     'attributes.')
constant[
Returns the cardinality of the given resource attribute.
:returns: One of the constants defined in
:class:`evererst.constants.CARDINALITY_CONSTANTS`.
:raises ValueError: If the given attribute is not a relation attribute
(i.e., if it is a terminal attribute).
]
if compare[name[attribute].kind equal[==] name[RESOURCE_ATTRIBUTE_KINDS].MEMBER] begin[:]
variable[card] assign[=] name[CARDINALITY_CONSTANTS].ONE
return[name[card]] | keyword[def] identifier[get_attribute_cardinality] ( identifier[attribute] ):
literal[string]
keyword[if] identifier[attribute] . identifier[kind] == identifier[RESOURCE_ATTRIBUTE_KINDS] . identifier[MEMBER] :
identifier[card] = identifier[CARDINALITY_CONSTANTS] . identifier[ONE]
keyword[elif] identifier[attribute] . identifier[kind] == identifier[RESOURCE_ATTRIBUTE_KINDS] . identifier[COLLECTION] :
identifier[card] = identifier[CARDINALITY_CONSTANTS] . identifier[MANY]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] )
keyword[return] identifier[card] | def get_attribute_cardinality(attribute):
"""
Returns the cardinality of the given resource attribute.
:returns: One of the constants defined in
:class:`evererst.constants.CARDINALITY_CONSTANTS`.
:raises ValueError: If the given attribute is not a relation attribute
(i.e., if it is a terminal attribute).
"""
if attribute.kind == RESOURCE_ATTRIBUTE_KINDS.MEMBER:
card = CARDINALITY_CONSTANTS.ONE # depends on [control=['if'], data=[]]
elif attribute.kind == RESOURCE_ATTRIBUTE_KINDS.COLLECTION:
card = CARDINALITY_CONSTANTS.MANY # depends on [control=['if'], data=[]]
else:
raise ValueError('Can not determine cardinality for non-terminal attributes.')
return card |
def p_rddl_block(self, p):
    '''rddl_block : rddl_block domain_block
                  | rddl_block instance_block
                  | rddl_block nonfluent_block
                  | empty'''
    # PLY grammar action: the docstring above IS the grammar rule and is
    # read by the parser generator at runtime -- it must not be edited as
    # prose. `p` is the YaccProduction: p[1] is the dict accumulated so
    # far (None for the `empty` production) and p[2] is a (name, block)
    # pair produced by one of the *_block rules.
    if p[1] is None:
        # Base case: start with an empty mapping of block name -> block.
        p[0] = dict()
    else:
        # Recursive case: insert the newly parsed block into the running
        # dict and propagate it as this production's value.
        name, block = p[2]
        p[1][name] = block
        p[0] = p[1]
constant[rddl_block : rddl_block domain_block
| rddl_block instance_block
| rddl_block nonfluent_block
| empty]
if compare[call[name[p]][constant[1]] is constant[None]] begin[:]
call[name[p]][constant[0]] assign[=] call[name[dict], parameter[]] | keyword[def] identifier[p_rddl_block] ( identifier[self] , identifier[p] ):
literal[string]
keyword[if] identifier[p] [ literal[int] ] keyword[is] keyword[None] :
identifier[p] [ literal[int] ]= identifier[dict] ()
keyword[else] :
identifier[name] , identifier[block] = identifier[p] [ literal[int] ]
identifier[p] [ literal[int] ][ identifier[name] ]= identifier[block]
identifier[p] [ literal[int] ]= identifier[p] [ literal[int] ] | def p_rddl_block(self, p):
"""rddl_block : rddl_block domain_block
| rddl_block instance_block
| rddl_block nonfluent_block
| empty"""
if p[1] is None:
p[0] = dict() # depends on [control=['if'], data=[]]
else:
(name, block) = p[2]
p[1][name] = block
p[0] = p[1] |
def longest_one_seg_prefix(self, word):
    """Return the longest IPA Unicode prefix of `word`.

    Args:
        word (unicode): word as IPA string

    Returns:
        unicode: longest single-segment prefix of `word`, or the empty
        string when `word` does not start with a known segment
    """
    match = self.seg_regex.match(word)
    return match.group(0) if match else ''
constant[Return longest IPA Unicode prefix of `word`
Args:
word (unicode): word as IPA string
Returns:
unicode: longest single-segment prefix of `word`
]
variable[match] assign[=] call[name[self].seg_regex.match, parameter[name[word]]]
if name[match] begin[:]
return[call[name[match].group, parameter[constant[0]]]] | keyword[def] identifier[longest_one_seg_prefix] ( identifier[self] , identifier[word] ):
literal[string]
identifier[match] = identifier[self] . identifier[seg_regex] . identifier[match] ( identifier[word] )
keyword[if] identifier[match] :
keyword[return] identifier[match] . identifier[group] ( literal[int] )
keyword[else] :
keyword[return] literal[string] | def longest_one_seg_prefix(self, word):
"""Return longest IPA Unicode prefix of `word`
Args:
word (unicode): word as IPA string
Returns:
unicode: longest single-segment prefix of `word`
"""
match = self.seg_regex.match(word)
if match:
return match.group(0) # depends on [control=['if'], data=[]]
else:
return '' |
def fetch(self, transfer_id, data={}, **kwargs):
    """Fetch Transfer for given Id.

    Args:
        transfer_id : Id for which transfer object has to be retrieved
        data : optional request parameters forwarded to the API call

    Returns:
        Transfer dict for given transfer Id
    """
    # Thin delegation to the base resource class, which builds the URL and
    # performs the HTTP request. NOTE(review): the mutable default
    # `data={}` is only safe because it is forwarded, never mutated here.
    return super(Transfer, self).fetch(transfer_id, data, **kwargs)
constant["
Fetch Transfer for given Id
Args:
transfer_id : Id for which transfer object has to be retrieved
Returns:
Transfer dict for given transfer Id
]
return[call[call[name[super], parameter[name[Transfer], name[self]]].fetch, parameter[name[transfer_id], name[data]]]] | keyword[def] identifier[fetch] ( identifier[self] , identifier[transfer_id] , identifier[data] ={},** identifier[kwargs] ):
literal[string]
keyword[return] identifier[super] ( identifier[Transfer] , identifier[self] ). identifier[fetch] ( identifier[transfer_id] , identifier[data] ,** identifier[kwargs] ) | def fetch(self, transfer_id, data={}, **kwargs):
""""
Fetch Transfer for given Id
Args:
transfer_id : Id for which transfer object has to be retrieved
Returns:
Transfer dict for given transfer Id
"""
return super(Transfer, self).fetch(transfer_id, data, **kwargs) |
def makeMNBaseURL(url):
    """Attempt to create a valid MN BaseURL when one or more sections of the URL
    are missing."""
    parsed = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_MN_PROTOCOL)
    netloc = parsed.netloc
    path = parsed.path
    if netloc and path:
        # Both sections present; use them as-is.
        pass
    elif netloc:
        # Host given but no path: fall back to the default MN path.
        path = d1_common.const.DEFAULT_MN_PATH
    elif path:
        # A bare "host/path" string parses entirely into .path; split it
        # into the host part and the remainder.
        head, sep, tail = path.partition('/')
        netloc = head
        path = tail if sep else d1_common.const.DEFAULT_MN_PATH
    else:
        # Nothing usable supplied: use the default host and path.
        netloc = d1_common.const.DEFAULT_MN_HOST
        path = d1_common.const.DEFAULT_MN_PATH
    return urllib.parse.urlunparse(
        (parsed.scheme, netloc, path, parsed.params, parsed.query, parsed.fragment)
    )
constant[Attempt to create a valid MN BaseURL when one or more sections of the URL are
missing.]
variable[o] assign[=] call[name[urllib].parse.urlparse, parameter[name[url]]]
if <ast.BoolOp object at 0x7da1b1991960> begin[:]
variable[netloc] assign[=] name[o].netloc
variable[path] assign[=] name[o].path
return[call[name[urllib].parse.urlunparse, parameter[tuple[[<ast.Attribute object at 0x7da1b1990100>, <ast.Name object at 0x7da1b1991060>, <ast.Name object at 0x7da1b1992260>, <ast.Attribute object at 0x7da1b1992ce0>, <ast.Attribute object at 0x7da1b19904f0>, <ast.Attribute object at 0x7da1b1991d50>]]]]] | keyword[def] identifier[makeMNBaseURL] ( identifier[url] ):
literal[string]
identifier[o] = identifier[urllib] . identifier[parse] . identifier[urlparse] ( identifier[url] , identifier[scheme] = identifier[d1_common] . identifier[const] . identifier[DEFAULT_MN_PROTOCOL] )
keyword[if] identifier[o] . identifier[netloc] keyword[and] identifier[o] . identifier[path] :
identifier[netloc] = identifier[o] . identifier[netloc]
identifier[path] = identifier[o] . identifier[path]
keyword[elif] identifier[o] . identifier[netloc] :
identifier[netloc] = identifier[o] . identifier[netloc]
identifier[path] = identifier[d1_common] . identifier[const] . identifier[DEFAULT_MN_PATH]
keyword[elif] identifier[o] . identifier[path] :
identifier[s] = identifier[o] . identifier[path] . identifier[split] ( literal[string] , literal[int] )
identifier[netloc] = identifier[s] [ literal[int] ]
keyword[if] identifier[len] ( identifier[s] )== literal[int] :
identifier[path] = identifier[d1_common] . identifier[const] . identifier[DEFAULT_MN_PATH]
keyword[else] :
identifier[path] = identifier[s] [ literal[int] ]
keyword[else] :
identifier[netloc] = identifier[d1_common] . identifier[const] . identifier[DEFAULT_MN_HOST]
identifier[path] = identifier[d1_common] . identifier[const] . identifier[DEFAULT_MN_PATH]
keyword[return] identifier[urllib] . identifier[parse] . identifier[urlunparse] (
( identifier[o] . identifier[scheme] , identifier[netloc] , identifier[path] , identifier[o] . identifier[params] , identifier[o] . identifier[query] , identifier[o] . identifier[fragment] )
) | def makeMNBaseURL(url):
"""Attempt to create a valid MN BaseURL when one or more sections of the URL are
missing."""
o = urllib.parse.urlparse(url, scheme=d1_common.const.DEFAULT_MN_PROTOCOL)
if o.netloc and o.path:
netloc = o.netloc
path = o.path # depends on [control=['if'], data=[]]
elif o.netloc:
netloc = o.netloc
path = d1_common.const.DEFAULT_MN_PATH # depends on [control=['if'], data=[]]
elif o.path:
s = o.path.split('/', 1)
netloc = s[0]
if len(s) == 1:
path = d1_common.const.DEFAULT_MN_PATH # depends on [control=['if'], data=[]]
else:
path = s[1] # depends on [control=['if'], data=[]]
else:
netloc = d1_common.const.DEFAULT_MN_HOST
path = d1_common.const.DEFAULT_MN_PATH
return urllib.parse.urlunparse((o.scheme, netloc, path, o.params, o.query, o.fragment)) |
def _players(self):
"""Get player attributes with index. No Gaia."""
for i in range(1, self._header.replay.num_players):
yield i, self._header.initial.players[i].attributes | def function[_players, parameter[self]]:
constant[Get player attributes with index. No Gaia.]
for taget[name[i]] in starred[call[name[range], parameter[constant[1], name[self]._header.replay.num_players]]] begin[:]
<ast.Yield object at 0x7da1b25ec280> | keyword[def] identifier[_players] ( identifier[self] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[_header] . identifier[replay] . identifier[num_players] ):
keyword[yield] identifier[i] , identifier[self] . identifier[_header] . identifier[initial] . identifier[players] [ identifier[i] ]. identifier[attributes] | def _players(self):
"""Get player attributes with index. No Gaia."""
for i in range(1, self._header.replay.num_players):
yield (i, self._header.initial.players[i].attributes) # depends on [control=['for'], data=['i']] |
def close(self):
    """Close this metrics repository."""
    # Shut down every attached reporter first, then drop all metrics.
    for active_reporter in self._reporters:
        active_reporter.close()
    self._metrics.clear()
constant[Close this metrics repository.]
for taget[name[reporter]] in starred[name[self]._reporters] begin[:]
call[name[reporter].close, parameter[]]
call[name[self]._metrics.clear, parameter[]] | keyword[def] identifier[close] ( identifier[self] ):
literal[string]
keyword[for] identifier[reporter] keyword[in] identifier[self] . identifier[_reporters] :
identifier[reporter] . identifier[close] ()
identifier[self] . identifier[_metrics] . identifier[clear] () | def close(self):
"""Close this metrics repository."""
for reporter in self._reporters:
reporter.close() # depends on [control=['for'], data=['reporter']]
self._metrics.clear() |
def _construct_first_indent(self, pos):
    """
    build spacer to occupy the first indentation level from pos to the
    left. This is separate as it adds arrowtip and sibling connector.

    :param pos: tree position whose first indent column is being built
    :returns: list of (width, widget) tuples suitable as urwid.Columns
        entries
    :raises NoSpaceError: if the connector or arrow tip is wider than the
        configured indent
    """
    cols = []
    # Blank filler placed below/beside the decoration widgets.
    void = urwid.AttrMap(urwid.SolidFill(' '), self._arrow_att)
    available_width = self._indent
    if self._tree.depth(pos) > 0:
        # Connector joining this entry to its parent's vertical bar.
        connector = self._construct_connector(pos)
        if connector is not None:
            width = connector.pack()[0]
            if width > available_width:
                raise NoSpaceError()
            available_width -= width
            if self._tree.next_sibling_position(pos) is not None:
                # A sibling follows below: continue the vertical bar.
                barw = urwid.SolidFill(self._arrow_vbar_char)
                below = urwid.AttrMap(barw, self._arrow_vbar_att or
                                      self._arrow_att)
            else:
                # Last sibling: nothing drawn below the connector.
                below = void
            # pile up connector and bar
            spacer = urwid.Pile([('pack', connector), below])
            cols.append((width, spacer))
        # arrow tip pointing at this entry
        awidth, at = self._construct_arrow_tip(pos)
        if at is not None:
            if awidth > available_width:
                raise NoSpaceError()
            available_width -= awidth
            at_spacer = urwid.Pile([('pack', at), void])
            cols.append((awidth, at_spacer))
        # bar between connector and arrow tip (inserted at position 1 so
        # it sits between the connector column and the arrow tip column)
        if available_width > 0:
            barw = urwid.SolidFill(self._arrow_hbar_char)
            bar = urwid.AttrMap(
                barw, self._arrow_hbar_att or self._arrow_att)
            hb_spacer = urwid.Pile([(1, bar), void])
            cols.insert(1, (available_width, hb_spacer))
    return cols
constant[
build spacer to occupy the first indentation level from pos to the
left. This is separate as it adds arrowtip and sibling connector.
]
variable[cols] assign[=] list[[]]
variable[void] assign[=] call[name[urwid].AttrMap, parameter[call[name[urwid].SolidFill, parameter[constant[ ]]], name[self]._arrow_att]]
variable[available_width] assign[=] name[self]._indent
if compare[call[name[self]._tree.depth, parameter[name[pos]]] greater[>] constant[0]] begin[:]
variable[connector] assign[=] call[name[self]._construct_connector, parameter[name[pos]]]
if compare[name[connector] is_not constant[None]] begin[:]
variable[width] assign[=] call[call[name[connector].pack, parameter[]]][constant[0]]
if compare[name[width] greater[>] name[available_width]] begin[:]
<ast.Raise object at 0x7da18f810160>
<ast.AugAssign object at 0x7da18f812800>
if compare[call[name[self]._tree.next_sibling_position, parameter[name[pos]]] is_not constant[None]] begin[:]
variable[barw] assign[=] call[name[urwid].SolidFill, parameter[name[self]._arrow_vbar_char]]
variable[below] assign[=] call[name[urwid].AttrMap, parameter[name[barw], <ast.BoolOp object at 0x7da20c6c7ca0>]]
variable[spacer] assign[=] call[name[urwid].Pile, parameter[list[[<ast.Tuple object at 0x7da20c6c5870>, <ast.Name object at 0x7da20c6c7a90>]]]]
call[name[cols].append, parameter[tuple[[<ast.Name object at 0x7da207f989d0>, <ast.Name object at 0x7da207f9a620>]]]]
<ast.Tuple object at 0x7da207f99210> assign[=] call[name[self]._construct_arrow_tip, parameter[name[pos]]]
if compare[name[at] is_not constant[None]] begin[:]
if compare[name[awidth] greater[>] name[available_width]] begin[:]
<ast.Raise object at 0x7da207f9b940>
<ast.AugAssign object at 0x7da207f9b7c0>
variable[at_spacer] assign[=] call[name[urwid].Pile, parameter[list[[<ast.Tuple object at 0x7da207f9aef0>, <ast.Name object at 0x7da207f9a530>]]]]
call[name[cols].append, parameter[tuple[[<ast.Name object at 0x7da207f9bdc0>, <ast.Name object at 0x7da207f9b340>]]]]
if compare[name[available_width] greater[>] constant[0]] begin[:]
variable[barw] assign[=] call[name[urwid].SolidFill, parameter[name[self]._arrow_hbar_char]]
variable[bar] assign[=] call[name[urwid].AttrMap, parameter[name[barw], <ast.BoolOp object at 0x7da207f99690>]]
variable[hb_spacer] assign[=] call[name[urwid].Pile, parameter[list[[<ast.Tuple object at 0x7da207f994b0>, <ast.Name object at 0x7da207f981f0>]]]]
call[name[cols].insert, parameter[constant[1], tuple[[<ast.Name object at 0x7da207f9ab60>, <ast.Name object at 0x7da207f9b250>]]]]
return[name[cols]] | keyword[def] identifier[_construct_first_indent] ( identifier[self] , identifier[pos] ):
literal[string]
identifier[cols] =[]
identifier[void] = identifier[urwid] . identifier[AttrMap] ( identifier[urwid] . identifier[SolidFill] ( literal[string] ), identifier[self] . identifier[_arrow_att] )
identifier[available_width] = identifier[self] . identifier[_indent]
keyword[if] identifier[self] . identifier[_tree] . identifier[depth] ( identifier[pos] )> literal[int] :
identifier[connector] = identifier[self] . identifier[_construct_connector] ( identifier[pos] )
keyword[if] identifier[connector] keyword[is] keyword[not] keyword[None] :
identifier[width] = identifier[connector] . identifier[pack] ()[ literal[int] ]
keyword[if] identifier[width] > identifier[available_width] :
keyword[raise] identifier[NoSpaceError] ()
identifier[available_width] -= identifier[width]
keyword[if] identifier[self] . identifier[_tree] . identifier[next_sibling_position] ( identifier[pos] ) keyword[is] keyword[not] keyword[None] :
identifier[barw] = identifier[urwid] . identifier[SolidFill] ( identifier[self] . identifier[_arrow_vbar_char] )
identifier[below] = identifier[urwid] . identifier[AttrMap] ( identifier[barw] , identifier[self] . identifier[_arrow_vbar_att] keyword[or]
identifier[self] . identifier[_arrow_att] )
keyword[else] :
identifier[below] = identifier[void]
identifier[spacer] = identifier[urwid] . identifier[Pile] ([( literal[string] , identifier[connector] ), identifier[below] ])
identifier[cols] . identifier[append] (( identifier[width] , identifier[spacer] ))
identifier[awidth] , identifier[at] = identifier[self] . identifier[_construct_arrow_tip] ( identifier[pos] )
keyword[if] identifier[at] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[awidth] > identifier[available_width] :
keyword[raise] identifier[NoSpaceError] ()
identifier[available_width] -= identifier[awidth]
identifier[at_spacer] = identifier[urwid] . identifier[Pile] ([( literal[string] , identifier[at] ), identifier[void] ])
identifier[cols] . identifier[append] (( identifier[awidth] , identifier[at_spacer] ))
keyword[if] identifier[available_width] > literal[int] :
identifier[barw] = identifier[urwid] . identifier[SolidFill] ( identifier[self] . identifier[_arrow_hbar_char] )
identifier[bar] = identifier[urwid] . identifier[AttrMap] (
identifier[barw] , identifier[self] . identifier[_arrow_hbar_att] keyword[or] identifier[self] . identifier[_arrow_att] )
identifier[hb_spacer] = identifier[urwid] . identifier[Pile] ([( literal[int] , identifier[bar] ), identifier[void] ])
identifier[cols] . identifier[insert] ( literal[int] ,( identifier[available_width] , identifier[hb_spacer] ))
keyword[return] identifier[cols] | def _construct_first_indent(self, pos):
"""
build spacer to occupy the first indentation level from pos to the
left. This is separate as it adds arrowtip and sibling connector.
"""
cols = []
void = urwid.AttrMap(urwid.SolidFill(' '), self._arrow_att)
available_width = self._indent
if self._tree.depth(pos) > 0:
connector = self._construct_connector(pos)
if connector is not None:
width = connector.pack()[0]
if width > available_width:
raise NoSpaceError() # depends on [control=['if'], data=[]]
available_width -= width
if self._tree.next_sibling_position(pos) is not None:
barw = urwid.SolidFill(self._arrow_vbar_char)
below = urwid.AttrMap(barw, self._arrow_vbar_att or self._arrow_att) # depends on [control=['if'], data=[]]
else:
below = void
# pile up connector and bar
spacer = urwid.Pile([('pack', connector), below])
cols.append((width, spacer)) # depends on [control=['if'], data=['connector']]
#arrow tip
(awidth, at) = self._construct_arrow_tip(pos)
if at is not None:
if awidth > available_width:
raise NoSpaceError() # depends on [control=['if'], data=[]]
available_width -= awidth
at_spacer = urwid.Pile([('pack', at), void])
cols.append((awidth, at_spacer)) # depends on [control=['if'], data=['at']]
# bar between connector and arrow tip
if available_width > 0:
barw = urwid.SolidFill(self._arrow_hbar_char)
bar = urwid.AttrMap(barw, self._arrow_hbar_att or self._arrow_att)
hb_spacer = urwid.Pile([(1, bar), void])
cols.insert(1, (available_width, hb_spacer)) # depends on [control=['if'], data=['available_width']] # depends on [control=['if'], data=[]]
return cols |
def get(self, sid):
    """
    Constructs a FieldTypeContext

    :param sid: The unique string that identifies the resource

    :returns: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
    :rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
    """
    assistant_sid = self._solution['assistant_sid']
    return FieldTypeContext(self._version, assistant_sid=assistant_sid, sid=sid)
constant[
Constructs a FieldTypeContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
:rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
]
return[call[name[FieldTypeContext], parameter[name[self]._version]]] | keyword[def] identifier[get] ( identifier[self] , identifier[sid] ):
literal[string]
keyword[return] identifier[FieldTypeContext] ( identifier[self] . identifier[_version] , identifier[assistant_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[sid] = identifier[sid] ,) | def get(self, sid):
"""
Constructs a FieldTypeContext
:param sid: The unique string that identifies the resource
:returns: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
:rtype: twilio.rest.autopilot.v1.assistant.field_type.FieldTypeContext
"""
return FieldTypeContext(self._version, assistant_sid=self._solution['assistant_sid'], sid=sid) |
def execute(self):
    """
    Execute the task, storing the produced sync map inside the task object.

    :raises: :class:`~aeneas.executetask.ExecuteTaskInputError`: if there is a problem with the input parameters
    :raises: :class:`~aeneas.executetask.ExecuteTaskExecutionError`: if there is a problem during the task execution
    """
    self.log(u"Executing task...")
    # validate the audio input; each failed check logs and raises
    audio = self.task.audio_file
    if audio is None:
        self.log_exc(u"The task does not seem to have its audio file set", None, True, ExecuteTaskInputError)
    if (audio.audio_length is None) or (audio.audio_length <= 0):
        self.log_exc(u"The task seems to have an invalid audio file", None, True, ExecuteTaskInputError)
    # enforce the configured audio length ceiling (0 or less disables it)
    max_audio_len = self.rconf[RuntimeConfiguration.TASK_MAX_AUDIO_LENGTH]
    if (max_audio_len > 0) and (audio.audio_length > max_audio_len):
        self.log_exc(u"The audio file of the task has length %.3f, more than the maximum allowed (%.3f)." % (audio.audio_length, max_audio_len), None, True, ExecuteTaskInputError)
    # validate the text input
    text = self.task.text_file
    if text is None:
        self.log_exc(u"The task does not seem to have its text file set", None, True, ExecuteTaskInputError)
    if len(text) == 0:
        self.log_exc(u"The task text file seems to have no text fragments", None, True, ExecuteTaskInputError)
    # enforce the configured fragment-count ceiling (0 or less disables it)
    max_text_len = self.rconf[RuntimeConfiguration.TASK_MAX_TEXT_LENGTH]
    if (max_text_len > 0) and (len(text) > max_text_len):
        self.log_exc(u"The text file of the task has %d fragments, more than the maximum allowed (%d)." % (len(text), max_text_len), None, True, ExecuteTaskInputError)
    if text.chars == 0:
        self.log_exc(u"The task text file seems to have empty text", None, True, ExecuteTaskInputError)
    self.log(u"Both audio and text input file are present")
    # run the actual alignment, dispatching on the text file format
    self.step_index = 1
    self.step_total = 0.000
    if text.file_format in TextFileFormat.MULTILEVEL_VALUES:
        self._execute_multi_level_task()
    else:
        self._execute_single_level_task()
    self.log(u"Executing task... done")
constant[
Execute the task.
The sync map produced will be stored inside the task object.
:raises: :class:`~aeneas.executetask.ExecuteTaskInputError`: if there is a problem with the input parameters
:raises: :class:`~aeneas.executetask.ExecuteTaskExecutionError`: if there is a problem during the task execution
]
call[name[self].log, parameter[constant[Executing task...]]]
if compare[name[self].task.audio_file is constant[None]] begin[:]
call[name[self].log_exc, parameter[constant[The task does not seem to have its audio file set], constant[None], constant[True], name[ExecuteTaskInputError]]]
if <ast.BoolOp object at 0x7da2045679d0> begin[:]
call[name[self].log_exc, parameter[constant[The task seems to have an invalid audio file], constant[None], constant[True], name[ExecuteTaskInputError]]]
variable[task_max_audio_length] assign[=] call[name[self].rconf][name[RuntimeConfiguration].TASK_MAX_AUDIO_LENGTH]
if <ast.BoolOp object at 0x7da204567be0> begin[:]
call[name[self].log_exc, parameter[binary_operation[constant[The audio file of the task has length %.3f, more than the maximum allowed (%.3f).] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da204567fd0>, <ast.Name object at 0x7da204565c30>]]], constant[None], constant[True], name[ExecuteTaskInputError]]]
if compare[name[self].task.text_file is constant[None]] begin[:]
call[name[self].log_exc, parameter[constant[The task does not seem to have its text file set], constant[None], constant[True], name[ExecuteTaskInputError]]]
if compare[call[name[len], parameter[name[self].task.text_file]] equal[==] constant[0]] begin[:]
call[name[self].log_exc, parameter[constant[The task text file seems to have no text fragments], constant[None], constant[True], name[ExecuteTaskInputError]]]
variable[task_max_text_length] assign[=] call[name[self].rconf][name[RuntimeConfiguration].TASK_MAX_TEXT_LENGTH]
if <ast.BoolOp object at 0x7da204566500> begin[:]
call[name[self].log_exc, parameter[binary_operation[constant[The text file of the task has %d fragments, more than the maximum allowed (%d).] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da204567a90>, <ast.Name object at 0x7da204565d50>]]], constant[None], constant[True], name[ExecuteTaskInputError]]]
if compare[name[self].task.text_file.chars equal[==] constant[0]] begin[:]
call[name[self].log_exc, parameter[constant[The task text file seems to have empty text], constant[None], constant[True], name[ExecuteTaskInputError]]]
call[name[self].log, parameter[constant[Both audio and text input file are present]]]
name[self].step_index assign[=] constant[1]
name[self].step_total assign[=] constant[0.0]
if compare[name[self].task.text_file.file_format in name[TextFileFormat].MULTILEVEL_VALUES] begin[:]
call[name[self]._execute_multi_level_task, parameter[]]
call[name[self].log, parameter[constant[Executing task... done]]] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
identifier[self] . identifier[log] ( literal[string] )
keyword[if] identifier[self] . identifier[task] . identifier[audio_file] keyword[is] keyword[None] :
identifier[self] . identifier[log_exc] ( literal[string] , keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
keyword[if] (
( identifier[self] . identifier[task] . identifier[audio_file] . identifier[audio_length] keyword[is] keyword[None] ) keyword[or]
( identifier[self] . identifier[task] . identifier[audio_file] . identifier[audio_length] <= literal[int] )
):
identifier[self] . identifier[log_exc] ( literal[string] , keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
identifier[task_max_audio_length] = identifier[self] . identifier[rconf] [ identifier[RuntimeConfiguration] . identifier[TASK_MAX_AUDIO_LENGTH] ]
keyword[if] (
( identifier[task_max_audio_length] > literal[int] ) keyword[and]
( identifier[self] . identifier[task] . identifier[audio_file] . identifier[audio_length] > identifier[task_max_audio_length] )
):
identifier[self] . identifier[log_exc] ( literal[string] %( identifier[self] . identifier[task] . identifier[audio_file] . identifier[audio_length] , identifier[task_max_audio_length] ), keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
keyword[if] identifier[self] . identifier[task] . identifier[text_file] keyword[is] keyword[None] :
identifier[self] . identifier[log_exc] ( literal[string] , keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
keyword[if] identifier[len] ( identifier[self] . identifier[task] . identifier[text_file] )== literal[int] :
identifier[self] . identifier[log_exc] ( literal[string] , keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
identifier[task_max_text_length] = identifier[self] . identifier[rconf] [ identifier[RuntimeConfiguration] . identifier[TASK_MAX_TEXT_LENGTH] ]
keyword[if] (
( identifier[task_max_text_length] > literal[int] ) keyword[and]
( identifier[len] ( identifier[self] . identifier[task] . identifier[text_file] )> identifier[task_max_text_length] )
):
identifier[self] . identifier[log_exc] ( literal[string] %( identifier[len] ( identifier[self] . identifier[task] . identifier[text_file] ), identifier[task_max_text_length] ), keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
keyword[if] identifier[self] . identifier[task] . identifier[text_file] . identifier[chars] == literal[int] :
identifier[self] . identifier[log_exc] ( literal[string] , keyword[None] , keyword[True] , identifier[ExecuteTaskInputError] )
identifier[self] . identifier[log] ( literal[string] )
identifier[self] . identifier[step_index] = literal[int]
identifier[self] . identifier[step_total] = literal[int]
keyword[if] identifier[self] . identifier[task] . identifier[text_file] . identifier[file_format] keyword[in] identifier[TextFileFormat] . identifier[MULTILEVEL_VALUES] :
identifier[self] . identifier[_execute_multi_level_task] ()
keyword[else] :
identifier[self] . identifier[_execute_single_level_task] ()
identifier[self] . identifier[log] ( literal[string] ) | def execute(self):
"""
Execute the task.
The sync map produced will be stored inside the task object.
:raises: :class:`~aeneas.executetask.ExecuteTaskInputError`: if there is a problem with the input parameters
:raises: :class:`~aeneas.executetask.ExecuteTaskExecutionError`: if there is a problem during the task execution
"""
self.log(u'Executing task...')
# check that we have the AudioFile object
if self.task.audio_file is None:
self.log_exc(u'The task does not seem to have its audio file set', None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
if self.task.audio_file.audio_length is None or self.task.audio_file.audio_length <= 0:
self.log_exc(u'The task seems to have an invalid audio file', None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
task_max_audio_length = self.rconf[RuntimeConfiguration.TASK_MAX_AUDIO_LENGTH]
if task_max_audio_length > 0 and self.task.audio_file.audio_length > task_max_audio_length:
self.log_exc(u'The audio file of the task has length %.3f, more than the maximum allowed (%.3f).' % (self.task.audio_file.audio_length, task_max_audio_length), None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
# check that we have the TextFile object
if self.task.text_file is None:
self.log_exc(u'The task does not seem to have its text file set', None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
if len(self.task.text_file) == 0:
self.log_exc(u'The task text file seems to have no text fragments', None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
task_max_text_length = self.rconf[RuntimeConfiguration.TASK_MAX_TEXT_LENGTH]
if task_max_text_length > 0 and len(self.task.text_file) > task_max_text_length:
self.log_exc(u'The text file of the task has %d fragments, more than the maximum allowed (%d).' % (len(self.task.text_file), task_max_text_length), None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
if self.task.text_file.chars == 0:
self.log_exc(u'The task text file seems to have empty text', None, True, ExecuteTaskInputError) # depends on [control=['if'], data=[]]
self.log(u'Both audio and text input file are present')
# execute
self.step_index = 1
self.step_total = 0.0
if self.task.text_file.file_format in TextFileFormat.MULTILEVEL_VALUES:
self._execute_multi_level_task() # depends on [control=['if'], data=[]]
else:
self._execute_single_level_task()
self.log(u'Executing task... done') |
def _rollback_to_year(self, other):
    """
    Roll `other` back to the most recent date that was on a fiscal year
    end.
    Return the date of that year-end, the number of full quarters
    elapsed between that year-end and other, and the remaining Timedelta
    since the most recent quarter-end.
    Parameters
    ----------
    other : datetime or Timestamp
    Returns
    -------
    tuple of
    prev_year_end : Timestamp giving most recent fiscal year end
    num_qtrs : int
    tdelta : Timedelta
    """
    num_qtrs = 0
    # strip any timezone so comparisons against offset-rolled dates are
    # naive-vs-naive
    norm = Timestamp(other).tz_localize(None)
    start = self._offset.rollback(norm)
    # Note: start <= norm and self._offset.onOffset(start)
    if start < norm:
        # roll adjustment: `norm` falls strictly after a fiscal year end,
        # so count how many whole quarters fit in the gap
        qtr_lens = self.get_weeks(norm)
        # check that qtr_lens is consistent with self._offset addition:
        # the week counts must sum to exactly one fiscal year
        end = liboffsets.shift_day(start, days=7 * sum(qtr_lens))
        assert self._offset.onOffset(end), (start, end, qtr_lens)
        tdelta = norm - start
        # subtract one quarter at a time (each quarter is qlen weeks) until
        # less than a full quarter of the gap remains
        for qlen in qtr_lens:
            if qlen * 7 <= tdelta.days:
                num_qtrs += 1
                tdelta -= Timedelta(days=qlen * 7)
            else:
                break
    else:
        # `norm` is itself a fiscal year end, so nothing has elapsed
        tdelta = Timedelta(0)
    # Note: we always have tdelta.value >= 0
    return start, num_qtrs, tdelta
constant[
Roll `other` back to the most recent date that was on a fiscal year
end.
Return the date of that year-end, the number of full quarters
elapsed between that year-end and other, and the remaining Timedelta
since the most recent quarter-end.
Parameters
----------
other : datetime or Timestamp
Returns
-------
tuple of
prev_year_end : Timestamp giving most recent fiscal year end
num_qtrs : int
tdelta : Timedelta
]
variable[num_qtrs] assign[=] constant[0]
variable[norm] assign[=] call[call[name[Timestamp], parameter[name[other]]].tz_localize, parameter[constant[None]]]
variable[start] assign[=] call[name[self]._offset.rollback, parameter[name[norm]]]
if compare[name[start] less[<] name[norm]] begin[:]
variable[qtr_lens] assign[=] call[name[self].get_weeks, parameter[name[norm]]]
variable[end] assign[=] call[name[liboffsets].shift_day, parameter[name[start]]]
assert[call[name[self]._offset.onOffset, parameter[name[end]]]]
variable[tdelta] assign[=] binary_operation[name[norm] - name[start]]
for taget[name[qlen]] in starred[name[qtr_lens]] begin[:]
if compare[binary_operation[name[qlen] * constant[7]] less_or_equal[<=] name[tdelta].days] begin[:]
<ast.AugAssign object at 0x7da18f00f460>
<ast.AugAssign object at 0x7da18f00c4c0>
return[tuple[[<ast.Name object at 0x7da18f00f430>, <ast.Name object at 0x7da18f00c760>, <ast.Name object at 0x7da18f00eb30>]]] | keyword[def] identifier[_rollback_to_year] ( identifier[self] , identifier[other] ):
literal[string]
identifier[num_qtrs] = literal[int]
identifier[norm] = identifier[Timestamp] ( identifier[other] ). identifier[tz_localize] ( keyword[None] )
identifier[start] = identifier[self] . identifier[_offset] . identifier[rollback] ( identifier[norm] )
keyword[if] identifier[start] < identifier[norm] :
identifier[qtr_lens] = identifier[self] . identifier[get_weeks] ( identifier[norm] )
identifier[end] = identifier[liboffsets] . identifier[shift_day] ( identifier[start] , identifier[days] = literal[int] * identifier[sum] ( identifier[qtr_lens] ))
keyword[assert] identifier[self] . identifier[_offset] . identifier[onOffset] ( identifier[end] ),( identifier[start] , identifier[end] , identifier[qtr_lens] )
identifier[tdelta] = identifier[norm] - identifier[start]
keyword[for] identifier[qlen] keyword[in] identifier[qtr_lens] :
keyword[if] identifier[qlen] * literal[int] <= identifier[tdelta] . identifier[days] :
identifier[num_qtrs] += literal[int]
identifier[tdelta] -= identifier[Timedelta] ( identifier[days] = identifier[qlen] * literal[int] )
keyword[else] :
keyword[break]
keyword[else] :
identifier[tdelta] = identifier[Timedelta] ( literal[int] )
keyword[return] identifier[start] , identifier[num_qtrs] , identifier[tdelta] | def _rollback_to_year(self, other):
"""
Roll `other` back to the most recent date that was on a fiscal year
end.
Return the date of that year-end, the number of full quarters
elapsed between that year-end and other, and the remaining Timedelta
since the most recent quarter-end.
Parameters
----------
other : datetime or Timestamp
Returns
-------
tuple of
prev_year_end : Timestamp giving most recent fiscal year end
num_qtrs : int
tdelta : Timedelta
"""
num_qtrs = 0
norm = Timestamp(other).tz_localize(None)
start = self._offset.rollback(norm)
# Note: start <= norm and self._offset.onOffset(start)
if start < norm:
# roll adjustment
qtr_lens = self.get_weeks(norm)
# check thet qtr_lens is consistent with self._offset addition
end = liboffsets.shift_day(start, days=7 * sum(qtr_lens))
assert self._offset.onOffset(end), (start, end, qtr_lens)
tdelta = norm - start
for qlen in qtr_lens:
if qlen * 7 <= tdelta.days:
num_qtrs += 1
tdelta -= Timedelta(days=qlen * 7) # depends on [control=['if'], data=[]]
else:
break # depends on [control=['for'], data=['qlen']] # depends on [control=['if'], data=['start', 'norm']]
else:
tdelta = Timedelta(0)
# Note: we always have tdelta.value >= 0
return (start, num_qtrs, tdelta) |
def load_genotypes(self):
    """Prepares the files for genotype parsing.

    Opens the info and frequency files for the next archive in line and
    advances the internal file index; raises StopIteration once every
    archive has been consumed.

    :return: None
    """
    # guard clause: signal end of iteration once all archives are consumed
    if self.file_index >= len(self.archives):
        raise StopIteration
    self.current_file = self.archives[self.file_index]
    # default info filename is derived from the archive name unless an
    # explicit list of info files was supplied
    info_filename = self.current_file.replace(Parser.gen_ext, Parser.info_ext)
    if len(self.info_files) > 0:
        info_filename = self.info_files[self.file_index]
    self.info_file = open(info_filename)
    self.info_file.readline()                   # Dump the header
    if DataParser.compressed_pedigree:
        self.freq_file = gzip.open("%s" % (self.current_file), 'rb')
    else:
        self.freq_file = open(self.current_file)
    self.current_chrom = self.chroms[self.file_index]
    self.file_index += 1
constant[Prepares the files for genotype parsing.
:return: None
]
if compare[name[self].file_index less[<] call[name[len], parameter[name[self].archives]]] begin[:]
name[self].current_file assign[=] call[name[self].archives][name[self].file_index]
variable[info_filename] assign[=] call[name[self].current_file.replace, parameter[name[Parser].gen_ext, name[Parser].info_ext]]
if compare[call[name[len], parameter[name[self].info_files]] greater[>] constant[0]] begin[:]
variable[info_filename] assign[=] call[name[self].info_files][name[self].file_index]
name[self].info_file assign[=] call[name[open], parameter[name[info_filename]]]
call[name[self].info_file.readline, parameter[]]
if name[DataParser].compressed_pedigree begin[:]
name[self].freq_file assign[=] call[name[gzip].open, parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[self].current_file], constant[rb]]]
name[self].current_chrom assign[=] call[name[self].chroms][name[self].file_index]
<ast.AugAssign object at 0x7da18ede5c30> | keyword[def] identifier[load_genotypes] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[file_index] < identifier[len] ( identifier[self] . identifier[archives] ):
identifier[self] . identifier[current_file] = identifier[self] . identifier[archives] [ identifier[self] . identifier[file_index] ]
identifier[info_filename] = identifier[self] . identifier[current_file] . identifier[replace] ( identifier[Parser] . identifier[gen_ext] , identifier[Parser] . identifier[info_ext] )
keyword[if] identifier[len] ( identifier[self] . identifier[info_files] )> literal[int] :
identifier[info_filename] = identifier[self] . identifier[info_files] [ identifier[self] . identifier[file_index] ]
identifier[self] . identifier[info_file] = identifier[open] ( identifier[info_filename] )
identifier[self] . identifier[info_file] . identifier[readline] ()
keyword[if] identifier[DataParser] . identifier[compressed_pedigree] :
identifier[self] . identifier[freq_file] = identifier[gzip] . identifier[open] ( literal[string] %( identifier[self] . identifier[current_file] ), literal[string] )
keyword[else] :
identifier[self] . identifier[freq_file] = identifier[open] ( identifier[self] . identifier[current_file] )
identifier[self] . identifier[current_chrom] = identifier[self] . identifier[chroms] [ identifier[self] . identifier[file_index] ]
identifier[self] . identifier[file_index] += literal[int]
keyword[else] :
keyword[raise] identifier[StopIteration] | def load_genotypes(self):
"""Prepares the files for genotype parsing.
:return: None
"""
if self.file_index < len(self.archives):
self.current_file = self.archives[self.file_index]
info_filename = self.current_file.replace(Parser.gen_ext, Parser.info_ext)
if len(self.info_files) > 0:
info_filename = self.info_files[self.file_index] # depends on [control=['if'], data=[]]
self.info_file = open(info_filename)
self.info_file.readline() # Dump the header
if DataParser.compressed_pedigree:
self.freq_file = gzip.open('%s' % self.current_file, 'rb') # depends on [control=['if'], data=[]]
else:
self.freq_file = open(self.current_file)
self.current_chrom = self.chroms[self.file_index]
self.file_index += 1 # depends on [control=['if'], data=[]]
else:
raise StopIteration |
def stop(self):
    """Stop the sensor.

    Returns True if the pipeline was shut down, False if the sensor was
    not running in the first place.
    """
    if self._running:
        # shut down the pipeline and mark the sensor as stopped
        self._pipe.stop()
        self._running = False
        return True
    # nothing to do: the stream was never started (or already stopped)
    logging.warning('Realsense not running. Aborting stop.')
    return False
constant[Stop the sensor.
]
if <ast.UnaryOp object at 0x7da1b26adb70> begin[:]
call[name[logging].warning, parameter[constant[Realsense not running. Aborting stop.]]]
return[constant[False]]
call[name[self]._pipe.stop, parameter[]]
name[self]._running assign[=] constant[False]
return[constant[True]] | keyword[def] identifier[stop] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_running] :
identifier[logging] . identifier[warning] ( literal[string] )
keyword[return] keyword[False]
identifier[self] . identifier[_pipe] . identifier[stop] ()
identifier[self] . identifier[_running] = keyword[False]
keyword[return] keyword[True] | def stop(self):
"""Stop the sensor.
"""
# check that everything is running
if not self._running:
logging.warning('Realsense not running. Aborting stop.')
return False # depends on [control=['if'], data=[]]
self._pipe.stop()
self._running = False
return True |
def validate_sbml_model(filename,
                        check_model=True,
                        internal_consistency=True,
                        check_units_consistency=False,
                        check_modeling_practice=False, **kwargs):
    """Validate SBML model and returns the model along with a list of errors.

    Parameters
    ----------
    filename : str
        The filename (or SBML string) of the SBML model to be validated.
    internal_consistency: boolean {True, False}
        Check internal consistency.
    check_units_consistency: boolean {True, False}
        Check consistency of units.
    check_modeling_practice: boolean {True, False}
        Check modeling practise.
    check_model: boolean {True, False}
        Whether to also check some basic model properties such as reaction
        boundaries and compartment formulas.

    Returns
    -------
    (model, errors)
    model : :class:`~cobra.core.Model.Model` object
        The cobra model if the file could be read successfully or None
        otherwise.
    errors : dict
        Warnings and errors grouped by their respective types.

    Raises
    ------
    CobraSBMLError
    """
    # Errors and warnings are grouped based on their type. SBML_* types are
    # from the libsbml validator. COBRA_* types are from the cobrapy SBML
    # parser.
    keys = (
        "SBML_FATAL",
        "SBML_ERROR",
        "SBML_SCHEMA_ERROR",
        "SBML_WARNING",
        "COBRA_FATAL",
        "COBRA_ERROR",
        "COBRA_WARNING",
        "COBRA_CHECK",
    )
    errors = {key: [] for key in keys}

    # [1] libsbml validation
    doc = _get_doc_from_filename(filename)  # type: libsbml.SBMLDocument

    # set checking of units & modeling practise
    doc.setConsistencyChecks(libsbml.LIBSBML_CAT_UNITS_CONSISTENCY,
                             check_units_consistency)
    doc.setConsistencyChecks(libsbml.LIBSBML_CAT_MODELING_PRACTICE,
                             check_modeling_practice)
    # check internal consistency
    if internal_consistency:
        doc.checkInternalConsistency()
    doc.checkConsistency()

    # sort every libsbml diagnostic into the matching severity bucket
    for k in range(doc.getNumErrors()):
        e = doc.getError(k)  # type: libsbml.SBMLError
        msg = _error_string(e, k=k)
        sev = e.getSeverity()
        if sev == libsbml.LIBSBML_SEV_FATAL:
            errors["SBML_FATAL"].append(msg)
        elif sev == libsbml.LIBSBML_SEV_ERROR:
            errors["SBML_ERROR"].append(msg)
        elif sev == libsbml.LIBSBML_SEV_SCHEMA_ERROR:
            errors["SBML_SCHEMA_ERROR"].append(msg)
        elif sev == libsbml.LIBSBML_SEV_WARNING:
            errors["SBML_WARNING"].append(msg)

    # [2] cobrapy validation (check that SBML can be read into model)
    # all warnings generated while loading will be logged as errors
    # by capturing the parser's log records into an in-memory stream
    log_stream = StringIO()
    stream_handler = logging.StreamHandler(log_stream)
    formatter = logging.Formatter('%(levelname)s:%(message)s')
    stream_handler.setFormatter(formatter)
    stream_handler.setLevel(logging.INFO)
    LOGGER.addHandler(stream_handler)
    # disable propagation so records are not duplicated to parent handlers
    LOGGER.propagate = False

    try:
        # read model and allow additional parser arguments
        model = _sbml_to_model(doc, **kwargs)
    except CobraSBMLError as e:
        # known parse failure: recoverable as a COBRA_ERROR entry
        errors["COBRA_ERROR"].append(str(e))
        return None, errors
    except Exception as e:
        # anything else is unexpected and treated as fatal
        errors["COBRA_FATAL"].append(str(e))
        return None, errors

    # split the captured "LEVEL:message" lines back into level and message
    cobra_errors = log_stream.getvalue().split("\n")
    for cobra_error in cobra_errors:
        tokens = cobra_error.split(":")
        error_type = tokens[0]
        # rejoin the remainder in case the message itself contained colons
        error_msg = ":".join(tokens[1:])
        if error_type == "WARNING":
            errors["COBRA_WARNING"].append(error_msg)
        elif error_type == "ERROR":
            errors["COBRA_ERROR"].append(error_msg)

    # remove stream handler and restore normal logger behavior
    LOGGER.removeHandler(stream_handler)
    LOGGER.propagate = True

    # [3] additional model tests
    if check_model:
        errors["COBRA_CHECK"].extend(
            check_metabolite_compartment_formula(model)
        )

    # emit one summary log line per non-empty error category group
    for key in ["SBML_FATAL", "SBML_ERROR", "SBML_SCHEMA_ERROR"]:
        if len(errors[key]) > 0:
            LOGGER.error("SBML errors in validation, check error log "
                         "for details.")
            break
    for key in ["SBML_WARNING"]:
        if len(errors[key]) > 0:
            LOGGER.error("SBML warnings in validation, check error log "
                         "for details.")
            break
    for key in ["COBRA_FATAL", "COBRA_ERROR"]:
        if len(errors[key]) > 0:
            LOGGER.error("COBRA errors in validation, check error log "
                         "for details.")
            break
    for key in ["COBRA_WARNING", "COBRA_CHECK"]:
        if len(errors[key]) > 0:
            LOGGER.error("COBRA warnings in validation, check error log "
                         "for details.")
            break
    return model, errors
constant[Validate SBML model and returns the model along with a list of errors.
Parameters
----------
filename : str
The filename (or SBML string) of the SBML model to be validated.
internal_consistency: boolean {True, False}
Check internal consistency.
check_units_consistency: boolean {True, False}
Check consistency of units.
check_modeling_practice: boolean {True, False}
Check modeling practise.
check_model: boolean {True, False}
Whether to also check some basic model properties such as reaction
boundaries and compartment formulas.
Returns
-------
(model, errors)
model : :class:`~cobra.core.Model.Model` object
The cobra model if the file could be read successfully or None
otherwise.
errors : dict
Warnings and errors grouped by their respective types.
Raises
------
CobraSBMLError
]
variable[keys] assign[=] tuple[[<ast.Constant object at 0x7da204963e80>, <ast.Constant object at 0x7da204960b50>, <ast.Constant object at 0x7da204963460>, <ast.Constant object at 0x7da204961780>, <ast.Constant object at 0x7da204960b20>, <ast.Constant object at 0x7da2049630a0>, <ast.Constant object at 0x7da204961d80>, <ast.Constant object at 0x7da204962e30>]]
variable[errors] assign[=] <ast.DictComp object at 0x7da204960820>
variable[doc] assign[=] call[name[_get_doc_from_filename], parameter[name[filename]]]
call[name[doc].setConsistencyChecks, parameter[name[libsbml].LIBSBML_CAT_UNITS_CONSISTENCY, name[check_units_consistency]]]
call[name[doc].setConsistencyChecks, parameter[name[libsbml].LIBSBML_CAT_MODELING_PRACTICE, name[check_modeling_practice]]]
if name[internal_consistency] begin[:]
call[name[doc].checkInternalConsistency, parameter[]]
call[name[doc].checkConsistency, parameter[]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[doc].getNumErrors, parameter[]]]]] begin[:]
variable[e] assign[=] call[name[doc].getError, parameter[name[k]]]
variable[msg] assign[=] call[name[_error_string], parameter[name[e]]]
variable[sev] assign[=] call[name[e].getSeverity, parameter[]]
if compare[name[sev] equal[==] name[libsbml].LIBSBML_SEV_FATAL] begin[:]
call[call[name[errors]][constant[SBML_FATAL]].append, parameter[name[msg]]]
variable[log_stream] assign[=] call[name[StringIO], parameter[]]
variable[stream_handler] assign[=] call[name[logging].StreamHandler, parameter[name[log_stream]]]
variable[formatter] assign[=] call[name[logging].Formatter, parameter[constant[%(levelname)s:%(message)s]]]
call[name[stream_handler].setFormatter, parameter[name[formatter]]]
call[name[stream_handler].setLevel, parameter[name[logging].INFO]]
call[name[LOGGER].addHandler, parameter[name[stream_handler]]]
name[LOGGER].propagate assign[=] constant[False]
<ast.Try object at 0x7da18fe90100>
variable[cobra_errors] assign[=] call[call[name[log_stream].getvalue, parameter[]].split, parameter[constant[
]]]
for taget[name[cobra_error]] in starred[name[cobra_errors]] begin[:]
variable[tokens] assign[=] call[name[cobra_error].split, parameter[constant[:]]]
variable[error_type] assign[=] call[name[tokens]][constant[0]]
variable[error_msg] assign[=] call[constant[:].join, parameter[call[name[tokens]][<ast.Slice object at 0x7da18fe908b0>]]]
if compare[name[error_type] equal[==] constant[WARNING]] begin[:]
call[call[name[errors]][constant[COBRA_WARNING]].append, parameter[name[error_msg]]]
call[name[LOGGER].removeHandler, parameter[name[stream_handler]]]
name[LOGGER].propagate assign[=] constant[True]
if name[check_model] begin[:]
call[call[name[errors]][constant[COBRA_CHECK]].extend, parameter[call[name[check_metabolite_compartment_formula], parameter[name[model]]]]]
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18fe906d0>, <ast.Constant object at 0x7da18fe911e0>, <ast.Constant object at 0x7da18fe92bc0>]]] begin[:]
if compare[call[name[len], parameter[call[name[errors]][name[key]]]] greater[>] constant[0]] begin[:]
call[name[LOGGER].error, parameter[constant[SBML errors in validation, check error log for details.]]]
break
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18fe904f0>]]] begin[:]
if compare[call[name[len], parameter[call[name[errors]][name[key]]]] greater[>] constant[0]] begin[:]
call[name[LOGGER].error, parameter[constant[SBML warnings in validation, check error log for details.]]]
break
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18fe91510>, <ast.Constant object at 0x7da18fe93940>]]] begin[:]
if compare[call[name[len], parameter[call[name[errors]][name[key]]]] greater[>] constant[0]] begin[:]
call[name[LOGGER].error, parameter[constant[COBRA errors in validation, check error log for details.]]]
break
for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da18fe91b40>, <ast.Constant object at 0x7da18fe938b0>]]] begin[:]
if compare[call[name[len], parameter[call[name[errors]][name[key]]]] greater[>] constant[0]] begin[:]
call[name[LOGGER].error, parameter[constant[COBRA warnings in validation, check error log for details.]]]
break
return[tuple[[<ast.Name object at 0x7da18fe90070>, <ast.Name object at 0x7da18fe92f80>]]] | keyword[def] identifier[validate_sbml_model] ( identifier[filename] ,
identifier[check_model] = keyword[True] ,
identifier[internal_consistency] = keyword[True] ,
identifier[check_units_consistency] = keyword[False] ,
identifier[check_modeling_practice] = keyword[False] ,** identifier[kwargs] ):
literal[string]
identifier[keys] =(
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
)
identifier[errors] ={ identifier[key] :[] keyword[for] identifier[key] keyword[in] identifier[keys] }
identifier[doc] = identifier[_get_doc_from_filename] ( identifier[filename] )
identifier[doc] . identifier[setConsistencyChecks] ( identifier[libsbml] . identifier[LIBSBML_CAT_UNITS_CONSISTENCY] ,
identifier[check_units_consistency] )
identifier[doc] . identifier[setConsistencyChecks] ( identifier[libsbml] . identifier[LIBSBML_CAT_MODELING_PRACTICE] ,
identifier[check_modeling_practice] )
keyword[if] identifier[internal_consistency] :
identifier[doc] . identifier[checkInternalConsistency] ()
identifier[doc] . identifier[checkConsistency] ()
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[doc] . identifier[getNumErrors] ()):
identifier[e] = identifier[doc] . identifier[getError] ( identifier[k] )
identifier[msg] = identifier[_error_string] ( identifier[e] , identifier[k] = identifier[k] )
identifier[sev] = identifier[e] . identifier[getSeverity] ()
keyword[if] identifier[sev] == identifier[libsbml] . identifier[LIBSBML_SEV_FATAL] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[msg] )
keyword[elif] identifier[sev] == identifier[libsbml] . identifier[LIBSBML_SEV_ERROR] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[msg] )
keyword[elif] identifier[sev] == identifier[libsbml] . identifier[LIBSBML_SEV_SCHEMA_ERROR] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[msg] )
keyword[elif] identifier[sev] == identifier[libsbml] . identifier[LIBSBML_SEV_WARNING] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[msg] )
identifier[log_stream] = identifier[StringIO] ()
identifier[stream_handler] = identifier[logging] . identifier[StreamHandler] ( identifier[log_stream] )
identifier[formatter] = identifier[logging] . identifier[Formatter] ( literal[string] )
identifier[stream_handler] . identifier[setFormatter] ( identifier[formatter] )
identifier[stream_handler] . identifier[setLevel] ( identifier[logging] . identifier[INFO] )
identifier[LOGGER] . identifier[addHandler] ( identifier[stream_handler] )
identifier[LOGGER] . identifier[propagate] = keyword[False]
keyword[try] :
identifier[model] = identifier[_sbml_to_model] ( identifier[doc] ,** identifier[kwargs] )
keyword[except] identifier[CobraSBMLError] keyword[as] identifier[e] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[str] ( identifier[e] ))
keyword[return] keyword[None] , identifier[errors]
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[str] ( identifier[e] ))
keyword[return] keyword[None] , identifier[errors]
identifier[cobra_errors] = identifier[log_stream] . identifier[getvalue] (). identifier[split] ( literal[string] )
keyword[for] identifier[cobra_error] keyword[in] identifier[cobra_errors] :
identifier[tokens] = identifier[cobra_error] . identifier[split] ( literal[string] )
identifier[error_type] = identifier[tokens] [ literal[int] ]
identifier[error_msg] = literal[string] . identifier[join] ( identifier[tokens] [ literal[int] :])
keyword[if] identifier[error_type] == literal[string] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[error_msg] )
keyword[elif] identifier[error_type] == literal[string] :
identifier[errors] [ literal[string] ]. identifier[append] ( identifier[error_msg] )
identifier[LOGGER] . identifier[removeHandler] ( identifier[stream_handler] )
identifier[LOGGER] . identifier[propagate] = keyword[True]
keyword[if] identifier[check_model] :
identifier[errors] [ literal[string] ]. identifier[extend] (
identifier[check_metabolite_compartment_formula] ( identifier[model] )
)
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] , literal[string] ]:
keyword[if] identifier[len] ( identifier[errors] [ identifier[key] ])> literal[int] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] )
keyword[break]
keyword[for] identifier[key] keyword[in] [ literal[string] ]:
keyword[if] identifier[len] ( identifier[errors] [ identifier[key] ])> literal[int] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] )
keyword[break]
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[len] ( identifier[errors] [ identifier[key] ])> literal[int] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] )
keyword[break]
keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] ]:
keyword[if] identifier[len] ( identifier[errors] [ identifier[key] ])> literal[int] :
identifier[LOGGER] . identifier[error] ( literal[string]
literal[string] )
keyword[break]
keyword[return] identifier[model] , identifier[errors] | def validate_sbml_model(filename, check_model=True, internal_consistency=True, check_units_consistency=False, check_modeling_practice=False, **kwargs):
"""Validate SBML model and returns the model along with a list of errors.
Parameters
----------
filename : str
The filename (or SBML string) of the SBML model to be validated.
internal_consistency: boolean {True, False}
Check internal consistency.
check_units_consistency: boolean {True, False}
Check consistency of units.
check_modeling_practice: boolean {True, False}
Check modeling practise.
check_model: boolean {True, False}
Whether to also check some basic model properties such as reaction
boundaries and compartment formulas.
Returns
-------
(model, errors)
model : :class:`~cobra.core.Model.Model` object
The cobra model if the file could be read successfully or None
otherwise.
errors : dict
Warnings and errors grouped by their respective types.
Raises
------
CobraSBMLError
"""
# Errors and warnings are grouped based on their type. SBML_* types are
# from the libsbml validator. COBRA_* types are from the cobrapy SBML
# parser.
keys = ('SBML_FATAL', 'SBML_ERROR', 'SBML_SCHEMA_ERROR', 'SBML_WARNING', 'COBRA_FATAL', 'COBRA_ERROR', 'COBRA_WARNING', 'COBRA_CHECK')
errors = {key: [] for key in keys}
# [1] libsbml validation
doc = _get_doc_from_filename(filename) # type: libsbml.SBMLDocument
# set checking of units & modeling practise
doc.setConsistencyChecks(libsbml.LIBSBML_CAT_UNITS_CONSISTENCY, check_units_consistency)
doc.setConsistencyChecks(libsbml.LIBSBML_CAT_MODELING_PRACTICE, check_modeling_practice)
# check internal consistency
if internal_consistency:
doc.checkInternalConsistency() # depends on [control=['if'], data=[]]
doc.checkConsistency()
for k in range(doc.getNumErrors()):
e = doc.getError(k) # type: libsbml.SBMLError
msg = _error_string(e, k=k)
sev = e.getSeverity()
if sev == libsbml.LIBSBML_SEV_FATAL:
errors['SBML_FATAL'].append(msg) # depends on [control=['if'], data=[]]
elif sev == libsbml.LIBSBML_SEV_ERROR:
errors['SBML_ERROR'].append(msg) # depends on [control=['if'], data=[]]
elif sev == libsbml.LIBSBML_SEV_SCHEMA_ERROR:
errors['SBML_SCHEMA_ERROR'].append(msg) # depends on [control=['if'], data=[]]
elif sev == libsbml.LIBSBML_SEV_WARNING:
errors['SBML_WARNING'].append(msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']]
# [2] cobrapy validation (check that SBML can be read into model)
# all warnings generated while loading will be logged as errors
log_stream = StringIO()
stream_handler = logging.StreamHandler(log_stream)
formatter = logging.Formatter('%(levelname)s:%(message)s')
stream_handler.setFormatter(formatter)
stream_handler.setLevel(logging.INFO)
LOGGER.addHandler(stream_handler)
LOGGER.propagate = False
try:
# read model and allow additional parser arguments
model = _sbml_to_model(doc, **kwargs) # depends on [control=['try'], data=[]]
except CobraSBMLError as e:
errors['COBRA_ERROR'].append(str(e))
return (None, errors) # depends on [control=['except'], data=['e']]
except Exception as e:
errors['COBRA_FATAL'].append(str(e))
return (None, errors) # depends on [control=['except'], data=['e']]
cobra_errors = log_stream.getvalue().split('\n')
for cobra_error in cobra_errors:
tokens = cobra_error.split(':')
error_type = tokens[0]
error_msg = ':'.join(tokens[1:])
if error_type == 'WARNING':
errors['COBRA_WARNING'].append(error_msg) # depends on [control=['if'], data=[]]
elif error_type == 'ERROR':
errors['COBRA_ERROR'].append(error_msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cobra_error']]
# remove stream handler
LOGGER.removeHandler(stream_handler)
LOGGER.propagate = True
# [3] additional model tests
if check_model:
errors['COBRA_CHECK'].extend(check_metabolite_compartment_formula(model)) # depends on [control=['if'], data=[]]
for key in ['SBML_FATAL', 'SBML_ERROR', 'SBML_SCHEMA_ERROR']:
if len(errors[key]) > 0:
LOGGER.error('SBML errors in validation, check error log for details.')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
for key in ['SBML_WARNING']:
if len(errors[key]) > 0:
LOGGER.error('SBML warnings in validation, check error log for details.')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
for key in ['COBRA_FATAL', 'COBRA_ERROR']:
if len(errors[key]) > 0:
LOGGER.error('COBRA errors in validation, check error log for details.')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
for key in ['COBRA_WARNING', 'COBRA_CHECK']:
if len(errors[key]) > 0:
LOGGER.error('COBRA warnings in validation, check error log for details.')
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return (model, errors) |
def create_project_connection(self, create_connection_inputs, project):
"""CreateProjectConnection.
[Preview API] Creates a new Pipeline connection between the provider installation and the specified project. Returns the PipelineConnection object created.
:param :class:`<CreatePipelineConnectionInputs> <azure.devops.v5_1.cix.models.CreatePipelineConnectionInputs>` create_connection_inputs:
:param str project:
:rtype: :class:`<PipelineConnection> <azure.devops.v5_1.cix.models.PipelineConnection>`
"""
query_parameters = {}
if project is not None:
query_parameters['project'] = self._serialize.query('project', project, 'str')
content = self._serialize.body(create_connection_inputs, 'CreatePipelineConnectionInputs')
response = self._send(http_method='POST',
location_id='00df4879-9216-45d5-b38d-4a487b626b2c',
version='5.1-preview.1',
query_parameters=query_parameters,
content=content)
return self._deserialize('PipelineConnection', response) | def function[create_project_connection, parameter[self, create_connection_inputs, project]]:
constant[CreateProjectConnection.
[Preview API] Creates a new Pipeline connection between the provider installation and the specified project. Returns the PipelineConnection object created.
:param :class:`<CreatePipelineConnectionInputs> <azure.devops.v5_1.cix.models.CreatePipelineConnectionInputs>` create_connection_inputs:
:param str project:
:rtype: :class:`<PipelineConnection> <azure.devops.v5_1.cix.models.PipelineConnection>`
]
variable[query_parameters] assign[=] dictionary[[], []]
if compare[name[project] is_not constant[None]] begin[:]
call[name[query_parameters]][constant[project]] assign[=] call[name[self]._serialize.query, parameter[constant[project], name[project], constant[str]]]
variable[content] assign[=] call[name[self]._serialize.body, parameter[name[create_connection_inputs], constant[CreatePipelineConnectionInputs]]]
variable[response] assign[=] call[name[self]._send, parameter[]]
return[call[name[self]._deserialize, parameter[constant[PipelineConnection], name[response]]]] | keyword[def] identifier[create_project_connection] ( identifier[self] , identifier[create_connection_inputs] , identifier[project] ):
literal[string]
identifier[query_parameters] ={}
keyword[if] identifier[project] keyword[is] keyword[not] keyword[None] :
identifier[query_parameters] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[query] ( literal[string] , identifier[project] , literal[string] )
identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[create_connection_inputs] , literal[string] )
identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] ,
identifier[location_id] = literal[string] ,
identifier[version] = literal[string] ,
identifier[query_parameters] = identifier[query_parameters] ,
identifier[content] = identifier[content] )
keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] ) | def create_project_connection(self, create_connection_inputs, project):
"""CreateProjectConnection.
[Preview API] Creates a new Pipeline connection between the provider installation and the specified project. Returns the PipelineConnection object created.
:param :class:`<CreatePipelineConnectionInputs> <azure.devops.v5_1.cix.models.CreatePipelineConnectionInputs>` create_connection_inputs:
:param str project:
:rtype: :class:`<PipelineConnection> <azure.devops.v5_1.cix.models.PipelineConnection>`
"""
query_parameters = {}
if project is not None:
query_parameters['project'] = self._serialize.query('project', project, 'str') # depends on [control=['if'], data=['project']]
content = self._serialize.body(create_connection_inputs, 'CreatePipelineConnectionInputs')
response = self._send(http_method='POST', location_id='00df4879-9216-45d5-b38d-4a487b626b2c', version='5.1-preview.1', query_parameters=query_parameters, content=content)
return self._deserialize('PipelineConnection', response) |
def rectwidth(self, wavelengths=None):
"""Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`.
Parameters
----------
wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
Wavelength values for sampling.
If not a Quantity, assumed to be in Angstrom.
If `None`, ``self.waveset`` is used.
Returns
-------
rectw : `~astropy.units.quantity.Quantity`
Bandpass rectangular width.
"""
equvw = self.equivwidth(wavelengths=wavelengths)
tpeak = self.tpeak(wavelengths=wavelengths)
if tpeak.value == 0: # pragma: no cover
rectw = 0.0 * self._internal_wave_unit
else:
rectw = equvw / tpeak
return rectw | def function[rectwidth, parameter[self, wavelengths]]:
constant[Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`.
Parameters
----------
wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
Wavelength values for sampling.
If not a Quantity, assumed to be in Angstrom.
If `None`, ``self.waveset`` is used.
Returns
-------
rectw : `~astropy.units.quantity.Quantity`
Bandpass rectangular width.
]
variable[equvw] assign[=] call[name[self].equivwidth, parameter[]]
variable[tpeak] assign[=] call[name[self].tpeak, parameter[]]
if compare[name[tpeak].value equal[==] constant[0]] begin[:]
variable[rectw] assign[=] binary_operation[constant[0.0] * name[self]._internal_wave_unit]
return[name[rectw]] | keyword[def] identifier[rectwidth] ( identifier[self] , identifier[wavelengths] = keyword[None] ):
literal[string]
identifier[equvw] = identifier[self] . identifier[equivwidth] ( identifier[wavelengths] = identifier[wavelengths] )
identifier[tpeak] = identifier[self] . identifier[tpeak] ( identifier[wavelengths] = identifier[wavelengths] )
keyword[if] identifier[tpeak] . identifier[value] == literal[int] :
identifier[rectw] = literal[int] * identifier[self] . identifier[_internal_wave_unit]
keyword[else] :
identifier[rectw] = identifier[equvw] / identifier[tpeak]
keyword[return] identifier[rectw] | def rectwidth(self, wavelengths=None):
"""Calculate :ref:`bandpass rectangular width <synphot-formula-rectw>`.
Parameters
----------
wavelengths : array-like, `~astropy.units.quantity.Quantity`, or `None`
Wavelength values for sampling.
If not a Quantity, assumed to be in Angstrom.
If `None`, ``self.waveset`` is used.
Returns
-------
rectw : `~astropy.units.quantity.Quantity`
Bandpass rectangular width.
"""
equvw = self.equivwidth(wavelengths=wavelengths)
tpeak = self.tpeak(wavelengths=wavelengths)
if tpeak.value == 0: # pragma: no cover
rectw = 0.0 * self._internal_wave_unit # depends on [control=['if'], data=[]]
else:
rectw = equvw / tpeak
return rectw |
def ln_growth(eqdata, **kwargs):
"""
Return the natural log of growth.
See also
--------
:func:`growth`
"""
if 'outputcol' not in kwargs:
kwargs['outputcol'] = 'LnGrowth'
return np.log(growth(eqdata, **kwargs)) | def function[ln_growth, parameter[eqdata]]:
constant[
Return the natural log of growth.
See also
--------
:func:`growth`
]
if compare[constant[outputcol] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:]
call[name[kwargs]][constant[outputcol]] assign[=] constant[LnGrowth]
return[call[name[np].log, parameter[call[name[growth], parameter[name[eqdata]]]]]] | keyword[def] identifier[ln_growth] ( identifier[eqdata] ,** identifier[kwargs] ):
literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] :
identifier[kwargs] [ literal[string] ]= literal[string]
keyword[return] identifier[np] . identifier[log] ( identifier[growth] ( identifier[eqdata] ,** identifier[kwargs] )) | def ln_growth(eqdata, **kwargs):
"""
Return the natural log of growth.
See also
--------
:func:`growth`
"""
if 'outputcol' not in kwargs:
kwargs['outputcol'] = 'LnGrowth' # depends on [control=['if'], data=['kwargs']]
return np.log(growth(eqdata, **kwargs)) |
def package_depends_on(self, name_a, name_b):
"""Returns dependency information about two packages:
0: A does not depend, directly or indirectly, on B;
1: A depends indirectly on B;
2: A depends directly on B.
"""
assert self._context
if self._dependency_lookup is None:
self._dependency_graph = self._context.get_dependency_graph()
self._dependency_lookup = accessibility(self._dependency_graph)
downstream = self._dependency_lookup.get(name_a, [])
accessible = (name_b in downstream)
if accessible:
neighbours = self._dependency_graph.neighbors(name_a)
return 2 if name_b in neighbours else 1
else:
return 0 | def function[package_depends_on, parameter[self, name_a, name_b]]:
constant[Returns dependency information about two packages:
0: A does not depend, directly or indirectly, on B;
1: A depends indirectly on B;
2: A depends directly on B.
]
assert[name[self]._context]
if compare[name[self]._dependency_lookup is constant[None]] begin[:]
name[self]._dependency_graph assign[=] call[name[self]._context.get_dependency_graph, parameter[]]
name[self]._dependency_lookup assign[=] call[name[accessibility], parameter[name[self]._dependency_graph]]
variable[downstream] assign[=] call[name[self]._dependency_lookup.get, parameter[name[name_a], list[[]]]]
variable[accessible] assign[=] compare[name[name_b] in name[downstream]]
if name[accessible] begin[:]
variable[neighbours] assign[=] call[name[self]._dependency_graph.neighbors, parameter[name[name_a]]]
return[<ast.IfExp object at 0x7da1b175fe80>] | keyword[def] identifier[package_depends_on] ( identifier[self] , identifier[name_a] , identifier[name_b] ):
literal[string]
keyword[assert] identifier[self] . identifier[_context]
keyword[if] identifier[self] . identifier[_dependency_lookup] keyword[is] keyword[None] :
identifier[self] . identifier[_dependency_graph] = identifier[self] . identifier[_context] . identifier[get_dependency_graph] ()
identifier[self] . identifier[_dependency_lookup] = identifier[accessibility] ( identifier[self] . identifier[_dependency_graph] )
identifier[downstream] = identifier[self] . identifier[_dependency_lookup] . identifier[get] ( identifier[name_a] ,[])
identifier[accessible] =( identifier[name_b] keyword[in] identifier[downstream] )
keyword[if] identifier[accessible] :
identifier[neighbours] = identifier[self] . identifier[_dependency_graph] . identifier[neighbors] ( identifier[name_a] )
keyword[return] literal[int] keyword[if] identifier[name_b] keyword[in] identifier[neighbours] keyword[else] literal[int]
keyword[else] :
keyword[return] literal[int] | def package_depends_on(self, name_a, name_b):
"""Returns dependency information about two packages:
0: A does not depend, directly or indirectly, on B;
1: A depends indirectly on B;
2: A depends directly on B.
"""
assert self._context
if self._dependency_lookup is None:
self._dependency_graph = self._context.get_dependency_graph()
self._dependency_lookup = accessibility(self._dependency_graph) # depends on [control=['if'], data=[]]
downstream = self._dependency_lookup.get(name_a, [])
accessible = name_b in downstream
if accessible:
neighbours = self._dependency_graph.neighbors(name_a)
return 2 if name_b in neighbours else 1 # depends on [control=['if'], data=[]]
else:
return 0 |
def replaceChild(self, child, content):
"""
Replace I{child} with the specified I{content}.
@param child: A child element.
@type child: L{Element}
@param content: An element or collection of elements.
@type content: L{Element} or [L{Element},]
"""
if child not in self.children:
raise Exception('child not-found')
index = self.children.index(child)
self.remove(child)
if not isinstance(content, (list, tuple)):
content = (content,)
for node in content:
self.children.insert(index, node.detach())
node.parent = self
index += 1 | def function[replaceChild, parameter[self, child, content]]:
constant[
Replace I{child} with the specified I{content}.
@param child: A child element.
@type child: L{Element}
@param content: An element or collection of elements.
@type content: L{Element} or [L{Element},]
]
if compare[name[child] <ast.NotIn object at 0x7da2590d7190> name[self].children] begin[:]
<ast.Raise object at 0x7da2041da470>
variable[index] assign[=] call[name[self].children.index, parameter[name[child]]]
call[name[self].remove, parameter[name[child]]]
if <ast.UnaryOp object at 0x7da2041d8670> begin[:]
variable[content] assign[=] tuple[[<ast.Name object at 0x7da2041da4d0>]]
for taget[name[node]] in starred[name[content]] begin[:]
call[name[self].children.insert, parameter[name[index], call[name[node].detach, parameter[]]]]
name[node].parent assign[=] name[self]
<ast.AugAssign object at 0x7da1b08e5e10> | keyword[def] identifier[replaceChild] ( identifier[self] , identifier[child] , identifier[content] ):
literal[string]
keyword[if] identifier[child] keyword[not] keyword[in] identifier[self] . identifier[children] :
keyword[raise] identifier[Exception] ( literal[string] )
identifier[index] = identifier[self] . identifier[children] . identifier[index] ( identifier[child] )
identifier[self] . identifier[remove] ( identifier[child] )
keyword[if] keyword[not] identifier[isinstance] ( identifier[content] ,( identifier[list] , identifier[tuple] )):
identifier[content] =( identifier[content] ,)
keyword[for] identifier[node] keyword[in] identifier[content] :
identifier[self] . identifier[children] . identifier[insert] ( identifier[index] , identifier[node] . identifier[detach] ())
identifier[node] . identifier[parent] = identifier[self]
identifier[index] += literal[int] | def replaceChild(self, child, content):
"""
Replace I{child} with the specified I{content}.
@param child: A child element.
@type child: L{Element}
@param content: An element or collection of elements.
@type content: L{Element} or [L{Element},]
"""
if child not in self.children:
raise Exception('child not-found') # depends on [control=['if'], data=[]]
index = self.children.index(child)
self.remove(child)
if not isinstance(content, (list, tuple)):
content = (content,) # depends on [control=['if'], data=[]]
for node in content:
self.children.insert(index, node.detach())
node.parent = self
index += 1 # depends on [control=['for'], data=['node']] |
def handle_request(self, request, *args, **kwargs):
"""Give back list items + config"""
paginator = self.get_paginator()
# Call search first, it will reset page if search is changed
search = self.get_search()
page = self.get_page(paginator)
items = self.get_items(paginator, page)
return {
'search': search,
'page': page,
'page_size': self.get_page_size(),
'num_pages': paginator.num_pages,
'sort': self.get_sort(),
'current_fields': self.get_current_fields(),
'fields': self.get_all_fields(),
'items': items,
} | def function[handle_request, parameter[self, request]]:
constant[Give back list items + config]
variable[paginator] assign[=] call[name[self].get_paginator, parameter[]]
variable[search] assign[=] call[name[self].get_search, parameter[]]
variable[page] assign[=] call[name[self].get_page, parameter[name[paginator]]]
variable[items] assign[=] call[name[self].get_items, parameter[name[paginator], name[page]]]
return[dictionary[[<ast.Constant object at 0x7da20e963550>, <ast.Constant object at 0x7da20e961150>, <ast.Constant object at 0x7da20e961780>, <ast.Constant object at 0x7da20e960640>, <ast.Constant object at 0x7da20e960a00>, <ast.Constant object at 0x7da20e960af0>, <ast.Constant object at 0x7da20e963070>, <ast.Constant object at 0x7da20e9619f0>], [<ast.Name object at 0x7da20e960520>, <ast.Name object at 0x7da20e962230>, <ast.Call object at 0x7da20e963640>, <ast.Attribute object at 0x7da20e962470>, <ast.Call object at 0x7da20e961a20>, <ast.Call object at 0x7da20e963700>, <ast.Call object at 0x7da20e962500>, <ast.Name object at 0x7da20e960fa0>]]] | keyword[def] identifier[handle_request] ( identifier[self] , identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[paginator] = identifier[self] . identifier[get_paginator] ()
identifier[search] = identifier[self] . identifier[get_search] ()
identifier[page] = identifier[self] . identifier[get_page] ( identifier[paginator] )
identifier[items] = identifier[self] . identifier[get_items] ( identifier[paginator] , identifier[page] )
keyword[return] {
literal[string] : identifier[search] ,
literal[string] : identifier[page] ,
literal[string] : identifier[self] . identifier[get_page_size] (),
literal[string] : identifier[paginator] . identifier[num_pages] ,
literal[string] : identifier[self] . identifier[get_sort] (),
literal[string] : identifier[self] . identifier[get_current_fields] (),
literal[string] : identifier[self] . identifier[get_all_fields] (),
literal[string] : identifier[items] ,
} | def handle_request(self, request, *args, **kwargs):
"""Give back list items + config"""
paginator = self.get_paginator()
# Call search first, it will reset page if search is changed
search = self.get_search()
page = self.get_page(paginator)
items = self.get_items(paginator, page)
return {'search': search, 'page': page, 'page_size': self.get_page_size(), 'num_pages': paginator.num_pages, 'sort': self.get_sort(), 'current_fields': self.get_current_fields(), 'fields': self.get_all_fields(), 'items': items} |
def _filename(self, ifo, description, extension, segment):
"""
Construct the standard output filename. Should only be used internally
of the File class.
"""
if extension.startswith('.'):
extension = extension[1:]
# Follow the frame convention of using integer filenames,
# but stretching to cover partially covered seconds.
start = int(segment[0])
end = int(math.ceil(segment[1]))
duration = str(end-start)
start = str(start)
return "%s-%s-%s-%s.%s" % (ifo, description.upper(), start,
duration, extension) | def function[_filename, parameter[self, ifo, description, extension, segment]]:
constant[
Construct the standard output filename. Should only be used internally
of the File class.
]
if call[name[extension].startswith, parameter[constant[.]]] begin[:]
variable[extension] assign[=] call[name[extension]][<ast.Slice object at 0x7da18bc71f30>]
variable[start] assign[=] call[name[int], parameter[call[name[segment]][constant[0]]]]
variable[end] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[call[name[segment]][constant[1]]]]]]
variable[duration] assign[=] call[name[str], parameter[binary_operation[name[end] - name[start]]]]
variable[start] assign[=] call[name[str], parameter[name[start]]]
return[binary_operation[constant[%s-%s-%s-%s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc72f20>, <ast.Call object at 0x7da18bc70d60>, <ast.Name object at 0x7da18bc724d0>, <ast.Name object at 0x7da18bc70f70>, <ast.Name object at 0x7da18bc72da0>]]]] | keyword[def] identifier[_filename] ( identifier[self] , identifier[ifo] , identifier[description] , identifier[extension] , identifier[segment] ):
literal[string]
keyword[if] identifier[extension] . identifier[startswith] ( literal[string] ):
identifier[extension] = identifier[extension] [ literal[int] :]
identifier[start] = identifier[int] ( identifier[segment] [ literal[int] ])
identifier[end] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[segment] [ literal[int] ]))
identifier[duration] = identifier[str] ( identifier[end] - identifier[start] )
identifier[start] = identifier[str] ( identifier[start] )
keyword[return] literal[string] %( identifier[ifo] , identifier[description] . identifier[upper] (), identifier[start] ,
identifier[duration] , identifier[extension] ) | def _filename(self, ifo, description, extension, segment):
"""
Construct the standard output filename. Should only be used internally
of the File class.
"""
if extension.startswith('.'):
extension = extension[1:] # depends on [control=['if'], data=[]]
# Follow the frame convention of using integer filenames,
# but stretching to cover partially covered seconds.
start = int(segment[0])
end = int(math.ceil(segment[1]))
duration = str(end - start)
start = str(start)
return '%s-%s-%s-%s.%s' % (ifo, description.upper(), start, duration, extension) |
def start_task(self, method, *args, **kwargs):
""" Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
"""
thread = threading.Thread(target=method, args=args, kwargs=kwargs)
thread.is_daemon = False
thread.start()
self.threads.append(thread) | def function[start_task, parameter[self, method]]:
constant[ Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
]
variable[thread] assign[=] call[name[threading].Thread, parameter[]]
name[thread].is_daemon assign[=] constant[False]
call[name[thread].start, parameter[]]
call[name[self].threads.append, parameter[name[thread]]] | keyword[def] identifier[start_task] ( identifier[self] , identifier[method] ,* identifier[args] ,** identifier[kwargs] ):
literal[string]
identifier[thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[method] , identifier[args] = identifier[args] , identifier[kwargs] = identifier[kwargs] )
identifier[thread] . identifier[is_daemon] = keyword[False]
identifier[thread] . identifier[start] ()
identifier[self] . identifier[threads] . identifier[append] ( identifier[thread] ) | def start_task(self, method, *args, **kwargs):
""" Start a task in a separate thread
Args:
method: the method to start in a separate thread
args: Accept args/kwargs arguments
"""
thread = threading.Thread(target=method, args=args, kwargs=kwargs)
thread.is_daemon = False
thread.start()
self.threads.append(thread) |
def normalize(vat_id):
"""
Accepts a VAT ID and normaizes it, getting rid of spaces, periods, dashes
etc and converting it to upper case.
:param vat_id:
The VAT ID to check. Allows "GR" prefix for Greece, even though it
should be "EL".
:raises:
ValueError - If the is not a string or is not in the format of two characters plus an identifier
:return:
None if the VAT ID is blank or not for an EU country or Norway
Otherwise a normalized string containing the VAT ID
"""
if not vat_id:
return None
if not isinstance(vat_id, str_cls):
raise ValueError('VAT ID is not a string')
if len(vat_id) < 3:
raise ValueError('VAT ID must be at least three character long')
# Normalize the ID for simpler regexes
vat_id = re.sub('\\s+', '', vat_id)
vat_id = vat_id.replace('-', '')
vat_id = vat_id.replace('.', '')
vat_id = vat_id.upper()
country_prefix = vat_id[0:2]
# Fix people using GR prefix for Greece
if country_prefix == 'GR':
vat_id = 'EL' + vat_id[2:]
country_prefix = 'EL'
if country_prefix not in ID_PATTERNS:
return None
return vat_id | def function[normalize, parameter[vat_id]]:
constant[
Accepts a VAT ID and normaizes it, getting rid of spaces, periods, dashes
etc and converting it to upper case.
:param vat_id:
The VAT ID to check. Allows "GR" prefix for Greece, even though it
should be "EL".
:raises:
ValueError - If the is not a string or is not in the format of two characters plus an identifier
:return:
None if the VAT ID is blank or not for an EU country or Norway
Otherwise a normalized string containing the VAT ID
]
if <ast.UnaryOp object at 0x7da20c6ab2b0> begin[:]
return[constant[None]]
if <ast.UnaryOp object at 0x7da20c6ab4f0> begin[:]
<ast.Raise object at 0x7da20c6ab250>
if compare[call[name[len], parameter[name[vat_id]]] less[<] constant[3]] begin[:]
<ast.Raise object at 0x7da20c6a8100>
variable[vat_id] assign[=] call[name[re].sub, parameter[constant[\s+], constant[], name[vat_id]]]
variable[vat_id] assign[=] call[name[vat_id].replace, parameter[constant[-], constant[]]]
variable[vat_id] assign[=] call[name[vat_id].replace, parameter[constant[.], constant[]]]
variable[vat_id] assign[=] call[name[vat_id].upper, parameter[]]
variable[country_prefix] assign[=] call[name[vat_id]][<ast.Slice object at 0x7da1b25d6500>]
if compare[name[country_prefix] equal[==] constant[GR]] begin[:]
variable[vat_id] assign[=] binary_operation[constant[EL] + call[name[vat_id]][<ast.Slice object at 0x7da1b25d6ce0>]]
variable[country_prefix] assign[=] constant[EL]
if compare[name[country_prefix] <ast.NotIn object at 0x7da2590d7190> name[ID_PATTERNS]] begin[:]
return[constant[None]]
return[name[vat_id]] | keyword[def] identifier[normalize] ( identifier[vat_id] ):
literal[string]
keyword[if] keyword[not] identifier[vat_id] :
keyword[return] keyword[None]
keyword[if] keyword[not] identifier[isinstance] ( identifier[vat_id] , identifier[str_cls] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[len] ( identifier[vat_id] )< literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[vat_id] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[vat_id] )
identifier[vat_id] = identifier[vat_id] . identifier[replace] ( literal[string] , literal[string] )
identifier[vat_id] = identifier[vat_id] . identifier[replace] ( literal[string] , literal[string] )
identifier[vat_id] = identifier[vat_id] . identifier[upper] ()
identifier[country_prefix] = identifier[vat_id] [ literal[int] : literal[int] ]
keyword[if] identifier[country_prefix] == literal[string] :
identifier[vat_id] = literal[string] + identifier[vat_id] [ literal[int] :]
identifier[country_prefix] = literal[string]
keyword[if] identifier[country_prefix] keyword[not] keyword[in] identifier[ID_PATTERNS] :
keyword[return] keyword[None]
keyword[return] identifier[vat_id] | def normalize(vat_id):
"""
Accepts a VAT ID and normaizes it, getting rid of spaces, periods, dashes
etc and converting it to upper case.
:param vat_id:
The VAT ID to check. Allows "GR" prefix for Greece, even though it
should be "EL".
:raises:
ValueError - If the is not a string or is not in the format of two characters plus an identifier
:return:
None if the VAT ID is blank or not for an EU country or Norway
Otherwise a normalized string containing the VAT ID
"""
if not vat_id:
return None # depends on [control=['if'], data=[]]
if not isinstance(vat_id, str_cls):
raise ValueError('VAT ID is not a string') # depends on [control=['if'], data=[]]
if len(vat_id) < 3:
raise ValueError('VAT ID must be at least three character long') # depends on [control=['if'], data=[]]
# Normalize the ID for simpler regexes
vat_id = re.sub('\\s+', '', vat_id)
vat_id = vat_id.replace('-', '')
vat_id = vat_id.replace('.', '')
vat_id = vat_id.upper()
country_prefix = vat_id[0:2]
# Fix people using GR prefix for Greece
if country_prefix == 'GR':
vat_id = 'EL' + vat_id[2:]
country_prefix = 'EL' # depends on [control=['if'], data=['country_prefix']]
if country_prefix not in ID_PATTERNS:
return None # depends on [control=['if'], data=[]]
return vat_id |
def _try_resolve_parameter_refs(self, input, parameters):
"""
Try to resolve parameter references on the given input object. The object could be of any type.
If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned
unmodified. If the single key in dictionary is one of the supported intrinsic function types,
go ahead and try to resolve it.
:param input: Input object to resolve
:param parameters: Parameter values used to for ref substitution
:return:
"""
if not self._is_intrinsic_dict(input):
return input
function_type = list(input.keys())[0]
return self.supported_intrinsics[function_type].resolve_parameter_refs(input, parameters) | def function[_try_resolve_parameter_refs, parameter[self, input, parameters]]:
constant[
Try to resolve parameter references on the given input object. The object could be of any type.
If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned
unmodified. If the single key in dictionary is one of the supported intrinsic function types,
go ahead and try to resolve it.
:param input: Input object to resolve
:param parameters: Parameter values used to for ref substitution
:return:
]
if <ast.UnaryOp object at 0x7da20c7ca0e0> begin[:]
return[name[input]]
variable[function_type] assign[=] call[call[name[list], parameter[call[name[input].keys, parameter[]]]]][constant[0]]
return[call[call[name[self].supported_intrinsics][name[function_type]].resolve_parameter_refs, parameter[name[input], name[parameters]]]] | keyword[def] identifier[_try_resolve_parameter_refs] ( identifier[self] , identifier[input] , identifier[parameters] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_is_intrinsic_dict] ( identifier[input] ):
keyword[return] identifier[input]
identifier[function_type] = identifier[list] ( identifier[input] . identifier[keys] ())[ literal[int] ]
keyword[return] identifier[self] . identifier[supported_intrinsics] [ identifier[function_type] ]. identifier[resolve_parameter_refs] ( identifier[input] , identifier[parameters] ) | def _try_resolve_parameter_refs(self, input, parameters):
"""
Try to resolve parameter references on the given input object. The object could be of any type.
If the input is not in the format used by intrinsics (ie. dictionary with one key), input is returned
unmodified. If the single key in dictionary is one of the supported intrinsic function types,
go ahead and try to resolve it.
:param input: Input object to resolve
:param parameters: Parameter values used to for ref substitution
:return:
"""
if not self._is_intrinsic_dict(input):
return input # depends on [control=['if'], data=[]]
function_type = list(input.keys())[0]
return self.supported_intrinsics[function_type].resolve_parameter_refs(input, parameters) |
def permission_required(perms, login_url=None, raise_exception=False, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Analog of django's builtin ``permission_required`` decorator, extended
    to honour per-slug ACLRules on top of the default permissions for
    anonymous and logged-in users.
    When a rule matches the slug, the user must belong to that rule's
    allowed users; when no rule matches, the default permissions apply.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if check_perms(perms, request.user, kwargs['slug'],
                           raise_exception=raise_exception):
                return view_func(request, *args, **kwargs)

            if is_authenticated(request.user):
                # Authenticated but not allowed: render a 403 page or raise.
                if not WALIKI_RENDER_403:
                    raise PermissionDenied
                return render(request, 'waliki/403.html', kwargs, status=403)

            # Anonymous user: bounce to the login page.
            full_uri = request.build_absolute_uri()
            # urlparse chokes on lazy objects in Python 3, force to str
            login_target = force_str(
                resolve_url(login_url or settings.LOGIN_URL))
            # When the login URL shares scheme and net location with the
            # current request, a relative path is enough for "next".
            login_scheme, login_netloc = urlparse(login_target)[:2]
            uri_scheme, uri_netloc = urlparse(full_uri)[:2]
            same_scheme = not login_scheme or login_scheme == uri_scheme
            same_netloc = not login_netloc or login_netloc == uri_netloc
            next_url = request.get_full_path() if (same_scheme and same_netloc) else full_uri
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(
                next_url, login_target, redirect_field_name)
        return _wrapped_view
    return decorator
constant[
this is analog to django's builtin ``permission_required`` decorator, but
improved to check per slug ACLRules and default permissions for
anonymous and logged in users
if there is a rule affecting a slug, the user needs to be part of the
rule's allowed users. If there isn't a matching rule, defaults permissions
apply.
]
def function[decorator, parameter[view_func]]:
def function[_wrapped_view, parameter[request]]:
if call[name[check_perms], parameter[name[perms], name[request].user, call[name[kwargs]][constant[slug]]]] begin[:]
return[call[name[view_func], parameter[name[request], <ast.Starred object at 0x7da207f998d0>]]]
if call[name[is_authenticated], parameter[name[request].user]] begin[:]
if name[WALIKI_RENDER_403] begin[:]
return[call[name[render], parameter[name[request], constant[waliki/403.html], name[kwargs]]]]
variable[path] assign[=] call[name[request].build_absolute_uri, parameter[]]
variable[resolved_login_url] assign[=] call[name[force_str], parameter[call[name[resolve_url], parameter[<ast.BoolOp object at 0x7da18ede69b0>]]]]
<ast.Tuple object at 0x7da18ede6860> assign[=] call[call[name[urlparse], parameter[name[resolved_login_url]]]][<ast.Slice object at 0x7da18ede5c90>]
<ast.Tuple object at 0x7da18ede61a0> assign[=] call[call[name[urlparse], parameter[name[path]]]][<ast.Slice object at 0x7da18ede41f0>]
if <ast.BoolOp object at 0x7da18ede7910> begin[:]
variable[path] assign[=] call[name[request].get_full_path, parameter[]]
from relative_module[django.contrib.auth.views] import module[redirect_to_login]
return[call[name[redirect_to_login], parameter[name[path], name[resolved_login_url], name[redirect_field_name]]]]
return[name[_wrapped_view]]
return[name[decorator]] | keyword[def] identifier[permission_required] ( identifier[perms] , identifier[login_url] = keyword[None] , identifier[raise_exception] = keyword[False] , identifier[redirect_field_name] = identifier[REDIRECT_FIELD_NAME] ):
literal[string]
keyword[def] identifier[decorator] ( identifier[view_func] ):
@ identifier[wraps] ( identifier[view_func] , identifier[assigned] = identifier[available_attrs] ( identifier[view_func] ))
keyword[def] identifier[_wrapped_view] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] ):
keyword[if] identifier[check_perms] ( identifier[perms] , identifier[request] . identifier[user] , identifier[kwargs] [ literal[string] ], identifier[raise_exception] = identifier[raise_exception] ):
keyword[return] identifier[view_func] ( identifier[request] ,* identifier[args] ,** identifier[kwargs] )
keyword[if] identifier[is_authenticated] ( identifier[request] . identifier[user] ):
keyword[if] identifier[WALIKI_RENDER_403] :
keyword[return] identifier[render] ( identifier[request] , literal[string] , identifier[kwargs] , identifier[status] = literal[int] )
keyword[else] :
keyword[raise] identifier[PermissionDenied]
identifier[path] = identifier[request] . identifier[build_absolute_uri] ()
identifier[resolved_login_url] = identifier[force_str] (
identifier[resolve_url] ( identifier[login_url] keyword[or] identifier[settings] . identifier[LOGIN_URL] ))
identifier[login_scheme] , identifier[login_netloc] = identifier[urlparse] ( identifier[resolved_login_url] )[: literal[int] ]
identifier[current_scheme] , identifier[current_netloc] = identifier[urlparse] ( identifier[path] )[: literal[int] ]
keyword[if] (( keyword[not] identifier[login_scheme] keyword[or] identifier[login_scheme] == identifier[current_scheme] ) keyword[and]
( keyword[not] identifier[login_netloc] keyword[or] identifier[login_netloc] == identifier[current_netloc] )):
identifier[path] = identifier[request] . identifier[get_full_path] ()
keyword[from] identifier[django] . identifier[contrib] . identifier[auth] . identifier[views] keyword[import] identifier[redirect_to_login]
keyword[return] identifier[redirect_to_login] (
identifier[path] , identifier[resolved_login_url] , identifier[redirect_field_name] )
keyword[return] identifier[_wrapped_view]
keyword[return] identifier[decorator] | def permission_required(perms, login_url=None, raise_exception=False, redirect_field_name=REDIRECT_FIELD_NAME):
"""
this is analog to django's builtin ``permission_required`` decorator, but
improved to check per slug ACLRules and default permissions for
anonymous and logged in users
if there is a rule affecting a slug, the user needs to be part of the
rule's allowed users. If there isn't a matching rule, defaults permissions
apply.
"""
def decorator(view_func):
@wraps(view_func, assigned=available_attrs(view_func))
def _wrapped_view(request, *args, **kwargs):
if check_perms(perms, request.user, kwargs['slug'], raise_exception=raise_exception):
return view_func(request, *args, **kwargs) # depends on [control=['if'], data=[]]
if is_authenticated(request.user):
if WALIKI_RENDER_403:
return render(request, 'waliki/403.html', kwargs, status=403) # depends on [control=['if'], data=[]]
else:
raise PermissionDenied # depends on [control=['if'], data=[]]
path = request.build_absolute_uri()
# urlparse chokes on lazy objects in Python 3, force to str
resolved_login_url = force_str(resolve_url(login_url or settings.LOGIN_URL))
# If the login url is the same scheme and net location then just
# use the path as the "next" url.
(login_scheme, login_netloc) = urlparse(resolved_login_url)[:2]
(current_scheme, current_netloc) = urlparse(path)[:2]
if (not login_scheme or login_scheme == current_scheme) and (not login_netloc or login_netloc == current_netloc):
path = request.get_full_path() # depends on [control=['if'], data=[]]
from django.contrib.auth.views import redirect_to_login
return redirect_to_login(path, resolved_login_url, redirect_field_name)
return _wrapped_view
return decorator |
def apply(self, doc):
    """
    Yield a ``TemporaryCaptionMention`` for every Caption of a Document.
    :param doc: The ``Document`` to parse.
    :type doc: ``Document``
    :raises TypeError: If the input doc is not of type ``Document``.
    """
    if isinstance(doc, Document):
        # Wrap each caption of the document in a temporary mention.
        for cap in doc.captions:
            yield TemporaryCaptionMention(cap)
    else:
        raise TypeError(
            "Input Contexts to MentionCaptions.apply() must be of type Document"
        )
constant[
Generate MentionCaptions from a Document by parsing all of its Captions.
:param doc: The ``Document`` to parse.
:type doc: ``Document``
:raises TypeError: If the input doc is not of type ``Document``.
]
if <ast.UnaryOp object at 0x7da1b26ac7c0> begin[:]
<ast.Raise object at 0x7da1b26adfc0>
for taget[name[caption]] in starred[name[doc].captions] begin[:]
<ast.Yield object at 0x7da1b26ae290> | keyword[def] identifier[apply] ( identifier[self] , identifier[doc] ):
literal[string]
keyword[if] keyword[not] identifier[isinstance] ( identifier[doc] , identifier[Document] ):
keyword[raise] identifier[TypeError] (
literal[string]
)
keyword[for] identifier[caption] keyword[in] identifier[doc] . identifier[captions] :
keyword[yield] identifier[TemporaryCaptionMention] ( identifier[caption] ) | def apply(self, doc):
"""
Generate MentionCaptions from a Document by parsing all of its Captions.
:param doc: The ``Document`` to parse.
:type doc: ``Document``
:raises TypeError: If the input doc is not of type ``Document``.
"""
if not isinstance(doc, Document):
raise TypeError('Input Contexts to MentionCaptions.apply() must be of type Document') # depends on [control=['if'], data=[]]
for caption in doc.captions:
yield TemporaryCaptionMention(caption) # depends on [control=['for'], data=['caption']] |
def get_mode_name(mode_const):
    """\
    Return the mode name registered for *mode_const*.

    :param int mode_const: The mode constant (see :py:module:`segno.consts`)
    :raises ModeError: If no mode maps to ``mode_const``.
    """
    matches = (name for name, const in consts.MODE_MAPPING.items()
               if const == mode_const)
    for name in matches:
        return name
    raise ModeError('Unknown mode "{0}"'.format(mode_const))
constant[ Returns the mode name for the provided mode constant.
:param int mode_const: The mode constant (see :py:module:`segno.consts`)
]
for taget[tuple[[<ast.Name object at 0x7da207f02d70>, <ast.Name object at 0x7da207f03d30>]]] in starred[call[name[consts].MODE_MAPPING.items, parameter[]]] begin[:]
if compare[name[val] equal[==] name[mode_const]] begin[:]
return[name[name]]
<ast.Raise object at 0x7da207f00f40> | keyword[def] identifier[get_mode_name] ( identifier[mode_const] ):
literal[string]
keyword[for] identifier[name] , identifier[val] keyword[in] identifier[consts] . identifier[MODE_MAPPING] . identifier[items] ():
keyword[if] identifier[val] == identifier[mode_const] :
keyword[return] identifier[name]
keyword[raise] identifier[ModeError] ( literal[string] . identifier[format] ( identifier[mode_const] )) | def get_mode_name(mode_const):
""" Returns the mode name for the provided mode constant.
:param int mode_const: The mode constant (see :py:module:`segno.consts`)
"""
for (name, val) in consts.MODE_MAPPING.items():
if val == mode_const:
return name # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
raise ModeError('Unknown mode "{0}"'.format(mode_const)) |
def sync_nodes(self, clusters):
    """
    Syncs the enabled/disabled status of nodes existing in HAProxy based
    on the given clusters.
    This is used to inform HAProxy of up/down nodes without necessarily
    doing a restart of the process.
    :param clusters: cluster definitions used to decide which nodes
        should currently be enabled (passed through to
        ``get_current_nodes``).
    """
    logger.info("Syncing HAProxy backends.")
    current_nodes, enabled_nodes = self.get_current_nodes(clusters)
    for cluster_name, nodes in six.iteritems(current_nodes):
        for node in nodes:
            # Pick the socket command: enable the node if the cluster
            # state says it should be up, otherwise disable it.
            if node["svname"] in enabled_nodes[cluster_name]:
                command = self.control.enable_node
            else:
                command = self.control.disable_node
            try:
                response = command(cluster_name, node["svname"])
            except Exception:
                # The socket call itself failed: flag a full restart but
                # keep trying the remaining nodes.
                logger.exception("Error when enabling/disabling node")
                self.restart_required = True
            else:
                # A non-empty response means HAProxy rejected the command;
                # flag a restart and abort the sync early.
                if response:
                    logger.error(
                        "Socket command for %s node %s failed: %s",
                        cluster_name, node["svname"], response
                    )
                    self.restart_required = True
                    return
    logger.info("HAProxy nodes/servers synced.")
constant[
Syncs the enabled/disabled status of nodes existing in HAProxy based
on the given clusters.
This is used to inform HAProxy of up/down nodes without necessarily
doing a restart of the process.
]
call[name[logger].info, parameter[constant[Syncing HAProxy backends.]]]
<ast.Tuple object at 0x7da204963d00> assign[=] call[name[self].get_current_nodes, parameter[name[clusters]]]
for taget[tuple[[<ast.Name object at 0x7da2049600d0>, <ast.Name object at 0x7da204962ec0>]]] in starred[call[name[six].iteritems, parameter[name[current_nodes]]]] begin[:]
for taget[name[node]] in starred[name[nodes]] begin[:]
if compare[call[name[node]][constant[svname]] in call[name[enabled_nodes]][name[cluster_name]]] begin[:]
variable[command] assign[=] name[self].control.enable_node
<ast.Try object at 0x7da204962110>
call[name[logger].info, parameter[constant[HAProxy nodes/servers synced.]]] | keyword[def] identifier[sync_nodes] ( identifier[self] , identifier[clusters] ):
literal[string]
identifier[logger] . identifier[info] ( literal[string] )
identifier[current_nodes] , identifier[enabled_nodes] = identifier[self] . identifier[get_current_nodes] ( identifier[clusters] )
keyword[for] identifier[cluster_name] , identifier[nodes] keyword[in] identifier[six] . identifier[iteritems] ( identifier[current_nodes] ):
keyword[for] identifier[node] keyword[in] identifier[nodes] :
keyword[if] identifier[node] [ literal[string] ] keyword[in] identifier[enabled_nodes] [ identifier[cluster_name] ]:
identifier[command] = identifier[self] . identifier[control] . identifier[enable_node]
keyword[else] :
identifier[command] = identifier[self] . identifier[control] . identifier[disable_node]
keyword[try] :
identifier[response] = identifier[command] ( identifier[cluster_name] , identifier[node] [ literal[string] ])
keyword[except] identifier[Exception] :
identifier[logger] . identifier[exception] ( literal[string] )
identifier[self] . identifier[restart_required] = keyword[True]
keyword[else] :
keyword[if] identifier[response] :
identifier[logger] . identifier[error] (
literal[string] ,
identifier[cluster_name] , identifier[node] [ literal[string] ], identifier[response]
)
identifier[self] . identifier[restart_required] = keyword[True]
keyword[return]
identifier[logger] . identifier[info] ( literal[string] ) | def sync_nodes(self, clusters):
"""
Syncs the enabled/disabled status of nodes existing in HAProxy based
on the given clusters.
This is used to inform HAProxy of up/down nodes without necessarily
doing a restart of the process.
"""
logger.info('Syncing HAProxy backends.')
(current_nodes, enabled_nodes) = self.get_current_nodes(clusters)
for (cluster_name, nodes) in six.iteritems(current_nodes):
for node in nodes:
if node['svname'] in enabled_nodes[cluster_name]:
command = self.control.enable_node # depends on [control=['if'], data=[]]
else:
command = self.control.disable_node
try:
response = command(cluster_name, node['svname']) # depends on [control=['try'], data=[]]
except Exception:
logger.exception('Error when enabling/disabling node')
self.restart_required = True # depends on [control=['except'], data=[]]
else:
if response:
logger.error('Socket command for %s node %s failed: %s', cluster_name, node['svname'], response)
self.restart_required = True
return # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] # depends on [control=['for'], data=[]]
logger.info('HAProxy nodes/servers synced.') |
def init(name, *args):
    """
    Instantiate the matcher registered under ``name``, forwarding the
    variadic arguments to its constructor. Acts as a delegator proxy.
    Arguments:
        name (str): matcher class name or alias to execute.
        *args (mixed): variadic argument
    Returns:
        matcher: matcher instance.
    Raises:
        ValueError: if matcher was not found.
    """
    matcher = get(name)
    if matcher:
        return matcher(*args)
    raise ValueError('Cannot find matcher: {}'.format(name))
constant[
Initializes a matcher instance passing variadic arguments to
its constructor. Acts as a delegator proxy.
Arguments:
name (str): matcher class name or alias to execute.
*args (mixed): variadic argument
Returns:
matcher: matcher instance.
Raises:
ValueError: if matcher was not found.
]
variable[matcher] assign[=] call[name[get], parameter[name[name]]]
if <ast.UnaryOp object at 0x7da1b02f7160> begin[:]
<ast.Raise object at 0x7da1b02f6f20>
return[call[name[matcher], parameter[<ast.Starred object at 0x7da1b02f4f40>]]] | keyword[def] identifier[init] ( identifier[name] ,* identifier[args] ):
literal[string]
identifier[matcher] = identifier[get] ( identifier[name] )
keyword[if] keyword[not] identifier[matcher] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[name] ))
keyword[return] identifier[matcher] (* identifier[args] ) | def init(name, *args):
"""
Initializes a matcher instance passing variadic arguments to
its constructor. Acts as a delegator proxy.
Arguments:
name (str): matcher class name or alias to execute.
*args (mixed): variadic argument
Returns:
matcher: matcher instance.
Raises:
ValueError: if matcher was not found.
"""
matcher = get(name)
if not matcher:
raise ValueError('Cannot find matcher: {}'.format(name)) # depends on [control=['if'], data=[]]
return matcher(*args) |
def from_pubkey_line(cls, line):
    """Generate Key instance from a string. Raise ValueError if string is
    malformed.

    :param line: an authorized_keys-style public key line, optionally
        prefixed with options.
    :raises ValueError: on an empty key, wrong number of fields, invalid
        base64 payload, or an unknown key type.
    :return: an instance of the key class matching the embedded key type.
    """
    options, key_without_options = cls._extract_options(line)
    if key_without_options == '':
        raise ValueError("Empty key")
    # the key (with options stripped out) should consist of the fields
    # "type", "data", and optionally "comment", separated by a space.
    # The comment field may contain additional spaces
    fields = key_without_options.strip().split(None, 2)  # maxsplit=2
    if len(fields) == 3:
        type_str, data64, comment = fields
    elif len(fields) == 2:
        type_str, data64 = fields
        comment = None
    else:  # len(fields) <= 1
        raise ValueError("Key has insufficient number of fields")
    try:
        data = b64decode(data64)
    except (binascii.Error, TypeError):
        raise ValueError("Key contains invalid data")
    # The authoritative key type is the first length-prefixed field inside
    # the decoded payload, not the textual "type" field of the line.
    key_type = next(iter_prefixed(data))
    if key_type == b'ssh-rsa':
        key_class = RSAKey
    elif key_type == b'ssh-dss':
        key_class = DSAKey
    elif key_type.startswith(b'ecdsa-'):
        key_class = ECDSAKey
    else:
        raise ValueError('Unknown key type {}'.format(key_type))
    # Reuse the already-decoded payload instead of base64-decoding twice
    # (b64decode is pure, so this is behaviorally identical and cheaper).
    return key_class(data, comment, options=options)
constant[Generate Key instance from a a string. Raise ValueError if string is
malformed]
<ast.Tuple object at 0x7da1b1455870> assign[=] call[name[cls]._extract_options, parameter[name[line]]]
if compare[name[key_without_options] equal[==] constant[]] begin[:]
<ast.Raise object at 0x7da1b1455e10>
variable[fields] assign[=] call[call[name[key_without_options].strip, parameter[]].split, parameter[constant[None], constant[2]]]
if compare[call[name[len], parameter[name[fields]]] equal[==] constant[3]] begin[:]
<ast.Tuple object at 0x7da1b14547c0> assign[=] name[fields]
<ast.Try object at 0x7da1b1454e80>
variable[key_type] assign[=] call[name[next], parameter[call[name[iter_prefixed], parameter[name[data]]]]]
if compare[name[key_type] equal[==] constant[b'ssh-rsa']] begin[:]
variable[key_class] assign[=] name[RSAKey]
return[call[name[key_class], parameter[call[name[b64decode], parameter[name[data64]]], name[comment]]]] | keyword[def] identifier[from_pubkey_line] ( identifier[cls] , identifier[line] ):
literal[string]
identifier[options] , identifier[key_without_options] = identifier[cls] . identifier[_extract_options] ( identifier[line] )
keyword[if] identifier[key_without_options] == literal[string] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[fields] = identifier[key_without_options] . identifier[strip] (). identifier[split] ( keyword[None] , literal[int] )
keyword[if] identifier[len] ( identifier[fields] )== literal[int] :
identifier[type_str] , identifier[data64] , identifier[comment] = identifier[fields]
keyword[elif] identifier[len] ( identifier[fields] )== literal[int] :
identifier[type_str] , identifier[data64] = identifier[fields]
identifier[comment] = keyword[None]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[try] :
identifier[data] = identifier[b64decode] ( identifier[data64] )
keyword[except] ( identifier[binascii] . identifier[Error] , identifier[TypeError] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[key_type] = identifier[next] ( identifier[iter_prefixed] ( identifier[data] ))
keyword[if] identifier[key_type] == literal[string] :
identifier[key_class] = identifier[RSAKey]
keyword[elif] identifier[key_type] == literal[string] :
identifier[key_class] = identifier[DSAKey]
keyword[elif] identifier[key_type] . identifier[startswith] ( literal[string] ):
identifier[key_class] = identifier[ECDSAKey]
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[key_type] ))
keyword[return] identifier[key_class] ( identifier[b64decode] ( identifier[data64] ), identifier[comment] , identifier[options] = identifier[options] ) | def from_pubkey_line(cls, line):
"""Generate Key instance from a a string. Raise ValueError if string is
malformed"""
(options, key_without_options) = cls._extract_options(line)
if key_without_options == '':
raise ValueError('Empty key') # depends on [control=['if'], data=[]]
# the key (with options stripped out) should consist of the fields
# "type", "data", and optionally "comment", separated by a space.
# The comment field may contain additional spaces
fields = key_without_options.strip().split(None, 2) # maxsplit=2
if len(fields) == 3:
(type_str, data64, comment) = fields # depends on [control=['if'], data=[]]
elif len(fields) == 2:
(type_str, data64) = fields
comment = None # depends on [control=['if'], data=[]]
else: # len(fields) <= 1
raise ValueError('Key has insufficient number of fields')
try:
data = b64decode(data64) # depends on [control=['try'], data=[]]
except (binascii.Error, TypeError):
raise ValueError('Key contains invalid data') # depends on [control=['except'], data=[]]
key_type = next(iter_prefixed(data))
if key_type == b'ssh-rsa':
key_class = RSAKey # depends on [control=['if'], data=[]]
elif key_type == b'ssh-dss':
key_class = DSAKey # depends on [control=['if'], data=[]]
elif key_type.startswith(b'ecdsa-'):
key_class = ECDSAKey # depends on [control=['if'], data=[]]
else:
raise ValueError('Unknown key type {}'.format(key_type))
return key_class(b64decode(data64), comment, options=options) |
def matplotlib_linegraph (plotdata, pconfig=None):
    """
    Plot a line graph with Matplot lib and return a HTML string. Either embeds a base64
    encoded image within HTML or writes the plot and links to it. Should be called by
    plot_bargraph, which properly formats the input data.

    :param plotdata: list of datasets; each dataset is a list of series
        dicts carrying at least ``name`` and ``data`` keys.
    :param pconfig: optional plot configuration dict (id, title, axis
        limits, categories, data_labels, plot bands, ...).
    :return: HTML string containing the rendered plot(s).
    """
    if pconfig is None:
        pconfig = {}
    # Plot group ID
    if pconfig.get('id') is None:
        pconfig['id'] = 'mqc_mplplot_'+''.join(random.sample(letters, 10))
    # Sanitise plot ID and check for duplicates
    pconfig['id'] = report.save_htmlid(pconfig['id'])
    # Individual plot IDs
    pids = []
    for k in range(len(plotdata)):
        # NOTE(review): bare except — data_labels may be missing, shorter
        # than plotdata, or not subscriptable; fall back to a 1-based index.
        try:
            name = pconfig['data_labels'][k]['name']
        except:
            name = k+1
        pid = 'mqc_{}_{}'.format(pconfig['id'], name)
        pid = report.save_htmlid(pid, skiplint=True)
        pids.append(pid)
    html = '<p class="text-info"><small><span class="glyphicon glyphicon-picture" aria-hidden="true"></span> ' + \
        'Flat image plot. Toolbox functions such as highlighting / hiding samples will not work ' + \
        '(see the <a href="http://multiqc.info/docs/#flat--interactive-plots" target="_blank">docs</a>).</small></p>'
    html += '<div class="mqc_mplplot_plotgroup" id="{}">'.format(pconfig['id'])
    # Same defaults as HighCharts for consistency
    default_colors = ['#7cb5ec', '#434348', '#90ed7d', '#f7a35c', '#8085e9',
                      '#f15c80', '#e4d354', '#2b908f', '#f45b5b', '#91e8e1']
    # Buttons to cycle through different datasets
    if len(plotdata) > 1 and not config.simple_output:
        html += '<div class="btn-group mpl_switch_group mqc_mplplot_bargraph_switchds">\n'
        for k, p in enumerate(plotdata):
            pid = pids[k]
            active = 'active' if k == 0 else ''
            try:
                name = pconfig['data_labels'][k]['name']
            except:
                name = k+1
            html += '<button class="btn btn-default btn-sm {a}" data-target="#{pid}">{n}</button>\n'.format(a=active, pid=pid, n=name)
        html += '</div>\n\n'
    # Go through datasets creating plots
    for pidx, pdata in enumerate(plotdata):
        # Plot ID
        pid = pids[pidx]
        # Save plot data to file. Series points are either [x, y] pairs or
        # bare y values indexed by position/category.
        fdata = OrderedDict()
        lastcats = None
        sharedcats = True
        for d in pdata:
            fdata[d['name']] = OrderedDict()
            for i, x in enumerate(d['data']):
                if type(x) is list:
                    fdata[d['name']][str(x[0])] = x[1]
                    # Check to see if all categories are the same
                    if lastcats is None:
                        lastcats = [x[0] for x in d['data']]
                    elif lastcats != [x[0] for x in d['data']]:
                        sharedcats = False
                else:
                    try:
                        fdata[d['name']][pconfig['categories'][i]] = x
                    except (KeyError, IndexError):
                        fdata[d['name']][str(i)] = x
        # Custom tsv output if the x axis varies
        if not sharedcats and config.data_format == 'tsv':
            fout = ''
            for d in pdata:
                fout += "\t"+"\t".join([str(x[0]) for x in d['data']])
                fout += "\n{}\t".format(d['name'])
                fout += "\t".join([str(x[1]) for x in d['data']])
                fout += "\n"
            with io.open (os.path.join(config.data_dir, '{}.txt'.format(pid)), 'w', encoding='utf-8') as f:
                print( fout.encode('utf-8', 'ignore').decode('utf-8'), file=f )
        else:
            util_functions.write_data_file(fdata, pid)
        # Set up figure
        fig = plt.figure(figsize=(14, 6), frameon=False)
        axes = fig.add_subplot(111)
        # Go through data series
        for idx, d in enumerate(pdata):
            # Default colour index (cycle through the palette)
            cidx = idx
            while cidx >= len(default_colors):
                cidx -= len(default_colors)
            # Line style
            linestyle = 'solid'
            if d.get('dashStyle', None) == 'Dash':
                linestyle = 'dashed'
            # Reformat data (again)
            try:
                axes.plot([x[0] for x in d['data']], [x[1] for x in d['data']], label=d['name'], color=d.get('color', default_colors[cidx]), linestyle=linestyle, linewidth=1, marker=None)
            except TypeError:
                # Categorical data on x axis
                axes.plot(d['data'], label=d['name'], color=d.get('color', default_colors[cidx]), linewidth=1, marker=None)
        # Tidy up axes
        axes.tick_params(labelsize=8, direction='out', left=False, right=False, top=False, bottom=False)
        axes.set_xlabel(pconfig.get('xlab', ''))
        axes.set_ylabel(pconfig.get('ylab', ''))
        # Dataset specific y label
        try:
            axes.set_ylabel(pconfig['data_labels'][pidx]['ylab'])
        except:
            pass
        # Axis limits: explicit ymin/ymax win; yFloor/yCeiling only clamp
        # the automatically-chosen limits.
        default_ylimits = axes.get_ylim()
        ymin = default_ylimits[0]
        if 'ymin' in pconfig:
            ymin = pconfig['ymin']
        elif 'yFloor' in pconfig:
            ymin = max(pconfig['yFloor'], default_ylimits[0])
        ymax = default_ylimits[1]
        if 'ymax' in pconfig:
            ymax = pconfig['ymax']
        elif 'yCeiling' in pconfig:
            ymax = min(pconfig['yCeiling'], default_ylimits[1])
        if (ymax - ymin) < pconfig.get('yMinRange', 0):
            ymax = ymin + pconfig['yMinRange']
        axes.set_ylim((ymin, ymax))
        # Dataset specific ymax
        try:
            axes.set_ylim((ymin, pconfig['data_labels'][pidx]['ymax']))
        except:
            pass
        # Same limit logic for the x axis.
        default_xlimits = axes.get_xlim()
        xmin = default_xlimits[0]
        if 'xmin' in pconfig:
            xmin = pconfig['xmin']
        elif 'xFloor' in pconfig:
            xmin = max(pconfig['xFloor'], default_xlimits[0])
        xmax = default_xlimits[1]
        if 'xmax' in pconfig:
            xmax = pconfig['xmax']
        elif 'xCeiling' in pconfig:
            xmax = min(pconfig['xCeiling'], default_xlimits[1])
        if (xmax - xmin) < pconfig.get('xMinRange', 0):
            xmax = xmin + pconfig['xMinRange']
        axes.set_xlim((xmin, xmax))
        # Plot title
        if 'title' in pconfig:
            plt.text(0.5, 1.05, pconfig['title'], horizontalalignment='center', fontsize=16, transform=axes.transAxes)
        axes.grid(True, zorder=10, which='both', axis='y', linestyle='-', color='#dedede', linewidth=1)
        # X axis categories, if specified
        if 'categories' in pconfig:
            axes.set_xticks([i for i,v in enumerate(pconfig['categories'])])
            axes.set_xticklabels(pconfig['categories'])
        # Axis lines
        xlim = axes.get_xlim()
        axes.plot([xlim[0], xlim[1]], [0, 0], linestyle='-', color='#dedede', linewidth=2)
        axes.set_axisbelow(True)
        axes.spines['right'].set_visible(False)
        axes.spines['top'].set_visible(False)
        axes.spines['bottom'].set_visible(False)
        axes.spines['left'].set_visible(False)
        # Background colours, if specified
        if 'yPlotBands' in pconfig:
            xlim = axes.get_xlim()
            for pb in pconfig['yPlotBands']:
                axes.barh(pb['from'], xlim[1], height = pb['to']-pb['from'], left=xlim[0], color=pb['color'], linewidth=0, zorder=0)
        if 'xPlotBands' in pconfig:
            ylim = axes.get_ylim()
            for pb in pconfig['xPlotBands']:
                axes.bar(pb['from'], ylim[1], width = pb['to']-pb['from'], bottom=ylim[0], color=pb['color'], linewidth=0, zorder=0)
        # Tight layout - makes sure that legend fits in and stuff.
        # Legend is dropped entirely beyond 15 series to keep it readable.
        if len(pdata) <= 15:
            axes.legend(loc='lower center', bbox_to_anchor=(0, -0.22, 1, .102), ncol=5, mode='expand', fontsize=8, frameon=False)
            plt.tight_layout(rect=[0,0.08,1,0.92])
        else:
            plt.tight_layout(rect=[0,0,1,0.92])
        # Should this plot be hidden on report load?
        hidediv = ''
        if pidx > 0:
            hidediv = ' style="display:none;"'
        # Save the plot to the data directory if export is requests
        if config.export_plots:
            for fformat in config.export_plot_formats:
                # Make the directory if it doesn't already exist
                plot_dir = os.path.join(config.plots_dir, fformat)
                if not os.path.exists(plot_dir):
                    os.makedirs(plot_dir)
                # Save the plot
                plot_fn = os.path.join(plot_dir, '{}.{}'.format(pid, fformat))
                fig.savefig(plot_fn, format=fformat, bbox_inches='tight')
        # Output the figure to a base64 encoded string
        if getattr(get_template_mod(), 'base64_plots', True) is True:
            img_buffer = io.BytesIO()
            fig.savefig(img_buffer, format='png', bbox_inches='tight')
            b64_img = base64.b64encode(img_buffer.getvalue()).decode('utf8')
            img_buffer.close()
            html += '<div class="mqc_mplplot" id="{}"{}><img src="data:image/png;base64,{}" /></div>'.format(pid, hidediv, b64_img)
        # Save to a file and link <img>
        else:
            plot_relpath = os.path.join(config.plots_dir_name, 'png', '{}.png'.format(pid))
            html += '<div class="mqc_mplplot" id="{}"{}><img src="{}" /></div>'.format(pid, hidediv, plot_relpath)
        # Free the figure to avoid accumulating open matplotlib figures.
        plt.close(fig)
    # Close wrapping div
    html += '</div>'
    report.num_mpl_plots += 1
    return html
constant[
Plot a line graph with Matplot lib and return a HTML string. Either embeds a base64
encoded image within HTML or writes the plot and links to it. Should be called by
plot_bargraph, which properly formats the input data.
]
if compare[name[pconfig] is constant[None]] begin[:]
variable[pconfig] assign[=] dictionary[[], []]
if compare[call[name[pconfig].get, parameter[constant[id]]] is constant[None]] begin[:]
call[name[pconfig]][constant[id]] assign[=] binary_operation[constant[mqc_mplplot_] + call[constant[].join, parameter[call[name[random].sample, parameter[name[letters], constant[10]]]]]]
call[name[pconfig]][constant[id]] assign[=] call[name[report].save_htmlid, parameter[call[name[pconfig]][constant[id]]]]
variable[pids] assign[=] list[[]]
for taget[name[k]] in starred[call[name[range], parameter[call[name[len], parameter[name[plotdata]]]]]] begin[:]
<ast.Try object at 0x7da20e956e60>
variable[pid] assign[=] call[constant[mqc_{}_{}].format, parameter[call[name[pconfig]][constant[id]], name[name]]]
variable[pid] assign[=] call[name[report].save_htmlid, parameter[name[pid]]]
call[name[pids].append, parameter[name[pid]]]
variable[html] assign[=] binary_operation[binary_operation[constant[<p class="text-info"><small><span class="glyphicon glyphicon-picture" aria-hidden="true"></span> ] + constant[Flat image plot. Toolbox functions such as highlighting / hiding samples will not work ]] + constant[(see the <a href="http://multiqc.info/docs/#flat--interactive-plots" target="_blank">docs</a>).</small></p>]]
<ast.AugAssign object at 0x7da18fe916f0>
variable[default_colors] assign[=] list[[<ast.Constant object at 0x7da18fe90d90>, <ast.Constant object at 0x7da18fe925f0>, <ast.Constant object at 0x7da18fe93bb0>, <ast.Constant object at 0x7da18fe93f40>, <ast.Constant object at 0x7da18fe914b0>, <ast.Constant object at 0x7da18fe91e10>, <ast.Constant object at 0x7da18fe91b70>, <ast.Constant object at 0x7da18fe93070>, <ast.Constant object at 0x7da18fe93160>, <ast.Constant object at 0x7da18fe92e30>]]
if <ast.BoolOp object at 0x7da18fe903d0> begin[:]
<ast.AugAssign object at 0x7da18fe93010>
for taget[tuple[[<ast.Name object at 0x7da18fe93220>, <ast.Name object at 0x7da18fe923e0>]]] in starred[call[name[enumerate], parameter[name[plotdata]]]] begin[:]
variable[pid] assign[=] call[name[pids]][name[k]]
variable[active] assign[=] <ast.IfExp object at 0x7da18fe93940>
<ast.Try object at 0x7da18fe90fa0>
<ast.AugAssign object at 0x7da18fe91270>
<ast.AugAssign object at 0x7da18fe930d0>
for taget[tuple[[<ast.Name object at 0x7da18fe907f0>, <ast.Name object at 0x7da18fe900d0>]]] in starred[call[name[enumerate], parameter[name[plotdata]]]] begin[:]
variable[pid] assign[=] call[name[pids]][name[pidx]]
variable[fdata] assign[=] call[name[OrderedDict], parameter[]]
variable[lastcats] assign[=] constant[None]
variable[sharedcats] assign[=] constant[True]
for taget[name[d]] in starred[name[pdata]] begin[:]
call[name[fdata]][call[name[d]][constant[name]]] assign[=] call[name[OrderedDict], parameter[]]
for taget[tuple[[<ast.Name object at 0x7da18fe90c70>, <ast.Name object at 0x7da18fe906a0>]]] in starred[call[name[enumerate], parameter[call[name[d]][constant[data]]]]] begin[:]
if compare[call[name[type], parameter[name[x]]] is name[list]] begin[:]
call[call[name[fdata]][call[name[d]][constant[name]]]][call[name[str], parameter[call[name[x]][constant[0]]]]] assign[=] call[name[x]][constant[1]]
if compare[name[lastcats] is constant[None]] begin[:]
variable[lastcats] assign[=] <ast.ListComp object at 0x7da18fe93730>
if <ast.BoolOp object at 0x7da18fe91510> begin[:]
variable[fout] assign[=] constant[]
for taget[name[d]] in starred[name[pdata]] begin[:]
<ast.AugAssign object at 0x7da18fe921d0>
<ast.AugAssign object at 0x7da18fe90490>
<ast.AugAssign object at 0x7da18fe92710>
<ast.AugAssign object at 0x7da18fe92200>
with call[name[io].open, parameter[call[name[os].path.join, parameter[name[config].data_dir, call[constant[{}.txt].format, parameter[name[pid]]]]], constant[w]]] begin[:]
call[name[print], parameter[call[call[name[fout].encode, parameter[constant[utf-8], constant[ignore]]].decode, parameter[constant[utf-8]]]]]
variable[fig] assign[=] call[name[plt].figure, parameter[]]
variable[axes] assign[=] call[name[fig].add_subplot, parameter[constant[111]]]
for taget[tuple[[<ast.Name object at 0x7da18fe90880>, <ast.Name object at 0x7da18fe91780>]]] in starred[call[name[enumerate], parameter[name[pdata]]]] begin[:]
variable[cidx] assign[=] name[idx]
while compare[name[cidx] greater_or_equal[>=] call[name[len], parameter[name[default_colors]]]] begin[:]
<ast.AugAssign object at 0x7da207f000a0>
variable[linestyle] assign[=] constant[solid]
if compare[call[name[d].get, parameter[constant[dashStyle], constant[None]]] equal[==] constant[Dash]] begin[:]
variable[linestyle] assign[=] constant[dashed]
<ast.Try object at 0x7da207f00eb0>
call[name[axes].tick_params, parameter[]]
call[name[axes].set_xlabel, parameter[call[name[pconfig].get, parameter[constant[xlab], constant[]]]]]
call[name[axes].set_ylabel, parameter[call[name[pconfig].get, parameter[constant[ylab], constant[]]]]]
<ast.Try object at 0x7da207f03c10>
variable[default_ylimits] assign[=] call[name[axes].get_ylim, parameter[]]
variable[ymin] assign[=] call[name[default_ylimits]][constant[0]]
if compare[constant[ymin] in name[pconfig]] begin[:]
variable[ymin] assign[=] call[name[pconfig]][constant[ymin]]
variable[ymax] assign[=] call[name[default_ylimits]][constant[1]]
if compare[constant[ymax] in name[pconfig]] begin[:]
variable[ymax] assign[=] call[name[pconfig]][constant[ymax]]
if compare[binary_operation[name[ymax] - name[ymin]] less[<] call[name[pconfig].get, parameter[constant[yMinRange], constant[0]]]] begin[:]
variable[ymax] assign[=] binary_operation[name[ymin] + call[name[pconfig]][constant[yMinRange]]]
call[name[axes].set_ylim, parameter[tuple[[<ast.Name object at 0x7da207f9a800>, <ast.Name object at 0x7da207f9acb0>]]]]
<ast.Try object at 0x7da207f98ca0>
variable[default_xlimits] assign[=] call[name[axes].get_xlim, parameter[]]
variable[xmin] assign[=] call[name[default_xlimits]][constant[0]]
if compare[constant[xmin] in name[pconfig]] begin[:]
variable[xmin] assign[=] call[name[pconfig]][constant[xmin]]
variable[xmax] assign[=] call[name[default_xlimits]][constant[1]]
if compare[constant[xmax] in name[pconfig]] begin[:]
variable[xmax] assign[=] call[name[pconfig]][constant[xmax]]
if compare[binary_operation[name[xmax] - name[xmin]] less[<] call[name[pconfig].get, parameter[constant[xMinRange], constant[0]]]] begin[:]
variable[xmax] assign[=] binary_operation[name[xmin] + call[name[pconfig]][constant[xMinRange]]]
call[name[axes].set_xlim, parameter[tuple[[<ast.Name object at 0x7da207f9bb50>, <ast.Name object at 0x7da207f98910>]]]]
if compare[constant[title] in name[pconfig]] begin[:]
call[name[plt].text, parameter[constant[0.5], constant[1.05], call[name[pconfig]][constant[title]]]]
call[name[axes].grid, parameter[constant[True]]]
if compare[constant[categories] in name[pconfig]] begin[:]
call[name[axes].set_xticks, parameter[<ast.ListComp object at 0x7da207f9a5c0>]]
call[name[axes].set_xticklabels, parameter[call[name[pconfig]][constant[categories]]]]
variable[xlim] assign[=] call[name[axes].get_xlim, parameter[]]
call[name[axes].plot, parameter[list[[<ast.Subscript object at 0x7da207f9bca0>, <ast.Subscript object at 0x7da207f98b20>]], list[[<ast.Constant object at 0x7da207f99bd0>, <ast.Constant object at 0x7da207f9a0e0>]]]]
call[name[axes].set_axisbelow, parameter[constant[True]]]
call[call[name[axes].spines][constant[right]].set_visible, parameter[constant[False]]]
call[call[name[axes].spines][constant[top]].set_visible, parameter[constant[False]]]
call[call[name[axes].spines][constant[bottom]].set_visible, parameter[constant[False]]]
call[call[name[axes].spines][constant[left]].set_visible, parameter[constant[False]]]
if compare[constant[yPlotBands] in name[pconfig]] begin[:]
variable[xlim] assign[=] call[name[axes].get_xlim, parameter[]]
for taget[name[pb]] in starred[call[name[pconfig]][constant[yPlotBands]]] begin[:]
call[name[axes].barh, parameter[call[name[pb]][constant[from]], call[name[xlim]][constant[1]]]]
if compare[constant[xPlotBands] in name[pconfig]] begin[:]
variable[ylim] assign[=] call[name[axes].get_ylim, parameter[]]
for taget[name[pb]] in starred[call[name[pconfig]][constant[xPlotBands]]] begin[:]
call[name[axes].bar, parameter[call[name[pb]][constant[from]], call[name[ylim]][constant[1]]]]
if compare[call[name[len], parameter[name[pdata]]] less_or_equal[<=] constant[15]] begin[:]
call[name[axes].legend, parameter[]]
call[name[plt].tight_layout, parameter[]]
variable[hidediv] assign[=] constant[]
if compare[name[pidx] greater[>] constant[0]] begin[:]
variable[hidediv] assign[=] constant[ style="display:none;"]
if name[config].export_plots begin[:]
for taget[name[fformat]] in starred[name[config].export_plot_formats] begin[:]
variable[plot_dir] assign[=] call[name[os].path.join, parameter[name[config].plots_dir, name[fformat]]]
if <ast.UnaryOp object at 0x7da18ede4f10> begin[:]
call[name[os].makedirs, parameter[name[plot_dir]]]
variable[plot_fn] assign[=] call[name[os].path.join, parameter[name[plot_dir], call[constant[{}.{}].format, parameter[name[pid], name[fformat]]]]]
call[name[fig].savefig, parameter[name[plot_fn]]]
if compare[call[name[getattr], parameter[call[name[get_template_mod], parameter[]], constant[base64_plots], constant[True]]] is constant[True]] begin[:]
variable[img_buffer] assign[=] call[name[io].BytesIO, parameter[]]
call[name[fig].savefig, parameter[name[img_buffer]]]
variable[b64_img] assign[=] call[call[name[base64].b64encode, parameter[call[name[img_buffer].getvalue, parameter[]]]].decode, parameter[constant[utf8]]]
call[name[img_buffer].close, parameter[]]
<ast.AugAssign object at 0x7da18ede6e90>
call[name[plt].close, parameter[name[fig]]]
<ast.AugAssign object at 0x7da18ede4550>
<ast.AugAssign object at 0x7da18ede5720>
return[name[html]] | keyword[def] identifier[matplotlib_linegraph] ( identifier[plotdata] , identifier[pconfig] = keyword[None] ):
literal[string]
keyword[if] identifier[pconfig] keyword[is] keyword[None] :
identifier[pconfig] ={}
keyword[if] identifier[pconfig] . identifier[get] ( literal[string] ) keyword[is] keyword[None] :
identifier[pconfig] [ literal[string] ]= literal[string] + literal[string] . identifier[join] ( identifier[random] . identifier[sample] ( identifier[letters] , literal[int] ))
identifier[pconfig] [ literal[string] ]= identifier[report] . identifier[save_htmlid] ( identifier[pconfig] [ literal[string] ])
identifier[pids] =[]
keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[plotdata] )):
keyword[try] :
identifier[name] = identifier[pconfig] [ literal[string] ][ identifier[k] ][ literal[string] ]
keyword[except] :
identifier[name] = identifier[k] + literal[int]
identifier[pid] = literal[string] . identifier[format] ( identifier[pconfig] [ literal[string] ], identifier[name] )
identifier[pid] = identifier[report] . identifier[save_htmlid] ( identifier[pid] , identifier[skiplint] = keyword[True] )
identifier[pids] . identifier[append] ( identifier[pid] )
identifier[html] = literal[string] + literal[string] + literal[string]
identifier[html] += literal[string] . identifier[format] ( identifier[pconfig] [ literal[string] ])
identifier[default_colors] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ,
literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[len] ( identifier[plotdata] )> literal[int] keyword[and] keyword[not] identifier[config] . identifier[simple_output] :
identifier[html] += literal[string]
keyword[for] identifier[k] , identifier[p] keyword[in] identifier[enumerate] ( identifier[plotdata] ):
identifier[pid] = identifier[pids] [ identifier[k] ]
identifier[active] = literal[string] keyword[if] identifier[k] == literal[int] keyword[else] literal[string]
keyword[try] :
identifier[name] = identifier[pconfig] [ literal[string] ][ identifier[k] ][ literal[string] ]
keyword[except] :
identifier[name] = identifier[k] + literal[int]
identifier[html] += literal[string] . identifier[format] ( identifier[a] = identifier[active] , identifier[pid] = identifier[pid] , identifier[n] = identifier[name] )
identifier[html] += literal[string]
keyword[for] identifier[pidx] , identifier[pdata] keyword[in] identifier[enumerate] ( identifier[plotdata] ):
identifier[pid] = identifier[pids] [ identifier[pidx] ]
identifier[fdata] = identifier[OrderedDict] ()
identifier[lastcats] = keyword[None]
identifier[sharedcats] = keyword[True]
keyword[for] identifier[d] keyword[in] identifier[pdata] :
identifier[fdata] [ identifier[d] [ literal[string] ]]= identifier[OrderedDict] ()
keyword[for] identifier[i] , identifier[x] keyword[in] identifier[enumerate] ( identifier[d] [ literal[string] ]):
keyword[if] identifier[type] ( identifier[x] ) keyword[is] identifier[list] :
identifier[fdata] [ identifier[d] [ literal[string] ]][ identifier[str] ( identifier[x] [ literal[int] ])]= identifier[x] [ literal[int] ]
keyword[if] identifier[lastcats] keyword[is] keyword[None] :
identifier[lastcats] =[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]]
keyword[elif] identifier[lastcats] !=[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]]:
identifier[sharedcats] = keyword[False]
keyword[else] :
keyword[try] :
identifier[fdata] [ identifier[d] [ literal[string] ]][ identifier[pconfig] [ literal[string] ][ identifier[i] ]]= identifier[x]
keyword[except] ( identifier[KeyError] , identifier[IndexError] ):
identifier[fdata] [ identifier[d] [ literal[string] ]][ identifier[str] ( identifier[i] )]= identifier[x]
keyword[if] keyword[not] identifier[sharedcats] keyword[and] identifier[config] . identifier[data_format] == literal[string] :
identifier[fout] = literal[string]
keyword[for] identifier[d] keyword[in] identifier[pdata] :
identifier[fout] += literal[string] + literal[string] . identifier[join] ([ identifier[str] ( identifier[x] [ literal[int] ]) keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]])
identifier[fout] += literal[string] . identifier[format] ( identifier[d] [ literal[string] ])
identifier[fout] += literal[string] . identifier[join] ([ identifier[str] ( identifier[x] [ literal[int] ]) keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]])
identifier[fout] += literal[string]
keyword[with] identifier[io] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[data_dir] , literal[string] . identifier[format] ( identifier[pid] )), literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[f] :
identifier[print] ( identifier[fout] . identifier[encode] ( literal[string] , literal[string] ). identifier[decode] ( literal[string] ), identifier[file] = identifier[f] )
keyword[else] :
identifier[util_functions] . identifier[write_data_file] ( identifier[fdata] , identifier[pid] )
identifier[fig] = identifier[plt] . identifier[figure] ( identifier[figsize] =( literal[int] , literal[int] ), identifier[frameon] = keyword[False] )
identifier[axes] = identifier[fig] . identifier[add_subplot] ( literal[int] )
keyword[for] identifier[idx] , identifier[d] keyword[in] identifier[enumerate] ( identifier[pdata] ):
identifier[cidx] = identifier[idx]
keyword[while] identifier[cidx] >= identifier[len] ( identifier[default_colors] ):
identifier[cidx] -= identifier[len] ( identifier[default_colors] )
identifier[linestyle] = literal[string]
keyword[if] identifier[d] . identifier[get] ( literal[string] , keyword[None] )== literal[string] :
identifier[linestyle] = literal[string]
keyword[try] :
identifier[axes] . identifier[plot] ([ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]],[ identifier[x] [ literal[int] ] keyword[for] identifier[x] keyword[in] identifier[d] [ literal[string] ]], identifier[label] = identifier[d] [ literal[string] ], identifier[color] = identifier[d] . identifier[get] ( literal[string] , identifier[default_colors] [ identifier[cidx] ]), identifier[linestyle] = identifier[linestyle] , identifier[linewidth] = literal[int] , identifier[marker] = keyword[None] )
keyword[except] identifier[TypeError] :
identifier[axes] . identifier[plot] ( identifier[d] [ literal[string] ], identifier[label] = identifier[d] [ literal[string] ], identifier[color] = identifier[d] . identifier[get] ( literal[string] , identifier[default_colors] [ identifier[cidx] ]), identifier[linewidth] = literal[int] , identifier[marker] = keyword[None] )
identifier[axes] . identifier[tick_params] ( identifier[labelsize] = literal[int] , identifier[direction] = literal[string] , identifier[left] = keyword[False] , identifier[right] = keyword[False] , identifier[top] = keyword[False] , identifier[bottom] = keyword[False] )
identifier[axes] . identifier[set_xlabel] ( identifier[pconfig] . identifier[get] ( literal[string] , literal[string] ))
identifier[axes] . identifier[set_ylabel] ( identifier[pconfig] . identifier[get] ( literal[string] , literal[string] ))
keyword[try] :
identifier[axes] . identifier[set_ylabel] ( identifier[pconfig] [ literal[string] ][ identifier[pidx] ][ literal[string] ])
keyword[except] :
keyword[pass]
identifier[default_ylimits] = identifier[axes] . identifier[get_ylim] ()
identifier[ymin] = identifier[default_ylimits] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[ymin] = identifier[pconfig] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[pconfig] :
identifier[ymin] = identifier[max] ( identifier[pconfig] [ literal[string] ], identifier[default_ylimits] [ literal[int] ])
identifier[ymax] = identifier[default_ylimits] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[ymax] = identifier[pconfig] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[pconfig] :
identifier[ymax] = identifier[min] ( identifier[pconfig] [ literal[string] ], identifier[default_ylimits] [ literal[int] ])
keyword[if] ( identifier[ymax] - identifier[ymin] )< identifier[pconfig] . identifier[get] ( literal[string] , literal[int] ):
identifier[ymax] = identifier[ymin] + identifier[pconfig] [ literal[string] ]
identifier[axes] . identifier[set_ylim] (( identifier[ymin] , identifier[ymax] ))
keyword[try] :
identifier[axes] . identifier[set_ylim] (( identifier[ymin] , identifier[pconfig] [ literal[string] ][ identifier[pidx] ][ literal[string] ]))
keyword[except] :
keyword[pass]
identifier[default_xlimits] = identifier[axes] . identifier[get_xlim] ()
identifier[xmin] = identifier[default_xlimits] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[xmin] = identifier[pconfig] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[pconfig] :
identifier[xmin] = identifier[max] ( identifier[pconfig] [ literal[string] ], identifier[default_xlimits] [ literal[int] ])
identifier[xmax] = identifier[default_xlimits] [ literal[int] ]
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[xmax] = identifier[pconfig] [ literal[string] ]
keyword[elif] literal[string] keyword[in] identifier[pconfig] :
identifier[xmax] = identifier[min] ( identifier[pconfig] [ literal[string] ], identifier[default_xlimits] [ literal[int] ])
keyword[if] ( identifier[xmax] - identifier[xmin] )< identifier[pconfig] . identifier[get] ( literal[string] , literal[int] ):
identifier[xmax] = identifier[xmin] + identifier[pconfig] [ literal[string] ]
identifier[axes] . identifier[set_xlim] (( identifier[xmin] , identifier[xmax] ))
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[plt] . identifier[text] ( literal[int] , literal[int] , identifier[pconfig] [ literal[string] ], identifier[horizontalalignment] = literal[string] , identifier[fontsize] = literal[int] , identifier[transform] = identifier[axes] . identifier[transAxes] )
identifier[axes] . identifier[grid] ( keyword[True] , identifier[zorder] = literal[int] , identifier[which] = literal[string] , identifier[axis] = literal[string] , identifier[linestyle] = literal[string] , identifier[color] = literal[string] , identifier[linewidth] = literal[int] )
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[axes] . identifier[set_xticks] ([ identifier[i] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[enumerate] ( identifier[pconfig] [ literal[string] ])])
identifier[axes] . identifier[set_xticklabels] ( identifier[pconfig] [ literal[string] ])
identifier[xlim] = identifier[axes] . identifier[get_xlim] ()
identifier[axes] . identifier[plot] ([ identifier[xlim] [ literal[int] ], identifier[xlim] [ literal[int] ]],[ literal[int] , literal[int] ], identifier[linestyle] = literal[string] , identifier[color] = literal[string] , identifier[linewidth] = literal[int] )
identifier[axes] . identifier[set_axisbelow] ( keyword[True] )
identifier[axes] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[axes] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[axes] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
identifier[axes] . identifier[spines] [ literal[string] ]. identifier[set_visible] ( keyword[False] )
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[xlim] = identifier[axes] . identifier[get_xlim] ()
keyword[for] identifier[pb] keyword[in] identifier[pconfig] [ literal[string] ]:
identifier[axes] . identifier[barh] ( identifier[pb] [ literal[string] ], identifier[xlim] [ literal[int] ], identifier[height] = identifier[pb] [ literal[string] ]- identifier[pb] [ literal[string] ], identifier[left] = identifier[xlim] [ literal[int] ], identifier[color] = identifier[pb] [ literal[string] ], identifier[linewidth] = literal[int] , identifier[zorder] = literal[int] )
keyword[if] literal[string] keyword[in] identifier[pconfig] :
identifier[ylim] = identifier[axes] . identifier[get_ylim] ()
keyword[for] identifier[pb] keyword[in] identifier[pconfig] [ literal[string] ]:
identifier[axes] . identifier[bar] ( identifier[pb] [ literal[string] ], identifier[ylim] [ literal[int] ], identifier[width] = identifier[pb] [ literal[string] ]- identifier[pb] [ literal[string] ], identifier[bottom] = identifier[ylim] [ literal[int] ], identifier[color] = identifier[pb] [ literal[string] ], identifier[linewidth] = literal[int] , identifier[zorder] = literal[int] )
keyword[if] identifier[len] ( identifier[pdata] )<= literal[int] :
identifier[axes] . identifier[legend] ( identifier[loc] = literal[string] , identifier[bbox_to_anchor] =( literal[int] ,- literal[int] , literal[int] , literal[int] ), identifier[ncol] = literal[int] , identifier[mode] = literal[string] , identifier[fontsize] = literal[int] , identifier[frameon] = keyword[False] )
identifier[plt] . identifier[tight_layout] ( identifier[rect] =[ literal[int] , literal[int] , literal[int] , literal[int] ])
keyword[else] :
identifier[plt] . identifier[tight_layout] ( identifier[rect] =[ literal[int] , literal[int] , literal[int] , literal[int] ])
identifier[hidediv] = literal[string]
keyword[if] identifier[pidx] > literal[int] :
identifier[hidediv] = literal[string]
keyword[if] identifier[config] . identifier[export_plots] :
keyword[for] identifier[fformat] keyword[in] identifier[config] . identifier[export_plot_formats] :
identifier[plot_dir] = identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[plots_dir] , identifier[fformat] )
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[plot_dir] ):
identifier[os] . identifier[makedirs] ( identifier[plot_dir] )
identifier[plot_fn] = identifier[os] . identifier[path] . identifier[join] ( identifier[plot_dir] , literal[string] . identifier[format] ( identifier[pid] , identifier[fformat] ))
identifier[fig] . identifier[savefig] ( identifier[plot_fn] , identifier[format] = identifier[fformat] , identifier[bbox_inches] = literal[string] )
keyword[if] identifier[getattr] ( identifier[get_template_mod] (), literal[string] , keyword[True] ) keyword[is] keyword[True] :
identifier[img_buffer] = identifier[io] . identifier[BytesIO] ()
identifier[fig] . identifier[savefig] ( identifier[img_buffer] , identifier[format] = literal[string] , identifier[bbox_inches] = literal[string] )
identifier[b64_img] = identifier[base64] . identifier[b64encode] ( identifier[img_buffer] . identifier[getvalue] ()). identifier[decode] ( literal[string] )
identifier[img_buffer] . identifier[close] ()
identifier[html] += literal[string] . identifier[format] ( identifier[pid] , identifier[hidediv] , identifier[b64_img] )
keyword[else] :
identifier[plot_relpath] = identifier[os] . identifier[path] . identifier[join] ( identifier[config] . identifier[plots_dir_name] , literal[string] , literal[string] . identifier[format] ( identifier[pid] ))
identifier[html] += literal[string] . identifier[format] ( identifier[pid] , identifier[hidediv] , identifier[plot_relpath] )
identifier[plt] . identifier[close] ( identifier[fig] )
identifier[html] += literal[string]
identifier[report] . identifier[num_mpl_plots] += literal[int]
keyword[return] identifier[html] | def matplotlib_linegraph(plotdata, pconfig=None):
"""
Plot a line graph with Matplot lib and return a HTML string. Either embeds a base64
encoded image within HTML or writes the plot and links to it. Should be called by
plot_bargraph, which properly formats the input data.
"""
if pconfig is None:
pconfig = {} # depends on [control=['if'], data=['pconfig']]
# Plot group ID
if pconfig.get('id') is None:
pconfig['id'] = 'mqc_mplplot_' + ''.join(random.sample(letters, 10)) # depends on [control=['if'], data=[]]
# Sanitise plot ID and check for duplicates
pconfig['id'] = report.save_htmlid(pconfig['id'])
# Individual plot IDs
pids = []
for k in range(len(plotdata)):
try:
name = pconfig['data_labels'][k]['name'] # depends on [control=['try'], data=[]]
except:
name = k + 1 # depends on [control=['except'], data=[]]
pid = 'mqc_{}_{}'.format(pconfig['id'], name)
pid = report.save_htmlid(pid, skiplint=True)
pids.append(pid) # depends on [control=['for'], data=['k']]
html = '<p class="text-info"><small><span class="glyphicon glyphicon-picture" aria-hidden="true"></span> ' + 'Flat image plot. Toolbox functions such as highlighting / hiding samples will not work ' + '(see the <a href="http://multiqc.info/docs/#flat--interactive-plots" target="_blank">docs</a>).</small></p>'
html += '<div class="mqc_mplplot_plotgroup" id="{}">'.format(pconfig['id'])
# Same defaults as HighCharts for consistency
default_colors = ['#7cb5ec', '#434348', '#90ed7d', '#f7a35c', '#8085e9', '#f15c80', '#e4d354', '#2b908f', '#f45b5b', '#91e8e1']
# Buttons to cycle through different datasets
if len(plotdata) > 1 and (not config.simple_output):
html += '<div class="btn-group mpl_switch_group mqc_mplplot_bargraph_switchds">\n'
for (k, p) in enumerate(plotdata):
pid = pids[k]
active = 'active' if k == 0 else ''
try:
name = pconfig['data_labels'][k]['name'] # depends on [control=['try'], data=[]]
except:
name = k + 1 # depends on [control=['except'], data=[]]
html += '<button class="btn btn-default btn-sm {a}" data-target="#{pid}">{n}</button>\n'.format(a=active, pid=pid, n=name) # depends on [control=['for'], data=[]]
html += '</div>\n\n' # depends on [control=['if'], data=[]]
# Go through datasets creating plots
for (pidx, pdata) in enumerate(plotdata):
# Plot ID
pid = pids[pidx]
# Save plot data to file
fdata = OrderedDict()
lastcats = None
sharedcats = True
for d in pdata:
fdata[d['name']] = OrderedDict()
for (i, x) in enumerate(d['data']):
if type(x) is list:
fdata[d['name']][str(x[0])] = x[1]
# Check to see if all categories are the same
if lastcats is None:
lastcats = [x[0] for x in d['data']] # depends on [control=['if'], data=['lastcats']]
elif lastcats != [x[0] for x in d['data']]:
sharedcats = False # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
try:
fdata[d['name']][pconfig['categories'][i]] = x # depends on [control=['try'], data=[]]
except (KeyError, IndexError):
fdata[d['name']][str(i)] = x # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['d']]
# Custom tsv output if the x axis varies
if not sharedcats and config.data_format == 'tsv':
fout = ''
for d in pdata:
fout += '\t' + '\t'.join([str(x[0]) for x in d['data']])
fout += '\n{}\t'.format(d['name'])
fout += '\t'.join([str(x[1]) for x in d['data']])
fout += '\n' # depends on [control=['for'], data=['d']]
with io.open(os.path.join(config.data_dir, '{}.txt'.format(pid)), 'w', encoding='utf-8') as f:
print(fout.encode('utf-8', 'ignore').decode('utf-8'), file=f) # depends on [control=['with'], data=['f']] # depends on [control=['if'], data=[]]
else:
util_functions.write_data_file(fdata, pid)
# Set up figure
fig = plt.figure(figsize=(14, 6), frameon=False)
axes = fig.add_subplot(111)
# Go through data series
for (idx, d) in enumerate(pdata):
# Default colour index
cidx = idx
while cidx >= len(default_colors):
cidx -= len(default_colors) # depends on [control=['while'], data=['cidx']]
# Line style
linestyle = 'solid'
if d.get('dashStyle', None) == 'Dash':
linestyle = 'dashed' # depends on [control=['if'], data=[]]
# Reformat data (again)
try:
axes.plot([x[0] for x in d['data']], [x[1] for x in d['data']], label=d['name'], color=d.get('color', default_colors[cidx]), linestyle=linestyle, linewidth=1, marker=None) # depends on [control=['try'], data=[]]
except TypeError:
# Categorical data on x axis
axes.plot(d['data'], label=d['name'], color=d.get('color', default_colors[cidx]), linewidth=1, marker=None) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]]
# Tidy up axes
axes.tick_params(labelsize=8, direction='out', left=False, right=False, top=False, bottom=False)
axes.set_xlabel(pconfig.get('xlab', ''))
axes.set_ylabel(pconfig.get('ylab', ''))
# Dataset specific y label
try:
axes.set_ylabel(pconfig['data_labels'][pidx]['ylab']) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
# Axis limits
default_ylimits = axes.get_ylim()
ymin = default_ylimits[0]
if 'ymin' in pconfig:
ymin = pconfig['ymin'] # depends on [control=['if'], data=['pconfig']]
elif 'yFloor' in pconfig:
ymin = max(pconfig['yFloor'], default_ylimits[0]) # depends on [control=['if'], data=['pconfig']]
ymax = default_ylimits[1]
if 'ymax' in pconfig:
ymax = pconfig['ymax'] # depends on [control=['if'], data=['pconfig']]
elif 'yCeiling' in pconfig:
ymax = min(pconfig['yCeiling'], default_ylimits[1]) # depends on [control=['if'], data=['pconfig']]
if ymax - ymin < pconfig.get('yMinRange', 0):
ymax = ymin + pconfig['yMinRange'] # depends on [control=['if'], data=[]]
axes.set_ylim((ymin, ymax))
# Dataset specific ymax
try:
axes.set_ylim((ymin, pconfig['data_labels'][pidx]['ymax'])) # depends on [control=['try'], data=[]]
except:
pass # depends on [control=['except'], data=[]]
default_xlimits = axes.get_xlim()
xmin = default_xlimits[0]
if 'xmin' in pconfig:
xmin = pconfig['xmin'] # depends on [control=['if'], data=['pconfig']]
elif 'xFloor' in pconfig:
xmin = max(pconfig['xFloor'], default_xlimits[0]) # depends on [control=['if'], data=['pconfig']]
xmax = default_xlimits[1]
if 'xmax' in pconfig:
xmax = pconfig['xmax'] # depends on [control=['if'], data=['pconfig']]
elif 'xCeiling' in pconfig:
xmax = min(pconfig['xCeiling'], default_xlimits[1]) # depends on [control=['if'], data=['pconfig']]
if xmax - xmin < pconfig.get('xMinRange', 0):
xmax = xmin + pconfig['xMinRange'] # depends on [control=['if'], data=[]]
axes.set_xlim((xmin, xmax))
# Plot title
if 'title' in pconfig:
plt.text(0.5, 1.05, pconfig['title'], horizontalalignment='center', fontsize=16, transform=axes.transAxes) # depends on [control=['if'], data=['pconfig']]
axes.grid(True, zorder=10, which='both', axis='y', linestyle='-', color='#dedede', linewidth=1)
# X axis categories, if specified
if 'categories' in pconfig:
axes.set_xticks([i for (i, v) in enumerate(pconfig['categories'])])
axes.set_xticklabels(pconfig['categories']) # depends on [control=['if'], data=['pconfig']]
# Axis lines
xlim = axes.get_xlim()
axes.plot([xlim[0], xlim[1]], [0, 0], linestyle='-', color='#dedede', linewidth=2)
axes.set_axisbelow(True)
axes.spines['right'].set_visible(False)
axes.spines['top'].set_visible(False)
axes.spines['bottom'].set_visible(False)
axes.spines['left'].set_visible(False)
# Background colours, if specified
if 'yPlotBands' in pconfig:
xlim = axes.get_xlim()
for pb in pconfig['yPlotBands']:
axes.barh(pb['from'], xlim[1], height=pb['to'] - pb['from'], left=xlim[0], color=pb['color'], linewidth=0, zorder=0) # depends on [control=['for'], data=['pb']] # depends on [control=['if'], data=['pconfig']]
if 'xPlotBands' in pconfig:
ylim = axes.get_ylim()
for pb in pconfig['xPlotBands']:
axes.bar(pb['from'], ylim[1], width=pb['to'] - pb['from'], bottom=ylim[0], color=pb['color'], linewidth=0, zorder=0) # depends on [control=['for'], data=['pb']] # depends on [control=['if'], data=['pconfig']]
# Tight layout - makes sure that legend fits in and stuff
if len(pdata) <= 15:
axes.legend(loc='lower center', bbox_to_anchor=(0, -0.22, 1, 0.102), ncol=5, mode='expand', fontsize=8, frameon=False)
plt.tight_layout(rect=[0, 0.08, 1, 0.92]) # depends on [control=['if'], data=[]]
else:
plt.tight_layout(rect=[0, 0, 1, 0.92])
# Should this plot be hidden on report load?
hidediv = ''
if pidx > 0:
hidediv = ' style="display:none;"' # depends on [control=['if'], data=[]]
# Save the plot to the data directory if export is requests
if config.export_plots:
for fformat in config.export_plot_formats:
# Make the directory if it doesn't already exist
plot_dir = os.path.join(config.plots_dir, fformat)
if not os.path.exists(plot_dir):
os.makedirs(plot_dir) # depends on [control=['if'], data=[]]
# Save the plot
plot_fn = os.path.join(plot_dir, '{}.{}'.format(pid, fformat))
fig.savefig(plot_fn, format=fformat, bbox_inches='tight') # depends on [control=['for'], data=['fformat']] # depends on [control=['if'], data=[]]
# Output the figure to a base64 encoded string
if getattr(get_template_mod(), 'base64_plots', True) is True:
img_buffer = io.BytesIO()
fig.savefig(img_buffer, format='png', bbox_inches='tight')
b64_img = base64.b64encode(img_buffer.getvalue()).decode('utf8')
img_buffer.close()
html += '<div class="mqc_mplplot" id="{}"{}><img src="data:image/png;base64,{}" /></div>'.format(pid, hidediv, b64_img) # depends on [control=['if'], data=[]]
else:
# Save to a file and link <img>
plot_relpath = os.path.join(config.plots_dir_name, 'png', '{}.png'.format(pid))
html += '<div class="mqc_mplplot" id="{}"{}><img src="{}" /></div>'.format(pid, hidediv, plot_relpath)
plt.close(fig) # depends on [control=['for'], data=[]]
# Close wrapping div
html += '</div>'
report.num_mpl_plots += 1
return html |
def learn(self, fit=0, size=0, configure=None):
    """
    Learns all (nearly) optimal logical networks within given fitness and size tolerances.
    The first optimum logical network found is saved in the attribute :attr:`optimum` while
    all enumerated logical networks are saved in the attribute :attr:`networks`.
    Example::
        >>> from caspo import core, learn
        >>> graph = core.Graph.read_sif('pkn.sif')
        >>> dataset = core.Dataset('dataset.csv', 30)
        >>> zipped = graph.compress(dataset.setup)
        >>> learner = learn.Learner(zipped, dataset, 2, 'round', 100)
        >>> learner.learn(0.02, 1)
        >>> learner.networks.to_csv('networks.csv')
    Parameters
    ----------
    fit : float
        Fitness tolerance, e.g., use 0.1 for 10% tolerance with respect to the optimum
    size : int
        Size tolerance with respect to the optimum
    configure : callable
        Callable object responsible of setting a custom clingo configuration
    """
    # ASP encodings shared by both solving phases below.
    encodings = ['guess', 'fixpoint', 'rss']
    if self.optimum is None:
        # Phase 1: solve the optimisation encoding once to find a single
        # optimum network. Ordering matters: configure the control object
        # before grounding, and ground before solving.
        clingo = self.__get_clingo__(encodings + ['opt'])
        if configure is not None:
            configure(clingo.conf)
        clingo.ground([("base", [])])
        # __keep_last__ stores the last (i.e. best) model in self._last.
        clingo.solve(on_model=self.__keep_last__)
        self.stats['time_optimum'] = clingo.stats['time_total']
        self._logger.info("Optimum logical network learned in %.4fs", self.stats['time_optimum'])
        tuples = (f.args() for f in self._last)
        self.optimum = core.LogicalNetwork.from_hypertuples(self.hypergraph, tuples)
    # Evaluate the optimum against the experimental readouts; NaN entries
    # in the readout matrix are masked out of both RSS and MSE.
    predictions = self.optimum.predictions(self.dataset.clampings, self.dataset.readouts.columns).values
    readouts = self.dataset.readouts.values
    pos = ~np.isnan(readouts)
    # RSS over discretised readouts; used below as the enumeration bound.
    rss = np.sum((np.vectorize(self.discrete)(readouts[pos]) - predictions[pos]*self.factor)**2)
    self.stats['optimum_mse'] = mean_squared_error(readouts[pos], predictions[pos])
    self.stats['optimum_size'] = self.optimum.size
    self._logger.info("Optimum logical networks has MSE %.4f and size %s", self.stats['optimum_mse'], self.stats['optimum_size'])
    self.networks.reset()
    # Phase 2: enumerate every network within the fitness (maxrss) and
    # size (maxsize) tolerances around the optimum. models='0' asks
    # clingo for all models, not just the first.
    args = ['-c maxrss=%s' % int(rss + rss*fit), '-c maxsize=%s' % (self.optimum.size + size)]
    clingo = self.__get_clingo__(encodings + ['enum'], args)
    clingo.conf.solve.models = '0'
    if configure is not None:
        configure(clingo.conf)
    clingo.ground([("base", [])])
    # __save__ appends each enumerated model to self.networks.
    clingo.solve(on_model=self.__save__)
    self.stats['time_enumeration'] = clingo.stats['time_total']
    self._logger.info("%s (nearly) optimal logical networks learned in %.4fs", len(self.networks), self.stats['time_enumeration'])
constant[
Learns all (nearly) optimal logical networks with give fitness and size tolerance.
The first optimum logical network found is saved in the attribute :attr:`optimum` while
all enumerated logical networks are saved in the attribute :attr:`networks`.
Example::
>>> from caspo import core, learn
>>> graph = core.Graph.read_sif('pkn.sif')
>>> dataset = core.Dataset('dataset.csv', 30)
>>> zipped = graph.compress(dataset.setup)
>>> learner = learn.Learner(zipped, dataset, 2, 'round', 100)
>>> learner.learn(0.02, 1)
>>> learner.networks.to_csv('networks.csv')
Parameters
----------
fit : float
Fitness tolerance, e.g., use 0.1 for 10% tolerance with respect to the optimum
size : int
Size tolerance with respect to the optimum
configure : callable
Callable object responsible of setting a custom clingo configuration
]
variable[encodings] assign[=] list[[<ast.Constant object at 0x7da1b0b3bd30>, <ast.Constant object at 0x7da1b0b3b1f0>, <ast.Constant object at 0x7da1b0b3a710>]]
if compare[name[self].optimum is constant[None]] begin[:]
variable[clingo] assign[=] call[name[self].__get_clingo__, parameter[binary_operation[name[encodings] + list[[<ast.Constant object at 0x7da18bc72d40>]]]]]
if compare[name[configure] is_not constant[None]] begin[:]
call[name[configure], parameter[name[clingo].conf]]
call[name[clingo].ground, parameter[list[[<ast.Tuple object at 0x7da18bc71990>]]]]
call[name[clingo].solve, parameter[]]
call[name[self].stats][constant[time_optimum]] assign[=] call[name[clingo].stats][constant[time_total]]
call[name[self]._logger.info, parameter[constant[Optimum logical network learned in %.4fs], call[name[self].stats][constant[time_optimum]]]]
variable[tuples] assign[=] <ast.GeneratorExp object at 0x7da18bc71f00>
name[self].optimum assign[=] call[name[core].LogicalNetwork.from_hypertuples, parameter[name[self].hypergraph, name[tuples]]]
variable[predictions] assign[=] call[name[self].optimum.predictions, parameter[name[self].dataset.clampings, name[self].dataset.readouts.columns]].values
variable[readouts] assign[=] name[self].dataset.readouts.values
variable[pos] assign[=] <ast.UnaryOp object at 0x7da18bc70fa0>
variable[rss] assign[=] call[name[np].sum, parameter[binary_operation[binary_operation[call[call[name[np].vectorize, parameter[name[self].discrete]], parameter[call[name[readouts]][name[pos]]]] - binary_operation[call[name[predictions]][name[pos]] * name[self].factor]] ** constant[2]]]]
call[name[self].stats][constant[optimum_mse]] assign[=] call[name[mean_squared_error], parameter[call[name[readouts]][name[pos]], call[name[predictions]][name[pos]]]]
call[name[self].stats][constant[optimum_size]] assign[=] name[self].optimum.size
call[name[self]._logger.info, parameter[constant[Optimum logical networks has MSE %.4f and size %s], call[name[self].stats][constant[optimum_mse]], call[name[self].stats][constant[optimum_size]]]]
call[name[self].networks.reset, parameter[]]
variable[args] assign[=] list[[<ast.BinOp object at 0x7da18bc73d30>, <ast.BinOp object at 0x7da18bc70970>]]
variable[clingo] assign[=] call[name[self].__get_clingo__, parameter[binary_operation[name[encodings] + list[[<ast.Constant object at 0x7da18bc70100>]]], name[args]]]
name[clingo].conf.solve.models assign[=] constant[0]
if compare[name[configure] is_not constant[None]] begin[:]
call[name[configure], parameter[name[clingo].conf]]
call[name[clingo].ground, parameter[list[[<ast.Tuple object at 0x7da18bc71180>]]]]
call[name[clingo].solve, parameter[]]
call[name[self].stats][constant[time_enumeration]] assign[=] call[name[clingo].stats][constant[time_total]]
call[name[self]._logger.info, parameter[constant[%s (nearly) optimal logical networks learned in %.4fs], call[name[len], parameter[name[self].networks]], call[name[self].stats][constant[time_enumeration]]]] | keyword[def] identifier[learn] ( identifier[self] , identifier[fit] = literal[int] , identifier[size] = literal[int] , identifier[configure] = keyword[None] ):
literal[string]
identifier[encodings] =[ literal[string] , literal[string] , literal[string] ]
keyword[if] identifier[self] . identifier[optimum] keyword[is] keyword[None] :
identifier[clingo] = identifier[self] . identifier[__get_clingo__] ( identifier[encodings] +[ literal[string] ])
keyword[if] identifier[configure] keyword[is] keyword[not] keyword[None] :
identifier[configure] ( identifier[clingo] . identifier[conf] )
identifier[clingo] . identifier[ground] ([( literal[string] ,[])])
identifier[clingo] . identifier[solve] ( identifier[on_model] = identifier[self] . identifier[__keep_last__] )
identifier[self] . identifier[stats] [ literal[string] ]= identifier[clingo] . identifier[stats] [ literal[string] ]
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[self] . identifier[stats] [ literal[string] ])
identifier[tuples] =( identifier[f] . identifier[args] () keyword[for] identifier[f] keyword[in] identifier[self] . identifier[_last] )
identifier[self] . identifier[optimum] = identifier[core] . identifier[LogicalNetwork] . identifier[from_hypertuples] ( identifier[self] . identifier[hypergraph] , identifier[tuples] )
identifier[predictions] = identifier[self] . identifier[optimum] . identifier[predictions] ( identifier[self] . identifier[dataset] . identifier[clampings] , identifier[self] . identifier[dataset] . identifier[readouts] . identifier[columns] ). identifier[values]
identifier[readouts] = identifier[self] . identifier[dataset] . identifier[readouts] . identifier[values]
identifier[pos] =~ identifier[np] . identifier[isnan] ( identifier[readouts] )
identifier[rss] = identifier[np] . identifier[sum] (( identifier[np] . identifier[vectorize] ( identifier[self] . identifier[discrete] )( identifier[readouts] [ identifier[pos] ])- identifier[predictions] [ identifier[pos] ]* identifier[self] . identifier[factor] )** literal[int] )
identifier[self] . identifier[stats] [ literal[string] ]= identifier[mean_squared_error] ( identifier[readouts] [ identifier[pos] ], identifier[predictions] [ identifier[pos] ])
identifier[self] . identifier[stats] [ literal[string] ]= identifier[self] . identifier[optimum] . identifier[size]
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[self] . identifier[stats] [ literal[string] ], identifier[self] . identifier[stats] [ literal[string] ])
identifier[self] . identifier[networks] . identifier[reset] ()
identifier[args] =[ literal[string] % identifier[int] ( identifier[rss] + identifier[rss] * identifier[fit] ), literal[string] %( identifier[self] . identifier[optimum] . identifier[size] + identifier[size] )]
identifier[clingo] = identifier[self] . identifier[__get_clingo__] ( identifier[encodings] +[ literal[string] ], identifier[args] )
identifier[clingo] . identifier[conf] . identifier[solve] . identifier[models] = literal[string]
keyword[if] identifier[configure] keyword[is] keyword[not] keyword[None] :
identifier[configure] ( identifier[clingo] . identifier[conf] )
identifier[clingo] . identifier[ground] ([( literal[string] ,[])])
identifier[clingo] . identifier[solve] ( identifier[on_model] = identifier[self] . identifier[__save__] )
identifier[self] . identifier[stats] [ literal[string] ]= identifier[clingo] . identifier[stats] [ literal[string] ]
identifier[self] . identifier[_logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[self] . identifier[networks] ), identifier[self] . identifier[stats] [ literal[string] ]) | def learn(self, fit=0, size=0, configure=None):
"""
Learns all (nearly) optimal logical networks with give fitness and size tolerance.
The first optimum logical network found is saved in the attribute :attr:`optimum` while
all enumerated logical networks are saved in the attribute :attr:`networks`.
Example::
>>> from caspo import core, learn
>>> graph = core.Graph.read_sif('pkn.sif')
>>> dataset = core.Dataset('dataset.csv', 30)
>>> zipped = graph.compress(dataset.setup)
>>> learner = learn.Learner(zipped, dataset, 2, 'round', 100)
>>> learner.learn(0.02, 1)
>>> learner.networks.to_csv('networks.csv')
Parameters
----------
fit : float
Fitness tolerance, e.g., use 0.1 for 10% tolerance with respect to the optimum
size : int
Size tolerance with respect to the optimum
configure : callable
Callable object responsible of setting a custom clingo configuration
"""
encodings = ['guess', 'fixpoint', 'rss']
if self.optimum is None:
clingo = self.__get_clingo__(encodings + ['opt'])
if configure is not None:
configure(clingo.conf) # depends on [control=['if'], data=['configure']]
clingo.ground([('base', [])])
clingo.solve(on_model=self.__keep_last__)
self.stats['time_optimum'] = clingo.stats['time_total']
self._logger.info('Optimum logical network learned in %.4fs', self.stats['time_optimum'])
tuples = (f.args() for f in self._last)
self.optimum = core.LogicalNetwork.from_hypertuples(self.hypergraph, tuples) # depends on [control=['if'], data=[]]
predictions = self.optimum.predictions(self.dataset.clampings, self.dataset.readouts.columns).values
readouts = self.dataset.readouts.values
pos = ~np.isnan(readouts)
rss = np.sum((np.vectorize(self.discrete)(readouts[pos]) - predictions[pos] * self.factor) ** 2)
self.stats['optimum_mse'] = mean_squared_error(readouts[pos], predictions[pos])
self.stats['optimum_size'] = self.optimum.size
self._logger.info('Optimum logical networks has MSE %.4f and size %s', self.stats['optimum_mse'], self.stats['optimum_size'])
self.networks.reset()
args = ['-c maxrss=%s' % int(rss + rss * fit), '-c maxsize=%s' % (self.optimum.size + size)]
clingo = self.__get_clingo__(encodings + ['enum'], args)
clingo.conf.solve.models = '0'
if configure is not None:
configure(clingo.conf) # depends on [control=['if'], data=['configure']]
clingo.ground([('base', [])])
clingo.solve(on_model=self.__save__)
self.stats['time_enumeration'] = clingo.stats['time_total']
self._logger.info('%s (nearly) optimal logical networks learned in %.4fs', len(self.networks), self.stats['time_enumeration']) |
def _memoize_function(f, name, cache_scope=_CS_FOREVER):
    """
    Memoize ``f`` and register its cache with Orca's caching system.
    Parameters
    ----------
    f : function
    name : str
        Name of injectable.
    cache_scope : {'step', 'iteration', 'forever'}, optional
        Scope for which to cache data. Default is to cache forever
        (or until manually cleared). 'iteration' caches data for each
        complete iteration of the pipeline, 'step' caches data for
        a single step of the pipeline.
    Returns
    -------
    The wrapped function, exposing ``cache`` (the underlying dict) and
    ``clear_cached`` (empties the cache) attributes.
    """
    memo = {}

    @wraps(f)
    def wrapper(*args, **kwargs):
        # Build a hashable key from positional and keyword arguments;
        # empty collections collapse to None so f() keys consistently.
        try:
            key = (args if args else None,
                   frozenset(kwargs.items()) if kwargs else None)
            hit = key in memo
        except TypeError:
            raise TypeError(
                'function arguments must be hashable for memoization')
        # Recompute (and refresh the stored value) unless caching is
        # globally enabled AND we already have a result for this key.
        if not (_CACHING and hit):
            memo[key] = f(*args, **kwargs)
        return memo[key]

    wrapper.__wrapped__ = f
    wrapper.cache = memo
    wrapper.clear_cached = memo.clear
    _MEMOIZED[name] = CacheItem(name, wrapper, cache_scope)
    return wrapper
return wrapper | def function[_memoize_function, parameter[f, name, cache_scope]]:
constant[
Wraps a function for memoization and ties it's cache into the
Orca cacheing system.
Parameters
----------
f : function
name : str
Name of injectable.
cache_scope : {'step', 'iteration', 'forever'}, optional
Scope for which to cache data. Default is to cache forever
(or until manually cleared). 'iteration' caches data for each
complete iteration of the pipeline, 'step' caches data for
a single step of the pipeline.
]
variable[cache] assign[=] dictionary[[], []]
def function[wrapper, parameter[]]:
<ast.Try object at 0x7da1b27b5750>
if <ast.BoolOp object at 0x7da1b27b6dd0> begin[:]
return[call[name[cache]][name[cache_key]]]
name[wrapper].__wrapped__ assign[=] name[f]
name[wrapper].cache assign[=] name[cache]
name[wrapper].clear_cached assign[=] <ast.Lambda object at 0x7da1b27b47c0>
call[name[_MEMOIZED]][name[name]] assign[=] call[name[CacheItem], parameter[name[name], name[wrapper], name[cache_scope]]]
return[name[wrapper]] | keyword[def] identifier[_memoize_function] ( identifier[f] , identifier[name] , identifier[cache_scope] = identifier[_CS_FOREVER] ):
literal[string]
identifier[cache] ={}
@ identifier[wraps] ( identifier[f] )
keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[try] :
identifier[cache_key] =(
identifier[args] keyword[or] keyword[None] , identifier[frozenset] ( identifier[kwargs] . identifier[items] ()) keyword[if] identifier[kwargs] keyword[else] keyword[None] )
identifier[in_cache] = identifier[cache_key] keyword[in] identifier[cache]
keyword[except] identifier[TypeError] :
keyword[raise] identifier[TypeError] (
literal[string] )
keyword[if] identifier[_CACHING] keyword[and] identifier[in_cache] :
keyword[return] identifier[cache] [ identifier[cache_key] ]
keyword[else] :
identifier[result] = identifier[f] (* identifier[args] ,** identifier[kwargs] )
identifier[cache] [ identifier[cache_key] ]= identifier[result]
keyword[return] identifier[result]
identifier[wrapper] . identifier[__wrapped__] = identifier[f]
identifier[wrapper] . identifier[cache] = identifier[cache]
identifier[wrapper] . identifier[clear_cached] = keyword[lambda] : identifier[cache] . identifier[clear] ()
identifier[_MEMOIZED] [ identifier[name] ]= identifier[CacheItem] ( identifier[name] , identifier[wrapper] , identifier[cache_scope] )
keyword[return] identifier[wrapper] | def _memoize_function(f, name, cache_scope=_CS_FOREVER):
"""
Wraps a function for memoization and ties it's cache into the
Orca cacheing system.
Parameters
----------
f : function
name : str
Name of injectable.
cache_scope : {'step', 'iteration', 'forever'}, optional
Scope for which to cache data. Default is to cache forever
(or until manually cleared). 'iteration' caches data for each
complete iteration of the pipeline, 'step' caches data for
a single step of the pipeline.
"""
cache = {}
@wraps(f)
def wrapper(*args, **kwargs):
try:
cache_key = (args or None, frozenset(kwargs.items()) if kwargs else None)
in_cache = cache_key in cache # depends on [control=['try'], data=[]]
except TypeError:
raise TypeError('function arguments must be hashable for memoization') # depends on [control=['except'], data=[]]
if _CACHING and in_cache:
return cache[cache_key] # depends on [control=['if'], data=[]]
else:
result = f(*args, **kwargs)
cache[cache_key] = result
return result
wrapper.__wrapped__ = f
wrapper.cache = cache
wrapper.clear_cached = lambda : cache.clear()
_MEMOIZED[name] = CacheItem(name, wrapper, cache_scope)
return wrapper |
def plural_noun(self, text, count=None):
    """
    Pluralize the noun contained in ``text``.
    When ``count`` is supplied, ``text`` is returned unchanged if the
    count denotes a single item (1, a, an, one, each, every, this,
    that); otherwise the plural form is returned. Leading and trailing
    whitespace around the word is preserved.
    """
    prefix, core, suffix = self.partition_word(text)
    # Nothing to pluralize (e.g. empty or whitespace-only input).
    if not core:
        return text
    pluralized = self.postprocess(core, self._plnoun(core, count))
    return "{}{}{}".format(prefix, pluralized, suffix)
constant[
Return the plural of text, where text is a noun.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
]
<ast.Tuple object at 0x7da20c796470> assign[=] call[name[self].partition_word, parameter[name[text]]]
if <ast.UnaryOp object at 0x7da2044c0ca0> begin[:]
return[name[text]]
variable[plural] assign[=] call[name[self].postprocess, parameter[name[word], call[name[self]._plnoun, parameter[name[word], name[count]]]]]
return[call[constant[{}{}{}].format, parameter[name[pre], name[plural], name[post]]]] | keyword[def] identifier[plural_noun] ( identifier[self] , identifier[text] , identifier[count] = keyword[None] ):
literal[string]
identifier[pre] , identifier[word] , identifier[post] = identifier[self] . identifier[partition_word] ( identifier[text] )
keyword[if] keyword[not] identifier[word] :
keyword[return] identifier[text]
identifier[plural] = identifier[self] . identifier[postprocess] ( identifier[word] , identifier[self] . identifier[_plnoun] ( identifier[word] , identifier[count] ))
keyword[return] literal[string] . identifier[format] ( identifier[pre] , identifier[plural] , identifier[post] ) | def plural_noun(self, text, count=None):
"""
Return the plural of text, where text is a noun.
If count supplied, then return text if count is one of:
1, a, an, one, each, every, this, that
otherwise return the plural.
Whitespace at the start and end is preserved.
"""
(pre, word, post) = self.partition_word(text)
if not word:
return text # depends on [control=['if'], data=[]]
plural = self.postprocess(word, self._plnoun(word, count))
return '{}{}{}'.format(pre, plural, post) |
def get_event(self, num):
    """Extract a single event record from the dataset.

    Parameters
    ----------
    num : int
        Zero-based event index; must lie in ``[0, events_num)``.

    Returns
    -------
    dict
        With keys ``text_hdr`` (64 raw header bytes), ``ev_num`` (event
        counter stored in the file), ``start_time`` (a ``datetime``) and
        ``data`` (``np.short`` samples, ``b_size`` per channel). When the
        file stores a non-zero nanosecond timestamp, ``ns_since_epoch``
        is included as well.

    Raises
    ------
    IndexError
        If ``num`` is outside the valid range.
    """
    if num < 0 or num >= self.params["events_num"]:
        raise IndexError("Index out of range [0:%s]" %
                         (self.params["events_num"]))
    ch_num = self.params['channel_number']
    ev_size = self.params['b_size']
    event = {}
    # Records start after a 7168-byte global header; each record is a
    # 96-byte event header followed by 2-byte samples for every channel.
    self.file.seek(7168 + num * (96 + 2 * ch_num * ev_size))
    event["text_hdr"] = self.file.read(64)
    event["ev_num"] = struct.unpack('I', self.file.read(4))[0]
    self.file.read(4)  # skip 4 unused header bytes
    start_time = struct.unpack('Q', self.file.read(8))[0]
    event["start_time"] = datetime.fromtimestamp(start_time)
    ns_since_epoch = struct.unpack('Q', self.file.read(8))[0]
    if ns_since_epoch:
        event['ns_since_epoch'] = ns_since_epoch
    self.file.read(8)  # skip 8 reserved bytes
    event_data = self.file.read(2 * ev_size * ch_num)
    # np.fromstring is deprecated for binary input; frombuffer + copy()
    # is the supported equivalent and keeps the array writable.
    event["data"] = np.frombuffer(event_data, np.short).copy()
    return event
constant[Extract event from dataset.]
if <ast.BoolOp object at 0x7da18dc9bee0> begin[:]
<ast.Raise object at 0x7da18dc98cd0>
variable[ch_num] assign[=] call[name[self].params][constant[channel_number]]
variable[ev_size] assign[=] call[name[self].params][constant[b_size]]
variable[event] assign[=] dictionary[[], []]
call[name[self].file.seek, parameter[binary_operation[constant[7168] + binary_operation[name[num] * binary_operation[constant[96] + binary_operation[binary_operation[constant[2] * name[ch_num]] * name[ev_size]]]]]]]
call[name[event]][constant[text_hdr]] assign[=] call[name[self].file.read, parameter[constant[64]]]
call[name[event]][constant[ev_num]] assign[=] call[call[name[struct].unpack, parameter[constant[I], call[name[self].file.read, parameter[constant[4]]]]]][constant[0]]
call[name[self].file.read, parameter[constant[4]]]
variable[start_time] assign[=] call[call[name[struct].unpack, parameter[constant[Q], call[name[self].file.read, parameter[constant[8]]]]]][constant[0]]
call[name[event]][constant[start_time]] assign[=] call[name[datetime].fromtimestamp, parameter[name[start_time]]]
variable[ns_since_epoch] assign[=] call[call[name[struct].unpack, parameter[constant[Q], call[name[self].file.read, parameter[constant[8]]]]]][constant[0]]
if name[ns_since_epoch] begin[:]
call[name[event]][constant[ns_since_epoch]] assign[=] name[ns_since_epoch]
call[name[self].file.read, parameter[constant[8]]]
variable[event_data] assign[=] call[name[self].file.read, parameter[binary_operation[binary_operation[constant[2] * name[ev_size]] * name[ch_num]]]]
call[name[event]][constant[data]] assign[=] call[name[np].fromstring, parameter[name[event_data], name[np].short]]
return[name[event]] | keyword[def] identifier[get_event] ( identifier[self] , identifier[num] ):
literal[string]
keyword[if] identifier[num] < literal[int] keyword[or] identifier[num] >= identifier[self] . identifier[params] [ literal[string] ]:
keyword[raise] identifier[IndexError] ( literal[string] %
( identifier[self] . identifier[params] [ literal[string] ]))
identifier[ch_num] = identifier[self] . identifier[params] [ literal[string] ]
identifier[ev_size] = identifier[self] . identifier[params] [ literal[string] ]
identifier[event] ={}
identifier[self] . identifier[file] . identifier[seek] ( literal[int] + identifier[num] *( literal[int] + literal[int] * identifier[ch_num] * identifier[ev_size] ))
identifier[event] [ literal[string] ]= identifier[self] . identifier[file] . identifier[read] ( literal[int] )
identifier[event] [ literal[string] ]= identifier[struct] . identifier[unpack] ( literal[string] , identifier[self] . identifier[file] . identifier[read] ( literal[int] ))[ literal[int] ]
identifier[self] . identifier[file] . identifier[read] ( literal[int] )
identifier[start_time] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[self] . identifier[file] . identifier[read] ( literal[int] ))[ literal[int] ]
identifier[event] [ literal[string] ]= identifier[datetime] . identifier[fromtimestamp] ( identifier[start_time] )
identifier[ns_since_epoch] = identifier[struct] . identifier[unpack] ( literal[string] , identifier[self] . identifier[file] . identifier[read] ( literal[int] ))[ literal[int] ]
keyword[if] identifier[ns_since_epoch] :
identifier[event] [ literal[string] ]= identifier[ns_since_epoch]
identifier[self] . identifier[file] . identifier[read] ( literal[int] )
identifier[event_data] = identifier[self] . identifier[file] . identifier[read] ( literal[int] * identifier[ev_size] * identifier[ch_num] )
identifier[event] [ literal[string] ]= identifier[np] . identifier[fromstring] ( identifier[event_data] , identifier[np] . identifier[short] )
keyword[return] identifier[event] | def get_event(self, num):
"""Extract event from dataset."""
if num < 0 or num >= self.params['events_num']:
raise IndexError('Index out of range [0:%s]' % self.params['events_num']) # depends on [control=['if'], data=[]]
ch_num = self.params['channel_number']
ev_size = self.params['b_size']
event = {}
self.file.seek(7168 + num * (96 + 2 * ch_num * ev_size))
event['text_hdr'] = self.file.read(64)
event['ev_num'] = struct.unpack('I', self.file.read(4))[0]
self.file.read(4)
start_time = struct.unpack('Q', self.file.read(8))[0]
event['start_time'] = datetime.fromtimestamp(start_time)
ns_since_epoch = struct.unpack('Q', self.file.read(8))[0]
if ns_since_epoch:
event['ns_since_epoch'] = ns_since_epoch # depends on [control=['if'], data=[]]
self.file.read(8)
event_data = self.file.read(2 * ev_size * ch_num)
event['data'] = np.fromstring(event_data, np.short)
return event |
def lag_matrix(blk, max_lag=None):
    """
    Finds the lag matrix for a given 1-D block sequence.
    Parameters
    ----------
    blk :
        An iterable with well-defined length. Don't use this function with Stream
        objects!
    max_lag :
        The size of the result, the lags you'd need. Defaults to ``len(blk) - 1``,
        the maximum lag that doesn't create fully zeroed matrices.
    Returns
    -------
    The covariance matrix as a list of lists. Each cell (i, j) contains the sum
    of ``blk[n - i] * blk[n - j]`` elements for all n that allows such without
    padding the given block.
    """
    if max_lag is None:
        max_lag = len(blk) - 1
    elif max_lag >= len(blk):
        raise ValueError("Block length should be higher than order")
    # Uses ``range`` (not the Python-2-only ``xrange``) so the function
    # works on Python 3. The summation starts at n = max_lag so every
    # index n - i stays inside the block (no zero padding).
    return [[sum(blk[n - i] * blk[n - j] for n in range(max_lag, len(blk))
                ) for i in range(max_lag + 1)
           ] for j in range(max_lag + 1)]
constant[
Finds the lag matrix for a given 1-D block sequence.
Parameters
----------
blk :
An iterable with well-defined length. Don't use this function with Stream
objects!
max_lag :
The size of the result, the lags you'd need. Defaults to ``len(blk) - 1``,
the maximum lag that doesn't create fully zeroed matrices.
Returns
-------
The covariance matrix as a list of lists. Each cell (i, j) contains the sum
of ``blk[n - i] * blk[n - j]`` elements for all n that allows such without
padding the given block.
]
if compare[name[max_lag] is constant[None]] begin[:]
variable[max_lag] assign[=] binary_operation[call[name[len], parameter[name[blk]]] - constant[1]]
return[<ast.ListComp object at 0x7da1b06d34c0>] | keyword[def] identifier[lag_matrix] ( identifier[blk] , identifier[max_lag] = keyword[None] ):
literal[string]
keyword[if] identifier[max_lag] keyword[is] keyword[None] :
identifier[max_lag] = identifier[len] ( identifier[blk] )- literal[int]
keyword[elif] identifier[max_lag] >= identifier[len] ( identifier[blk] ):
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[return] [[ identifier[sum] ( identifier[blk] [ identifier[n] - identifier[i] ]* identifier[blk] [ identifier[n] - identifier[j] ] keyword[for] identifier[n] keyword[in] identifier[xrange] ( identifier[max_lag] , identifier[len] ( identifier[blk] ))
) keyword[for] identifier[i] keyword[in] identifier[xrange] ( identifier[max_lag] + literal[int] )
] keyword[for] identifier[j] keyword[in] identifier[xrange] ( identifier[max_lag] + literal[int] )] | def lag_matrix(blk, max_lag=None):
"""
Finds the lag matrix for a given 1-D block sequence.
Parameters
----------
blk :
An iterable with well-defined length. Don't use this function with Stream
objects!
max_lag :
The size of the result, the lags you'd need. Defaults to ``len(blk) - 1``,
the maximum lag that doesn't create fully zeroed matrices.
Returns
-------
The covariance matrix as a list of lists. Each cell (i, j) contains the sum
of ``blk[n - i] * blk[n - j]`` elements for all n that allows such without
padding the given block.
"""
if max_lag is None:
max_lag = len(blk) - 1 # depends on [control=['if'], data=['max_lag']]
elif max_lag >= len(blk):
raise ValueError('Block length should be higher than order') # depends on [control=['if'], data=[]]
return [[sum((blk[n - i] * blk[n - j] for n in xrange(max_lag, len(blk)))) for i in xrange(max_lag + 1)] for j in xrange(max_lag + 1)] |
def eld_another(U,p_min,p_max,d,brk):
"""eld -- economic load dispatching in electricity generation
Parameters:
- U: set of generators (units)
- p_min[u]: minimum operating power for unit u
- p_max[u]: maximum operating power for unit u
- d: demand
- brk[u][k]: (x,y) coordinates of breakpoint k, k=0,...,K for unit u
Returns a model, ready to be solved.
"""
model = Model("Economic load dispatching")
# set objective based on piecewise linear approximation
p,F,z = {},{},{}
for u in U:
abrk = [X for (X,Y) in brk[u]]
bbrk = [Y for (X,Y) in brk[u]]
p[u],F[u],z[u] = convex_comb_sos(model,abrk,bbrk)
p[u].lb = p_min[u]
p[u].ub = p_max[u]
# demand satisfaction
model.addCons(quicksum(p[u] for u in U) == d, "demand")
# objective
model.setObjective(quicksum(F[u] for u in U), "minimize")
model.data = p
return model | def function[eld_another, parameter[U, p_min, p_max, d, brk]]:
constant[eld -- economic load dispatching in electricity generation
Parameters:
- U: set of generators (units)
- p_min[u]: minimum operating power for unit u
- p_max[u]: maximum operating power for unit u
- d: demand
- brk[u][k]: (x,y) coordinates of breakpoint k, k=0,...,K for unit u
Returns a model, ready to be solved.
]
variable[model] assign[=] call[name[Model], parameter[constant[Economic load dispatching]]]
<ast.Tuple object at 0x7da18f00d390> assign[=] tuple[[<ast.Dict object at 0x7da18f00d210>, <ast.Dict object at 0x7da18f00d750>, <ast.Dict object at 0x7da18f00d8a0>]]
for taget[name[u]] in starred[name[U]] begin[:]
variable[abrk] assign[=] <ast.ListComp object at 0x7da18f00cd60>
variable[bbrk] assign[=] <ast.ListComp object at 0x7da18f00cb80>
<ast.Tuple object at 0x7da18f00e2c0> assign[=] call[name[convex_comb_sos], parameter[name[model], name[abrk], name[bbrk]]]
call[name[p]][name[u]].lb assign[=] call[name[p_min]][name[u]]
call[name[p]][name[u]].ub assign[=] call[name[p_max]][name[u]]
call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da20c6c5360>]] equal[==] name[d]], constant[demand]]]
call[name[model].setObjective, parameter[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da20c6c6800>]], constant[minimize]]]
name[model].data assign[=] name[p]
return[name[model]] | keyword[def] identifier[eld_another] ( identifier[U] , identifier[p_min] , identifier[p_max] , identifier[d] , identifier[brk] ):
literal[string]
identifier[model] = identifier[Model] ( literal[string] )
identifier[p] , identifier[F] , identifier[z] ={},{},{}
keyword[for] identifier[u] keyword[in] identifier[U] :
identifier[abrk] =[ identifier[X] keyword[for] ( identifier[X] , identifier[Y] ) keyword[in] identifier[brk] [ identifier[u] ]]
identifier[bbrk] =[ identifier[Y] keyword[for] ( identifier[X] , identifier[Y] ) keyword[in] identifier[brk] [ identifier[u] ]]
identifier[p] [ identifier[u] ], identifier[F] [ identifier[u] ], identifier[z] [ identifier[u] ]= identifier[convex_comb_sos] ( identifier[model] , identifier[abrk] , identifier[bbrk] )
identifier[p] [ identifier[u] ]. identifier[lb] = identifier[p_min] [ identifier[u] ]
identifier[p] [ identifier[u] ]. identifier[ub] = identifier[p_max] [ identifier[u] ]
identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[p] [ identifier[u] ] keyword[for] identifier[u] keyword[in] identifier[U] )== identifier[d] , literal[string] )
identifier[model] . identifier[setObjective] ( identifier[quicksum] ( identifier[F] [ identifier[u] ] keyword[for] identifier[u] keyword[in] identifier[U] ), literal[string] )
identifier[model] . identifier[data] = identifier[p]
keyword[return] identifier[model] | def eld_another(U, p_min, p_max, d, brk):
"""eld -- economic load dispatching in electricity generation
Parameters:
- U: set of generators (units)
- p_min[u]: minimum operating power for unit u
- p_max[u]: maximum operating power for unit u
- d: demand
- brk[u][k]: (x,y) coordinates of breakpoint k, k=0,...,K for unit u
Returns a model, ready to be solved.
"""
model = Model('Economic load dispatching')
# set objective based on piecewise linear approximation
(p, F, z) = ({}, {}, {})
for u in U:
abrk = [X for (X, Y) in brk[u]]
bbrk = [Y for (X, Y) in brk[u]]
(p[u], F[u], z[u]) = convex_comb_sos(model, abrk, bbrk)
p[u].lb = p_min[u]
p[u].ub = p_max[u] # depends on [control=['for'], data=['u']]
# demand satisfaction
model.addCons(quicksum((p[u] for u in U)) == d, 'demand')
# objective
model.setObjective(quicksum((F[u] for u in U)), 'minimize')
model.data = p
return model |
def topology_mdtraj(traj):
'''Generate topology spec for the MolecularViewer from mdtraj.
:param mdtraj.Trajectory traj: the trajectory
:return: A chemview-compatible dictionary corresponding to the topology defined in mdtraj.
'''
import mdtraj as md
top = {}
top['atom_types'] = [a.element.symbol for a in traj.topology.atoms]
top['atom_names'] = [a.name for a in traj.topology.atoms]
top['bonds'] = [(a.index, b.index) for a, b in traj.topology.bonds]
top['secondary_structure'] = md.compute_dssp(traj[0])[0]
top['residue_types'] = [r.name for r in traj.topology.residues ]
top['residue_indices'] = [ [a.index for a in r.atoms] for r in traj.topology.residues ]
return top | def function[topology_mdtraj, parameter[traj]]:
constant[Generate topology spec for the MolecularViewer from mdtraj.
:param mdtraj.Trajectory traj: the trajectory
:return: A chemview-compatible dictionary corresponding to the topology defined in mdtraj.
]
import module[mdtraj] as alias[md]
variable[top] assign[=] dictionary[[], []]
call[name[top]][constant[atom_types]] assign[=] <ast.ListComp object at 0x7da20cabd090>
call[name[top]][constant[atom_names]] assign[=] <ast.ListComp object at 0x7da20cabeef0>
call[name[top]][constant[bonds]] assign[=] <ast.ListComp object at 0x7da20cabd750>
call[name[top]][constant[secondary_structure]] assign[=] call[call[name[md].compute_dssp, parameter[call[name[traj]][constant[0]]]]][constant[0]]
call[name[top]][constant[residue_types]] assign[=] <ast.ListComp object at 0x7da20e9576d0>
call[name[top]][constant[residue_indices]] assign[=] <ast.ListComp object at 0x7da20e956890>
return[name[top]] | keyword[def] identifier[topology_mdtraj] ( identifier[traj] ):
literal[string]
keyword[import] identifier[mdtraj] keyword[as] identifier[md]
identifier[top] ={}
identifier[top] [ literal[string] ]=[ identifier[a] . identifier[element] . identifier[symbol] keyword[for] identifier[a] keyword[in] identifier[traj] . identifier[topology] . identifier[atoms] ]
identifier[top] [ literal[string] ]=[ identifier[a] . identifier[name] keyword[for] identifier[a] keyword[in] identifier[traj] . identifier[topology] . identifier[atoms] ]
identifier[top] [ literal[string] ]=[( identifier[a] . identifier[index] , identifier[b] . identifier[index] ) keyword[for] identifier[a] , identifier[b] keyword[in] identifier[traj] . identifier[topology] . identifier[bonds] ]
identifier[top] [ literal[string] ]= identifier[md] . identifier[compute_dssp] ( identifier[traj] [ literal[int] ])[ literal[int] ]
identifier[top] [ literal[string] ]=[ identifier[r] . identifier[name] keyword[for] identifier[r] keyword[in] identifier[traj] . identifier[topology] . identifier[residues] ]
identifier[top] [ literal[string] ]=[[ identifier[a] . identifier[index] keyword[for] identifier[a] keyword[in] identifier[r] . identifier[atoms] ] keyword[for] identifier[r] keyword[in] identifier[traj] . identifier[topology] . identifier[residues] ]
keyword[return] identifier[top] | def topology_mdtraj(traj):
"""Generate topology spec for the MolecularViewer from mdtraj.
:param mdtraj.Trajectory traj: the trajectory
:return: A chemview-compatible dictionary corresponding to the topology defined in mdtraj.
"""
import mdtraj as md
top = {}
top['atom_types'] = [a.element.symbol for a in traj.topology.atoms]
top['atom_names'] = [a.name for a in traj.topology.atoms]
top['bonds'] = [(a.index, b.index) for (a, b) in traj.topology.bonds]
top['secondary_structure'] = md.compute_dssp(traj[0])[0]
top['residue_types'] = [r.name for r in traj.topology.residues]
top['residue_indices'] = [[a.index for a in r.atoms] for r in traj.topology.residues]
return top |
def verify(self, proof, challenge, state):
"""returns true if the proof matches the challenge. verifies that the
server possesses the encoded file.
:param proof: the proof that was returned from the server
:param challenge: the challenge provided to the server
:param state: the state of the file, which includes the merkle root of
of the merkle tree, for verification.
"""
state.checksig(self.key)
if (proof.leaf.index != challenge.index):
return False
return MerkleTree.verify_branch(proof.leaf,
proof.branch,
state.root) | def function[verify, parameter[self, proof, challenge, state]]:
constant[returns true if the proof matches the challenge. verifies that the
server possesses the encoded file.
:param proof: the proof that was returned from the server
:param challenge: the challenge provided to the server
:param state: the state of the file, which includes the merkle root of
of the merkle tree, for verification.
]
call[name[state].checksig, parameter[name[self].key]]
if compare[name[proof].leaf.index not_equal[!=] name[challenge].index] begin[:]
return[constant[False]]
return[call[name[MerkleTree].verify_branch, parameter[name[proof].leaf, name[proof].branch, name[state].root]]] | keyword[def] identifier[verify] ( identifier[self] , identifier[proof] , identifier[challenge] , identifier[state] ):
literal[string]
identifier[state] . identifier[checksig] ( identifier[self] . identifier[key] )
keyword[if] ( identifier[proof] . identifier[leaf] . identifier[index] != identifier[challenge] . identifier[index] ):
keyword[return] keyword[False]
keyword[return] identifier[MerkleTree] . identifier[verify_branch] ( identifier[proof] . identifier[leaf] ,
identifier[proof] . identifier[branch] ,
identifier[state] . identifier[root] ) | def verify(self, proof, challenge, state):
"""returns true if the proof matches the challenge. verifies that the
server possesses the encoded file.
:param proof: the proof that was returned from the server
:param challenge: the challenge provided to the server
:param state: the state of the file, which includes the merkle root of
of the merkle tree, for verification.
"""
state.checksig(self.key)
if proof.leaf.index != challenge.index:
return False # depends on [control=['if'], data=[]]
return MerkleTree.verify_branch(proof.leaf, proof.branch, state.root) |
def temp_dir(remover=shutil.rmtree):
"""
Create a temporary directory context. Pass a custom remover
to override the removal behavior.
"""
temp_dir = tempfile.mkdtemp()
try:
yield temp_dir
finally:
remover(temp_dir) | def function[temp_dir, parameter[remover]]:
constant[
Create a temporary directory context. Pass a custom remover
to override the removal behavior.
]
variable[temp_dir] assign[=] call[name[tempfile].mkdtemp, parameter[]]
<ast.Try object at 0x7da1b13bb700> | keyword[def] identifier[temp_dir] ( identifier[remover] = identifier[shutil] . identifier[rmtree] ):
literal[string]
identifier[temp_dir] = identifier[tempfile] . identifier[mkdtemp] ()
keyword[try] :
keyword[yield] identifier[temp_dir]
keyword[finally] :
identifier[remover] ( identifier[temp_dir] ) | def temp_dir(remover=shutil.rmtree):
"""
Create a temporary directory context. Pass a custom remover
to override the removal behavior.
"""
temp_dir = tempfile.mkdtemp()
try:
yield temp_dir # depends on [control=['try'], data=[]]
finally:
remover(temp_dir) |
def unassign_assessment_part_from_bank(self, assessment_part_id, bank_id):
"""Removes an ``AssessmentPart`` from an ``Bank``.
arg: assessment_part_id (osid.id.Id): the ``Id`` of the
``AssessmentPart``
arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank``
raise: NotFound - ``assessment_part_id`` or ``bank_id`` not
found or ``assessment_part_id`` not assigned to
``bank_id``
raise: NullArgument - ``assessment_part_id`` or ``bank_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
mgr = self._get_provider_manager('ASSESSMENT', local=True)
lookup_session = mgr.get_bank_lookup_session(proxy=self._proxy)
lookup_session.get_bank(bank_id) # to raise NotFound
self._unassign_object_from_catalog(assessment_part_id, bank_id) | def function[unassign_assessment_part_from_bank, parameter[self, assessment_part_id, bank_id]]:
constant[Removes an ``AssessmentPart`` from an ``Bank``.
arg: assessment_part_id (osid.id.Id): the ``Id`` of the
``AssessmentPart``
arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank``
raise: NotFound - ``assessment_part_id`` or ``bank_id`` not
found or ``assessment_part_id`` not assigned to
``bank_id``
raise: NullArgument - ``assessment_part_id`` or ``bank_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
]
variable[mgr] assign[=] call[name[self]._get_provider_manager, parameter[constant[ASSESSMENT]]]
variable[lookup_session] assign[=] call[name[mgr].get_bank_lookup_session, parameter[]]
call[name[lookup_session].get_bank, parameter[name[bank_id]]]
call[name[self]._unassign_object_from_catalog, parameter[name[assessment_part_id], name[bank_id]]] | keyword[def] identifier[unassign_assessment_part_from_bank] ( identifier[self] , identifier[assessment_part_id] , identifier[bank_id] ):
literal[string]
identifier[mgr] = identifier[self] . identifier[_get_provider_manager] ( literal[string] , identifier[local] = keyword[True] )
identifier[lookup_session] = identifier[mgr] . identifier[get_bank_lookup_session] ( identifier[proxy] = identifier[self] . identifier[_proxy] )
identifier[lookup_session] . identifier[get_bank] ( identifier[bank_id] )
identifier[self] . identifier[_unassign_object_from_catalog] ( identifier[assessment_part_id] , identifier[bank_id] ) | def unassign_assessment_part_from_bank(self, assessment_part_id, bank_id):
"""Removes an ``AssessmentPart`` from an ``Bank``.
arg: assessment_part_id (osid.id.Id): the ``Id`` of the
``AssessmentPart``
arg: bank_id (osid.id.Id): the ``Id`` of the ``Bank``
raise: NotFound - ``assessment_part_id`` or ``bank_id`` not
found or ``assessment_part_id`` not assigned to
``bank_id``
raise: NullArgument - ``assessment_part_id`` or ``bank_id`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.*
"""
mgr = self._get_provider_manager('ASSESSMENT', local=True)
lookup_session = mgr.get_bank_lookup_session(proxy=self._proxy)
lookup_session.get_bank(bank_id) # to raise NotFound
self._unassign_object_from_catalog(assessment_part_id, bank_id) |
def _set_value(self, value):
"""Called by a Job object to tell the result is ready, and
provides the value of this result. The object will become
ready and successful. The collector's notify_ready() method
will be called, and the callback method too"""
assert not self.ready()
self._data = value
self._success = True
self._event.set()
if self._collector is not None:
self._collector.notify_ready(self)
if self._callback is not None:
try:
self._callback(value)
except:
traceback.print_exc() | def function[_set_value, parameter[self, value]]:
constant[Called by a Job object to tell the result is ready, and
provides the value of this result. The object will become
ready and successful. The collector's notify_ready() method
will be called, and the callback method too]
assert[<ast.UnaryOp object at 0x7da2045655a0>]
name[self]._data assign[=] name[value]
name[self]._success assign[=] constant[True]
call[name[self]._event.set, parameter[]]
if compare[name[self]._collector is_not constant[None]] begin[:]
call[name[self]._collector.notify_ready, parameter[name[self]]]
if compare[name[self]._callback is_not constant[None]] begin[:]
<ast.Try object at 0x7da2047e8d90> | keyword[def] identifier[_set_value] ( identifier[self] , identifier[value] ):
literal[string]
keyword[assert] keyword[not] identifier[self] . identifier[ready] ()
identifier[self] . identifier[_data] = identifier[value]
identifier[self] . identifier[_success] = keyword[True]
identifier[self] . identifier[_event] . identifier[set] ()
keyword[if] identifier[self] . identifier[_collector] keyword[is] keyword[not] keyword[None] :
identifier[self] . identifier[_collector] . identifier[notify_ready] ( identifier[self] )
keyword[if] identifier[self] . identifier[_callback] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[self] . identifier[_callback] ( identifier[value] )
keyword[except] :
identifier[traceback] . identifier[print_exc] () | def _set_value(self, value):
"""Called by a Job object to tell the result is ready, and
provides the value of this result. The object will become
ready and successful. The collector's notify_ready() method
will be called, and the callback method too"""
assert not self.ready()
self._data = value
self._success = True
self._event.set()
if self._collector is not None:
self._collector.notify_ready(self) # depends on [control=['if'], data=[]]
if self._callback is not None:
try:
self._callback(value) # depends on [control=['try'], data=[]]
except:
traceback.print_exc() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] |
def delete(self, template_id, session):
'''taobao.delivery.template.delete 删除运费模板
根据用户指定的模板ID删除指定的模板'''
request = TOPRequest('taobao.delivery.template.delete')
request['template_id'] = template_id
self.create(self.execute(request, session), fields=['complete', ])
return self.complete | def function[delete, parameter[self, template_id, session]]:
constant[taobao.delivery.template.delete 删除运费模板
根据用户指定的模板ID删除指定的模板]
variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.delivery.template.delete]]]
call[name[request]][constant[template_id]] assign[=] name[template_id]
call[name[self].create, parameter[call[name[self].execute, parameter[name[request], name[session]]]]]
return[name[self].complete] | keyword[def] identifier[delete] ( identifier[self] , identifier[template_id] , identifier[session] ):
literal[string]
identifier[request] = identifier[TOPRequest] ( literal[string] )
identifier[request] [ literal[string] ]= identifier[template_id]
identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] , identifier[session] ), identifier[fields] =[ literal[string] ,])
keyword[return] identifier[self] . identifier[complete] | def delete(self, template_id, session):
"""taobao.delivery.template.delete 删除运费模板
根据用户指定的模板ID删除指定的模板"""
request = TOPRequest('taobao.delivery.template.delete')
request['template_id'] = template_id
self.create(self.execute(request, session), fields=['complete'])
return self.complete |
def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
For Consul we also lstrip any '/' chars from the prefixed key.
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
fq_key = super(Reader, self)._qualified_key(key)
return fq_key.lstrip('/') | def function[_qualified_key, parameter[self, key]]:
constant[
Prepends the configured prefix to the key (if applicable).
For Consul we also lstrip any '/' chars from the prefixed key.
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
]
variable[fq_key] assign[=] call[call[name[super], parameter[name[Reader], name[self]]]._qualified_key, parameter[name[key]]]
return[call[name[fq_key].lstrip, parameter[constant[/]]]] | keyword[def] identifier[_qualified_key] ( identifier[self] , identifier[key] ):
literal[string]
identifier[fq_key] = identifier[super] ( identifier[Reader] , identifier[self] ). identifier[_qualified_key] ( identifier[key] )
keyword[return] identifier[fq_key] . identifier[lstrip] ( literal[string] ) | def _qualified_key(self, key):
"""
Prepends the configured prefix to the key (if applicable).
For Consul we also lstrip any '/' chars from the prefixed key.
:param key: The unprefixed key.
:return: The key with any configured prefix prepended.
"""
fq_key = super(Reader, self)._qualified_key(key)
return fq_key.lstrip('/') |
def run_details(self, run_bc, run_date=None):
"""Next Gen LIMS specific API functionality.
"""
try:
details = self._get("/nglims/api_run_details", dict(run=run_bc))
except ValueError:
raise ValueError("Could not find information in Galaxy for run: %s" % run_bc)
if "error" in details and run_date is not None:
try:
details = self._get("/nglims/api_run_details", dict(run=run_date))
except ValueError:
raise ValueError("Could not find information in Galaxy for run: %s" % run_date)
return details | def function[run_details, parameter[self, run_bc, run_date]]:
constant[Next Gen LIMS specific API functionality.
]
<ast.Try object at 0x7da1b18f8b80>
if <ast.BoolOp object at 0x7da1b18978e0> begin[:]
<ast.Try object at 0x7da1b1894310>
return[name[details]] | keyword[def] identifier[run_details] ( identifier[self] , identifier[run_bc] , identifier[run_date] = keyword[None] ):
literal[string]
keyword[try] :
identifier[details] = identifier[self] . identifier[_get] ( literal[string] , identifier[dict] ( identifier[run] = identifier[run_bc] ))
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[run_bc] )
keyword[if] literal[string] keyword[in] identifier[details] keyword[and] identifier[run_date] keyword[is] keyword[not] keyword[None] :
keyword[try] :
identifier[details] = identifier[self] . identifier[_get] ( literal[string] , identifier[dict] ( identifier[run] = identifier[run_date] ))
keyword[except] identifier[ValueError] :
keyword[raise] identifier[ValueError] ( literal[string] % identifier[run_date] )
keyword[return] identifier[details] | def run_details(self, run_bc, run_date=None):
"""Next Gen LIMS specific API functionality.
"""
try:
details = self._get('/nglims/api_run_details', dict(run=run_bc)) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('Could not find information in Galaxy for run: %s' % run_bc) # depends on [control=['except'], data=[]]
if 'error' in details and run_date is not None:
try:
details = self._get('/nglims/api_run_details', dict(run=run_date)) # depends on [control=['try'], data=[]]
except ValueError:
raise ValueError('Could not find information in Galaxy for run: %s' % run_date) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return details |
def wsgi(self, environ, start_response):
"""Implements the mapper's WSGI interface."""
request = Request(environ)
ctx = Context(request)
try:
try:
response = self(request, ctx)
ctx._run_callbacks('finalize', (request, response))
response = response.conditional_to(request)
except HTTPException as e:
response = e.response
except Exception:
self.handle_error(request, ctx)
response = InternalServerError().response
response.add_callback(lambda: ctx._run_callbacks('close'))
return response(environ, start_response)
finally:
ctx._run_callbacks('teardown', log_errors=True) | def function[wsgi, parameter[self, environ, start_response]]:
constant[Implements the mapper's WSGI interface.]
variable[request] assign[=] call[name[Request], parameter[name[environ]]]
variable[ctx] assign[=] call[name[Context], parameter[name[request]]]
<ast.Try object at 0x7da20c76eb60> | keyword[def] identifier[wsgi] ( identifier[self] , identifier[environ] , identifier[start_response] ):
literal[string]
identifier[request] = identifier[Request] ( identifier[environ] )
identifier[ctx] = identifier[Context] ( identifier[request] )
keyword[try] :
keyword[try] :
identifier[response] = identifier[self] ( identifier[request] , identifier[ctx] )
identifier[ctx] . identifier[_run_callbacks] ( literal[string] ,( identifier[request] , identifier[response] ))
identifier[response] = identifier[response] . identifier[conditional_to] ( identifier[request] )
keyword[except] identifier[HTTPException] keyword[as] identifier[e] :
identifier[response] = identifier[e] . identifier[response]
keyword[except] identifier[Exception] :
identifier[self] . identifier[handle_error] ( identifier[request] , identifier[ctx] )
identifier[response] = identifier[InternalServerError] (). identifier[response]
identifier[response] . identifier[add_callback] ( keyword[lambda] : identifier[ctx] . identifier[_run_callbacks] ( literal[string] ))
keyword[return] identifier[response] ( identifier[environ] , identifier[start_response] )
keyword[finally] :
identifier[ctx] . identifier[_run_callbacks] ( literal[string] , identifier[log_errors] = keyword[True] ) | def wsgi(self, environ, start_response):
"""Implements the mapper's WSGI interface."""
request = Request(environ)
ctx = Context(request)
try:
try:
response = self(request, ctx)
ctx._run_callbacks('finalize', (request, response))
response = response.conditional_to(request) # depends on [control=['try'], data=[]]
except HTTPException as e:
response = e.response # depends on [control=['except'], data=['e']]
except Exception:
self.handle_error(request, ctx)
response = InternalServerError().response # depends on [control=['except'], data=[]]
response.add_callback(lambda : ctx._run_callbacks('close'))
return response(environ, start_response) # depends on [control=['try'], data=[]]
finally:
ctx._run_callbacks('teardown', log_errors=True) |
def get_schema_input_version(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element("config")
get_schema = ET.Element("get_schema")
config = get_schema
input = ET.SubElement(get_schema, "input")
version = ET.SubElement(input, "version")
version.text = kwargs.pop('version')
callback = kwargs.pop('callback', self._callback)
return callback(config) | def function[get_schema_input_version, parameter[self]]:
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[get_schema] assign[=] call[name[ET].Element, parameter[constant[get_schema]]]
variable[config] assign[=] name[get_schema]
variable[input] assign[=] call[name[ET].SubElement, parameter[name[get_schema], constant[input]]]
variable[version] assign[=] call[name[ET].SubElement, parameter[name[input], constant[version]]]
name[version].text assign[=] call[name[kwargs].pop, parameter[constant[version]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[get_schema_input_version] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[get_schema] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[config] = identifier[get_schema]
identifier[input] = identifier[ET] . identifier[SubElement] ( identifier[get_schema] , literal[string] )
identifier[version] = identifier[ET] . identifier[SubElement] ( identifier[input] , literal[string] )
identifier[version] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def get_schema_input_version(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
get_schema = ET.Element('get_schema')
config = get_schema
input = ET.SubElement(get_schema, 'input')
version = ET.SubElement(input, 'version')
version.text = kwargs.pop('version')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def to_latlon(easting, northing, zone_number, zone_letter=None, northern=None, strict=True):
    """This function convert an UTM coordinate into Latitude and Longitude

    Parameters
    ----------
    easting: int
        Easting value of UTM coordinate
    northing: int
        Northing value of UTM coordinate
    zone number: int
        Zone Number is represented with global map numbers of an UTM Zone
        Numbers Map. More information see utmzones [1]_
    zone_letter: str
        Zone Letter can be represented as string values. Where UTM Zone
        Designators can be accessed in [1]_
    northern: bool
        You can set True or False to set this parameter. Default is None
    strict: bool
        When True (the default) easting/northing must lie inside the legal
        UTM ranges checked below; otherwise OutOfRangeError is raised.

    Returns
    -------
    tuple
        ``(latitude, longitude)`` in decimal degrees.

    Raises
    ------
    ValueError
        If neither or both of ``zone_letter`` / ``northern`` are supplied.
    OutOfRangeError
        If ``strict`` is True and easting or northing is out of range.

    .. _[1]: http://www.jaworski.ca/utmzones.htm
    """
    # The hemisphere must be specified exactly once: either through the
    # zone letter or through the explicit ``northern`` flag.
    if not zone_letter and northern is None:
        raise ValueError('either zone_letter or northern needs to be set')
    elif zone_letter and northern is not None:
        raise ValueError('set either zone_letter or northern, but not both')
    if strict:
        # Legal eastings run from 100 km up to (but excluding) 1000 km,
        # centered on the 500 km false easting.
        if not in_bounds(easting, 100000, 1000000, upper_strict=True):
            raise OutOfRangeError('easting out of range (must be between 100.000 m and 999.999 m)')
        if not in_bounds(northing, 0, 10000000):
            raise OutOfRangeError('northing out of range (must be between 0 m and 10.000.000 m)')
    check_valid_zone(zone_number, zone_letter)
    if zone_letter:
        zone_letter = zone_letter.upper()
        # Zone letters 'N'..'X' designate the northern hemisphere.
        northern = (zone_letter >= 'N')
    # Remove the 500 km false easting; southern-hemisphere northings also
    # carry a 10,000 km false northing.
    x = easting - 500000
    y = northing
    if not northern:
        y -= 10000000
    # Footpoint latitude p_rad from the meridian arc length (inverse series).
    # K0, R, M1, P2..P5, E, _E, E_P2 are module-level ellipsoid constants
    # defined outside this view; ``mathlib`` is presumably the math module
    # (or a numpy alias) -- TODO confirm against the file header.
    m = y / K0
    mu = m / (R * M1)
    p_rad = (mu +
             P2 * mathlib.sin(2 * mu) +
             P3 * mathlib.sin(4 * mu) +
             P4 * mathlib.sin(6 * mu) +
             P5 * mathlib.sin(8 * mu))
    p_sin = mathlib.sin(p_rad)
    p_sin2 = p_sin * p_sin
    p_cos = mathlib.cos(p_rad)
    p_tan = p_sin / p_cos
    p_tan2 = p_tan * p_tan
    p_tan4 = p_tan2 * p_tan2
    ep_sin = 1 - E * p_sin2
    ep_sin_sqrt = mathlib.sqrt(1 - E * p_sin2)
    n = R / ep_sin_sqrt  # radius of curvature in the prime vertical
    r = (1 - E) / ep_sin  # curvature ratio used by the latitude series
    c = _E * p_cos**2
    c2 = c * c
    # Powers of the normalized easting feeding the series expansion below.
    d = x / (n * K0)
    d2 = d * d
    d3 = d2 * d
    d4 = d3 * d
    d5 = d4 * d
    d6 = d5 * d
    # Inverse Transverse Mercator series (Snyder-style expansion).
    # NOTE(review): in the textbook form the d6/720 term is also multiplied
    # by (p_tan / r); here it sits outside that product. The term is tiny
    # in practice, but confirm against the reference implementation before
    # relying on sub-centimeter accuracy.
    latitude = (p_rad - (p_tan / r) *
                (d2 / 2 -
                 d4 / 24 * (5 + 3 * p_tan2 + 10 * c - 4 * c2 - 9 * E_P2)) +
                d6 / 720 * (61 + 90 * p_tan2 + 298 * c + 45 * p_tan4 - 252 * E_P2 - 3 * c2))
    longitude = (d -
                 d3 / 6 * (1 + 2 * p_tan2 + c) +
                 d5 / 120 * (5 - 2 * c + 28 * p_tan2 - 3 * c2 + 8 * E_P2 + 24 * p_tan4)) / p_cos
    # Longitude is computed relative to the zone's central meridian.
    return (mathlib.degrees(latitude),
            mathlib.degrees(longitude) + zone_number_to_central_longitude(zone_number))
constant[This function convert an UTM coordinate into Latitude and Longitude
Parameters
----------
easting: int
Easting value of UTM coordinate
northing: int
Northing value of UTM coordinate
zone number: int
Zone Number is represented with global map numbers of an UTM Zone
Numbers Map. More information see utmzones [1]_
zone_letter: str
Zone Letter can be represented as string values. Where UTM Zone
Designators can be accessed in [1]_
northern: bool
You can set True or False to set this parameter. Default is None
.. _[1]: http://www.jaworski.ca/utmzones.htm
]
if <ast.BoolOp object at 0x7da204344190> begin[:]
<ast.Raise object at 0x7da204346500>
if name[strict] begin[:]
if <ast.UnaryOp object at 0x7da204344f10> begin[:]
<ast.Raise object at 0x7da204345660>
if <ast.UnaryOp object at 0x7da2043462c0> begin[:]
<ast.Raise object at 0x7da204344a60>
call[name[check_valid_zone], parameter[name[zone_number], name[zone_letter]]]
if name[zone_letter] begin[:]
variable[zone_letter] assign[=] call[name[zone_letter].upper, parameter[]]
variable[northern] assign[=] compare[name[zone_letter] greater_or_equal[>=] constant[N]]
variable[x] assign[=] binary_operation[name[easting] - constant[500000]]
variable[y] assign[=] name[northing]
if <ast.UnaryOp object at 0x7da204347970> begin[:]
<ast.AugAssign object at 0x7da2043462f0>
variable[m] assign[=] binary_operation[name[y] / name[K0]]
variable[mu] assign[=] binary_operation[name[m] / binary_operation[name[R] * name[M1]]]
variable[p_rad] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[mu] + binary_operation[name[P2] * call[name[mathlib].sin, parameter[binary_operation[constant[2] * name[mu]]]]]] + binary_operation[name[P3] * call[name[mathlib].sin, parameter[binary_operation[constant[4] * name[mu]]]]]] + binary_operation[name[P4] * call[name[mathlib].sin, parameter[binary_operation[constant[6] * name[mu]]]]]] + binary_operation[name[P5] * call[name[mathlib].sin, parameter[binary_operation[constant[8] * name[mu]]]]]]
variable[p_sin] assign[=] call[name[mathlib].sin, parameter[name[p_rad]]]
variable[p_sin2] assign[=] binary_operation[name[p_sin] * name[p_sin]]
variable[p_cos] assign[=] call[name[mathlib].cos, parameter[name[p_rad]]]
variable[p_tan] assign[=] binary_operation[name[p_sin] / name[p_cos]]
variable[p_tan2] assign[=] binary_operation[name[p_tan] * name[p_tan]]
variable[p_tan4] assign[=] binary_operation[name[p_tan2] * name[p_tan2]]
variable[ep_sin] assign[=] binary_operation[constant[1] - binary_operation[name[E] * name[p_sin2]]]
variable[ep_sin_sqrt] assign[=] call[name[mathlib].sqrt, parameter[binary_operation[constant[1] - binary_operation[name[E] * name[p_sin2]]]]]
variable[n] assign[=] binary_operation[name[R] / name[ep_sin_sqrt]]
variable[r] assign[=] binary_operation[binary_operation[constant[1] - name[E]] / name[ep_sin]]
variable[c] assign[=] binary_operation[name[_E] * binary_operation[name[p_cos] ** constant[2]]]
variable[c2] assign[=] binary_operation[name[c] * name[c]]
variable[d] assign[=] binary_operation[name[x] / binary_operation[name[n] * name[K0]]]
variable[d2] assign[=] binary_operation[name[d] * name[d]]
variable[d3] assign[=] binary_operation[name[d2] * name[d]]
variable[d4] assign[=] binary_operation[name[d3] * name[d]]
variable[d5] assign[=] binary_operation[name[d4] * name[d]]
variable[d6] assign[=] binary_operation[name[d5] * name[d]]
variable[latitude] assign[=] binary_operation[binary_operation[name[p_rad] - binary_operation[binary_operation[name[p_tan] / name[r]] * binary_operation[binary_operation[name[d2] / constant[2]] - binary_operation[binary_operation[name[d4] / constant[24]] * binary_operation[binary_operation[binary_operation[binary_operation[constant[5] + binary_operation[constant[3] * name[p_tan2]]] + binary_operation[constant[10] * name[c]]] - binary_operation[constant[4] * name[c2]]] - binary_operation[constant[9] * name[E_P2]]]]]]] + binary_operation[binary_operation[name[d6] / constant[720]] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[61] + binary_operation[constant[90] * name[p_tan2]]] + binary_operation[constant[298] * name[c]]] + binary_operation[constant[45] * name[p_tan4]]] - binary_operation[constant[252] * name[E_P2]]] - binary_operation[constant[3] * name[c2]]]]]
variable[longitude] assign[=] binary_operation[binary_operation[binary_operation[name[d] - binary_operation[binary_operation[name[d3] / constant[6]] * binary_operation[binary_operation[constant[1] + binary_operation[constant[2] * name[p_tan2]]] + name[c]]]] + binary_operation[binary_operation[name[d5] / constant[120]] * binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[5] - binary_operation[constant[2] * name[c]]] + binary_operation[constant[28] * name[p_tan2]]] - binary_operation[constant[3] * name[c2]]] + binary_operation[constant[8] * name[E_P2]]] + binary_operation[constant[24] * name[p_tan4]]]]] / name[p_cos]]
return[tuple[[<ast.Call object at 0x7da18fe93130>, <ast.BinOp object at 0x7da18fe92ef0>]]] | keyword[def] identifier[to_latlon] ( identifier[easting] , identifier[northing] , identifier[zone_number] , identifier[zone_letter] = keyword[None] , identifier[northern] = keyword[None] , identifier[strict] = keyword[True] ):
literal[string]
keyword[if] keyword[not] identifier[zone_letter] keyword[and] identifier[northern] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[elif] identifier[zone_letter] keyword[and] identifier[northern] keyword[is] keyword[not] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[strict] :
keyword[if] keyword[not] identifier[in_bounds] ( identifier[easting] , literal[int] , literal[int] , identifier[upper_strict] = keyword[True] ):
keyword[raise] identifier[OutOfRangeError] ( literal[string] )
keyword[if] keyword[not] identifier[in_bounds] ( identifier[northing] , literal[int] , literal[int] ):
keyword[raise] identifier[OutOfRangeError] ( literal[string] )
identifier[check_valid_zone] ( identifier[zone_number] , identifier[zone_letter] )
keyword[if] identifier[zone_letter] :
identifier[zone_letter] = identifier[zone_letter] . identifier[upper] ()
identifier[northern] =( identifier[zone_letter] >= literal[string] )
identifier[x] = identifier[easting] - literal[int]
identifier[y] = identifier[northing]
keyword[if] keyword[not] identifier[northern] :
identifier[y] -= literal[int]
identifier[m] = identifier[y] / identifier[K0]
identifier[mu] = identifier[m] /( identifier[R] * identifier[M1] )
identifier[p_rad] =( identifier[mu] +
identifier[P2] * identifier[mathlib] . identifier[sin] ( literal[int] * identifier[mu] )+
identifier[P3] * identifier[mathlib] . identifier[sin] ( literal[int] * identifier[mu] )+
identifier[P4] * identifier[mathlib] . identifier[sin] ( literal[int] * identifier[mu] )+
identifier[P5] * identifier[mathlib] . identifier[sin] ( literal[int] * identifier[mu] ))
identifier[p_sin] = identifier[mathlib] . identifier[sin] ( identifier[p_rad] )
identifier[p_sin2] = identifier[p_sin] * identifier[p_sin]
identifier[p_cos] = identifier[mathlib] . identifier[cos] ( identifier[p_rad] )
identifier[p_tan] = identifier[p_sin] / identifier[p_cos]
identifier[p_tan2] = identifier[p_tan] * identifier[p_tan]
identifier[p_tan4] = identifier[p_tan2] * identifier[p_tan2]
identifier[ep_sin] = literal[int] - identifier[E] * identifier[p_sin2]
identifier[ep_sin_sqrt] = identifier[mathlib] . identifier[sqrt] ( literal[int] - identifier[E] * identifier[p_sin2] )
identifier[n] = identifier[R] / identifier[ep_sin_sqrt]
identifier[r] =( literal[int] - identifier[E] )/ identifier[ep_sin]
identifier[c] = identifier[_E] * identifier[p_cos] ** literal[int]
identifier[c2] = identifier[c] * identifier[c]
identifier[d] = identifier[x] /( identifier[n] * identifier[K0] )
identifier[d2] = identifier[d] * identifier[d]
identifier[d3] = identifier[d2] * identifier[d]
identifier[d4] = identifier[d3] * identifier[d]
identifier[d5] = identifier[d4] * identifier[d]
identifier[d6] = identifier[d5] * identifier[d]
identifier[latitude] =( identifier[p_rad] -( identifier[p_tan] / identifier[r] )*
( identifier[d2] / literal[int] -
identifier[d4] / literal[int] *( literal[int] + literal[int] * identifier[p_tan2] + literal[int] * identifier[c] - literal[int] * identifier[c2] - literal[int] * identifier[E_P2] ))+
identifier[d6] / literal[int] *( literal[int] + literal[int] * identifier[p_tan2] + literal[int] * identifier[c] + literal[int] * identifier[p_tan4] - literal[int] * identifier[E_P2] - literal[int] * identifier[c2] ))
identifier[longitude] =( identifier[d] -
identifier[d3] / literal[int] *( literal[int] + literal[int] * identifier[p_tan2] + identifier[c] )+
identifier[d5] / literal[int] *( literal[int] - literal[int] * identifier[c] + literal[int] * identifier[p_tan2] - literal[int] * identifier[c2] + literal[int] * identifier[E_P2] + literal[int] * identifier[p_tan4] ))/ identifier[p_cos]
keyword[return] ( identifier[mathlib] . identifier[degrees] ( identifier[latitude] ),
identifier[mathlib] . identifier[degrees] ( identifier[longitude] )+ identifier[zone_number_to_central_longitude] ( identifier[zone_number] )) | def to_latlon(easting, northing, zone_number, zone_letter=None, northern=None, strict=True):
"""This function convert an UTM coordinate into Latitude and Longitude
Parameters
----------
easting: int
Easting value of UTM coordinate
northing: int
Northing value of UTM coordinate
zone number: int
Zone Number is represented with global map numbers of an UTM Zone
Numbers Map. More information see utmzones [1]_
zone_letter: str
Zone Letter can be represented as string values. Where UTM Zone
Designators can be accessed in [1]_
northern: bool
You can set True or False to set this parameter. Default is None
.. _[1]: http://www.jaworski.ca/utmzones.htm
"""
if not zone_letter and northern is None:
raise ValueError('either zone_letter or northern needs to be set') # depends on [control=['if'], data=[]]
elif zone_letter and northern is not None:
raise ValueError('set either zone_letter or northern, but not both') # depends on [control=['if'], data=[]]
if strict:
if not in_bounds(easting, 100000, 1000000, upper_strict=True):
raise OutOfRangeError('easting out of range (must be between 100.000 m and 999.999 m)') # depends on [control=['if'], data=[]]
if not in_bounds(northing, 0, 10000000):
raise OutOfRangeError('northing out of range (must be between 0 m and 10.000.000 m)') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
check_valid_zone(zone_number, zone_letter)
if zone_letter:
zone_letter = zone_letter.upper()
northern = zone_letter >= 'N' # depends on [control=['if'], data=[]]
x = easting - 500000
y = northing
if not northern:
y -= 10000000 # depends on [control=['if'], data=[]]
m = y / K0
mu = m / (R * M1)
p_rad = mu + P2 * mathlib.sin(2 * mu) + P3 * mathlib.sin(4 * mu) + P4 * mathlib.sin(6 * mu) + P5 * mathlib.sin(8 * mu)
p_sin = mathlib.sin(p_rad)
p_sin2 = p_sin * p_sin
p_cos = mathlib.cos(p_rad)
p_tan = p_sin / p_cos
p_tan2 = p_tan * p_tan
p_tan4 = p_tan2 * p_tan2
ep_sin = 1 - E * p_sin2
ep_sin_sqrt = mathlib.sqrt(1 - E * p_sin2)
n = R / ep_sin_sqrt
r = (1 - E) / ep_sin
c = _E * p_cos ** 2
c2 = c * c
d = x / (n * K0)
d2 = d * d
d3 = d2 * d
d4 = d3 * d
d5 = d4 * d
d6 = d5 * d
latitude = p_rad - p_tan / r * (d2 / 2 - d4 / 24 * (5 + 3 * p_tan2 + 10 * c - 4 * c2 - 9 * E_P2)) + d6 / 720 * (61 + 90 * p_tan2 + 298 * c + 45 * p_tan4 - 252 * E_P2 - 3 * c2)
longitude = (d - d3 / 6 * (1 + 2 * p_tan2 + c) + d5 / 120 * (5 - 2 * c + 28 * p_tan2 - 3 * c2 + 8 * E_P2 + 24 * p_tan4)) / p_cos
return (mathlib.degrees(latitude), mathlib.degrees(longitude) + zone_number_to_central_longitude(zone_number)) |
def merge_profile(mean_profile, new_profile):
    """Fold a batch of new samples into a list of rolling means.

    For every index of *mean_profile*, the sample at the same index of
    *new_profile* is fed to that mean's ``add`` method; a ``None`` sample
    leaves its slot untouched. *mean_profile* is mutated in place and
    nothing is returned.
    """
    for idx in range(len(mean_profile)):
        sample = new_profile[idx]
        if sample is not None:
            mean_profile[idx].add(sample)
constant[Add a new list of values to a list of rolling means.]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[mean_profile]]]]]] begin[:]
if compare[call[name[new_profile]][name[i]] is constant[None]] begin[:]
continue
call[call[name[mean_profile]][name[i]].add, parameter[call[name[new_profile]][name[i]]]] | keyword[def] identifier[merge_profile] ( identifier[mean_profile] , identifier[new_profile] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[mean_profile] )):
keyword[if] identifier[new_profile] [ identifier[i] ] keyword[is] keyword[None] :
keyword[continue]
identifier[mean_profile] [ identifier[i] ]. identifier[add] ( identifier[new_profile] [ identifier[i] ]) | def merge_profile(mean_profile, new_profile):
"""Add a new list of values to a list of rolling means."""
for i in range(0, len(mean_profile)):
if new_profile[i] is None:
continue # depends on [control=['if'], data=[]]
mean_profile[i].add(new_profile[i]) # depends on [control=['for'], data=['i']] |
def populate(self, installed_bots=None):
    """
    Load bots.

    Import each bot module.

    It is thread-safe and idempotent, but not re-entrant.

    :param installed_bots: iterable of entries, each either a ``Bot``
        class or a dotted-path string understood by
        ``BotRegistry.create``; ``None`` or empty means nothing to load.
    :raises RuntimeError: if called re-entrantly while a load is running.
    :raises ImproperlyConfigured: if two bots share a label or a name.
    """
    # Fast path: a fully populated registry never needs reloading.
    if self.ready:
        return
    # populate() might be called by two threads in parallel on servers
    # that create threads before initializing the WSGI callable.
    with self._lock:
        # Re-check under the lock: another thread may have completed the
        # load while we were waiting (double-checked locking).
        if self.ready:
            return
        # An RLock prevents other threads from entering this section. The
        # compare and set operation below is atomic.
        if self.loading:
            # Prevent re-entrant calls to avoid running AppConfig.ready()
            # methods twice.
            raise RuntimeError("populate() isn't re-entrant")
        self.loading = True
        # Phase 1: Initialize bots
        for entry in installed_bots or {}:
            # Normalize Bot classes to their dotted import path so that
            # BotRegistry.create only ever receives strings.
            if isinstance(entry, Bot):
                cls = entry
                entry = '.'.join([cls.__module__, cls.__name__])
            bot_reg = BotRegistry.create(entry)
            if bot_reg.label in self.bots:
                raise ImproperlyConfigured(
                    "Bot labels aren't unique, "
                    "duplicates: %s" % bot_reg.label)
            self.bots[bot_reg.label] = bot_reg
            bot_reg.bots = self  # back-reference to this registry
        # Check for duplicate bot names.
        counts = Counter(
            bot_reg.name for bot_reg in self.bots.values())
        duplicates = [
            name for name, count in counts.most_common() if count > 1]
        if duplicates:
            raise ImproperlyConfigured(
                "Bot names aren't unique, "
                "duplicates: %s" % ", ".join(duplicates))
        self.bots_ready = True
        # Phase 2: import config files
        for bot in self.bots.values():
            bot.import_configs()
        self.configs_ready = True
        # Flip ready last so concurrent callers only ever observe a
        # fully built registry.
        self.ready = True
constant[
Load bots.
Import each bot module.
It is thread-safe and idempotent, but not re-entrant.
]
if name[self].ready begin[:]
return[None]
with name[self]._lock begin[:]
if name[self].ready begin[:]
return[None]
if name[self].loading begin[:]
<ast.Raise object at 0x7da1b24bbeb0>
name[self].loading assign[=] constant[True]
for taget[name[entry]] in starred[<ast.BoolOp object at 0x7da1b24b86d0>] begin[:]
if call[name[isinstance], parameter[name[entry], name[Bot]]] begin[:]
variable[cls] assign[=] name[entry]
variable[entry] assign[=] call[constant[.].join, parameter[list[[<ast.Attribute object at 0x7da1b24bb640>, <ast.Attribute object at 0x7da1b24b88e0>]]]]
variable[bot_reg] assign[=] call[name[BotRegistry].create, parameter[name[entry]]]
if compare[name[bot_reg].label in name[self].bots] begin[:]
<ast.Raise object at 0x7da1b24b8190>
call[name[self].bots][name[bot_reg].label] assign[=] name[bot_reg]
name[bot_reg].bots assign[=] name[self]
variable[counts] assign[=] call[name[Counter], parameter[<ast.GeneratorExp object at 0x7da1b24b8790>]]
variable[duplicates] assign[=] <ast.ListComp object at 0x7da1b24bbac0>
if name[duplicates] begin[:]
<ast.Raise object at 0x7da18dc04e50>
name[self].bots_ready assign[=] constant[True]
for taget[name[bot]] in starred[call[name[self].bots.values, parameter[]]] begin[:]
call[name[bot].import_configs, parameter[]]
name[self].configs_ready assign[=] constant[True]
name[self].ready assign[=] constant[True] | keyword[def] identifier[populate] ( identifier[self] , identifier[installed_bots] = keyword[None] ):
literal[string]
keyword[if] identifier[self] . identifier[ready] :
keyword[return]
keyword[with] identifier[self] . identifier[_lock] :
keyword[if] identifier[self] . identifier[ready] :
keyword[return]
keyword[if] identifier[self] . identifier[loading] :
keyword[raise] identifier[RuntimeError] ( literal[string] )
identifier[self] . identifier[loading] = keyword[True]
keyword[for] identifier[entry] keyword[in] identifier[installed_bots] keyword[or] {}:
keyword[if] identifier[isinstance] ( identifier[entry] , identifier[Bot] ):
identifier[cls] = identifier[entry]
identifier[entry] = literal[string] . identifier[join] ([ identifier[cls] . identifier[__module__] , identifier[cls] . identifier[__name__] ])
identifier[bot_reg] = identifier[BotRegistry] . identifier[create] ( identifier[entry] )
keyword[if] identifier[bot_reg] . identifier[label] keyword[in] identifier[self] . identifier[bots] :
keyword[raise] identifier[ImproperlyConfigured] (
literal[string]
literal[string] % identifier[bot_reg] . identifier[label] )
identifier[self] . identifier[bots] [ identifier[bot_reg] . identifier[label] ]= identifier[bot_reg]
identifier[bot_reg] . identifier[bots] = identifier[self]
identifier[counts] = identifier[Counter] (
identifier[bot_reg] . identifier[name] keyword[for] identifier[bot_reg] keyword[in] identifier[self] . identifier[bots] . identifier[values] ())
identifier[duplicates] =[
identifier[name] keyword[for] identifier[name] , identifier[count] keyword[in] identifier[counts] . identifier[most_common] () keyword[if] identifier[count] > literal[int] ]
keyword[if] identifier[duplicates] :
keyword[raise] identifier[ImproperlyConfigured] (
literal[string]
literal[string] % literal[string] . identifier[join] ( identifier[duplicates] ))
identifier[self] . identifier[bots_ready] = keyword[True]
keyword[for] identifier[bot] keyword[in] identifier[self] . identifier[bots] . identifier[values] ():
identifier[bot] . identifier[import_configs] ()
identifier[self] . identifier[configs_ready] = keyword[True]
identifier[self] . identifier[ready] = keyword[True] | def populate(self, installed_bots=None):
"""
Load bots.
Import each bot module.
It is thread-safe and idempotent, but not re-entrant.
"""
if self.ready:
return # depends on [control=['if'], data=[]]
# populate() might be called by two threads in parallel on servers
# that create threads before initializing the WSGI callable.
with self._lock:
if self.ready:
return # depends on [control=['if'], data=[]]
# An RLock prevents other threads from entering this section. The
# compare and set operation below is atomic.
if self.loading:
# Prevent re-entrant calls to avoid running AppConfig.ready()
# methods twice.
raise RuntimeError("populate() isn't re-entrant") # depends on [control=['if'], data=[]]
self.loading = True
# Phase 1: Initialize bots
for entry in installed_bots or {}:
if isinstance(entry, Bot):
cls = entry
entry = '.'.join([cls.__module__, cls.__name__]) # depends on [control=['if'], data=[]]
bot_reg = BotRegistry.create(entry)
if bot_reg.label in self.bots:
raise ImproperlyConfigured("Bot labels aren't unique, duplicates: %s" % bot_reg.label) # depends on [control=['if'], data=[]]
self.bots[bot_reg.label] = bot_reg
bot_reg.bots = self # depends on [control=['for'], data=['entry']]
# Check for duplicate bot names.
counts = Counter((bot_reg.name for bot_reg in self.bots.values()))
duplicates = [name for (name, count) in counts.most_common() if count > 1]
if duplicates:
raise ImproperlyConfigured("Bot names aren't unique, duplicates: %s" % ', '.join(duplicates)) # depends on [control=['if'], data=[]]
self.bots_ready = True
# Phase 2: import config files
for bot in self.bots.values():
bot.import_configs() # depends on [control=['for'], data=['bot']]
self.configs_ready = True
self.ready = True # depends on [control=['with'], data=[]] |
def get_vbox_version(config_kmk):
    """Return the VirtualBox version as ``b"major.minor.build"``.

    Parses the ``VBOX_VERSION_MAJOR``, ``VBOX_VERSION_MINOR`` and
    ``VBOX_VERSION_BUILD`` assignments out of the given kmk config file.
    The previous implementation ignored the file and always returned the
    hard-coded ``b"6.0.4"`` (the real parsing was commented out); it also
    would only have matched single-digit components.

    :param config_kmk: path to the VirtualBox ``Version.kmk``-style file
    :returns: bytes such as ``b"6.1.22"``
    :raises ValueError: if one of the three version fields is missing
    """
    import re  # local import: the file header is outside this view

    with open(config_kmk, 'rb') as f:
        config = f.read()

    def _component(field):
        # Matches e.g. b"VBOX_VERSION_MAJOR = 6"; \d+ accepts multi-digit
        # components (builds routinely exceed 9).
        match = re.search(b"VBOX_VERSION_" + field + br" = (?P<value>\d+)", config)
        if match is None:
            raise ValueError(
                "VBOX_VERSION_%s not found in %s" % (field.decode(), config_kmk))
        return match.group('value')

    return b".".join(_component(field) for field in (b"MAJOR", b"MINOR", b"BUILD"))
constant[Return the vbox config major, minor, build]
with call[name[open], parameter[name[config_kmk], constant[rb]]] begin[:]
variable[config] assign[=] call[name[f].read, parameter[]]
variable[major] assign[=] constant[b'6']
variable[minor] assign[=] constant[b'0']
variable[build] assign[=] constant[b'4']
return[call[constant[b'.'].join, parameter[list[[<ast.Name object at 0x7da204960670>, <ast.Name object at 0x7da204961e40>, <ast.Name object at 0x7da20e9b0ac0>]]]]] | keyword[def] identifier[get_vbox_version] ( identifier[config_kmk] ):
literal[string]
keyword[with] identifier[open] ( identifier[config_kmk] , literal[string] ) keyword[as] identifier[f] :
identifier[config] = identifier[f] . identifier[read] ()
identifier[major] = literal[string]
identifier[minor] = literal[string]
identifier[build] = literal[string]
keyword[return] literal[string] . identifier[join] ([ identifier[major] , identifier[minor] , identifier[build] ]) | def get_vbox_version(config_kmk):
"""Return the vbox config major, minor, build"""
with open(config_kmk, 'rb') as f:
config = f.read() # depends on [control=['with'], data=['f']]
major = b'6' #re.search(b"VBOX_VERSION_MAJOR = (?P<major>[\d])", config).groupdict()['major']
minor = b'0' #re.search(b"VBOX_VERSION_MINOR = (?P<minor>[\d])", config).groupdict()['minor']
build = b'4' #re.search(b"VBOX_VERSION_BUILD = (?P<build>[\d])", config).groupdict()['build']
return b'.'.join([major, minor, build]) |
def set_filesystems(
        name,
        device,
        vfstype,
        opts='-',
        mount='true',
        config='/etc/filesystems',
        test=False,
        match_on='auto',
        **kwargs):
    '''
    .. versionadded:: 2018.3.3

    Verify that this mount is represented in the filesystems, change the mount
    to match the data passed, or add the mount if it is not present on AIX

    Provide information if the path is mounted

    :param name: The name of the mount point where the device is mounted.
    :param device: The device that is being mounted.
    :param vfstype: The file system that is used (AIX has two fstypes, fstype and vfstype - similar to Linux fstype)
    :param opts: Additional options used when mounting the device.
    :param mount: Mount if not mounted, default True.
    :param config: Configuration file, default /etc/filesystems.
    :param match_on: File systems type to match on, default auto
    :return: ``None`` on non-AIX kernels, otherwise ``'present'`` (entry
        already correct), ``'change'`` (entry rewritten) or ``'new'``
        (entry appended).

    CLI Example:

    .. code-block:: bash

        salt '*' mount.set_filesystems /mnt/foo /dev/sdz1 jfs2
    '''
    # Fix the opts type if it is a list
    if isinstance(opts, list):
        opts = ','.join(opts)

    # preserve arguments for updating; escaped spaces use the fstab-style
    # \040 encoding
    entry_args = {
        'name': name,
        'dev': device.replace('\\ ', '\\040'),
        'vfstype': vfstype,
        'opts': opts,
        'mount': mount,
    }

    view_lines = []
    ret = None

    # /etc/filesystems handling is AIX-specific; quietly bail out elsewhere.
    if 'AIX' not in __grains__['kernel']:
        return ret

    # Transform match_on into list--items will be checked later
    if isinstance(match_on, list):
        pass
    elif not isinstance(match_on, six.string_types):
        raise CommandExecutionError('match_on must be a string or list of strings')
    elif match_on == 'auto':
        # Try to guess right criteria for auto....
        # added IBM types from sys/vmount.h after btrfs
        # NOTE: missing some special fstypes here
        specialFSes = frozenset([
            'none',
            'tmpfs',
            'sysfs',
            'proc',
            'fusectl',
            'debugfs',
            'securityfs',
            'devtmpfs',
            'cgroup',
            'btrfs',
            'cdrfs',
            'procfs',
            'jfs',
            'jfs2',
            'nfs',
            'sfs',
            'nfs3',
            'cachefs',
            'udfs',
            'cifs',
            'namefs',
            'pmemfs',
            'ahafs',
            'nfs4',
            'autofs',
            'stnfs'])

        # Pseudo/network filesystems are identified by mount point, real
        # devices by their dev entry.
        if vfstype in specialFSes:
            match_on = ['name']
        else:
            match_on = ['dev']
    else:
        match_on = [match_on]

    # generate entry and criteria objects, handle invalid keys in match_on
    entry_ip = _FileSystemsEntry.from_line(entry_args, kwargs)
    try:
        criteria = entry_ip.pick(match_on)
    except KeyError:
        # BUG FIX: the original interpolated a lazy ``filter`` object into
        # the message, which renders as "<filter object at 0x...>" on
        # Python 3 instead of naming the offending keys.
        invalid_keys = [key for key in match_on
                        if key not in _FileSystemsEntry.compatibility_keys]
        raise CommandExecutionError(
            'Unrecognized keys in match_on: "{0}"'.format(', '.join(invalid_keys)))

    # parse file, use ret to cache status
    if not os.path.isfile(config):
        raise CommandExecutionError('Bad config file "{0}"'.format(config))

    # read in block of filesystem, block starts with '/' till empty line
    try:
        fsys_filedict = _filesystems(config, False)
        for fsys_view in six.viewitems(fsys_filedict):
            if criteria.match(fsys_view):
                ret = 'present'
                if entry_ip.match(fsys_view):
                    # Existing entry already matches the requested state.
                    view_lines.append(fsys_view)
                else:
                    # Same mount matched, but attributes differ: replace it.
                    ret = 'change'
                    kv = entry_ip['name']
                    view_lines.append((kv, entry_ip))
            else:
                view_lines.append(fsys_view)
    except (IOError, OSError) as exc:
        raise CommandExecutionError('Couldn\'t read from {0}: {1}'.format(config, exc))

    # add line if not present or changed
    if ret is None:
        for dict_view in six.viewitems(entry_ip.dict_from_entry()):
            view_lines.append(dict_view)
        ret = 'new'

    if ret != 'present':  # ret in ['new', 'change']:
        try:
            with salt.utils.files.fopen(config, 'wb') as ofile:
                # The line was changed, commit it!
                for fsys_view in view_lines:
                    entry = fsys_view[1]
                    mystrg = _FileSystemsEntry.dict_to_lines(entry)
                    ofile.writelines(salt.utils.data.encode(mystrg))
        except (IOError, OSError):
            raise CommandExecutionError('File not writable {0}'.format(config))

    return ret
constant[
.. versionadded:: 2018.3.3
Verify that this mount is represented in the filesystems, change the mount
to match the data passed, or add the mount if it is not present on AIX
Provide information if the path is mounted
:param name: The name of the mount point where the device is mounted.
:param device: The device that is being mounted.
:param vfstype: The file system that is used (AIX has two fstypes, fstype and vfstype - similar to Linux fstype)
:param opts: Additional options used when mounting the device.
:param mount: Mount if not mounted, default True.
:param config: Configuration file, default /etc/filesystems.
:param match: File systems type to match on, default auto
CLI Example:
.. code-block:: bash
salt '*' mount.set_filesystems /mnt/foo /dev/sdz1 jfs2
]
if call[name[isinstance], parameter[name[opts], name[list]]] begin[:]
variable[opts] assign[=] call[constant[,].join, parameter[name[opts]]]
variable[entry_args] assign[=] dictionary[[<ast.Constant object at 0x7da1b20ed3f0>, <ast.Constant object at 0x7da1b20ed660>, <ast.Constant object at 0x7da1b20eded0>, <ast.Constant object at 0x7da1b20eda20>, <ast.Constant object at 0x7da1b20ed240>], [<ast.Name object at 0x7da1b20ecbe0>, <ast.Call object at 0x7da1b20ec460>, <ast.Name object at 0x7da1b20ec100>, <ast.Name object at 0x7da1b20eca30>, <ast.Name object at 0x7da1b20ec0a0>]]
variable[view_lines] assign[=] list[[]]
variable[ret] assign[=] constant[None]
if compare[constant[AIX] <ast.NotIn object at 0x7da2590d7190> call[name[__grains__]][constant[kernel]]] begin[:]
return[name[ret]]
if call[name[isinstance], parameter[name[match_on], name[list]]] begin[:]
pass
variable[entry_ip] assign[=] call[name[_FileSystemsEntry].from_line, parameter[name[entry_args], name[kwargs]]]
<ast.Try object at 0x7da1b1f79c00>
if <ast.UnaryOp object at 0x7da1b1f79f90> begin[:]
<ast.Raise object at 0x7da1b1f799f0>
<ast.Try object at 0x7da1b1f79180>
if compare[name[ret] is constant[None]] begin[:]
for taget[name[dict_view]] in starred[call[name[six].viewitems, parameter[call[name[entry_ip].dict_from_entry, parameter[]]]]] begin[:]
call[name[view_lines].append, parameter[name[dict_view]]]
variable[ret] assign[=] constant[new]
if compare[name[ret] not_equal[!=] constant[present]] begin[:]
<ast.Try object at 0x7da1b1f7b6a0>
return[name[ret]] | keyword[def] identifier[set_filesystems] (
identifier[name] ,
identifier[device] ,
identifier[vfstype] ,
identifier[opts] = literal[string] ,
identifier[mount] = literal[string] ,
identifier[config] = literal[string] ,
identifier[test] = keyword[False] ,
identifier[match_on] = literal[string] ,
** identifier[kwargs] ):
literal[string]
keyword[if] identifier[isinstance] ( identifier[opts] , identifier[list] ):
identifier[opts] = literal[string] . identifier[join] ( identifier[opts] )
identifier[entry_args] ={
literal[string] : identifier[name] ,
literal[string] : identifier[device] . identifier[replace] ( literal[string] , literal[string] ),
literal[string] : identifier[vfstype] ,
literal[string] : identifier[opts] ,
literal[string] : identifier[mount] ,
}
identifier[view_lines] =[]
identifier[ret] = keyword[None]
keyword[if] literal[string] keyword[not] keyword[in] identifier[__grains__] [ literal[string] ]:
keyword[return] identifier[ret]
keyword[if] identifier[isinstance] ( identifier[match_on] , identifier[list] ):
keyword[pass]
keyword[elif] keyword[not] identifier[isinstance] ( identifier[match_on] , identifier[six] . identifier[string_types] ):
keyword[raise] identifier[CommandExecutionError] ( literal[string] )
keyword[elif] identifier[match_on] == literal[string] :
identifier[specialFSes] = identifier[frozenset] ([
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ,
literal[string] ])
keyword[if] identifier[vfstype] keyword[in] identifier[specialFSes] :
identifier[match_on] =[ literal[string] ]
keyword[else] :
identifier[match_on] =[ literal[string] ]
keyword[else] :
identifier[match_on] =[ identifier[match_on] ]
identifier[entry_ip] = identifier[_FileSystemsEntry] . identifier[from_line] ( identifier[entry_args] , identifier[kwargs] )
keyword[try] :
identifier[criteria] = identifier[entry_ip] . identifier[pick] ( identifier[match_on] )
keyword[except] identifier[KeyError] :
identifier[filterFn] = keyword[lambda] identifier[key] : identifier[key] keyword[not] keyword[in] identifier[_FileSystemsEntry] . identifier[compatibility_keys]
identifier[invalid_keys] = identifier[filter] ( identifier[filterFn] , identifier[match_on] )
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[invalid_keys] ))
keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[config] ):
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[config] ))
keyword[try] :
identifier[fsys_filedict] = identifier[_filesystems] ( identifier[config] , keyword[False] )
keyword[for] identifier[fsys_view] keyword[in] identifier[six] . identifier[viewitems] ( identifier[fsys_filedict] ):
keyword[if] identifier[criteria] . identifier[match] ( identifier[fsys_view] ):
identifier[ret] = literal[string]
keyword[if] identifier[entry_ip] . identifier[match] ( identifier[fsys_view] ):
identifier[view_lines] . identifier[append] ( identifier[fsys_view] )
keyword[else] :
identifier[ret] = literal[string]
identifier[kv] = identifier[entry_ip] [ literal[string] ]
identifier[view_lines] . identifier[append] (( identifier[kv] , identifier[entry_ip] ))
keyword[else] :
identifier[view_lines] . identifier[append] ( identifier[fsys_view] )
keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[exc] :
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[config] , identifier[exc] ))
keyword[if] identifier[ret] keyword[is] keyword[None] :
keyword[for] identifier[dict_view] keyword[in] identifier[six] . identifier[viewitems] ( identifier[entry_ip] . identifier[dict_from_entry] ()):
identifier[view_lines] . identifier[append] ( identifier[dict_view] )
identifier[ret] = literal[string]
keyword[if] identifier[ret] != literal[string] :
keyword[try] :
keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[config] , literal[string] ) keyword[as] identifier[ofile] :
keyword[for] identifier[fsys_view] keyword[in] identifier[view_lines] :
identifier[entry] = identifier[fsys_view] [ literal[int] ]
identifier[mystrg] = identifier[_FileSystemsEntry] . identifier[dict_to_lines] ( identifier[entry] )
identifier[ofile] . identifier[writelines] ( identifier[salt] . identifier[utils] . identifier[data] . identifier[encode] ( identifier[mystrg] ))
keyword[except] ( identifier[IOError] , identifier[OSError] ):
keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[config] ))
keyword[return] identifier[ret] | def set_filesystems(name, device, vfstype, opts='-', mount='true', config='/etc/filesystems', test=False, match_on='auto', **kwargs):
"""
.. versionadded:: 2018.3.3
Verify that this mount is represented in the filesystems, change the mount
to match the data passed, or add the mount if it is not present on AIX
Provide information if the path is mounted
:param name: The name of the mount point where the device is mounted.
:param device: The device that is being mounted.
:param vfstype: The file system that is used (AIX has two fstypes, fstype and vfstype - similar to Linux fstype)
:param opts: Additional options used when mounting the device.
:param mount: Mount if not mounted, default True.
:param config: Configuration file, default /etc/filesystems.
:param match: File systems type to match on, default auto
CLI Example:
.. code-block:: bash
salt '*' mount.set_filesystems /mnt/foo /dev/sdz1 jfs2
"""
# Fix the opts type if it is a list
if isinstance(opts, list):
opts = ','.join(opts) # depends on [control=['if'], data=[]]
# preserve arguments for updating
entry_args = {'name': name, 'dev': device.replace('\\ ', '\\040'), 'vfstype': vfstype, 'opts': opts, 'mount': mount}
view_lines = []
ret = None
if 'AIX' not in __grains__['kernel']:
return ret # depends on [control=['if'], data=[]]
# Transform match_on into list--items will be checked later
if isinstance(match_on, list):
pass # depends on [control=['if'], data=[]]
elif not isinstance(match_on, six.string_types):
raise CommandExecutionError('match_on must be a string or list of strings') # depends on [control=['if'], data=[]]
elif match_on == 'auto':
# Try to guess right criteria for auto....
# added IBM types from sys/vmount.h after btrfs
# NOTE: missing some special fstypes here
specialFSes = frozenset(['none', 'tmpfs', 'sysfs', 'proc', 'fusectl', 'debugfs', 'securityfs', 'devtmpfs', 'cgroup', 'btrfs', 'cdrfs', 'procfs', 'jfs', 'jfs2', 'nfs', 'sfs', 'nfs3', 'cachefs', 'udfs', 'cifs', 'namefs', 'pmemfs', 'ahafs', 'nfs4', 'autofs', 'stnfs'])
if vfstype in specialFSes:
match_on = ['name'] # depends on [control=['if'], data=[]]
else:
match_on = ['dev'] # depends on [control=['if'], data=['match_on']]
else:
match_on = [match_on]
# generate entry and criteria objects, handle invalid keys in match_on
entry_ip = _FileSystemsEntry.from_line(entry_args, kwargs)
try:
criteria = entry_ip.pick(match_on) # depends on [control=['try'], data=[]]
except KeyError:
filterFn = lambda key: key not in _FileSystemsEntry.compatibility_keys
invalid_keys = filter(filterFn, match_on)
raise CommandExecutionError('Unrecognized keys in match_on: "{0}"'.format(invalid_keys)) # depends on [control=['except'], data=[]]
# parse file, use ret to cache status
if not os.path.isfile(config):
raise CommandExecutionError('Bad config file "{0}"'.format(config)) # depends on [control=['if'], data=[]]
# read in block of filesystem, block starts with '/' till empty line
try:
fsys_filedict = _filesystems(config, False)
for fsys_view in six.viewitems(fsys_filedict):
if criteria.match(fsys_view):
ret = 'present'
if entry_ip.match(fsys_view):
view_lines.append(fsys_view) # depends on [control=['if'], data=[]]
else:
ret = 'change'
kv = entry_ip['name']
view_lines.append((kv, entry_ip)) # depends on [control=['if'], data=[]]
else:
view_lines.append(fsys_view) # depends on [control=['for'], data=['fsys_view']] # depends on [control=['try'], data=[]]
except (IOError, OSError) as exc:
raise CommandExecutionError("Couldn't read from {0}: {1}".format(config, exc)) # depends on [control=['except'], data=['exc']]
# add line if not present or changed
if ret is None:
for dict_view in six.viewitems(entry_ip.dict_from_entry()):
view_lines.append(dict_view) # depends on [control=['for'], data=['dict_view']]
ret = 'new' # depends on [control=['if'], data=['ret']]
if ret != 'present': # ret in ['new', 'change']:
try:
with salt.utils.files.fopen(config, 'wb') as ofile:
# The line was changed, commit it!
for fsys_view in view_lines:
entry = fsys_view[1]
mystrg = _FileSystemsEntry.dict_to_lines(entry)
ofile.writelines(salt.utils.data.encode(mystrg)) # depends on [control=['for'], data=['fsys_view']] # depends on [control=['with'], data=['ofile']] # depends on [control=['try'], data=[]]
except (IOError, OSError):
raise CommandExecutionError('File not writable {0}'.format(config)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
return ret |
def match_gpus(available_devices, requirements):
"""
Determines sufficient GPUs for the given requirements and returns a list of GPUDevices.
If there aren't sufficient GPUs a InsufficientGPUException is thrown.
:param available_devices: A list of GPUDevices
:param requirements: A list of GPURequirements
:return: A list of sufficient devices
"""
if not requirements:
return []
if not available_devices:
raise InsufficientGPUError("No GPU devices available, but {} devices required.".format(len(requirements)))
available_devices = available_devices.copy()
used_devices = []
for req in requirements:
dev = search_device(req, available_devices)
if dev:
used_devices.append(dev)
available_devices.remove(dev)
else:
raise InsufficientGPUError("Not all GPU requirements could be fulfilled.")
return used_devices | def function[match_gpus, parameter[available_devices, requirements]]:
constant[
Determines sufficient GPUs for the given requirements and returns a list of GPUDevices.
If there aren't sufficient GPUs a InsufficientGPUException is thrown.
:param available_devices: A list of GPUDevices
:param requirements: A list of GPURequirements
:return: A list of sufficient devices
]
if <ast.UnaryOp object at 0x7da2054a74f0> begin[:]
return[list[[]]]
if <ast.UnaryOp object at 0x7da2054a6b30> begin[:]
<ast.Raise object at 0x7da2054a6f20>
variable[available_devices] assign[=] call[name[available_devices].copy, parameter[]]
variable[used_devices] assign[=] list[[]]
for taget[name[req]] in starred[name[requirements]] begin[:]
variable[dev] assign[=] call[name[search_device], parameter[name[req], name[available_devices]]]
if name[dev] begin[:]
call[name[used_devices].append, parameter[name[dev]]]
call[name[available_devices].remove, parameter[name[dev]]]
return[name[used_devices]] | keyword[def] identifier[match_gpus] ( identifier[available_devices] , identifier[requirements] ):
literal[string]
keyword[if] keyword[not] identifier[requirements] :
keyword[return] []
keyword[if] keyword[not] identifier[available_devices] :
keyword[raise] identifier[InsufficientGPUError] ( literal[string] . identifier[format] ( identifier[len] ( identifier[requirements] )))
identifier[available_devices] = identifier[available_devices] . identifier[copy] ()
identifier[used_devices] =[]
keyword[for] identifier[req] keyword[in] identifier[requirements] :
identifier[dev] = identifier[search_device] ( identifier[req] , identifier[available_devices] )
keyword[if] identifier[dev] :
identifier[used_devices] . identifier[append] ( identifier[dev] )
identifier[available_devices] . identifier[remove] ( identifier[dev] )
keyword[else] :
keyword[raise] identifier[InsufficientGPUError] ( literal[string] )
keyword[return] identifier[used_devices] | def match_gpus(available_devices, requirements):
"""
Determines sufficient GPUs for the given requirements and returns a list of GPUDevices.
If there aren't sufficient GPUs a InsufficientGPUException is thrown.
:param available_devices: A list of GPUDevices
:param requirements: A list of GPURequirements
:return: A list of sufficient devices
"""
if not requirements:
return [] # depends on [control=['if'], data=[]]
if not available_devices:
raise InsufficientGPUError('No GPU devices available, but {} devices required.'.format(len(requirements))) # depends on [control=['if'], data=[]]
available_devices = available_devices.copy()
used_devices = []
for req in requirements:
dev = search_device(req, available_devices)
if dev:
used_devices.append(dev)
available_devices.remove(dev) # depends on [control=['if'], data=[]]
else:
raise InsufficientGPUError('Not all GPU requirements could be fulfilled.') # depends on [control=['for'], data=['req']]
return used_devices |
def sample(self, bqm, beta_range=None, num_reads=10, sweeps=1000,
beta_schedule_type="geometric", seed=None,
interrupt_function=None, initial_states=None):
"""Sample from a binary quadratic model using an implemented sample method.
Args:
bqm (:obj:`dimod.BinaryQuadraticModel`):
The binary quadratic model to be sampled.
beta_range (tuple, optional):
A 2-tuple defining the beginning and end of the beta schedule, where beta is the
inverse temperature. The schedule is applied linearly in beta. Default range is set
based on the total bias associated with each node.
num_reads (int, optional, default=10):
Each read is the result of a single run of the simulated annealing algorithm.
sweeps (int, optional, default=1000):
Number of sweeps or steps.
beta_schedule_type (string, optional, default='geometric'):
Beta schedule type, or how the beta values are interpolated between
the given 'beta_range'. Supported values are:
* linear
* geometric
seed (int, optional):
Seed to use for the PRNG. Specifying a particular seed with a constant
set of parameters produces identical results. If not provided, a random seed
is chosen.
initial_states (tuple(numpy.ndarray, dict), optional):
A tuple where the first value is a numpy array of initial states to seed the
simulated annealing runs, and the second is a dict defining a linear variable
labelling.
interrupt_function (function, optional):
If provided, interrupt_function is called with no parameters between each sample of
simulated annealing. If the function returns True, then simulated annealing will
terminate and return with all of the samples and energies found so far.
Returns:
:obj:`dimod.Response`: A `dimod` :obj:`~dimod.Response` object.
Examples:
This example runs simulated annealing on a binary quadratic model with some
different input parameters.
>>> import dimod
>>> import neal
...
>>> sampler = neal.SimulatedAnnealingSampler()
>>> bqm = dimod.BinaryQuadraticModel({'a': .5, 'b': -.5}, {('a', 'b'): -1}, 0.0, dimod.SPIN)
>>> # Run with default parameters
>>> response = sampler.sample(bqm)
>>> # Run with specified parameters
>>> response = sampler.sample(bqm, seed=1234, beta_range=[0.1, 4.2],
... num_reads=1, sweeps=20,
... beta_schedule_type='geometric')
>>> # Reuse a seed
>>> a1 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a2 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a1 == a2
True
"""
# if already index-labelled, just continue
if all(v in bqm.linear for v in range(len(bqm))):
_bqm = bqm
use_label_map = False
else:
try:
inverse_mapping = dict(enumerate(sorted(bqm.linear)))
except TypeError:
# in python3 unlike types cannot be sorted
inverse_mapping = dict(enumerate(bqm.linear))
mapping = {v: i for i, v in iteritems(inverse_mapping)}
_bqm = bqm.relabel_variables(mapping, inplace=False)
use_label_map = True
# beta_range, sweeps are handled by simulated_annealing
if not isinstance(num_reads, Integral):
raise TypeError("'samples' should be a positive integer")
if num_reads < 1:
raise ValueError("'samples' should be a positive integer")
if not (seed is None or isinstance(seed, Integral)):
raise TypeError("'seed' should be None or a positive integer")
if isinstance(seed, Integral) and not (0 < seed < (2**64 - 1)):
error_msg = "'seed' should be an integer between 0 and 2^64 - 1"
raise ValueError(error_msg)
if interrupt_function is None:
def interrupt_function():
return False
num_variables = len(_bqm)
# get the Ising linear biases
linear = _bqm.spin.linear
h = [linear[v] for v in range(num_variables)]
quadratic = _bqm.spin.quadratic
if len(quadratic) > 0:
couplers, coupler_weights = zip(*iteritems(quadratic))
couplers = map(lambda c: (c[0], c[1]), couplers)
coupler_starts, coupler_ends = zip(*couplers)
else:
coupler_starts, coupler_ends, coupler_weights = [], [], []
if beta_range is None:
beta_range = _default_ising_beta_range(linear, quadratic)
sweeps_per_beta = max(1, sweeps // 1000.0)
num_betas = int(math.ceil(sweeps / sweeps_per_beta))
if beta_schedule_type == "linear":
# interpolate a linear beta schedule
beta_schedule = np.linspace(*beta_range, num=num_betas)
elif beta_schedule_type == "geometric":
# interpolate a geometric beta schedule
beta_schedule = np.geomspace(*beta_range, num=num_betas)
else:
raise ValueError("Beta schedule type {} not implemented".format(beta_schedule_type))
if seed is None:
# pick a random seed
seed = randint(0, (1 << 64 - 1))
np_rand = np.random.RandomState(seed % 2**32)
states_shape = (num_reads, num_variables)
if initial_states is not None:
initial_states_array, init_label_map = initial_states
if not initial_states_array.shape == states_shape:
raise ValueError("`initial_states` must have shape "
"{}".format(states_shape))
if init_label_map is not None:
get_label = inverse_mapping.get if use_label_map else lambda i: i
initial_states_array = initial_states_array[:, [init_label_map[get_label(i)] for i in range(num_variables)]]
numpy_initial_states = np.ascontiguousarray(initial_states_array, dtype=np.int8)
else:
numpy_initial_states = 2*np_rand.randint(2, size=(num_reads, num_variables)).astype(np.int8) - 1
# run the simulated annealing algorithm
samples, energies = sa.simulated_annealing(num_reads, h,
coupler_starts, coupler_ends,
coupler_weights,
sweeps_per_beta, beta_schedule,
seed,
numpy_initial_states,
interrupt_function)
off = _bqm.spin.offset
info = {
"beta_range": beta_range,
"beta_schedule_type": beta_schedule_type
}
response = dimod.SampleSet.from_samples(
samples,
energy=energies+off,
info=info,
vartype=dimod.SPIN
)
response.change_vartype(_bqm.vartype, inplace=True)
if use_label_map:
response.relabel_variables(inverse_mapping, inplace=True)
return response | def function[sample, parameter[self, bqm, beta_range, num_reads, sweeps, beta_schedule_type, seed, interrupt_function, initial_states]]:
constant[Sample from a binary quadratic model using an implemented sample method.
Args:
bqm (:obj:`dimod.BinaryQuadraticModel`):
The binary quadratic model to be sampled.
beta_range (tuple, optional):
A 2-tuple defining the beginning and end of the beta schedule, where beta is the
inverse temperature. The schedule is applied linearly in beta. Default range is set
based on the total bias associated with each node.
num_reads (int, optional, default=10):
Each read is the result of a single run of the simulated annealing algorithm.
sweeps (int, optional, default=1000):
Number of sweeps or steps.
beta_schedule_type (string, optional, default='geometric'):
Beta schedule type, or how the beta values are interpolated between
the given 'beta_range'. Supported values are:
* linear
* geometric
seed (int, optional):
Seed to use for the PRNG. Specifying a particular seed with a constant
set of parameters produces identical results. If not provided, a random seed
is chosen.
initial_states (tuple(numpy.ndarray, dict), optional):
A tuple where the first value is a numpy array of initial states to seed the
simulated annealing runs, and the second is a dict defining a linear variable
labelling.
interrupt_function (function, optional):
If provided, interrupt_function is called with no parameters between each sample of
simulated annealing. If the function returns True, then simulated annealing will
terminate and return with all of the samples and energies found so far.
Returns:
:obj:`dimod.Response`: A `dimod` :obj:`~dimod.Response` object.
Examples:
This example runs simulated annealing on a binary quadratic model with some
different input parameters.
>>> import dimod
>>> import neal
...
>>> sampler = neal.SimulatedAnnealingSampler()
>>> bqm = dimod.BinaryQuadraticModel({'a': .5, 'b': -.5}, {('a', 'b'): -1}, 0.0, dimod.SPIN)
>>> # Run with default parameters
>>> response = sampler.sample(bqm)
>>> # Run with specified parameters
>>> response = sampler.sample(bqm, seed=1234, beta_range=[0.1, 4.2],
... num_reads=1, sweeps=20,
... beta_schedule_type='geometric')
>>> # Reuse a seed
>>> a1 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a2 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a1 == a2
True
]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b0b45ed0>]] begin[:]
variable[_bqm] assign[=] name[bqm]
variable[use_label_map] assign[=] constant[False]
if <ast.UnaryOp object at 0x7da1b0b452a0> begin[:]
<ast.Raise object at 0x7da1b0b451b0>
if compare[name[num_reads] less[<] constant[1]] begin[:]
<ast.Raise object at 0x7da1b0b45030>
if <ast.UnaryOp object at 0x7da1b0b44f40> begin[:]
<ast.Raise object at 0x7da1b0b44d90>
if <ast.BoolOp object at 0x7da1b0b44ca0> begin[:]
variable[error_msg] assign[=] constant['seed' should be an integer between 0 and 2^64 - 1]
<ast.Raise object at 0x7da1b0b44970>
if compare[name[interrupt_function] is constant[None]] begin[:]
def function[interrupt_function, parameter[]]:
return[constant[False]]
variable[num_variables] assign[=] call[name[len], parameter[name[_bqm]]]
variable[linear] assign[=] name[_bqm].spin.linear
variable[h] assign[=] <ast.ListComp object at 0x7da1b0b444c0>
variable[quadratic] assign[=] name[_bqm].spin.quadratic
if compare[call[name[len], parameter[name[quadratic]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b0b440a0> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0b469b0>]]
variable[couplers] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da1b0b46b30>, name[couplers]]]
<ast.Tuple object at 0x7da1b0b46d70> assign[=] call[name[zip], parameter[<ast.Starred object at 0x7da1b0b46e60>]]
if compare[name[beta_range] is constant[None]] begin[:]
variable[beta_range] assign[=] call[name[_default_ising_beta_range], parameter[name[linear], name[quadratic]]]
variable[sweeps_per_beta] assign[=] call[name[max], parameter[constant[1], binary_operation[name[sweeps] <ast.FloorDiv object at 0x7da2590d6bc0> constant[1000.0]]]]
variable[num_betas] assign[=] call[name[int], parameter[call[name[math].ceil, parameter[binary_operation[name[sweeps] / name[sweeps_per_beta]]]]]]
if compare[name[beta_schedule_type] equal[==] constant[linear]] begin[:]
variable[beta_schedule] assign[=] call[name[np].linspace, parameter[<ast.Starred object at 0x7da1b0b47760>]]
if compare[name[seed] is constant[None]] begin[:]
variable[seed] assign[=] call[name[randint], parameter[constant[0], binary_operation[constant[1] <ast.LShift object at 0x7da2590d69e0> binary_operation[constant[64] - constant[1]]]]]
variable[np_rand] assign[=] call[name[np].random.RandomState, parameter[binary_operation[name[seed] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[2] ** constant[32]]]]]
variable[states_shape] assign[=] tuple[[<ast.Name object at 0x7da1b0b29420>, <ast.Name object at 0x7da1b0b29450>]]
if compare[name[initial_states] is_not constant[None]] begin[:]
<ast.Tuple object at 0x7da1b0b28fd0> assign[=] name[initial_states]
if <ast.UnaryOp object at 0x7da1b0b29600> begin[:]
<ast.Raise object at 0x7da1b0b28f70>
if compare[name[init_label_map] is_not constant[None]] begin[:]
variable[get_label] assign[=] <ast.IfExp object at 0x7da1b0b28460>
variable[initial_states_array] assign[=] call[name[initial_states_array]][tuple[[<ast.Slice object at 0x7da1b0b286d0>, <ast.ListComp object at 0x7da1b0b28700>]]]
variable[numpy_initial_states] assign[=] call[name[np].ascontiguousarray, parameter[name[initial_states_array]]]
<ast.Tuple object at 0x7da1b0b29390> assign[=] call[name[sa].simulated_annealing, parameter[name[num_reads], name[h], name[coupler_starts], name[coupler_ends], name[coupler_weights], name[sweeps_per_beta], name[beta_schedule], name[seed], name[numpy_initial_states], name[interrupt_function]]]
variable[off] assign[=] name[_bqm].spin.offset
variable[info] assign[=] dictionary[[<ast.Constant object at 0x7da1b0b282e0>, <ast.Constant object at 0x7da1b0b281f0>], [<ast.Name object at 0x7da1b0b28280>, <ast.Name object at 0x7da1b0b28250>]]
variable[response] assign[=] call[name[dimod].SampleSet.from_samples, parameter[name[samples]]]
call[name[response].change_vartype, parameter[name[_bqm].vartype]]
if name[use_label_map] begin[:]
call[name[response].relabel_variables, parameter[name[inverse_mapping]]]
return[name[response]] | keyword[def] identifier[sample] ( identifier[self] , identifier[bqm] , identifier[beta_range] = keyword[None] , identifier[num_reads] = literal[int] , identifier[sweeps] = literal[int] ,
identifier[beta_schedule_type] = literal[string] , identifier[seed] = keyword[None] ,
identifier[interrupt_function] = keyword[None] , identifier[initial_states] = keyword[None] ):
literal[string]
keyword[if] identifier[all] ( identifier[v] keyword[in] identifier[bqm] . identifier[linear] keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[len] ( identifier[bqm] ))):
identifier[_bqm] = identifier[bqm]
identifier[use_label_map] = keyword[False]
keyword[else] :
keyword[try] :
identifier[inverse_mapping] = identifier[dict] ( identifier[enumerate] ( identifier[sorted] ( identifier[bqm] . identifier[linear] )))
keyword[except] identifier[TypeError] :
identifier[inverse_mapping] = identifier[dict] ( identifier[enumerate] ( identifier[bqm] . identifier[linear] ))
identifier[mapping] ={ identifier[v] : identifier[i] keyword[for] identifier[i] , identifier[v] keyword[in] identifier[iteritems] ( identifier[inverse_mapping] )}
identifier[_bqm] = identifier[bqm] . identifier[relabel_variables] ( identifier[mapping] , identifier[inplace] = keyword[False] )
identifier[use_label_map] = keyword[True]
keyword[if] keyword[not] identifier[isinstance] ( identifier[num_reads] , identifier[Integral] ):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[num_reads] < literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] ( identifier[seed] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[seed] , identifier[Integral] )):
keyword[raise] identifier[TypeError] ( literal[string] )
keyword[if] identifier[isinstance] ( identifier[seed] , identifier[Integral] ) keyword[and] keyword[not] ( literal[int] < identifier[seed] <( literal[int] ** literal[int] - literal[int] )):
identifier[error_msg] = literal[string]
keyword[raise] identifier[ValueError] ( identifier[error_msg] )
keyword[if] identifier[interrupt_function] keyword[is] keyword[None] :
keyword[def] identifier[interrupt_function] ():
keyword[return] keyword[False]
identifier[num_variables] = identifier[len] ( identifier[_bqm] )
identifier[linear] = identifier[_bqm] . identifier[spin] . identifier[linear]
identifier[h] =[ identifier[linear] [ identifier[v] ] keyword[for] identifier[v] keyword[in] identifier[range] ( identifier[num_variables] )]
identifier[quadratic] = identifier[_bqm] . identifier[spin] . identifier[quadratic]
keyword[if] identifier[len] ( identifier[quadratic] )> literal[int] :
identifier[couplers] , identifier[coupler_weights] = identifier[zip] (* identifier[iteritems] ( identifier[quadratic] ))
identifier[couplers] = identifier[map] ( keyword[lambda] identifier[c] :( identifier[c] [ literal[int] ], identifier[c] [ literal[int] ]), identifier[couplers] )
identifier[coupler_starts] , identifier[coupler_ends] = identifier[zip] (* identifier[couplers] )
keyword[else] :
identifier[coupler_starts] , identifier[coupler_ends] , identifier[coupler_weights] =[],[],[]
keyword[if] identifier[beta_range] keyword[is] keyword[None] :
identifier[beta_range] = identifier[_default_ising_beta_range] ( identifier[linear] , identifier[quadratic] )
identifier[sweeps_per_beta] = identifier[max] ( literal[int] , identifier[sweeps] // literal[int] )
identifier[num_betas] = identifier[int] ( identifier[math] . identifier[ceil] ( identifier[sweeps] / identifier[sweeps_per_beta] ))
keyword[if] identifier[beta_schedule_type] == literal[string] :
identifier[beta_schedule] = identifier[np] . identifier[linspace] (* identifier[beta_range] , identifier[num] = identifier[num_betas] )
keyword[elif] identifier[beta_schedule_type] == literal[string] :
identifier[beta_schedule] = identifier[np] . identifier[geomspace] (* identifier[beta_range] , identifier[num] = identifier[num_betas] )
keyword[else] :
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[beta_schedule_type] ))
keyword[if] identifier[seed] keyword[is] keyword[None] :
identifier[seed] = identifier[randint] ( literal[int] ,( literal[int] << literal[int] - literal[int] ))
identifier[np_rand] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[seed] % literal[int] ** literal[int] )
identifier[states_shape] =( identifier[num_reads] , identifier[num_variables] )
keyword[if] identifier[initial_states] keyword[is] keyword[not] keyword[None] :
identifier[initial_states_array] , identifier[init_label_map] = identifier[initial_states]
keyword[if] keyword[not] identifier[initial_states_array] . identifier[shape] == identifier[states_shape] :
keyword[raise] identifier[ValueError] ( literal[string]
literal[string] . identifier[format] ( identifier[states_shape] ))
keyword[if] identifier[init_label_map] keyword[is] keyword[not] keyword[None] :
identifier[get_label] = identifier[inverse_mapping] . identifier[get] keyword[if] identifier[use_label_map] keyword[else] keyword[lambda] identifier[i] : identifier[i]
identifier[initial_states_array] = identifier[initial_states_array] [:,[ identifier[init_label_map] [ identifier[get_label] ( identifier[i] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[num_variables] )]]
identifier[numpy_initial_states] = identifier[np] . identifier[ascontiguousarray] ( identifier[initial_states_array] , identifier[dtype] = identifier[np] . identifier[int8] )
keyword[else] :
identifier[numpy_initial_states] = literal[int] * identifier[np_rand] . identifier[randint] ( literal[int] , identifier[size] =( identifier[num_reads] , identifier[num_variables] )). identifier[astype] ( identifier[np] . identifier[int8] )- literal[int]
identifier[samples] , identifier[energies] = identifier[sa] . identifier[simulated_annealing] ( identifier[num_reads] , identifier[h] ,
identifier[coupler_starts] , identifier[coupler_ends] ,
identifier[coupler_weights] ,
identifier[sweeps_per_beta] , identifier[beta_schedule] ,
identifier[seed] ,
identifier[numpy_initial_states] ,
identifier[interrupt_function] )
identifier[off] = identifier[_bqm] . identifier[spin] . identifier[offset]
identifier[info] ={
literal[string] : identifier[beta_range] ,
literal[string] : identifier[beta_schedule_type]
}
identifier[response] = identifier[dimod] . identifier[SampleSet] . identifier[from_samples] (
identifier[samples] ,
identifier[energy] = identifier[energies] + identifier[off] ,
identifier[info] = identifier[info] ,
identifier[vartype] = identifier[dimod] . identifier[SPIN]
)
identifier[response] . identifier[change_vartype] ( identifier[_bqm] . identifier[vartype] , identifier[inplace] = keyword[True] )
keyword[if] identifier[use_label_map] :
identifier[response] . identifier[relabel_variables] ( identifier[inverse_mapping] , identifier[inplace] = keyword[True] )
keyword[return] identifier[response] | def sample(self, bqm, beta_range=None, num_reads=10, sweeps=1000, beta_schedule_type='geometric', seed=None, interrupt_function=None, initial_states=None):
"""Sample from a binary quadratic model using an implemented sample method.
Args:
bqm (:obj:`dimod.BinaryQuadraticModel`):
The binary quadratic model to be sampled.
beta_range (tuple, optional):
A 2-tuple defining the beginning and end of the beta schedule, where beta is the
inverse temperature. The schedule is applied linearly in beta. Default range is set
based on the total bias associated with each node.
num_reads (int, optional, default=10):
Each read is the result of a single run of the simulated annealing algorithm.
sweeps (int, optional, default=1000):
Number of sweeps or steps.
beta_schedule_type (string, optional, default='geometric'):
Beta schedule type, or how the beta values are interpolated between
the given 'beta_range'. Supported values are:
* linear
* geometric
seed (int, optional):
Seed to use for the PRNG. Specifying a particular seed with a constant
set of parameters produces identical results. If not provided, a random seed
is chosen.
initial_states (tuple(numpy.ndarray, dict), optional):
A tuple where the first value is a numpy array of initial states to seed the
simulated annealing runs, and the second is a dict defining a linear variable
labelling.
interrupt_function (function, optional):
If provided, interrupt_function is called with no parameters between each sample of
simulated annealing. If the function returns True, then simulated annealing will
terminate and return with all of the samples and energies found so far.
Returns:
:obj:`dimod.Response`: A `dimod` :obj:`~dimod.Response` object.
Examples:
This example runs simulated annealing on a binary quadratic model with some
different input parameters.
>>> import dimod
>>> import neal
...
>>> sampler = neal.SimulatedAnnealingSampler()
>>> bqm = dimod.BinaryQuadraticModel({'a': .5, 'b': -.5}, {('a', 'b'): -1}, 0.0, dimod.SPIN)
>>> # Run with default parameters
>>> response = sampler.sample(bqm)
>>> # Run with specified parameters
>>> response = sampler.sample(bqm, seed=1234, beta_range=[0.1, 4.2],
... num_reads=1, sweeps=20,
... beta_schedule_type='geometric')
>>> # Reuse a seed
>>> a1 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a2 = next((sampler.sample(bqm, seed=88)).samples())['a']
>>> a1 == a2
True
"""
# if already index-labelled, just continue
if all((v in bqm.linear for v in range(len(bqm)))):
_bqm = bqm
use_label_map = False # depends on [control=['if'], data=[]]
else:
try:
inverse_mapping = dict(enumerate(sorted(bqm.linear))) # depends on [control=['try'], data=[]]
except TypeError:
# in python3 unlike types cannot be sorted
inverse_mapping = dict(enumerate(bqm.linear)) # depends on [control=['except'], data=[]]
mapping = {v: i for (i, v) in iteritems(inverse_mapping)}
_bqm = bqm.relabel_variables(mapping, inplace=False)
use_label_map = True
# beta_range, sweeps are handled by simulated_annealing
if not isinstance(num_reads, Integral):
raise TypeError("'samples' should be a positive integer") # depends on [control=['if'], data=[]]
if num_reads < 1:
raise ValueError("'samples' should be a positive integer") # depends on [control=['if'], data=[]]
if not (seed is None or isinstance(seed, Integral)):
raise TypeError("'seed' should be None or a positive integer") # depends on [control=['if'], data=[]]
if isinstance(seed, Integral) and (not 0 < seed < 2 ** 64 - 1):
error_msg = "'seed' should be an integer between 0 and 2^64 - 1"
raise ValueError(error_msg) # depends on [control=['if'], data=[]]
if interrupt_function is None:
def interrupt_function():
    # Default no-op interrupt hook: annealing is never asked to stop early.
    return False # depends on [control=['if'], data=[]]
num_variables = len(_bqm)
# get the Ising linear biases
linear = _bqm.spin.linear
h = [linear[v] for v in range(num_variables)]
quadratic = _bqm.spin.quadratic
if len(quadratic) > 0:
(couplers, coupler_weights) = zip(*iteritems(quadratic))
couplers = map(lambda c: (c[0], c[1]), couplers)
(coupler_starts, coupler_ends) = zip(*couplers) # depends on [control=['if'], data=[]]
else:
(coupler_starts, coupler_ends, coupler_weights) = ([], [], [])
if beta_range is None:
beta_range = _default_ising_beta_range(linear, quadratic) # depends on [control=['if'], data=['beta_range']]
sweeps_per_beta = max(1, sweeps // 1000.0)
num_betas = int(math.ceil(sweeps / sweeps_per_beta))
if beta_schedule_type == 'linear':
# interpolate a linear beta schedule
beta_schedule = np.linspace(*beta_range, num=num_betas) # depends on [control=['if'], data=[]]
elif beta_schedule_type == 'geometric':
# interpolate a geometric beta schedule
beta_schedule = np.geomspace(*beta_range, num=num_betas) # depends on [control=['if'], data=[]]
else:
raise ValueError('Beta schedule type {} not implemented'.format(beta_schedule_type))
if seed is None:
# pick a random seed
seed = randint(0, 1 << 64 - 1) # depends on [control=['if'], data=['seed']]
np_rand = np.random.RandomState(seed % 2 ** 32)
states_shape = (num_reads, num_variables)
if initial_states is not None:
(initial_states_array, init_label_map) = initial_states
if not initial_states_array.shape == states_shape:
raise ValueError('`initial_states` must have shape {}'.format(states_shape)) # depends on [control=['if'], data=[]]
if init_label_map is not None:
get_label = inverse_mapping.get if use_label_map else lambda i: i
initial_states_array = initial_states_array[:, [init_label_map[get_label(i)] for i in range(num_variables)]] # depends on [control=['if'], data=['init_label_map']]
numpy_initial_states = np.ascontiguousarray(initial_states_array, dtype=np.int8) # depends on [control=['if'], data=['initial_states']]
else:
numpy_initial_states = 2 * np_rand.randint(2, size=(num_reads, num_variables)).astype(np.int8) - 1
# run the simulated annealing algorithm
(samples, energies) = sa.simulated_annealing(num_reads, h, coupler_starts, coupler_ends, coupler_weights, sweeps_per_beta, beta_schedule, seed, numpy_initial_states, interrupt_function)
off = _bqm.spin.offset
info = {'beta_range': beta_range, 'beta_schedule_type': beta_schedule_type}
response = dimod.SampleSet.from_samples(samples, energy=energies + off, info=info, vartype=dimod.SPIN)
response.change_vartype(_bqm.vartype, inplace=True)
if use_label_map:
response.relabel_variables(inverse_mapping, inplace=True) # depends on [control=['if'], data=[]]
return response |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.