code stringlengths 75 104k | code_sememe stringlengths 47 309k | token_type stringlengths 215 214k | code_dependency stringlengths 75 155k |
|---|---|---|---|
def flushall(self, async_op=False):
"""
Remove all keys from all databases.
:param async_op: lets the entire dataset to be freed asynchronously. \
Defaults to False
"""
if async_op:
fut = self.execute(b'FLUSHALL', b'ASYNC')
else:
fut = self.execute(b'FLUSHALL')
return wait_ok(fut) | def function[flushall, parameter[self, async_op]]:
constant[
Remove all keys from all databases.
:param async_op: lets the entire dataset to be freed asynchronously. Defaults to False
]
if name[async_op] begin[:]
variable[fut] assign[=] call[name[self].execute, parameter[constant[b'FLUSHALL'], constant[b'ASYNC']]]
return[call[name[wait_ok], parameter[name[fut]]]] | keyword[def] identifier[flushall] ( identifier[self] , identifier[async_op] = keyword[False] ):
literal[string]
keyword[if] identifier[async_op] :
identifier[fut] = identifier[self] . identifier[execute] ( literal[string] , literal[string] )
keyword[else] :
identifier[fut] = identifier[self] . identifier[execute] ( literal[string] )
keyword[return] identifier[wait_ok] ( identifier[fut] ) | def flushall(self, async_op=False):
"""
Remove all keys from all databases.
:param async_op: lets the entire dataset to be freed asynchronously. Defaults to False
"""
if async_op:
fut = self.execute(b'FLUSHALL', b'ASYNC') # depends on [control=['if'], data=[]]
else:
fut = self.execute(b'FLUSHALL')
return wait_ok(fut) |
def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self,
'text_normalized') and self.text_normalized is not None:
_dict['text_normalized'] = self.text_normalized
return _dict | def function[_to_dict, parameter[self]]:
constant[Return a json dictionary representing this model.]
variable[_dict] assign[=] dictionary[[], []]
if <ast.BoolOp object at 0x7da20c6a88e0> begin[:]
call[name[_dict]][constant[text_normalized]] assign[=] name[self].text_normalized
return[name[_dict]] | keyword[def] identifier[_to_dict] ( identifier[self] ):
literal[string]
identifier[_dict] ={}
keyword[if] identifier[hasattr] ( identifier[self] ,
literal[string] ) keyword[and] identifier[self] . identifier[text_normalized] keyword[is] keyword[not] keyword[None] :
identifier[_dict] [ literal[string] ]= identifier[self] . identifier[text_normalized]
keyword[return] identifier[_dict] | def _to_dict(self):
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'text_normalized') and self.text_normalized is not None:
_dict['text_normalized'] = self.text_normalized # depends on [control=['if'], data=[]]
return _dict |
def uninstall(self, auto_confirm=False):
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
if not self.check_if_exists():
raise UninstallationError("Cannot uninstall requirement %s, not installed" % (self.name,))
dist = self.satisfied_by or self.conflicts_with
paths_to_remove = UninstallPathSet(dist)
pip_egg_info_path = os.path.join(dist.location,
dist.egg_name()) + '.egg-info'
# workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
debian_egg_info_path = pip_egg_info_path.replace(
'-py%s' % pkg_resources.PY_MAJOR, '')
easy_install_egg = dist.egg_name() + '.egg'
develop_egg_link = egg_link_path(dist)
pip_egg_info_exists = os.path.exists(pip_egg_info_path)
debian_egg_info_exists = os.path.exists(debian_egg_info_path)
if pip_egg_info_exists or debian_egg_info_exists:
# package installed by pip
if pip_egg_info_exists:
egg_info_path = pip_egg_info_path
else:
egg_info_path = debian_egg_info_path
paths_to_remove.add(egg_info_path)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata('installed-files.txt').splitlines():
path = os.path.normpath(os.path.join(egg_info_path, installed_file))
paths_to_remove.add(path)
#FIXME: need a test for this elif block
#occurs with --single-version-externally-managed/--record outside of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
namespaces = []
for top_level_pkg in [p for p
in dist.get_metadata('top_level.txt').splitlines()
if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc')
elif dist.location.endswith(easy_install_egg):
# package installed by easy_install
paths_to_remove.add(dist.location)
easy_install_pth = os.path.join(os.path.dirname(dist.location),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
elif develop_egg_link:
# develop egg
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
fh.close()
assert (link_pointer == dist.location), 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location)
# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
paths_to_remove.add(os.path.join(bin_py, script))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_py, script) + '.bat')
# find console_scripts
if dist.has_metadata('entry_points.txt'):
config = ConfigParser.SafeConfigParser()
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
if config.has_section('console_scripts'):
for name, value in config.items('console_scripts'):
paths_to_remove.add(os.path.join(bin_py, name))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest')
paths_to_remove.add(os.path.join(bin_py, name) + '-script.py')
paths_to_remove.remove(auto_confirm)
self.uninstalled = paths_to_remove | def function[uninstall, parameter[self, auto_confirm]]:
constant[
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
]
if <ast.UnaryOp object at 0x7da1b26adba0> begin[:]
<ast.Raise object at 0x7da1b26adea0>
variable[dist] assign[=] <ast.BoolOp object at 0x7da1b26ad240>
variable[paths_to_remove] assign[=] call[name[UninstallPathSet], parameter[name[dist]]]
variable[pip_egg_info_path] assign[=] binary_operation[call[name[os].path.join, parameter[name[dist].location, call[name[dist].egg_name, parameter[]]]] + constant[.egg-info]]
variable[debian_egg_info_path] assign[=] call[name[pip_egg_info_path].replace, parameter[binary_operation[constant[-py%s] <ast.Mod object at 0x7da2590d6920> name[pkg_resources].PY_MAJOR], constant[]]]
variable[easy_install_egg] assign[=] binary_operation[call[name[dist].egg_name, parameter[]] + constant[.egg]]
variable[develop_egg_link] assign[=] call[name[egg_link_path], parameter[name[dist]]]
variable[pip_egg_info_exists] assign[=] call[name[os].path.exists, parameter[name[pip_egg_info_path]]]
variable[debian_egg_info_exists] assign[=] call[name[os].path.exists, parameter[name[debian_egg_info_path]]]
if <ast.BoolOp object at 0x7da1b26aea40> begin[:]
if name[pip_egg_info_exists] begin[:]
variable[egg_info_path] assign[=] name[pip_egg_info_path]
call[name[paths_to_remove].add, parameter[name[egg_info_path]]]
if call[name[dist].has_metadata, parameter[constant[installed-files.txt]]] begin[:]
for taget[name[installed_file]] in starred[call[call[name[dist].get_metadata, parameter[constant[installed-files.txt]]].splitlines, parameter[]]] begin[:]
variable[path] assign[=] call[name[os].path.normpath, parameter[call[name[os].path.join, parameter[name[egg_info_path], name[installed_file]]]]]
call[name[paths_to_remove].add, parameter[name[path]]]
if <ast.BoolOp object at 0x7da18dc055d0> begin[:]
for taget[name[script]] in starred[call[name[dist].metadata_listdir, parameter[constant[scripts]]]] begin[:]
call[name[paths_to_remove].add, parameter[call[name[os].path.join, parameter[name[bin_py], name[script]]]]]
if compare[name[sys].platform equal[==] constant[win32]] begin[:]
call[name[paths_to_remove].add, parameter[binary_operation[call[name[os].path.join, parameter[name[bin_py], name[script]]] + constant[.bat]]]]
if call[name[dist].has_metadata, parameter[constant[entry_points.txt]]] begin[:]
variable[config] assign[=] call[name[ConfigParser].SafeConfigParser, parameter[]]
call[name[config].readfp, parameter[call[name[FakeFile], parameter[call[name[dist].get_metadata_lines, parameter[constant[entry_points.txt]]]]]]]
if call[name[config].has_section, parameter[constant[console_scripts]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da18dc07e50>, <ast.Name object at 0x7da18dc07c10>]]] in starred[call[name[config].items, parameter[constant[console_scripts]]]] begin[:]
call[name[paths_to_remove].add, parameter[call[name[os].path.join, parameter[name[bin_py], name[name]]]]]
if compare[name[sys].platform equal[==] constant[win32]] begin[:]
call[name[paths_to_remove].add, parameter[binary_operation[call[name[os].path.join, parameter[name[bin_py], name[name]]] + constant[.exe]]]]
call[name[paths_to_remove].add, parameter[binary_operation[call[name[os].path.join, parameter[name[bin_py], name[name]]] + constant[.exe.manifest]]]]
call[name[paths_to_remove].add, parameter[binary_operation[call[name[os].path.join, parameter[name[bin_py], name[name]]] + constant[-script.py]]]]
call[name[paths_to_remove].remove, parameter[name[auto_confirm]]]
name[self].uninstalled assign[=] name[paths_to_remove] | keyword[def] identifier[uninstall] ( identifier[self] , identifier[auto_confirm] = keyword[False] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[check_if_exists] ():
keyword[raise] identifier[UninstallationError] ( literal[string] %( identifier[self] . identifier[name] ,))
identifier[dist] = identifier[self] . identifier[satisfied_by] keyword[or] identifier[self] . identifier[conflicts_with]
identifier[paths_to_remove] = identifier[UninstallPathSet] ( identifier[dist] )
identifier[pip_egg_info_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[location] ,
identifier[dist] . identifier[egg_name] ())+ literal[string]
identifier[debian_egg_info_path] = identifier[pip_egg_info_path] . identifier[replace] (
literal[string] % identifier[pkg_resources] . identifier[PY_MAJOR] , literal[string] )
identifier[easy_install_egg] = identifier[dist] . identifier[egg_name] ()+ literal[string]
identifier[develop_egg_link] = identifier[egg_link_path] ( identifier[dist] )
identifier[pip_egg_info_exists] = identifier[os] . identifier[path] . identifier[exists] ( identifier[pip_egg_info_path] )
identifier[debian_egg_info_exists] = identifier[os] . identifier[path] . identifier[exists] ( identifier[debian_egg_info_path] )
keyword[if] identifier[pip_egg_info_exists] keyword[or] identifier[debian_egg_info_exists] :
keyword[if] identifier[pip_egg_info_exists] :
identifier[egg_info_path] = identifier[pip_egg_info_path]
keyword[else] :
identifier[egg_info_path] = identifier[debian_egg_info_path]
identifier[paths_to_remove] . identifier[add] ( identifier[egg_info_path] )
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
keyword[for] identifier[installed_file] keyword[in] identifier[dist] . identifier[get_metadata] ( literal[string] ). identifier[splitlines] ():
identifier[path] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[egg_info_path] , identifier[installed_file] ))
identifier[paths_to_remove] . identifier[add] ( identifier[path] )
keyword[elif] identifier[dist] . identifier[has_metadata] ( literal[string] ):
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[namespaces] = identifier[dist] . identifier[get_metadata] ( literal[string] )
keyword[else] :
identifier[namespaces] =[]
keyword[for] identifier[top_level_pkg] keyword[in] [ identifier[p] keyword[for] identifier[p]
keyword[in] identifier[dist] . identifier[get_metadata] ( literal[string] ). identifier[splitlines] ()
keyword[if] identifier[p] keyword[and] identifier[p] keyword[not] keyword[in] identifier[namespaces] ]:
identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[dist] . identifier[location] , identifier[top_level_pkg] )
identifier[paths_to_remove] . identifier[add] ( identifier[path] )
identifier[paths_to_remove] . identifier[add] ( identifier[path] + literal[string] )
identifier[paths_to_remove] . identifier[add] ( identifier[path] + literal[string] )
keyword[elif] identifier[dist] . identifier[location] . identifier[endswith] ( identifier[easy_install_egg] ):
identifier[paths_to_remove] . identifier[add] ( identifier[dist] . identifier[location] )
identifier[easy_install_pth] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[dist] . identifier[location] ),
literal[string] )
identifier[paths_to_remove] . identifier[add_pth] ( identifier[easy_install_pth] , literal[string] + identifier[easy_install_egg] )
keyword[elif] identifier[develop_egg_link] :
identifier[fh] = identifier[open] ( identifier[develop_egg_link] , literal[string] )
identifier[link_pointer] = identifier[os] . identifier[path] . identifier[normcase] ( identifier[fh] . identifier[readline] (). identifier[strip] ())
identifier[fh] . identifier[close] ()
keyword[assert] ( identifier[link_pointer] == identifier[dist] . identifier[location] ), literal[string] %( identifier[link_pointer] , identifier[self] . identifier[name] , identifier[dist] . identifier[location] )
identifier[paths_to_remove] . identifier[add] ( identifier[develop_egg_link] )
identifier[easy_install_pth] = identifier[os] . identifier[path] . identifier[join] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[develop_egg_link] ),
literal[string] )
identifier[paths_to_remove] . identifier[add_pth] ( identifier[easy_install_pth] , identifier[dist] . identifier[location] )
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ) keyword[and] identifier[dist] . identifier[metadata_isdir] ( literal[string] ):
keyword[for] identifier[script] keyword[in] identifier[dist] . identifier[metadata_listdir] ( literal[string] ):
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[script] ))
keyword[if] identifier[sys] . identifier[platform] == literal[string] :
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[script] )+ literal[string] )
keyword[if] identifier[dist] . identifier[has_metadata] ( literal[string] ):
identifier[config] = identifier[ConfigParser] . identifier[SafeConfigParser] ()
identifier[config] . identifier[readfp] ( identifier[FakeFile] ( identifier[dist] . identifier[get_metadata_lines] ( literal[string] )))
keyword[if] identifier[config] . identifier[has_section] ( literal[string] ):
keyword[for] identifier[name] , identifier[value] keyword[in] identifier[config] . identifier[items] ( literal[string] ):
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[name] ))
keyword[if] identifier[sys] . identifier[platform] == literal[string] :
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[name] )+ literal[string] )
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[name] )+ literal[string] )
identifier[paths_to_remove] . identifier[add] ( identifier[os] . identifier[path] . identifier[join] ( identifier[bin_py] , identifier[name] )+ literal[string] )
identifier[paths_to_remove] . identifier[remove] ( identifier[auto_confirm] )
identifier[self] . identifier[uninstalled] = identifier[paths_to_remove] | def uninstall(self, auto_confirm=False):
"""
Uninstall the distribution currently satisfying this requirement.
Prompts before removing or modifying files unless
``auto_confirm`` is True.
Refuses to delete or modify files outside of ``sys.prefix`` -
thus uninstallation within a virtual environment can only
modify that virtual environment, even if the virtualenv is
linked to global site-packages.
"""
if not self.check_if_exists():
raise UninstallationError('Cannot uninstall requirement %s, not installed' % (self.name,)) # depends on [control=['if'], data=[]]
dist = self.satisfied_by or self.conflicts_with
paths_to_remove = UninstallPathSet(dist)
pip_egg_info_path = os.path.join(dist.location, dist.egg_name()) + '.egg-info'
# workaround for http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=618367
debian_egg_info_path = pip_egg_info_path.replace('-py%s' % pkg_resources.PY_MAJOR, '')
easy_install_egg = dist.egg_name() + '.egg'
develop_egg_link = egg_link_path(dist)
pip_egg_info_exists = os.path.exists(pip_egg_info_path)
debian_egg_info_exists = os.path.exists(debian_egg_info_path)
if pip_egg_info_exists or debian_egg_info_exists:
# package installed by pip
if pip_egg_info_exists:
egg_info_path = pip_egg_info_path # depends on [control=['if'], data=[]]
else:
egg_info_path = debian_egg_info_path
paths_to_remove.add(egg_info_path)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata('installed-files.txt').splitlines():
path = os.path.normpath(os.path.join(egg_info_path, installed_file))
paths_to_remove.add(path) # depends on [control=['for'], data=['installed_file']] # depends on [control=['if'], data=[]]
#FIXME: need a test for this elif block
#occurs with --single-version-externally-managed/--record outside of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt') # depends on [control=['if'], data=[]]
else:
namespaces = []
for top_level_pkg in [p for p in dist.get_metadata('top_level.txt').splitlines() if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc') # depends on [control=['for'], data=['top_level_pkg']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
elif dist.location.endswith(easy_install_egg):
# package installed by easy_install
paths_to_remove.add(dist.location)
easy_install_pth = os.path.join(os.path.dirname(dist.location), 'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) # depends on [control=['if'], data=[]]
elif develop_egg_link:
# develop egg
fh = open(develop_egg_link, 'r')
link_pointer = os.path.normcase(fh.readline().strip())
fh.close()
assert link_pointer == dist.location, 'Egg-link %s does not match installed location of %s (at %s)' % (link_pointer, self.name, dist.location)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), 'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location) # depends on [control=['if'], data=[]]
# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
paths_to_remove.add(os.path.join(bin_py, script))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_py, script) + '.bat') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['script']] # depends on [control=['if'], data=[]]
# find console_scripts
if dist.has_metadata('entry_points.txt'):
config = ConfigParser.SafeConfigParser()
config.readfp(FakeFile(dist.get_metadata_lines('entry_points.txt')))
if config.has_section('console_scripts'):
for (name, value) in config.items('console_scripts'):
paths_to_remove.add(os.path.join(bin_py, name))
if sys.platform == 'win32':
paths_to_remove.add(os.path.join(bin_py, name) + '.exe')
paths_to_remove.add(os.path.join(bin_py, name) + '.exe.manifest')
paths_to_remove.add(os.path.join(bin_py, name) + '-script.py') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
paths_to_remove.remove(auto_confirm)
self.uninstalled = paths_to_remove |
def form_invalid(self, post_form, attachment_formset, poll_option_formset, **kwargs):
""" Processes invalid forms. """
poll_errors = [k for k in post_form.errors.keys() if k.startswith('poll_')]
if (
poll_errors or
(
poll_option_formset and
not poll_option_formset.is_valid() and
len(post_form.cleaned_data['poll_question'])
)
):
messages.error(self.request, self.poll_option_formset_general_error_message)
return super().form_invalid(
post_form, attachment_formset, poll_option_formset=poll_option_formset, **kwargs) | def function[form_invalid, parameter[self, post_form, attachment_formset, poll_option_formset]]:
constant[ Processes invalid forms. ]
variable[poll_errors] assign[=] <ast.ListComp object at 0x7da207f98730>
if <ast.BoolOp object at 0x7da207f9b400> begin[:]
call[name[messages].error, parameter[name[self].request, name[self].poll_option_formset_general_error_message]]
return[call[call[name[super], parameter[]].form_invalid, parameter[name[post_form], name[attachment_formset]]]] | keyword[def] identifier[form_invalid] ( identifier[self] , identifier[post_form] , identifier[attachment_formset] , identifier[poll_option_formset] ,** identifier[kwargs] ):
literal[string]
identifier[poll_errors] =[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[post_form] . identifier[errors] . identifier[keys] () keyword[if] identifier[k] . identifier[startswith] ( literal[string] )]
keyword[if] (
identifier[poll_errors] keyword[or]
(
identifier[poll_option_formset] keyword[and]
keyword[not] identifier[poll_option_formset] . identifier[is_valid] () keyword[and]
identifier[len] ( identifier[post_form] . identifier[cleaned_data] [ literal[string] ])
)
):
identifier[messages] . identifier[error] ( identifier[self] . identifier[request] , identifier[self] . identifier[poll_option_formset_general_error_message] )
keyword[return] identifier[super] (). identifier[form_invalid] (
identifier[post_form] , identifier[attachment_formset] , identifier[poll_option_formset] = identifier[poll_option_formset] ,** identifier[kwargs] ) | def form_invalid(self, post_form, attachment_formset, poll_option_formset, **kwargs):
""" Processes invalid forms. """
poll_errors = [k for k in post_form.errors.keys() if k.startswith('poll_')]
if poll_errors or (poll_option_formset and (not poll_option_formset.is_valid()) and len(post_form.cleaned_data['poll_question'])):
messages.error(self.request, self.poll_option_formset_general_error_message) # depends on [control=['if'], data=[]]
return super().form_invalid(post_form, attachment_formset, poll_option_formset=poll_option_formset, **kwargs) |
def gen_scripts(
file_name, file_name_ext, obj_name, obj_ext_name, output, output_ext,
field=1, notexplicit=None, ascii_props=False, append=False, prefix=""
):
"""Generate `script` property."""
obj = {}
obj2 = {}
aliases = {}
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, 'PropertyValueAliases.txt'), 'r', 'utf-8') as uf:
for line in uf:
if line.startswith('sc ;'):
values = line.split(';')
aliases[format_name(values[1].strip())] = format_name(values[2].strip())
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, file_name_ext), 'r', 'utf-8') as uf:
for line in uf:
if not line.startswith('#'):
data = line.split('#')[0].split(';')
if len(data) < 2:
continue
exts = [aliases[format_name(n)] for n in data[1].strip().split(' ')]
span = create_span([int(i, 16) for i in data[0].strip().split('..')], is_bytes=ascii_props)
for ext in exts:
if ext not in obj2:
obj2[ext] = []
if span is None:
continue
obj2[ext].extend(span)
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, file_name), 'r', 'utf-8') as uf:
for line in uf:
if not line.startswith('#'):
data = line.split('#')[0].split(';')
if len(data) < 2:
continue
span = create_span([int(i, 16) for i in data[0].strip().split('..')], is_bytes=ascii_props)
name = format_name(data[1])
if name not in obj:
obj[name] = []
if name not in obj2:
obj2[name] = []
if span is None:
continue
obj[name].extend(span)
obj2[name].extend(span)
for name in list(obj.keys()):
s = set(obj[name])
obj[name] = sorted(s)
for name in list(obj2.keys()):
s = set(obj2[name])
obj2[name] = sorted(s)
if notexplicit:
not_explicitly_defined(obj, notexplicit, is_bytes=ascii_props)
not_explicitly_defined(obj2, notexplicit, is_bytes=ascii_props)
# Convert characters values to ranges
char2range(obj, is_bytes=ascii_props)
char2range(obj2, is_bytes=ascii_props)
with codecs.open(output, 'a' if append else 'w', 'utf-8') as f:
if not append:
f.write(HEADER)
# Write out the Unicode properties
f.write('%s_%s = {\n' % (prefix, obj_name))
count = len(obj) - 1
i = 0
for k1, v1 in sorted(obj.items()):
f.write(' "%s": "%s"' % (k1, v1))
if i == count:
f.write('\n}\n')
else:
f.write(',\n')
i += 1
with codecs.open(output_ext, 'a' if append else 'w', 'utf-8') as f:
if not append:
f.write(HEADER)
# Write out the Unicode properties
f.write('%s_%s = {\n' % (prefix, obj_ext_name))
count = len(obj2) - 1
i = 0
for k1, v1 in sorted(obj2.items()):
f.write(' "%s": "%s"' % (k1, v1))
if i == count:
f.write('\n}\n')
else:
f.write(',\n')
i += 1 | def function[gen_scripts, parameter[file_name, file_name_ext, obj_name, obj_ext_name, output, output_ext, field, notexplicit, ascii_props, append, prefix]]:
constant[Generate `script` property.]
variable[obj] assign[=] dictionary[[], []]
variable[obj2] assign[=] dictionary[[], []]
variable[aliases] assign[=] dictionary[[], []]
with call[name[codecs].open, parameter[call[name[os].path.join, parameter[name[HOME], constant[unicodedata], name[UNIVERSION], constant[PropertyValueAliases.txt]]], constant[r], constant[utf-8]]] begin[:]
for taget[name[line]] in starred[name[uf]] begin[:]
if call[name[line].startswith, parameter[constant[sc ;]]] begin[:]
variable[values] assign[=] call[name[line].split, parameter[constant[;]]]
call[name[aliases]][call[name[format_name], parameter[call[call[name[values]][constant[1]].strip, parameter[]]]]] assign[=] call[name[format_name], parameter[call[call[name[values]][constant[2]].strip, parameter[]]]]
with call[name[codecs].open, parameter[call[name[os].path.join, parameter[name[HOME], constant[unicodedata], name[UNIVERSION], name[file_name_ext]]], constant[r], constant[utf-8]]] begin[:]
for taget[name[line]] in starred[name[uf]] begin[:]
if <ast.UnaryOp object at 0x7da1b0399540> begin[:]
variable[data] assign[=] call[call[call[name[line].split, parameter[constant[#]]]][constant[0]].split, parameter[constant[;]]]
if compare[call[name[len], parameter[name[data]]] less[<] constant[2]] begin[:]
continue
variable[exts] assign[=] <ast.ListComp object at 0x7da1b0399030>
variable[span] assign[=] call[name[create_span], parameter[<ast.ListComp object at 0x7da1b0398c40>]]
for taget[name[ext]] in starred[name[exts]] begin[:]
if compare[name[ext] <ast.NotIn object at 0x7da2590d7190> name[obj2]] begin[:]
call[name[obj2]][name[ext]] assign[=] list[[]]
if compare[name[span] is constant[None]] begin[:]
continue
call[call[name[obj2]][name[ext]].extend, parameter[name[span]]]
with call[name[codecs].open, parameter[call[name[os].path.join, parameter[name[HOME], constant[unicodedata], name[UNIVERSION], name[file_name]]], constant[r], constant[utf-8]]] begin[:]
for taget[name[line]] in starred[name[uf]] begin[:]
if <ast.UnaryOp object at 0x7da1b03effd0> begin[:]
variable[data] assign[=] call[call[call[name[line].split, parameter[constant[#]]]][constant[0]].split, parameter[constant[;]]]
if compare[call[name[len], parameter[name[data]]] less[<] constant[2]] begin[:]
continue
variable[span] assign[=] call[name[create_span], parameter[<ast.ListComp object at 0x7da1b03efa90>]]
variable[name] assign[=] call[name[format_name], parameter[call[name[data]][constant[1]]]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[obj]] begin[:]
call[name[obj]][name[name]] assign[=] list[[]]
if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[obj2]] begin[:]
call[name[obj2]][name[name]] assign[=] list[[]]
if compare[name[span] is constant[None]] begin[:]
continue
call[call[name[obj]][name[name]].extend, parameter[name[span]]]
call[call[name[obj2]][name[name]].extend, parameter[name[span]]]
for taget[name[name]] in starred[call[name[list], parameter[call[name[obj].keys, parameter[]]]]] begin[:]
variable[s] assign[=] call[name[set], parameter[call[name[obj]][name[name]]]]
call[name[obj]][name[name]] assign[=] call[name[sorted], parameter[name[s]]]
for taget[name[name]] in starred[call[name[list], parameter[call[name[obj2].keys, parameter[]]]]] begin[:]
variable[s] assign[=] call[name[set], parameter[call[name[obj2]][name[name]]]]
call[name[obj2]][name[name]] assign[=] call[name[sorted], parameter[name[s]]]
if name[notexplicit] begin[:]
call[name[not_explicitly_defined], parameter[name[obj], name[notexplicit]]]
call[name[not_explicitly_defined], parameter[name[obj2], name[notexplicit]]]
call[name[char2range], parameter[name[obj]]]
call[name[char2range], parameter[name[obj2]]]
with call[name[codecs].open, parameter[name[output], <ast.IfExp object at 0x7da1b03edf60>, constant[utf-8]]] begin[:]
if <ast.UnaryOp object at 0x7da1b03eddb0> begin[:]
call[name[f].write, parameter[name[HEADER]]]
call[name[f].write, parameter[binary_operation[constant[%s_%s = {
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b03edae0>, <ast.Name object at 0x7da1b03edab0>]]]]]
variable[count] assign[=] binary_operation[call[name[len], parameter[name[obj]]] - constant[1]]
variable[i] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b03ed810>, <ast.Name object at 0x7da1b03ed7e0>]]] in starred[call[name[sorted], parameter[call[name[obj].items, parameter[]]]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[ "%s": "%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b03ed570>, <ast.Name object at 0x7da1b03ed540>]]]]]
if compare[name[i] equal[==] name[count]] begin[:]
call[name[f].write, parameter[constant[
}
]]]
<ast.AugAssign object at 0x7da1b03ed1e0>
with call[name[codecs].open, parameter[name[output_ext], <ast.IfExp object at 0x7da1b03ed030>, constant[utf-8]]] begin[:]
if <ast.UnaryOp object at 0x7da1b03ece80> begin[:]
call[name[f].write, parameter[name[HEADER]]]
call[name[f].write, parameter[binary_operation[constant[%s_%s = {
] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b03ecbb0>, <ast.Name object at 0x7da1b03ecb80>]]]]]
variable[count] assign[=] binary_operation[call[name[len], parameter[name[obj2]]] - constant[1]]
variable[i] assign[=] constant[0]
for taget[tuple[[<ast.Name object at 0x7da1b03ec8e0>, <ast.Name object at 0x7da1b03ec8b0>]]] in starred[call[name[sorted], parameter[call[name[obj2].items, parameter[]]]]] begin[:]
call[name[f].write, parameter[binary_operation[constant[ "%s": "%s"] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b03000d0>, <ast.Name object at 0x7da1b0300d00>]]]]]
if compare[name[i] equal[==] name[count]] begin[:]
call[name[f].write, parameter[constant[
}
]]]
<ast.AugAssign object at 0x7da1b0301210> | keyword[def] identifier[gen_scripts] (
identifier[file_name] , identifier[file_name_ext] , identifier[obj_name] , identifier[obj_ext_name] , identifier[output] , identifier[output_ext] ,
identifier[field] = literal[int] , identifier[notexplicit] = keyword[None] , identifier[ascii_props] = keyword[False] , identifier[append] = keyword[False] , identifier[prefix] = literal[string]
):
literal[string]
identifier[obj] ={}
identifier[obj2] ={}
identifier[aliases] ={}
keyword[with] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[HOME] , literal[string] , identifier[UNIVERSION] , literal[string] ), literal[string] , literal[string] ) keyword[as] identifier[uf] :
keyword[for] identifier[line] keyword[in] identifier[uf] :
keyword[if] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[values] = identifier[line] . identifier[split] ( literal[string] )
identifier[aliases] [ identifier[format_name] ( identifier[values] [ literal[int] ]. identifier[strip] ())]= identifier[format_name] ( identifier[values] [ literal[int] ]. identifier[strip] ())
keyword[with] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[HOME] , literal[string] , identifier[UNIVERSION] , identifier[file_name_ext] ), literal[string] , literal[string] ) keyword[as] identifier[uf] :
keyword[for] identifier[line] keyword[in] identifier[uf] :
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[data] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[data] )< literal[int] :
keyword[continue]
identifier[exts] =[ identifier[aliases] [ identifier[format_name] ( identifier[n] )] keyword[for] identifier[n] keyword[in] identifier[data] [ literal[int] ]. identifier[strip] (). identifier[split] ( literal[string] )]
identifier[span] = identifier[create_span] ([ identifier[int] ( identifier[i] , literal[int] ) keyword[for] identifier[i] keyword[in] identifier[data] [ literal[int] ]. identifier[strip] (). identifier[split] ( literal[string] )], identifier[is_bytes] = identifier[ascii_props] )
keyword[for] identifier[ext] keyword[in] identifier[exts] :
keyword[if] identifier[ext] keyword[not] keyword[in] identifier[obj2] :
identifier[obj2] [ identifier[ext] ]=[]
keyword[if] identifier[span] keyword[is] keyword[None] :
keyword[continue]
identifier[obj2] [ identifier[ext] ]. identifier[extend] ( identifier[span] )
keyword[with] identifier[codecs] . identifier[open] ( identifier[os] . identifier[path] . identifier[join] ( identifier[HOME] , literal[string] , identifier[UNIVERSION] , identifier[file_name] ), literal[string] , literal[string] ) keyword[as] identifier[uf] :
keyword[for] identifier[line] keyword[in] identifier[uf] :
keyword[if] keyword[not] identifier[line] . identifier[startswith] ( literal[string] ):
identifier[data] = identifier[line] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[data] )< literal[int] :
keyword[continue]
identifier[span] = identifier[create_span] ([ identifier[int] ( identifier[i] , literal[int] ) keyword[for] identifier[i] keyword[in] identifier[data] [ literal[int] ]. identifier[strip] (). identifier[split] ( literal[string] )], identifier[is_bytes] = identifier[ascii_props] )
identifier[name] = identifier[format_name] ( identifier[data] [ literal[int] ])
keyword[if] identifier[name] keyword[not] keyword[in] identifier[obj] :
identifier[obj] [ identifier[name] ]=[]
keyword[if] identifier[name] keyword[not] keyword[in] identifier[obj2] :
identifier[obj2] [ identifier[name] ]=[]
keyword[if] identifier[span] keyword[is] keyword[None] :
keyword[continue]
identifier[obj] [ identifier[name] ]. identifier[extend] ( identifier[span] )
identifier[obj2] [ identifier[name] ]. identifier[extend] ( identifier[span] )
keyword[for] identifier[name] keyword[in] identifier[list] ( identifier[obj] . identifier[keys] ()):
identifier[s] = identifier[set] ( identifier[obj] [ identifier[name] ])
identifier[obj] [ identifier[name] ]= identifier[sorted] ( identifier[s] )
keyword[for] identifier[name] keyword[in] identifier[list] ( identifier[obj2] . identifier[keys] ()):
identifier[s] = identifier[set] ( identifier[obj2] [ identifier[name] ])
identifier[obj2] [ identifier[name] ]= identifier[sorted] ( identifier[s] )
keyword[if] identifier[notexplicit] :
identifier[not_explicitly_defined] ( identifier[obj] , identifier[notexplicit] , identifier[is_bytes] = identifier[ascii_props] )
identifier[not_explicitly_defined] ( identifier[obj2] , identifier[notexplicit] , identifier[is_bytes] = identifier[ascii_props] )
identifier[char2range] ( identifier[obj] , identifier[is_bytes] = identifier[ascii_props] )
identifier[char2range] ( identifier[obj2] , identifier[is_bytes] = identifier[ascii_props] )
keyword[with] identifier[codecs] . identifier[open] ( identifier[output] , literal[string] keyword[if] identifier[append] keyword[else] literal[string] , literal[string] ) keyword[as] identifier[f] :
keyword[if] keyword[not] identifier[append] :
identifier[f] . identifier[write] ( identifier[HEADER] )
identifier[f] . identifier[write] ( literal[string] %( identifier[prefix] , identifier[obj_name] ))
identifier[count] = identifier[len] ( identifier[obj] )- literal[int]
identifier[i] = literal[int]
keyword[for] identifier[k1] , identifier[v1] keyword[in] identifier[sorted] ( identifier[obj] . identifier[items] ()):
identifier[f] . identifier[write] ( literal[string] %( identifier[k1] , identifier[v1] ))
keyword[if] identifier[i] == identifier[count] :
identifier[f] . identifier[write] ( literal[string] )
keyword[else] :
identifier[f] . identifier[write] ( literal[string] )
identifier[i] += literal[int]
keyword[with] identifier[codecs] . identifier[open] ( identifier[output_ext] , literal[string] keyword[if] identifier[append] keyword[else] literal[string] , literal[string] ) keyword[as] identifier[f] :
keyword[if] keyword[not] identifier[append] :
identifier[f] . identifier[write] ( identifier[HEADER] )
identifier[f] . identifier[write] ( literal[string] %( identifier[prefix] , identifier[obj_ext_name] ))
identifier[count] = identifier[len] ( identifier[obj2] )- literal[int]
identifier[i] = literal[int]
keyword[for] identifier[k1] , identifier[v1] keyword[in] identifier[sorted] ( identifier[obj2] . identifier[items] ()):
identifier[f] . identifier[write] ( literal[string] %( identifier[k1] , identifier[v1] ))
keyword[if] identifier[i] == identifier[count] :
identifier[f] . identifier[write] ( literal[string] )
keyword[else] :
identifier[f] . identifier[write] ( literal[string] )
identifier[i] += literal[int] | def gen_scripts(file_name, file_name_ext, obj_name, obj_ext_name, output, output_ext, field=1, notexplicit=None, ascii_props=False, append=False, prefix=''):
"""Generate `script` property."""
obj = {}
obj2 = {}
aliases = {}
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, 'PropertyValueAliases.txt'), 'r', 'utf-8') as uf:
for line in uf:
if line.startswith('sc ;'):
values = line.split(';')
aliases[format_name(values[1].strip())] = format_name(values[2].strip()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['uf']]
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, file_name_ext), 'r', 'utf-8') as uf:
for line in uf:
if not line.startswith('#'):
data = line.split('#')[0].split(';')
if len(data) < 2:
continue # depends on [control=['if'], data=[]]
exts = [aliases[format_name(n)] for n in data[1].strip().split(' ')]
span = create_span([int(i, 16) for i in data[0].strip().split('..')], is_bytes=ascii_props)
for ext in exts:
if ext not in obj2:
obj2[ext] = [] # depends on [control=['if'], data=['ext', 'obj2']]
if span is None:
continue # depends on [control=['if'], data=[]]
obj2[ext].extend(span) # depends on [control=['for'], data=['ext']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['uf']]
with codecs.open(os.path.join(HOME, 'unicodedata', UNIVERSION, file_name), 'r', 'utf-8') as uf:
for line in uf:
if not line.startswith('#'):
data = line.split('#')[0].split(';')
if len(data) < 2:
continue # depends on [control=['if'], data=[]]
span = create_span([int(i, 16) for i in data[0].strip().split('..')], is_bytes=ascii_props)
name = format_name(data[1])
if name not in obj:
obj[name] = [] # depends on [control=['if'], data=['name', 'obj']]
if name not in obj2:
obj2[name] = [] # depends on [control=['if'], data=['name', 'obj2']]
if span is None:
continue # depends on [control=['if'], data=[]]
obj[name].extend(span)
obj2[name].extend(span) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['uf']]
for name in list(obj.keys()):
s = set(obj[name])
obj[name] = sorted(s) # depends on [control=['for'], data=['name']]
for name in list(obj2.keys()):
s = set(obj2[name])
obj2[name] = sorted(s) # depends on [control=['for'], data=['name']]
if notexplicit:
not_explicitly_defined(obj, notexplicit, is_bytes=ascii_props)
not_explicitly_defined(obj2, notexplicit, is_bytes=ascii_props) # depends on [control=['if'], data=[]]
# Convert characters values to ranges
char2range(obj, is_bytes=ascii_props)
char2range(obj2, is_bytes=ascii_props)
with codecs.open(output, 'a' if append else 'w', 'utf-8') as f:
if not append:
f.write(HEADER) # depends on [control=['if'], data=[]]
# Write out the Unicode properties
f.write('%s_%s = {\n' % (prefix, obj_name))
count = len(obj) - 1
i = 0
for (k1, v1) in sorted(obj.items()):
f.write(' "%s": "%s"' % (k1, v1))
if i == count:
f.write('\n}\n') # depends on [control=['if'], data=[]]
else:
f.write(',\n')
i += 1 # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']]
with codecs.open(output_ext, 'a' if append else 'w', 'utf-8') as f:
if not append:
f.write(HEADER) # depends on [control=['if'], data=[]]
# Write out the Unicode properties
f.write('%s_%s = {\n' % (prefix, obj_ext_name))
count = len(obj2) - 1
i = 0
for (k1, v1) in sorted(obj2.items()):
f.write(' "%s": "%s"' % (k1, v1))
if i == count:
f.write('\n}\n') # depends on [control=['if'], data=[]]
else:
f.write(',\n')
i += 1 # depends on [control=['for'], data=[]] # depends on [control=['with'], data=['f']] |
def make_sure_path_exists(path):
    """Ensure that a directory exists.

    :param path: A directory path.
    :return: ``True`` if the directory already exists or was created,
        ``False`` if creation failed for any other OS-level reason
        (e.g. insufficient permissions).
    """
    logger.debug('Making sure path exists: {}'.format(path))
    try:
        os.makedirs(path)
    except OSError as err:
        # An already-existing directory is fine and keeps the call
        # idempotent; any other failure is reported to the caller.
        if err.errno != errno.EEXIST:
            return False
        return True
    logger.debug('Created directory at: {}'.format(path))
    return True
constant[Ensure that a directory exists.
:param path: A directory path.
]
call[name[logger].debug, parameter[call[constant[Making sure path exists: {}].format, parameter[name[path]]]]]
<ast.Try object at 0x7da1b21a7d60>
return[constant[True]] | keyword[def] identifier[make_sure_path_exists] ( identifier[path] ):
literal[string]
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[try] :
identifier[os] . identifier[makedirs] ( identifier[path] )
identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[path] ))
keyword[except] identifier[OSError] keyword[as] identifier[exception] :
keyword[if] identifier[exception] . identifier[errno] != identifier[errno] . identifier[EEXIST] :
keyword[return] keyword[False]
keyword[return] keyword[True] | def make_sure_path_exists(path):
"""Ensure that a directory exists.
:param path: A directory path.
"""
logger.debug('Making sure path exists: {}'.format(path))
try:
os.makedirs(path)
logger.debug('Created directory at: {}'.format(path)) # depends on [control=['try'], data=[]]
except OSError as exception:
if exception.errno != errno.EEXIST:
return False # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['exception']]
return True |
def method_repr_string(inst_str, meth_str, arg_strs=None,
                       allow_mixed_seps=True):
    r"""Return a repr string for a method that respects line width.

    This function is useful to generate a ``repr`` string for a derived
    class that is created through a method, for instance ::

        functional.translated(x)

    as a better way of representing ::

        FunctionalTranslation(functional, x)

    Parameters
    ----------
    inst_str : str
        Stringification of a class instance.
    meth_str : str
        Name of the method (not including the ``'.'``).
    arg_strs : sequence of str, optional
        Stringification of the arguments to the method. ``None`` is
        treated as an empty sequence.
    allow_mixed_seps : bool, optional
        If ``False`` and the argument strings do not fit on one line, use
        ``',\n'`` to separate all strings.
        By default, a mixture of ``', '`` and ``',\n'`` is used to fit
        as much on one line as possible.

        In case some of the ``arg_strs`` span multiple lines, it is
        usually advisable to set ``allow_mixed_seps`` to ``False`` since
        the result tends to be more readable that way.

    Returns
    -------
    meth_repr_str : str
        Concatenation of all strings in a way that the line width
        is respected.

    Examples
    --------
    >>> inst_str = 'MyClass'
    >>> meth_str = 'empty'
    >>> arg_strs = []
    >>> print(method_repr_string(inst_str, meth_str, arg_strs))
    MyClass.empty()
    >>> inst_str = 'MyClass'
    >>> meth_str = 'fromfile'
    >>> arg_strs = ["'tmpfile.txt'"]
    >>> print(method_repr_string(inst_str, meth_str, arg_strs))
    MyClass.fromfile('tmpfile.txt')
    >>> inst_str = "MyClass('init string')"
    >>> meth_str = 'method'
    >>> arg_strs = ['2.0']
    >>> print(method_repr_string(inst_str, meth_str, arg_strs))
    MyClass('init string').method(2.0)
    >>> long_inst_str = (
    ...     "MyClass('long string that will definitely trigger a line break')"
    ... )
    >>> meth_str = 'method'
    >>> long_arg1 = "'long argument string that should come on the next line'"
    >>> arg2 = 'param1=1'
    >>> arg3 = 'param2=2.0'
    >>> arg_strs = [long_arg1, arg2, arg3]
    >>> print(method_repr_string(long_inst_str, meth_str, arg_strs))
    MyClass(
        'long string that will definitely trigger a line break'
    ).method(
        'long argument string that should come on the next line',
        param1=1, param2=2.0
    )
    >>> print(method_repr_string(long_inst_str, meth_str, arg_strs,
    ...                          allow_mixed_seps=False))
    MyClass(
        'long string that will definitely trigger a line break'
    ).method(
        'long argument string that should come on the next line',
        param1=1,
        param2=2.0
    )
    """
    if arg_strs is None:
        # `arg_strs` is documented as optional; without this, omitting it
        # made `', '.join(arg_strs)` below raise `TypeError`.
        arg_strs = []

    linewidth = np.get_printoptions()['linewidth']

    # Part up to the method name
    if (len(inst_str) + 1 + len(meth_str) + 1 <= linewidth or
            '(' not in inst_str):
        # Instance part fits on one line (the two `+ 1` account for the
        # '.' separator and the opening '(' of the call), or `inst_str`
        # contains no call that could be broken up.
        init_parts = [inst_str, meth_str]
        # Length of the line to the end of the method name
        meth_line_start_len = len(inst_str) + 1 + len(meth_str)
    else:
        # TODO(kohr-h): use `maxsplit=1` kwarg, not supported in Py 2
        # Split off the part before the first '(' and after the last ')'.
        # The last ')' is located by reversing the string, splitting at
        # the first ')' of the reversal, and then undoing the reversal.
        left, rest = inst_str.split('(', 1)
        right, middle = rest[::-1].split(')', 1)
        middle, right = middle[::-1], right[::-1]
        if middle.startswith('\n') and middle.endswith('\n'):
            # Already on multiple lines
            new_inst_str = inst_str
        else:
            # Re-assemble with the argument part indented on its own lines.
            new_inst_str = '(\n'.join([left, indent(middle)]) + '\n)' + right

        # Length of the line to the end of the method name, consisting of
        # ')' + '.' + <method name>
        meth_line_start_len = 1 + 1 + len(meth_str)
        init_parts = [new_inst_str, meth_str]

    # Method call part
    arg_str_oneline = ', '.join(arg_strs)
    if meth_line_start_len + 1 + len(arg_str_oneline) + 1 <= linewidth:
        # All arguments fit on the same line as the method name.
        meth_call_str = '(' + arg_str_oneline + ')'
    elif not arg_str_oneline:
        # No arguments, but the opening part is already too long.
        meth_call_str = '(\n)'
    else:
        if allow_mixed_seps:
            # Pick ', ' or ',\n' per argument to pack lines greedily;
            # `- 4` leaves room for the indentation of the argument block.
            arg_seps = _separators(arg_strs, linewidth - 4)  # indented
        else:
            arg_seps = [',\n'] * (len(arg_strs) - 1)

        # Interleave argument strings with their separators; the last
        # argument pairs with the '' fillvalue, i.e. no trailing separator.
        full_arg_str = ''
        for arg_str, sep in zip_longest(arg_strs, arg_seps, fillvalue=''):
            full_arg_str += arg_str + sep

        meth_call_str = '(\n' + indent(full_arg_str) + '\n)'

    return '.'.join(init_parts) + meth_call_str
constant[Return a repr string for a method that respects line width.
This function is useful to generate a ``repr`` string for a derived
class that is created through a method, for instance ::
functional.translated(x)
as a better way of representing ::
FunctionalTranslation(functional, x)
Parameters
----------
inst_str : str
Stringification of a class instance.
meth_str : str
Name of the method (not including the ``'.'``).
arg_strs : sequence of str, optional
Stringification of the arguments to the method.
allow_mixed_seps : bool, optional
If ``False`` and the argument strings do not fit on one line, use
``',\n'`` to separate all strings.
By default, a mixture of ``', '`` and ``',\n'`` is used to fit
as much on one line as possible.
In case some of the ``arg_strs`` span multiple lines, it is
usually advisable to set ``allow_mixed_seps`` to ``False`` since
the result tends to be more readable that way.
Returns
-------
meth_repr_str : str
Concatenation of all strings in a way that the line width
is respected.
Examples
--------
>>> inst_str = 'MyClass'
>>> meth_str = 'empty'
>>> arg_strs = []
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass.empty()
>>> inst_str = 'MyClass'
>>> meth_str = 'fromfile'
>>> arg_strs = ["'tmpfile.txt'"]
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass.fromfile('tmpfile.txt')
>>> inst_str = "MyClass('init string')"
>>> meth_str = 'method'
>>> arg_strs = ['2.0']
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass('init string').method(2.0)
>>> long_inst_str = (
... "MyClass('long string that will definitely trigger a line break')"
... )
>>> meth_str = 'method'
>>> long_arg1 = "'long argument string that should come on the next line'"
>>> arg2 = 'param1=1'
>>> arg3 = 'param2=2.0'
>>> arg_strs = [long_arg1, arg2, arg3]
>>> print(method_repr_string(long_inst_str, meth_str, arg_strs))
MyClass(
'long string that will definitely trigger a line break'
).method(
'long argument string that should come on the next line',
param1=1, param2=2.0
)
>>> print(method_repr_string(long_inst_str, meth_str, arg_strs,
... allow_mixed_seps=False))
MyClass(
'long string that will definitely trigger a line break'
).method(
'long argument string that should come on the next line',
param1=1,
param2=2.0
)
]
variable[linewidth] assign[=] call[call[name[np].get_printoptions, parameter[]]][constant[linewidth]]
if <ast.BoolOp object at 0x7da1b1e5d5a0> begin[:]
variable[init_parts] assign[=] list[[<ast.Name object at 0x7da1b20b7910>, <ast.Name object at 0x7da1b20b51b0>]]
variable[meth_line_start_len] assign[=] binary_operation[binary_operation[call[name[len], parameter[name[inst_str]]] + constant[1]] + call[name[len], parameter[name[meth_str]]]]
variable[arg_str_oneline] assign[=] call[constant[, ].join, parameter[name[arg_strs]]]
if compare[binary_operation[binary_operation[binary_operation[name[meth_line_start_len] + constant[1]] + call[name[len], parameter[name[arg_str_oneline]]]] + constant[1]] less_or_equal[<=] name[linewidth]] begin[:]
variable[meth_call_str] assign[=] binary_operation[binary_operation[constant[(] + name[arg_str_oneline]] + constant[)]]
return[binary_operation[call[constant[.].join, parameter[name[init_parts]]] + name[meth_call_str]]] | keyword[def] identifier[method_repr_string] ( identifier[inst_str] , identifier[meth_str] , identifier[arg_strs] = keyword[None] ,
identifier[allow_mixed_seps] = keyword[True] ):
literal[string]
identifier[linewidth] = identifier[np] . identifier[get_printoptions] ()[ literal[string] ]
keyword[if] ( identifier[len] ( identifier[inst_str] )+ literal[int] + identifier[len] ( identifier[meth_str] )+ literal[int] <= identifier[linewidth] keyword[or]
literal[string] keyword[not] keyword[in] identifier[inst_str] ):
identifier[init_parts] =[ identifier[inst_str] , identifier[meth_str] ]
identifier[meth_line_start_len] = identifier[len] ( identifier[inst_str] )+ literal[int] + identifier[len] ( identifier[meth_str] )
keyword[else] :
identifier[left] , identifier[rest] = identifier[inst_str] . identifier[split] ( literal[string] , literal[int] )
identifier[right] , identifier[middle] = identifier[rest] [::- literal[int] ]. identifier[split] ( literal[string] , literal[int] )
identifier[middle] , identifier[right] = identifier[middle] [::- literal[int] ], identifier[right] [::- literal[int] ]
keyword[if] identifier[middle] . identifier[startswith] ( literal[string] ) keyword[and] identifier[middle] . identifier[endswith] ( literal[string] ):
identifier[new_inst_str] = identifier[inst_str]
keyword[else] :
identifier[new_inst_str] = literal[string] . identifier[join] ([ identifier[left] , identifier[indent] ( identifier[middle] )])+ literal[string] + identifier[right]
identifier[meth_line_start_len] = literal[int] + literal[int] + identifier[len] ( identifier[meth_str] )
identifier[init_parts] =[ identifier[new_inst_str] , identifier[meth_str] ]
identifier[arg_str_oneline] = literal[string] . identifier[join] ( identifier[arg_strs] )
keyword[if] identifier[meth_line_start_len] + literal[int] + identifier[len] ( identifier[arg_str_oneline] )+ literal[int] <= identifier[linewidth] :
identifier[meth_call_str] = literal[string] + identifier[arg_str_oneline] + literal[string]
keyword[elif] keyword[not] identifier[arg_str_oneline] :
identifier[meth_call_str] = literal[string]
keyword[else] :
keyword[if] identifier[allow_mixed_seps] :
identifier[arg_seps] = identifier[_separators] ( identifier[arg_strs] , identifier[linewidth] - literal[int] )
keyword[else] :
identifier[arg_seps] =[ literal[string] ]*( identifier[len] ( identifier[arg_strs] )- literal[int] )
identifier[full_arg_str] = literal[string]
keyword[for] identifier[arg_str] , identifier[sep] keyword[in] identifier[zip_longest] ( identifier[arg_strs] , identifier[arg_seps] , identifier[fillvalue] = literal[string] ):
identifier[full_arg_str] += identifier[arg_str] + identifier[sep]
identifier[meth_call_str] = literal[string] + identifier[indent] ( identifier[full_arg_str] )+ literal[string]
keyword[return] literal[string] . identifier[join] ( identifier[init_parts] )+ identifier[meth_call_str] | def method_repr_string(inst_str, meth_str, arg_strs=None, allow_mixed_seps=True):
"""Return a repr string for a method that respects line width.
This function is useful to generate a ``repr`` string for a derived
class that is created through a method, for instance ::
functional.translated(x)
as a better way of representing ::
FunctionalTranslation(functional, x)
Parameters
----------
inst_str : str
Stringification of a class instance.
meth_str : str
Name of the method (not including the ``'.'``).
arg_strs : sequence of str, optional
Stringification of the arguments to the method.
allow_mixed_seps : bool, optional
If ``False`` and the argument strings do not fit on one line, use
``',\\n'`` to separate all strings.
By default, a mixture of ``', '`` and ``',\\n'`` is used to fit
as much on one line as possible.
In case some of the ``arg_strs`` span multiple lines, it is
usually advisable to set ``allow_mixed_seps`` to ``False`` since
the result tends to be more readable that way.
Returns
-------
meth_repr_str : str
Concatenation of all strings in a way that the line width
is respected.
Examples
--------
>>> inst_str = 'MyClass'
>>> meth_str = 'empty'
>>> arg_strs = []
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass.empty()
>>> inst_str = 'MyClass'
>>> meth_str = 'fromfile'
>>> arg_strs = ["'tmpfile.txt'"]
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass.fromfile('tmpfile.txt')
>>> inst_str = "MyClass('init string')"
>>> meth_str = 'method'
>>> arg_strs = ['2.0']
>>> print(method_repr_string(inst_str, meth_str, arg_strs))
MyClass('init string').method(2.0)
>>> long_inst_str = (
... "MyClass('long string that will definitely trigger a line break')"
... )
>>> meth_str = 'method'
>>> long_arg1 = "'long argument string that should come on the next line'"
>>> arg2 = 'param1=1'
>>> arg3 = 'param2=2.0'
>>> arg_strs = [long_arg1, arg2, arg3]
>>> print(method_repr_string(long_inst_str, meth_str, arg_strs))
MyClass(
'long string that will definitely trigger a line break'
).method(
'long argument string that should come on the next line',
param1=1, param2=2.0
)
>>> print(method_repr_string(long_inst_str, meth_str, arg_strs,
... allow_mixed_seps=False))
MyClass(
'long string that will definitely trigger a line break'
).method(
'long argument string that should come on the next line',
param1=1,
param2=2.0
)
"""
linewidth = np.get_printoptions()['linewidth']
# Part up to the method name
if len(inst_str) + 1 + len(meth_str) + 1 <= linewidth or '(' not in inst_str:
init_parts = [inst_str, meth_str]
# Length of the line to the end of the method name
meth_line_start_len = len(inst_str) + 1 + len(meth_str) # depends on [control=['if'], data=[]]
else:
# TODO(kohr-h): use `maxsplit=1` kwarg, not supported in Py 2
(left, rest) = inst_str.split('(', 1)
(right, middle) = rest[::-1].split(')', 1)
(middle, right) = (middle[::-1], right[::-1])
if middle.startswith('\n') and middle.endswith('\n'):
# Already on multiple lines
new_inst_str = inst_str # depends on [control=['if'], data=[]]
else:
new_inst_str = '(\n'.join([left, indent(middle)]) + '\n)' + right
# Length of the line to the end of the method name, consisting of
# ')' + '.' + <method name>
meth_line_start_len = 1 + 1 + len(meth_str)
init_parts = [new_inst_str, meth_str]
# Method call part
arg_str_oneline = ', '.join(arg_strs)
if meth_line_start_len + 1 + len(arg_str_oneline) + 1 <= linewidth:
meth_call_str = '(' + arg_str_oneline + ')' # depends on [control=['if'], data=[]]
elif not arg_str_oneline:
meth_call_str = '(\n)' # depends on [control=['if'], data=[]]
else:
if allow_mixed_seps:
arg_seps = _separators(arg_strs, linewidth - 4) # indented # depends on [control=['if'], data=[]]
else:
arg_seps = [',\n'] * (len(arg_strs) - 1)
full_arg_str = ''
for (arg_str, sep) in zip_longest(arg_strs, arg_seps, fillvalue=''):
full_arg_str += arg_str + sep # depends on [control=['for'], data=[]]
meth_call_str = '(\n' + indent(full_arg_str) + '\n)'
return '.'.join(init_parts) + meth_call_str |
def serial_udb_extra_f6_send(self, sue_PITCHGAIN, sue_PITCHKD, sue_RUDDER_ELEV_MIX, sue_ROLL_ELEV_MIX, sue_ELEVATOR_BOOST, force_mavlink1=False):
    '''
    Backwards compatible version of SERIAL_UDB_EXTRA F6: format

    sue_PITCHGAIN             : Serial UDB Extra PITCHGAIN Proportional Control (float)
    sue_PITCHKD               : Serial UDB Extra Pitch Rate Control (float)
    sue_RUDDER_ELEV_MIX       : Serial UDB Extra Rudder to Elevator Mix (float)
    sue_ROLL_ELEV_MIX         : Serial UDB Extra Roll to Elevator Mix (float)
    sue_ELEVATOR_BOOST        : Gain For Boosting Manual Elevator control When Plane Stabilized (float)

    '''
    # Build the packed message first, then hand it to the transport layer.
    msg = self.serial_udb_extra_f6_encode(sue_PITCHGAIN, sue_PITCHKD,
                                          sue_RUDDER_ELEV_MIX,
                                          sue_ROLL_ELEV_MIX,
                                          sue_ELEVATOR_BOOST)
    return self.send(msg, force_mavlink1=force_mavlink1)
constant[
Backwards compatible version of SERIAL_UDB_EXTRA F6: format
sue_PITCHGAIN : Serial UDB Extra PITCHGAIN Proportional Control (float)
sue_PITCHKD : Serial UDB Extra Pitch Rate Control (float)
sue_RUDDER_ELEV_MIX : Serial UDB Extra Rudder to Elevator Mix (float)
sue_ROLL_ELEV_MIX : Serial UDB Extra Roll to Elevator Mix (float)
sue_ELEVATOR_BOOST : Gain For Boosting Manual Elevator control When Plane Stabilized (float)
]
return[call[name[self].send, parameter[call[name[self].serial_udb_extra_f6_encode, parameter[name[sue_PITCHGAIN], name[sue_PITCHKD], name[sue_RUDDER_ELEV_MIX], name[sue_ROLL_ELEV_MIX], name[sue_ELEVATOR_BOOST]]]]]] | keyword[def] identifier[serial_udb_extra_f6_send] ( identifier[self] , identifier[sue_PITCHGAIN] , identifier[sue_PITCHKD] , identifier[sue_RUDDER_ELEV_MIX] , identifier[sue_ROLL_ELEV_MIX] , identifier[sue_ELEVATOR_BOOST] , identifier[force_mavlink1] = keyword[False] ):
literal[string]
keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[serial_udb_extra_f6_encode] ( identifier[sue_PITCHGAIN] , identifier[sue_PITCHKD] , identifier[sue_RUDDER_ELEV_MIX] , identifier[sue_ROLL_ELEV_MIX] , identifier[sue_ELEVATOR_BOOST] ), identifier[force_mavlink1] = identifier[force_mavlink1] ) | def serial_udb_extra_f6_send(self, sue_PITCHGAIN, sue_PITCHKD, sue_RUDDER_ELEV_MIX, sue_ROLL_ELEV_MIX, sue_ELEVATOR_BOOST, force_mavlink1=False):
"""
Backwards compatible version of SERIAL_UDB_EXTRA F6: format
sue_PITCHGAIN : Serial UDB Extra PITCHGAIN Proportional Control (float)
sue_PITCHKD : Serial UDB Extra Pitch Rate Control (float)
sue_RUDDER_ELEV_MIX : Serial UDB Extra Rudder to Elevator Mix (float)
sue_ROLL_ELEV_MIX : Serial UDB Extra Roll to Elevator Mix (float)
sue_ELEVATOR_BOOST : Gain For Boosting Manual Elevator control When Plane Stabilized (float)
"""
return self.send(self.serial_udb_extra_f6_encode(sue_PITCHGAIN, sue_PITCHKD, sue_RUDDER_ELEV_MIX, sue_ROLL_ELEV_MIX, sue_ELEVATOR_BOOST), force_mavlink1=force_mavlink1) |
def midi_event(self, event_type, channel, param1, param2=None):
    """Convert and return the parameters as a MIDI event in bytes.

    The result is ``delta_time`` followed by the status byte (event type
    in the high nibble, channel in the low nibble) and one or two data
    bytes.

    :param event_type: MIDI event type nibble (0-15, e.g. 0x9 = note on).
    :param channel: MIDI channel (0-15), packed into the low nibble.
    :param param1: First data byte of the event.
    :param param2: Optional second data byte.
    :return: The complete event as bytes, including the delta time.
    """
    # The event type occupies only the high nibble of the status byte, so
    # it must fit in 4 bits.  The previous bound (0x80) admitted values
    # for which '%x%x' produced an odd-length hex string and a2b_hex
    # raised an error, so no valid caller is affected by tightening it.
    assert event_type < 0x10 and event_type >= 0
    assert channel < 16 and channel >= 0
    tc = a2b_hex('%x%x' % (event_type, channel))
    if param2 is None:
        params = a2b_hex('%02x' % param1)
    else:
        params = a2b_hex('%02x%02x' % (param1, param2))
    return self.delta_time + tc + params
constant[Convert and return the paraters as a MIDI event in bytes.]
assert[<ast.BoolOp object at 0x7da1b2347850>]
assert[<ast.BoolOp object at 0x7da18f00ffa0>]
variable[tc] assign[=] call[name[a2b_hex], parameter[binary_operation[constant[%x%x] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00da50>, <ast.Name object at 0x7da18f00fe50>]]]]]
if compare[name[param2] is constant[None]] begin[:]
variable[params] assign[=] call[name[a2b_hex], parameter[binary_operation[constant[%02x] <ast.Mod object at 0x7da2590d6920> name[param1]]]]
return[binary_operation[binary_operation[name[self].delta_time + name[tc]] + name[params]]] | keyword[def] identifier[midi_event] ( identifier[self] , identifier[event_type] , identifier[channel] , identifier[param1] , identifier[param2] = keyword[None] ):
literal[string]
keyword[assert] identifier[event_type] < literal[int] keyword[and] identifier[event_type] >= literal[int]
keyword[assert] identifier[channel] < literal[int] keyword[and] identifier[channel] >= literal[int]
identifier[tc] = identifier[a2b_hex] ( literal[string] %( identifier[event_type] , identifier[channel] ))
keyword[if] identifier[param2] keyword[is] keyword[None] :
identifier[params] = identifier[a2b_hex] ( literal[string] % identifier[param1] )
keyword[else] :
identifier[params] = identifier[a2b_hex] ( literal[string] %( identifier[param1] , identifier[param2] ))
keyword[return] identifier[self] . identifier[delta_time] + identifier[tc] + identifier[params] | def midi_event(self, event_type, channel, param1, param2=None):
"""Convert and return the paraters as a MIDI event in bytes."""
assert event_type < 128 and event_type >= 0
assert channel < 16 and channel >= 0
tc = a2b_hex('%x%x' % (event_type, channel))
if param2 is None:
params = a2b_hex('%02x' % param1) # depends on [control=['if'], data=[]]
else:
params = a2b_hex('%02x%02x' % (param1, param2))
return self.delta_time + tc + params |
def bootstrap_isc(iscs, pairwise=False, summary_statistic='median',
n_bootstraps=1000, ci_percentile=95, random_state=None):
"""One-sample group-level bootstrap hypothesis test for ISCs
For ISCs from one more voxels or ROIs, resample subjects with replacement
to construct a bootstrap distribution. Input is a list or ndarray of
ISCs for a single voxel/ROI, or an ISCs-by-voxels ndarray. ISC values
should be either N ISC values for N subjects in the leave-one-out appraoch
(pairwise=False), N(N-1)/2 ISC values for N subjects in the pairwise
approach (pairwise=True). In the pairwise approach, ISC values should
correspond to the vectorized upper triangle of a square corrlation matrix
(see scipy.stats.distance.squareform). Shifts bootstrap distribution by
actual summary statistic (effectively to zero) for two-tailed null
hypothesis test (Hall & Wilson, 1991). Uses subject-wise (not pair-wise)
resampling in the pairwise approach. Returns the observed ISC, the
confidence interval, and a p-value for the bootstrap hypothesis test, as
well as the bootstrap distribution of summary statistics. According to
Chen et al., 2016, this is the preferred nonparametric approach for
controlling false positive rates (FPR) for one-sample tests in the pairwise
approach.
The implementation is based on the work in [Chen2016]_ and
[HallWilson1991]_.
.. [HallWilson1991] "Two guidelines for bootstrap hypothesis testing.",
P. Hall, S. R., Wilson, 1991, Biometrics, 757-762.
https://doi.org/10.2307/2532163
Parameters
----------
iscs : list or ndarray, ISCs by voxels array
ISC values for one or more voxels
pairwise : bool, default: False
Indicator of pairwise or leave-one-out, should match ISCs structure
summary_statistic : str, default: 'median'
Summary statistic, either 'median' (default) or 'mean'
n_bootstraps : int, default: 1000
Number of bootstrap samples (subject-level with replacement)
ci_percentile : int, default: 95
Percentile for computing confidence intervals
random_state = int or None, default: None
Initial random seed
Returns
-------
observed : float, median (or mean) ISC value
Summary statistic for actual ISCs
ci : tuple, bootstrap confidence intervals
Confidence intervals generated from bootstrap distribution
p : float, p-value
p-value based on bootstrap hypothesis test
distribution : ndarray, bootstraps by voxels (optional)
Bootstrap distribution if return_bootstrap=True
"""
# Standardize structure of input data
iscs, n_subjects, n_voxels = _check_isc_input(iscs, pairwise=pairwise)
# Check for valid summary statistic
if summary_statistic not in ('mean', 'median'):
raise ValueError("Summary statistic must be 'mean' or 'median'")
# Compute summary statistic for observed ISCs
observed = compute_summary_statistic(iscs,
summary_statistic=summary_statistic,
axis=0)
# Set up an empty list to build our bootstrap distribution
distribution = []
# Loop through n bootstrap iterations and populate distribution
for i in np.arange(n_bootstraps):
# Random seed to be deterministically re-randomized at each iteration
if isinstance(random_state, np.random.RandomState):
prng = random_state
else:
prng = np.random.RandomState(random_state)
# Randomly sample subject IDs with replacement
subject_sample = sorted(prng.choice(np.arange(n_subjects),
size=n_subjects))
# Squareform and shuffle rows/columns of pairwise ISC matrix to
# to retain correlation structure among ISCs, then get triangle
if pairwise:
# Loop through voxels
isc_sample = []
for voxel_iscs in iscs.T:
# Square the triangle and fill diagonal
voxel_iscs = squareform(voxel_iscs)
np.fill_diagonal(voxel_iscs, 1)
# Check that pairwise ISC matrix is square and symmetric
assert voxel_iscs.shape[0] == voxel_iscs.shape[1]
assert np.allclose(voxel_iscs, voxel_iscs.T)
# Shuffle square correlation matrix and get triangle
voxel_sample = voxel_iscs[subject_sample, :][:, subject_sample]
voxel_sample = squareform(voxel_sample, checks=False)
# Censor off-diagonal 1s for same-subject pairs
voxel_sample[voxel_sample == 1.] = np.NaN
isc_sample.append(voxel_sample)
isc_sample = np.column_stack(isc_sample)
# Get simple bootstrap sample if not pairwise
elif not pairwise:
isc_sample = iscs[subject_sample, :]
# Compute summary statistic for bootstrap ISCs per voxel
# (alternatively could construct distribution for all voxels
# then compute statistics, but larger memory footprint)
distribution.append(compute_summary_statistic(
isc_sample,
summary_statistic=summary_statistic,
axis=0))
# Update random state for next iteration
random_state = np.random.RandomState(prng.randint(0, MAX_RANDOM_SEED))
# Convert distribution to numpy array
distribution = np.array(distribution)
# Compute CIs of median from bootstrap distribution (default: 95%)
ci = (np.percentile(distribution, (100 - ci_percentile)/2, axis=0),
np.percentile(distribution, ci_percentile + (100 - ci_percentile)/2,
axis=0))
# Shift bootstrap distribution to 0 for hypothesis test
shifted = distribution - observed
# Get p-value for actual median from shifted distribution
p = p_from_null(observed, shifted,
side='two-sided', exact=False,
axis=0)
return observed, ci, p, distribution | def function[bootstrap_isc, parameter[iscs, pairwise, summary_statistic, n_bootstraps, ci_percentile, random_state]]:
constant[One-sample group-level bootstrap hypothesis test for ISCs
For ISCs from one more voxels or ROIs, resample subjects with replacement
to construct a bootstrap distribution. Input is a list or ndarray of
ISCs for a single voxel/ROI, or an ISCs-by-voxels ndarray. ISC values
should be either N ISC values for N subjects in the leave-one-out appraoch
(pairwise=False), N(N-1)/2 ISC values for N subjects in the pairwise
approach (pairwise=True). In the pairwise approach, ISC values should
correspond to the vectorized upper triangle of a square corrlation matrix
(see scipy.stats.distance.squareform). Shifts bootstrap distribution by
actual summary statistic (effectively to zero) for two-tailed null
hypothesis test (Hall & Wilson, 1991). Uses subject-wise (not pair-wise)
resampling in the pairwise approach. Returns the observed ISC, the
confidence interval, and a p-value for the bootstrap hypothesis test, as
well as the bootstrap distribution of summary statistics. According to
Chen et al., 2016, this is the preferred nonparametric approach for
controlling false positive rates (FPR) for one-sample tests in the pairwise
approach.
The implementation is based on the work in [Chen2016]_ and
[HallWilson1991]_.
.. [HallWilson1991] "Two guidelines for bootstrap hypothesis testing.",
P. Hall, S. R., Wilson, 1991, Biometrics, 757-762.
https://doi.org/10.2307/2532163
Parameters
----------
iscs : list or ndarray, ISCs by voxels array
ISC values for one or more voxels
pairwise : bool, default: False
Indicator of pairwise or leave-one-out, should match ISCs structure
summary_statistic : str, default: 'median'
Summary statistic, either 'median' (default) or 'mean'
n_bootstraps : int, default: 1000
Number of bootstrap samples (subject-level with replacement)
ci_percentile : int, default: 95
Percentile for computing confidence intervals
random_state = int or None, default: None
Initial random seed
Returns
-------
observed : float, median (or mean) ISC value
Summary statistic for actual ISCs
ci : tuple, bootstrap confidence intervals
Confidence intervals generated from bootstrap distribution
p : float, p-value
p-value based on bootstrap hypothesis test
distribution : ndarray, bootstraps by voxels (optional)
Bootstrap distribution if return_bootstrap=True
]
<ast.Tuple object at 0x7da1b07793f0> assign[=] call[name[_check_isc_input], parameter[name[iscs]]]
if compare[name[summary_statistic] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b077b3d0>, <ast.Constant object at 0x7da1b077acb0>]]] begin[:]
<ast.Raise object at 0x7da1b07787c0>
variable[observed] assign[=] call[name[compute_summary_statistic], parameter[name[iscs]]]
variable[distribution] assign[=] list[[]]
for taget[name[i]] in starred[call[name[np].arange, parameter[name[n_bootstraps]]]] begin[:]
if call[name[isinstance], parameter[name[random_state], name[np].random.RandomState]] begin[:]
variable[prng] assign[=] name[random_state]
variable[subject_sample] assign[=] call[name[sorted], parameter[call[name[prng].choice, parameter[call[name[np].arange, parameter[name[n_subjects]]]]]]]
if name[pairwise] begin[:]
variable[isc_sample] assign[=] list[[]]
for taget[name[voxel_iscs]] in starred[name[iscs].T] begin[:]
variable[voxel_iscs] assign[=] call[name[squareform], parameter[name[voxel_iscs]]]
call[name[np].fill_diagonal, parameter[name[voxel_iscs], constant[1]]]
assert[compare[call[name[voxel_iscs].shape][constant[0]] equal[==] call[name[voxel_iscs].shape][constant[1]]]]
assert[call[name[np].allclose, parameter[name[voxel_iscs], name[voxel_iscs].T]]]
variable[voxel_sample] assign[=] call[call[name[voxel_iscs]][tuple[[<ast.Name object at 0x7da1b0793cd0>, <ast.Slice object at 0x7da1b0791120>]]]][tuple[[<ast.Slice object at 0x7da1b0791600>, <ast.Name object at 0x7da1b0791780>]]]
variable[voxel_sample] assign[=] call[name[squareform], parameter[name[voxel_sample]]]
call[name[voxel_sample]][compare[name[voxel_sample] equal[==] constant[1.0]]] assign[=] name[np].NaN
call[name[isc_sample].append, parameter[name[voxel_sample]]]
variable[isc_sample] assign[=] call[name[np].column_stack, parameter[name[isc_sample]]]
call[name[distribution].append, parameter[call[name[compute_summary_statistic], parameter[name[isc_sample]]]]]
variable[random_state] assign[=] call[name[np].random.RandomState, parameter[call[name[prng].randint, parameter[constant[0], name[MAX_RANDOM_SEED]]]]]
variable[distribution] assign[=] call[name[np].array, parameter[name[distribution]]]
variable[ci] assign[=] tuple[[<ast.Call object at 0x7da1b07319f0>, <ast.Call object at 0x7da1b0731270>]]
variable[shifted] assign[=] binary_operation[name[distribution] - name[observed]]
variable[p] assign[=] call[name[p_from_null], parameter[name[observed], name[shifted]]]
return[tuple[[<ast.Name object at 0x7da1b07321d0>, <ast.Name object at 0x7da1b0732bf0>, <ast.Name object at 0x7da1b0732080>, <ast.Name object at 0x7da1b0732230>]]] | keyword[def] identifier[bootstrap_isc] ( identifier[iscs] , identifier[pairwise] = keyword[False] , identifier[summary_statistic] = literal[string] ,
identifier[n_bootstraps] = literal[int] , identifier[ci_percentile] = literal[int] , identifier[random_state] = keyword[None] ):
literal[string]
identifier[iscs] , identifier[n_subjects] , identifier[n_voxels] = identifier[_check_isc_input] ( identifier[iscs] , identifier[pairwise] = identifier[pairwise] )
keyword[if] identifier[summary_statistic] keyword[not] keyword[in] ( literal[string] , literal[string] ):
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[observed] = identifier[compute_summary_statistic] ( identifier[iscs] ,
identifier[summary_statistic] = identifier[summary_statistic] ,
identifier[axis] = literal[int] )
identifier[distribution] =[]
keyword[for] identifier[i] keyword[in] identifier[np] . identifier[arange] ( identifier[n_bootstraps] ):
keyword[if] identifier[isinstance] ( identifier[random_state] , identifier[np] . identifier[random] . identifier[RandomState] ):
identifier[prng] = identifier[random_state]
keyword[else] :
identifier[prng] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[random_state] )
identifier[subject_sample] = identifier[sorted] ( identifier[prng] . identifier[choice] ( identifier[np] . identifier[arange] ( identifier[n_subjects] ),
identifier[size] = identifier[n_subjects] ))
keyword[if] identifier[pairwise] :
identifier[isc_sample] =[]
keyword[for] identifier[voxel_iscs] keyword[in] identifier[iscs] . identifier[T] :
identifier[voxel_iscs] = identifier[squareform] ( identifier[voxel_iscs] )
identifier[np] . identifier[fill_diagonal] ( identifier[voxel_iscs] , literal[int] )
keyword[assert] identifier[voxel_iscs] . identifier[shape] [ literal[int] ]== identifier[voxel_iscs] . identifier[shape] [ literal[int] ]
keyword[assert] identifier[np] . identifier[allclose] ( identifier[voxel_iscs] , identifier[voxel_iscs] . identifier[T] )
identifier[voxel_sample] = identifier[voxel_iscs] [ identifier[subject_sample] ,:][:, identifier[subject_sample] ]
identifier[voxel_sample] = identifier[squareform] ( identifier[voxel_sample] , identifier[checks] = keyword[False] )
identifier[voxel_sample] [ identifier[voxel_sample] == literal[int] ]= identifier[np] . identifier[NaN]
identifier[isc_sample] . identifier[append] ( identifier[voxel_sample] )
identifier[isc_sample] = identifier[np] . identifier[column_stack] ( identifier[isc_sample] )
keyword[elif] keyword[not] identifier[pairwise] :
identifier[isc_sample] = identifier[iscs] [ identifier[subject_sample] ,:]
identifier[distribution] . identifier[append] ( identifier[compute_summary_statistic] (
identifier[isc_sample] ,
identifier[summary_statistic] = identifier[summary_statistic] ,
identifier[axis] = literal[int] ))
identifier[random_state] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[prng] . identifier[randint] ( literal[int] , identifier[MAX_RANDOM_SEED] ))
identifier[distribution] = identifier[np] . identifier[array] ( identifier[distribution] )
identifier[ci] =( identifier[np] . identifier[percentile] ( identifier[distribution] ,( literal[int] - identifier[ci_percentile] )/ literal[int] , identifier[axis] = literal[int] ),
identifier[np] . identifier[percentile] ( identifier[distribution] , identifier[ci_percentile] +( literal[int] - identifier[ci_percentile] )/ literal[int] ,
identifier[axis] = literal[int] ))
identifier[shifted] = identifier[distribution] - identifier[observed]
identifier[p] = identifier[p_from_null] ( identifier[observed] , identifier[shifted] ,
identifier[side] = literal[string] , identifier[exact] = keyword[False] ,
identifier[axis] = literal[int] )
keyword[return] identifier[observed] , identifier[ci] , identifier[p] , identifier[distribution] | def bootstrap_isc(iscs, pairwise=False, summary_statistic='median', n_bootstraps=1000, ci_percentile=95, random_state=None):
"""One-sample group-level bootstrap hypothesis test for ISCs
For ISCs from one more voxels or ROIs, resample subjects with replacement
to construct a bootstrap distribution. Input is a list or ndarray of
ISCs for a single voxel/ROI, or an ISCs-by-voxels ndarray. ISC values
should be either N ISC values for N subjects in the leave-one-out appraoch
(pairwise=False), N(N-1)/2 ISC values for N subjects in the pairwise
approach (pairwise=True). In the pairwise approach, ISC values should
correspond to the vectorized upper triangle of a square corrlation matrix
(see scipy.stats.distance.squareform). Shifts bootstrap distribution by
actual summary statistic (effectively to zero) for two-tailed null
hypothesis test (Hall & Wilson, 1991). Uses subject-wise (not pair-wise)
resampling in the pairwise approach. Returns the observed ISC, the
confidence interval, and a p-value for the bootstrap hypothesis test, as
well as the bootstrap distribution of summary statistics. According to
Chen et al., 2016, this is the preferred nonparametric approach for
controlling false positive rates (FPR) for one-sample tests in the pairwise
approach.
The implementation is based on the work in [Chen2016]_ and
[HallWilson1991]_.
.. [HallWilson1991] "Two guidelines for bootstrap hypothesis testing.",
P. Hall, S. R., Wilson, 1991, Biometrics, 757-762.
https://doi.org/10.2307/2532163
Parameters
----------
iscs : list or ndarray, ISCs by voxels array
ISC values for one or more voxels
pairwise : bool, default: False
Indicator of pairwise or leave-one-out, should match ISCs structure
summary_statistic : str, default: 'median'
Summary statistic, either 'median' (default) or 'mean'
n_bootstraps : int, default: 1000
Number of bootstrap samples (subject-level with replacement)
ci_percentile : int, default: 95
Percentile for computing confidence intervals
random_state = int or None, default: None
Initial random seed
Returns
-------
observed : float, median (or mean) ISC value
Summary statistic for actual ISCs
ci : tuple, bootstrap confidence intervals
Confidence intervals generated from bootstrap distribution
p : float, p-value
p-value based on bootstrap hypothesis test
distribution : ndarray, bootstraps by voxels (optional)
Bootstrap distribution if return_bootstrap=True
"""
# Standardize structure of input data
(iscs, n_subjects, n_voxels) = _check_isc_input(iscs, pairwise=pairwise)
# Check for valid summary statistic
if summary_statistic not in ('mean', 'median'):
raise ValueError("Summary statistic must be 'mean' or 'median'") # depends on [control=['if'], data=[]]
# Compute summary statistic for observed ISCs
observed = compute_summary_statistic(iscs, summary_statistic=summary_statistic, axis=0)
# Set up an empty list to build our bootstrap distribution
distribution = []
# Loop through n bootstrap iterations and populate distribution
for i in np.arange(n_bootstraps):
# Random seed to be deterministically re-randomized at each iteration
if isinstance(random_state, np.random.RandomState):
prng = random_state # depends on [control=['if'], data=[]]
else:
prng = np.random.RandomState(random_state)
# Randomly sample subject IDs with replacement
subject_sample = sorted(prng.choice(np.arange(n_subjects), size=n_subjects))
# Squareform and shuffle rows/columns of pairwise ISC matrix to
# to retain correlation structure among ISCs, then get triangle
if pairwise:
# Loop through voxels
isc_sample = []
for voxel_iscs in iscs.T:
# Square the triangle and fill diagonal
voxel_iscs = squareform(voxel_iscs)
np.fill_diagonal(voxel_iscs, 1)
# Check that pairwise ISC matrix is square and symmetric
assert voxel_iscs.shape[0] == voxel_iscs.shape[1]
assert np.allclose(voxel_iscs, voxel_iscs.T)
# Shuffle square correlation matrix and get triangle
voxel_sample = voxel_iscs[subject_sample, :][:, subject_sample]
voxel_sample = squareform(voxel_sample, checks=False)
# Censor off-diagonal 1s for same-subject pairs
voxel_sample[voxel_sample == 1.0] = np.NaN
isc_sample.append(voxel_sample) # depends on [control=['for'], data=['voxel_iscs']]
isc_sample = np.column_stack(isc_sample) # depends on [control=['if'], data=[]]
# Get simple bootstrap sample if not pairwise
elif not pairwise:
isc_sample = iscs[subject_sample, :] # depends on [control=['if'], data=[]]
# Compute summary statistic for bootstrap ISCs per voxel
# (alternatively could construct distribution for all voxels
# then compute statistics, but larger memory footprint)
distribution.append(compute_summary_statistic(isc_sample, summary_statistic=summary_statistic, axis=0))
# Update random state for next iteration
random_state = np.random.RandomState(prng.randint(0, MAX_RANDOM_SEED)) # depends on [control=['for'], data=[]]
# Convert distribution to numpy array
distribution = np.array(distribution)
# Compute CIs of median from bootstrap distribution (default: 95%)
ci = (np.percentile(distribution, (100 - ci_percentile) / 2, axis=0), np.percentile(distribution, ci_percentile + (100 - ci_percentile) / 2, axis=0))
# Shift bootstrap distribution to 0 for hypothesis test
shifted = distribution - observed
# Get p-value for actual median from shifted distribution
p = p_from_null(observed, shifted, side='two-sided', exact=False, axis=0)
return (observed, ci, p, distribution) |
def _second(self):
"""Find Smith normal form for Right-low 2x2 matrix"""
self._second_one_loop()
A = self._A
if A[2, 1] == 0:
return True
elif A[2, 1] % A[1, 1] == 0:
self._second_finalize()
self._Ps += self._L
self._L = []
return True
else:
return False | def function[_second, parameter[self]]:
constant[Find Smith normal form for Right-low 2x2 matrix]
call[name[self]._second_one_loop, parameter[]]
variable[A] assign[=] name[self]._A
if compare[call[name[A]][tuple[[<ast.Constant object at 0x7da20cabf460>, <ast.Constant object at 0x7da20cabde70>]]] equal[==] constant[0]] begin[:]
return[constant[True]] | keyword[def] identifier[_second] ( identifier[self] ):
literal[string]
identifier[self] . identifier[_second_one_loop] ()
identifier[A] = identifier[self] . identifier[_A]
keyword[if] identifier[A] [ literal[int] , literal[int] ]== literal[int] :
keyword[return] keyword[True]
keyword[elif] identifier[A] [ literal[int] , literal[int] ]% identifier[A] [ literal[int] , literal[int] ]== literal[int] :
identifier[self] . identifier[_second_finalize] ()
identifier[self] . identifier[_Ps] += identifier[self] . identifier[_L]
identifier[self] . identifier[_L] =[]
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def _second(self):
"""Find Smith normal form for Right-low 2x2 matrix"""
self._second_one_loop()
A = self._A
if A[2, 1] == 0:
return True # depends on [control=['if'], data=[]]
elif A[2, 1] % A[1, 1] == 0:
self._second_finalize()
self._Ps += self._L
self._L = []
return True # depends on [control=['if'], data=[]]
else:
return False |
def add_data(self, new_cols=None):
"""Adds a column with the requested data.
If you want to see for example the mass, the colormap used in
jmol and the block of the element, just use::
['mass', 'jmol_color', 'block']
The underlying ``pd.DataFrame`` can be accessed with
``constants.elements``.
To see all available keys use ``constants.elements.info()``.
The data comes from the module `mendeleev
<http://mendeleev.readthedocs.org/en/latest/>`_ written
by Lukasz Mentel.
Please note that I added three columns to the mendeleev data::
['atomic_radius_cc', 'atomic_radius_gv', 'gv_color',
'valency']
The ``atomic_radius_cc`` is used by default by this module
for determining bond lengths.
The three others are taken from the MOLCAS grid viewer written
by Valera Veryazov.
Args:
new_cols (str): You can pass also just one value.
E.g. ``'mass'`` is equivalent to ``['mass']``. If
``new_cols`` is ``None`` all available data
is returned.
inplace (bool):
Returns:
Cartesian:
"""
atoms = self['atom']
data = constants.elements
if pd.api.types.is_list_like(new_cols):
new_cols = set(new_cols)
elif new_cols is None:
new_cols = set(data.columns)
else:
new_cols = [new_cols]
new_frame = data.loc[atoms, set(new_cols) - set(self.columns)]
new_frame.index = self.index
return self.__class__(pd.concat([self._frame, new_frame], axis=1)) | def function[add_data, parameter[self, new_cols]]:
constant[Adds a column with the requested data.
If you want to see for example the mass, the colormap used in
jmol and the block of the element, just use::
['mass', 'jmol_color', 'block']
The underlying ``pd.DataFrame`` can be accessed with
``constants.elements``.
To see all available keys use ``constants.elements.info()``.
The data comes from the module `mendeleev
<http://mendeleev.readthedocs.org/en/latest/>`_ written
by Lukasz Mentel.
Please note that I added three columns to the mendeleev data::
['atomic_radius_cc', 'atomic_radius_gv', 'gv_color',
'valency']
The ``atomic_radius_cc`` is used by default by this module
for determining bond lengths.
The three others are taken from the MOLCAS grid viewer written
by Valera Veryazov.
Args:
new_cols (str): You can pass also just one value.
E.g. ``'mass'`` is equivalent to ``['mass']``. If
``new_cols`` is ``None`` all available data
is returned.
inplace (bool):
Returns:
Cartesian:
]
variable[atoms] assign[=] call[name[self]][constant[atom]]
variable[data] assign[=] name[constants].elements
if call[name[pd].api.types.is_list_like, parameter[name[new_cols]]] begin[:]
variable[new_cols] assign[=] call[name[set], parameter[name[new_cols]]]
variable[new_frame] assign[=] call[name[data].loc][tuple[[<ast.Name object at 0x7da18dc04460>, <ast.BinOp object at 0x7da18dc07fa0>]]]
name[new_frame].index assign[=] name[self].index
return[call[name[self].__class__, parameter[call[name[pd].concat, parameter[list[[<ast.Attribute object at 0x7da18f58da80>, <ast.Name object at 0x7da18f58f700>]]]]]]] | keyword[def] identifier[add_data] ( identifier[self] , identifier[new_cols] = keyword[None] ):
literal[string]
identifier[atoms] = identifier[self] [ literal[string] ]
identifier[data] = identifier[constants] . identifier[elements]
keyword[if] identifier[pd] . identifier[api] . identifier[types] . identifier[is_list_like] ( identifier[new_cols] ):
identifier[new_cols] = identifier[set] ( identifier[new_cols] )
keyword[elif] identifier[new_cols] keyword[is] keyword[None] :
identifier[new_cols] = identifier[set] ( identifier[data] . identifier[columns] )
keyword[else] :
identifier[new_cols] =[ identifier[new_cols] ]
identifier[new_frame] = identifier[data] . identifier[loc] [ identifier[atoms] , identifier[set] ( identifier[new_cols] )- identifier[set] ( identifier[self] . identifier[columns] )]
identifier[new_frame] . identifier[index] = identifier[self] . identifier[index]
keyword[return] identifier[self] . identifier[__class__] ( identifier[pd] . identifier[concat] ([ identifier[self] . identifier[_frame] , identifier[new_frame] ], identifier[axis] = literal[int] )) | def add_data(self, new_cols=None):
"""Adds a column with the requested data.
If you want to see for example the mass, the colormap used in
jmol and the block of the element, just use::
['mass', 'jmol_color', 'block']
The underlying ``pd.DataFrame`` can be accessed with
``constants.elements``.
To see all available keys use ``constants.elements.info()``.
The data comes from the module `mendeleev
<http://mendeleev.readthedocs.org/en/latest/>`_ written
by Lukasz Mentel.
Please note that I added three columns to the mendeleev data::
['atomic_radius_cc', 'atomic_radius_gv', 'gv_color',
'valency']
The ``atomic_radius_cc`` is used by default by this module
for determining bond lengths.
The three others are taken from the MOLCAS grid viewer written
by Valera Veryazov.
Args:
new_cols (str): You can pass also just one value.
E.g. ``'mass'`` is equivalent to ``['mass']``. If
``new_cols`` is ``None`` all available data
is returned.
inplace (bool):
Returns:
Cartesian:
"""
atoms = self['atom']
data = constants.elements
if pd.api.types.is_list_like(new_cols):
new_cols = set(new_cols) # depends on [control=['if'], data=[]]
elif new_cols is None:
new_cols = set(data.columns) # depends on [control=['if'], data=['new_cols']]
else:
new_cols = [new_cols]
new_frame = data.loc[atoms, set(new_cols) - set(self.columns)]
new_frame.index = self.index
return self.__class__(pd.concat([self._frame, new_frame], axis=1)) |
def set_default_alarm_ranges(self, parameter, watch=None, warning=None,
distress=None, critical=None, severe=None,
min_violations=1):
"""
Generate out-of-limit alarms for a parameter using the specified
alarm ranges.
This replaces any previous default alarms on this parameter.
.. note::
Contextual range sets take precedence over the default alarm
ranges. See :meth:`set_alarm_range_sets` for setting contextual
range sets.
:param str parameter: Either a fully-qualified XTCE name or an alias
in the format ``NAMESPACE/NAME``.
:param (float,float) watch: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) warning: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) distress: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) critical: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) severe: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param int min_violations: Minimum violations before an alarm is
generated.
"""
req = mdb_pb2.ChangeParameterRequest()
req.action = mdb_pb2.ChangeParameterRequest.SET_DEFAULT_ALARMS
if(watch or warning or distress or critical or severe):
_add_alarms(req.defaultAlarm, watch, warning, distress, critical, severe, min_violations)
url = '/mdb/{}/{}/parameters/{}'.format(
self._instance, self._processor, parameter)
response = self._client.post_proto(url, data=req.SerializeToString()) | def function[set_default_alarm_ranges, parameter[self, parameter, watch, warning, distress, critical, severe, min_violations]]:
constant[
Generate out-of-limit alarms for a parameter using the specified
alarm ranges.
This replaces any previous default alarms on this parameter.
.. note::
Contextual range sets take precedence over the default alarm
ranges. See :meth:`set_alarm_range_sets` for setting contextual
range sets.
:param str parameter: Either a fully-qualified XTCE name or an alias
in the format ``NAMESPACE/NAME``.
:param (float,float) watch: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) warning: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) distress: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) critical: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) severe: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param int min_violations: Minimum violations before an alarm is
generated.
]
variable[req] assign[=] call[name[mdb_pb2].ChangeParameterRequest, parameter[]]
name[req].action assign[=] name[mdb_pb2].ChangeParameterRequest.SET_DEFAULT_ALARMS
if <ast.BoolOp object at 0x7da1b1eec850> begin[:]
call[name[_add_alarms], parameter[name[req].defaultAlarm, name[watch], name[warning], name[distress], name[critical], name[severe], name[min_violations]]]
variable[url] assign[=] call[constant[/mdb/{}/{}/parameters/{}].format, parameter[name[self]._instance, name[self]._processor, name[parameter]]]
variable[response] assign[=] call[name[self]._client.post_proto, parameter[name[url]]] | keyword[def] identifier[set_default_alarm_ranges] ( identifier[self] , identifier[parameter] , identifier[watch] = keyword[None] , identifier[warning] = keyword[None] ,
identifier[distress] = keyword[None] , identifier[critical] = keyword[None] , identifier[severe] = keyword[None] ,
identifier[min_violations] = literal[int] ):
literal[string]
identifier[req] = identifier[mdb_pb2] . identifier[ChangeParameterRequest] ()
identifier[req] . identifier[action] = identifier[mdb_pb2] . identifier[ChangeParameterRequest] . identifier[SET_DEFAULT_ALARMS]
keyword[if] ( identifier[watch] keyword[or] identifier[warning] keyword[or] identifier[distress] keyword[or] identifier[critical] keyword[or] identifier[severe] ):
identifier[_add_alarms] ( identifier[req] . identifier[defaultAlarm] , identifier[watch] , identifier[warning] , identifier[distress] , identifier[critical] , identifier[severe] , identifier[min_violations] )
identifier[url] = literal[string] . identifier[format] (
identifier[self] . identifier[_instance] , identifier[self] . identifier[_processor] , identifier[parameter] )
identifier[response] = identifier[self] . identifier[_client] . identifier[post_proto] ( identifier[url] , identifier[data] = identifier[req] . identifier[SerializeToString] ()) | def set_default_alarm_ranges(self, parameter, watch=None, warning=None, distress=None, critical=None, severe=None, min_violations=1):
"""
Generate out-of-limit alarms for a parameter using the specified
alarm ranges.
This replaces any previous default alarms on this parameter.
.. note::
Contextual range sets take precedence over the default alarm
ranges. See :meth:`set_alarm_range_sets` for setting contextual
range sets.
:param str parameter: Either a fully-qualified XTCE name or an alias
in the format ``NAMESPACE/NAME``.
:param (float,float) watch: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) warning: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) distress: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) critical: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param (float,float) severe: Range expressed as a tuple ``(lo, hi)``
where lo and hi are assumed exclusive.
:param int min_violations: Minimum violations before an alarm is
generated.
"""
req = mdb_pb2.ChangeParameterRequest()
req.action = mdb_pb2.ChangeParameterRequest.SET_DEFAULT_ALARMS
if watch or warning or distress or critical or severe:
_add_alarms(req.defaultAlarm, watch, warning, distress, critical, severe, min_violations) # depends on [control=['if'], data=[]]
url = '/mdb/{}/{}/parameters/{}'.format(self._instance, self._processor, parameter)
response = self._client.post_proto(url, data=req.SerializeToString()) |
def dict_from_JSON(self, JSON_file):
'''
Takes a WDL-mapped json file and creates a dict containing the bindings.
The 'return' value is only used for unittests.
:param JSON_file: A required JSON file containing WDL variable bindings.
:return: Returns the self.json_dict purely for unittests.
'''
# TODO: Add context support for variables within multiple wdl files
with open(JSON_file) as data_file:
data = json.load(data_file)
for d in data:
if isinstance(data[d], basestring):
self.json_dict[d] = '"' + data[d] + '"'
else:
self.json_dict[d] = data[d]
return self.json_dict | def function[dict_from_JSON, parameter[self, JSON_file]]:
constant[
Takes a WDL-mapped json file and creates a dict containing the bindings.
The 'return' value is only used for unittests.
:param JSON_file: A required JSON file containing WDL variable bindings.
:return: Returns the self.json_dict purely for unittests.
]
with call[name[open], parameter[name[JSON_file]]] begin[:]
variable[data] assign[=] call[name[json].load, parameter[name[data_file]]]
for taget[name[d]] in starred[name[data]] begin[:]
if call[name[isinstance], parameter[call[name[data]][name[d]], name[basestring]]] begin[:]
call[name[self].json_dict][name[d]] assign[=] binary_operation[binary_operation[constant["] + call[name[data]][name[d]]] + constant["]]
return[name[self].json_dict] | keyword[def] identifier[dict_from_JSON] ( identifier[self] , identifier[JSON_file] ):
literal[string]
keyword[with] identifier[open] ( identifier[JSON_file] ) keyword[as] identifier[data_file] :
identifier[data] = identifier[json] . identifier[load] ( identifier[data_file] )
keyword[for] identifier[d] keyword[in] identifier[data] :
keyword[if] identifier[isinstance] ( identifier[data] [ identifier[d] ], identifier[basestring] ):
identifier[self] . identifier[json_dict] [ identifier[d] ]= literal[string] + identifier[data] [ identifier[d] ]+ literal[string]
keyword[else] :
identifier[self] . identifier[json_dict] [ identifier[d] ]= identifier[data] [ identifier[d] ]
keyword[return] identifier[self] . identifier[json_dict] | def dict_from_JSON(self, JSON_file):
"""
Takes a WDL-mapped json file and creates a dict containing the bindings.
The 'return' value is only used for unittests.
:param JSON_file: A required JSON file containing WDL variable bindings.
:return: Returns the self.json_dict purely for unittests.
"""
# TODO: Add context support for variables within multiple wdl files
with open(JSON_file) as data_file:
data = json.load(data_file) # depends on [control=['with'], data=['data_file']]
for d in data:
if isinstance(data[d], basestring):
self.json_dict[d] = '"' + data[d] + '"' # depends on [control=['if'], data=[]]
else:
self.json_dict[d] = data[d] # depends on [control=['for'], data=['d']]
return self.json_dict |
def _get_cache(cache_file, source_file=None):
"""Get cached taxonomy using the cPickle module.
No check is done at that stage.
:param cache_file: full path to the file holding pickled data
:param source_file: if we discover the cache is obsolete, we
will build a new cache, therefore we need the source path
of the cache
:return: (single_keywords, composite_keywords).
"""
timer_start = time.clock()
filestream = open(cache_file, "rb")
try:
cached_data = cPickle.load(filestream)
version_info = cached_data['version_info']
if version_info['rdflib'] != rdflib.__version__:
raise KeyError
except (cPickle.UnpicklingError, ImportError,
AttributeError, DeprecationWarning, EOFError):
current_app.logger.warning(
"The existing cache in %s is not readable. "
"Removing and rebuilding it." % cache_file
)
filestream.close()
os.remove(cache_file)
return _build_cache(source_file)
except KeyError:
current_app.logger.warning(
"The existing cache %s is not up-to-date. "
"Removing and rebuilding it." % cache_file
)
filestream.close()
os.remove(cache_file)
if source_file and os.path.exists(source_file):
return _build_cache(source_file)
else:
current_app.logger.error(
"The cache contains obsolete data (and it was deleted), "
"however I can't build a new cache, the source does not "
"exist or is inaccessible! - %s" % source_file
)
filestream.close()
single_keywords = cached_data["single"]
composite_keywords = cached_data["composite"]
# the cache contains only keys of the composite keywords, not the objects
# so now let's resolve them into objects
for kw in composite_keywords.values():
kw.refreshCompositeOf(single_keywords, composite_keywords)
current_app.logger.debug(
"Retrieved taxonomy from cache %s created on %s" %
(cache_file, time.asctime(cached_data["creation_time"]))
)
current_app.logger.debug(
"%d terms read in %.1f sec." %
(len(single_keywords) + len(composite_keywords),
time.clock() - timer_start)
)
return (single_keywords, composite_keywords) | def function[_get_cache, parameter[cache_file, source_file]]:
constant[Get cached taxonomy using the cPickle module.
No check is done at that stage.
:param cache_file: full path to the file holding pickled data
:param source_file: if we discover the cache is obsolete, we
will build a new cache, therefore we need the source path
of the cache
:return: (single_keywords, composite_keywords).
]
variable[timer_start] assign[=] call[name[time].clock, parameter[]]
variable[filestream] assign[=] call[name[open], parameter[name[cache_file], constant[rb]]]
<ast.Try object at 0x7da18fe93280>
call[name[filestream].close, parameter[]]
variable[single_keywords] assign[=] call[name[cached_data]][constant[single]]
variable[composite_keywords] assign[=] call[name[cached_data]][constant[composite]]
for taget[name[kw]] in starred[call[name[composite_keywords].values, parameter[]]] begin[:]
call[name[kw].refreshCompositeOf, parameter[name[single_keywords], name[composite_keywords]]]
call[name[current_app].logger.debug, parameter[binary_operation[constant[Retrieved taxonomy from cache %s created on %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f9b0d0>, <ast.Call object at 0x7da207f99cf0>]]]]]
call[name[current_app].logger.debug, parameter[binary_operation[constant[%d terms read in %.1f sec.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.BinOp object at 0x7da207f9a530>, <ast.BinOp object at 0x7da207f9b190>]]]]]
return[tuple[[<ast.Name object at 0x7da207f9a0e0>, <ast.Name object at 0x7da207f9b700>]]] | keyword[def] identifier[_get_cache] ( identifier[cache_file] , identifier[source_file] = keyword[None] ):
literal[string]
identifier[timer_start] = identifier[time] . identifier[clock] ()
identifier[filestream] = identifier[open] ( identifier[cache_file] , literal[string] )
keyword[try] :
identifier[cached_data] = identifier[cPickle] . identifier[load] ( identifier[filestream] )
identifier[version_info] = identifier[cached_data] [ literal[string] ]
keyword[if] identifier[version_info] [ literal[string] ]!= identifier[rdflib] . identifier[__version__] :
keyword[raise] identifier[KeyError]
keyword[except] ( identifier[cPickle] . identifier[UnpicklingError] , identifier[ImportError] ,
identifier[AttributeError] , identifier[DeprecationWarning] , identifier[EOFError] ):
identifier[current_app] . identifier[logger] . identifier[warning] (
literal[string]
literal[string] % identifier[cache_file]
)
identifier[filestream] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[cache_file] )
keyword[return] identifier[_build_cache] ( identifier[source_file] )
keyword[except] identifier[KeyError] :
identifier[current_app] . identifier[logger] . identifier[warning] (
literal[string]
literal[string] % identifier[cache_file]
)
identifier[filestream] . identifier[close] ()
identifier[os] . identifier[remove] ( identifier[cache_file] )
keyword[if] identifier[source_file] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[source_file] ):
keyword[return] identifier[_build_cache] ( identifier[source_file] )
keyword[else] :
identifier[current_app] . identifier[logger] . identifier[error] (
literal[string]
literal[string]
literal[string] % identifier[source_file]
)
identifier[filestream] . identifier[close] ()
identifier[single_keywords] = identifier[cached_data] [ literal[string] ]
identifier[composite_keywords] = identifier[cached_data] [ literal[string] ]
keyword[for] identifier[kw] keyword[in] identifier[composite_keywords] . identifier[values] ():
identifier[kw] . identifier[refreshCompositeOf] ( identifier[single_keywords] , identifier[composite_keywords] )
identifier[current_app] . identifier[logger] . identifier[debug] (
literal[string] %
( identifier[cache_file] , identifier[time] . identifier[asctime] ( identifier[cached_data] [ literal[string] ]))
)
identifier[current_app] . identifier[logger] . identifier[debug] (
literal[string] %
( identifier[len] ( identifier[single_keywords] )+ identifier[len] ( identifier[composite_keywords] ),
identifier[time] . identifier[clock] ()- identifier[timer_start] )
)
keyword[return] ( identifier[single_keywords] , identifier[composite_keywords] ) | def _get_cache(cache_file, source_file=None):
"""Get cached taxonomy using the cPickle module.
No check is done at that stage.
:param cache_file: full path to the file holding pickled data
:param source_file: if we discover the cache is obsolete, we
will build a new cache, therefore we need the source path
of the cache
:return: (single_keywords, composite_keywords).
"""
timer_start = time.clock()
filestream = open(cache_file, 'rb')
try:
cached_data = cPickle.load(filestream)
version_info = cached_data['version_info']
if version_info['rdflib'] != rdflib.__version__:
raise KeyError # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except (cPickle.UnpicklingError, ImportError, AttributeError, DeprecationWarning, EOFError):
current_app.logger.warning('The existing cache in %s is not readable. Removing and rebuilding it.' % cache_file)
filestream.close()
os.remove(cache_file)
return _build_cache(source_file) # depends on [control=['except'], data=[]]
except KeyError:
current_app.logger.warning('The existing cache %s is not up-to-date. Removing and rebuilding it.' % cache_file)
filestream.close()
os.remove(cache_file)
if source_file and os.path.exists(source_file):
return _build_cache(source_file) # depends on [control=['if'], data=[]]
else:
current_app.logger.error("The cache contains obsolete data (and it was deleted), however I can't build a new cache, the source does not exist or is inaccessible! - %s" % source_file) # depends on [control=['except'], data=[]]
filestream.close()
single_keywords = cached_data['single']
composite_keywords = cached_data['composite']
# the cache contains only keys of the composite keywords, not the objects
# so now let's resolve them into objects
for kw in composite_keywords.values():
kw.refreshCompositeOf(single_keywords, composite_keywords) # depends on [control=['for'], data=['kw']]
current_app.logger.debug('Retrieved taxonomy from cache %s created on %s' % (cache_file, time.asctime(cached_data['creation_time'])))
current_app.logger.debug('%d terms read in %.1f sec.' % (len(single_keywords) + len(composite_keywords), time.clock() - timer_start))
return (single_keywords, composite_keywords) |
def redo(self):
"""Redo the last cluster assignment operation.
Returns
-------
up : UpdateInfo instance of the changes done by this operation.
"""
# Go forward in the stack, and retrieve the new assignment.
item = self._undo_stack.forward()
if item is None:
# No redo has been performed: abort.
return
# NOTE: the undo_state object is only returned when undoing.
# It represents data associated to the state
# *before* the action. What might be more useful would be the
# undo_state object of the next item in the list (if it exists).
spike_ids, cluster_ids, undo_state = item
assert spike_ids is not None
# We apply the new assignment.
up = self._do_assign(spike_ids, cluster_ids)
up.history = 'redo'
self.emit('cluster', up)
return up | def function[redo, parameter[self]]:
constant[Redo the last cluster assignment operation.
Returns
-------
up : UpdateInfo instance of the changes done by this operation.
]
variable[item] assign[=] call[name[self]._undo_stack.forward, parameter[]]
if compare[name[item] is constant[None]] begin[:]
return[None]
<ast.Tuple object at 0x7da18bc730a0> assign[=] name[item]
assert[compare[name[spike_ids] is_not constant[None]]]
variable[up] assign[=] call[name[self]._do_assign, parameter[name[spike_ids], name[cluster_ids]]]
name[up].history assign[=] constant[redo]
call[name[self].emit, parameter[constant[cluster], name[up]]]
return[name[up]] | keyword[def] identifier[redo] ( identifier[self] ):
literal[string]
identifier[item] = identifier[self] . identifier[_undo_stack] . identifier[forward] ()
keyword[if] identifier[item] keyword[is] keyword[None] :
keyword[return]
identifier[spike_ids] , identifier[cluster_ids] , identifier[undo_state] = identifier[item]
keyword[assert] identifier[spike_ids] keyword[is] keyword[not] keyword[None]
identifier[up] = identifier[self] . identifier[_do_assign] ( identifier[spike_ids] , identifier[cluster_ids] )
identifier[up] . identifier[history] = literal[string]
identifier[self] . identifier[emit] ( literal[string] , identifier[up] )
keyword[return] identifier[up] | def redo(self):
"""Redo the last cluster assignment operation.
Returns
-------
up : UpdateInfo instance of the changes done by this operation.
"""
# Go forward in the stack, and retrieve the new assignment.
item = self._undo_stack.forward()
if item is None:
# No redo has been performed: abort.
return # depends on [control=['if'], data=[]]
# NOTE: the undo_state object is only returned when undoing.
# It represents data associated to the state
# *before* the action. What might be more useful would be the
# undo_state object of the next item in the list (if it exists).
(spike_ids, cluster_ids, undo_state) = item
assert spike_ids is not None
# We apply the new assignment.
up = self._do_assign(spike_ids, cluster_ids)
up.history = 'redo'
self.emit('cluster', up)
return up |
def export(self, storage_client, overwrite=True):
'''
a method to export all the records in collection to another platform
:param storage_client: class object with storage client methods
:return: string with exit message
'''
title = '%s.export' % self.__class__.__name__
# validate storage client
method_list = [ 'save', 'load', 'list', 'export', 'delete', 'remove', '_import', 'collection_name' ]
for method in method_list:
if not getattr(storage_client, method, None):
from labpack.parsing.grammar import join_words
raise ValueError('%s(storage_client=...) must be a client object with %s methods.' % (title, join_words(method_list)))
# walk collection folder to find files
import os
count = 0
skipped = 0
for file_path in self._walk():
path_segments = file_path.split(os.sep)
record_key = os.path.join(*path_segments)
record_key = record_key.replace('\\','/')
file_path = '/%s' % file_path
# retrieve data and metadata
try:
metadata, response = self.dropbox.files_download(file_path)
except:
raise DropboxConnectionError(title)
record_data = response.content
client_modified = metadata.client_modified
# import record into storage client
last_modified = 0.0
if client_modified:
from dateutil.tz import tzutc
from labpack.records.time import labDT
last_modified = labDT.fromPython(client_modified.replace(tzinfo=tzutc())).epoch()
outcome = storage_client._import(record_key, record_data, overwrite=overwrite, last_modified=last_modified)
if outcome:
count += 1
else:
skipped += 1
# report outcome
plural = ''
skip_insert = ''
new_folder = storage_client.collection_name
if count != 1:
plural = 's'
if skipped > 0:
skip_plural = ''
if skipped > 1:
skip_plural = 's'
skip_insert = ' %s record%s skipped to avoid overwrite.' % (str(skipped), skip_plural)
exit_msg = '%s record%s exported to %s.%s' % (str(count), plural, new_folder, skip_insert)
return exit_msg | def function[export, parameter[self, storage_client, overwrite]]:
constant[
a method to export all the records in collection to another platform
:param storage_client: class object with storage client methods
:return: string with exit message
]
variable[title] assign[=] binary_operation[constant[%s.export] <ast.Mod object at 0x7da2590d6920> name[self].__class__.__name__]
variable[method_list] assign[=] list[[<ast.Constant object at 0x7da18dc9a1a0>, <ast.Constant object at 0x7da18dc9b490>, <ast.Constant object at 0x7da18dc9aa40>, <ast.Constant object at 0x7da18dc9a590>, <ast.Constant object at 0x7da18dc9a740>, <ast.Constant object at 0x7da18dc98550>, <ast.Constant object at 0x7da18dc9b8b0>, <ast.Constant object at 0x7da18dc99660>]]
for taget[name[method]] in starred[name[method_list]] begin[:]
if <ast.UnaryOp object at 0x7da18dc9b040> begin[:]
from relative_module[labpack.parsing.grammar] import module[join_words]
<ast.Raise object at 0x7da18dc99c30>
import module[os]
variable[count] assign[=] constant[0]
variable[skipped] assign[=] constant[0]
for taget[name[file_path]] in starred[call[name[self]._walk, parameter[]]] begin[:]
variable[path_segments] assign[=] call[name[file_path].split, parameter[name[os].sep]]
variable[record_key] assign[=] call[name[os].path.join, parameter[<ast.Starred object at 0x7da18dc99ea0>]]
variable[record_key] assign[=] call[name[record_key].replace, parameter[constant[\], constant[/]]]
variable[file_path] assign[=] binary_operation[constant[/%s] <ast.Mod object at 0x7da2590d6920> name[file_path]]
<ast.Try object at 0x7da18dc98580>
variable[record_data] assign[=] name[response].content
variable[client_modified] assign[=] name[metadata].client_modified
variable[last_modified] assign[=] constant[0.0]
if name[client_modified] begin[:]
from relative_module[dateutil.tz] import module[tzutc]
from relative_module[labpack.records.time] import module[labDT]
variable[last_modified] assign[=] call[call[name[labDT].fromPython, parameter[call[name[client_modified].replace, parameter[]]]].epoch, parameter[]]
variable[outcome] assign[=] call[name[storage_client]._import, parameter[name[record_key], name[record_data]]]
if name[outcome] begin[:]
<ast.AugAssign object at 0x7da18dc98850>
variable[plural] assign[=] constant[]
variable[skip_insert] assign[=] constant[]
variable[new_folder] assign[=] name[storage_client].collection_name
if compare[name[count] not_equal[!=] constant[1]] begin[:]
variable[plural] assign[=] constant[s]
if compare[name[skipped] greater[>] constant[0]] begin[:]
variable[skip_plural] assign[=] constant[]
if compare[name[skipped] greater[>] constant[1]] begin[:]
variable[skip_plural] assign[=] constant[s]
variable[skip_insert] assign[=] binary_operation[constant[ %s record%s skipped to avoid overwrite.] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18dc99b10>, <ast.Name object at 0x7da18dc984c0>]]]
variable[exit_msg] assign[=] binary_operation[constant[%s record%s exported to %s.%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18dc9a890>, <ast.Name object at 0x7da18dc99000>, <ast.Name object at 0x7da18dc98e50>, <ast.Name object at 0x7da18dc9bee0>]]]
return[name[exit_msg]] | keyword[def] identifier[export] ( identifier[self] , identifier[storage_client] , identifier[overwrite] = keyword[True] ):
literal[string]
identifier[title] = literal[string] % identifier[self] . identifier[__class__] . identifier[__name__]
identifier[method_list] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[method] keyword[in] identifier[method_list] :
keyword[if] keyword[not] identifier[getattr] ( identifier[storage_client] , identifier[method] , keyword[None] ):
keyword[from] identifier[labpack] . identifier[parsing] . identifier[grammar] keyword[import] identifier[join_words]
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[title] , identifier[join_words] ( identifier[method_list] )))
keyword[import] identifier[os]
identifier[count] = literal[int]
identifier[skipped] = literal[int]
keyword[for] identifier[file_path] keyword[in] identifier[self] . identifier[_walk] ():
identifier[path_segments] = identifier[file_path] . identifier[split] ( identifier[os] . identifier[sep] )
identifier[record_key] = identifier[os] . identifier[path] . identifier[join] (* identifier[path_segments] )
identifier[record_key] = identifier[record_key] . identifier[replace] ( literal[string] , literal[string] )
identifier[file_path] = literal[string] % identifier[file_path]
keyword[try] :
identifier[metadata] , identifier[response] = identifier[self] . identifier[dropbox] . identifier[files_download] ( identifier[file_path] )
keyword[except] :
keyword[raise] identifier[DropboxConnectionError] ( identifier[title] )
identifier[record_data] = identifier[response] . identifier[content]
identifier[client_modified] = identifier[metadata] . identifier[client_modified]
identifier[last_modified] = literal[int]
keyword[if] identifier[client_modified] :
keyword[from] identifier[dateutil] . identifier[tz] keyword[import] identifier[tzutc]
keyword[from] identifier[labpack] . identifier[records] . identifier[time] keyword[import] identifier[labDT]
identifier[last_modified] = identifier[labDT] . identifier[fromPython] ( identifier[client_modified] . identifier[replace] ( identifier[tzinfo] = identifier[tzutc] ())). identifier[epoch] ()
identifier[outcome] = identifier[storage_client] . identifier[_import] ( identifier[record_key] , identifier[record_data] , identifier[overwrite] = identifier[overwrite] , identifier[last_modified] = identifier[last_modified] )
keyword[if] identifier[outcome] :
identifier[count] += literal[int]
keyword[else] :
identifier[skipped] += literal[int]
identifier[plural] = literal[string]
identifier[skip_insert] = literal[string]
identifier[new_folder] = identifier[storage_client] . identifier[collection_name]
keyword[if] identifier[count] != literal[int] :
identifier[plural] = literal[string]
keyword[if] identifier[skipped] > literal[int] :
identifier[skip_plural] = literal[string]
keyword[if] identifier[skipped] > literal[int] :
identifier[skip_plural] = literal[string]
identifier[skip_insert] = literal[string] %( identifier[str] ( identifier[skipped] ), identifier[skip_plural] )
identifier[exit_msg] = literal[string] %( identifier[str] ( identifier[count] ), identifier[plural] , identifier[new_folder] , identifier[skip_insert] )
keyword[return] identifier[exit_msg] | def export(self, storage_client, overwrite=True):
"""
a method to export all the records in collection to another platform
:param storage_client: class object with storage client methods
:return: string with exit message
"""
title = '%s.export' % self.__class__.__name__ # validate storage client
method_list = ['save', 'load', 'list', 'export', 'delete', 'remove', '_import', 'collection_name']
for method in method_list:
if not getattr(storage_client, method, None):
from labpack.parsing.grammar import join_words
raise ValueError('%s(storage_client=...) must be a client object with %s methods.' % (title, join_words(method_list))) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['method']] # walk collection folder to find files
import os
count = 0
skipped = 0
for file_path in self._walk():
path_segments = file_path.split(os.sep)
record_key = os.path.join(*path_segments)
record_key = record_key.replace('\\', '/')
file_path = '/%s' % file_path # retrieve data and metadata
try:
(metadata, response) = self.dropbox.files_download(file_path) # depends on [control=['try'], data=[]]
except:
raise DropboxConnectionError(title) # depends on [control=['except'], data=[]]
record_data = response.content
client_modified = metadata.client_modified # import record into storage client
last_modified = 0.0
if client_modified:
from dateutil.tz import tzutc
from labpack.records.time import labDT
last_modified = labDT.fromPython(client_modified.replace(tzinfo=tzutc())).epoch() # depends on [control=['if'], data=[]]
outcome = storage_client._import(record_key, record_data, overwrite=overwrite, last_modified=last_modified)
if outcome:
count += 1 # depends on [control=['if'], data=[]]
else:
skipped += 1 # depends on [control=['for'], data=['file_path']] # report outcome
plural = ''
skip_insert = ''
new_folder = storage_client.collection_name
if count != 1:
plural = 's' # depends on [control=['if'], data=[]]
if skipped > 0:
skip_plural = ''
if skipped > 1:
skip_plural = 's' # depends on [control=['if'], data=[]]
skip_insert = ' %s record%s skipped to avoid overwrite.' % (str(skipped), skip_plural) # depends on [control=['if'], data=['skipped']]
exit_msg = '%s record%s exported to %s.%s' % (str(count), plural, new_folder, skip_insert)
return exit_msg |
def by_key(self, style_key, style_value):
"""Return a processor for a "simple" style value.
Parameters
----------
style_key : str
A style key.
style_value : bool or str
A "simple" style value that is either a style attribute (str) and a
boolean flag indicating to use the style attribute named by
`style_key`.
Returns
-------
A function.
"""
if self.style_types[style_key] is bool:
style_attr = style_key
else:
style_attr = style_value
def proc(_, result):
return self.render(style_attr, result)
return proc | def function[by_key, parameter[self, style_key, style_value]]:
constant[Return a processor for a "simple" style value.
Parameters
----------
style_key : str
A style key.
style_value : bool or str
A "simple" style value that is either a style attribute (str) and a
boolean flag indicating to use the style attribute named by
`style_key`.
Returns
-------
A function.
]
if compare[call[name[self].style_types][name[style_key]] is name[bool]] begin[:]
variable[style_attr] assign[=] name[style_key]
def function[proc, parameter[_, result]]:
return[call[name[self].render, parameter[name[style_attr], name[result]]]]
return[name[proc]] | keyword[def] identifier[by_key] ( identifier[self] , identifier[style_key] , identifier[style_value] ):
literal[string]
keyword[if] identifier[self] . identifier[style_types] [ identifier[style_key] ] keyword[is] identifier[bool] :
identifier[style_attr] = identifier[style_key]
keyword[else] :
identifier[style_attr] = identifier[style_value]
keyword[def] identifier[proc] ( identifier[_] , identifier[result] ):
keyword[return] identifier[self] . identifier[render] ( identifier[style_attr] , identifier[result] )
keyword[return] identifier[proc] | def by_key(self, style_key, style_value):
"""Return a processor for a "simple" style value.
Parameters
----------
style_key : str
A style key.
style_value : bool or str
A "simple" style value that is either a style attribute (str) and a
boolean flag indicating to use the style attribute named by
`style_key`.
Returns
-------
A function.
"""
if self.style_types[style_key] is bool:
style_attr = style_key # depends on [control=['if'], data=[]]
else:
style_attr = style_value
def proc(_, result):
return self.render(style_attr, result)
return proc |
def element_by_href_as_smcresult(href, params=None):
""" Get specified element returned as an SMCResult object
:param href: href direct link to object
:return: :py:class:`smc.api.web.SMCResult` with etag, href and
element field holding json, else None
"""
if href:
element = fetch_json_by_href(href, params=params)
if element:
return element | def function[element_by_href_as_smcresult, parameter[href, params]]:
constant[ Get specified element returned as an SMCResult object
:param href: href direct link to object
:return: :py:class:`smc.api.web.SMCResult` with etag, href and
element field holding json, else None
]
if name[href] begin[:]
variable[element] assign[=] call[name[fetch_json_by_href], parameter[name[href]]]
if name[element] begin[:]
return[name[element]] | keyword[def] identifier[element_by_href_as_smcresult] ( identifier[href] , identifier[params] = keyword[None] ):
literal[string]
keyword[if] identifier[href] :
identifier[element] = identifier[fetch_json_by_href] ( identifier[href] , identifier[params] = identifier[params] )
keyword[if] identifier[element] :
keyword[return] identifier[element] | def element_by_href_as_smcresult(href, params=None):
""" Get specified element returned as an SMCResult object
:param href: href direct link to object
:return: :py:class:`smc.api.web.SMCResult` with etag, href and
element field holding json, else None
"""
if href:
element = fetch_json_by_href(href, params=params)
if element:
return element # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] |
def set_cookie(cookies, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False):
'''Set a cookie key into the cookies dictionary *cookies*.'''
cookies[key] = value
if expires is not None:
if isinstance(expires, datetime):
now = (expires.now(expires.tzinfo) if expires.tzinfo else
expires.utcnow())
delta = expires - now
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
cookies[key]['expires'] = expires
if max_age is not None:
cookies[key]['max-age'] = max_age
# IE requires expires, so set it if hasn't been already.
if not expires:
cookies[key]['expires'] = http_date(time.time() + max_age)
if path is not None:
cookies[key]['path'] = path
if domain is not None:
cookies[key]['domain'] = domain
if secure:
cookies[key]['secure'] = True
if httponly:
cookies[key]['httponly'] = True | def function[set_cookie, parameter[cookies, key, value, max_age, expires, path, domain, secure, httponly]]:
constant[Set a cookie key into the cookies dictionary *cookies*.]
call[name[cookies]][name[key]] assign[=] name[value]
if compare[name[expires] is_not constant[None]] begin[:]
if call[name[isinstance], parameter[name[expires], name[datetime]]] begin[:]
variable[now] assign[=] <ast.IfExp object at 0x7da204564eb0>
variable[delta] assign[=] binary_operation[name[expires] - name[now]]
variable[delta] assign[=] binary_operation[name[delta] + call[name[timedelta], parameter[]]]
variable[expires] assign[=] constant[None]
variable[max_age] assign[=] call[name[max], parameter[constant[0], binary_operation[binary_operation[name[delta].days * constant[86400]] + name[delta].seconds]]]
if compare[name[max_age] is_not constant[None]] begin[:]
call[call[name[cookies]][name[key]]][constant[max-age]] assign[=] name[max_age]
if <ast.UnaryOp object at 0x7da204566980> begin[:]
call[call[name[cookies]][name[key]]][constant[expires]] assign[=] call[name[http_date], parameter[binary_operation[call[name[time].time, parameter[]] + name[max_age]]]]
if compare[name[path] is_not constant[None]] begin[:]
call[call[name[cookies]][name[key]]][constant[path]] assign[=] name[path]
if compare[name[domain] is_not constant[None]] begin[:]
call[call[name[cookies]][name[key]]][constant[domain]] assign[=] name[domain]
if name[secure] begin[:]
call[call[name[cookies]][name[key]]][constant[secure]] assign[=] constant[True]
if name[httponly] begin[:]
call[call[name[cookies]][name[key]]][constant[httponly]] assign[=] constant[True] | keyword[def] identifier[set_cookie] ( identifier[cookies] , identifier[key] , identifier[value] = literal[string] , identifier[max_age] = keyword[None] , identifier[expires] = keyword[None] , identifier[path] = literal[string] ,
identifier[domain] = keyword[None] , identifier[secure] = keyword[False] , identifier[httponly] = keyword[False] ):
literal[string]
identifier[cookies] [ identifier[key] ]= identifier[value]
keyword[if] identifier[expires] keyword[is] keyword[not] keyword[None] :
keyword[if] identifier[isinstance] ( identifier[expires] , identifier[datetime] ):
identifier[now] =( identifier[expires] . identifier[now] ( identifier[expires] . identifier[tzinfo] ) keyword[if] identifier[expires] . identifier[tzinfo] keyword[else]
identifier[expires] . identifier[utcnow] ())
identifier[delta] = identifier[expires] - identifier[now]
identifier[delta] = identifier[delta] + identifier[timedelta] ( identifier[seconds] = literal[int] )
identifier[expires] = keyword[None]
identifier[max_age] = identifier[max] ( literal[int] , identifier[delta] . identifier[days] * literal[int] + identifier[delta] . identifier[seconds] )
keyword[else] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= identifier[expires]
keyword[if] identifier[max_age] keyword[is] keyword[not] keyword[None] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= identifier[max_age]
keyword[if] keyword[not] identifier[expires] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= identifier[http_date] ( identifier[time] . identifier[time] ()+ identifier[max_age] )
keyword[if] identifier[path] keyword[is] keyword[not] keyword[None] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= identifier[path]
keyword[if] identifier[domain] keyword[is] keyword[not] keyword[None] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= identifier[domain]
keyword[if] identifier[secure] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= keyword[True]
keyword[if] identifier[httponly] :
identifier[cookies] [ identifier[key] ][ literal[string] ]= keyword[True] | def set_cookie(cookies, key, value='', max_age=None, expires=None, path='/', domain=None, secure=False, httponly=False):
"""Set a cookie key into the cookies dictionary *cookies*."""
cookies[key] = value
if expires is not None:
if isinstance(expires, datetime):
now = expires.now(expires.tzinfo) if expires.tzinfo else expires.utcnow()
delta = expires - now
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds) # depends on [control=['if'], data=[]]
else:
cookies[key]['expires'] = expires # depends on [control=['if'], data=['expires']]
if max_age is not None:
cookies[key]['max-age'] = max_age
# IE requires expires, so set it if hasn't been already.
if not expires:
cookies[key]['expires'] = http_date(time.time() + max_age) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['max_age']]
if path is not None:
cookies[key]['path'] = path # depends on [control=['if'], data=['path']]
if domain is not None:
cookies[key]['domain'] = domain # depends on [control=['if'], data=['domain']]
if secure:
cookies[key]['secure'] = True # depends on [control=['if'], data=[]]
if httponly:
cookies[key]['httponly'] = True # depends on [control=['if'], data=[]] |
def Open(self):
"""Connects to the database and creates the required tables.
Raises:
IOError: if the specified output file already exists.
OSError: if the specified output file already exists.
ValueError: if the filename is not set.
"""
if not self._filename:
raise ValueError('Missing filename.')
if not self._append and os.path.isfile(self._filename):
raise IOError((
'Unable to use an already existing file for output '
'[{0:s}]').format(self._filename))
self._connection = sqlite3.connect(self._filename)
self._cursor = self._connection.cursor()
# Create table in database.
if not self._append:
self._cursor.execute(self._CREATE_TABLE_QUERY)
for field in self._META_FIELDS:
query = 'CREATE TABLE l2t_{0:s}s ({0:s}s TEXT, frequency INT)'.format(
field)
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_{0:s}'.format(field))
self._cursor.execute('CREATE TABLE l2t_tags (tag TEXT)')
if self._set_status:
self._set_status('Created table: l2t_tags')
query = 'CREATE TABLE l2t_saved_query (name TEXT, query TEXT)'
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_saved_query')
query = (
'CREATE TABLE l2t_disk (disk_type INT, mount_path TEXT, '
'dd_path TEXT, dd_offset TEXT, storage_file TEXT, export_path TEXT)')
self._cursor.execute(query)
query = (
'INSERT INTO l2t_disk (disk_type, mount_path, dd_path, dd_offset, '
'storage_file, export_path) VALUES (0, "", "", "", "", "")')
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_disk')
self._count = 0 | def function[Open, parameter[self]]:
constant[Connects to the database and creates the required tables.
Raises:
IOError: if the specified output file already exists.
OSError: if the specified output file already exists.
ValueError: if the filename is not set.
]
if <ast.UnaryOp object at 0x7da18dc98a60> begin[:]
<ast.Raise object at 0x7da18dc98130>
if <ast.BoolOp object at 0x7da18dc9a3e0> begin[:]
<ast.Raise object at 0x7da18dc9be20>
name[self]._connection assign[=] call[name[sqlite3].connect, parameter[name[self]._filename]]
name[self]._cursor assign[=] call[name[self]._connection.cursor, parameter[]]
if <ast.UnaryOp object at 0x7da18dc98310> begin[:]
call[name[self]._cursor.execute, parameter[name[self]._CREATE_TABLE_QUERY]]
for taget[name[field]] in starred[name[self]._META_FIELDS] begin[:]
variable[query] assign[=] call[constant[CREATE TABLE l2t_{0:s}s ({0:s}s TEXT, frequency INT)].format, parameter[name[field]]]
call[name[self]._cursor.execute, parameter[name[query]]]
if name[self]._set_status begin[:]
call[name[self]._set_status, parameter[call[constant[Created table: l2t_{0:s}].format, parameter[name[field]]]]]
call[name[self]._cursor.execute, parameter[constant[CREATE TABLE l2t_tags (tag TEXT)]]]
if name[self]._set_status begin[:]
call[name[self]._set_status, parameter[constant[Created table: l2t_tags]]]
variable[query] assign[=] constant[CREATE TABLE l2t_saved_query (name TEXT, query TEXT)]
call[name[self]._cursor.execute, parameter[name[query]]]
if name[self]._set_status begin[:]
call[name[self]._set_status, parameter[constant[Created table: l2t_saved_query]]]
variable[query] assign[=] constant[CREATE TABLE l2t_disk (disk_type INT, mount_path TEXT, dd_path TEXT, dd_offset TEXT, storage_file TEXT, export_path TEXT)]
call[name[self]._cursor.execute, parameter[name[query]]]
variable[query] assign[=] constant[INSERT INTO l2t_disk (disk_type, mount_path, dd_path, dd_offset, storage_file, export_path) VALUES (0, "", "", "", "", "")]
call[name[self]._cursor.execute, parameter[name[query]]]
if name[self]._set_status begin[:]
call[name[self]._set_status, parameter[constant[Created table: l2t_disk]]]
name[self]._count assign[=] constant[0] | keyword[def] identifier[Open] ( identifier[self] ):
literal[string]
keyword[if] keyword[not] identifier[self] . identifier[_filename] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] keyword[not] identifier[self] . identifier[_append] keyword[and] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[_filename] ):
keyword[raise] identifier[IOError] ((
literal[string]
literal[string] ). identifier[format] ( identifier[self] . identifier[_filename] ))
identifier[self] . identifier[_connection] = identifier[sqlite3] . identifier[connect] ( identifier[self] . identifier[_filename] )
identifier[self] . identifier[_cursor] = identifier[self] . identifier[_connection] . identifier[cursor] ()
keyword[if] keyword[not] identifier[self] . identifier[_append] :
identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[self] . identifier[_CREATE_TABLE_QUERY] )
keyword[for] identifier[field] keyword[in] identifier[self] . identifier[_META_FIELDS] :
identifier[query] = literal[string] . identifier[format] (
identifier[field] )
identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[query] )
keyword[if] identifier[self] . identifier[_set_status] :
identifier[self] . identifier[_set_status] ( literal[string] . identifier[format] ( identifier[field] ))
identifier[self] . identifier[_cursor] . identifier[execute] ( literal[string] )
keyword[if] identifier[self] . identifier[_set_status] :
identifier[self] . identifier[_set_status] ( literal[string] )
identifier[query] = literal[string]
identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[query] )
keyword[if] identifier[self] . identifier[_set_status] :
identifier[self] . identifier[_set_status] ( literal[string] )
identifier[query] =(
literal[string]
literal[string] )
identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[query] )
identifier[query] =(
literal[string]
literal[string] )
identifier[self] . identifier[_cursor] . identifier[execute] ( identifier[query] )
keyword[if] identifier[self] . identifier[_set_status] :
identifier[self] . identifier[_set_status] ( literal[string] )
identifier[self] . identifier[_count] = literal[int] | def Open(self):
"""Connects to the database and creates the required tables.
Raises:
IOError: if the specified output file already exists.
OSError: if the specified output file already exists.
ValueError: if the filename is not set.
"""
if not self._filename:
raise ValueError('Missing filename.') # depends on [control=['if'], data=[]]
if not self._append and os.path.isfile(self._filename):
raise IOError('Unable to use an already existing file for output [{0:s}]'.format(self._filename)) # depends on [control=['if'], data=[]]
self._connection = sqlite3.connect(self._filename)
self._cursor = self._connection.cursor()
# Create table in database.
if not self._append:
self._cursor.execute(self._CREATE_TABLE_QUERY)
for field in self._META_FIELDS:
query = 'CREATE TABLE l2t_{0:s}s ({0:s}s TEXT, frequency INT)'.format(field)
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_{0:s}'.format(field)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']]
self._cursor.execute('CREATE TABLE l2t_tags (tag TEXT)')
if self._set_status:
self._set_status('Created table: l2t_tags') # depends on [control=['if'], data=[]]
query = 'CREATE TABLE l2t_saved_query (name TEXT, query TEXT)'
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_saved_query') # depends on [control=['if'], data=[]]
query = 'CREATE TABLE l2t_disk (disk_type INT, mount_path TEXT, dd_path TEXT, dd_offset TEXT, storage_file TEXT, export_path TEXT)'
self._cursor.execute(query)
query = 'INSERT INTO l2t_disk (disk_type, mount_path, dd_path, dd_offset, storage_file, export_path) VALUES (0, "", "", "", "", "")'
self._cursor.execute(query)
if self._set_status:
self._set_status('Created table: l2t_disk') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
self._count = 0 |
def merge_models(self, store_in_memory=False, outdir=None, outname=None, force_rerun=False):
"""Merge all existing models into a Structure's first_model attribute.
This directly modifies the Biopython Structure object. Chains IDs will start from A and increment for each new
chain (which is a Model that is converted).
Args:
store_in_memory (bool): If the modified Biopython Structure object should be stored in the attribute
``structure``
outdir (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
directory must be specified here
outname (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
filename must be specified here (i.e. 4BXI_bio1)
force_rerun (bool): If merged file should be overwritten if it already exists
"""
if store_in_memory:
if self.structure:
parsed = copy(self.structure)
else:
parsed = self.parse_structure()
self.structure = merge_all_models_into_first_model(parsed)
else:
new_structure_path = write_merged_bioassembly(inpath=self.structure_path,
outdir=outdir, outname=outname,
force_rerun=force_rerun)
self.load_structure_path(new_structure_path, file_type='pdb') | def function[merge_models, parameter[self, store_in_memory, outdir, outname, force_rerun]]:
constant[Merge all existing models into a Structure's first_model attribute.
This directly modifies the Biopython Structure object. Chains IDs will start from A and increment for each new
chain (which is a Model that is converted).
Args:
store_in_memory (bool): If the modified Biopython Structure object should be stored in the attribute
``structure``
outdir (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
directory must be specified here
outname (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
filename must be specified here (i.e. 4BXI_bio1)
force_rerun (bool): If merged file should be overwritten if it already exists
]
if name[store_in_memory] begin[:]
if name[self].structure begin[:]
variable[parsed] assign[=] call[name[copy], parameter[name[self].structure]]
name[self].structure assign[=] call[name[merge_all_models_into_first_model], parameter[name[parsed]]] | keyword[def] identifier[merge_models] ( identifier[self] , identifier[store_in_memory] = keyword[False] , identifier[outdir] = keyword[None] , identifier[outname] = keyword[None] , identifier[force_rerun] = keyword[False] ):
literal[string]
keyword[if] identifier[store_in_memory] :
keyword[if] identifier[self] . identifier[structure] :
identifier[parsed] = identifier[copy] ( identifier[self] . identifier[structure] )
keyword[else] :
identifier[parsed] = identifier[self] . identifier[parse_structure] ()
identifier[self] . identifier[structure] = identifier[merge_all_models_into_first_model] ( identifier[parsed] )
keyword[else] :
identifier[new_structure_path] = identifier[write_merged_bioassembly] ( identifier[inpath] = identifier[self] . identifier[structure_path] ,
identifier[outdir] = identifier[outdir] , identifier[outname] = identifier[outname] ,
identifier[force_rerun] = identifier[force_rerun] )
identifier[self] . identifier[load_structure_path] ( identifier[new_structure_path] , identifier[file_type] = literal[string] ) | def merge_models(self, store_in_memory=False, outdir=None, outname=None, force_rerun=False):
"""Merge all existing models into a Structure's first_model attribute.
This directly modifies the Biopython Structure object. Chains IDs will start from A and increment for each new
chain (which is a Model that is converted).
Args:
store_in_memory (bool): If the modified Biopython Structure object should be stored in the attribute
``structure``
outdir (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
directory must be specified here
outname (str): If ``store_in_memory`` is False, the structure file has to be written somewhere so an output
filename must be specified here (i.e. 4BXI_bio1)
force_rerun (bool): If merged file should be overwritten if it already exists
"""
if store_in_memory:
if self.structure:
parsed = copy(self.structure) # depends on [control=['if'], data=[]]
else:
parsed = self.parse_structure()
self.structure = merge_all_models_into_first_model(parsed) # depends on [control=['if'], data=[]]
else:
new_structure_path = write_merged_bioassembly(inpath=self.structure_path, outdir=outdir, outname=outname, force_rerun=force_rerun)
self.load_structure_path(new_structure_path, file_type='pdb') |
def generate_substitution_structures(self, atom, target_species=[],
sub_both_sides=False, range_tol=1e-2,
dist_from_surf=0):
"""
Function that performs substitution-type doping on the surface and
returns all possible configurations where one dopant is substituted
per surface. Can substitute one surface or both.
Args:
atom (str): atom corresponding to substitutional dopant
sub_both_sides (bool): If true, substitute an equivalent
site on the other surface
target_species (list): List of specific species to substitute
range_tol (float): Find viable substitution sites at a specific
distance from the surface +- this tolerance
dist_from_surf (float): Distance from the surface to find viable
substitution sites, defaults to 0 to substitute at the surface
"""
# Get symmetrized structure in case we want to substitue both sides
sym_slab = SpacegroupAnalyzer(self.slab).get_symmetrized_structure()
# Define a function for substituting a site
def substitute(site, i):
slab = self.slab.copy()
props = self.slab.site_properties
if sub_both_sides:
# Find an equivalent site on the other surface
eq_indices = [indices for indices in
sym_slab.equivalent_indices if i in indices][0]
for ii in eq_indices:
if "%.6f" % (sym_slab[ii].frac_coords[2]) != \
"%.6f" % (site.frac_coords[2]):
props["surface_properties"][ii] = "substitute"
slab.replace(ii, atom)
break
props["surface_properties"][i] = "substitute"
slab.replace(i, atom)
slab.add_site_property("surface_properties",
props["surface_properties"])
return slab
# Get all possible substitution sites
substituted_slabs = []
# Sort sites so that we can define a range relative to the position of the
# surface atoms, i.e. search for sites above (below) the bottom (top) surface
sorted_sites = sorted(sym_slab, key=lambda site: site.frac_coords[2])
if sorted_sites[0].surface_properties == "surface":
d = sorted_sites[0].frac_coords[2] + dist_from_surf
else:
d = sorted_sites[-1].frac_coords[2] - dist_from_surf
for i, site in enumerate(sym_slab):
if d - range_tol < site.frac_coords[2] < d + range_tol:
if target_species and site.species_string in target_species:
substituted_slabs.append(substitute(site, i))
elif not target_species:
substituted_slabs.append(substitute(site, i))
matcher = StructureMatcher()
return [s[0] for s in matcher.group_structures(substituted_slabs)] | def function[generate_substitution_structures, parameter[self, atom, target_species, sub_both_sides, range_tol, dist_from_surf]]:
constant[
Function that performs substitution-type doping on the surface and
returns all possible configurations where one dopant is substituted
per surface. Can substitute one surface or both.
Args:
atom (str): atom corresponding to substitutional dopant
sub_both_sides (bool): If true, substitute an equivalent
site on the other surface
target_species (list): List of specific species to substitute
range_tol (float): Find viable substitution sites at a specific
distance from the surface +- this tolerance
dist_from_surf (float): Distance from the surface to find viable
substitution sites, defaults to 0 to substitute at the surface
]
variable[sym_slab] assign[=] call[call[name[SpacegroupAnalyzer], parameter[name[self].slab]].get_symmetrized_structure, parameter[]]
def function[substitute, parameter[site, i]]:
variable[slab] assign[=] call[name[self].slab.copy, parameter[]]
variable[props] assign[=] name[self].slab.site_properties
if name[sub_both_sides] begin[:]
variable[eq_indices] assign[=] call[<ast.ListComp object at 0x7da18bc71720>][constant[0]]
for taget[name[ii]] in starred[name[eq_indices]] begin[:]
if compare[binary_operation[constant[%.6f] <ast.Mod object at 0x7da2590d6920> call[call[name[sym_slab]][name[ii]].frac_coords][constant[2]]] not_equal[!=] binary_operation[constant[%.6f] <ast.Mod object at 0x7da2590d6920> call[name[site].frac_coords][constant[2]]]] begin[:]
call[call[name[props]][constant[surface_properties]]][name[ii]] assign[=] constant[substitute]
call[name[slab].replace, parameter[name[ii], name[atom]]]
break
call[call[name[props]][constant[surface_properties]]][name[i]] assign[=] constant[substitute]
call[name[slab].replace, parameter[name[i], name[atom]]]
call[name[slab].add_site_property, parameter[constant[surface_properties], call[name[props]][constant[surface_properties]]]]
return[name[slab]]
variable[substituted_slabs] assign[=] list[[]]
variable[sorted_sites] assign[=] call[name[sorted], parameter[name[sym_slab]]]
if compare[call[name[sorted_sites]][constant[0]].surface_properties equal[==] constant[surface]] begin[:]
variable[d] assign[=] binary_operation[call[call[name[sorted_sites]][constant[0]].frac_coords][constant[2]] + name[dist_from_surf]]
for taget[tuple[[<ast.Name object at 0x7da18bc71570>, <ast.Name object at 0x7da18bc702b0>]]] in starred[call[name[enumerate], parameter[name[sym_slab]]]] begin[:]
if compare[binary_operation[name[d] - name[range_tol]] less[<] call[name[site].frac_coords][constant[2]]] begin[:]
if <ast.BoolOp object at 0x7da18bc701c0> begin[:]
call[name[substituted_slabs].append, parameter[call[name[substitute], parameter[name[site], name[i]]]]]
variable[matcher] assign[=] call[name[StructureMatcher], parameter[]]
return[<ast.ListComp object at 0x7da20c6c5b70>] | keyword[def] identifier[generate_substitution_structures] ( identifier[self] , identifier[atom] , identifier[target_species] =[],
identifier[sub_both_sides] = keyword[False] , identifier[range_tol] = literal[int] ,
identifier[dist_from_surf] = literal[int] ):
literal[string]
identifier[sym_slab] = identifier[SpacegroupAnalyzer] ( identifier[self] . identifier[slab] ). identifier[get_symmetrized_structure] ()
keyword[def] identifier[substitute] ( identifier[site] , identifier[i] ):
identifier[slab] = identifier[self] . identifier[slab] . identifier[copy] ()
identifier[props] = identifier[self] . identifier[slab] . identifier[site_properties]
keyword[if] identifier[sub_both_sides] :
identifier[eq_indices] =[ identifier[indices] keyword[for] identifier[indices] keyword[in]
identifier[sym_slab] . identifier[equivalent_indices] keyword[if] identifier[i] keyword[in] identifier[indices] ][ literal[int] ]
keyword[for] identifier[ii] keyword[in] identifier[eq_indices] :
keyword[if] literal[string] %( identifier[sym_slab] [ identifier[ii] ]. identifier[frac_coords] [ literal[int] ])!= literal[string] %( identifier[site] . identifier[frac_coords] [ literal[int] ]):
identifier[props] [ literal[string] ][ identifier[ii] ]= literal[string]
identifier[slab] . identifier[replace] ( identifier[ii] , identifier[atom] )
keyword[break]
identifier[props] [ literal[string] ][ identifier[i] ]= literal[string]
identifier[slab] . identifier[replace] ( identifier[i] , identifier[atom] )
identifier[slab] . identifier[add_site_property] ( literal[string] ,
identifier[props] [ literal[string] ])
keyword[return] identifier[slab]
identifier[substituted_slabs] =[]
identifier[sorted_sites] = identifier[sorted] ( identifier[sym_slab] , identifier[key] = keyword[lambda] identifier[site] : identifier[site] . identifier[frac_coords] [ literal[int] ])
keyword[if] identifier[sorted_sites] [ literal[int] ]. identifier[surface_properties] == literal[string] :
identifier[d] = identifier[sorted_sites] [ literal[int] ]. identifier[frac_coords] [ literal[int] ]+ identifier[dist_from_surf]
keyword[else] :
identifier[d] = identifier[sorted_sites] [- literal[int] ]. identifier[frac_coords] [ literal[int] ]- identifier[dist_from_surf]
keyword[for] identifier[i] , identifier[site] keyword[in] identifier[enumerate] ( identifier[sym_slab] ):
keyword[if] identifier[d] - identifier[range_tol] < identifier[site] . identifier[frac_coords] [ literal[int] ]< identifier[d] + identifier[range_tol] :
keyword[if] identifier[target_species] keyword[and] identifier[site] . identifier[species_string] keyword[in] identifier[target_species] :
identifier[substituted_slabs] . identifier[append] ( identifier[substitute] ( identifier[site] , identifier[i] ))
keyword[elif] keyword[not] identifier[target_species] :
identifier[substituted_slabs] . identifier[append] ( identifier[substitute] ( identifier[site] , identifier[i] ))
identifier[matcher] = identifier[StructureMatcher] ()
keyword[return] [ identifier[s] [ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[matcher] . identifier[group_structures] ( identifier[substituted_slabs] )] | def generate_substitution_structures(self, atom, target_species=[], sub_both_sides=False, range_tol=0.01, dist_from_surf=0):
"""
Function that performs substitution-type doping on the surface and
returns all possible configurations where one dopant is substituted
per surface. Can substitute one surface or both.
Args:
atom (str): atom corresponding to substitutional dopant
sub_both_sides (bool): If true, substitute an equivalent
site on the other surface
target_species (list): List of specific species to substitute
range_tol (float): Find viable substitution sites at a specific
distance from the surface +- this tolerance
dist_from_surf (float): Distance from the surface to find viable
substitution sites, defaults to 0 to substitute at the surface
"""
# Get symmetrized structure in case we want to substitue both sides
sym_slab = SpacegroupAnalyzer(self.slab).get_symmetrized_structure()
# Define a function for substituting a site
def substitute(site, i):
slab = self.slab.copy()
props = self.slab.site_properties
if sub_both_sides:
# Find an equivalent site on the other surface
eq_indices = [indices for indices in sym_slab.equivalent_indices if i in indices][0]
for ii in eq_indices:
if '%.6f' % sym_slab[ii].frac_coords[2] != '%.6f' % site.frac_coords[2]:
props['surface_properties'][ii] = 'substitute'
slab.replace(ii, atom)
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ii']] # depends on [control=['if'], data=[]]
props['surface_properties'][i] = 'substitute'
slab.replace(i, atom)
slab.add_site_property('surface_properties', props['surface_properties'])
return slab
# Get all possible substitution sites
substituted_slabs = []
# Sort sites so that we can define a range relative to the position of the
# surface atoms, i.e. search for sites above (below) the bottom (top) surface
sorted_sites = sorted(sym_slab, key=lambda site: site.frac_coords[2])
if sorted_sites[0].surface_properties == 'surface':
d = sorted_sites[0].frac_coords[2] + dist_from_surf # depends on [control=['if'], data=[]]
else:
d = sorted_sites[-1].frac_coords[2] - dist_from_surf
for (i, site) in enumerate(sym_slab):
if d - range_tol < site.frac_coords[2] < d + range_tol:
if target_species and site.species_string in target_species:
substituted_slabs.append(substitute(site, i)) # depends on [control=['if'], data=[]]
elif not target_species:
substituted_slabs.append(substitute(site, i)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
matcher = StructureMatcher()
return [s[0] for s in matcher.group_structures(substituted_slabs)] |
def make_long_description(marker=None, intro=None):
"""
click_ is a framework to simplify writing composable commands for
command-line tools. This package extends the click_ functionality
by adding support for commands that use configuration files.
.. _click: https://click.pocoo.org/
EXAMPLE:
A configuration file, like:
.. code-block:: INI
# -- FILE: foo.ini
[foo]
flag = yes
name = Alice and Bob
numbers = 1 4 9 16 25
filenames = foo/xxx.txt
bar/baz/zzz.txt
[person.alice]
name = Alice
birthyear = 1995
[person.bob]
name = Bob
birthyear = 2001
can be processed with:
.. code-block:: python
# EXAMPLE:
"""
if intro is None:
intro = inspect.getdoc(make_long_description)
with open("README.rst", "r") as infile:
line = infile.readline()
while not line.strip().startswith(marker):
line = infile.readline()
# -- COLLECT REMAINING: Usage example
contents = infile.read()
text = intro +"\n" + contents
return text | def function[make_long_description, parameter[marker, intro]]:
constant[
click_ is a framework to simplify writing composable commands for
command-line tools. This package extends the click_ functionality
by adding support for commands that use configuration files.
.. _click: https://click.pocoo.org/
EXAMPLE:
A configuration file, like:
.. code-block:: INI
# -- FILE: foo.ini
[foo]
flag = yes
name = Alice and Bob
numbers = 1 4 9 16 25
filenames = foo/xxx.txt
bar/baz/zzz.txt
[person.alice]
name = Alice
birthyear = 1995
[person.bob]
name = Bob
birthyear = 2001
can be processed with:
.. code-block:: python
# EXAMPLE:
]
if compare[name[intro] is constant[None]] begin[:]
variable[intro] assign[=] call[name[inspect].getdoc, parameter[name[make_long_description]]]
with call[name[open], parameter[constant[README.rst], constant[r]]] begin[:]
variable[line] assign[=] call[name[infile].readline, parameter[]]
while <ast.UnaryOp object at 0x7da1b1179c90> begin[:]
variable[line] assign[=] call[name[infile].readline, parameter[]]
variable[contents] assign[=] call[name[infile].read, parameter[]]
variable[text] assign[=] binary_operation[binary_operation[name[intro] + constant[
]] + name[contents]]
return[name[text]] | keyword[def] identifier[make_long_description] ( identifier[marker] = keyword[None] , identifier[intro] = keyword[None] ):
literal[string]
keyword[if] identifier[intro] keyword[is] keyword[None] :
identifier[intro] = identifier[inspect] . identifier[getdoc] ( identifier[make_long_description] )
keyword[with] identifier[open] ( literal[string] , literal[string] ) keyword[as] identifier[infile] :
identifier[line] = identifier[infile] . identifier[readline] ()
keyword[while] keyword[not] identifier[line] . identifier[strip] (). identifier[startswith] ( identifier[marker] ):
identifier[line] = identifier[infile] . identifier[readline] ()
identifier[contents] = identifier[infile] . identifier[read] ()
identifier[text] = identifier[intro] + literal[string] + identifier[contents]
keyword[return] identifier[text] | def make_long_description(marker=None, intro=None):
"""
click_ is a framework to simplify writing composable commands for
command-line tools. This package extends the click_ functionality
by adding support for commands that use configuration files.
.. _click: https://click.pocoo.org/
EXAMPLE:
A configuration file, like:
.. code-block:: INI
# -- FILE: foo.ini
[foo]
flag = yes
name = Alice and Bob
numbers = 1 4 9 16 25
filenames = foo/xxx.txt
bar/baz/zzz.txt
[person.alice]
name = Alice
birthyear = 1995
[person.bob]
name = Bob
birthyear = 2001
can be processed with:
.. code-block:: python
# EXAMPLE:
"""
if intro is None:
intro = inspect.getdoc(make_long_description) # depends on [control=['if'], data=['intro']]
with open('README.rst', 'r') as infile:
line = infile.readline()
while not line.strip().startswith(marker):
line = infile.readline() # depends on [control=['while'], data=[]]
# -- COLLECT REMAINING: Usage example
contents = infile.read() # depends on [control=['with'], data=['infile']]
text = intro + '\n' + contents
return text |
def _force_float(v):
""" Converts given argument to float. On fail logs warning and returns 0.0.
Args:
v (any): value to convert to float
Returns:
float: converted v or 0.0 if conversion failed.
"""
try:
return float(v)
except Exception as exc:
return float('nan')
logger.warning('Failed to convert {} to float with {} error. Using 0 instead.'.format(v, exc)) | def function[_force_float, parameter[v]]:
constant[ Converts given argument to float. On fail logs warning and returns 0.0.
Args:
v (any): value to convert to float
Returns:
float: converted v or 0.0 if conversion failed.
]
<ast.Try object at 0x7da204622bf0> | keyword[def] identifier[_force_float] ( identifier[v] ):
literal[string]
keyword[try] :
keyword[return] identifier[float] ( identifier[v] )
keyword[except] identifier[Exception] keyword[as] identifier[exc] :
keyword[return] identifier[float] ( literal[string] )
identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[v] , identifier[exc] )) | def _force_float(v):
""" Converts given argument to float. On fail logs warning and returns 0.0.
Args:
v (any): value to convert to float
Returns:
float: converted v or 0.0 if conversion failed.
"""
try:
return float(v) # depends on [control=['try'], data=[]]
except Exception as exc:
return float('nan')
logger.warning('Failed to convert {} to float with {} error. Using 0 instead.'.format(v, exc)) # depends on [control=['except'], data=['exc']] |
def linkify_with_timeperiods(self, timeperiods, prop):
"""
Link items with timeperiods items
:param timeperiods: all timeperiods object
:type timeperiods: alignak.objects.timeperiod.Timeperiods
:param prop: property name
:type prop: str
:return: None
"""
for i in self:
if not hasattr(i, prop):
continue
tpname = getattr(i, prop).strip()
# some default values are '', so set None
if not tpname:
setattr(i, prop, '')
continue
# Ok, get a real name, search for it
timeperiod = timeperiods.find_by_name(tpname)
if timeperiod is None:
i.add_error("The %s of the %s '%s' named '%s' is unknown!"
% (prop, i.__class__.my_type, i.get_name(), tpname))
continue
setattr(i, prop, timeperiod.uuid) | def function[linkify_with_timeperiods, parameter[self, timeperiods, prop]]:
constant[
Link items with timeperiods items
:param timeperiods: all timeperiods object
:type timeperiods: alignak.objects.timeperiod.Timeperiods
:param prop: property name
:type prop: str
:return: None
]
for taget[name[i]] in starred[name[self]] begin[:]
if <ast.UnaryOp object at 0x7da18fe93250> begin[:]
continue
variable[tpname] assign[=] call[call[name[getattr], parameter[name[i], name[prop]]].strip, parameter[]]
if <ast.UnaryOp object at 0x7da207f03850> begin[:]
call[name[setattr], parameter[name[i], name[prop], constant[]]]
continue
variable[timeperiod] assign[=] call[name[timeperiods].find_by_name, parameter[name[tpname]]]
if compare[name[timeperiod] is constant[None]] begin[:]
call[name[i].add_error, parameter[binary_operation[constant[The %s of the %s '%s' named '%s' is unknown!] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da207f01210>, <ast.Attribute object at 0x7da207f01090>, <ast.Call object at 0x7da207f01000>, <ast.Name object at 0x7da207f025f0>]]]]]
continue
call[name[setattr], parameter[name[i], name[prop], name[timeperiod].uuid]] | keyword[def] identifier[linkify_with_timeperiods] ( identifier[self] , identifier[timeperiods] , identifier[prop] ):
literal[string]
keyword[for] identifier[i] keyword[in] identifier[self] :
keyword[if] keyword[not] identifier[hasattr] ( identifier[i] , identifier[prop] ):
keyword[continue]
identifier[tpname] = identifier[getattr] ( identifier[i] , identifier[prop] ). identifier[strip] ()
keyword[if] keyword[not] identifier[tpname] :
identifier[setattr] ( identifier[i] , identifier[prop] , literal[string] )
keyword[continue]
identifier[timeperiod] = identifier[timeperiods] . identifier[find_by_name] ( identifier[tpname] )
keyword[if] identifier[timeperiod] keyword[is] keyword[None] :
identifier[i] . identifier[add_error] ( literal[string]
%( identifier[prop] , identifier[i] . identifier[__class__] . identifier[my_type] , identifier[i] . identifier[get_name] (), identifier[tpname] ))
keyword[continue]
identifier[setattr] ( identifier[i] , identifier[prop] , identifier[timeperiod] . identifier[uuid] ) | def linkify_with_timeperiods(self, timeperiods, prop):
"""
Link items with timeperiods items
:param timeperiods: all timeperiods object
:type timeperiods: alignak.objects.timeperiod.Timeperiods
:param prop: property name
:type prop: str
:return: None
"""
for i in self:
if not hasattr(i, prop):
continue # depends on [control=['if'], data=[]]
tpname = getattr(i, prop).strip()
# some default values are '', so set None
if not tpname:
setattr(i, prop, '')
continue # depends on [control=['if'], data=[]]
# Ok, get a real name, search for it
timeperiod = timeperiods.find_by_name(tpname)
if timeperiod is None:
i.add_error("The %s of the %s '%s' named '%s' is unknown!" % (prop, i.__class__.my_type, i.get_name(), tpname))
continue # depends on [control=['if'], data=[]]
setattr(i, prop, timeperiod.uuid) # depends on [control=['for'], data=['i']] |
def values(self, column_major=False):
"""Return a nested list with the worksheet values.
Args:
column_major (bool): as list of columns (default list of rows)
Returns:
list: list of lists with values
"""
if column_major:
return list(map(list, zip(*self._values)))
return [row[:] for row in self._values] | def function[values, parameter[self, column_major]]:
constant[Return a nested list with the worksheet values.
Args:
column_major (bool): as list of columns (default list of rows)
Returns:
list: list of lists with values
]
if name[column_major] begin[:]
return[call[name[list], parameter[call[name[map], parameter[name[list], call[name[zip], parameter[<ast.Starred object at 0x7da1b1b168f0>]]]]]]]
return[<ast.ListComp object at 0x7da1b1b14040>] | keyword[def] identifier[values] ( identifier[self] , identifier[column_major] = keyword[False] ):
literal[string]
keyword[if] identifier[column_major] :
keyword[return] identifier[list] ( identifier[map] ( identifier[list] , identifier[zip] (* identifier[self] . identifier[_values] )))
keyword[return] [ identifier[row] [:] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[_values] ] | def values(self, column_major=False):
"""Return a nested list with the worksheet values.
Args:
column_major (bool): as list of columns (default list of rows)
Returns:
list: list of lists with values
"""
if column_major:
return list(map(list, zip(*self._values))) # depends on [control=['if'], data=[]]
return [row[:] for row in self._values] |
def frequent_users(self):
"""Return a QuerySet of user id's and counts that have signed up for this activity more than
`settings.SIMILAR_THRESHOLD` times. This is be used for suggesting activities to users."""
key = "eighthactivity_{}:frequent_users".format(self.id)
cached = cache.get(key)
if cached:
return cached
freq_users = self.eighthscheduledactivity_set.exclude(eighthsignup_set__user=None).exclude(administrative=True).exclude(special=True).exclude(
restricted=True).values('eighthsignup_set__user').annotate(count=Count('eighthsignup_set__user')).filter(
count__gte=settings.SIMILAR_THRESHOLD).order_by('-count')
cache.set(key, freq_users, timeout=60 * 60 * 24 * 7)
return freq_users | def function[frequent_users, parameter[self]]:
constant[Return a QuerySet of user id's and counts that have signed up for this activity more than
`settings.SIMILAR_THRESHOLD` times. This is be used for suggesting activities to users.]
variable[key] assign[=] call[constant[eighthactivity_{}:frequent_users].format, parameter[name[self].id]]
variable[cached] assign[=] call[name[cache].get, parameter[name[key]]]
if name[cached] begin[:]
return[name[cached]]
variable[freq_users] assign[=] call[call[call[call[call[call[call[call[name[self].eighthscheduledactivity_set.exclude, parameter[]].exclude, parameter[]].exclude, parameter[]].exclude, parameter[]].values, parameter[constant[eighthsignup_set__user]]].annotate, parameter[]].filter, parameter[]].order_by, parameter[constant[-count]]]
call[name[cache].set, parameter[name[key], name[freq_users]]]
return[name[freq_users]] | keyword[def] identifier[frequent_users] ( identifier[self] ):
literal[string]
identifier[key] = literal[string] . identifier[format] ( identifier[self] . identifier[id] )
identifier[cached] = identifier[cache] . identifier[get] ( identifier[key] )
keyword[if] identifier[cached] :
keyword[return] identifier[cached]
identifier[freq_users] = identifier[self] . identifier[eighthscheduledactivity_set] . identifier[exclude] ( identifier[eighthsignup_set__user] = keyword[None] ). identifier[exclude] ( identifier[administrative] = keyword[True] ). identifier[exclude] ( identifier[special] = keyword[True] ). identifier[exclude] (
identifier[restricted] = keyword[True] ). identifier[values] ( literal[string] ). identifier[annotate] ( identifier[count] = identifier[Count] ( literal[string] )). identifier[filter] (
identifier[count__gte] = identifier[settings] . identifier[SIMILAR_THRESHOLD] ). identifier[order_by] ( literal[string] )
identifier[cache] . identifier[set] ( identifier[key] , identifier[freq_users] , identifier[timeout] = literal[int] * literal[int] * literal[int] * literal[int] )
keyword[return] identifier[freq_users] | def frequent_users(self):
"""Return a QuerySet of user id's and counts that have signed up for this activity more than
`settings.SIMILAR_THRESHOLD` times. This is be used for suggesting activities to users."""
key = 'eighthactivity_{}:frequent_users'.format(self.id)
cached = cache.get(key)
if cached:
return cached # depends on [control=['if'], data=[]]
freq_users = self.eighthscheduledactivity_set.exclude(eighthsignup_set__user=None).exclude(administrative=True).exclude(special=True).exclude(restricted=True).values('eighthsignup_set__user').annotate(count=Count('eighthsignup_set__user')).filter(count__gte=settings.SIMILAR_THRESHOLD).order_by('-count')
cache.set(key, freq_users, timeout=60 * 60 * 24 * 7)
return freq_users |
def _follow_link(self, value):
'''Returns given `value` or, if it is a symlink, the `value` it names.'''
seen_keys = set()
while True:
link_key = self._link_for_value(value)
if not link_key:
return value
assert link_key not in seen_keys, 'circular symlink reference'
seen_keys.add(link_key)
value = super(SymlinkDatastore, self).get(link_key) | def function[_follow_link, parameter[self, value]]:
constant[Returns given `value` or, if it is a symlink, the `value` it names.]
variable[seen_keys] assign[=] call[name[set], parameter[]]
while constant[True] begin[:]
variable[link_key] assign[=] call[name[self]._link_for_value, parameter[name[value]]]
if <ast.UnaryOp object at 0x7da20c992080> begin[:]
return[name[value]]
assert[compare[name[link_key] <ast.NotIn object at 0x7da2590d7190> name[seen_keys]]]
call[name[seen_keys].add, parameter[name[link_key]]]
variable[value] assign[=] call[call[name[super], parameter[name[SymlinkDatastore], name[self]]].get, parameter[name[link_key]]] | keyword[def] identifier[_follow_link] ( identifier[self] , identifier[value] ):
literal[string]
identifier[seen_keys] = identifier[set] ()
keyword[while] keyword[True] :
identifier[link_key] = identifier[self] . identifier[_link_for_value] ( identifier[value] )
keyword[if] keyword[not] identifier[link_key] :
keyword[return] identifier[value]
keyword[assert] identifier[link_key] keyword[not] keyword[in] identifier[seen_keys] , literal[string]
identifier[seen_keys] . identifier[add] ( identifier[link_key] )
identifier[value] = identifier[super] ( identifier[SymlinkDatastore] , identifier[self] ). identifier[get] ( identifier[link_key] ) | def _follow_link(self, value):
"""Returns given `value` or, if it is a symlink, the `value` it names."""
seen_keys = set()
while True:
link_key = self._link_for_value(value)
if not link_key:
return value # depends on [control=['if'], data=[]]
assert link_key not in seen_keys, 'circular symlink reference'
seen_keys.add(link_key)
value = super(SymlinkDatastore, self).get(link_key) # depends on [control=['while'], data=[]] |
def get_prediction_score(self, node_id):
"""
Return the prediction score (if leaf node) or None if its an
intermediate node.
Parameters
----------
node_id: id of the node to get the prediction value.
Returns
-------
float or None: returns float value of prediction if leaf node and None
if not.
Examples
--------
.. sourcecode:: python
>>> tree.get_prediction_score(120) # Leaf node
0.251092
>>> tree.get_prediction_score(120) # Not a leaf node
None
"""
_raise_error_if_not_of_type(node_id, [int,long], "node_id")
_numeric_param_check_range("node_id", node_id, 0, self.num_nodes - 1)
node = self.nodes[node_id]
return None if node.is_leaf is False else node.value | def function[get_prediction_score, parameter[self, node_id]]:
constant[
Return the prediction score (if leaf node) or None if its an
intermediate node.
Parameters
----------
node_id: id of the node to get the prediction value.
Returns
-------
float or None: returns float value of prediction if leaf node and None
if not.
Examples
--------
.. sourcecode:: python
>>> tree.get_prediction_score(120) # Leaf node
0.251092
>>> tree.get_prediction_score(120) # Not a leaf node
None
]
call[name[_raise_error_if_not_of_type], parameter[name[node_id], list[[<ast.Name object at 0x7da1b205ac20>, <ast.Name object at 0x7da1b205ab90>]], constant[node_id]]]
call[name[_numeric_param_check_range], parameter[constant[node_id], name[node_id], constant[0], binary_operation[name[self].num_nodes - constant[1]]]]
variable[node] assign[=] call[name[self].nodes][name[node_id]]
return[<ast.IfExp object at 0x7da1b20ec310>] | keyword[def] identifier[get_prediction_score] ( identifier[self] , identifier[node_id] ):
literal[string]
identifier[_raise_error_if_not_of_type] ( identifier[node_id] ,[ identifier[int] , identifier[long] ], literal[string] )
identifier[_numeric_param_check_range] ( literal[string] , identifier[node_id] , literal[int] , identifier[self] . identifier[num_nodes] - literal[int] )
identifier[node] = identifier[self] . identifier[nodes] [ identifier[node_id] ]
keyword[return] keyword[None] keyword[if] identifier[node] . identifier[is_leaf] keyword[is] keyword[False] keyword[else] identifier[node] . identifier[value] | def get_prediction_score(self, node_id):
"""
Return the prediction score (if leaf node) or None if its an
intermediate node.
Parameters
----------
node_id: id of the node to get the prediction value.
Returns
-------
float or None: returns float value of prediction if leaf node and None
if not.
Examples
--------
.. sourcecode:: python
>>> tree.get_prediction_score(120) # Leaf node
0.251092
>>> tree.get_prediction_score(120) # Not a leaf node
None
"""
_raise_error_if_not_of_type(node_id, [int, long], 'node_id')
_numeric_param_check_range('node_id', node_id, 0, self.num_nodes - 1)
node = self.nodes[node_id]
return None if node.is_leaf is False else node.value |
def GetAncestorControl(self, condition: Callable) -> 'Control':
"""
Get a ancestor control that matches the condition.
condition: Callable, function (control: Control, depth: int)->bool,
depth starts with -1 and decreses when search goes up.
Return `Control` subclass or None.
"""
ancestor = self
depth = 0
while True:
ancestor = ancestor.GetParentControl()
depth -= 1
if ancestor:
if condition(ancestor, depth):
return ancestor
else:
break | def function[GetAncestorControl, parameter[self, condition]]:
constant[
Get a ancestor control that matches the condition.
condition: Callable, function (control: Control, depth: int)->bool,
depth starts with -1 and decreses when search goes up.
Return `Control` subclass or None.
]
variable[ancestor] assign[=] name[self]
variable[depth] assign[=] constant[0]
while constant[True] begin[:]
variable[ancestor] assign[=] call[name[ancestor].GetParentControl, parameter[]]
<ast.AugAssign object at 0x7da18dc06b30>
if name[ancestor] begin[:]
if call[name[condition], parameter[name[ancestor], name[depth]]] begin[:]
return[name[ancestor]] | keyword[def] identifier[GetAncestorControl] ( identifier[self] , identifier[condition] : identifier[Callable] )-> literal[string] :
literal[string]
identifier[ancestor] = identifier[self]
identifier[depth] = literal[int]
keyword[while] keyword[True] :
identifier[ancestor] = identifier[ancestor] . identifier[GetParentControl] ()
identifier[depth] -= literal[int]
keyword[if] identifier[ancestor] :
keyword[if] identifier[condition] ( identifier[ancestor] , identifier[depth] ):
keyword[return] identifier[ancestor]
keyword[else] :
keyword[break] | def GetAncestorControl(self, condition: Callable) -> 'Control':
"""
Get a ancestor control that matches the condition.
condition: Callable, function (control: Control, depth: int)->bool,
depth starts with -1 and decreses when search goes up.
Return `Control` subclass or None.
"""
ancestor = self
depth = 0
while True:
ancestor = ancestor.GetParentControl()
depth -= 1
if ancestor:
if condition(ancestor, depth):
return ancestor # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
break # depends on [control=['while'], data=[]] |
def add_x10_device(self, housecode, unitcode, feature='OnOff'):
"""Add an X10 device based on a feature description.
Current features are:
- OnOff
- Dimmable
- Sensor
- AllUnitsOff
- AllLightsOn
- AllLightsOff
"""
device = insteonplm.devices.create_x10(self, housecode,
unitcode, feature)
if device:
self.devices[device.address.id] = device
return device | def function[add_x10_device, parameter[self, housecode, unitcode, feature]]:
constant[Add an X10 device based on a feature description.
Current features are:
- OnOff
- Dimmable
- Sensor
- AllUnitsOff
- AllLightsOn
- AllLightsOff
]
variable[device] assign[=] call[name[insteonplm].devices.create_x10, parameter[name[self], name[housecode], name[unitcode], name[feature]]]
if name[device] begin[:]
call[name[self].devices][name[device].address.id] assign[=] name[device]
return[name[device]] | keyword[def] identifier[add_x10_device] ( identifier[self] , identifier[housecode] , identifier[unitcode] , identifier[feature] = literal[string] ):
literal[string]
identifier[device] = identifier[insteonplm] . identifier[devices] . identifier[create_x10] ( identifier[self] , identifier[housecode] ,
identifier[unitcode] , identifier[feature] )
keyword[if] identifier[device] :
identifier[self] . identifier[devices] [ identifier[device] . identifier[address] . identifier[id] ]= identifier[device]
keyword[return] identifier[device] | def add_x10_device(self, housecode, unitcode, feature='OnOff'):
"""Add an X10 device based on a feature description.
Current features are:
- OnOff
- Dimmable
- Sensor
- AllUnitsOff
- AllLightsOn
- AllLightsOff
"""
device = insteonplm.devices.create_x10(self, housecode, unitcode, feature)
if device:
self.devices[device.address.id] = device # depends on [control=['if'], data=[]]
return device |
def join(self, join_streamlet, window_config, join_function):
"""Return a new Streamlet by joining join_streamlet with this streamlet
"""
from heronpy.streamlet.impl.joinbolt import JoinStreamlet, JoinBolt
join_streamlet_result = JoinStreamlet(JoinBolt.INNER, window_config,
join_function, self, join_streamlet)
self._add_child(join_streamlet_result)
join_streamlet._add_child(join_streamlet_result)
return join_streamlet_result | def function[join, parameter[self, join_streamlet, window_config, join_function]]:
constant[Return a new Streamlet by joining join_streamlet with this streamlet
]
from relative_module[heronpy.streamlet.impl.joinbolt] import module[JoinStreamlet], module[JoinBolt]
variable[join_streamlet_result] assign[=] call[name[JoinStreamlet], parameter[name[JoinBolt].INNER, name[window_config], name[join_function], name[self], name[join_streamlet]]]
call[name[self]._add_child, parameter[name[join_streamlet_result]]]
call[name[join_streamlet]._add_child, parameter[name[join_streamlet_result]]]
return[name[join_streamlet_result]] | keyword[def] identifier[join] ( identifier[self] , identifier[join_streamlet] , identifier[window_config] , identifier[join_function] ):
literal[string]
keyword[from] identifier[heronpy] . identifier[streamlet] . identifier[impl] . identifier[joinbolt] keyword[import] identifier[JoinStreamlet] , identifier[JoinBolt]
identifier[join_streamlet_result] = identifier[JoinStreamlet] ( identifier[JoinBolt] . identifier[INNER] , identifier[window_config] ,
identifier[join_function] , identifier[self] , identifier[join_streamlet] )
identifier[self] . identifier[_add_child] ( identifier[join_streamlet_result] )
identifier[join_streamlet] . identifier[_add_child] ( identifier[join_streamlet_result] )
keyword[return] identifier[join_streamlet_result] | def join(self, join_streamlet, window_config, join_function):
"""Return a new Streamlet by joining join_streamlet with this streamlet
"""
from heronpy.streamlet.impl.joinbolt import JoinStreamlet, JoinBolt
join_streamlet_result = JoinStreamlet(JoinBolt.INNER, window_config, join_function, self, join_streamlet)
self._add_child(join_streamlet_result)
join_streamlet._add_child(join_streamlet_result)
return join_streamlet_result |
def download(self, bucket, key, fileobj, transfer_config=None, subscribers=None):
''' download a file using Aspera '''
check_io_access(os.path.dirname(fileobj), os.W_OK)
return self._queue_task(bucket, [FilePair(key, fileobj)], transfer_config,
subscribers, enumAsperaDirection.RECEIVE) | def function[download, parameter[self, bucket, key, fileobj, transfer_config, subscribers]]:
constant[ download a file using Aspera ]
call[name[check_io_access], parameter[call[name[os].path.dirname, parameter[name[fileobj]]], name[os].W_OK]]
return[call[name[self]._queue_task, parameter[name[bucket], list[[<ast.Call object at 0x7da1b26ac5b0>]], name[transfer_config], name[subscribers], name[enumAsperaDirection].RECEIVE]]] | keyword[def] identifier[download] ( identifier[self] , identifier[bucket] , identifier[key] , identifier[fileobj] , identifier[transfer_config] = keyword[None] , identifier[subscribers] = keyword[None] ):
literal[string]
identifier[check_io_access] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[fileobj] ), identifier[os] . identifier[W_OK] )
keyword[return] identifier[self] . identifier[_queue_task] ( identifier[bucket] ,[ identifier[FilePair] ( identifier[key] , identifier[fileobj] )], identifier[transfer_config] ,
identifier[subscribers] , identifier[enumAsperaDirection] . identifier[RECEIVE] ) | def download(self, bucket, key, fileobj, transfer_config=None, subscribers=None):
""" download a file using Aspera """
check_io_access(os.path.dirname(fileobj), os.W_OK)
return self._queue_task(bucket, [FilePair(key, fileobj)], transfer_config, subscribers, enumAsperaDirection.RECEIVE) |
def autocommit(data_access):
"""Make statements autocommit.
:param data_access: a DataAccess instance
"""
if not data_access.autocommit:
data_access.commit()
old_autocommit = data_access.autocommit
data_access.autocommit = True
try:
yield data_access
finally:
data_access.autocommit = old_autocommit | def function[autocommit, parameter[data_access]]:
constant[Make statements autocommit.
:param data_access: a DataAccess instance
]
if <ast.UnaryOp object at 0x7da1b149e110> begin[:]
call[name[data_access].commit, parameter[]]
variable[old_autocommit] assign[=] name[data_access].autocommit
name[data_access].autocommit assign[=] constant[True]
<ast.Try object at 0x7da20c7c9870> | keyword[def] identifier[autocommit] ( identifier[data_access] ):
literal[string]
keyword[if] keyword[not] identifier[data_access] . identifier[autocommit] :
identifier[data_access] . identifier[commit] ()
identifier[old_autocommit] = identifier[data_access] . identifier[autocommit]
identifier[data_access] . identifier[autocommit] = keyword[True]
keyword[try] :
keyword[yield] identifier[data_access]
keyword[finally] :
identifier[data_access] . identifier[autocommit] = identifier[old_autocommit] | def autocommit(data_access):
"""Make statements autocommit.
:param data_access: a DataAccess instance
"""
if not data_access.autocommit:
data_access.commit() # depends on [control=['if'], data=[]]
old_autocommit = data_access.autocommit
data_access.autocommit = True
try:
yield data_access # depends on [control=['try'], data=[]]
finally:
data_access.autocommit = old_autocommit |
def sixtuple2baxter(chars, debug=False, rhymebook=None):
"""
Convert the classicial six-tuple representation of MCH readings into IPA
(or Baxter's ASCII system).
This function is more or less implemented in MiddleChinese.
"""
if not rhymebook:
rhymebook = _cd.GY
if len(chars) != 6:
raise ValueError('chars should be a sixtuple')
# convert chars to long chars
chars = gbk2big5(chars)
# assign basic values
she,hu,deng,diao,yun,sheng = list(chars)
# try converting the values to mch representations
initial = rhymebook['sheng'].get(sheng, '?')
final = rhymebook['yun'].get(yun, '?')
tone = rhymebook['diao'].get(diao, '?')
medial = rhymebook['hu'].get(hu, '?')
division = rhymebook['deng'].get(deng, '?')
# debug is for cross-checking
if debug:
return [(sheng, initial), (hu, medial), (deng, division),(yun, final),
(diao, tone)]
# check and raise error if things are not handled
if "?" in [initial, final, tone, medial, division]:
raise ValueError("Unrecognized elements in {0}.".format(
' '.join([initial, final, tone, medial, division])))
# treat the final if division is 3 and they start with 'j', note that so
# far, we don't handle chongnius
final = final[1:] if final[0] == 'j' and division in '4' \
else final
final = final[1:] if final[0] == 'j' and division in '3' \
else final
# reduce finals starting with 'w'
final = final[1:] if final[0] == 'w' else final
# resolve the medial (the hu) by checking for labial initial
medial = '' if (initial[0] in 'pbm' and '*' not in final) \
or final[0] in 'u' \
or 'o' in final and not '*' in final and not '?' in final \
else medial
# correct for initials with sandeng-i
initial = initial[:-1] if initial.endswith('j') else initial
# get the medial corrected by deng
medial = "j" + medial if division == '3' \
and 'i' not in final \
and 'y' not in initial \
else medial
# deprive the rime from its leading "j" if we have a medial
final = final[1:] if final[0] in 'j' and 'j' in medial else final
final = final[1:] if final[0] in 'w' and 'w' in medial else final
final = final[1:] if final[0] == '*' or final[0] == '?' else final
final = 'i' + final[1:] if final[0] == '!' \
and division == '4' \
and 'i' not in final \
and (initial[0] in "pbmkgx'" or initial.startswith('ng')) \
else final
# chongniu medial-re-order
medial = 'j' + medial if division == '4' \
and '!' in final \
and 'j' not in medial \
and (initial[0] in "pbmkgx'" or initial.startswith('ng')) \
else medial
final = final[1:] if final[0] == '!' else final
# put everything together
return [initial,medial,final,tone] | def function[sixtuple2baxter, parameter[chars, debug, rhymebook]]:
constant[
Convert the classicial six-tuple representation of MCH readings into IPA
(or Baxter's ASCII system).
This function is more or less implemented in MiddleChinese.
]
if <ast.UnaryOp object at 0x7da204346650> begin[:]
variable[rhymebook] assign[=] name[_cd].GY
if compare[call[name[len], parameter[name[chars]]] not_equal[!=] constant[6]] begin[:]
<ast.Raise object at 0x7da2043477f0>
variable[chars] assign[=] call[name[gbk2big5], parameter[name[chars]]]
<ast.Tuple object at 0x7da204346380> assign[=] call[name[list], parameter[name[chars]]]
variable[initial] assign[=] call[call[name[rhymebook]][constant[sheng]].get, parameter[name[sheng], constant[?]]]
variable[final] assign[=] call[call[name[rhymebook]][constant[yun]].get, parameter[name[yun], constant[?]]]
variable[tone] assign[=] call[call[name[rhymebook]][constant[diao]].get, parameter[name[diao], constant[?]]]
variable[medial] assign[=] call[call[name[rhymebook]][constant[hu]].get, parameter[name[hu], constant[?]]]
variable[division] assign[=] call[call[name[rhymebook]][constant[deng]].get, parameter[name[deng], constant[?]]]
if name[debug] begin[:]
return[list[[<ast.Tuple object at 0x7da204346b00>, <ast.Tuple object at 0x7da204347a30>, <ast.Tuple object at 0x7da2043471f0>, <ast.Tuple object at 0x7da204346ef0>, <ast.Tuple object at 0x7da204347df0>]]]
if compare[constant[?] in list[[<ast.Name object at 0x7da204346e30>, <ast.Name object at 0x7da2043447f0>, <ast.Name object at 0x7da2043478b0>, <ast.Name object at 0x7da204347c70>, <ast.Name object at 0x7da204346c20>]]] begin[:]
<ast.Raise object at 0x7da204347f70>
variable[final] assign[=] <ast.IfExp object at 0x7da204346020>
variable[final] assign[=] <ast.IfExp object at 0x7da204345450>
variable[final] assign[=] <ast.IfExp object at 0x7da2043457b0>
variable[medial] assign[=] <ast.IfExp object at 0x7da2043469b0>
variable[initial] assign[=] <ast.IfExp object at 0x7da204345f30>
variable[medial] assign[=] <ast.IfExp object at 0x7da204344bb0>
variable[final] assign[=] <ast.IfExp object at 0x7da18eb57460>
variable[final] assign[=] <ast.IfExp object at 0x7da18eb57730>
variable[final] assign[=] <ast.IfExp object at 0x7da18eb575e0>
variable[final] assign[=] <ast.IfExp object at 0x7da18eb547c0>
variable[medial] assign[=] <ast.IfExp object at 0x7da18eb57be0>
variable[final] assign[=] <ast.IfExp object at 0x7da18f00ff70>
return[list[[<ast.Name object at 0x7da18f00d060>, <ast.Name object at 0x7da18f00c0d0>, <ast.Name object at 0x7da18f00eb00>, <ast.Name object at 0x7da18f00c310>]]] | keyword[def] identifier[sixtuple2baxter] ( identifier[chars] , identifier[debug] = keyword[False] , identifier[rhymebook] = keyword[None] ):
literal[string]
keyword[if] keyword[not] identifier[rhymebook] :
identifier[rhymebook] = identifier[_cd] . identifier[GY]
keyword[if] identifier[len] ( identifier[chars] )!= literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[chars] = identifier[gbk2big5] ( identifier[chars] )
identifier[she] , identifier[hu] , identifier[deng] , identifier[diao] , identifier[yun] , identifier[sheng] = identifier[list] ( identifier[chars] )
identifier[initial] = identifier[rhymebook] [ literal[string] ]. identifier[get] ( identifier[sheng] , literal[string] )
identifier[final] = identifier[rhymebook] [ literal[string] ]. identifier[get] ( identifier[yun] , literal[string] )
identifier[tone] = identifier[rhymebook] [ literal[string] ]. identifier[get] ( identifier[diao] , literal[string] )
identifier[medial] = identifier[rhymebook] [ literal[string] ]. identifier[get] ( identifier[hu] , literal[string] )
identifier[division] = identifier[rhymebook] [ literal[string] ]. identifier[get] ( identifier[deng] , literal[string] )
keyword[if] identifier[debug] :
keyword[return] [( identifier[sheng] , identifier[initial] ),( identifier[hu] , identifier[medial] ),( identifier[deng] , identifier[division] ),( identifier[yun] , identifier[final] ),
( identifier[diao] , identifier[tone] )]
keyword[if] literal[string] keyword[in] [ identifier[initial] , identifier[final] , identifier[tone] , identifier[medial] , identifier[division] ]:
keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] (
literal[string] . identifier[join] ([ identifier[initial] , identifier[final] , identifier[tone] , identifier[medial] , identifier[division] ])))
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[and] identifier[division] keyword[in] literal[string] keyword[else] identifier[final]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[and] identifier[division] keyword[in] literal[string] keyword[else] identifier[final]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[else] identifier[final]
identifier[medial] = literal[string] keyword[if] ( identifier[initial] [ literal[int] ] keyword[in] literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[final] ) keyword[or] identifier[final] [ literal[int] ] keyword[in] literal[string] keyword[or] literal[string] keyword[in] identifier[final] keyword[and] keyword[not] literal[string] keyword[in] identifier[final] keyword[and] keyword[not] literal[string] keyword[in] identifier[final] keyword[else] identifier[medial]
identifier[initial] = identifier[initial] [:- literal[int] ] keyword[if] identifier[initial] . identifier[endswith] ( literal[string] ) keyword[else] identifier[initial]
identifier[medial] = literal[string] + identifier[medial] keyword[if] identifier[division] == literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[final] keyword[and] literal[string] keyword[not] keyword[in] identifier[initial] keyword[else] identifier[medial]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ] keyword[in] literal[string] keyword[and] literal[string] keyword[in] identifier[medial] keyword[else] identifier[final]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ] keyword[in] literal[string] keyword[and] literal[string] keyword[in] identifier[medial] keyword[else] identifier[final]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[or] identifier[final] [ literal[int] ]== literal[string] keyword[else] identifier[final]
identifier[final] = literal[string] + identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[and] identifier[division] == literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[final] keyword[and] ( identifier[initial] [ literal[int] ] keyword[in] literal[string] keyword[or] identifier[initial] . identifier[startswith] ( literal[string] )) keyword[else] identifier[final]
identifier[medial] = literal[string] + identifier[medial] keyword[if] identifier[division] == literal[string] keyword[and] literal[string] keyword[in] identifier[final] keyword[and] literal[string] keyword[not] keyword[in] identifier[medial] keyword[and] ( identifier[initial] [ literal[int] ] keyword[in] literal[string] keyword[or] identifier[initial] . identifier[startswith] ( literal[string] )) keyword[else] identifier[medial]
identifier[final] = identifier[final] [ literal[int] :] keyword[if] identifier[final] [ literal[int] ]== literal[string] keyword[else] identifier[final]
keyword[return] [ identifier[initial] , identifier[medial] , identifier[final] , identifier[tone] ] | def sixtuple2baxter(chars, debug=False, rhymebook=None):
"""
Convert the classicial six-tuple representation of MCH readings into IPA
(or Baxter's ASCII system).
This function is more or less implemented in MiddleChinese.
"""
if not rhymebook:
rhymebook = _cd.GY # depends on [control=['if'], data=[]]
if len(chars) != 6:
raise ValueError('chars should be a sixtuple') # depends on [control=['if'], data=[]]
# convert chars to long chars
chars = gbk2big5(chars)
# assign basic values
(she, hu, deng, diao, yun, sheng) = list(chars)
# try converting the values to mch representations
initial = rhymebook['sheng'].get(sheng, '?')
final = rhymebook['yun'].get(yun, '?')
tone = rhymebook['diao'].get(diao, '?')
medial = rhymebook['hu'].get(hu, '?')
division = rhymebook['deng'].get(deng, '?')
# debug is for cross-checking
if debug:
return [(sheng, initial), (hu, medial), (deng, division), (yun, final), (diao, tone)] # depends on [control=['if'], data=[]]
# check and raise error if things are not handled
if '?' in [initial, final, tone, medial, division]:
raise ValueError('Unrecognized elements in {0}.'.format(' '.join([initial, final, tone, medial, division]))) # depends on [control=['if'], data=[]]
# treat the final if division is 3 and they start with 'j', note that so
# far, we don't handle chongnius
final = final[1:] if final[0] == 'j' and division in '4' else final
final = final[1:] if final[0] == 'j' and division in '3' else final
# reduce finals starting with 'w'
final = final[1:] if final[0] == 'w' else final
# resolve the medial (the hu) by checking for labial initial
medial = '' if initial[0] in 'pbm' and '*' not in final or final[0] in 'u' or ('o' in final and (not '*' in final) and (not '?' in final)) else medial
# correct for initials with sandeng-i
initial = initial[:-1] if initial.endswith('j') else initial
# get the medial corrected by deng
medial = 'j' + medial if division == '3' and 'i' not in final and ('y' not in initial) else medial
# deprive the rime from its leading "j" if we have a medial
final = final[1:] if final[0] in 'j' and 'j' in medial else final
final = final[1:] if final[0] in 'w' and 'w' in medial else final
final = final[1:] if final[0] == '*' or final[0] == '?' else final
final = 'i' + final[1:] if final[0] == '!' and division == '4' and ('i' not in final) and (initial[0] in "pbmkgx'" or initial.startswith('ng')) else final
# chongniu medial-re-order
medial = 'j' + medial if division == '4' and '!' in final and ('j' not in medial) and (initial[0] in "pbmkgx'" or initial.startswith('ng')) else medial
final = final[1:] if final[0] == '!' else final
# put everything together
return [initial, medial, final, tone] |
def parse(contents, tokens=None):
    """Parse a string called contents for an AST and return it.

    Parameters
    ----------
    contents : str
        The source text to tokenize and parse.
    tokens : list, optional
        A pre-tokenized token list. When None (the default), *contents*
        is tokenized here; callers interested in the token stream can
        pass their own list instead.

    Returns
    -------
    ToplevelBody
        The root node of the parsed AST.
    """
    # Shortcut for users who are interested in tokens
    if tokens is None:
        tokens = list(tokenize(contents))
    token_index, body = _ast_worker(tokens, len(tokens), 0, None)
    # The worker must consume every token and leave no dangling arguments
    # at the top level; anything else indicates a parser bug.
    assert token_index == len(tokens)
    assert body.arguments == []
    return ToplevelBody(statements=body.statements)
constant[Parse a string called contents for an AST and return it.]
if compare[name[tokens] is constant[None]] begin[:]
variable[tokens] assign[=] <ast.ListComp object at 0x7da204623130>
<ast.Tuple object at 0x7da2046201f0> assign[=] call[name[_ast_worker], parameter[name[tokens], call[name[len], parameter[name[tokens]]], constant[0], constant[None]]]
assert[compare[name[token_index] equal[==] call[name[len], parameter[name[tokens]]]]]
assert[compare[name[body].arguments equal[==] list[[]]]]
return[call[name[ToplevelBody], parameter[]]] | keyword[def] identifier[parse] ( identifier[contents] , identifier[tokens] = keyword[None] ):
literal[string]
keyword[if] identifier[tokens] keyword[is] keyword[None] :
identifier[tokens] =[ identifier[t] keyword[for] identifier[t] keyword[in] identifier[tokenize] ( identifier[contents] )]
identifier[token_index] , identifier[body] = identifier[_ast_worker] ( identifier[tokens] , identifier[len] ( identifier[tokens] ), literal[int] , keyword[None] )
keyword[assert] identifier[token_index] == identifier[len] ( identifier[tokens] )
keyword[assert] identifier[body] . identifier[arguments] ==[]
keyword[return] identifier[ToplevelBody] ( identifier[statements] = identifier[body] . identifier[statements] ) | def parse(contents, tokens=None):
"""Parse a string called contents for an AST and return it."""
# Shortcut for users who are interested in tokens
if tokens is None:
tokens = [t for t in tokenize(contents)] # depends on [control=['if'], data=['tokens']]
(token_index, body) = _ast_worker(tokens, len(tokens), 0, None)
assert token_index == len(tokens)
assert body.arguments == []
return ToplevelBody(statements=body.statements) |
def get_pull_request_files(project, num, auth=False):
    """get list of files in a pull request"""
    url = "https://api.github.com/repos/{project}/pulls/{num}/files".format(
        project=project, num=num)
    # Only attach an auth header when the caller asked for authentication.
    header = make_auth_header() if auth else None
    return get_paged_request(url, headers=header)
constant[get list of files in a pull request]
variable[url] assign[=] call[constant[https://api.github.com/repos/{project}/pulls/{num}/files].format, parameter[]]
if name[auth] begin[:]
variable[header] assign[=] call[name[make_auth_header], parameter[]]
return[call[name[get_paged_request], parameter[name[url]]]] | keyword[def] identifier[get_pull_request_files] ( identifier[project] , identifier[num] , identifier[auth] = keyword[False] ):
literal[string]
identifier[url] = literal[string] . identifier[format] ( identifier[project] = identifier[project] , identifier[num] = identifier[num] )
keyword[if] identifier[auth] :
identifier[header] = identifier[make_auth_header] ()
keyword[else] :
identifier[header] = keyword[None]
keyword[return] identifier[get_paged_request] ( identifier[url] , identifier[headers] = identifier[header] ) | def get_pull_request_files(project, num, auth=False):
"""get list of files in a pull request"""
url = 'https://api.github.com/repos/{project}/pulls/{num}/files'.format(project=project, num=num)
if auth:
header = make_auth_header() # depends on [control=['if'], data=[]]
else:
header = None
return get_paged_request(url, headers=header) |
def add_method(obj, func, name=None):
    """Adds an instance method to an object.

    Binds *func* to *obj* and attaches it under *name* (defaulting to
    the function's own ``__name__``).
    """
    attr_name = func.__name__ if name is None else name
    # Python 2's MethodType takes the class as a third argument;
    # Python 3 only needs the function and the instance.
    if sys.version_info < (3,):
        bound = types.MethodType(func, obj, obj.__class__)
    else:
        bound = types.MethodType(func, obj)
    setattr(obj, attr_name, bound)
constant[Adds an instance method to an object.]
if compare[name[name] is constant[None]] begin[:]
variable[name] assign[=] name[func].__name__
if compare[name[sys].version_info less[<] tuple[[<ast.Constant object at 0x7da1b056f610>]]] begin[:]
variable[method] assign[=] call[name[types].MethodType, parameter[name[func], name[obj], name[obj].__class__]]
call[name[setattr], parameter[name[obj], name[name], name[method]]] | keyword[def] identifier[add_method] ( identifier[obj] , identifier[func] , identifier[name] = keyword[None] ):
literal[string]
keyword[if] identifier[name] keyword[is] keyword[None] :
identifier[name] = identifier[func] . identifier[__name__]
keyword[if] identifier[sys] . identifier[version_info] <( literal[int] ,):
identifier[method] = identifier[types] . identifier[MethodType] ( identifier[func] , identifier[obj] , identifier[obj] . identifier[__class__] )
keyword[else] :
identifier[method] = identifier[types] . identifier[MethodType] ( identifier[func] , identifier[obj] )
identifier[setattr] ( identifier[obj] , identifier[name] , identifier[method] ) | def add_method(obj, func, name=None):
"""Adds an instance method to an object."""
if name is None:
name = func.__name__ # depends on [control=['if'], data=['name']]
if sys.version_info < (3,):
method = types.MethodType(func, obj, obj.__class__) # depends on [control=['if'], data=[]]
else:
method = types.MethodType(func, obj)
setattr(obj, name, method) |
def _linkFeature(self, feature):
"""
Link a feature with its parents.
"""
parentNames = feature.attributes.get("Parent")
if parentNames is None:
self.roots.add(feature)
else:
for parentName in parentNames:
self._linkToParent(feature, parentName) | def function[_linkFeature, parameter[self, feature]]:
constant[
Link a feature with its parents.
]
variable[parentNames] assign[=] call[name[feature].attributes.get, parameter[constant[Parent]]]
if compare[name[parentNames] is constant[None]] begin[:]
call[name[self].roots.add, parameter[name[feature]]] | keyword[def] identifier[_linkFeature] ( identifier[self] , identifier[feature] ):
literal[string]
identifier[parentNames] = identifier[feature] . identifier[attributes] . identifier[get] ( literal[string] )
keyword[if] identifier[parentNames] keyword[is] keyword[None] :
identifier[self] . identifier[roots] . identifier[add] ( identifier[feature] )
keyword[else] :
keyword[for] identifier[parentName] keyword[in] identifier[parentNames] :
identifier[self] . identifier[_linkToParent] ( identifier[feature] , identifier[parentName] ) | def _linkFeature(self, feature):
"""
Link a feature with its parents.
"""
parentNames = feature.attributes.get('Parent')
if parentNames is None:
self.roots.add(feature) # depends on [control=['if'], data=[]]
else:
for parentName in parentNames:
self._linkToParent(feature, parentName) # depends on [control=['for'], data=['parentName']] |
def specimens_results_magic(infile='pmag_specimens.txt', measfile='magic_measurements.txt', sampfile='er_samples.txt', sitefile='er_sites.txt', agefile='er_ages.txt', specout='er_specimens.txt', sampout='pmag_samples.txt', siteout='pmag_sites.txt', resout='pmag_results.txt', critout='pmag_criteria.txt', instout='magic_instruments.txt', plotsites=False, fmt='svg', dir_path='.', cors=[], priorities=['DA-AC-ARM', 'DA-AC-TRM'], coord='g', user='', vgps_level='site', do_site_intensity=True, DefaultAge=["none"], avg_directions_by_sample=False, avg_intensities_by_sample=False, avg_all_components=False, avg_by_polarity=False, skip_directions=False, skip_intensities=False, use_sample_latitude=False, use_paleolatitude=False, use_criteria='default'):
"""
Writes magic_instruments, er_specimens, pmag_samples, pmag_sites, pmag_criteria, and pmag_results. The data used to write this is obtained by reading a pmag_speciemns, a magic_measurements, a er_samples, a er_sites, a er_ages.
@param -> infile: path from the WD to the pmag speciemns table
@param -> measfile: path from the WD to the magic measurement file
@param -> sampfile: path from the WD to the er sample file
@param -> sitefile: path from the WD to the er sites data file
@param -> agefile: path from the WD to the er ages data file
@param -> specout: path from the WD to the place to write the er specimens data file
@param -> sampout: path from the WD to the place to write the pmag samples data file
@param -> siteout: path from the WD to the place to write the pmag sites data file
@param -> resout: path from the WD to the place to write the pmag results data file
@param -> critout: path from the WD to the place to write the pmag criteria file
@param -> instout: path from th WD to the place to write the magic instruments file
@param -> documentation incomplete if you know more about the purpose of the parameters in this function and it's side effects please extend and complete this string
"""
# initialize some variables
plotsites = False # cannot use draw_figs from within ipmag
Comps = [] # list of components
version_num = pmag.get_version()
args = sys.argv
model_lat_file = ""
Dcrit, Icrit, nocrit = 0, 0, 0
corrections = []
nocorrection = ['DA-NL', 'DA-AC', 'DA-CR']
# do some data adjustments
for cor in cors:
nocorrection.remove('DA-' + cor)
corrections.append('DA-' + cor)
for p in priorities:
if not p.startswith('DA-AC-'):
p = 'DA-AC-' + p
# translate coord into coords
if coord == 's':
coords = ['-1']
if coord == 'g':
coords = ['0']
if coord == 't':
coords = ['100']
if coord == 'b':
coords = ['0', '100']
if vgps_level == 'sample':
vgps = 1 # save sample level VGPS/VADMs
else:
vgps = 0 # site level
if do_site_intensity:
nositeints = 0
else:
nositeints = 1
# chagne these all to True/False instead of 1/0
if not skip_intensities:
# set model lat and
if use_sample_latitude and use_paleolatitude:
print("you should set a paleolatitude file OR use present day lat - not both")
return False
elif use_sample_latitude:
get_model_lat = 1
elif use_paleolatitude:
get_model_lat = 2
try:
model_lat_file = dir_path + '/' + args[ind + 1]
get_model_lat = 2
mlat = open(model_lat_file, 'r')
ModelLats = []
for line in mlat.readlines():
ModelLat = {}
tmp = line.split()
ModelLat["er_site_name"] = tmp[0]
ModelLat["site_model_lat"] = tmp[1]
ModelLat["er_sample_name"] = tmp[0]
ModelLat["sample_lat"] = tmp[1]
ModelLats.append(ModelLat)
mlat.clos()
except:
print("use_paleolatitude option requires a valid paleolatitude file")
else:
get_model_lat = 0 # skips VADM calculation entirely
if plotsites and not skip_directions: # plot by site - set up plot window
EQ = {}
EQ['eqarea'] = 1
# define figure 1 as equal area projection
pmagplotlib.plot_init(EQ['eqarea'], 5, 5)
# I don't know why this has to be here, but otherwise the first plot
# never plots...
pmagplotlib.plot_net(EQ['eqarea'])
pmagplotlib.draw_figs(EQ)
infile = os.path.join(dir_path, infile)
measfile = os.path.join(dir_path, measfile)
instout = os.path.join(dir_path, instout)
sampfile = os.path.join(dir_path, sampfile)
sitefile = os.path.join(dir_path, sitefile)
agefile = os.path.join(dir_path, agefile)
specout = os.path.join(dir_path, specout)
sampout = os.path.join(dir_path, sampout)
siteout = os.path.join(dir_path, siteout)
resout = os.path.join(dir_path, resout)
critout = os.path.join(dir_path, critout)
if use_criteria == 'none':
Dcrit, Icrit, nocrit = 1, 1, 1 # no selection criteria
crit_data = pmag.default_criteria(nocrit)
elif use_criteria == 'default':
crit_data = pmag.default_criteria(nocrit) # use default criteria
elif use_criteria == 'existing':
crit_data, file_type = pmag.magic_read(
critout) # use pmag_criteria file
print("Acceptance criteria read in from ", critout)
accept = {}
for critrec in crit_data:
for key in list(critrec.keys()):
# need to migrate specimen_dang to specimen_int_dang for intensity
# data using old format
if 'IE-SPEC' in list(critrec.keys()) and 'specimen_dang' in list(critrec.keys()) and 'specimen_int_dang' not in list(critrec.keys()):
critrec['specimen_int_dang'] = critrec['specimen_dang']
del critrec['specimen_dang']
# need to get rid of ron shaars sample_int_sigma_uT
if 'sample_int_sigma_uT' in list(critrec.keys()):
critrec['sample_int_sigma'] = '%10.3e' % (
eval(critrec['sample_int_sigma_uT']) * 1e-6)
if key not in list(accept.keys()) and critrec[key] != '':
accept[key] = critrec[key]
if use_criteria == 'default':
pmag.magic_write(critout, [accept], 'pmag_criteria')
print("\n Pmag Criteria stored in ", critout, '\n')
# now we're done slow dancing
# read in site data - has the lats and lons
SiteNFO, file_type = pmag.magic_read(sitefile)
# read in site data - has the lats and lons
SampNFO, file_type = pmag.magic_read(sampfile)
# find all the sites with height info.
height_nfo = pmag.get_dictitem(SiteNFO, 'site_height', '', 'F')
if agefile:
AgeNFO, file_type = pmag.magic_read(
agefile) # read in the age information
# read in specimen interpretations
Data, file_type = pmag.magic_read(infile)
# retrieve specimens with intensity data
IntData = pmag.get_dictitem(Data, 'specimen_int', '', 'F')
comment, orient = "", []
samples, sites = [], []
for rec in Data: # run through the data filling in missing keys and finding all components, coordinates available
# fill in missing fields, collect unique sample and site names
if 'er_sample_name' not in list(rec.keys()):
rec['er_sample_name'] = ""
elif rec['er_sample_name'] not in samples:
samples.append(rec['er_sample_name'])
if 'er_site_name' not in list(rec.keys()):
rec['er_site_name'] = ""
elif rec['er_site_name'] not in sites:
sites.append(rec['er_site_name'])
if 'specimen_int' not in list(rec.keys()):
rec['specimen_int'] = ''
if 'specimen_comp_name' not in list(rec.keys()) or rec['specimen_comp_name'] == "":
rec['specimen_comp_name'] = 'A'
if rec['specimen_comp_name'] not in Comps:
Comps.append(rec['specimen_comp_name'])
rec['specimen_tilt_correction'] = rec['specimen_tilt_correction'].strip(
'\n')
if "specimen_tilt_correction" not in list(rec.keys()):
rec["specimen_tilt_correction"] = "-1" # assume sample coordinates
if rec["specimen_tilt_correction"] not in orient:
# collect available coordinate systems
orient.append(rec["specimen_tilt_correction"])
if "specimen_direction_type" not in list(rec.keys()):
# assume direction is line - not plane
rec["specimen_direction_type"] = 'l'
if "specimen_dec" not in list(rec.keys()):
# if no declination, set direction type to blank
rec["specimen_direction_type"] = ''
if "specimen_n" not in list(rec.keys()):
rec["specimen_n"] = '' # put in n
if "specimen_alpha95" not in list(rec.keys()):
rec["specimen_alpha95"] = '' # put in alpha95
if "magic_method_codes" not in list(rec.keys()):
rec["magic_method_codes"] = ''
# start parsing data into SpecDirs, SpecPlanes, SpecInts
SpecInts, SpecDirs, SpecPlanes = [], [], []
samples.sort() # get sorted list of samples and sites
sites.sort()
if not skip_intensities: # don't skip intensities
# retrieve specimens with intensity data
IntData = pmag.get_dictitem(Data, 'specimen_int', '', 'F')
if nocrit == 0: # use selection criteria
for rec in IntData: # do selection criteria
kill = pmag.grade(rec, accept, 'specimen_int')
if len(kill) == 0:
# intensity record to be included in sample, site
# calculations
SpecInts.append(rec)
else:
SpecInts = IntData[:] # take everything - no selection criteria
# check for required data adjustments
if len(corrections) > 0 and len(SpecInts) > 0:
for cor in corrections:
# only take specimens with the required corrections
SpecInts = pmag.get_dictitem(
SpecInts, 'magic_method_codes', cor, 'has')
if len(nocorrection) > 0 and len(SpecInts) > 0:
for cor in nocorrection:
# exclude the corrections not specified for inclusion
SpecInts = pmag.get_dictitem(
SpecInts, 'magic_method_codes', cor, 'not')
# take top priority specimen of its name in remaining specimens (only one
# per customer)
PrioritySpecInts = []
specimens = pmag.get_specs(SpecInts) # get list of uniq specimen names
for spec in specimens:
# all the records for this specimen
ThisSpecRecs = pmag.get_dictitem(
SpecInts, 'er_specimen_name', spec, 'T')
if len(ThisSpecRecs) == 1:
PrioritySpecInts.append(ThisSpecRecs[0])
elif len(ThisSpecRecs) > 1: # more than one
prec = []
for p in priorities:
# all the records for this specimen
ThisSpecRecs = pmag.get_dictitem(
SpecInts, 'magic_method_codes', p, 'has')
if len(ThisSpecRecs) > 0:
prec.append(ThisSpecRecs[0])
PrioritySpecInts.append(prec[0]) # take the best one
SpecInts = PrioritySpecInts # this has the first specimen record
if not skip_directions: # don't skip directions
# retrieve specimens with directed lines and planes
AllDirs = pmag.get_dictitem(Data, 'specimen_direction_type', '', 'F')
# get all specimens with specimen_n information
Ns = pmag.get_dictitem(AllDirs, 'specimen_n', '', 'F')
if nocrit != 1: # use selection criteria
for rec in Ns: # look through everything with specimen_n for "good" data
kill = pmag.grade(rec, accept, 'specimen_dir')
if len(kill) == 0: # nothing killed it
SpecDirs.append(rec)
else: # no criteria
SpecDirs = AllDirs[:] # take them all
# SpecDirs is now the list of all specimen directions (lines and planes)
# that pass muster
# list of all sample data and list of those that pass the DE-SAMP criteria
PmagSamps, SampDirs = [], []
PmagSites, PmagResults = [], [] # list of all site data and selected results
SampInts = []
for samp in samples: # run through the sample names
if avg_directions_by_sample: # average by sample if desired
# get all the directional data for this sample
SampDir = pmag.get_dictitem(SpecDirs, 'er_sample_name', samp, 'T')
if len(SampDir) > 0: # there are some directions
for coord in coords: # step through desired coordinate systems
# get all the directions for this sample
CoordDir = pmag.get_dictitem(
SampDir, 'specimen_tilt_correction', coord, 'T')
if len(CoordDir) > 0: # there are some with this coordinate system
if not avg_all_components: # look component by component
for comp in Comps:
# get all directions from this component
CompDir = pmag.get_dictitem(
CoordDir, 'specimen_comp_name', comp, 'T')
if len(CompDir) > 0: # there are some
# get a sample average from all specimens
PmagSampRec = pmag.lnpbykey(
CompDir, 'sample', 'specimen')
# decorate the sample record
PmagSampRec["er_location_name"] = CompDir[0]['er_location_name']
PmagSampRec["er_site_name"] = CompDir[0]['er_site_name']
PmagSampRec["er_sample_name"] = samp
PmagSampRec["er_citation_names"] = "This study"
PmagSampRec["er_analyst_mail_names"] = user
PmagSampRec['magic_software_packages'] = version_num
if CompDir[0]['specimen_flag'] == 'g':
PmagSampRec['sample_flag'] = 'g'
else:
PmagSampRec['sample_flag'] = 'b'
if nocrit != 1:
PmagSampRec['pmag_criteria_codes'] = "ACCEPT"
if agefile != "":
PmagSampRec = pmag.get_age(
PmagSampRec, "er_site_name", "sample_inferred_", AgeNFO, DefaultAge)
site_height = pmag.get_dictitem(
height_nfo, 'er_site_name', PmagSampRec['er_site_name'], 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec["sample_height"] = site_height[0]['site_height']
PmagSampRec['sample_comp_name'] = comp
PmagSampRec['sample_tilt_correction'] = coord
PmagSampRec['er_specimen_names'] = pmag.get_list(
CompDir, 'er_specimen_name') # get a list of the specimen names used
PmagSampRec['magic_method_codes'] = pmag.get_list(
CompDir, 'magic_method_codes') # get a list of the methods used
if nocrit != 1: # apply selection criteria
kill = pmag.grade(
PmagSampRec, accept, 'sample_dir')
else:
kill = []
if len(kill) == 0:
SampDirs.append(PmagSampRec)
if vgps == 1: # if sample level VGP info desired, do that now
PmagResRec = pmag.getsampVGP(
PmagSampRec, SiteNFO)
if PmagResRec != "":
PmagResults.append(PmagResRec)
# print(PmagSampRec)
PmagSamps.append(PmagSampRec)
if avg_all_components: # average all components together basically same as above
PmagSampRec = pmag.lnpbykey(
CoordDir, 'sample', 'specimen')
PmagSampRec["er_location_name"] = CoordDir[0]['er_location_name']
PmagSampRec["er_site_name"] = CoordDir[0]['er_site_name']
PmagSampRec["er_sample_name"] = samp
PmagSampRec["er_citation_names"] = "This study"
PmagSampRec["er_analyst_mail_names"] = user
PmagSampRec['magic_software_packages'] = version_num
if all(i['specimen_flag'] == 'g' for i in CoordDir):
PmagSampRec['sample_flag'] = 'g'
else:
PmagSampRec['sample_flag'] = 'b'
if nocrit != 1:
PmagSampRec['pmag_criteria_codes'] = ""
if agefile != "":
PmagSampRec = pmag.get_age(
PmagSampRec, "er_site_name", "sample_inferred_", AgeNFO, DefaultAge)
site_height = pmag.get_dictitem(
height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec["sample_height"] = site_height[0]['site_height']
PmagSampRec['sample_tilt_correction'] = coord
PmagSampRec['sample_comp_name'] = pmag.get_list(
CoordDir, 'specimen_comp_name') # get components used
PmagSampRec['er_specimen_names'] = pmag.get_list(
CoordDir, 'er_specimen_name') # get specimne names averaged
PmagSampRec['magic_method_codes'] = pmag.get_list(
CoordDir, 'magic_method_codes') # assemble method codes
if nocrit != 1: # apply selection criteria
kill = pmag.grade(
PmagSampRec, accept, 'sample_dir')
if len(kill) == 0: # passes the mustard
SampDirs.append(PmagSampRec)
if vgps == 1:
PmagResRec = pmag.getsampVGP(
PmagSampRec, SiteNFO)
if PmagResRec != "":
PmagResults.append(PmagResRec)
else: # take everything
SampDirs.append(PmagSampRec)
if vgps == 1:
PmagResRec = pmag.getsampVGP(
PmagSampRec, SiteNFO)
if PmagResRec != "":
PmagResults.append(PmagResRec)
PmagSamps.append(PmagSampRec)
if avg_intensities_by_sample: # average by sample if desired
# get all the intensity data for this sample
SampI = pmag.get_dictitem(SpecInts, 'er_sample_name', samp, 'T')
if len(SampI) > 0: # there are some
# get average intensity stuff
PmagSampRec = pmag.average_int(SampI, 'specimen', 'sample')
# decorate sample record
PmagSampRec["sample_description"] = "sample intensity"
PmagSampRec["sample_direction_type"] = ""
PmagSampRec['er_site_name'] = SampI[0]["er_site_name"]
PmagSampRec['er_sample_name'] = samp
PmagSampRec['er_location_name'] = SampI[0]["er_location_name"]
PmagSampRec["er_citation_names"] = "This study"
PmagSampRec["er_analyst_mail_names"] = user
if agefile != "":
PmagSampRec = pmag.get_age(
PmagSampRec, "er_site_name", "sample_inferred_", AgeNFO, DefaultAge)
site_height = pmag.get_dictitem(
height_nfo, 'er_site_name', PmagSampRec['er_site_name'], 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec["sample_height"] = site_height[0]['site_height']
PmagSampRec['er_specimen_names'] = pmag.get_list(
SampI, 'er_specimen_name')
PmagSampRec['magic_method_codes'] = pmag.get_list(
SampI, 'magic_method_codes')
if nocrit != 1: # apply criteria!
kill = pmag.grade(PmagSampRec, accept, 'sample_int')
if len(kill) == 0:
PmagSampRec['pmag_criteria_codes'] = "ACCEPT"
SampInts.append(PmagSampRec)
PmagSamps.append(PmagSampRec)
else:
PmagSampRec = {} # sample rejected
else: # no criteria
SampInts.append(PmagSampRec)
PmagSamps.append(PmagSampRec)
PmagSampRec['pmag_criteria_codes'] = ""
if vgps == 1 and get_model_lat != 0 and PmagSampRec != {}:
if get_model_lat == 1: # use sample latitude
PmagResRec = pmag.getsampVDM(PmagSampRec, SampNFO)
# get rid of the model lat key
del(PmagResRec['model_lat'])
elif get_model_lat == 2: # use model latitude
PmagResRec = pmag.getsampVDM(PmagSampRec, ModelLats)
if PmagResRec != {}:
PmagResRec['magic_method_codes'] = PmagResRec['magic_method_codes'] + ":IE-MLAT"
if PmagResRec != {}:
PmagResRec['er_specimen_names'] = PmagSampRec['er_specimen_names']
PmagResRec['er_sample_names'] = PmagSampRec['er_sample_name']
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
PmagResRec['average_int_sigma_perc'] = PmagSampRec['sample_int_sigma_perc']
PmagResRec['average_int_sigma'] = PmagSampRec['sample_int_sigma']
PmagResRec['average_int_n'] = PmagSampRec['sample_int_n']
PmagResRec['vadm_n'] = PmagSampRec['sample_int_n']
PmagResRec['data_type'] = 'i'
PmagResults.append(PmagResRec)
if len(PmagSamps) > 0:
# fill in missing keys from different types of records
TmpSamps, keylist = pmag.fillkeys(PmagSamps)
# save in sample output file
pmag.magic_write(sampout, TmpSamps, 'pmag_samples')
print(' sample averages written to ', sampout)
#
# create site averages from specimens or samples as specified
#
for site in sites:
for coord in coords:
if not avg_directions_by_sample:
key, dirlist = 'specimen', SpecDirs # if specimen averages at site level desired
if avg_directions_by_sample:
key, dirlist = 'sample', SampDirs # if sample averages at site level desired
# get all the sites with directions
tmp = pmag.get_dictitem(dirlist, 'er_site_name', site, 'T')
# use only the last coordinate if avg_all_components==False
tmp1 = pmag.get_dictitem(tmp, key + '_tilt_correction', coord, 'T')
# fish out site information (lat/lon, etc.)
sd = pmag.get_dictitem(SiteNFO, 'er_site_name', site, 'T')
if len(sd) > 0:
sitedat = sd[0]
if not avg_all_components: # do component wise averaging
for comp in Comps:
# get all components comp
siteD = pmag.get_dictitem(
tmp1, key + '_comp_name', comp, 'T')
# remove bad data from means
quality_siteD = []
# remove any records for which specimen_flag or sample_flag are 'b'
# assume 'g' if flag is not provided
for rec in siteD:
spec_quality = rec.get('specimen_flag', 'g')
samp_quality = rec.get('sample_flag', 'g')
if (spec_quality == 'g') and (samp_quality == 'g'):
quality_siteD.append(rec)
siteD = quality_siteD
if len(siteD) > 0: # there are some for this site and component name
# get an average for this site
PmagSiteRec = pmag.lnpbykey(siteD, 'site', key)
# decorate the site record
PmagSiteRec['site_comp_name'] = comp
PmagSiteRec["er_location_name"] = siteD[0]['er_location_name']
PmagSiteRec["er_site_name"] = siteD[0]['er_site_name']
PmagSiteRec['site_tilt_correction'] = coord
PmagSiteRec['site_comp_name'] = pmag.get_list(
siteD, key + '_comp_name')
if avg_directions_by_sample:
PmagSiteRec['er_sample_names'] = pmag.get_list(
siteD, 'er_sample_name')
else:
PmagSiteRec['er_specimen_names'] = pmag.get_list(
siteD, 'er_specimen_name')
# determine the demagnetization code (DC3,4 or 5) for this site
AFnum = len(pmag.get_dictitem(
siteD, 'magic_method_codes', 'LP-DIR-AF', 'has'))
Tnum = len(pmag.get_dictitem(
siteD, 'magic_method_codes', 'LP-DIR-T', 'has'))
DC = 3
if AFnum > 0:
DC += 1
if Tnum > 0:
DC += 1
PmagSiteRec['magic_method_codes'] = pmag.get_list(
siteD, 'magic_method_codes') + ':' + 'LP-DC' + str(DC)
PmagSiteRec['magic_method_codes'].strip(":")
if plotsites:
print(PmagSiteRec['er_site_name'])
# plot and list the data
pmagplotlib.plot_site(
EQ['eqarea'], PmagSiteRec, siteD, key)
pmagplotlib.draw_figs(EQ)
PmagSites.append(PmagSiteRec)
else: # last component only
# get the last orientation system specified
siteD = tmp1[:]
if len(siteD) > 0: # there are some
# get the average for this site
PmagSiteRec = pmag.lnpbykey(siteD, 'site', key)
# decorate the record
PmagSiteRec["er_location_name"] = siteD[0]['er_location_name']
PmagSiteRec["er_site_name"] = siteD[0]['er_site_name']
PmagSiteRec['site_comp_name'] = comp
PmagSiteRec['site_tilt_correction'] = coord
PmagSiteRec['site_comp_name'] = pmag.get_list(
siteD, key + '_comp_name')
PmagSiteRec['er_specimen_names'] = pmag.get_list(
siteD, 'er_specimen_name')
PmagSiteRec['er_sample_names'] = pmag.get_list(
siteD, 'er_sample_name')
AFnum = len(pmag.get_dictitem(
siteD, 'magic_method_codes', 'LP-DIR-AF', 'has'))
Tnum = len(pmag.get_dictitem(
siteD, 'magic_method_codes', 'LP-DIR-T', 'has'))
DC = 3
if AFnum > 0:
DC += 1
if Tnum > 0:
DC += 1
PmagSiteRec['magic_method_codes'] = pmag.get_list(
siteD, 'magic_method_codes') + ':' + 'LP-DC' + str(DC)
PmagSiteRec['magic_method_codes'].strip(":")
if not avg_directions_by_sample:
PmagSiteRec['site_comp_name'] = pmag.get_list(
siteD, key + '_comp_name')
if plotsites:
pmagplotlib.plot_site(
EQ['eqarea'], PmagSiteRec, siteD, key)
pmagplotlib.draw_figs(EQ)
PmagSites.append(PmagSiteRec)
else:
print('site information not found in er_sites for site, ',
site, ' site will be skipped')
for PmagSiteRec in PmagSites: # now decorate each dictionary some more, and calculate VGPs etc. for results table
PmagSiteRec["er_citation_names"] = "This study"
PmagSiteRec["er_analyst_mail_names"] = user
PmagSiteRec['magic_software_packages'] = version_num
if agefile != "":
PmagSiteRec = pmag.get_age(
PmagSiteRec, "er_site_name", "site_inferred_", AgeNFO, DefaultAge)
PmagSiteRec['pmag_criteria_codes'] = 'ACCEPT'
if 'site_n_lines' in list(PmagSiteRec.keys()) and 'site_n_planes' in list(PmagSiteRec.keys()) and PmagSiteRec['site_n_lines'] != "" and PmagSiteRec['site_n_planes'] != "":
if int(PmagSiteRec["site_n_planes"]) > 0:
PmagSiteRec["magic_method_codes"] = PmagSiteRec['magic_method_codes'] + ":DE-FM-LP"
elif int(PmagSiteRec["site_n_lines"]) > 2:
PmagSiteRec["magic_method_codes"] = PmagSiteRec['magic_method_codes'] + ":DE-FM"
kill = pmag.grade(PmagSiteRec, accept, 'site_dir')
if len(kill) == 0:
PmagResRec = {} # set up dictionary for the pmag_results table entry
PmagResRec['data_type'] = 'i' # decorate it a bit
PmagResRec['magic_software_packages'] = version_num
PmagSiteRec['site_description'] = 'Site direction included in results table'
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
dec = float(PmagSiteRec["site_dec"])
inc = float(PmagSiteRec["site_inc"])
if 'site_alpha95' in list(PmagSiteRec.keys()) and PmagSiteRec['site_alpha95'] != "":
a95 = float(PmagSiteRec["site_alpha95"])
else:
a95 = 180.
sitedat = pmag.get_dictitem(SiteNFO, 'er_site_name', PmagSiteRec['er_site_name'], 'T')[
0] # fish out site information (lat/lon, etc.)
lat = float(sitedat['site_lat'])
lon = float(sitedat['site_lon'])
plon, plat, dp, dm = pmag.dia_vgp(
dec, inc, a95, lat, lon) # get the VGP for this site
if PmagSiteRec['site_tilt_correction'] == '-1':
C = ' (spec coord) '
if PmagSiteRec['site_tilt_correction'] == '0':
C = ' (geog. coord) '
if PmagSiteRec['site_tilt_correction'] == '100':
C = ' (strat. coord) '
PmagResRec["pmag_result_name"] = "VGP Site: " + \
PmagSiteRec["er_site_name"] # decorate some more
PmagResRec["result_description"] = "Site VGP, coord system = " + \
str(coord) + ' component: ' + comp
PmagResRec['er_site_names'] = PmagSiteRec['er_site_name']
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
PmagResRec['er_citation_names'] = 'This study'
PmagResRec['er_analyst_mail_names'] = user
PmagResRec["er_location_names"] = PmagSiteRec["er_location_name"]
if avg_directions_by_sample:
PmagResRec["er_sample_names"] = PmagSiteRec["er_sample_names"]
else:
PmagResRec["er_specimen_names"] = PmagSiteRec["er_specimen_names"]
PmagResRec["tilt_correction"] = PmagSiteRec['site_tilt_correction']
PmagResRec["pole_comp_name"] = PmagSiteRec['site_comp_name']
PmagResRec["average_dec"] = PmagSiteRec["site_dec"]
PmagResRec["average_inc"] = PmagSiteRec["site_inc"]
PmagResRec["average_alpha95"] = PmagSiteRec["site_alpha95"]
PmagResRec["average_n"] = PmagSiteRec["site_n"]
PmagResRec["average_n_lines"] = PmagSiteRec["site_n_lines"]
PmagResRec["average_n_planes"] = PmagSiteRec["site_n_planes"]
PmagResRec["vgp_n"] = PmagSiteRec["site_n"]
PmagResRec["average_k"] = PmagSiteRec["site_k"]
PmagResRec["average_r"] = PmagSiteRec["site_r"]
PmagResRec["average_lat"] = '%10.4f ' % (lat)
PmagResRec["average_lon"] = '%10.4f ' % (lon)
if agefile != "":
PmagResRec = pmag.get_age(
PmagResRec, "er_site_names", "average_", AgeNFO, DefaultAge)
site_height = pmag.get_dictitem(
height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
PmagResRec["average_height"] = site_height[0]['site_height']
PmagResRec["vgp_lat"] = '%7.1f ' % (plat)
PmagResRec["vgp_lon"] = '%7.1f ' % (plon)
PmagResRec["vgp_dp"] = '%7.1f ' % (dp)
PmagResRec["vgp_dm"] = '%7.1f ' % (dm)
PmagResRec["magic_method_codes"] = PmagSiteRec["magic_method_codes"]
if '0' in PmagSiteRec['site_tilt_correction'] and "DA-DIR-GEO" not in PmagSiteRec['magic_method_codes']:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ":DA-DIR-GEO"
if '100' in PmagSiteRec['site_tilt_correction'] and "DA-DIR-TILT" not in PmagSiteRec['magic_method_codes']:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ":DA-DIR-TILT"
PmagSiteRec['site_polarity'] = ""
if avg_by_polarity: # assign polarity based on angle of pole lat to spin axis - may want to re-think this sometime
angle = pmag.angle([0, 0], [0, (90 - plat)])
if angle <= 55.:
PmagSiteRec["site_polarity"] = 'n'
if angle > 55. and angle < 125.:
PmagSiteRec["site_polarity"] = 't'
if angle >= 125.:
PmagSiteRec["site_polarity"] = 'r'
PmagResults.append(PmagResRec)
if avg_by_polarity:
# find the tilt corrected data
crecs = pmag.get_dictitem(
PmagSites, 'site_tilt_correction', '100', 'T')
if len(crecs) < 2:
# if there aren't any, find the geographic corrected data
crecs = pmag.get_dictitem(
PmagSites, 'site_tilt_correction', '0', 'T')
if len(crecs) > 2: # if there are some,
comp = pmag.get_list(crecs, 'site_comp_name').split(':')[
0] # find the first component
# fish out all of the first component
crecs = pmag.get_dictitem(crecs, 'site_comp_name', comp, 'T')
precs = []
for rec in crecs:
precs.append({'dec': rec['site_dec'], 'inc': rec['site_inc'],
'name': rec['er_site_name'], 'loc': rec['er_location_name']})
# calculate average by polarity
polpars = pmag.fisher_by_pol(precs)
# hunt through all the modes (normal=A, reverse=B, all=ALL)
for mode in list(polpars.keys()):
PolRes = {}
PolRes['er_citation_names'] = 'This study'
PolRes["pmag_result_name"] = "Polarity Average: Polarity " + mode
PolRes["data_type"] = "a"
PolRes["average_dec"] = '%7.1f' % (polpars[mode]['dec'])
PolRes["average_inc"] = '%7.1f' % (polpars[mode]['inc'])
PolRes["average_n"] = '%i' % (polpars[mode]['n'])
PolRes["average_r"] = '%5.4f' % (polpars[mode]['r'])
PolRes["average_k"] = '%6.0f' % (polpars[mode]['k'])
PolRes["average_alpha95"] = '%7.1f' % (
polpars[mode]['alpha95'])
PolRes['er_site_names'] = polpars[mode]['sites']
PolRes['er_location_names'] = polpars[mode]['locs']
PolRes['magic_software_packages'] = version_num
PmagResults.append(PolRes)
if not skip_intensities and nositeints != 1:
for site in sites: # now do intensities for each site
if plotsites:
print(site)
if not avg_intensities_by_sample:
key, intlist = 'specimen', SpecInts # if using specimen level data
if avg_intensities_by_sample:
key, intlist = 'sample', PmagSamps # if using sample level data
# get all the intensities for this site
Ints = pmag.get_dictitem(intlist, 'er_site_name', site, 'T')
if len(Ints) > 0: # there are some
# get average intensity stuff for site table
PmagSiteRec = pmag.average_int(Ints, key, 'site')
# get average intensity stuff for results table
PmagResRec = pmag.average_int(Ints, key, 'average')
if plotsites: # if site by site examination requested - print this site out to the screen
for rec in Ints:
print(rec['er_' + key + '_name'], ' %7.1f' %
(1e6 * float(rec[key + '_int'])))
if len(Ints) > 1:
print('Average: ', '%7.1f' % (
1e6 * float(PmagResRec['average_int'])), 'N: ', len(Ints))
print('Sigma: ', '%7.1f' % (
1e6 * float(PmagResRec['average_int_sigma'])), 'Sigma %: ', PmagResRec['average_int_sigma_perc'])
input('Press any key to continue\n')
er_location_name = Ints[0]["er_location_name"]
# decorate the records
PmagSiteRec["er_location_name"] = er_location_name
PmagSiteRec["er_citation_names"] = "This study"
PmagResRec["er_location_names"] = er_location_name
PmagResRec["er_citation_names"] = "This study"
PmagSiteRec["er_analyst_mail_names"] = user
PmagResRec["er_analyst_mail_names"] = user
PmagResRec["data_type"] = 'i'
if not avg_intensities_by_sample:
PmagSiteRec['er_specimen_names'] = pmag.get_list(
Ints, 'er_specimen_name') # list of all specimens used
PmagResRec['er_specimen_names'] = pmag.get_list(
Ints, 'er_specimen_name')
PmagSiteRec['er_sample_names'] = pmag.get_list(
Ints, 'er_sample_name') # list of all samples used
PmagResRec['er_sample_names'] = pmag.get_list(
Ints, 'er_sample_name')
PmagSiteRec['er_site_name'] = site
PmagResRec['er_site_names'] = site
PmagSiteRec['magic_method_codes'] = pmag.get_list(
Ints, 'magic_method_codes')
PmagResRec['magic_method_codes'] = pmag.get_list(
Ints, 'magic_method_codes')
kill = pmag.grade(PmagSiteRec, accept, 'site_int')
if nocrit == 1 or len(kill) == 0:
b, sig = float(PmagResRec['average_int']), ""
if(PmagResRec['average_int_sigma']) != "":
sig = float(PmagResRec['average_int_sigma'])
# fish out site direction
sdir = pmag.get_dictitem(
PmagResults, 'er_site_names', site, 'T')
# get the VDM for this record using last average
# inclination (hope it is the right one!)
if len(sdir) > 0 and sdir[-1]['average_inc'] != "":
inc = float(sdir[0]['average_inc'])
# get magnetic latitude using dipole formula
mlat = pmag.magnetic_lat(inc)
# get VDM with magnetic latitude
PmagResRec["vdm"] = '%8.3e ' % (pmag.b_vdm(b, mlat))
PmagResRec["vdm_n"] = PmagResRec['average_int_n']
if 'average_int_sigma' in list(PmagResRec.keys()) and PmagResRec['average_int_sigma'] != "":
vdm_sig = pmag.b_vdm(
float(PmagResRec['average_int_sigma']), mlat)
PmagResRec["vdm_sigma"] = '%8.3e ' % (vdm_sig)
else:
PmagResRec["vdm_sigma"] = ""
mlat = "" # define a model latitude
if get_model_lat == 1: # use present site latitude
mlats = pmag.get_dictitem(
SiteNFO, 'er_site_name', site, 'T')
if len(mlats) > 0:
mlat = mlats[0]['site_lat']
# use a model latitude from some plate reconstruction model
# (or something)
elif get_model_lat == 2:
mlats = pmag.get_dictitem(
ModelLats, 'er_site_name', site, 'T')
if len(mlats) > 0:
PmagResRec['model_lat'] = mlats[0]['site_model_lat']
mlat = PmagResRec['model_lat']
if mlat != "":
# get the VADM using the desired latitude
PmagResRec["vadm"] = '%8.3e ' % (
pmag.b_vdm(b, float(mlat)))
if sig != "":
vdm_sig = pmag.b_vdm(
float(PmagResRec['average_int_sigma']), float(mlat))
PmagResRec["vadm_sigma"] = '%8.3e ' % (vdm_sig)
PmagResRec["vadm_n"] = PmagResRec['average_int_n']
else:
PmagResRec["vadm_sigma"] = ""
# fish out site information (lat/lon, etc.)
sitedat = pmag.get_dictitem(
SiteNFO, 'er_site_name', PmagSiteRec['er_site_name'], 'T')
if len(sitedat) > 0:
sitedat = sitedat[0]
PmagResRec['average_lat'] = sitedat['site_lat']
PmagResRec['average_lon'] = sitedat['site_lon']
else:
PmagResRec['average_lon'] = 'UNKNOWN'
PmagResRec['average_lon'] = 'UNKNOWN'
PmagResRec['magic_software_packages'] = version_num
PmagResRec["pmag_result_name"] = "V[A]DM: Site " + site
PmagResRec["result_description"] = "V[A]DM of site"
PmagResRec["pmag_criteria_codes"] = "ACCEPT"
if agefile != "":
PmagResRec = pmag.get_age(
PmagResRec, "er_site_names", "average_", AgeNFO, DefaultAge)
site_height = pmag.get_dictitem(
height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
PmagResRec["average_height"] = site_height[0]['site_height']
PmagSites.append(PmagSiteRec)
PmagResults.append(PmagResRec)
if len(PmagSites) > 0:
Tmp, keylist = pmag.fillkeys(PmagSites)
pmag.magic_write(siteout, Tmp, 'pmag_sites')
print(' sites written to ', siteout)
else:
print("No Site level table")
if len(PmagResults) > 0:
TmpRes, keylist = pmag.fillkeys(PmagResults)
pmag.magic_write(resout, TmpRes, 'pmag_results')
print(' results written to ', resout)
else:
print("No Results level table") | def function[specimens_results_magic, parameter[infile, measfile, sampfile, sitefile, agefile, specout, sampout, siteout, resout, critout, instout, plotsites, fmt, dir_path, cors, priorities, coord, user, vgps_level, do_site_intensity, DefaultAge, avg_directions_by_sample, avg_intensities_by_sample, avg_all_components, avg_by_polarity, skip_directions, skip_intensities, use_sample_latitude, use_paleolatitude, use_criteria]]:
constant[
Writes magic_instruments, er_specimens, pmag_samples, pmag_sites, pmag_criteria, and pmag_results. The data used to write this is obtained by reading a pmag_speciemns, a magic_measurements, a er_samples, a er_sites, a er_ages.
@param -> infile: path from the WD to the pmag speciemns table
@param -> measfile: path from the WD to the magic measurement file
@param -> sampfile: path from the WD to the er sample file
@param -> sitefile: path from the WD to the er sites data file
@param -> agefile: path from the WD to the er ages data file
@param -> specout: path from the WD to the place to write the er specimens data file
@param -> sampout: path from the WD to the place to write the pmag samples data file
@param -> siteout: path from the WD to the place to write the pmag sites data file
@param -> resout: path from the WD to the place to write the pmag results data file
@param -> critout: path from the WD to the place to write the pmag criteria file
@param -> instout: path from th WD to the place to write the magic instruments file
@param -> documentation incomplete if you know more about the purpose of the parameters in this function and it's side effects please extend and complete this string
]
variable[plotsites] assign[=] constant[False]
variable[Comps] assign[=] list[[]]
variable[version_num] assign[=] call[name[pmag].get_version, parameter[]]
variable[args] assign[=] name[sys].argv
variable[model_lat_file] assign[=] constant[]
<ast.Tuple object at 0x7da18bc71060> assign[=] tuple[[<ast.Constant object at 0x7da18bc70d30>, <ast.Constant object at 0x7da18bc71270>, <ast.Constant object at 0x7da18bc73bb0>]]
variable[corrections] assign[=] list[[]]
variable[nocorrection] assign[=] list[[<ast.Constant object at 0x7da18bc73220>, <ast.Constant object at 0x7da18bc70220>, <ast.Constant object at 0x7da18bc71450>]]
for taget[name[cor]] in starred[name[cors]] begin[:]
call[name[nocorrection].remove, parameter[binary_operation[constant[DA-] + name[cor]]]]
call[name[corrections].append, parameter[binary_operation[constant[DA-] + name[cor]]]]
for taget[name[p]] in starred[name[priorities]] begin[:]
if <ast.UnaryOp object at 0x7da18bc719c0> begin[:]
variable[p] assign[=] binary_operation[constant[DA-AC-] + name[p]]
if compare[name[coord] equal[==] constant[s]] begin[:]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da18bc73c70>]]
if compare[name[coord] equal[==] constant[g]] begin[:]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da18bc738b0>]]
if compare[name[coord] equal[==] constant[t]] begin[:]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da18bc73ee0>]]
if compare[name[coord] equal[==] constant[b]] begin[:]
variable[coords] assign[=] list[[<ast.Constant object at 0x7da18bc72a40>, <ast.Constant object at 0x7da18bc71ed0>]]
if compare[name[vgps_level] equal[==] constant[sample]] begin[:]
variable[vgps] assign[=] constant[1]
if name[do_site_intensity] begin[:]
variable[nositeints] assign[=] constant[0]
if <ast.UnaryOp object at 0x7da18bc70040> begin[:]
if <ast.BoolOp object at 0x7da18bc72fb0> begin[:]
call[name[print], parameter[constant[you should set a paleolatitude file OR use present day lat - not both]]]
return[constant[False]]
if <ast.BoolOp object at 0x7da20e9604f0> begin[:]
variable[EQ] assign[=] dictionary[[], []]
call[name[EQ]][constant[eqarea]] assign[=] constant[1]
call[name[pmagplotlib].plot_init, parameter[call[name[EQ]][constant[eqarea]], constant[5], constant[5]]]
call[name[pmagplotlib].plot_net, parameter[call[name[EQ]][constant[eqarea]]]]
call[name[pmagplotlib].draw_figs, parameter[name[EQ]]]
variable[infile] assign[=] call[name[os].path.join, parameter[name[dir_path], name[infile]]]
variable[measfile] assign[=] call[name[os].path.join, parameter[name[dir_path], name[measfile]]]
variable[instout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[instout]]]
variable[sampfile] assign[=] call[name[os].path.join, parameter[name[dir_path], name[sampfile]]]
variable[sitefile] assign[=] call[name[os].path.join, parameter[name[dir_path], name[sitefile]]]
variable[agefile] assign[=] call[name[os].path.join, parameter[name[dir_path], name[agefile]]]
variable[specout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[specout]]]
variable[sampout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[sampout]]]
variable[siteout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[siteout]]]
variable[resout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[resout]]]
variable[critout] assign[=] call[name[os].path.join, parameter[name[dir_path], name[critout]]]
if compare[name[use_criteria] equal[==] constant[none]] begin[:]
<ast.Tuple object at 0x7da2041d9e10> assign[=] tuple[[<ast.Constant object at 0x7da2041d8e80>, <ast.Constant object at 0x7da2041da890>, <ast.Constant object at 0x7da2041d9540>]]
variable[crit_data] assign[=] call[name[pmag].default_criteria, parameter[name[nocrit]]]
variable[accept] assign[=] dictionary[[], []]
for taget[name[critrec]] in starred[name[crit_data]] begin[:]
for taget[name[key]] in starred[call[name[list], parameter[call[name[critrec].keys, parameter[]]]]] begin[:]
if <ast.BoolOp object at 0x7da2041dbcd0> begin[:]
call[name[critrec]][constant[specimen_int_dang]] assign[=] call[name[critrec]][constant[specimen_dang]]
<ast.Delete object at 0x7da2041da1a0>
if compare[constant[sample_int_sigma_uT] in call[name[list], parameter[call[name[critrec].keys, parameter[]]]]] begin[:]
call[name[critrec]][constant[sample_int_sigma]] assign[=] binary_operation[constant[%10.3e] <ast.Mod object at 0x7da2590d6920> binary_operation[call[name[eval], parameter[call[name[critrec]][constant[sample_int_sigma_uT]]]] * constant[1e-06]]]
if <ast.BoolOp object at 0x7da2041d8df0> begin[:]
call[name[accept]][name[key]] assign[=] call[name[critrec]][name[key]]
if compare[name[use_criteria] equal[==] constant[default]] begin[:]
call[name[pmag].magic_write, parameter[name[critout], list[[<ast.Name object at 0x7da2041db0d0>]], constant[pmag_criteria]]]
call[name[print], parameter[constant[
Pmag Criteria stored in ], name[critout], constant[
]]]
<ast.Tuple object at 0x7da2041d9990> assign[=] call[name[pmag].magic_read, parameter[name[sitefile]]]
<ast.Tuple object at 0x7da2041d89a0> assign[=] call[name[pmag].magic_read, parameter[name[sampfile]]]
variable[height_nfo] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteNFO], constant[site_height], constant[], constant[F]]]
if name[agefile] begin[:]
<ast.Tuple object at 0x7da2041da140> assign[=] call[name[pmag].magic_read, parameter[name[agefile]]]
<ast.Tuple object at 0x7da2041d9000> assign[=] call[name[pmag].magic_read, parameter[name[infile]]]
variable[IntData] assign[=] call[name[pmag].get_dictitem, parameter[name[Data], constant[specimen_int], constant[], constant[F]]]
<ast.Tuple object at 0x7da2041d8fa0> assign[=] tuple[[<ast.Constant object at 0x7da2041db370>, <ast.List object at 0x7da2041d9420>]]
<ast.Tuple object at 0x7da2041db7f0> assign[=] tuple[[<ast.List object at 0x7da2041dbbe0>, <ast.List object at 0x7da2041d8040>]]
for taget[name[rec]] in starred[name[Data]] begin[:]
if compare[constant[er_sample_name] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[er_sample_name]] assign[=] constant[]
if compare[constant[er_site_name] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[er_site_name]] assign[=] constant[]
if compare[constant[specimen_int] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_int]] assign[=] constant[]
if <ast.BoolOp object at 0x7da20c7968c0> begin[:]
call[name[rec]][constant[specimen_comp_name]] assign[=] constant[A]
if compare[call[name[rec]][constant[specimen_comp_name]] <ast.NotIn object at 0x7da2590d7190> name[Comps]] begin[:]
call[name[Comps].append, parameter[call[name[rec]][constant[specimen_comp_name]]]]
call[name[rec]][constant[specimen_tilt_correction]] assign[=] call[call[name[rec]][constant[specimen_tilt_correction]].strip, parameter[constant[
]]]
if compare[constant[specimen_tilt_correction] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_tilt_correction]] assign[=] constant[-1]
if compare[call[name[rec]][constant[specimen_tilt_correction]] <ast.NotIn object at 0x7da2590d7190> name[orient]] begin[:]
call[name[orient].append, parameter[call[name[rec]][constant[specimen_tilt_correction]]]]
if compare[constant[specimen_direction_type] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_direction_type]] assign[=] constant[l]
if compare[constant[specimen_dec] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_direction_type]] assign[=] constant[]
if compare[constant[specimen_n] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_n]] assign[=] constant[]
if compare[constant[specimen_alpha95] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[specimen_alpha95]] assign[=] constant[]
if compare[constant[magic_method_codes] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[rec].keys, parameter[]]]]] begin[:]
call[name[rec]][constant[magic_method_codes]] assign[=] constant[]
<ast.Tuple object at 0x7da20c7942e0> assign[=] tuple[[<ast.List object at 0x7da20c796650>, <ast.List object at 0x7da20c794e80>, <ast.List object at 0x7da20c795f00>]]
call[name[samples].sort, parameter[]]
call[name[sites].sort, parameter[]]
if <ast.UnaryOp object at 0x7da20c795510> begin[:]
variable[IntData] assign[=] call[name[pmag].get_dictitem, parameter[name[Data], constant[specimen_int], constant[], constant[F]]]
if compare[name[nocrit] equal[==] constant[0]] begin[:]
for taget[name[rec]] in starred[name[IntData]] begin[:]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[rec], name[accept], constant[specimen_int]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
call[name[SpecInts].append, parameter[name[rec]]]
if <ast.BoolOp object at 0x7da20c7c8e80> begin[:]
for taget[name[cor]] in starred[name[corrections]] begin[:]
variable[SpecInts] assign[=] call[name[pmag].get_dictitem, parameter[name[SpecInts], constant[magic_method_codes], name[cor], constant[has]]]
if <ast.BoolOp object at 0x7da20c7c83d0> begin[:]
for taget[name[cor]] in starred[name[nocorrection]] begin[:]
variable[SpecInts] assign[=] call[name[pmag].get_dictitem, parameter[name[SpecInts], constant[magic_method_codes], name[cor], constant[not]]]
variable[PrioritySpecInts] assign[=] list[[]]
variable[specimens] assign[=] call[name[pmag].get_specs, parameter[name[SpecInts]]]
for taget[name[spec]] in starred[name[specimens]] begin[:]
variable[ThisSpecRecs] assign[=] call[name[pmag].get_dictitem, parameter[name[SpecInts], constant[er_specimen_name], name[spec], constant[T]]]
if compare[call[name[len], parameter[name[ThisSpecRecs]]] equal[==] constant[1]] begin[:]
call[name[PrioritySpecInts].append, parameter[call[name[ThisSpecRecs]][constant[0]]]]
variable[SpecInts] assign[=] name[PrioritySpecInts]
if <ast.UnaryOp object at 0x7da20c7cbb80> begin[:]
variable[AllDirs] assign[=] call[name[pmag].get_dictitem, parameter[name[Data], constant[specimen_direction_type], constant[], constant[F]]]
variable[Ns] assign[=] call[name[pmag].get_dictitem, parameter[name[AllDirs], constant[specimen_n], constant[], constant[F]]]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
for taget[name[rec]] in starred[name[Ns]] begin[:]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[rec], name[accept], constant[specimen_dir]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
call[name[SpecDirs].append, parameter[name[rec]]]
<ast.Tuple object at 0x7da20c7ca740> assign[=] tuple[[<ast.List object at 0x7da20c7ca710>, <ast.List object at 0x7da20c7c9b70>]]
<ast.Tuple object at 0x7da20c7cb1f0> assign[=] tuple[[<ast.List object at 0x7da20c7c9150>, <ast.List object at 0x7da20c7cac50>]]
variable[SampInts] assign[=] list[[]]
for taget[name[samp]] in starred[name[samples]] begin[:]
if name[avg_directions_by_sample] begin[:]
variable[SampDir] assign[=] call[name[pmag].get_dictitem, parameter[name[SpecDirs], constant[er_sample_name], name[samp], constant[T]]]
if compare[call[name[len], parameter[name[SampDir]]] greater[>] constant[0]] begin[:]
for taget[name[coord]] in starred[name[coords]] begin[:]
variable[CoordDir] assign[=] call[name[pmag].get_dictitem, parameter[name[SampDir], constant[specimen_tilt_correction], name[coord], constant[T]]]
if compare[call[name[len], parameter[name[CoordDir]]] greater[>] constant[0]] begin[:]
if <ast.UnaryOp object at 0x7da20c7c8d60> begin[:]
for taget[name[comp]] in starred[name[Comps]] begin[:]
variable[CompDir] assign[=] call[name[pmag].get_dictitem, parameter[name[CoordDir], constant[specimen_comp_name], name[comp], constant[T]]]
if compare[call[name[len], parameter[name[CompDir]]] greater[>] constant[0]] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].lnpbykey, parameter[name[CompDir], constant[sample], constant[specimen]]]
call[name[PmagSampRec]][constant[er_location_name]] assign[=] call[call[name[CompDir]][constant[0]]][constant[er_location_name]]
call[name[PmagSampRec]][constant[er_site_name]] assign[=] call[call[name[CompDir]][constant[0]]][constant[er_site_name]]
call[name[PmagSampRec]][constant[er_sample_name]] assign[=] name[samp]
call[name[PmagSampRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagSampRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagSampRec]][constant[magic_software_packages]] assign[=] name[version_num]
if compare[call[call[name[CompDir]][constant[0]]][constant[specimen_flag]] equal[==] constant[g]] begin[:]
call[name[PmagSampRec]][constant[sample_flag]] assign[=] constant[g]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
call[name[PmagSampRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].get_age, parameter[name[PmagSampRec], constant[er_site_name], constant[sample_inferred_], name[AgeNFO], name[DefaultAge]]]
variable[site_height] assign[=] call[name[pmag].get_dictitem, parameter[name[height_nfo], constant[er_site_name], call[name[PmagSampRec]][constant[er_site_name]], constant[T]]]
if compare[call[name[len], parameter[name[site_height]]] greater[>] constant[0]] begin[:]
call[name[PmagSampRec]][constant[sample_height]] assign[=] call[call[name[site_height]][constant[0]]][constant[site_height]]
call[name[PmagSampRec]][constant[sample_comp_name]] assign[=] name[comp]
call[name[PmagSampRec]][constant[sample_tilt_correction]] assign[=] name[coord]
call[name[PmagSampRec]][constant[er_specimen_names]] assign[=] call[name[pmag].get_list, parameter[name[CompDir], constant[er_specimen_name]]]
call[name[PmagSampRec]][constant[magic_method_codes]] assign[=] call[name[pmag].get_list, parameter[name[CompDir], constant[magic_method_codes]]]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[PmagSampRec], name[accept], constant[sample_dir]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
call[name[SampDirs].append, parameter[name[PmagSampRec]]]
if compare[name[vgps] equal[==] constant[1]] begin[:]
variable[PmagResRec] assign[=] call[name[pmag].getsampVGP, parameter[name[PmagSampRec], name[SiteNFO]]]
if compare[name[PmagResRec] not_equal[!=] constant[]] begin[:]
call[name[PmagResults].append, parameter[name[PmagResRec]]]
call[name[PmagSamps].append, parameter[name[PmagSampRec]]]
if name[avg_all_components] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].lnpbykey, parameter[name[CoordDir], constant[sample], constant[specimen]]]
call[name[PmagSampRec]][constant[er_location_name]] assign[=] call[call[name[CoordDir]][constant[0]]][constant[er_location_name]]
call[name[PmagSampRec]][constant[er_site_name]] assign[=] call[call[name[CoordDir]][constant[0]]][constant[er_site_name]]
call[name[PmagSampRec]][constant[er_sample_name]] assign[=] name[samp]
call[name[PmagSampRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagSampRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagSampRec]][constant[magic_software_packages]] assign[=] name[version_num]
if call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b02aa4a0>]] begin[:]
call[name[PmagSampRec]][constant[sample_flag]] assign[=] constant[g]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
call[name[PmagSampRec]][constant[pmag_criteria_codes]] assign[=] constant[]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].get_age, parameter[name[PmagSampRec], constant[er_site_name], constant[sample_inferred_], name[AgeNFO], name[DefaultAge]]]
variable[site_height] assign[=] call[name[pmag].get_dictitem, parameter[name[height_nfo], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[site_height]]] greater[>] constant[0]] begin[:]
call[name[PmagSampRec]][constant[sample_height]] assign[=] call[call[name[site_height]][constant[0]]][constant[site_height]]
call[name[PmagSampRec]][constant[sample_tilt_correction]] assign[=] name[coord]
call[name[PmagSampRec]][constant[sample_comp_name]] assign[=] call[name[pmag].get_list, parameter[name[CoordDir], constant[specimen_comp_name]]]
call[name[PmagSampRec]][constant[er_specimen_names]] assign[=] call[name[pmag].get_list, parameter[name[CoordDir], constant[er_specimen_name]]]
call[name[PmagSampRec]][constant[magic_method_codes]] assign[=] call[name[pmag].get_list, parameter[name[CoordDir], constant[magic_method_codes]]]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[PmagSampRec], name[accept], constant[sample_dir]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
call[name[SampDirs].append, parameter[name[PmagSampRec]]]
if compare[name[vgps] equal[==] constant[1]] begin[:]
variable[PmagResRec] assign[=] call[name[pmag].getsampVGP, parameter[name[PmagSampRec], name[SiteNFO]]]
if compare[name[PmagResRec] not_equal[!=] constant[]] begin[:]
call[name[PmagResults].append, parameter[name[PmagResRec]]]
call[name[PmagSamps].append, parameter[name[PmagSampRec]]]
if name[avg_intensities_by_sample] begin[:]
variable[SampI] assign[=] call[name[pmag].get_dictitem, parameter[name[SpecInts], constant[er_sample_name], name[samp], constant[T]]]
if compare[call[name[len], parameter[name[SampI]]] greater[>] constant[0]] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].average_int, parameter[name[SampI], constant[specimen], constant[sample]]]
call[name[PmagSampRec]][constant[sample_description]] assign[=] constant[sample intensity]
call[name[PmagSampRec]][constant[sample_direction_type]] assign[=] constant[]
call[name[PmagSampRec]][constant[er_site_name]] assign[=] call[call[name[SampI]][constant[0]]][constant[er_site_name]]
call[name[PmagSampRec]][constant[er_sample_name]] assign[=] name[samp]
call[name[PmagSampRec]][constant[er_location_name]] assign[=] call[call[name[SampI]][constant[0]]][constant[er_location_name]]
call[name[PmagSampRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagSampRec]][constant[er_analyst_mail_names]] assign[=] name[user]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagSampRec] assign[=] call[name[pmag].get_age, parameter[name[PmagSampRec], constant[er_site_name], constant[sample_inferred_], name[AgeNFO], name[DefaultAge]]]
variable[site_height] assign[=] call[name[pmag].get_dictitem, parameter[name[height_nfo], constant[er_site_name], call[name[PmagSampRec]][constant[er_site_name]], constant[T]]]
if compare[call[name[len], parameter[name[site_height]]] greater[>] constant[0]] begin[:]
call[name[PmagSampRec]][constant[sample_height]] assign[=] call[call[name[site_height]][constant[0]]][constant[site_height]]
call[name[PmagSampRec]][constant[er_specimen_names]] assign[=] call[name[pmag].get_list, parameter[name[SampI], constant[er_specimen_name]]]
call[name[PmagSampRec]][constant[magic_method_codes]] assign[=] call[name[pmag].get_list, parameter[name[SampI], constant[magic_method_codes]]]
if compare[name[nocrit] not_equal[!=] constant[1]] begin[:]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[PmagSampRec], name[accept], constant[sample_int]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
call[name[PmagSampRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
call[name[SampInts].append, parameter[name[PmagSampRec]]]
call[name[PmagSamps].append, parameter[name[PmagSampRec]]]
if <ast.BoolOp object at 0x7da1b02b4df0> begin[:]
if compare[name[get_model_lat] equal[==] constant[1]] begin[:]
variable[PmagResRec] assign[=] call[name[pmag].getsampVDM, parameter[name[PmagSampRec], name[SampNFO]]]
<ast.Delete object at 0x7da1b03035b0>
if compare[name[PmagResRec] not_equal[!=] dictionary[[], []]] begin[:]
call[name[PmagResRec]][constant[er_specimen_names]] assign[=] call[name[PmagSampRec]][constant[er_specimen_names]]
call[name[PmagResRec]][constant[er_sample_names]] assign[=] call[name[PmagSampRec]][constant[er_sample_name]]
call[name[PmagResRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
call[name[PmagResRec]][constant[average_int_sigma_perc]] assign[=] call[name[PmagSampRec]][constant[sample_int_sigma_perc]]
call[name[PmagResRec]][constant[average_int_sigma]] assign[=] call[name[PmagSampRec]][constant[sample_int_sigma]]
call[name[PmagResRec]][constant[average_int_n]] assign[=] call[name[PmagSampRec]][constant[sample_int_n]]
call[name[PmagResRec]][constant[vadm_n]] assign[=] call[name[PmagSampRec]][constant[sample_int_n]]
call[name[PmagResRec]][constant[data_type]] assign[=] constant[i]
call[name[PmagResults].append, parameter[name[PmagResRec]]]
if compare[call[name[len], parameter[name[PmagSamps]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da1b03e25c0> assign[=] call[name[pmag].fillkeys, parameter[name[PmagSamps]]]
call[name[pmag].magic_write, parameter[name[sampout], name[TmpSamps], constant[pmag_samples]]]
call[name[print], parameter[constant[ sample averages written to ], name[sampout]]]
for taget[name[site]] in starred[name[sites]] begin[:]
for taget[name[coord]] in starred[name[coords]] begin[:]
if <ast.UnaryOp object at 0x7da1b03e3160> begin[:]
<ast.Tuple object at 0x7da1b03e31f0> assign[=] tuple[[<ast.Constant object at 0x7da1b03e3190>, <ast.Name object at 0x7da1b03e3130>]]
if name[avg_directions_by_sample] begin[:]
<ast.Tuple object at 0x7da1b03e34f0> assign[=] tuple[[<ast.Constant object at 0x7da1b03e2d70>, <ast.Name object at 0x7da1b03e2dd0>]]
variable[tmp] assign[=] call[name[pmag].get_dictitem, parameter[name[dirlist], constant[er_site_name], name[site], constant[T]]]
variable[tmp1] assign[=] call[name[pmag].get_dictitem, parameter[name[tmp], binary_operation[name[key] + constant[_tilt_correction]], name[coord], constant[T]]]
variable[sd] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteNFO], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[sd]]] greater[>] constant[0]] begin[:]
variable[sitedat] assign[=] call[name[sd]][constant[0]]
if <ast.UnaryOp object at 0x7da1b03e2410> begin[:]
for taget[name[comp]] in starred[name[Comps]] begin[:]
variable[siteD] assign[=] call[name[pmag].get_dictitem, parameter[name[tmp1], binary_operation[name[key] + constant[_comp_name]], name[comp], constant[T]]]
variable[quality_siteD] assign[=] list[[]]
for taget[name[rec]] in starred[name[siteD]] begin[:]
variable[spec_quality] assign[=] call[name[rec].get, parameter[constant[specimen_flag], constant[g]]]
variable[samp_quality] assign[=] call[name[rec].get, parameter[constant[sample_flag], constant[g]]]
if <ast.BoolOp object at 0x7da1b03e0d90> begin[:]
call[name[quality_siteD].append, parameter[name[rec]]]
variable[siteD] assign[=] name[quality_siteD]
if compare[call[name[len], parameter[name[siteD]]] greater[>] constant[0]] begin[:]
variable[PmagSiteRec] assign[=] call[name[pmag].lnpbykey, parameter[name[siteD], constant[site], name[key]]]
call[name[PmagSiteRec]][constant[site_comp_name]] assign[=] name[comp]
call[name[PmagSiteRec]][constant[er_location_name]] assign[=] call[call[name[siteD]][constant[0]]][constant[er_location_name]]
call[name[PmagSiteRec]][constant[er_site_name]] assign[=] call[call[name[siteD]][constant[0]]][constant[er_site_name]]
call[name[PmagSiteRec]][constant[site_tilt_correction]] assign[=] name[coord]
call[name[PmagSiteRec]][constant[site_comp_name]] assign[=] call[name[pmag].get_list, parameter[name[siteD], binary_operation[name[key] + constant[_comp_name]]]]
if name[avg_directions_by_sample] begin[:]
call[name[PmagSiteRec]][constant[er_sample_names]] assign[=] call[name[pmag].get_list, parameter[name[siteD], constant[er_sample_name]]]
variable[AFnum] assign[=] call[name[len], parameter[call[name[pmag].get_dictitem, parameter[name[siteD], constant[magic_method_codes], constant[LP-DIR-AF], constant[has]]]]]
variable[Tnum] assign[=] call[name[len], parameter[call[name[pmag].get_dictitem, parameter[name[siteD], constant[magic_method_codes], constant[LP-DIR-T], constant[has]]]]]
variable[DC] assign[=] constant[3]
if compare[name[AFnum] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b03e04c0>
if compare[name[Tnum] greater[>] constant[0]] begin[:]
<ast.AugAssign object at 0x7da1b0246170>
call[name[PmagSiteRec]][constant[magic_method_codes]] assign[=] binary_operation[binary_operation[binary_operation[call[name[pmag].get_list, parameter[name[siteD], constant[magic_method_codes]]] + constant[:]] + constant[LP-DC]] + call[name[str], parameter[name[DC]]]]
call[call[name[PmagSiteRec]][constant[magic_method_codes]].strip, parameter[constant[:]]]
if name[plotsites] begin[:]
call[name[print], parameter[call[name[PmagSiteRec]][constant[er_site_name]]]]
call[name[pmagplotlib].plot_site, parameter[call[name[EQ]][constant[eqarea]], name[PmagSiteRec], name[siteD], name[key]]]
call[name[pmagplotlib].draw_figs, parameter[name[EQ]]]
call[name[PmagSites].append, parameter[name[PmagSiteRec]]]
for taget[name[PmagSiteRec]] in starred[name[PmagSites]] begin[:]
call[name[PmagSiteRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagSiteRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagSiteRec]][constant[magic_software_packages]] assign[=] name[version_num]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagSiteRec] assign[=] call[name[pmag].get_age, parameter[name[PmagSiteRec], constant[er_site_name], constant[site_inferred_], name[AgeNFO], name[DefaultAge]]]
call[name[PmagSiteRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
if <ast.BoolOp object at 0x7da1b036c1f0> begin[:]
if compare[call[name[int], parameter[call[name[PmagSiteRec]][constant[site_n_planes]]]] greater[>] constant[0]] begin[:]
call[name[PmagSiteRec]][constant[magic_method_codes]] assign[=] binary_operation[call[name[PmagSiteRec]][constant[magic_method_codes]] + constant[:DE-FM-LP]]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[PmagSiteRec], name[accept], constant[site_dir]]]
if compare[call[name[len], parameter[name[kill]]] equal[==] constant[0]] begin[:]
variable[PmagResRec] assign[=] dictionary[[], []]
call[name[PmagResRec]][constant[data_type]] assign[=] constant[i]
call[name[PmagResRec]][constant[magic_software_packages]] assign[=] name[version_num]
call[name[PmagSiteRec]][constant[site_description]] assign[=] constant[Site direction included in results table]
call[name[PmagResRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
variable[dec] assign[=] call[name[float], parameter[call[name[PmagSiteRec]][constant[site_dec]]]]
variable[inc] assign[=] call[name[float], parameter[call[name[PmagSiteRec]][constant[site_inc]]]]
if <ast.BoolOp object at 0x7da1b23452d0> begin[:]
variable[a95] assign[=] call[name[float], parameter[call[name[PmagSiteRec]][constant[site_alpha95]]]]
variable[sitedat] assign[=] call[call[name[pmag].get_dictitem, parameter[name[SiteNFO], constant[er_site_name], call[name[PmagSiteRec]][constant[er_site_name]], constant[T]]]][constant[0]]
variable[lat] assign[=] call[name[float], parameter[call[name[sitedat]][constant[site_lat]]]]
variable[lon] assign[=] call[name[float], parameter[call[name[sitedat]][constant[site_lon]]]]
<ast.Tuple object at 0x7da1b2344790> assign[=] call[name[pmag].dia_vgp, parameter[name[dec], name[inc], name[a95], name[lat], name[lon]]]
if compare[call[name[PmagSiteRec]][constant[site_tilt_correction]] equal[==] constant[-1]] begin[:]
variable[C] assign[=] constant[ (spec coord) ]
if compare[call[name[PmagSiteRec]][constant[site_tilt_correction]] equal[==] constant[0]] begin[:]
variable[C] assign[=] constant[ (geog. coord) ]
if compare[call[name[PmagSiteRec]][constant[site_tilt_correction]] equal[==] constant[100]] begin[:]
variable[C] assign[=] constant[ (strat. coord) ]
call[name[PmagResRec]][constant[pmag_result_name]] assign[=] binary_operation[constant[VGP Site: ] + call[name[PmagSiteRec]][constant[er_site_name]]]
call[name[PmagResRec]][constant[result_description]] assign[=] binary_operation[binary_operation[binary_operation[constant[Site VGP, coord system = ] + call[name[str], parameter[name[coord]]]] + constant[ component: ]] + name[comp]]
call[name[PmagResRec]][constant[er_site_names]] assign[=] call[name[PmagSiteRec]][constant[er_site_name]]
call[name[PmagResRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
call[name[PmagResRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagResRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagResRec]][constant[er_location_names]] assign[=] call[name[PmagSiteRec]][constant[er_location_name]]
if name[avg_directions_by_sample] begin[:]
call[name[PmagResRec]][constant[er_sample_names]] assign[=] call[name[PmagSiteRec]][constant[er_sample_names]]
call[name[PmagResRec]][constant[tilt_correction]] assign[=] call[name[PmagSiteRec]][constant[site_tilt_correction]]
call[name[PmagResRec]][constant[pole_comp_name]] assign[=] call[name[PmagSiteRec]][constant[site_comp_name]]
call[name[PmagResRec]][constant[average_dec]] assign[=] call[name[PmagSiteRec]][constant[site_dec]]
call[name[PmagResRec]][constant[average_inc]] assign[=] call[name[PmagSiteRec]][constant[site_inc]]
call[name[PmagResRec]][constant[average_alpha95]] assign[=] call[name[PmagSiteRec]][constant[site_alpha95]]
call[name[PmagResRec]][constant[average_n]] assign[=] call[name[PmagSiteRec]][constant[site_n]]
call[name[PmagResRec]][constant[average_n_lines]] assign[=] call[name[PmagSiteRec]][constant[site_n_lines]]
call[name[PmagResRec]][constant[average_n_planes]] assign[=] call[name[PmagSiteRec]][constant[site_n_planes]]
call[name[PmagResRec]][constant[vgp_n]] assign[=] call[name[PmagSiteRec]][constant[site_n]]
call[name[PmagResRec]][constant[average_k]] assign[=] call[name[PmagSiteRec]][constant[site_k]]
call[name[PmagResRec]][constant[average_r]] assign[=] call[name[PmagSiteRec]][constant[site_r]]
call[name[PmagResRec]][constant[average_lat]] assign[=] binary_operation[constant[%10.4f ] <ast.Mod object at 0x7da2590d6920> name[lat]]
call[name[PmagResRec]][constant[average_lon]] assign[=] binary_operation[constant[%10.4f ] <ast.Mod object at 0x7da2590d6920> name[lon]]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagResRec] assign[=] call[name[pmag].get_age, parameter[name[PmagResRec], constant[er_site_names], constant[average_], name[AgeNFO], name[DefaultAge]]]
variable[site_height] assign[=] call[name[pmag].get_dictitem, parameter[name[height_nfo], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[site_height]]] greater[>] constant[0]] begin[:]
call[name[PmagResRec]][constant[average_height]] assign[=] call[call[name[site_height]][constant[0]]][constant[site_height]]
call[name[PmagResRec]][constant[vgp_lat]] assign[=] binary_operation[constant[%7.1f ] <ast.Mod object at 0x7da2590d6920> name[plat]]
call[name[PmagResRec]][constant[vgp_lon]] assign[=] binary_operation[constant[%7.1f ] <ast.Mod object at 0x7da2590d6920> name[plon]]
call[name[PmagResRec]][constant[vgp_dp]] assign[=] binary_operation[constant[%7.1f ] <ast.Mod object at 0x7da2590d6920> name[dp]]
call[name[PmagResRec]][constant[vgp_dm]] assign[=] binary_operation[constant[%7.1f ] <ast.Mod object at 0x7da2590d6920> name[dm]]
call[name[PmagResRec]][constant[magic_method_codes]] assign[=] call[name[PmagSiteRec]][constant[magic_method_codes]]
if <ast.BoolOp object at 0x7da18dc05960> begin[:]
call[name[PmagSiteRec]][constant[magic_method_codes]] assign[=] binary_operation[call[name[PmagSiteRec]][constant[magic_method_codes]] + constant[:DA-DIR-GEO]]
if <ast.BoolOp object at 0x7da18dc04ee0> begin[:]
call[name[PmagSiteRec]][constant[magic_method_codes]] assign[=] binary_operation[call[name[PmagSiteRec]][constant[magic_method_codes]] + constant[:DA-DIR-TILT]]
call[name[PmagSiteRec]][constant[site_polarity]] assign[=] constant[]
if name[avg_by_polarity] begin[:]
variable[angle] assign[=] call[name[pmag].angle, parameter[list[[<ast.Constant object at 0x7da18dc051e0>, <ast.Constant object at 0x7da18dc050f0>]], list[[<ast.Constant object at 0x7da18dc07df0>, <ast.BinOp object at 0x7da18dc06c80>]]]]
if compare[name[angle] less_or_equal[<=] constant[55.0]] begin[:]
call[name[PmagSiteRec]][constant[site_polarity]] assign[=] constant[n]
if <ast.BoolOp object at 0x7da18dc05660> begin[:]
call[name[PmagSiteRec]][constant[site_polarity]] assign[=] constant[t]
if compare[name[angle] greater_or_equal[>=] constant[125.0]] begin[:]
call[name[PmagSiteRec]][constant[site_polarity]] assign[=] constant[r]
call[name[PmagResults].append, parameter[name[PmagResRec]]]
if name[avg_by_polarity] begin[:]
variable[crecs] assign[=] call[name[pmag].get_dictitem, parameter[name[PmagSites], constant[site_tilt_correction], constant[100], constant[T]]]
if compare[call[name[len], parameter[name[crecs]]] less[<] constant[2]] begin[:]
variable[crecs] assign[=] call[name[pmag].get_dictitem, parameter[name[PmagSites], constant[site_tilt_correction], constant[0], constant[T]]]
if compare[call[name[len], parameter[name[crecs]]] greater[>] constant[2]] begin[:]
variable[comp] assign[=] call[call[call[name[pmag].get_list, parameter[name[crecs], constant[site_comp_name]]].split, parameter[constant[:]]]][constant[0]]
variable[crecs] assign[=] call[name[pmag].get_dictitem, parameter[name[crecs], constant[site_comp_name], name[comp], constant[T]]]
variable[precs] assign[=] list[[]]
for taget[name[rec]] in starred[name[crecs]] begin[:]
call[name[precs].append, parameter[dictionary[[<ast.Constant object at 0x7da18dc079a0>, <ast.Constant object at 0x7da18dc049d0>, <ast.Constant object at 0x7da18dc06710>, <ast.Constant object at 0x7da18dc05090>], [<ast.Subscript object at 0x7da18dc04730>, <ast.Subscript object at 0x7da18dc06d40>, <ast.Subscript object at 0x7da18dc04ac0>, <ast.Subscript object at 0x7da18dc04220>]]]]
variable[polpars] assign[=] call[name[pmag].fisher_by_pol, parameter[name[precs]]]
for taget[name[mode]] in starred[call[name[list], parameter[call[name[polpars].keys, parameter[]]]]] begin[:]
variable[PolRes] assign[=] dictionary[[], []]
call[name[PolRes]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PolRes]][constant[pmag_result_name]] assign[=] binary_operation[constant[Polarity Average: Polarity ] + name[mode]]
call[name[PolRes]][constant[data_type]] assign[=] constant[a]
call[name[PolRes]][constant[average_dec]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[dec]]]
call[name[PolRes]][constant[average_inc]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[inc]]]
call[name[PolRes]][constant[average_n]] assign[=] binary_operation[constant[%i] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[n]]]
call[name[PolRes]][constant[average_r]] assign[=] binary_operation[constant[%5.4f] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[r]]]
call[name[PolRes]][constant[average_k]] assign[=] binary_operation[constant[%6.0f] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[k]]]
call[name[PolRes]][constant[average_alpha95]] assign[=] binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> call[call[name[polpars]][name[mode]]][constant[alpha95]]]
call[name[PolRes]][constant[er_site_names]] assign[=] call[call[name[polpars]][name[mode]]][constant[sites]]
call[name[PolRes]][constant[er_location_names]] assign[=] call[call[name[polpars]][name[mode]]][constant[locs]]
call[name[PolRes]][constant[magic_software_packages]] assign[=] name[version_num]
call[name[PmagResults].append, parameter[name[PolRes]]]
if <ast.BoolOp object at 0x7da18dc05bd0> begin[:]
for taget[name[site]] in starred[name[sites]] begin[:]
if name[plotsites] begin[:]
call[name[print], parameter[name[site]]]
if <ast.UnaryOp object at 0x7da20c6e6e00> begin[:]
<ast.Tuple object at 0x7da20c6e4dc0> assign[=] tuple[[<ast.Constant object at 0x7da20c6e4580>, <ast.Name object at 0x7da20c6e58d0>]]
if name[avg_intensities_by_sample] begin[:]
<ast.Tuple object at 0x7da20c6e63e0> assign[=] tuple[[<ast.Constant object at 0x7da20c6e59f0>, <ast.Name object at 0x7da20c6e78b0>]]
variable[Ints] assign[=] call[name[pmag].get_dictitem, parameter[name[intlist], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[Ints]]] greater[>] constant[0]] begin[:]
variable[PmagSiteRec] assign[=] call[name[pmag].average_int, parameter[name[Ints], name[key], constant[site]]]
variable[PmagResRec] assign[=] call[name[pmag].average_int, parameter[name[Ints], name[key], constant[average]]]
if name[plotsites] begin[:]
for taget[name[rec]] in starred[name[Ints]] begin[:]
call[name[print], parameter[call[name[rec]][binary_operation[binary_operation[constant[er_] + name[key]] + constant[_name]]], binary_operation[constant[ %7.1f] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[1000000.0] * call[name[float], parameter[call[name[rec]][binary_operation[name[key] + constant[_int]]]]]]]]]
if compare[call[name[len], parameter[name[Ints]]] greater[>] constant[1]] begin[:]
call[name[print], parameter[constant[Average: ], binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[1000000.0] * call[name[float], parameter[call[name[PmagResRec]][constant[average_int]]]]]], constant[N: ], call[name[len], parameter[name[Ints]]]]]
call[name[print], parameter[constant[Sigma: ], binary_operation[constant[%7.1f] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[1000000.0] * call[name[float], parameter[call[name[PmagResRec]][constant[average_int_sigma]]]]]], constant[Sigma %: ], call[name[PmagResRec]][constant[average_int_sigma_perc]]]]
call[name[input], parameter[constant[Press any key to continue
]]]
variable[er_location_name] assign[=] call[call[name[Ints]][constant[0]]][constant[er_location_name]]
call[name[PmagSiteRec]][constant[er_location_name]] assign[=] name[er_location_name]
call[name[PmagSiteRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagResRec]][constant[er_location_names]] assign[=] name[er_location_name]
call[name[PmagResRec]][constant[er_citation_names]] assign[=] constant[This study]
call[name[PmagSiteRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagResRec]][constant[er_analyst_mail_names]] assign[=] name[user]
call[name[PmagResRec]][constant[data_type]] assign[=] constant[i]
if <ast.UnaryOp object at 0x7da20c6e6a40> begin[:]
call[name[PmagSiteRec]][constant[er_specimen_names]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[er_specimen_name]]]
call[name[PmagResRec]][constant[er_specimen_names]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[er_specimen_name]]]
call[name[PmagSiteRec]][constant[er_sample_names]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[er_sample_name]]]
call[name[PmagResRec]][constant[er_sample_names]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[er_sample_name]]]
call[name[PmagSiteRec]][constant[er_site_name]] assign[=] name[site]
call[name[PmagResRec]][constant[er_site_names]] assign[=] name[site]
call[name[PmagSiteRec]][constant[magic_method_codes]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[magic_method_codes]]]
call[name[PmagResRec]][constant[magic_method_codes]] assign[=] call[name[pmag].get_list, parameter[name[Ints], constant[magic_method_codes]]]
variable[kill] assign[=] call[name[pmag].grade, parameter[name[PmagSiteRec], name[accept], constant[site_int]]]
if <ast.BoolOp object at 0x7da18f09c700> begin[:]
<ast.Tuple object at 0x7da18f09cb20> assign[=] tuple[[<ast.Call object at 0x7da18f09f7c0>, <ast.Constant object at 0x7da18f09df60>]]
if compare[call[name[PmagResRec]][constant[average_int_sigma]] not_equal[!=] constant[]] begin[:]
variable[sig] assign[=] call[name[float], parameter[call[name[PmagResRec]][constant[average_int_sigma]]]]
variable[sdir] assign[=] call[name[pmag].get_dictitem, parameter[name[PmagResults], constant[er_site_names], name[site], constant[T]]]
if <ast.BoolOp object at 0x7da18f09d420> begin[:]
variable[inc] assign[=] call[name[float], parameter[call[call[name[sdir]][constant[0]]][constant[average_inc]]]]
variable[mlat] assign[=] call[name[pmag].magnetic_lat, parameter[name[inc]]]
call[name[PmagResRec]][constant[vdm]] assign[=] binary_operation[constant[%8.3e ] <ast.Mod object at 0x7da2590d6920> call[name[pmag].b_vdm, parameter[name[b], name[mlat]]]]
call[name[PmagResRec]][constant[vdm_n]] assign[=] call[name[PmagResRec]][constant[average_int_n]]
if <ast.BoolOp object at 0x7da18f09df90> begin[:]
variable[vdm_sig] assign[=] call[name[pmag].b_vdm, parameter[call[name[float], parameter[call[name[PmagResRec]][constant[average_int_sigma]]]], name[mlat]]]
call[name[PmagResRec]][constant[vdm_sigma]] assign[=] binary_operation[constant[%8.3e ] <ast.Mod object at 0x7da2590d6920> name[vdm_sig]]
variable[mlat] assign[=] constant[]
if compare[name[get_model_lat] equal[==] constant[1]] begin[:]
variable[mlats] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteNFO], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[mlats]]] greater[>] constant[0]] begin[:]
variable[mlat] assign[=] call[call[name[mlats]][constant[0]]][constant[site_lat]]
if compare[name[mlat] not_equal[!=] constant[]] begin[:]
call[name[PmagResRec]][constant[vadm]] assign[=] binary_operation[constant[%8.3e ] <ast.Mod object at 0x7da2590d6920> call[name[pmag].b_vdm, parameter[name[b], call[name[float], parameter[name[mlat]]]]]]
if compare[name[sig] not_equal[!=] constant[]] begin[:]
variable[vdm_sig] assign[=] call[name[pmag].b_vdm, parameter[call[name[float], parameter[call[name[PmagResRec]][constant[average_int_sigma]]]], call[name[float], parameter[name[mlat]]]]]
call[name[PmagResRec]][constant[vadm_sigma]] assign[=] binary_operation[constant[%8.3e ] <ast.Mod object at 0x7da2590d6920> name[vdm_sig]]
call[name[PmagResRec]][constant[vadm_n]] assign[=] call[name[PmagResRec]][constant[average_int_n]]
variable[sitedat] assign[=] call[name[pmag].get_dictitem, parameter[name[SiteNFO], constant[er_site_name], call[name[PmagSiteRec]][constant[er_site_name]], constant[T]]]
if compare[call[name[len], parameter[name[sitedat]]] greater[>] constant[0]] begin[:]
variable[sitedat] assign[=] call[name[sitedat]][constant[0]]
call[name[PmagResRec]][constant[average_lat]] assign[=] call[name[sitedat]][constant[site_lat]]
call[name[PmagResRec]][constant[average_lon]] assign[=] call[name[sitedat]][constant[site_lon]]
call[name[PmagResRec]][constant[magic_software_packages]] assign[=] name[version_num]
call[name[PmagResRec]][constant[pmag_result_name]] assign[=] binary_operation[constant[V[A]DM: Site ] + name[site]]
call[name[PmagResRec]][constant[result_description]] assign[=] constant[V[A]DM of site]
call[name[PmagResRec]][constant[pmag_criteria_codes]] assign[=] constant[ACCEPT]
if compare[name[agefile] not_equal[!=] constant[]] begin[:]
variable[PmagResRec] assign[=] call[name[pmag].get_age, parameter[name[PmagResRec], constant[er_site_names], constant[average_], name[AgeNFO], name[DefaultAge]]]
variable[site_height] assign[=] call[name[pmag].get_dictitem, parameter[name[height_nfo], constant[er_site_name], name[site], constant[T]]]
if compare[call[name[len], parameter[name[site_height]]] greater[>] constant[0]] begin[:]
call[name[PmagResRec]][constant[average_height]] assign[=] call[call[name[site_height]][constant[0]]][constant[site_height]]
call[name[PmagSites].append, parameter[name[PmagSiteRec]]]
call[name[PmagResults].append, parameter[name[PmagResRec]]]
if compare[call[name[len], parameter[name[PmagSites]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da18f721630> assign[=] call[name[pmag].fillkeys, parameter[name[PmagSites]]]
call[name[pmag].magic_write, parameter[name[siteout], name[Tmp], constant[pmag_sites]]]
call[name[print], parameter[constant[ sites written to ], name[siteout]]]
if compare[call[name[len], parameter[name[PmagResults]]] greater[>] constant[0]] begin[:]
<ast.Tuple object at 0x7da18f720a60> assign[=] call[name[pmag].fillkeys, parameter[name[PmagResults]]]
call[name[pmag].magic_write, parameter[name[resout], name[TmpRes], constant[pmag_results]]]
call[name[print], parameter[constant[ results written to ], name[resout]]] | keyword[def] identifier[specimens_results_magic] ( identifier[infile] = literal[string] , identifier[measfile] = literal[string] , identifier[sampfile] = literal[string] , identifier[sitefile] = literal[string] , identifier[agefile] = literal[string] , identifier[specout] = literal[string] , identifier[sampout] = literal[string] , identifier[siteout] = literal[string] , identifier[resout] = literal[string] , identifier[critout] = literal[string] , identifier[instout] = literal[string] , identifier[plotsites] = keyword[False] , identifier[fmt] = literal[string] , identifier[dir_path] = literal[string] , identifier[cors] =[], identifier[priorities] =[ literal[string] , literal[string] ], identifier[coord] = literal[string] , identifier[user] = literal[string] , identifier[vgps_level] = literal[string] , identifier[do_site_intensity] = keyword[True] , identifier[DefaultAge] =[ literal[string] ], identifier[avg_directions_by_sample] = keyword[False] , identifier[avg_intensities_by_sample] = keyword[False] , identifier[avg_all_components] = keyword[False] , identifier[avg_by_polarity] = keyword[False] , identifier[skip_directions] = keyword[False] , identifier[skip_intensities] = keyword[False] , identifier[use_sample_latitude] = keyword[False] , identifier[use_paleolatitude] = keyword[False] , identifier[use_criteria] = literal[string] ):
literal[string]
identifier[plotsites] = keyword[False]
identifier[Comps] =[]
identifier[version_num] = identifier[pmag] . identifier[get_version] ()
identifier[args] = identifier[sys] . identifier[argv]
identifier[model_lat_file] = literal[string]
identifier[Dcrit] , identifier[Icrit] , identifier[nocrit] = literal[int] , literal[int] , literal[int]
identifier[corrections] =[]
identifier[nocorrection] =[ literal[string] , literal[string] , literal[string] ]
keyword[for] identifier[cor] keyword[in] identifier[cors] :
identifier[nocorrection] . identifier[remove] ( literal[string] + identifier[cor] )
identifier[corrections] . identifier[append] ( literal[string] + identifier[cor] )
keyword[for] identifier[p] keyword[in] identifier[priorities] :
keyword[if] keyword[not] identifier[p] . identifier[startswith] ( literal[string] ):
identifier[p] = literal[string] + identifier[p]
keyword[if] identifier[coord] == literal[string] :
identifier[coords] =[ literal[string] ]
keyword[if] identifier[coord] == literal[string] :
identifier[coords] =[ literal[string] ]
keyword[if] identifier[coord] == literal[string] :
identifier[coords] =[ literal[string] ]
keyword[if] identifier[coord] == literal[string] :
identifier[coords] =[ literal[string] , literal[string] ]
keyword[if] identifier[vgps_level] == literal[string] :
identifier[vgps] = literal[int]
keyword[else] :
identifier[vgps] = literal[int]
keyword[if] identifier[do_site_intensity] :
identifier[nositeints] = literal[int]
keyword[else] :
identifier[nositeints] = literal[int]
keyword[if] keyword[not] identifier[skip_intensities] :
keyword[if] identifier[use_sample_latitude] keyword[and] identifier[use_paleolatitude] :
identifier[print] ( literal[string] )
keyword[return] keyword[False]
keyword[elif] identifier[use_sample_latitude] :
identifier[get_model_lat] = literal[int]
keyword[elif] identifier[use_paleolatitude] :
identifier[get_model_lat] = literal[int]
keyword[try] :
identifier[model_lat_file] = identifier[dir_path] + literal[string] + identifier[args] [ identifier[ind] + literal[int] ]
identifier[get_model_lat] = literal[int]
identifier[mlat] = identifier[open] ( identifier[model_lat_file] , literal[string] )
identifier[ModelLats] =[]
keyword[for] identifier[line] keyword[in] identifier[mlat] . identifier[readlines] ():
identifier[ModelLat] ={}
identifier[tmp] = identifier[line] . identifier[split] ()
identifier[ModelLat] [ literal[string] ]= identifier[tmp] [ literal[int] ]
identifier[ModelLat] [ literal[string] ]= identifier[tmp] [ literal[int] ]
identifier[ModelLat] [ literal[string] ]= identifier[tmp] [ literal[int] ]
identifier[ModelLat] [ literal[string] ]= identifier[tmp] [ literal[int] ]
identifier[ModelLats] . identifier[append] ( identifier[ModelLat] )
identifier[mlat] . identifier[clos] ()
keyword[except] :
identifier[print] ( literal[string] )
keyword[else] :
identifier[get_model_lat] = literal[int]
keyword[if] identifier[plotsites] keyword[and] keyword[not] identifier[skip_directions] :
identifier[EQ] ={}
identifier[EQ] [ literal[string] ]= literal[int]
identifier[pmagplotlib] . identifier[plot_init] ( identifier[EQ] [ literal[string] ], literal[int] , literal[int] )
identifier[pmagplotlib] . identifier[plot_net] ( identifier[EQ] [ literal[string] ])
identifier[pmagplotlib] . identifier[draw_figs] ( identifier[EQ] )
identifier[infile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[infile] )
identifier[measfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[measfile] )
identifier[instout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[instout] )
identifier[sampfile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[sampfile] )
identifier[sitefile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[sitefile] )
identifier[agefile] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[agefile] )
identifier[specout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[specout] )
identifier[sampout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[sampout] )
identifier[siteout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[siteout] )
identifier[resout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[resout] )
identifier[critout] = identifier[os] . identifier[path] . identifier[join] ( identifier[dir_path] , identifier[critout] )
keyword[if] identifier[use_criteria] == literal[string] :
identifier[Dcrit] , identifier[Icrit] , identifier[nocrit] = literal[int] , literal[int] , literal[int]
identifier[crit_data] = identifier[pmag] . identifier[default_criteria] ( identifier[nocrit] )
keyword[elif] identifier[use_criteria] == literal[string] :
identifier[crit_data] = identifier[pmag] . identifier[default_criteria] ( identifier[nocrit] )
keyword[elif] identifier[use_criteria] == literal[string] :
identifier[crit_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
identifier[critout] )
identifier[print] ( literal[string] , identifier[critout] )
identifier[accept] ={}
keyword[for] identifier[critrec] keyword[in] identifier[crit_data] :
keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[critrec] . identifier[keys] ()):
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[critrec] . identifier[keys] ()) keyword[and] literal[string] keyword[in] identifier[list] ( identifier[critrec] . identifier[keys] ()) keyword[and] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[critrec] . identifier[keys] ()):
identifier[critrec] [ literal[string] ]= identifier[critrec] [ literal[string] ]
keyword[del] identifier[critrec] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[critrec] . identifier[keys] ()):
identifier[critrec] [ literal[string] ]= literal[string] %(
identifier[eval] ( identifier[critrec] [ literal[string] ])* literal[int] )
keyword[if] identifier[key] keyword[not] keyword[in] identifier[list] ( identifier[accept] . identifier[keys] ()) keyword[and] identifier[critrec] [ identifier[key] ]!= literal[string] :
identifier[accept] [ identifier[key] ]= identifier[critrec] [ identifier[key] ]
keyword[if] identifier[use_criteria] == literal[string] :
identifier[pmag] . identifier[magic_write] ( identifier[critout] ,[ identifier[accept] ], literal[string] )
identifier[print] ( literal[string] , identifier[critout] , literal[string] )
identifier[SiteNFO] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[sitefile] )
identifier[SampNFO] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[sampfile] )
identifier[height_nfo] = identifier[pmag] . identifier[get_dictitem] ( identifier[SiteNFO] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[agefile] :
identifier[AgeNFO] , identifier[file_type] = identifier[pmag] . identifier[magic_read] (
identifier[agefile] )
identifier[Data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[infile] )
identifier[IntData] = identifier[pmag] . identifier[get_dictitem] ( identifier[Data] , literal[string] , literal[string] , literal[string] )
identifier[comment] , identifier[orient] = literal[string] ,[]
identifier[samples] , identifier[sites] =[],[]
keyword[for] identifier[rec] keyword[in] identifier[Data] :
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[elif] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[samples] :
identifier[samples] . identifier[append] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[elif] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[sites] :
identifier[sites] . identifier[append] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()) keyword[or] identifier[rec] [ literal[string] ]== literal[string] :
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[Comps] :
identifier[Comps] . identifier[append] ( identifier[rec] [ literal[string] ])
identifier[rec] [ literal[string] ]= identifier[rec] [ literal[string] ]. identifier[strip] (
literal[string] )
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] identifier[rec] [ literal[string] ] keyword[not] keyword[in] identifier[orient] :
identifier[orient] . identifier[append] ( identifier[rec] [ literal[string] ])
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[rec] . identifier[keys] ()):
identifier[rec] [ literal[string] ]= literal[string]
identifier[SpecInts] , identifier[SpecDirs] , identifier[SpecPlanes] =[],[],[]
identifier[samples] . identifier[sort] ()
identifier[sites] . identifier[sort] ()
keyword[if] keyword[not] identifier[skip_intensities] :
identifier[IntData] = identifier[pmag] . identifier[get_dictitem] ( identifier[Data] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[nocrit] == literal[int] :
keyword[for] identifier[rec] keyword[in] identifier[IntData] :
identifier[kill] = identifier[pmag] . identifier[grade] ( identifier[rec] , identifier[accept] , literal[string] )
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[SpecInts] . identifier[append] ( identifier[rec] )
keyword[else] :
identifier[SpecInts] = identifier[IntData] [:]
keyword[if] identifier[len] ( identifier[corrections] )> literal[int] keyword[and] identifier[len] ( identifier[SpecInts] )> literal[int] :
keyword[for] identifier[cor] keyword[in] identifier[corrections] :
identifier[SpecInts] = identifier[pmag] . identifier[get_dictitem] (
identifier[SpecInts] , literal[string] , identifier[cor] , literal[string] )
keyword[if] identifier[len] ( identifier[nocorrection] )> literal[int] keyword[and] identifier[len] ( identifier[SpecInts] )> literal[int] :
keyword[for] identifier[cor] keyword[in] identifier[nocorrection] :
identifier[SpecInts] = identifier[pmag] . identifier[get_dictitem] (
identifier[SpecInts] , literal[string] , identifier[cor] , literal[string] )
identifier[PrioritySpecInts] =[]
identifier[specimens] = identifier[pmag] . identifier[get_specs] ( identifier[SpecInts] )
keyword[for] identifier[spec] keyword[in] identifier[specimens] :
identifier[ThisSpecRecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[SpecInts] , literal[string] , identifier[spec] , literal[string] )
keyword[if] identifier[len] ( identifier[ThisSpecRecs] )== literal[int] :
identifier[PrioritySpecInts] . identifier[append] ( identifier[ThisSpecRecs] [ literal[int] ])
keyword[elif] identifier[len] ( identifier[ThisSpecRecs] )> literal[int] :
identifier[prec] =[]
keyword[for] identifier[p] keyword[in] identifier[priorities] :
identifier[ThisSpecRecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[SpecInts] , literal[string] , identifier[p] , literal[string] )
keyword[if] identifier[len] ( identifier[ThisSpecRecs] )> literal[int] :
identifier[prec] . identifier[append] ( identifier[ThisSpecRecs] [ literal[int] ])
identifier[PrioritySpecInts] . identifier[append] ( identifier[prec] [ literal[int] ])
identifier[SpecInts] = identifier[PrioritySpecInts]
keyword[if] keyword[not] identifier[skip_directions] :
identifier[AllDirs] = identifier[pmag] . identifier[get_dictitem] ( identifier[Data] , literal[string] , literal[string] , literal[string] )
identifier[Ns] = identifier[pmag] . identifier[get_dictitem] ( identifier[AllDirs] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[nocrit] != literal[int] :
keyword[for] identifier[rec] keyword[in] identifier[Ns] :
identifier[kill] = identifier[pmag] . identifier[grade] ( identifier[rec] , identifier[accept] , literal[string] )
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[SpecDirs] . identifier[append] ( identifier[rec] )
keyword[else] :
identifier[SpecDirs] = identifier[AllDirs] [:]
identifier[PmagSamps] , identifier[SampDirs] =[],[]
identifier[PmagSites] , identifier[PmagResults] =[],[]
identifier[SampInts] =[]
keyword[for] identifier[samp] keyword[in] identifier[samples] :
keyword[if] identifier[avg_directions_by_sample] :
identifier[SampDir] = identifier[pmag] . identifier[get_dictitem] ( identifier[SpecDirs] , literal[string] , identifier[samp] , literal[string] )
keyword[if] identifier[len] ( identifier[SampDir] )> literal[int] :
keyword[for] identifier[coord] keyword[in] identifier[coords] :
identifier[CoordDir] = identifier[pmag] . identifier[get_dictitem] (
identifier[SampDir] , literal[string] , identifier[coord] , literal[string] )
keyword[if] identifier[len] ( identifier[CoordDir] )> literal[int] :
keyword[if] keyword[not] identifier[avg_all_components] :
keyword[for] identifier[comp] keyword[in] identifier[Comps] :
identifier[CompDir] = identifier[pmag] . identifier[get_dictitem] (
identifier[CoordDir] , literal[string] , identifier[comp] , literal[string] )
keyword[if] identifier[len] ( identifier[CompDir] )> literal[int] :
identifier[PmagSampRec] = identifier[pmag] . identifier[lnpbykey] (
identifier[CompDir] , literal[string] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[CompDir] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[CompDir] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[samp]
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[PmagSampRec] [ literal[string] ]= identifier[user]
identifier[PmagSampRec] [ literal[string] ]= identifier[version_num]
keyword[if] identifier[CompDir] [ literal[int] ][ literal[string] ]== literal[string] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[else] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[if] identifier[nocrit] != literal[int] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagSampRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagSampRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[site_height] = identifier[pmag] . identifier[get_dictitem] (
identifier[height_nfo] , literal[string] , identifier[PmagSampRec] [ literal[string] ], literal[string] )
keyword[if] identifier[len] ( identifier[site_height] )> literal[int] :
identifier[PmagSampRec] [ literal[string] ]= identifier[site_height] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[comp]
identifier[PmagSampRec] [ literal[string] ]= identifier[coord]
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[CompDir] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[CompDir] , literal[string] )
keyword[if] identifier[nocrit] != literal[int] :
identifier[kill] = identifier[pmag] . identifier[grade] (
identifier[PmagSampRec] , identifier[accept] , literal[string] )
keyword[else] :
identifier[kill] =[]
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[SampDirs] . identifier[append] ( identifier[PmagSampRec] )
keyword[if] identifier[vgps] == literal[int] :
identifier[PmagResRec] = identifier[pmag] . identifier[getsampVGP] (
identifier[PmagSampRec] , identifier[SiteNFO] )
keyword[if] identifier[PmagResRec] != literal[string] :
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
identifier[PmagSamps] . identifier[append] ( identifier[PmagSampRec] )
keyword[if] identifier[avg_all_components] :
identifier[PmagSampRec] = identifier[pmag] . identifier[lnpbykey] (
identifier[CoordDir] , literal[string] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[CoordDir] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[CoordDir] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[samp]
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[PmagSampRec] [ literal[string] ]= identifier[user]
identifier[PmagSampRec] [ literal[string] ]= identifier[version_num]
keyword[if] identifier[all] ( identifier[i] [ literal[string] ]== literal[string] keyword[for] identifier[i] keyword[in] identifier[CoordDir] ):
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[else] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[if] identifier[nocrit] != literal[int] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagSampRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagSampRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[site_height] = identifier[pmag] . identifier[get_dictitem] (
identifier[height_nfo] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[site_height] )> literal[int] :
identifier[PmagSampRec] [ literal[string] ]= identifier[site_height] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[coord]
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[CoordDir] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[CoordDir] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[CoordDir] , literal[string] )
keyword[if] identifier[nocrit] != literal[int] :
identifier[kill] = identifier[pmag] . identifier[grade] (
identifier[PmagSampRec] , identifier[accept] , literal[string] )
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[SampDirs] . identifier[append] ( identifier[PmagSampRec] )
keyword[if] identifier[vgps] == literal[int] :
identifier[PmagResRec] = identifier[pmag] . identifier[getsampVGP] (
identifier[PmagSampRec] , identifier[SiteNFO] )
keyword[if] identifier[PmagResRec] != literal[string] :
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
keyword[else] :
identifier[SampDirs] . identifier[append] ( identifier[PmagSampRec] )
keyword[if] identifier[vgps] == literal[int] :
identifier[PmagResRec] = identifier[pmag] . identifier[getsampVGP] (
identifier[PmagSampRec] , identifier[SiteNFO] )
keyword[if] identifier[PmagResRec] != literal[string] :
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
identifier[PmagSamps] . identifier[append] ( identifier[PmagSampRec] )
keyword[if] identifier[avg_intensities_by_sample] :
identifier[SampI] = identifier[pmag] . identifier[get_dictitem] ( identifier[SpecInts] , literal[string] , identifier[samp] , literal[string] )
keyword[if] identifier[len] ( identifier[SampI] )> literal[int] :
identifier[PmagSampRec] = identifier[pmag] . identifier[average_int] ( identifier[SampI] , literal[string] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[PmagSampRec] [ literal[string] ]= identifier[SampI] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[samp]
identifier[PmagSampRec] [ literal[string] ]= identifier[SampI] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[PmagSampRec] [ literal[string] ]= identifier[user]
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagSampRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagSampRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[site_height] = identifier[pmag] . identifier[get_dictitem] (
identifier[height_nfo] , literal[string] , identifier[PmagSampRec] [ literal[string] ], literal[string] )
keyword[if] identifier[len] ( identifier[site_height] )> literal[int] :
identifier[PmagSampRec] [ literal[string] ]= identifier[site_height] [ literal[int] ][ literal[string] ]
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[SampI] , literal[string] )
identifier[PmagSampRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[SampI] , literal[string] )
keyword[if] identifier[nocrit] != literal[int] :
identifier[kill] = identifier[pmag] . identifier[grade] ( identifier[PmagSampRec] , identifier[accept] , literal[string] )
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[PmagSampRec] [ literal[string] ]= literal[string]
identifier[SampInts] . identifier[append] ( identifier[PmagSampRec] )
identifier[PmagSamps] . identifier[append] ( identifier[PmagSampRec] )
keyword[else] :
identifier[PmagSampRec] ={}
keyword[else] :
identifier[SampInts] . identifier[append] ( identifier[PmagSampRec] )
identifier[PmagSamps] . identifier[append] ( identifier[PmagSampRec] )
identifier[PmagSampRec] [ literal[string] ]= literal[string]
keyword[if] identifier[vgps] == literal[int] keyword[and] identifier[get_model_lat] != literal[int] keyword[and] identifier[PmagSampRec] !={}:
keyword[if] identifier[get_model_lat] == literal[int] :
identifier[PmagResRec] = identifier[pmag] . identifier[getsampVDM] ( identifier[PmagSampRec] , identifier[SampNFO] )
keyword[del] ( identifier[PmagResRec] [ literal[string] ])
keyword[elif] identifier[get_model_lat] == literal[int] :
identifier[PmagResRec] = identifier[pmag] . identifier[getsampVDM] ( identifier[PmagSampRec] , identifier[ModelLats] )
keyword[if] identifier[PmagResRec] !={}:
identifier[PmagResRec] [ literal[string] ]= identifier[PmagResRec] [ literal[string] ]+ literal[string]
keyword[if] identifier[PmagResRec] !={}:
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSampRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
keyword[if] identifier[len] ( identifier[PmagSamps] )> literal[int] :
identifier[TmpSamps] , identifier[keylist] = identifier[pmag] . identifier[fillkeys] ( identifier[PmagSamps] )
identifier[pmag] . identifier[magic_write] ( identifier[sampout] , identifier[TmpSamps] , literal[string] )
identifier[print] ( literal[string] , identifier[sampout] )
keyword[for] identifier[site] keyword[in] identifier[sites] :
keyword[for] identifier[coord] keyword[in] identifier[coords] :
keyword[if] keyword[not] identifier[avg_directions_by_sample] :
identifier[key] , identifier[dirlist] = literal[string] , identifier[SpecDirs]
keyword[if] identifier[avg_directions_by_sample] :
identifier[key] , identifier[dirlist] = literal[string] , identifier[SampDirs]
identifier[tmp] = identifier[pmag] . identifier[get_dictitem] ( identifier[dirlist] , literal[string] , identifier[site] , literal[string] )
identifier[tmp1] = identifier[pmag] . identifier[get_dictitem] ( identifier[tmp] , identifier[key] + literal[string] , identifier[coord] , literal[string] )
identifier[sd] = identifier[pmag] . identifier[get_dictitem] ( identifier[SiteNFO] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[sd] )> literal[int] :
identifier[sitedat] = identifier[sd] [ literal[int] ]
keyword[if] keyword[not] identifier[avg_all_components] :
keyword[for] identifier[comp] keyword[in] identifier[Comps] :
identifier[siteD] = identifier[pmag] . identifier[get_dictitem] (
identifier[tmp1] , identifier[key] + literal[string] , identifier[comp] , literal[string] )
identifier[quality_siteD] =[]
keyword[for] identifier[rec] keyword[in] identifier[siteD] :
identifier[spec_quality] = identifier[rec] . identifier[get] ( literal[string] , literal[string] )
identifier[samp_quality] = identifier[rec] . identifier[get] ( literal[string] , literal[string] )
keyword[if] ( identifier[spec_quality] == literal[string] ) keyword[and] ( identifier[samp_quality] == literal[string] ):
identifier[quality_siteD] . identifier[append] ( identifier[rec] )
identifier[siteD] = identifier[quality_siteD]
keyword[if] identifier[len] ( identifier[siteD] )> literal[int] :
identifier[PmagSiteRec] = identifier[pmag] . identifier[lnpbykey] ( identifier[siteD] , literal[string] , identifier[key] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[comp]
identifier[PmagSiteRec] [ literal[string] ]= identifier[siteD] [ literal[int] ][ literal[string] ]
identifier[PmagSiteRec] [ literal[string] ]= identifier[siteD] [ literal[int] ][ literal[string] ]
identifier[PmagSiteRec] [ literal[string] ]= identifier[coord]
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , identifier[key] + literal[string] )
keyword[if] identifier[avg_directions_by_sample] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )
keyword[else] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )
identifier[AFnum] = identifier[len] ( identifier[pmag] . identifier[get_dictitem] (
identifier[siteD] , literal[string] , literal[string] , literal[string] ))
identifier[Tnum] = identifier[len] ( identifier[pmag] . identifier[get_dictitem] (
identifier[siteD] , literal[string] , literal[string] , literal[string] ))
identifier[DC] = literal[int]
keyword[if] identifier[AFnum] > literal[int] :
identifier[DC] += literal[int]
keyword[if] identifier[Tnum] > literal[int] :
identifier[DC] += literal[int]
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )+ literal[string] + literal[string] + identifier[str] ( identifier[DC] )
identifier[PmagSiteRec] [ literal[string] ]. identifier[strip] ( literal[string] )
keyword[if] identifier[plotsites] :
identifier[print] ( identifier[PmagSiteRec] [ literal[string] ])
identifier[pmagplotlib] . identifier[plot_site] (
identifier[EQ] [ literal[string] ], identifier[PmagSiteRec] , identifier[siteD] , identifier[key] )
identifier[pmagplotlib] . identifier[draw_figs] ( identifier[EQ] )
identifier[PmagSites] . identifier[append] ( identifier[PmagSiteRec] )
keyword[else] :
identifier[siteD] = identifier[tmp1] [:]
keyword[if] identifier[len] ( identifier[siteD] )> literal[int] :
identifier[PmagSiteRec] = identifier[pmag] . identifier[lnpbykey] ( identifier[siteD] , literal[string] , identifier[key] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[siteD] [ literal[int] ][ literal[string] ]
identifier[PmagSiteRec] [ literal[string] ]= identifier[siteD] [ literal[int] ][ literal[string] ]
identifier[PmagSiteRec] [ literal[string] ]= identifier[comp]
identifier[PmagSiteRec] [ literal[string] ]= identifier[coord]
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , identifier[key] + literal[string] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )
identifier[AFnum] = identifier[len] ( identifier[pmag] . identifier[get_dictitem] (
identifier[siteD] , literal[string] , literal[string] , literal[string] ))
identifier[Tnum] = identifier[len] ( identifier[pmag] . identifier[get_dictitem] (
identifier[siteD] , literal[string] , literal[string] , literal[string] ))
identifier[DC] = literal[int]
keyword[if] identifier[AFnum] > literal[int] :
identifier[DC] += literal[int]
keyword[if] identifier[Tnum] > literal[int] :
identifier[DC] += literal[int]
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , literal[string] )+ literal[string] + literal[string] + identifier[str] ( identifier[DC] )
identifier[PmagSiteRec] [ literal[string] ]. identifier[strip] ( literal[string] )
keyword[if] keyword[not] identifier[avg_directions_by_sample] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[siteD] , identifier[key] + literal[string] )
keyword[if] identifier[plotsites] :
identifier[pmagplotlib] . identifier[plot_site] (
identifier[EQ] [ literal[string] ], identifier[PmagSiteRec] , identifier[siteD] , identifier[key] )
identifier[pmagplotlib] . identifier[draw_figs] ( identifier[EQ] )
identifier[PmagSites] . identifier[append] ( identifier[PmagSiteRec] )
keyword[else] :
identifier[print] ( literal[string] ,
identifier[site] , literal[string] )
keyword[for] identifier[PmagSiteRec] keyword[in] identifier[PmagSites] :
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
identifier[PmagSiteRec] [ literal[string] ]= identifier[user]
identifier[PmagSiteRec] [ literal[string] ]= identifier[version_num]
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagSiteRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagSiteRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[PmagSiteRec] . identifier[keys] ()) keyword[and] literal[string] keyword[in] identifier[list] ( identifier[PmagSiteRec] . identifier[keys] ()) keyword[and] identifier[PmagSiteRec] [ literal[string] ]!= literal[string] keyword[and] identifier[PmagSiteRec] [ literal[string] ]!= literal[string] :
keyword[if] identifier[int] ( identifier[PmagSiteRec] [ literal[string] ])> literal[int] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]+ literal[string]
keyword[elif] identifier[int] ( identifier[PmagSiteRec] [ literal[string] ])> literal[int] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]+ literal[string]
identifier[kill] = identifier[pmag] . identifier[grade] ( identifier[PmagSiteRec] , identifier[accept] , literal[string] )
keyword[if] identifier[len] ( identifier[kill] )== literal[int] :
identifier[PmagResRec] ={}
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= identifier[version_num]
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[dec] = identifier[float] ( identifier[PmagSiteRec] [ literal[string] ])
identifier[inc] = identifier[float] ( identifier[PmagSiteRec] [ literal[string] ])
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[PmagSiteRec] . identifier[keys] ()) keyword[and] identifier[PmagSiteRec] [ literal[string] ]!= literal[string] :
identifier[a95] = identifier[float] ( identifier[PmagSiteRec] [ literal[string] ])
keyword[else] :
identifier[a95] = literal[int]
identifier[sitedat] = identifier[pmag] . identifier[get_dictitem] ( identifier[SiteNFO] , literal[string] , identifier[PmagSiteRec] [ literal[string] ], literal[string] )[
literal[int] ]
identifier[lat] = identifier[float] ( identifier[sitedat] [ literal[string] ])
identifier[lon] = identifier[float] ( identifier[sitedat] [ literal[string] ])
identifier[plon] , identifier[plat] , identifier[dp] , identifier[dm] = identifier[pmag] . identifier[dia_vgp] (
identifier[dec] , identifier[inc] , identifier[a95] , identifier[lat] , identifier[lon] )
keyword[if] identifier[PmagSiteRec] [ literal[string] ]== literal[string] :
identifier[C] = literal[string]
keyword[if] identifier[PmagSiteRec] [ literal[string] ]== literal[string] :
identifier[C] = literal[string]
keyword[if] identifier[PmagSiteRec] [ literal[string] ]== literal[string] :
identifier[C] = literal[string]
identifier[PmagResRec] [ literal[string] ]= literal[string] + identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string] + identifier[str] ( identifier[coord] )+ literal[string] + identifier[comp]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= identifier[user]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
keyword[if] identifier[avg_directions_by_sample] :
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
keyword[else] :
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[lat] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[lon] )
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagResRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagResRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[site_height] = identifier[pmag] . identifier[get_dictitem] (
identifier[height_nfo] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[site_height] )> literal[int] :
identifier[PmagResRec] [ literal[string] ]= identifier[site_height] [ literal[int] ][ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[plat] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[plon] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[dp] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[dm] )
identifier[PmagResRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[PmagSiteRec] [ literal[string] ] keyword[and] literal[string] keyword[not] keyword[in] identifier[PmagSiteRec] [ literal[string] ]:
identifier[PmagSiteRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]+ literal[string]
keyword[if] literal[string] keyword[in] identifier[PmagSiteRec] [ literal[string] ] keyword[and] literal[string] keyword[not] keyword[in] identifier[PmagSiteRec] [ literal[string] ]:
identifier[PmagSiteRec] [ literal[string] ]= identifier[PmagSiteRec] [ literal[string] ]+ literal[string]
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
keyword[if] identifier[avg_by_polarity] :
identifier[angle] = identifier[pmag] . identifier[angle] ([ literal[int] , literal[int] ],[ literal[int] ,( literal[int] - identifier[plat] )])
keyword[if] identifier[angle] <= literal[int] :
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
keyword[if] identifier[angle] > literal[int] keyword[and] identifier[angle] < literal[int] :
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
keyword[if] identifier[angle] >= literal[int] :
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
keyword[if] identifier[avg_by_polarity] :
identifier[crecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[PmagSites] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[crecs] )< literal[int] :
identifier[crecs] = identifier[pmag] . identifier[get_dictitem] (
identifier[PmagSites] , literal[string] , literal[string] , literal[string] )
keyword[if] identifier[len] ( identifier[crecs] )> literal[int] :
identifier[comp] = identifier[pmag] . identifier[get_list] ( identifier[crecs] , literal[string] ). identifier[split] ( literal[string] )[
literal[int] ]
identifier[crecs] = identifier[pmag] . identifier[get_dictitem] ( identifier[crecs] , literal[string] , identifier[comp] , literal[string] )
identifier[precs] =[]
keyword[for] identifier[rec] keyword[in] identifier[crecs] :
identifier[precs] . identifier[append] ({ literal[string] : identifier[rec] [ literal[string] ], literal[string] : identifier[rec] [ literal[string] ],
literal[string] : identifier[rec] [ literal[string] ], literal[string] : identifier[rec] [ literal[string] ]})
identifier[polpars] = identifier[pmag] . identifier[fisher_by_pol] ( identifier[precs] )
keyword[for] identifier[mode] keyword[in] identifier[list] ( identifier[polpars] . identifier[keys] ()):
identifier[PolRes] ={}
identifier[PolRes] [ literal[string] ]= literal[string]
identifier[PolRes] [ literal[string] ]= literal[string] + identifier[mode]
identifier[PolRes] [ literal[string] ]= literal[string]
identifier[PolRes] [ literal[string] ]= literal[string] %( identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= literal[string] %( identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= literal[string] %( identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= literal[string] %( identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= literal[string] %( identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= literal[string] %(
identifier[polpars] [ identifier[mode] ][ literal[string] ])
identifier[PolRes] [ literal[string] ]= identifier[polpars] [ identifier[mode] ][ literal[string] ]
identifier[PolRes] [ literal[string] ]= identifier[polpars] [ identifier[mode] ][ literal[string] ]
identifier[PolRes] [ literal[string] ]= identifier[version_num]
identifier[PmagResults] . identifier[append] ( identifier[PolRes] )
keyword[if] keyword[not] identifier[skip_intensities] keyword[and] identifier[nositeints] != literal[int] :
keyword[for] identifier[site] keyword[in] identifier[sites] :
keyword[if] identifier[plotsites] :
identifier[print] ( identifier[site] )
keyword[if] keyword[not] identifier[avg_intensities_by_sample] :
identifier[key] , identifier[intlist] = literal[string] , identifier[SpecInts]
keyword[if] identifier[avg_intensities_by_sample] :
identifier[key] , identifier[intlist] = literal[string] , identifier[PmagSamps]
identifier[Ints] = identifier[pmag] . identifier[get_dictitem] ( identifier[intlist] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[Ints] )> literal[int] :
identifier[PmagSiteRec] = identifier[pmag] . identifier[average_int] ( identifier[Ints] , identifier[key] , literal[string] )
identifier[PmagResRec] = identifier[pmag] . identifier[average_int] ( identifier[Ints] , identifier[key] , literal[string] )
keyword[if] identifier[plotsites] :
keyword[for] identifier[rec] keyword[in] identifier[Ints] :
identifier[print] ( identifier[rec] [ literal[string] + identifier[key] + literal[string] ], literal[string] %
( literal[int] * identifier[float] ( identifier[rec] [ identifier[key] + literal[string] ])))
keyword[if] identifier[len] ( identifier[Ints] )> literal[int] :
identifier[print] ( literal[string] , literal[string] %(
literal[int] * identifier[float] ( identifier[PmagResRec] [ literal[string] ])), literal[string] , identifier[len] ( identifier[Ints] ))
identifier[print] ( literal[string] , literal[string] %(
literal[int] * identifier[float] ( identifier[PmagResRec] [ literal[string] ])), literal[string] , identifier[PmagResRec] [ literal[string] ])
identifier[input] ( literal[string] )
identifier[er_location_name] = identifier[Ints] [ literal[int] ][ literal[string] ]
identifier[PmagSiteRec] [ literal[string] ]= identifier[er_location_name]
identifier[PmagSiteRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= identifier[er_location_name]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagSiteRec] [ literal[string] ]= identifier[user]
identifier[PmagResRec] [ literal[string] ]= identifier[user]
identifier[PmagResRec] [ literal[string] ]= literal[string]
keyword[if] keyword[not] identifier[avg_intensities_by_sample] :
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[PmagResRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[PmagResRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[PmagSiteRec] [ literal[string] ]= identifier[site]
identifier[PmagResRec] [ literal[string] ]= identifier[site]
identifier[PmagSiteRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[PmagResRec] [ literal[string] ]= identifier[pmag] . identifier[get_list] (
identifier[Ints] , literal[string] )
identifier[kill] = identifier[pmag] . identifier[grade] ( identifier[PmagSiteRec] , identifier[accept] , literal[string] )
keyword[if] identifier[nocrit] == literal[int] keyword[or] identifier[len] ( identifier[kill] )== literal[int] :
identifier[b] , identifier[sig] = identifier[float] ( identifier[PmagResRec] [ literal[string] ]), literal[string]
keyword[if] ( identifier[PmagResRec] [ literal[string] ])!= literal[string] :
identifier[sig] = identifier[float] ( identifier[PmagResRec] [ literal[string] ])
identifier[sdir] = identifier[pmag] . identifier[get_dictitem] (
identifier[PmagResults] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[sdir] )> literal[int] keyword[and] identifier[sdir] [- literal[int] ][ literal[string] ]!= literal[string] :
identifier[inc] = identifier[float] ( identifier[sdir] [ literal[int] ][ literal[string] ])
identifier[mlat] = identifier[pmag] . identifier[magnetic_lat] ( identifier[inc] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[pmag] . identifier[b_vdm] ( identifier[b] , identifier[mlat] ))
identifier[PmagResRec] [ literal[string] ]= identifier[PmagResRec] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[list] ( identifier[PmagResRec] . identifier[keys] ()) keyword[and] identifier[PmagResRec] [ literal[string] ]!= literal[string] :
identifier[vdm_sig] = identifier[pmag] . identifier[b_vdm] (
identifier[float] ( identifier[PmagResRec] [ literal[string] ]), identifier[mlat] )
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[vdm_sig] )
keyword[else] :
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[mlat] = literal[string]
keyword[if] identifier[get_model_lat] == literal[int] :
identifier[mlats] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteNFO] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[mlats] )> literal[int] :
identifier[mlat] = identifier[mlats] [ literal[int] ][ literal[string] ]
keyword[elif] identifier[get_model_lat] == literal[int] :
identifier[mlats] = identifier[pmag] . identifier[get_dictitem] (
identifier[ModelLats] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[mlats] )> literal[int] :
identifier[PmagResRec] [ literal[string] ]= identifier[mlats] [ literal[int] ][ literal[string] ]
identifier[mlat] = identifier[PmagResRec] [ literal[string] ]
keyword[if] identifier[mlat] != literal[string] :
identifier[PmagResRec] [ literal[string] ]= literal[string] %(
identifier[pmag] . identifier[b_vdm] ( identifier[b] , identifier[float] ( identifier[mlat] )))
keyword[if] identifier[sig] != literal[string] :
identifier[vdm_sig] = identifier[pmag] . identifier[b_vdm] (
identifier[float] ( identifier[PmagResRec] [ literal[string] ]), identifier[float] ( identifier[mlat] ))
identifier[PmagResRec] [ literal[string] ]= literal[string] %( identifier[vdm_sig] )
identifier[PmagResRec] [ literal[string] ]= identifier[PmagResRec] [ literal[string] ]
keyword[else] :
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[sitedat] = identifier[pmag] . identifier[get_dictitem] (
identifier[SiteNFO] , literal[string] , identifier[PmagSiteRec] [ literal[string] ], literal[string] )
keyword[if] identifier[len] ( identifier[sitedat] )> literal[int] :
identifier[sitedat] = identifier[sitedat] [ literal[int] ]
identifier[PmagResRec] [ literal[string] ]= identifier[sitedat] [ literal[string] ]
identifier[PmagResRec] [ literal[string] ]= identifier[sitedat] [ literal[string] ]
keyword[else] :
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= identifier[version_num]
identifier[PmagResRec] [ literal[string] ]= literal[string] + identifier[site]
identifier[PmagResRec] [ literal[string] ]= literal[string]
identifier[PmagResRec] [ literal[string] ]= literal[string]
keyword[if] identifier[agefile] != literal[string] :
identifier[PmagResRec] = identifier[pmag] . identifier[get_age] (
identifier[PmagResRec] , literal[string] , literal[string] , identifier[AgeNFO] , identifier[DefaultAge] )
identifier[site_height] = identifier[pmag] . identifier[get_dictitem] (
identifier[height_nfo] , literal[string] , identifier[site] , literal[string] )
keyword[if] identifier[len] ( identifier[site_height] )> literal[int] :
identifier[PmagResRec] [ literal[string] ]= identifier[site_height] [ literal[int] ][ literal[string] ]
identifier[PmagSites] . identifier[append] ( identifier[PmagSiteRec] )
identifier[PmagResults] . identifier[append] ( identifier[PmagResRec] )
keyword[if] identifier[len] ( identifier[PmagSites] )> literal[int] :
identifier[Tmp] , identifier[keylist] = identifier[pmag] . identifier[fillkeys] ( identifier[PmagSites] )
identifier[pmag] . identifier[magic_write] ( identifier[siteout] , identifier[Tmp] , literal[string] )
identifier[print] ( literal[string] , identifier[siteout] )
keyword[else] :
identifier[print] ( literal[string] )
keyword[if] identifier[len] ( identifier[PmagResults] )> literal[int] :
identifier[TmpRes] , identifier[keylist] = identifier[pmag] . identifier[fillkeys] ( identifier[PmagResults] )
identifier[pmag] . identifier[magic_write] ( identifier[resout] , identifier[TmpRes] , literal[string] )
identifier[print] ( literal[string] , identifier[resout] )
keyword[else] :
identifier[print] ( literal[string] ) | def specimens_results_magic(infile='pmag_specimens.txt', measfile='magic_measurements.txt', sampfile='er_samples.txt', sitefile='er_sites.txt', agefile='er_ages.txt', specout='er_specimens.txt', sampout='pmag_samples.txt', siteout='pmag_sites.txt', resout='pmag_results.txt', critout='pmag_criteria.txt', instout='magic_instruments.txt', plotsites=False, fmt='svg', dir_path='.', cors=[], priorities=['DA-AC-ARM', 'DA-AC-TRM'], coord='g', user='', vgps_level='site', do_site_intensity=True, DefaultAge=['none'], avg_directions_by_sample=False, avg_intensities_by_sample=False, avg_all_components=False, avg_by_polarity=False, skip_directions=False, skip_intensities=False, use_sample_latitude=False, use_paleolatitude=False, use_criteria='default'):
"""
Writes magic_instruments, er_specimens, pmag_samples, pmag_sites, pmag_criteria, and pmag_results. The data used to write this is obtained by reading a pmag_speciemns, a magic_measurements, a er_samples, a er_sites, a er_ages.
@param -> infile: path from the WD to the pmag speciemns table
@param -> measfile: path from the WD to the magic measurement file
@param -> sampfile: path from the WD to the er sample file
@param -> sitefile: path from the WD to the er sites data file
@param -> agefile: path from the WD to the er ages data file
@param -> specout: path from the WD to the place to write the er specimens data file
@param -> sampout: path from the WD to the place to write the pmag samples data file
@param -> siteout: path from the WD to the place to write the pmag sites data file
@param -> resout: path from the WD to the place to write the pmag results data file
@param -> critout: path from the WD to the place to write the pmag criteria file
@param -> instout: path from th WD to the place to write the magic instruments file
@param -> documentation incomplete if you know more about the purpose of the parameters in this function and it's side effects please extend and complete this string
"""
# initialize some variables
plotsites = False # cannot use draw_figs from within ipmag
Comps = [] # list of components
version_num = pmag.get_version()
args = sys.argv
model_lat_file = ''
(Dcrit, Icrit, nocrit) = (0, 0, 0)
corrections = []
nocorrection = ['DA-NL', 'DA-AC', 'DA-CR']
# do some data adjustments
for cor in cors:
nocorrection.remove('DA-' + cor)
corrections.append('DA-' + cor) # depends on [control=['for'], data=['cor']]
for p in priorities:
if not p.startswith('DA-AC-'):
p = 'DA-AC-' + p # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
# translate coord into coords
if coord == 's':
coords = ['-1'] # depends on [control=['if'], data=[]]
if coord == 'g':
coords = ['0'] # depends on [control=['if'], data=[]]
if coord == 't':
coords = ['100'] # depends on [control=['if'], data=[]]
if coord == 'b':
coords = ['0', '100'] # depends on [control=['if'], data=[]]
if vgps_level == 'sample':
vgps = 1 # save sample level VGPS/VADMs # depends on [control=['if'], data=[]]
else:
vgps = 0 # site level
if do_site_intensity:
nositeints = 0 # depends on [control=['if'], data=[]]
else:
nositeints = 1
# chagne these all to True/False instead of 1/0
if not skip_intensities:
# set model lat and
if use_sample_latitude and use_paleolatitude:
print('you should set a paleolatitude file OR use present day lat - not both')
return False # depends on [control=['if'], data=[]]
elif use_sample_latitude:
get_model_lat = 1 # depends on [control=['if'], data=[]]
elif use_paleolatitude:
get_model_lat = 2
try:
model_lat_file = dir_path + '/' + args[ind + 1]
get_model_lat = 2
mlat = open(model_lat_file, 'r')
ModelLats = []
for line in mlat.readlines():
ModelLat = {}
tmp = line.split()
ModelLat['er_site_name'] = tmp[0]
ModelLat['site_model_lat'] = tmp[1]
ModelLat['er_sample_name'] = tmp[0]
ModelLat['sample_lat'] = tmp[1]
ModelLats.append(ModelLat) # depends on [control=['for'], data=['line']]
mlat.clos() # depends on [control=['try'], data=[]]
except:
print('use_paleolatitude option requires a valid paleolatitude file') # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]]
else:
get_model_lat = 0 # skips VADM calculation entirely # depends on [control=['if'], data=[]]
if plotsites and (not skip_directions): # plot by site - set up plot window
EQ = {}
EQ['eqarea'] = 1
# define figure 1 as equal area projection
pmagplotlib.plot_init(EQ['eqarea'], 5, 5)
# I don't know why this has to be here, but otherwise the first plot
# never plots...
pmagplotlib.plot_net(EQ['eqarea'])
pmagplotlib.draw_figs(EQ) # depends on [control=['if'], data=[]]
infile = os.path.join(dir_path, infile)
measfile = os.path.join(dir_path, measfile)
instout = os.path.join(dir_path, instout)
sampfile = os.path.join(dir_path, sampfile)
sitefile = os.path.join(dir_path, sitefile)
agefile = os.path.join(dir_path, agefile)
specout = os.path.join(dir_path, specout)
sampout = os.path.join(dir_path, sampout)
siteout = os.path.join(dir_path, siteout)
resout = os.path.join(dir_path, resout)
critout = os.path.join(dir_path, critout)
if use_criteria == 'none':
(Dcrit, Icrit, nocrit) = (1, 1, 1) # no selection criteria
crit_data = pmag.default_criteria(nocrit) # depends on [control=['if'], data=[]]
elif use_criteria == 'default':
crit_data = pmag.default_criteria(nocrit) # use default criteria # depends on [control=['if'], data=[]]
elif use_criteria == 'existing':
(crit_data, file_type) = pmag.magic_read(critout) # use pmag_criteria file
print('Acceptance criteria read in from ', critout) # depends on [control=['if'], data=[]]
accept = {}
for critrec in crit_data:
for key in list(critrec.keys()):
# need to migrate specimen_dang to specimen_int_dang for intensity
# data using old format
if 'IE-SPEC' in list(critrec.keys()) and 'specimen_dang' in list(critrec.keys()) and ('specimen_int_dang' not in list(critrec.keys())):
critrec['specimen_int_dang'] = critrec['specimen_dang']
del critrec['specimen_dang'] # depends on [control=['if'], data=[]]
# need to get rid of ron shaars sample_int_sigma_uT
if 'sample_int_sigma_uT' in list(critrec.keys()):
critrec['sample_int_sigma'] = '%10.3e' % (eval(critrec['sample_int_sigma_uT']) * 1e-06) # depends on [control=['if'], data=[]]
if key not in list(accept.keys()) and critrec[key] != '':
accept[key] = critrec[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['critrec']]
if use_criteria == 'default':
pmag.magic_write(critout, [accept], 'pmag_criteria')
print('\n Pmag Criteria stored in ', critout, '\n') # depends on [control=['if'], data=[]]
# now we're done slow dancing
# read in site data - has the lats and lons
(SiteNFO, file_type) = pmag.magic_read(sitefile)
# read in site data - has the lats and lons
(SampNFO, file_type) = pmag.magic_read(sampfile)
# find all the sites with height info.
height_nfo = pmag.get_dictitem(SiteNFO, 'site_height', '', 'F')
if agefile:
(AgeNFO, file_type) = pmag.magic_read(agefile) # read in the age information # depends on [control=['if'], data=[]]
# read in specimen interpretations
(Data, file_type) = pmag.magic_read(infile)
# retrieve specimens with intensity data
IntData = pmag.get_dictitem(Data, 'specimen_int', '', 'F')
(comment, orient) = ('', [])
(samples, sites) = ([], [])
for rec in Data: # run through the data filling in missing keys and finding all components, coordinates available
# fill in missing fields, collect unique sample and site names
if 'er_sample_name' not in list(rec.keys()):
rec['er_sample_name'] = '' # depends on [control=['if'], data=[]]
elif rec['er_sample_name'] not in samples:
samples.append(rec['er_sample_name']) # depends on [control=['if'], data=['samples']]
if 'er_site_name' not in list(rec.keys()):
rec['er_site_name'] = '' # depends on [control=['if'], data=[]]
elif rec['er_site_name'] not in sites:
sites.append(rec['er_site_name']) # depends on [control=['if'], data=['sites']]
if 'specimen_int' not in list(rec.keys()):
rec['specimen_int'] = '' # depends on [control=['if'], data=[]]
if 'specimen_comp_name' not in list(rec.keys()) or rec['specimen_comp_name'] == '':
rec['specimen_comp_name'] = 'A' # depends on [control=['if'], data=[]]
if rec['specimen_comp_name'] not in Comps:
Comps.append(rec['specimen_comp_name']) # depends on [control=['if'], data=['Comps']]
rec['specimen_tilt_correction'] = rec['specimen_tilt_correction'].strip('\n')
if 'specimen_tilt_correction' not in list(rec.keys()):
rec['specimen_tilt_correction'] = '-1' # assume sample coordinates # depends on [control=['if'], data=[]]
if rec['specimen_tilt_correction'] not in orient:
# collect available coordinate systems
orient.append(rec['specimen_tilt_correction']) # depends on [control=['if'], data=['orient']]
if 'specimen_direction_type' not in list(rec.keys()):
# assume direction is line - not plane
rec['specimen_direction_type'] = 'l' # depends on [control=['if'], data=[]]
if 'specimen_dec' not in list(rec.keys()):
# if no declination, set direction type to blank
rec['specimen_direction_type'] = '' # depends on [control=['if'], data=[]]
if 'specimen_n' not in list(rec.keys()):
rec['specimen_n'] = '' # put in n # depends on [control=['if'], data=[]]
if 'specimen_alpha95' not in list(rec.keys()):
rec['specimen_alpha95'] = '' # put in alpha95 # depends on [control=['if'], data=[]]
if 'magic_method_codes' not in list(rec.keys()):
rec['magic_method_codes'] = '' # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']]
# start parsing data into SpecDirs, SpecPlanes, SpecInts
(SpecInts, SpecDirs, SpecPlanes) = ([], [], [])
samples.sort() # get sorted list of samples and sites
sites.sort()
if not skip_intensities: # don't skip intensities
# retrieve specimens with intensity data
IntData = pmag.get_dictitem(Data, 'specimen_int', '', 'F')
if nocrit == 0: # use selection criteria
for rec in IntData: # do selection criteria
kill = pmag.grade(rec, accept, 'specimen_int')
if len(kill) == 0:
# intensity record to be included in sample, site
# calculations
SpecInts.append(rec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']] # depends on [control=['if'], data=[]]
else:
SpecInts = IntData[:] # take everything - no selection criteria
# check for required data adjustments
if len(corrections) > 0 and len(SpecInts) > 0:
for cor in corrections:
# only take specimens with the required corrections
SpecInts = pmag.get_dictitem(SpecInts, 'magic_method_codes', cor, 'has') # depends on [control=['for'], data=['cor']] # depends on [control=['if'], data=[]]
if len(nocorrection) > 0 and len(SpecInts) > 0:
for cor in nocorrection:
# exclude the corrections not specified for inclusion
SpecInts = pmag.get_dictitem(SpecInts, 'magic_method_codes', cor, 'not') # depends on [control=['for'], data=['cor']] # depends on [control=['if'], data=[]]
# take top priority specimen of its name in remaining specimens (only one
# per customer)
PrioritySpecInts = []
specimens = pmag.get_specs(SpecInts) # get list of uniq specimen names
for spec in specimens:
# all the records for this specimen
ThisSpecRecs = pmag.get_dictitem(SpecInts, 'er_specimen_name', spec, 'T')
if len(ThisSpecRecs) == 1:
PrioritySpecInts.append(ThisSpecRecs[0]) # depends on [control=['if'], data=[]]
elif len(ThisSpecRecs) > 1: # more than one
prec = []
for p in priorities:
# all the records for this specimen
ThisSpecRecs = pmag.get_dictitem(SpecInts, 'magic_method_codes', p, 'has')
if len(ThisSpecRecs) > 0:
prec.append(ThisSpecRecs[0]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['p']]
PrioritySpecInts.append(prec[0]) # take the best one # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['spec']]
SpecInts = PrioritySpecInts # this has the first specimen record # depends on [control=['if'], data=[]]
if not skip_directions: # don't skip directions
# retrieve specimens with directed lines and planes
AllDirs = pmag.get_dictitem(Data, 'specimen_direction_type', '', 'F')
# get all specimens with specimen_n information
Ns = pmag.get_dictitem(AllDirs, 'specimen_n', '', 'F')
if nocrit != 1: # use selection criteria
for rec in Ns: # look through everything with specimen_n for "good" data
kill = pmag.grade(rec, accept, 'specimen_dir')
if len(kill) == 0: # nothing killed it
SpecDirs.append(rec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']] # depends on [control=['if'], data=[]]
else: # no criteria
SpecDirs = AllDirs[:] # take them all # depends on [control=['if'], data=[]]
# SpecDirs is now the list of all specimen directions (lines and planes)
# that pass muster
# list of all sample data and list of those that pass the DE-SAMP criteria
(PmagSamps, SampDirs) = ([], [])
(PmagSites, PmagResults) = ([], []) # list of all site data and selected results
SampInts = []
for samp in samples: # run through the sample names
if avg_directions_by_sample: # average by sample if desired
# get all the directional data for this sample
SampDir = pmag.get_dictitem(SpecDirs, 'er_sample_name', samp, 'T')
if len(SampDir) > 0: # there are some directions
for coord in coords: # step through desired coordinate systems
# get all the directions for this sample
CoordDir = pmag.get_dictitem(SampDir, 'specimen_tilt_correction', coord, 'T')
if len(CoordDir) > 0: # there are some with this coordinate system
if not avg_all_components: # look component by component
for comp in Comps:
# get all directions from this component
CompDir = pmag.get_dictitem(CoordDir, 'specimen_comp_name', comp, 'T')
if len(CompDir) > 0: # there are some
# get a sample average from all specimens
PmagSampRec = pmag.lnpbykey(CompDir, 'sample', 'specimen')
# decorate the sample record
PmagSampRec['er_location_name'] = CompDir[0]['er_location_name']
PmagSampRec['er_site_name'] = CompDir[0]['er_site_name']
PmagSampRec['er_sample_name'] = samp
PmagSampRec['er_citation_names'] = 'This study'
PmagSampRec['er_analyst_mail_names'] = user
PmagSampRec['magic_software_packages'] = version_num
if CompDir[0]['specimen_flag'] == 'g':
PmagSampRec['sample_flag'] = 'g' # depends on [control=['if'], data=[]]
else:
PmagSampRec['sample_flag'] = 'b'
if nocrit != 1:
PmagSampRec['pmag_criteria_codes'] = 'ACCEPT' # depends on [control=['if'], data=[]]
if agefile != '':
PmagSampRec = pmag.get_age(PmagSampRec, 'er_site_name', 'sample_inferred_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
site_height = pmag.get_dictitem(height_nfo, 'er_site_name', PmagSampRec['er_site_name'], 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec['sample_height'] = site_height[0]['site_height'] # depends on [control=['if'], data=[]]
PmagSampRec['sample_comp_name'] = comp
PmagSampRec['sample_tilt_correction'] = coord
PmagSampRec['er_specimen_names'] = pmag.get_list(CompDir, 'er_specimen_name') # get a list of the specimen names used
PmagSampRec['magic_method_codes'] = pmag.get_list(CompDir, 'magic_method_codes') # get a list of the methods used
if nocrit != 1: # apply selection criteria
kill = pmag.grade(PmagSampRec, accept, 'sample_dir') # depends on [control=['if'], data=[]]
else:
kill = []
if len(kill) == 0:
SampDirs.append(PmagSampRec)
if vgps == 1: # if sample level VGP info desired, do that now
PmagResRec = pmag.getsampVGP(PmagSampRec, SiteNFO)
if PmagResRec != '':
PmagResults.append(PmagResRec) # depends on [control=['if'], data=['PmagResRec']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# print(PmagSampRec)
PmagSamps.append(PmagSampRec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comp']] # depends on [control=['if'], data=[]]
if avg_all_components: # average all components together basically same as above
PmagSampRec = pmag.lnpbykey(CoordDir, 'sample', 'specimen')
PmagSampRec['er_location_name'] = CoordDir[0]['er_location_name']
PmagSampRec['er_site_name'] = CoordDir[0]['er_site_name']
PmagSampRec['er_sample_name'] = samp
PmagSampRec['er_citation_names'] = 'This study'
PmagSampRec['er_analyst_mail_names'] = user
PmagSampRec['magic_software_packages'] = version_num
if all((i['specimen_flag'] == 'g' for i in CoordDir)):
PmagSampRec['sample_flag'] = 'g' # depends on [control=['if'], data=[]]
else:
PmagSampRec['sample_flag'] = 'b'
if nocrit != 1:
PmagSampRec['pmag_criteria_codes'] = '' # depends on [control=['if'], data=[]]
if agefile != '':
PmagSampRec = pmag.get_age(PmagSampRec, 'er_site_name', 'sample_inferred_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
site_height = pmag.get_dictitem(height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec['sample_height'] = site_height[0]['site_height'] # depends on [control=['if'], data=[]]
PmagSampRec['sample_tilt_correction'] = coord
PmagSampRec['sample_comp_name'] = pmag.get_list(CoordDir, 'specimen_comp_name') # get components used
PmagSampRec['er_specimen_names'] = pmag.get_list(CoordDir, 'er_specimen_name') # get specimne names averaged
PmagSampRec['magic_method_codes'] = pmag.get_list(CoordDir, 'magic_method_codes') # assemble method codes
if nocrit != 1: # apply selection criteria
kill = pmag.grade(PmagSampRec, accept, 'sample_dir')
if len(kill) == 0: # passes the mustard
SampDirs.append(PmagSampRec)
if vgps == 1:
PmagResRec = pmag.getsampVGP(PmagSampRec, SiteNFO)
if PmagResRec != '':
PmagResults.append(PmagResRec) # depends on [control=['if'], data=['PmagResRec']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else: # take everything
SampDirs.append(PmagSampRec)
if vgps == 1:
PmagResRec = pmag.getsampVGP(PmagSampRec, SiteNFO)
if PmagResRec != '':
PmagResults.append(PmagResRec) # depends on [control=['if'], data=['PmagResRec']] # depends on [control=['if'], data=[]]
PmagSamps.append(PmagSampRec) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['coord']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if avg_intensities_by_sample: # average by sample if desired
# get all the intensity data for this sample
SampI = pmag.get_dictitem(SpecInts, 'er_sample_name', samp, 'T')
if len(SampI) > 0: # there are some
# get average intensity stuff
PmagSampRec = pmag.average_int(SampI, 'specimen', 'sample')
# decorate sample record
PmagSampRec['sample_description'] = 'sample intensity'
PmagSampRec['sample_direction_type'] = ''
PmagSampRec['er_site_name'] = SampI[0]['er_site_name']
PmagSampRec['er_sample_name'] = samp
PmagSampRec['er_location_name'] = SampI[0]['er_location_name']
PmagSampRec['er_citation_names'] = 'This study'
PmagSampRec['er_analyst_mail_names'] = user
if agefile != '':
PmagSampRec = pmag.get_age(PmagSampRec, 'er_site_name', 'sample_inferred_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
site_height = pmag.get_dictitem(height_nfo, 'er_site_name', PmagSampRec['er_site_name'], 'T')
if len(site_height) > 0:
# add in height if available
PmagSampRec['sample_height'] = site_height[0]['site_height'] # depends on [control=['if'], data=[]]
PmagSampRec['er_specimen_names'] = pmag.get_list(SampI, 'er_specimen_name')
PmagSampRec['magic_method_codes'] = pmag.get_list(SampI, 'magic_method_codes')
if nocrit != 1: # apply criteria!
kill = pmag.grade(PmagSampRec, accept, 'sample_int')
if len(kill) == 0:
PmagSampRec['pmag_criteria_codes'] = 'ACCEPT'
SampInts.append(PmagSampRec)
PmagSamps.append(PmagSampRec) # depends on [control=['if'], data=[]]
else:
PmagSampRec = {} # sample rejected # depends on [control=['if'], data=[]]
else: # no criteria
SampInts.append(PmagSampRec)
PmagSamps.append(PmagSampRec)
PmagSampRec['pmag_criteria_codes'] = ''
if vgps == 1 and get_model_lat != 0 and (PmagSampRec != {}):
if get_model_lat == 1: # use sample latitude
PmagResRec = pmag.getsampVDM(PmagSampRec, SampNFO)
# get rid of the model lat key
del PmagResRec['model_lat'] # depends on [control=['if'], data=[]]
elif get_model_lat == 2: # use model latitude
PmagResRec = pmag.getsampVDM(PmagSampRec, ModelLats)
if PmagResRec != {}:
PmagResRec['magic_method_codes'] = PmagResRec['magic_method_codes'] + ':IE-MLAT' # depends on [control=['if'], data=['PmagResRec']] # depends on [control=['if'], data=[]]
if PmagResRec != {}:
PmagResRec['er_specimen_names'] = PmagSampRec['er_specimen_names']
PmagResRec['er_sample_names'] = PmagSampRec['er_sample_name']
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
PmagResRec['average_int_sigma_perc'] = PmagSampRec['sample_int_sigma_perc']
PmagResRec['average_int_sigma'] = PmagSampRec['sample_int_sigma']
PmagResRec['average_int_n'] = PmagSampRec['sample_int_n']
PmagResRec['vadm_n'] = PmagSampRec['sample_int_n']
PmagResRec['data_type'] = 'i'
PmagResults.append(PmagResRec) # depends on [control=['if'], data=['PmagResRec']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['samp']]
if len(PmagSamps) > 0:
# fill in missing keys from different types of records
(TmpSamps, keylist) = pmag.fillkeys(PmagSamps)
# save in sample output file
pmag.magic_write(sampout, TmpSamps, 'pmag_samples')
print(' sample averages written to ', sampout) # depends on [control=['if'], data=[]]
#
# create site averages from specimens or samples as specified
#
for site in sites:
for coord in coords:
if not avg_directions_by_sample:
(key, dirlist) = ('specimen', SpecDirs) # if specimen averages at site level desired # depends on [control=['if'], data=[]]
if avg_directions_by_sample:
(key, dirlist) = ('sample', SampDirs) # if sample averages at site level desired # depends on [control=['if'], data=[]]
# get all the sites with directions
tmp = pmag.get_dictitem(dirlist, 'er_site_name', site, 'T')
# use only the last coordinate if avg_all_components==False
tmp1 = pmag.get_dictitem(tmp, key + '_tilt_correction', coord, 'T')
# fish out site information (lat/lon, etc.)
sd = pmag.get_dictitem(SiteNFO, 'er_site_name', site, 'T')
if len(sd) > 0:
sitedat = sd[0]
if not avg_all_components: # do component wise averaging
for comp in Comps:
# get all components comp
siteD = pmag.get_dictitem(tmp1, key + '_comp_name', comp, 'T')
# remove bad data from means
quality_siteD = []
# remove any records for which specimen_flag or sample_flag are 'b'
# assume 'g' if flag is not provided
for rec in siteD:
spec_quality = rec.get('specimen_flag', 'g')
samp_quality = rec.get('sample_flag', 'g')
if spec_quality == 'g' and samp_quality == 'g':
quality_siteD.append(rec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rec']]
siteD = quality_siteD
if len(siteD) > 0: # there are some for this site and component name
# get an average for this site
PmagSiteRec = pmag.lnpbykey(siteD, 'site', key)
# decorate the site record
PmagSiteRec['site_comp_name'] = comp
PmagSiteRec['er_location_name'] = siteD[0]['er_location_name']
PmagSiteRec['er_site_name'] = siteD[0]['er_site_name']
PmagSiteRec['site_tilt_correction'] = coord
PmagSiteRec['site_comp_name'] = pmag.get_list(siteD, key + '_comp_name')
if avg_directions_by_sample:
PmagSiteRec['er_sample_names'] = pmag.get_list(siteD, 'er_sample_name') # depends on [control=['if'], data=[]]
else:
PmagSiteRec['er_specimen_names'] = pmag.get_list(siteD, 'er_specimen_name')
# determine the demagnetization code (DC3,4 or 5) for this site
AFnum = len(pmag.get_dictitem(siteD, 'magic_method_codes', 'LP-DIR-AF', 'has'))
Tnum = len(pmag.get_dictitem(siteD, 'magic_method_codes', 'LP-DIR-T', 'has'))
DC = 3
if AFnum > 0:
DC += 1 # depends on [control=['if'], data=[]]
if Tnum > 0:
DC += 1 # depends on [control=['if'], data=[]]
PmagSiteRec['magic_method_codes'] = pmag.get_list(siteD, 'magic_method_codes') + ':' + 'LP-DC' + str(DC)
PmagSiteRec['magic_method_codes'].strip(':')
if plotsites:
print(PmagSiteRec['er_site_name'])
# plot and list the data
pmagplotlib.plot_site(EQ['eqarea'], PmagSiteRec, siteD, key)
pmagplotlib.draw_figs(EQ) # depends on [control=['if'], data=[]]
PmagSites.append(PmagSiteRec) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comp']] # depends on [control=['if'], data=[]]
else: # last component only
# get the last orientation system specified
siteD = tmp1[:]
if len(siteD) > 0: # there are some
# get the average for this site
PmagSiteRec = pmag.lnpbykey(siteD, 'site', key)
# decorate the record
PmagSiteRec['er_location_name'] = siteD[0]['er_location_name']
PmagSiteRec['er_site_name'] = siteD[0]['er_site_name']
PmagSiteRec['site_comp_name'] = comp
PmagSiteRec['site_tilt_correction'] = coord
PmagSiteRec['site_comp_name'] = pmag.get_list(siteD, key + '_comp_name')
PmagSiteRec['er_specimen_names'] = pmag.get_list(siteD, 'er_specimen_name')
PmagSiteRec['er_sample_names'] = pmag.get_list(siteD, 'er_sample_name')
AFnum = len(pmag.get_dictitem(siteD, 'magic_method_codes', 'LP-DIR-AF', 'has'))
Tnum = len(pmag.get_dictitem(siteD, 'magic_method_codes', 'LP-DIR-T', 'has'))
DC = 3
if AFnum > 0:
DC += 1 # depends on [control=['if'], data=[]]
if Tnum > 0:
DC += 1 # depends on [control=['if'], data=[]]
PmagSiteRec['magic_method_codes'] = pmag.get_list(siteD, 'magic_method_codes') + ':' + 'LP-DC' + str(DC)
PmagSiteRec['magic_method_codes'].strip(':')
if not avg_directions_by_sample:
PmagSiteRec['site_comp_name'] = pmag.get_list(siteD, key + '_comp_name') # depends on [control=['if'], data=[]]
if plotsites:
pmagplotlib.plot_site(EQ['eqarea'], PmagSiteRec, siteD, key)
pmagplotlib.draw_figs(EQ) # depends on [control=['if'], data=[]]
PmagSites.append(PmagSiteRec) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
else:
print('site information not found in er_sites for site, ', site, ' site will be skipped') # depends on [control=['for'], data=['coord']] # depends on [control=['for'], data=['site']]
for PmagSiteRec in PmagSites: # now decorate each dictionary some more, and calculate VGPs etc. for results table
PmagSiteRec['er_citation_names'] = 'This study'
PmagSiteRec['er_analyst_mail_names'] = user
PmagSiteRec['magic_software_packages'] = version_num
if agefile != '':
PmagSiteRec = pmag.get_age(PmagSiteRec, 'er_site_name', 'site_inferred_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
PmagSiteRec['pmag_criteria_codes'] = 'ACCEPT'
if 'site_n_lines' in list(PmagSiteRec.keys()) and 'site_n_planes' in list(PmagSiteRec.keys()) and (PmagSiteRec['site_n_lines'] != '') and (PmagSiteRec['site_n_planes'] != ''):
if int(PmagSiteRec['site_n_planes']) > 0:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ':DE-FM-LP' # depends on [control=['if'], data=[]]
elif int(PmagSiteRec['site_n_lines']) > 2:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ':DE-FM' # depends on [control=['if'], data=[]]
kill = pmag.grade(PmagSiteRec, accept, 'site_dir')
if len(kill) == 0:
PmagResRec = {} # set up dictionary for the pmag_results table entry
PmagResRec['data_type'] = 'i' # decorate it a bit
PmagResRec['magic_software_packages'] = version_num
PmagSiteRec['site_description'] = 'Site direction included in results table'
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
dec = float(PmagSiteRec['site_dec'])
inc = float(PmagSiteRec['site_inc'])
if 'site_alpha95' in list(PmagSiteRec.keys()) and PmagSiteRec['site_alpha95'] != '':
a95 = float(PmagSiteRec['site_alpha95']) # depends on [control=['if'], data=[]]
else:
a95 = 180.0
sitedat = pmag.get_dictitem(SiteNFO, 'er_site_name', PmagSiteRec['er_site_name'], 'T')[0] # fish out site information (lat/lon, etc.)
lat = float(sitedat['site_lat'])
lon = float(sitedat['site_lon'])
(plon, plat, dp, dm) = pmag.dia_vgp(dec, inc, a95, lat, lon) # get the VGP for this site
if PmagSiteRec['site_tilt_correction'] == '-1':
C = ' (spec coord) ' # depends on [control=['if'], data=[]]
if PmagSiteRec['site_tilt_correction'] == '0':
C = ' (geog. coord) ' # depends on [control=['if'], data=[]]
if PmagSiteRec['site_tilt_correction'] == '100':
C = ' (strat. coord) ' # depends on [control=['if'], data=[]]
PmagResRec['pmag_result_name'] = 'VGP Site: ' + PmagSiteRec['er_site_name'] # decorate some more
PmagResRec['result_description'] = 'Site VGP, coord system = ' + str(coord) + ' component: ' + comp
PmagResRec['er_site_names'] = PmagSiteRec['er_site_name']
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
PmagResRec['er_citation_names'] = 'This study'
PmagResRec['er_analyst_mail_names'] = user
PmagResRec['er_location_names'] = PmagSiteRec['er_location_name']
if avg_directions_by_sample:
PmagResRec['er_sample_names'] = PmagSiteRec['er_sample_names'] # depends on [control=['if'], data=[]]
else:
PmagResRec['er_specimen_names'] = PmagSiteRec['er_specimen_names']
PmagResRec['tilt_correction'] = PmagSiteRec['site_tilt_correction']
PmagResRec['pole_comp_name'] = PmagSiteRec['site_comp_name']
PmagResRec['average_dec'] = PmagSiteRec['site_dec']
PmagResRec['average_inc'] = PmagSiteRec['site_inc']
PmagResRec['average_alpha95'] = PmagSiteRec['site_alpha95']
PmagResRec['average_n'] = PmagSiteRec['site_n']
PmagResRec['average_n_lines'] = PmagSiteRec['site_n_lines']
PmagResRec['average_n_planes'] = PmagSiteRec['site_n_planes']
PmagResRec['vgp_n'] = PmagSiteRec['site_n']
PmagResRec['average_k'] = PmagSiteRec['site_k']
PmagResRec['average_r'] = PmagSiteRec['site_r']
PmagResRec['average_lat'] = '%10.4f ' % lat
PmagResRec['average_lon'] = '%10.4f ' % lon
if agefile != '':
PmagResRec = pmag.get_age(PmagResRec, 'er_site_names', 'average_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
site_height = pmag.get_dictitem(height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
PmagResRec['average_height'] = site_height[0]['site_height'] # depends on [control=['if'], data=[]]
PmagResRec['vgp_lat'] = '%7.1f ' % plat
PmagResRec['vgp_lon'] = '%7.1f ' % plon
PmagResRec['vgp_dp'] = '%7.1f ' % dp
PmagResRec['vgp_dm'] = '%7.1f ' % dm
PmagResRec['magic_method_codes'] = PmagSiteRec['magic_method_codes']
if '0' in PmagSiteRec['site_tilt_correction'] and 'DA-DIR-GEO' not in PmagSiteRec['magic_method_codes']:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ':DA-DIR-GEO' # depends on [control=['if'], data=[]]
if '100' in PmagSiteRec['site_tilt_correction'] and 'DA-DIR-TILT' not in PmagSiteRec['magic_method_codes']:
PmagSiteRec['magic_method_codes'] = PmagSiteRec['magic_method_codes'] + ':DA-DIR-TILT' # depends on [control=['if'], data=[]]
PmagSiteRec['site_polarity'] = ''
if avg_by_polarity: # assign polarity based on angle of pole lat to spin axis - may want to re-think this sometime
angle = pmag.angle([0, 0], [0, 90 - plat])
if angle <= 55.0:
PmagSiteRec['site_polarity'] = 'n' # depends on [control=['if'], data=[]]
if angle > 55.0 and angle < 125.0:
PmagSiteRec['site_polarity'] = 't' # depends on [control=['if'], data=[]]
if angle >= 125.0:
PmagSiteRec['site_polarity'] = 'r' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
PmagResults.append(PmagResRec) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['PmagSiteRec']]
if avg_by_polarity:
# find the tilt corrected data
crecs = pmag.get_dictitem(PmagSites, 'site_tilt_correction', '100', 'T')
if len(crecs) < 2:
# if there aren't any, find the geographic corrected data
crecs = pmag.get_dictitem(PmagSites, 'site_tilt_correction', '0', 'T') # depends on [control=['if'], data=[]]
if len(crecs) > 2: # if there are some,
comp = pmag.get_list(crecs, 'site_comp_name').split(':')[0] # find the first component
# fish out all of the first component
crecs = pmag.get_dictitem(crecs, 'site_comp_name', comp, 'T')
precs = []
for rec in crecs:
precs.append({'dec': rec['site_dec'], 'inc': rec['site_inc'], 'name': rec['er_site_name'], 'loc': rec['er_location_name']}) # depends on [control=['for'], data=['rec']]
# calculate average by polarity
polpars = pmag.fisher_by_pol(precs)
# hunt through all the modes (normal=A, reverse=B, all=ALL)
for mode in list(polpars.keys()):
PolRes = {}
PolRes['er_citation_names'] = 'This study'
PolRes['pmag_result_name'] = 'Polarity Average: Polarity ' + mode
PolRes['data_type'] = 'a'
PolRes['average_dec'] = '%7.1f' % polpars[mode]['dec']
PolRes['average_inc'] = '%7.1f' % polpars[mode]['inc']
PolRes['average_n'] = '%i' % polpars[mode]['n']
PolRes['average_r'] = '%5.4f' % polpars[mode]['r']
PolRes['average_k'] = '%6.0f' % polpars[mode]['k']
PolRes['average_alpha95'] = '%7.1f' % polpars[mode]['alpha95']
PolRes['er_site_names'] = polpars[mode]['sites']
PolRes['er_location_names'] = polpars[mode]['locs']
PolRes['magic_software_packages'] = version_num
PmagResults.append(PolRes) # depends on [control=['for'], data=['mode']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
if not skip_intensities and nositeints != 1:
for site in sites: # now do intensities for each site
if plotsites:
print(site) # depends on [control=['if'], data=[]]
if not avg_intensities_by_sample:
(key, intlist) = ('specimen', SpecInts) # if using specimen level data # depends on [control=['if'], data=[]]
if avg_intensities_by_sample:
(key, intlist) = ('sample', PmagSamps) # if using sample level data # depends on [control=['if'], data=[]]
# get all the intensities for this site
Ints = pmag.get_dictitem(intlist, 'er_site_name', site, 'T')
if len(Ints) > 0: # there are some
# get average intensity stuff for site table
PmagSiteRec = pmag.average_int(Ints, key, 'site')
# get average intensity stuff for results table
PmagResRec = pmag.average_int(Ints, key, 'average')
if plotsites: # if site by site examination requested - print this site out to the screen
for rec in Ints:
print(rec['er_' + key + '_name'], ' %7.1f' % (1000000.0 * float(rec[key + '_int']))) # depends on [control=['for'], data=['rec']]
if len(Ints) > 1:
print('Average: ', '%7.1f' % (1000000.0 * float(PmagResRec['average_int'])), 'N: ', len(Ints))
print('Sigma: ', '%7.1f' % (1000000.0 * float(PmagResRec['average_int_sigma'])), 'Sigma %: ', PmagResRec['average_int_sigma_perc']) # depends on [control=['if'], data=[]]
input('Press any key to continue\n') # depends on [control=['if'], data=[]]
er_location_name = Ints[0]['er_location_name']
# decorate the records
PmagSiteRec['er_location_name'] = er_location_name
PmagSiteRec['er_citation_names'] = 'This study'
PmagResRec['er_location_names'] = er_location_name
PmagResRec['er_citation_names'] = 'This study'
PmagSiteRec['er_analyst_mail_names'] = user
PmagResRec['er_analyst_mail_names'] = user
PmagResRec['data_type'] = 'i'
if not avg_intensities_by_sample:
PmagSiteRec['er_specimen_names'] = pmag.get_list(Ints, 'er_specimen_name') # list of all specimens used
PmagResRec['er_specimen_names'] = pmag.get_list(Ints, 'er_specimen_name') # depends on [control=['if'], data=[]]
PmagSiteRec['er_sample_names'] = pmag.get_list(Ints, 'er_sample_name') # list of all samples used
PmagResRec['er_sample_names'] = pmag.get_list(Ints, 'er_sample_name')
PmagSiteRec['er_site_name'] = site
PmagResRec['er_site_names'] = site
PmagSiteRec['magic_method_codes'] = pmag.get_list(Ints, 'magic_method_codes')
PmagResRec['magic_method_codes'] = pmag.get_list(Ints, 'magic_method_codes')
kill = pmag.grade(PmagSiteRec, accept, 'site_int')
if nocrit == 1 or len(kill) == 0:
(b, sig) = (float(PmagResRec['average_int']), '')
if PmagResRec['average_int_sigma'] != '':
sig = float(PmagResRec['average_int_sigma']) # depends on [control=['if'], data=[]]
# fish out site direction
sdir = pmag.get_dictitem(PmagResults, 'er_site_names', site, 'T')
# get the VDM for this record using last average
# inclination (hope it is the right one!)
if len(sdir) > 0 and sdir[-1]['average_inc'] != '':
inc = float(sdir[0]['average_inc'])
# get magnetic latitude using dipole formula
mlat = pmag.magnetic_lat(inc)
# get VDM with magnetic latitude
PmagResRec['vdm'] = '%8.3e ' % pmag.b_vdm(b, mlat)
PmagResRec['vdm_n'] = PmagResRec['average_int_n']
if 'average_int_sigma' in list(PmagResRec.keys()) and PmagResRec['average_int_sigma'] != '':
vdm_sig = pmag.b_vdm(float(PmagResRec['average_int_sigma']), mlat)
PmagResRec['vdm_sigma'] = '%8.3e ' % vdm_sig # depends on [control=['if'], data=[]]
else:
PmagResRec['vdm_sigma'] = '' # depends on [control=['if'], data=[]]
mlat = '' # define a model latitude
if get_model_lat == 1: # use present site latitude
mlats = pmag.get_dictitem(SiteNFO, 'er_site_name', site, 'T')
if len(mlats) > 0:
mlat = mlats[0]['site_lat'] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# use a model latitude from some plate reconstruction model
# (or something)
elif get_model_lat == 2:
mlats = pmag.get_dictitem(ModelLats, 'er_site_name', site, 'T')
if len(mlats) > 0:
PmagResRec['model_lat'] = mlats[0]['site_model_lat'] # depends on [control=['if'], data=[]]
mlat = PmagResRec['model_lat'] # depends on [control=['if'], data=[]]
if mlat != '':
# get the VADM using the desired latitude
PmagResRec['vadm'] = '%8.3e ' % pmag.b_vdm(b, float(mlat))
if sig != '':
vdm_sig = pmag.b_vdm(float(PmagResRec['average_int_sigma']), float(mlat))
PmagResRec['vadm_sigma'] = '%8.3e ' % vdm_sig
PmagResRec['vadm_n'] = PmagResRec['average_int_n'] # depends on [control=['if'], data=[]]
else:
PmagResRec['vadm_sigma'] = '' # depends on [control=['if'], data=['mlat']]
# fish out site information (lat/lon, etc.)
sitedat = pmag.get_dictitem(SiteNFO, 'er_site_name', PmagSiteRec['er_site_name'], 'T')
if len(sitedat) > 0:
sitedat = sitedat[0]
PmagResRec['average_lat'] = sitedat['site_lat']
PmagResRec['average_lon'] = sitedat['site_lon'] # depends on [control=['if'], data=[]]
else:
PmagResRec['average_lon'] = 'UNKNOWN'
PmagResRec['average_lon'] = 'UNKNOWN'
PmagResRec['magic_software_packages'] = version_num
PmagResRec['pmag_result_name'] = 'V[A]DM: Site ' + site
PmagResRec['result_description'] = 'V[A]DM of site'
PmagResRec['pmag_criteria_codes'] = 'ACCEPT'
if agefile != '':
PmagResRec = pmag.get_age(PmagResRec, 'er_site_names', 'average_', AgeNFO, DefaultAge) # depends on [control=['if'], data=[]]
site_height = pmag.get_dictitem(height_nfo, 'er_site_name', site, 'T')
if len(site_height) > 0:
PmagResRec['average_height'] = site_height[0]['site_height'] # depends on [control=['if'], data=[]]
PmagSites.append(PmagSiteRec)
PmagResults.append(PmagResRec) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['site']] # depends on [control=['if'], data=[]]
if len(PmagSites) > 0:
(Tmp, keylist) = pmag.fillkeys(PmagSites)
pmag.magic_write(siteout, Tmp, 'pmag_sites')
print(' sites written to ', siteout) # depends on [control=['if'], data=[]]
else:
print('No Site level table')
if len(PmagResults) > 0:
(TmpRes, keylist) = pmag.fillkeys(PmagResults)
pmag.magic_write(resout, TmpRes, 'pmag_results')
print(' results written to ', resout) # depends on [control=['if'], data=[]]
else:
print('No Results level table') |
def dict_diff(old, new):
    """
    Return a dict representing the differences between the dicts `old` and
    `new`. Deleted keys appear as a key with the value :data:`None`, added and
    changed keys appear as a key with the new value.
    """
    old_keys = viewkeys(old)
    new_keys = viewkeys(dict(new))
    # Keys only present in `new` carry their new value.
    diff = {key: new[key] for key in new_keys - old_keys}
    # Keys only present in `old` are marked as deleted with None.
    diff.update((key, None) for key in old_keys - new_keys)
    # Keys present in both sides are reported only when the value changed.
    diff.update((key, new[key]) for key in old_keys & new_keys
                if old[key] != new[key])
    return diff
constant[
Return a dict representing the differences between the dicts `old` and
`new`. Deleted keys appear as a key with the value :data:`None`, added and
changed keys appear as a key with the new value.
]
variable[old_keys] assign[=] call[name[viewkeys], parameter[name[old]]]
variable[new_keys] assign[=] call[name[viewkeys], parameter[call[name[dict], parameter[name[new]]]]]
variable[out] assign[=] dictionary[[], []]
for taget[name[key]] in starred[binary_operation[name[new_keys] - name[old_keys]]] begin[:]
call[name[out]][name[key]] assign[=] call[name[new]][name[key]]
for taget[name[key]] in starred[binary_operation[name[old_keys] - name[new_keys]]] begin[:]
call[name[out]][name[key]] assign[=] constant[None]
for taget[name[key]] in starred[binary_operation[name[old_keys] <ast.BitAnd object at 0x7da2590d6b60> name[new_keys]]] begin[:]
if compare[call[name[old]][name[key]] not_equal[!=] call[name[new]][name[key]]] begin[:]
call[name[out]][name[key]] assign[=] call[name[new]][name[key]]
return[name[out]] | keyword[def] identifier[dict_diff] ( identifier[old] , identifier[new] ):
literal[string]
identifier[old_keys] = identifier[viewkeys] ( identifier[old] )
identifier[new_keys] = identifier[viewkeys] ( identifier[dict] ( identifier[new] ))
identifier[out] ={}
keyword[for] identifier[key] keyword[in] identifier[new_keys] - identifier[old_keys] :
identifier[out] [ identifier[key] ]= identifier[new] [ identifier[key] ]
keyword[for] identifier[key] keyword[in] identifier[old_keys] - identifier[new_keys] :
identifier[out] [ identifier[key] ]= keyword[None]
keyword[for] identifier[key] keyword[in] identifier[old_keys] & identifier[new_keys] :
keyword[if] identifier[old] [ identifier[key] ]!= identifier[new] [ identifier[key] ]:
identifier[out] [ identifier[key] ]= identifier[new] [ identifier[key] ]
keyword[return] identifier[out] | def dict_diff(old, new):
"""
Return a dict representing the differences between the dicts `old` and
`new`. Deleted keys appear as a key with the value :data:`None`, added and
changed keys appear as a key with the new value.
"""
old_keys = viewkeys(old)
new_keys = viewkeys(dict(new))
out = {}
for key in new_keys - old_keys:
out[key] = new[key] # depends on [control=['for'], data=['key']]
for key in old_keys - new_keys:
out[key] = None # depends on [control=['for'], data=['key']]
for key in old_keys & new_keys:
if old[key] != new[key]:
out[key] = new[key] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']]
return out |
def objects_to_root(objects: List) -> Root:
    """
    Convert a list of s3 ObjectSummaries into a directory tree.

    :param objects: The list of objects, e.g. the result of calling
                    `.objects.all()` on a bucket.
    :return: The tree structure, contained within a root node.
    """
    def _build_tree(objs: Iterable) -> Dict:
        """
        Nest a flat listing of objects into a dictionary tree.

        :param objs: The raw iterable of S3 `ObjectSummary`s, as returned
                     by a bucket listing.
        :return: A nested dict keyed by directory and file names;
                 directory values are dicts, file values are the
                 `ObjectSummary` instance.
        """
        tree: Dict = {}
        for obj in objs:
            ends_with_slash = obj.key.endswith('/')
            parts = [part for part in obj.key.split('/') if part]
            last_index = len(parts) - 1
            node = tree
            for index, part in enumerate(parts):
                if index == last_index and not ends_with_slash:
                    # Leaf: store the object itself under the file name.
                    node[part] = obj
                else:
                    # Intermediate path component: descend into the
                    # directory dict, creating it on first sight.
                    if part not in node:
                        node[part] = {}
                    node = node[part]
        return tree

    def _make_entity(name: str, value: Union[Dict, Any]) -> Entity:
        """
        Convert one (name, value) pair from the dict tree to an `Entity`.

        :param name: The entity's name.
        :param value: A nested dict for a directory, otherwise the
                      `ObjectSummary` instance for a file.
        :return: The corresponding `Directory` or `File`.
        """
        if not isinstance(value, dict):
            return File(pathlib.PurePath(value.key).name, value.size,
                        value.e_tag.strip('"'))
        children = {child_name: _make_entity(child_name, child_value)
                    for child_name, child_value in value.items()}
        return Directory(name, children)

    tree = _build_tree(objects)
    return Root({pathlib.PurePath(name).name: _make_entity(name, value)
                 for name, value in tree.items()})
constant[
Convert a list of s3 ObjectSummaries into a directory tree.
:param objects: The list of objects, e.g. the result of calling
`.objects.all()` on a bucket.
:return: The tree structure, contained within a root node.
]
def function[_to_tree, parameter[objs]]:
constant[
Build a tree structure from a flat list of objects.
:param objs: The raw iterable of S3 `ObjectSummary`s, as returned by a
bucket listing.
:return: The listing as a nested dictionary where keys are directory
and file names. The values of directories will in turn be a
dict. The values of keys representing files will be the
`ObjectSummary` instance.
]
variable[path_tree] assign[=] dictionary[[], []]
for taget[name[obj]] in starred[name[objs]] begin[:]
variable[is_dir] assign[=] call[name[obj].key.endswith, parameter[constant[/]]]
variable[chunks] assign[=] <ast.ListComp object at 0x7da18fe926b0>
variable[chunk_count] assign[=] call[name[len], parameter[name[chunks]]]
variable[tmp] assign[=] name[path_tree]
for taget[tuple[[<ast.Name object at 0x7da18fe920b0>, <ast.Name object at 0x7da18fe90640>]]] in starred[call[name[enumerate], parameter[name[chunks]]]] begin[:]
variable[is_last_chunk] assign[=] compare[name[i] equal[==] binary_operation[name[chunk_count] - constant[1]]]
if <ast.BoolOp object at 0x7da18fe92650> begin[:]
call[name[tmp]][name[chunk]] assign[=] name[obj]
return[name[path_tree]]
def function[_to_entity, parameter[key, value]]:
constant[
Turn a nested dictionary representing an S3 bucket into the correct
`Entity` object.
:param key: The name of the entity.
:param value: If the entity is a directory, the nested dict
representing its contents. Otherwise, the `ObjectSummary`
instance representing the file.
:return: The entity representing the entity name and value pair.
]
if call[name[isinstance], parameter[name[value], name[dict]]] begin[:]
return[call[name[Directory], parameter[name[key], <ast.DictComp object at 0x7da18fe919c0>]]]
return[call[name[File], parameter[call[name[pathlib].PurePath, parameter[name[value].key]].name, name[value].size, call[name[value].e_tag.strip, parameter[constant["]]]]]]
variable[tree] assign[=] call[name[_to_tree], parameter[name[objects]]]
return[call[name[Root], parameter[<ast.DictComp object at 0x7da20c795570>]]] | keyword[def] identifier[objects_to_root] ( identifier[objects] : identifier[List] )-> identifier[Root] :
literal[string]
keyword[def] identifier[_to_tree] ( identifier[objs] : identifier[Iterable] )-> identifier[Dict] :
literal[string]
identifier[path_tree] ={}
keyword[for] identifier[obj] keyword[in] identifier[objs] :
identifier[is_dir] = identifier[obj] . identifier[key] . identifier[endswith] ( literal[string] )
identifier[chunks] =[ identifier[chunk] keyword[for] identifier[chunk] keyword[in] identifier[obj] . identifier[key] . identifier[split] ( literal[string] ) keyword[if] identifier[chunk] ]
identifier[chunk_count] = identifier[len] ( identifier[chunks] )
identifier[tmp] = identifier[path_tree]
keyword[for] identifier[i] , identifier[chunk] keyword[in] identifier[enumerate] ( identifier[chunks] ):
identifier[is_last_chunk] = identifier[i] == identifier[chunk_count] - literal[int]
keyword[if] identifier[is_last_chunk] keyword[and] keyword[not] identifier[is_dir] :
identifier[tmp] [ identifier[chunk] ]= identifier[obj]
keyword[else] :
keyword[if] identifier[chunk] keyword[not] keyword[in] identifier[tmp] :
identifier[tmp] [ identifier[chunk] ]={}
identifier[tmp] = identifier[tmp] [ identifier[chunk] ]
keyword[return] identifier[path_tree]
keyword[def] identifier[_to_entity] ( identifier[key] : identifier[str] , identifier[value] : identifier[Union] [ identifier[Dict] , identifier[Any] ])-> identifier[Entity] :
literal[string]
keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ):
keyword[return] identifier[Directory] (
identifier[key] ,
{ identifier[key_] : identifier[_to_entity] ( identifier[key_] , identifier[value_] )
keyword[for] identifier[key_] , identifier[value_] keyword[in] identifier[value] . identifier[items] ()})
keyword[return] identifier[File] ( identifier[pathlib] . identifier[PurePath] ( identifier[value] . identifier[key] ). identifier[name] , identifier[value] . identifier[size] ,
identifier[value] . identifier[e_tag] . identifier[strip] ( literal[string] ))
identifier[tree] = identifier[_to_tree] ( identifier[objects] )
keyword[return] identifier[Root] ({ identifier[pathlib] . identifier[PurePath] ( identifier[key] ). identifier[name] : identifier[_to_entity] ( identifier[key] , identifier[value] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[tree] . identifier[items] ()}) | def objects_to_root(objects: List) -> Root:
"""
Convert a list of s3 ObjectSummaries into a directory tree.
:param objects: The list of objects, e.g. the result of calling
`.objects.all()` on a bucket.
:return: The tree structure, contained within a root node.
"""
def _to_tree(objs: Iterable) -> Dict:
"""
Build a tree structure from a flat list of objects.
:param objs: The raw iterable of S3 `ObjectSummary`s, as returned by a
bucket listing.
:return: The listing as a nested dictionary where keys are directory
and file names. The values of directories will in turn be a
dict. The values of keys representing files will be the
`ObjectSummary` instance.
"""
path_tree = {}
for obj in objs:
is_dir = obj.key.endswith('/')
chunks = [chunk for chunk in obj.key.split('/') if chunk]
chunk_count = len(chunks)
tmp = path_tree
for (i, chunk) in enumerate(chunks):
is_last_chunk = i == chunk_count - 1
if is_last_chunk and (not is_dir):
tmp[chunk] = obj # depends on [control=['if'], data=[]]
else:
# must be a directory
if chunk not in tmp:
# it doesn't exist - create it
tmp[chunk] = {} # depends on [control=['if'], data=['chunk', 'tmp']]
tmp = tmp[chunk] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['obj']]
return path_tree
def _to_entity(key: str, value: Union[Dict, Any]) -> Entity:
"""
Turn a nested dictionary representing an S3 bucket into the correct
`Entity` object.
:param key: The name of the entity.
:param value: If the entity is a directory, the nested dict
representing its contents. Otherwise, the `ObjectSummary`
instance representing the file.
:return: The entity representing the entity name and value pair.
"""
if isinstance(value, dict):
return Directory(key, {key_: _to_entity(key_, value_) for (key_, value_) in value.items()}) # depends on [control=['if'], data=[]]
return File(pathlib.PurePath(value.key).name, value.size, value.e_tag.strip('"'))
tree = _to_tree(objects)
return Root({pathlib.PurePath(key).name: _to_entity(key, value) for (key, value) in tree.items()}) |
def invokeSMCLI(rh, api, parms, hideInLog=[]):
    """
    Invoke SMCLI and parse the results.

    Input:
       rh        - Request handle, used for syslog tracing.
       api       - SMCLI API (function) name.
       parms     - SMCLI parms as an array.
       hideInLog - (Optional) List of parms (by index) to hide in
                   sysLog by replacing the parm with "<hidden>".

    Output:
       Dictionary containing the following:
          overallRC - overall return code, 0: success, non-zero: failure
          rc        - RC returned from SMCLI if overallRC = 0.
          rs        - RS returned from SMCLI if overallRC = 0.
          errno     - Errno returned from SMCLI if overallRC = 0.
          response  - String output of the SMCLI command.
          strError  - Text portion of the smcli RC header on failure.

    Note:
       - If the first three words of the header returned from smcli
         do not contain words that represent valid integer
         values or contain too few words then one or more error
         messages are generated.  THIS SHOULD NEVER OCCUR !!!!
    """
    if len(hideInLog) == 0:
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
                       rh.userid + ", function: " + api +
                       ", parms: " + str(parms))
    else:
        # Mask the sensitive parms in a copy so that the caller's list
        # (and the command actually passed to smcli below) is unchanged.
        logParms = list(parms)
        for i in hideInLog:
            logParms[i] = '<hidden>'
        rh.printSysLog("Enter vmUtils.invokeSMCLI, userid: " +
                       rh.userid + ", function: " + api +
                       ", parms: " + str(logParms))

    goodHeader = False

    results = {
        'overallRC': 0,
        'rc': 0,
        'rs': 0,
        'errno': 0,
        'response': [],
        'strError': '',
    }

    cmd = []
    cmd.append('sudo')
    cmd.append('/opt/zthin/bin/smcli')
    cmd.append(api)
    cmd.append('--addRCheader')

    try:
        # A zero exit status means SMAPI worked; the first line is the
        # RC header, the remainder is the real response.
        smcliResp = subprocess.check_output(cmd + parms,
                                            close_fds=True)
        if isinstance(smcliResp, bytes):
            smcliResp = bytes.decode(smcliResp, errors='replace')
        smcliResp = smcliResp.split('\n', 1)
        results['response'] = smcliResp[1]
        results['overallRC'] = 0
        results['rc'] = 0

    except CalledProcessError as e:
        strCmd = " ".join(cmd + parms)

        # Break up the RC header into its component parts.
        if e.output == '':
            smcliResp = ['']
        else:
            smcliResp = bytes.decode(e.output).split('\n', 1)

        # Split the header into its component pieces.
        rcHeader = smcliResp[0].split('(details)', 1)
        if len(rcHeader) == 0:
            rcHeader = ['', '']
        elif len(rcHeader) == 1:
            # No data after the details tag.  Add empty [1] value.
            rcHeader.append('')
        codes = rcHeader[0].split(' ')

        # Validate the rc, rs, and errno.
        if len(codes) < 3:
            # Unexpected number of codes.  Need at least 3.
            # Copy the message template so filling in 'response' below
            # does not pollute the shared msgs.msg table.
            results = dict(msgs.msg['0301'][0])
            results['response'] = msgs.msg['0301'][1] % (modId, api,
                strCmd, rcHeader[0], rcHeader[1])
        else:
            goodHeader = True
            # Convert the first word (overall rc from SMAPI) to an int
            # and set the SMT overall rc based on this value.
            orcError = False
            try:
                results['overallRC'] = int(codes[0])
                if results['overallRC'] not in [8, 24, 25]:
                    orcError = True
            except ValueError:
                goodHeader = False
                orcError = True
            if orcError:
                results['overallRC'] = 25    # SMCLI Internal Error
                results = dict(msgs.msg['0302'][0])
                results['response'] = msgs.msg['0302'][1] % (modId,
                    api, codes[0], strCmd, rcHeader[0], rcHeader[1])

            # Convert the second word to an int and save as rc.
            try:
                results['rc'] = int(codes[1])
            except ValueError:
                goodHeader = False
                results = dict(msgs.msg['0303'][0])
                results['response'] = msgs.msg['0303'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

            # Convert the third word to an int and save it as either
            # the rs or errno, depending on the overall rc.
            try:
                word3 = int(codes[2])
                if results['overallRC'] == 8:
                    results['rs'] = word3    # Must be an rs
                elif results['overallRC'] == 25:
                    results['errno'] = word3    # Must be the errno
                # We ignore word 3 for everyone else and default to 0.
            except ValueError:
                goodHeader = False
                results = dict(msgs.msg['0304'][0])
                # NOTE(review): the message reports codes[1] although
                # codes[2] is the word that failed to parse - confirm.
                results['response'] = msgs.msg['0304'][1] % (modId,
                    api, codes[1], strCmd, rcHeader[0], rcHeader[1])

        results['strError'] = rcHeader[1].lstrip()

        if goodHeader:
            # Produce a message that provides the error info.
            results['response'] = msgs.msg['0300'][1] % (modId,
                api, results['overallRC'], results['rc'],
                results['rs'], results['errno'],
                strCmd, smcliResp[1])

    except Exception as e:
        # All other exceptions (e.g. the smcli binary is missing).
        strCmd = " ".join(cmd + parms)
        results = dict(msgs.msg['0305'][0])
        results['response'] = msgs.msg['0305'][1] % (modId, strCmd,
            type(e).__name__, str(e))

    rh.printSysLog("Exit vmUtils.invokeSMCLI, rc: " +
                   str(results['overallRC']))
    return results
constant[
Invoke SMCLI and parse the results.
Input:
Request Handle
API name,
SMCLI parms as an array
(Optional) List of parms (by index) to hide in
sysLog by replacing the parm with "<hidden>".
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - String output of the SMCLI command.
Note:
- If the first three words of the header returned from smcli
do not do not contain words that represent valid integer
values or contain too few words then one or more error
messages are generated. THIS SHOULD NEVER OCCUR !!!!
]
if compare[call[name[len], parameter[name[hideInLog]]] equal[==] constant[0]] begin[:]
call[name[rh].printSysLog, parameter[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[constant[Enter vmUtils.invokeSMCLI, userid: ] + name[rh].userid] + constant[, function: ]] + name[api]] + constant[, parms: ]] + call[name[str], parameter[name[parms]]]]]]
variable[goodHeader] assign[=] constant[False]
variable[results] assign[=] dictionary[[<ast.Constant object at 0x7da204567550>, <ast.Constant object at 0x7da204566020>, <ast.Constant object at 0x7da204565120>, <ast.Constant object at 0x7da204565090>, <ast.Constant object at 0x7da204565390>, <ast.Constant object at 0x7da2045668f0>], [<ast.Constant object at 0x7da2045671c0>, <ast.Constant object at 0x7da204567ac0>, <ast.Constant object at 0x7da204566a10>, <ast.Constant object at 0x7da204566f20>, <ast.List object at 0x7da204566d10>, <ast.Constant object at 0x7da204566aa0>]]
variable[cmd] assign[=] list[[]]
call[name[cmd].append, parameter[constant[sudo]]]
call[name[cmd].append, parameter[constant[/opt/zthin/bin/smcli]]]
call[name[cmd].append, parameter[name[api]]]
call[name[cmd].append, parameter[constant[--addRCheader]]]
<ast.Try object at 0x7da204564910>
call[name[rh].printSysLog, parameter[binary_operation[constant[Exit vmUtils.invokeSMCLI, rc: ] + call[name[str], parameter[call[name[results]][constant[overallRC]]]]]]]
return[name[results]] | keyword[def] identifier[invokeSMCLI] ( identifier[rh] , identifier[api] , identifier[parms] , identifier[hideInLog] =[]):
literal[string]
keyword[if] identifier[len] ( identifier[hideInLog] )== literal[int] :
identifier[rh] . identifier[printSysLog] ( literal[string] +
identifier[rh] . identifier[userid] + literal[string] + identifier[api] +
literal[string] + identifier[str] ( identifier[parms] ))
keyword[else] :
identifier[logParms] = identifier[parms]
keyword[for] identifier[i] keyword[in] identifier[hideInLog] :
identifier[logParms] [ identifier[i] ]= literal[string]
identifier[rh] . identifier[printSysLog] ( literal[string] +
identifier[rh] . identifier[userid] + literal[string] + identifier[api] +
literal[string] + identifier[str] ( identifier[logParms] ))
identifier[goodHeader] = keyword[False]
identifier[results] ={
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] : literal[int] ,
literal[string] :[],
literal[string] : literal[string] ,
}
identifier[cmd] =[]
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( literal[string] )
identifier[cmd] . identifier[append] ( identifier[api] )
identifier[cmd] . identifier[append] ( literal[string] )
keyword[try] :
identifier[smcliResp] = identifier[subprocess] . identifier[check_output] ( identifier[cmd] + identifier[parms] ,
identifier[close_fds] = keyword[True] )
keyword[if] identifier[isinstance] ( identifier[smcliResp] , identifier[bytes] ):
identifier[smcliResp] = identifier[bytes] . identifier[decode] ( identifier[smcliResp] , identifier[errors] = literal[string] )
identifier[smcliResp] = identifier[smcliResp] . identifier[split] ( literal[string] , literal[int] )
identifier[results] [ literal[string] ]= identifier[smcliResp] [ literal[int] ]
identifier[results] [ literal[string] ]= literal[int]
identifier[results] [ literal[string] ]= literal[int]
keyword[except] identifier[CalledProcessError] keyword[as] identifier[e] :
identifier[strCmd] = literal[string] . identifier[join] ( identifier[cmd] + identifier[parms] )
keyword[if] identifier[e] . identifier[output] == literal[string] :
identifier[smcliResp] =[ literal[string] ]
keyword[else] :
identifier[smcliResp] = identifier[bytes] . identifier[decode] ( identifier[e] . identifier[output] ). identifier[split] ( literal[string] , literal[int] )
identifier[rcHeader] = identifier[smcliResp] [ literal[int] ]. identifier[split] ( literal[string] , literal[int] )
keyword[if] identifier[len] ( identifier[rcHeader] )== literal[int] :
identifier[rcHeader] =[ literal[string] , literal[string] ]
keyword[elif] identifier[len] ( identifier[rcHeader] )== literal[int] :
identifier[rcHeader] . identifier[append] ( literal[string] )
identifier[codes] = identifier[rcHeader] [ literal[int] ]. identifier[split] ( literal[string] )
keyword[if] identifier[len] ( identifier[codes] )< literal[int] :
identifier[results] = identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] , identifier[api] ,
identifier[strCmd] , identifier[rcHeader] [ literal[int] ], identifier[rcHeader] [ literal[int] ])
keyword[else] :
identifier[goodHeader] = keyword[True]
identifier[orcError] = keyword[False]
keyword[try] :
identifier[results] [ literal[string] ]= identifier[int] ( identifier[codes] [ literal[int] ])
keyword[if] identifier[results] [ literal[string] ] keyword[not] keyword[in] [ literal[int] , literal[int] , literal[int] ]:
identifier[orcError] = keyword[True]
keyword[except] identifier[ValueError] :
identifier[goodHeader] = keyword[False]
identifier[orcError] = keyword[True]
keyword[if] identifier[orcError] :
identifier[results] [ literal[string] ]= literal[int]
identifier[results] = identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] ,
identifier[api] , identifier[codes] [ literal[int] ], identifier[strCmd] , identifier[rcHeader] [ literal[int] ], identifier[rcHeader] [ literal[int] ])
keyword[try] :
identifier[results] [ literal[string] ]= identifier[int] ( identifier[codes] [ literal[int] ])
keyword[except] identifier[ValueError] :
identifier[goodHeader] = keyword[False]
identifier[results] = identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] ,
identifier[api] , identifier[codes] [ literal[int] ], identifier[strCmd] , identifier[rcHeader] [ literal[int] ], identifier[rcHeader] [ literal[int] ])
keyword[try] :
identifier[word3] = identifier[int] ( identifier[codes] [ literal[int] ])
keyword[if] identifier[results] [ literal[string] ]== literal[int] :
identifier[results] [ literal[string] ]= identifier[word3]
keyword[elif] identifier[results] [ literal[string] ]== literal[int] :
identifier[results] [ literal[string] ]= identifier[word3]
keyword[except] identifier[ValueError] :
identifier[goodHeader] = keyword[False]
identifier[results] = identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] ,
identifier[api] , identifier[codes] [ literal[int] ], identifier[strCmd] , identifier[rcHeader] [ literal[int] ], identifier[rcHeader] [ literal[int] ])
identifier[results] [ literal[string] ]= identifier[rcHeader] [ literal[int] ]. identifier[lstrip] ()
keyword[if] identifier[goodHeader] :
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] ,
identifier[api] , identifier[results] [ literal[string] ], identifier[results] [ literal[string] ],
identifier[results] [ literal[string] ], identifier[results] [ literal[string] ],
identifier[strCmd] , identifier[smcliResp] [ literal[int] ])
keyword[except] identifier[Exception] keyword[as] identifier[e] :
identifier[strCmd] = literal[string] . identifier[join] ( identifier[cmd] + identifier[parms] )
identifier[results] = identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]
identifier[results] [ literal[string] ]= identifier[msgs] . identifier[msg] [ literal[string] ][ literal[int] ]%( identifier[modId] , identifier[strCmd] ,
identifier[type] ( identifier[e] ). identifier[__name__] , identifier[str] ( identifier[e] ))
identifier[rh] . identifier[printSysLog] ( literal[string] +
identifier[str] ( identifier[results] [ literal[string] ]))
keyword[return] identifier[results] | def invokeSMCLI(rh, api, parms, hideInLog=[]):
"""
Invoke SMCLI and parse the results.
Input:
Request Handle
API name,
SMCLI parms as an array
(Optional) List of parms (by index) to hide in
sysLog by replacing the parm with "<hidden>".
Output:
Dictionary containing the following:
overallRC - overall return code, 0: success, non-zero: failure
rc - RC returned from SMCLI if overallRC = 0.
rs - RS returned from SMCLI if overallRC = 0.
errno - Errno returned from SMCLI if overallRC = 0.
response - String output of the SMCLI command.
Note:
- If the first three words of the header returned from smcli
do not do not contain words that represent valid integer
values or contain too few words then one or more error
messages are generated. THIS SHOULD NEVER OCCUR !!!!
"""
if len(hideInLog) == 0:
rh.printSysLog('Enter vmUtils.invokeSMCLI, userid: ' + rh.userid + ', function: ' + api + ', parms: ' + str(parms)) # depends on [control=['if'], data=[]]
else:
logParms = parms
for i in hideInLog:
logParms[i] = '<hidden>' # depends on [control=['for'], data=['i']]
rh.printSysLog('Enter vmUtils.invokeSMCLI, userid: ' + rh.userid + ', function: ' + api + ', parms: ' + str(logParms))
goodHeader = False
results = {'overallRC': 0, 'rc': 0, 'rs': 0, 'errno': 0, 'response': [], 'strError': ''}
cmd = []
cmd.append('sudo')
cmd.append('/opt/zthin/bin/smcli')
cmd.append(api)
cmd.append('--addRCheader')
try:
smcliResp = subprocess.check_output(cmd + parms, close_fds=True)
if isinstance(smcliResp, bytes):
smcliResp = bytes.decode(smcliResp, errors='replace') # depends on [control=['if'], data=[]]
smcliResp = smcliResp.split('\n', 1)
results['response'] = smcliResp[1]
results['overallRC'] = 0
results['rc'] = 0 # depends on [control=['try'], data=[]]
except CalledProcessError as e:
strCmd = ' '.join(cmd + parms)
# Break up the RC header into its component parts.
if e.output == '':
smcliResp = [''] # depends on [control=['if'], data=[]]
else:
smcliResp = bytes.decode(e.output).split('\n', 1)
# Split the header into its component pieces.
rcHeader = smcliResp[0].split('(details)', 1)
if len(rcHeader) == 0:
rcHeader = ['', ''] # depends on [control=['if'], data=[]]
elif len(rcHeader) == 1:
# No data after the details tag. Add empty [1] value.
rcHeader.append('') # depends on [control=['if'], data=[]]
codes = rcHeader[0].split(' ')
# Validate the rc, rs, and errno.
if len(codes) < 3:
# Unexpected number of codes. Need at least 3.
results = msgs.msg['0301'][0]
results['response'] = msgs.msg['0301'][1] % (modId, api, strCmd, rcHeader[0], rcHeader[1]) # depends on [control=['if'], data=[]]
else:
goodHeader = True
# Convert the first word (overall rc from SMAPI) to an int
# and set the SMT overall rc based on this value.
orcError = False
try:
results['overallRC'] = int(codes[0])
if results['overallRC'] not in [8, 24, 25]:
orcError = True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
except ValueError:
goodHeader = False
orcError = True # depends on [control=['except'], data=[]]
if orcError:
results['overallRC'] = 25 # SMCLI Internal Error
results = msgs.msg['0302'][0]
results['response'] = msgs.msg['0302'][1] % (modId, api, codes[0], strCmd, rcHeader[0], rcHeader[1]) # depends on [control=['if'], data=[]]
# Convert the second word to an int and save as rc.
try:
results['rc'] = int(codes[1]) # depends on [control=['try'], data=[]]
except ValueError:
goodHeader = False
results = msgs.msg['0303'][0]
results['response'] = msgs.msg['0303'][1] % (modId, api, codes[1], strCmd, rcHeader[0], rcHeader[1]) # depends on [control=['except'], data=[]]
# Convert the second word to an int and save it as either
# the rs or errno.
try:
word3 = int(codes[2])
if results['overallRC'] == 8:
results['rs'] = word3 # Must be an rs # depends on [control=['if'], data=[]]
elif results['overallRC'] == 25:
results['errno'] = word3 # Must be the errno # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]]
# We ignore word 3 for everyone else and default to 0.
except ValueError:
goodHeader = False
results = msgs.msg['0304'][0]
results['response'] = msgs.msg['0304'][1] % (modId, api, codes[1], strCmd, rcHeader[0], rcHeader[1]) # depends on [control=['except'], data=[]]
results['strError'] = rcHeader[1].lstrip()
if goodHeader:
# Produce a message that provides the error info.
results['response'] = msgs.msg['0300'][1] % (modId, api, results['overallRC'], results['rc'], results['rs'], results['errno'], strCmd, smcliResp[1]) # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']]
except Exception as e:
# All other exceptions.
strCmd = ' '.join(cmd + parms)
results = msgs.msg['0305'][0]
results['response'] = msgs.msg['0305'][1] % (modId, strCmd, type(e).__name__, str(e)) # depends on [control=['except'], data=['e']]
rh.printSysLog('Exit vmUtils.invokeSMCLI, rc: ' + str(results['overallRC']))
return results |
def run_inline_script(host,
                      name=None,
                      port=22,
                      timeout=900,
                      username='root',
                      key_filename=None,
                      inline_script=None,
                      ssh_timeout=15,
                      display_ssh_output=True,
                      parallel=False,
                      sudo_password=None,
                      sudo=False,
                      password=None,
                      tty=None,
                      opts=None,
                      tmp_dir='/tmp/.saltcloud-inline_script',
                      **kwargs):
    '''
    Run the inline script commands, one by one.

    host: address of the target machine.
    name, parallel, timeout: accepted for interface compatibility with the
        other deploy helpers; not used by this function.
    inline_script: iterable of shell command strings, executed in order.
    tmp_dir: remote marker path; the script only runs when it exists.
    :**kwargs: catch all other things we may get but don't actually need/use
        (only ``gateway``, ``known_hosts_file`` and ``has_ssh_agent``
        are read).

    Always returns True once the SSH phase has been attempted.
    '''
    gateway = None
    if 'gateway' in kwargs:
        gateway = kwargs['gateway']
    starttime = time.mktime(time.localtime())
    log.debug('Deploying %s at %s', host, starttime)
    known_hosts_file = kwargs.get('known_hosts_file', '/dev/null')
    if wait_for_port(host=host, port=port, gateway=gateway):
        log.debug('SSH port %s on %s is available', port, host)
        if wait_for_passwd(host, port=port, username=username,
                           password=password, key_filename=key_filename,
                           ssh_timeout=ssh_timeout,
                           display_ssh_output=display_ssh_output,
                           gateway=gateway, known_hosts_file=known_hosts_file):
            log.debug('Logging into %s:%s as %s', host, port, username)
            ssh_kwargs = {
                'hostname': host,
                'port': port,
                'username': username,
                'timeout': ssh_timeout,
                'display_ssh_output': display_ssh_output,
                'sudo_password': sudo_password,
                'sftp': opts.get('use_sftp', False)
            }
            ssh_kwargs.update(__ssh_gateway_config_dict(gateway))
            if key_filename:
                log.debug('Using %s as the key_filename', key_filename)
                ssh_kwargs['key_filename'] = key_filename
            elif password and 'has_ssh_agent' in kwargs and kwargs['has_ssh_agent'] is False:
                # Fall back to password auth only when no ssh-agent is around.
                ssh_kwargs['password'] = password
            # TODO: write some tests ???
            # TODO: check edge cases (e.g. ssh gateways, salt deploy disabled, etc.)
            # Only execute the script when the remote marker path exists.
            if root_cmd('test -e \\"{0}\\"'.format(tmp_dir), tty, sudo,
                        allow_failure=True, **ssh_kwargs) and inline_script:
                log.debug('Found inline script to execute.')
                for cmd_line in inline_script:
                    log.info('Executing inline command: %s', cmd_line)
                    ret = root_cmd('sh -c "( {0} )"'.format(cmd_line),
                                   tty, sudo, allow_failure=True, **ssh_kwargs)
                    if ret:
                        log.info('[%s] Output: %s', cmd_line, ret)
    # TODO: ensure we send the correct return value
    return True
constant[
Run the inline script commands, one by one
:**kwargs: catch all other things we may get but don't actually need/use
]
variable[gateway] assign[=] constant[None]
if compare[constant[gateway] in name[kwargs]] begin[:]
variable[gateway] assign[=] call[name[kwargs]][constant[gateway]]
variable[starttime] assign[=] call[name[time].mktime, parameter[call[name[time].localtime, parameter[]]]]
call[name[log].debug, parameter[constant[Deploying %s at %s], name[host], name[starttime]]]
variable[known_hosts_file] assign[=] call[name[kwargs].get, parameter[constant[known_hosts_file], constant[/dev/null]]]
if call[name[wait_for_port], parameter[]] begin[:]
call[name[log].debug, parameter[constant[SSH port %s on %s is available], name[port], name[host]]]
variable[newtimeout] assign[=] binary_operation[name[timeout] - binary_operation[call[name[time].mktime, parameter[call[name[time].localtime, parameter[]]]] - name[starttime]]]
if call[name[wait_for_passwd], parameter[name[host]]] begin[:]
call[name[log].debug, parameter[constant[Logging into %s:%s as %s], name[host], name[port], name[username]]]
variable[newtimeout] assign[=] binary_operation[name[timeout] - binary_operation[call[name[time].mktime, parameter[call[name[time].localtime, parameter[]]]] - name[starttime]]]
variable[ssh_kwargs] assign[=] dictionary[[<ast.Constant object at 0x7da1b2096110>, <ast.Constant object at 0x7da1b20960e0>, <ast.Constant object at 0x7da1b20960b0>, <ast.Constant object at 0x7da1b2096080>, <ast.Constant object at 0x7da1b2096050>, <ast.Constant object at 0x7da1b2096020>, <ast.Constant object at 0x7da1b2095ff0>], [<ast.Name object at 0x7da1b2095fc0>, <ast.Name object at 0x7da1b2095f90>, <ast.Name object at 0x7da1b2095f60>, <ast.Name object at 0x7da1b2095f30>, <ast.Name object at 0x7da1b2095f00>, <ast.Name object at 0x7da1b2095ed0>, <ast.Call object at 0x7da1b2095ea0>]]
call[name[ssh_kwargs].update, parameter[call[name[__ssh_gateway_config_dict], parameter[name[gateway]]]]]
if name[key_filename] begin[:]
call[name[log].debug, parameter[constant[Using %s as the key_filename], name[key_filename]]]
call[name[ssh_kwargs]][constant[key_filename]] assign[=] name[key_filename]
if <ast.BoolOp object at 0x7da1b2095660> begin[:]
call[name[log].debug, parameter[constant[Found inline script to execute.]]]
for taget[name[cmd_line]] in starred[name[inline_script]] begin[:]
call[name[log].info, parameter[constant[Executing inline command: %s], name[cmd_line]]]
variable[ret] assign[=] call[name[root_cmd], parameter[call[constant[sh -c "( {0} )"].format, parameter[name[cmd_line]]], name[tty], name[sudo]]]
if name[ret] begin[:]
call[name[log].info, parameter[constant[[%s] Output: %s], name[cmd_line], name[ret]]]
return[constant[True]] | keyword[def] identifier[run_inline_script] ( identifier[host] ,
identifier[name] = keyword[None] ,
identifier[port] = literal[int] ,
identifier[timeout] = literal[int] ,
identifier[username] = literal[string] ,
identifier[key_filename] = keyword[None] ,
identifier[inline_script] = keyword[None] ,
identifier[ssh_timeout] = literal[int] ,
identifier[display_ssh_output] = keyword[True] ,
identifier[parallel] = keyword[False] ,
identifier[sudo_password] = keyword[None] ,
identifier[sudo] = keyword[False] ,
identifier[password] = keyword[None] ,
identifier[tty] = keyword[None] ,
identifier[opts] = keyword[None] ,
identifier[tmp_dir] = literal[string] ,
** identifier[kwargs] ):
literal[string]
identifier[gateway] = keyword[None]
keyword[if] literal[string] keyword[in] identifier[kwargs] :
identifier[gateway] = identifier[kwargs] [ literal[string] ]
identifier[starttime] = identifier[time] . identifier[mktime] ( identifier[time] . identifier[localtime] ())
identifier[log] . identifier[debug] ( literal[string] , identifier[host] , identifier[starttime] )
identifier[known_hosts_file] = identifier[kwargs] . identifier[get] ( literal[string] , literal[string] )
keyword[if] identifier[wait_for_port] ( identifier[host] = identifier[host] , identifier[port] = identifier[port] , identifier[gateway] = identifier[gateway] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[port] , identifier[host] )
identifier[newtimeout] = identifier[timeout] -( identifier[time] . identifier[mktime] ( identifier[time] . identifier[localtime] ())- identifier[starttime] )
keyword[if] identifier[wait_for_passwd] ( identifier[host] , identifier[port] = identifier[port] , identifier[username] = identifier[username] ,
identifier[password] = identifier[password] , identifier[key_filename] = identifier[key_filename] ,
identifier[ssh_timeout] = identifier[ssh_timeout] ,
identifier[display_ssh_output] = identifier[display_ssh_output] ,
identifier[gateway] = identifier[gateway] , identifier[known_hosts_file] = identifier[known_hosts_file] ):
identifier[log] . identifier[debug] ( literal[string] , identifier[host] , identifier[port] , identifier[username] )
identifier[newtimeout] = identifier[timeout] -( identifier[time] . identifier[mktime] ( identifier[time] . identifier[localtime] ())- identifier[starttime] )
identifier[ssh_kwargs] ={
literal[string] : identifier[host] ,
literal[string] : identifier[port] ,
literal[string] : identifier[username] ,
literal[string] : identifier[ssh_timeout] ,
literal[string] : identifier[display_ssh_output] ,
literal[string] : identifier[sudo_password] ,
literal[string] : identifier[opts] . identifier[get] ( literal[string] , keyword[False] )
}
identifier[ssh_kwargs] . identifier[update] ( identifier[__ssh_gateway_config_dict] ( identifier[gateway] ))
keyword[if] identifier[key_filename] :
identifier[log] . identifier[debug] ( literal[string] , identifier[key_filename] )
identifier[ssh_kwargs] [ literal[string] ]= identifier[key_filename]
keyword[elif] identifier[password] keyword[and] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ] keyword[is] keyword[False] :
identifier[ssh_kwargs] [ literal[string] ]= identifier[password]
keyword[if] identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[tmp_dir] ), identifier[tty] , identifier[sudo] ,
identifier[allow_failure] = keyword[True] ,** identifier[ssh_kwargs] ) keyword[and] identifier[inline_script] :
identifier[log] . identifier[debug] ( literal[string] )
keyword[for] identifier[cmd_line] keyword[in] identifier[inline_script] :
identifier[log] . identifier[info] ( literal[string] , identifier[cmd_line] )
identifier[ret] = identifier[root_cmd] ( literal[string] . identifier[format] ( identifier[cmd_line] ),
identifier[tty] , identifier[sudo] , identifier[allow_failure] = keyword[True] ,** identifier[ssh_kwargs] )
keyword[if] identifier[ret] :
identifier[log] . identifier[info] ( literal[string] , identifier[cmd_line] , identifier[ret] )
keyword[return] keyword[True] | def run_inline_script(host, name=None, port=22, timeout=900, username='root', key_filename=None, inline_script=None, ssh_timeout=15, display_ssh_output=True, parallel=False, sudo_password=None, sudo=False, password=None, tty=None, opts=None, tmp_dir='/tmp/.saltcloud-inline_script', **kwargs):
"""
Run the inline script commands, one by one
:**kwargs: catch all other things we may get but don't actually need/use
"""
gateway = None
if 'gateway' in kwargs:
gateway = kwargs['gateway'] # depends on [control=['if'], data=['kwargs']]
starttime = time.mktime(time.localtime())
log.debug('Deploying %s at %s', host, starttime)
known_hosts_file = kwargs.get('known_hosts_file', '/dev/null')
if wait_for_port(host=host, port=port, gateway=gateway):
log.debug('SSH port %s on %s is available', port, host)
newtimeout = timeout - (time.mktime(time.localtime()) - starttime)
if wait_for_passwd(host, port=port, username=username, password=password, key_filename=key_filename, ssh_timeout=ssh_timeout, display_ssh_output=display_ssh_output, gateway=gateway, known_hosts_file=known_hosts_file):
log.debug('Logging into %s:%s as %s', host, port, username)
newtimeout = timeout - (time.mktime(time.localtime()) - starttime)
ssh_kwargs = {'hostname': host, 'port': port, 'username': username, 'timeout': ssh_timeout, 'display_ssh_output': display_ssh_output, 'sudo_password': sudo_password, 'sftp': opts.get('use_sftp', False)}
ssh_kwargs.update(__ssh_gateway_config_dict(gateway))
if key_filename:
log.debug('Using %s as the key_filename', key_filename)
ssh_kwargs['key_filename'] = key_filename # depends on [control=['if'], data=[]]
elif password and 'has_ssh_agent' in kwargs and (kwargs['has_ssh_agent'] is False):
ssh_kwargs['password'] = password # depends on [control=['if'], data=[]]
# TODO: write some tests ???
# TODO: check edge cases (e.g. ssh gateways, salt deploy disabled, etc.)
if root_cmd('test -e \\"{0}\\"'.format(tmp_dir), tty, sudo, allow_failure=True, **ssh_kwargs) and inline_script:
log.debug('Found inline script to execute.')
for cmd_line in inline_script:
log.info('Executing inline command: %s', cmd_line)
ret = root_cmd('sh -c "( {0} )"'.format(cmd_line), tty, sudo, allow_failure=True, **ssh_kwargs)
if ret:
log.info('[%s] Output: %s', cmd_line, ret) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cmd_line']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# TODO: ensure we send the correct return value
return True |
def is_molecular_function(self, go_term):
    """
    Return True if *go_term* is the molecular_function root (GO:0003674)
    or has it among its is_a/part_of ancestors, False otherwise.
    """
    mf_root = "GO:0003674"
    # The root trivially belongs to its own branch; skip the closure lookup.
    if go_term == mf_root:
        return True
    # Membership test already yields the boolean we need.
    return mf_root in self.get_isa_closure(go_term)
constant[
Returns True is go_term has is_a, part_of ancestor of molecular function GO:0003674
]
variable[mf_root] assign[=] constant[GO:0003674]
if compare[name[go_term] equal[==] name[mf_root]] begin[:]
return[constant[True]]
variable[ancestors] assign[=] call[name[self].get_isa_closure, parameter[name[go_term]]]
if compare[name[mf_root] in name[ancestors]] begin[:]
return[constant[True]] | keyword[def] identifier[is_molecular_function] ( identifier[self] , identifier[go_term] ):
literal[string]
identifier[mf_root] = literal[string]
keyword[if] identifier[go_term] == identifier[mf_root] :
keyword[return] keyword[True]
identifier[ancestors] = identifier[self] . identifier[get_isa_closure] ( identifier[go_term] )
keyword[if] identifier[mf_root] keyword[in] identifier[ancestors] :
keyword[return] keyword[True]
keyword[else] :
keyword[return] keyword[False] | def is_molecular_function(self, go_term):
"""
Returns True is go_term has is_a, part_of ancestor of molecular function GO:0003674
"""
mf_root = 'GO:0003674'
if go_term == mf_root:
return True # depends on [control=['if'], data=[]]
ancestors = self.get_isa_closure(go_term)
if mf_root in ancestors:
return True # depends on [control=['if'], data=[]]
else:
return False |
def _aggregate_func(self, aggregate):
"""
Return a suitable aggregate score function.
"""
funcs = {"sum": add, "min": min, "max": max}
func_name = aggregate.lower() if aggregate else 'sum'
try:
return funcs[func_name]
except KeyError:
raise TypeError("Unsupported aggregate: {}".format(aggregate)) | def function[_aggregate_func, parameter[self, aggregate]]:
constant[
Return a suitable aggregate score function.
]
variable[funcs] assign[=] dictionary[[<ast.Constant object at 0x7da2054a7e50>, <ast.Constant object at 0x7da2054a5000>, <ast.Constant object at 0x7da204347520>], [<ast.Name object at 0x7da204345300>, <ast.Name object at 0x7da204344b50>, <ast.Name object at 0x7da204344e20>]]
variable[func_name] assign[=] <ast.IfExp object at 0x7da2043454e0>
<ast.Try object at 0x7da2043452d0> | keyword[def] identifier[_aggregate_func] ( identifier[self] , identifier[aggregate] ):
literal[string]
identifier[funcs] ={ literal[string] : identifier[add] , literal[string] : identifier[min] , literal[string] : identifier[max] }
identifier[func_name] = identifier[aggregate] . identifier[lower] () keyword[if] identifier[aggregate] keyword[else] literal[string]
keyword[try] :
keyword[return] identifier[funcs] [ identifier[func_name] ]
keyword[except] identifier[KeyError] :
keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[aggregate] )) | def _aggregate_func(self, aggregate):
"""
Return a suitable aggregate score function.
"""
funcs = {'sum': add, 'min': min, 'max': max}
func_name = aggregate.lower() if aggregate else 'sum'
try:
return funcs[func_name] # depends on [control=['try'], data=[]]
except KeyError:
raise TypeError('Unsupported aggregate: {}'.format(aggregate)) # depends on [control=['except'], data=[]] |
def send_to_databox_header(self, destination_databox):
    """
    Push every entry currently held in the tree into the header of
    *destination_databox*, alphabetically ordered. Existing header
    entries are updated in place rather than duplicated.
    """
    keys, header_dict = self.get_dictionary()
    destination_databox.update_headers(header_dict, keys)
constant[
Sends all the information currently in the tree to the supplied
databox's header, in alphabetical order. If the entries already
exists, just updates them.
]
<ast.Tuple object at 0x7da18bc71f90> assign[=] call[name[self].get_dictionary, parameter[]]
call[name[destination_databox].update_headers, parameter[name[d], name[k]]] | keyword[def] identifier[send_to_databox_header] ( identifier[self] , identifier[destination_databox] ):
literal[string]
identifier[k] , identifier[d] = identifier[self] . identifier[get_dictionary] ()
identifier[destination_databox] . identifier[update_headers] ( identifier[d] , identifier[k] ) | def send_to_databox_header(self, destination_databox):
"""
Sends all the information currently in the tree to the supplied
databox's header, in alphabetical order. If the entries already
exists, just updates them.
"""
(k, d) = self.get_dictionary()
destination_databox.update_headers(d, k) |
def computeFunctional(x, cooP):
    '''
    Compute value of functional J(X) = 0.5 * ||PX - PA||^2_F,
    where P is the projector onto the index subspace of known elements,
    X is our approximation and A is the original tensor.

    Parameters:
        :tt.vector: x
            current approximation [X]
        :dict: cooP
            dictionary with two records
            - 'indices': numpy.array of P x d shape,
              contains index subspace of P known elements;
              each row is the index of one element.
            - 'values': numpy array of size P,
              contains P known values.
    Returns:
        :float: result
            value of the functional
    '''
    indices = cooP['indices']
    values = cooP['values']
    [P, d] = indices.shape
    # Every known index must come with exactly one known value.
    assert P == len(values)
    result = 0
    # `range` (not the Python-2-only `xrange`) keeps this working on Python 3.
    for p in range(P):
        index = tuple(indices[p, :])
        result += (x[index] - values[p])**2
    result *= 0.5
    return result
constant[
Compute value of functional J(X) = ||PX - PA||^2_F,
where P is projector into index subspace of known elements,
X is our approximation,
A is original tensor.
Parameters:
:tt.vector: x
current approximation [X]
:dict: cooP
dictionary with two records
- 'indices': numpy.array of P x d shape,
contains index subspace of P known elements;
each string is an index of one element.
- 'values': numpy array of size P,
contains P known values.
Returns:
:float: result
value of functional
]
variable[indices] assign[=] call[name[cooP]][constant[indices]]
variable[values] assign[=] call[name[cooP]][constant[values]]
<ast.List object at 0x7da1b057aaa0> assign[=] name[indices].shape
assert[compare[name[P] equal[==] call[name[len], parameter[name[values]]]]]
variable[result] assign[=] constant[0]
for taget[name[p]] in starred[call[name[xrange], parameter[name[P]]]] begin[:]
variable[index] assign[=] call[name[tuple], parameter[call[name[indices]][tuple[[<ast.Name object at 0x7da1b0579c30>, <ast.Slice object at 0x7da1b0579b10>]]]]]
<ast.AugAssign object at 0x7da1b0579ab0>
<ast.AugAssign object at 0x7da1b0579bd0>
return[name[result]] | keyword[def] identifier[computeFunctional] ( identifier[x] , identifier[cooP] ):
literal[string]
identifier[indices] = identifier[cooP] [ literal[string] ]
identifier[values] = identifier[cooP] [ literal[string] ]
[ identifier[P] , identifier[d] ]= identifier[indices] . identifier[shape]
keyword[assert] identifier[P] == identifier[len] ( identifier[values] )
identifier[result] = literal[int]
keyword[for] identifier[p] keyword[in] identifier[xrange] ( identifier[P] ):
identifier[index] = identifier[tuple] ( identifier[indices] [ identifier[p] ,:])
identifier[result] +=( identifier[x] [ identifier[index] ]- identifier[values] [ identifier[p] ])** literal[int]
identifier[result] *= literal[int]
keyword[return] identifier[result] | def computeFunctional(x, cooP):
"""
Compute value of functional J(X) = ||PX - PA||^2_F,
where P is projector into index subspace of known elements,
X is our approximation,
A is original tensor.
Parameters:
:tt.vector: x
current approximation [X]
:dict: cooP
dictionary with two records
- 'indices': numpy.array of P x d shape,
contains index subspace of P known elements;
each string is an index of one element.
- 'values': numpy array of size P,
contains P known values.
Returns:
:float: result
value of functional
"""
indices = cooP['indices']
values = cooP['values']
[P, d] = indices.shape
assert P == len(values)
result = 0
for p in xrange(P):
index = tuple(indices[p, :])
result += (x[index] - values[p]) ** 2 # depends on [control=['for'], data=['p']]
result *= 0.5
return result |
def merge(self, *series, **kwargs):
'''Merge this :class:`ColumnTS` with several other *series*.
:parameters series: a list of tuples where the nth element is a tuple
of the form::
(wight_n, ts_n1, ts_n2, ..., ts_nMn)
The result will be calculated using the formula::
ts = weight_1*ts_11*ts_12*...*ts_1M1 + weight_2*ts_21*ts_22*...*ts_2M2 +
...
'''
session = self.session
if not session:
raise SessionNotAvailable('No session available')
self.check_router(session.router, *series)
return self._merge(*series, **kwargs) | def function[merge, parameter[self]]:
constant[Merge this :class:`ColumnTS` with several other *series*.
:parameters series: a list of tuples where the nth element is a tuple
of the form::
(wight_n, ts_n1, ts_n2, ..., ts_nMn)
The result will be calculated using the formula::
ts = weight_1*ts_11*ts_12*...*ts_1M1 + weight_2*ts_21*ts_22*...*ts_2M2 +
...
]
variable[session] assign[=] name[self].session
if <ast.UnaryOp object at 0x7da1b0fe6cb0> begin[:]
<ast.Raise object at 0x7da1b0fe6050>
call[name[self].check_router, parameter[name[session].router, <ast.Starred object at 0x7da1b0fe6e60>]]
return[call[name[self]._merge, parameter[<ast.Starred object at 0x7da1b0fe7e20>]]] | keyword[def] identifier[merge] ( identifier[self] ,* identifier[series] ,** identifier[kwargs] ):
literal[string]
identifier[session] = identifier[self] . identifier[session]
keyword[if] keyword[not] identifier[session] :
keyword[raise] identifier[SessionNotAvailable] ( literal[string] )
identifier[self] . identifier[check_router] ( identifier[session] . identifier[router] ,* identifier[series] )
keyword[return] identifier[self] . identifier[_merge] (* identifier[series] ,** identifier[kwargs] ) | def merge(self, *series, **kwargs):
"""Merge this :class:`ColumnTS` with several other *series*.
:parameters series: a list of tuples where the nth element is a tuple
of the form::
(wight_n, ts_n1, ts_n2, ..., ts_nMn)
The result will be calculated using the formula::
ts = weight_1*ts_11*ts_12*...*ts_1M1 + weight_2*ts_21*ts_22*...*ts_2M2 +
...
"""
session = self.session
if not session:
raise SessionNotAvailable('No session available') # depends on [control=['if'], data=[]]
self.check_router(session.router, *series)
return self._merge(*series, **kwargs) |
def train(self, params, epoch):
    """
    Train one epoch of this model by iterating through mini batches. An epoch
    ends after one pass through the training set, or if the number of mini
    batches exceeds the parameter "batches_in_epoch".

    :param params: dict of run options. Keys read here: "log_interval",
        "create_plots", "batches_in_epoch", and optionally
        "first_epoch_batch_size" / "batches_in_first_epoch".
    :param epoch: index of the epoch being trained; a value of 0 triggers the
        special first-epoch batch-size handling below.
    """
    # Callback used to log information on every batch
    def log(model, batch_idx):
      if batch_idx % params["log_interval"] == 0:
        entropy = model.entropy()
        print("logging: {} learning iterations, entropy: {} / {}".format(
          model.getLearningIterations(), float(entropy), model.maxEntropy()))
        if params["create_plots"]:
          # Plot file is keyed by epoch and learning-iteration count so
          # successive plots in the same epoch don't overwrite each other.
          plotDutyCycles(model.dutyCycle,
                         self.resultsDir + "/figure_" + str(epoch) + "_" +
                         str(model.getLearningIterations()))

    # Adjust first epoch batch size to stabilize the dutycycles at the
    # beginning of the training
    loader = self.train_loader
    batches_in_epoch = params["batches_in_epoch"]
    if "first_epoch_batch_size" in params:
      if epoch == 0:
        # "batches_in_first_epoch" is optional; fall back to the regular
        # per-epoch batch budget when absent.
        batches_in_epoch = params.get("batches_in_first_epoch", batches_in_epoch)
        loader = torch.utils.data.DataLoader(self.train_loader.dataset,
                                             batch_size=params["first_epoch_batch_size"],
                                             sampler=self.train_loader.sampler)

    trainModel(model=self.model, loader=loader,
               optimizer=self.optimizer, device=self.device,
               batches_in_epoch=batches_in_epoch,
               batch_callback=log)
    self.model.postEpoch()
constant[
Train one epoch of this model by iterating through mini batches. An epoch
ends after one pass through the training set, or if the number of mini
batches exceeds the parameter "batches_in_epoch".
]
def function[log, parameter[model, batch_idx]]:
if compare[binary_operation[name[batch_idx] <ast.Mod object at 0x7da2590d6920> call[name[params]][constant[log_interval]]] equal[==] constant[0]] begin[:]
variable[entropy] assign[=] call[name[model].entropy, parameter[]]
call[name[print], parameter[call[constant[logging: {} learning iterations, entropy: {} / {}].format, parameter[call[name[model].getLearningIterations, parameter[]], call[name[float], parameter[name[entropy]]], call[name[model].maxEntropy, parameter[]]]]]]
if call[name[params]][constant[create_plots]] begin[:]
call[name[plotDutyCycles], parameter[name[model].dutyCycle, binary_operation[binary_operation[binary_operation[binary_operation[name[self].resultsDir + constant[/figure_]] + call[name[str], parameter[name[epoch]]]] + constant[_]] + call[name[str], parameter[call[name[model].getLearningIterations, parameter[]]]]]]]
variable[loader] assign[=] name[self].train_loader
variable[batches_in_epoch] assign[=] call[name[params]][constant[batches_in_epoch]]
if compare[constant[first_epoch_batch_size] in name[params]] begin[:]
if compare[name[epoch] equal[==] constant[0]] begin[:]
variable[batches_in_epoch] assign[=] call[name[params].get, parameter[constant[batches_in_first_epoch], name[batches_in_epoch]]]
variable[loader] assign[=] call[name[torch].utils.data.DataLoader, parameter[name[self].train_loader.dataset]]
call[name[trainModel], parameter[]]
call[name[self].model.postEpoch, parameter[]] | keyword[def] identifier[train] ( identifier[self] , identifier[params] , identifier[epoch] ):
literal[string]
keyword[def] identifier[log] ( identifier[model] , identifier[batch_idx] ):
keyword[if] identifier[batch_idx] % identifier[params] [ literal[string] ]== literal[int] :
identifier[entropy] = identifier[model] . identifier[entropy] ()
identifier[print] ( literal[string] . identifier[format] (
identifier[model] . identifier[getLearningIterations] (), identifier[float] ( identifier[entropy] ), identifier[model] . identifier[maxEntropy] ()))
keyword[if] identifier[params] [ literal[string] ]:
identifier[plotDutyCycles] ( identifier[model] . identifier[dutyCycle] ,
identifier[self] . identifier[resultsDir] + literal[string] + identifier[str] ( identifier[epoch] )+ literal[string] +
identifier[str] ( identifier[model] . identifier[getLearningIterations] ()))
identifier[loader] = identifier[self] . identifier[train_loader]
identifier[batches_in_epoch] = identifier[params] [ literal[string] ]
keyword[if] literal[string] keyword[in] identifier[params] :
keyword[if] identifier[epoch] == literal[int] :
identifier[batches_in_epoch] = identifier[params] . identifier[get] ( literal[string] , identifier[batches_in_epoch] )
identifier[loader] = identifier[torch] . identifier[utils] . identifier[data] . identifier[DataLoader] ( identifier[self] . identifier[train_loader] . identifier[dataset] ,
identifier[batch_size] = identifier[params] [ literal[string] ],
identifier[sampler] = identifier[self] . identifier[train_loader] . identifier[sampler] )
identifier[trainModel] ( identifier[model] = identifier[self] . identifier[model] , identifier[loader] = identifier[loader] ,
identifier[optimizer] = identifier[self] . identifier[optimizer] , identifier[device] = identifier[self] . identifier[device] ,
identifier[batches_in_epoch] = identifier[batches_in_epoch] ,
identifier[batch_callback] = identifier[log] )
identifier[self] . identifier[model] . identifier[postEpoch] () | def train(self, params, epoch):
"""
Train one epoch of this model by iterating through mini batches. An epoch
ends after one pass through the training set, or if the number of mini
batches exceeds the parameter "batches_in_epoch".
"""
# Callback used to log information on every batch
def log(model, batch_idx):
if batch_idx % params['log_interval'] == 0:
entropy = model.entropy()
print('logging: {} learning iterations, entropy: {} / {}'.format(model.getLearningIterations(), float(entropy), model.maxEntropy()))
if params['create_plots']:
plotDutyCycles(model.dutyCycle, self.resultsDir + '/figure_' + str(epoch) + '_' + str(model.getLearningIterations())) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
# Adjust first epoch batch size to stabilize the dutycycles at the
# beginning of the training
loader = self.train_loader
batches_in_epoch = params['batches_in_epoch']
if 'first_epoch_batch_size' in params:
if epoch == 0:
batches_in_epoch = params.get('batches_in_first_epoch', batches_in_epoch)
loader = torch.utils.data.DataLoader(self.train_loader.dataset, batch_size=params['first_epoch_batch_size'], sampler=self.train_loader.sampler) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['params']]
trainModel(model=self.model, loader=loader, optimizer=self.optimizer, device=self.device, batches_in_epoch=batches_in_epoch, batch_callback=log)
self.model.postEpoch() |
def get_generic_fields():
    """Return a list of all GenericForeignKeys in all models.

    Scans the class namespace of every installed model and collects the
    GenericForeignKey descriptors defined directly on it.
    """
    generic_fields = []
    for model in apps.get_models():
        # Only the attribute values are needed; iterating .values() avoids
        # binding an unused attribute-name variable on every iteration.
        for field in model.__dict__.values():
            if isinstance(field, GenericForeignKey):
                generic_fields.append(field)
    return generic_fields
constant[Return a list of all GenericForeignKeys in all models.]
variable[generic_fields] assign[=] list[[]]
for taget[name[model]] in starred[call[name[apps].get_models, parameter[]]] begin[:]
for taget[tuple[[<ast.Name object at 0x7da1b17e34f0>, <ast.Name object at 0x7da1b17e2710>]]] in starred[call[name[model].__dict__.items, parameter[]]] begin[:]
if call[name[isinstance], parameter[name[field], name[GenericForeignKey]]] begin[:]
call[name[generic_fields].append, parameter[name[field]]]
return[name[generic_fields]] | keyword[def] identifier[get_generic_fields] ():
literal[string]
identifier[generic_fields] =[]
keyword[for] identifier[model] keyword[in] identifier[apps] . identifier[get_models] ():
keyword[for] identifier[field_name] , identifier[field] keyword[in] identifier[model] . identifier[__dict__] . identifier[items] ():
keyword[if] identifier[isinstance] ( identifier[field] , identifier[GenericForeignKey] ):
identifier[generic_fields] . identifier[append] ( identifier[field] )
keyword[return] identifier[generic_fields] | def get_generic_fields():
"""Return a list of all GenericForeignKeys in all models."""
generic_fields = []
for model in apps.get_models():
for (field_name, field) in model.__dict__.items():
if isinstance(field, GenericForeignKey):
generic_fields.append(field) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['model']]
return generic_fields |
def upload_metric(self, dataset_name, table_name, run_id):
    """Upload metric information to Bigquery.

    Args:
      dataset_name: string, the name of bigquery dataset where the data will be
        uploaded.
      table_name: string, the name of bigquery table under the dataset where
        the metric data will be uploaded. This is different from the
        benchmark_run table.
      run_id: string, a unique ID that will be attached to the data, usually
        this is a UUID4 format. This should be the same as the benchmark run_id.
    """
    expected_file = os.path.join(
        self._logging_dir, logger.METRIC_LOG_FILE_NAME)
    with tf.gfile.GFile(expected_file) as f:
      # The metric log is read as one JSON document per line; blank lines
      # are skipped by the filter below.
      lines = f.readlines()
      metrics = []
      for line in filter(lambda l: l.strip(), lines):
        metric = json.loads(line)
        # Tag every row with the run id so rows from different benchmark
        # runs can be distinguished in the destination table.
        metric["run_id"] = run_id
        metrics.append(metric)
      table_ref = self._bq_client.dataset(dataset_name).table(table_name)
      # insert_rows_json returns a (possibly empty) collection of per-row
      # errors; a non-empty result means at least one row failed to insert.
      errors = self._bq_client.insert_rows_json(table_ref, metrics)
      if errors:
        tf.logging.error(
            "Failed to upload benchmark info to bigquery: {}".format(errors))
constant[Upload metric information to Bigquery.
Args:
dataset_name: string, the name of bigquery dataset where the data will be
uploaded.
table_name: string, the name of bigquery table under the dataset where
the metric data will be uploaded. This is different from the
benchmark_run table.
run_id: string, a unique ID that will be attached to the data, usually
this is a UUID4 format. This should be the same as the benchmark run_id.
]
variable[expected_file] assign[=] call[name[os].path.join, parameter[name[self]._logging_dir, name[logger].METRIC_LOG_FILE_NAME]]
with call[name[tf].gfile.GFile, parameter[name[expected_file]]] begin[:]
variable[lines] assign[=] call[name[f].readlines, parameter[]]
variable[metrics] assign[=] list[[]]
for taget[name[line]] in starred[call[name[filter], parameter[<ast.Lambda object at 0x7da1b23441c0>, name[lines]]]] begin[:]
variable[metric] assign[=] call[name[json].loads, parameter[name[line]]]
call[name[metric]][constant[run_id]] assign[=] name[run_id]
call[name[metrics].append, parameter[name[metric]]]
variable[table_ref] assign[=] call[call[name[self]._bq_client.dataset, parameter[name[dataset_name]]].table, parameter[name[table_name]]]
variable[errors] assign[=] call[name[self]._bq_client.insert_rows_json, parameter[name[table_ref], name[metrics]]]
if name[errors] begin[:]
call[name[tf].logging.error, parameter[call[constant[Failed to upload benchmark info to bigquery: {}].format, parameter[name[errors]]]]] | keyword[def] identifier[upload_metric] ( identifier[self] , identifier[dataset_name] , identifier[table_name] , identifier[run_id] ):
literal[string]
identifier[expected_file] = identifier[os] . identifier[path] . identifier[join] (
identifier[self] . identifier[_logging_dir] , identifier[logger] . identifier[METRIC_LOG_FILE_NAME] )
keyword[with] identifier[tf] . identifier[gfile] . identifier[GFile] ( identifier[expected_file] ) keyword[as] identifier[f] :
identifier[lines] = identifier[f] . identifier[readlines] ()
identifier[metrics] =[]
keyword[for] identifier[line] keyword[in] identifier[filter] ( keyword[lambda] identifier[l] : identifier[l] . identifier[strip] (), identifier[lines] ):
identifier[metric] = identifier[json] . identifier[loads] ( identifier[line] )
identifier[metric] [ literal[string] ]= identifier[run_id]
identifier[metrics] . identifier[append] ( identifier[metric] )
identifier[table_ref] = identifier[self] . identifier[_bq_client] . identifier[dataset] ( identifier[dataset_name] ). identifier[table] ( identifier[table_name] )
identifier[errors] = identifier[self] . identifier[_bq_client] . identifier[insert_rows_json] ( identifier[table_ref] , identifier[metrics] )
keyword[if] identifier[errors] :
identifier[tf] . identifier[logging] . identifier[error] (
literal[string] . identifier[format] ( identifier[errors] )) | def upload_metric(self, dataset_name, table_name, run_id):
"""Upload metric information to Bigquery.
Args:
dataset_name: string, the name of bigquery dataset where the data will be
uploaded.
table_name: string, the name of bigquery table under the dataset where
the metric data will be uploaded. This is different from the
benchmark_run table.
run_id: string, a unique ID that will be attached to the data, usually
this is a UUID4 format. This should be the same as the benchmark run_id.
"""
expected_file = os.path.join(self._logging_dir, logger.METRIC_LOG_FILE_NAME)
with tf.gfile.GFile(expected_file) as f:
lines = f.readlines()
metrics = []
for line in filter(lambda l: l.strip(), lines):
metric = json.loads(line)
metric['run_id'] = run_id
metrics.append(metric) # depends on [control=['for'], data=['line']]
table_ref = self._bq_client.dataset(dataset_name).table(table_name)
errors = self._bq_client.insert_rows_json(table_ref, metrics)
if errors:
tf.logging.error('Failed to upload benchmark info to bigquery: {}'.format(errors)) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['f']] |
def write_stream(src_file, destination_path):
    """
    Copy the contents of a readable file-like object to a file on disk.

    :param src_file: file-like object supplying the data to be written
    :param destination_path: path of the destination file (opened in
        binary write mode, overwriting any existing content)
    """
    with open(destination_path, 'wb') as out_fh:
        shutil.copyfileobj(src_file, out_fh)
constant[
Write the file-like src_file object to the string dest_path
:param src_file: file-like data to be written
:param destination_path: string of the destionation file
]
with call[name[open], parameter[name[destination_path], constant[wb]]] begin[:]
call[name[shutil].copyfileobj, parameter[]] | keyword[def] identifier[write_stream] ( identifier[src_file] , identifier[destination_path] ):
literal[string]
keyword[with] identifier[open] ( identifier[destination_path] , literal[string] ) keyword[as] identifier[destination_file] :
identifier[shutil] . identifier[copyfileobj] ( identifier[fsrc] = identifier[src_file] , identifier[fdst] = identifier[destination_file] ) | def write_stream(src_file, destination_path):
"""
Write the file-like src_file object to the string dest_path
:param src_file: file-like data to be written
:param destination_path: string of the destionation file
"""
with open(destination_path, 'wb') as destination_file:
shutil.copyfileobj(fsrc=src_file, fdst=destination_file) # depends on [control=['with'], data=['destination_file']] |
def reverse_compl_with_name(old_seq):
    """Return the reverse complement of a SeqIO record, carrying over the
    original record's id and description onto the new record."""
    rc_seq = old_seq.reverse_complement()
    rc_seq.id = old_seq.id
    rc_seq.description = old_seq.description
    return rc_seq
constant[Reverse a SeqIO sequence, but keep its name intact.]
variable[new_seq] assign[=] call[name[old_seq].reverse_complement, parameter[]]
name[new_seq].id assign[=] name[old_seq].id
name[new_seq].description assign[=] name[old_seq].description
return[name[new_seq]] | keyword[def] identifier[reverse_compl_with_name] ( identifier[old_seq] ):
literal[string]
identifier[new_seq] = identifier[old_seq] . identifier[reverse_complement] ()
identifier[new_seq] . identifier[id] = identifier[old_seq] . identifier[id]
identifier[new_seq] . identifier[description] = identifier[old_seq] . identifier[description]
keyword[return] identifier[new_seq] | def reverse_compl_with_name(old_seq):
"""Reverse a SeqIO sequence, but keep its name intact."""
new_seq = old_seq.reverse_complement()
new_seq.id = old_seq.id
new_seq.description = old_seq.description
return new_seq |
def _evaluate(self,R,z,phi=0.,t=0.):
"""
NAME:
_evaluate
PURPOSE:
evaluate the potential at R,z
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
Phi(R,z)
HISTORY:
2018-08-04 - Written - Bovy (UofT)
"""
r2= R**2+z**2
if r2 <= self.a2:
return -1./self.a
else:
return -1./nu.sqrt(r2) | def function[_evaluate, parameter[self, R, z, phi, t]]:
constant[
NAME:
_evaluate
PURPOSE:
evaluate the potential at R,z
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
Phi(R,z)
HISTORY:
2018-08-04 - Written - Bovy (UofT)
]
variable[r2] assign[=] binary_operation[binary_operation[name[R] ** constant[2]] + binary_operation[name[z] ** constant[2]]]
if compare[name[r2] less_or_equal[<=] name[self].a2] begin[:]
return[binary_operation[<ast.UnaryOp object at 0x7da1b0c411e0> / name[self].a]] | keyword[def] identifier[_evaluate] ( identifier[self] , identifier[R] , identifier[z] , identifier[phi] = literal[int] , identifier[t] = literal[int] ):
literal[string]
identifier[r2] = identifier[R] ** literal[int] + identifier[z] ** literal[int]
keyword[if] identifier[r2] <= identifier[self] . identifier[a2] :
keyword[return] - literal[int] / identifier[self] . identifier[a]
keyword[else] :
keyword[return] - literal[int] / identifier[nu] . identifier[sqrt] ( identifier[r2] ) | def _evaluate(self, R, z, phi=0.0, t=0.0):
"""
NAME:
_evaluate
PURPOSE:
evaluate the potential at R,z
INPUT:
R - Galactocentric cylindrical radius
z - vertical height
phi - azimuth
t - time
OUTPUT:
Phi(R,z)
HISTORY:
2018-08-04 - Written - Bovy (UofT)
"""
r2 = R ** 2 + z ** 2
if r2 <= self.a2:
return -1.0 / self.a # depends on [control=['if'], data=[]]
else:
return -1.0 / nu.sqrt(r2) |
def codigo_ibge_uf(sigla):
    """Return the IBGE code for the given federation-unit (UF) abbreviation.

    :param sigla: UF abbreviation (first element of each entry in
        ``UNIDADES_FEDERACAO``)
    :return: the IBGE code stored at index ``_UF_CODIGO_IBGE`` of the entry
    :raises ValueError: if ``sigla`` does not match any UF
    """
    # Single pass over the table instead of materializing a throwaway list
    # of siglas and scanning it again with .index().
    for uf in UNIDADES_FEDERACAO:
        if uf[0] == sigla:  # the first element of each entry is the sigla
            return uf[_UF_CODIGO_IBGE]
    raise ValueError('%r is not a known UF sigla' % (sigla,))
constant[Retorna o código do IBGE para a UF informada.]
variable[idx] assign[=] call[<ast.ListComp object at 0x7da1b1bb26b0>.index, parameter[name[sigla]]]
return[call[call[name[UNIDADES_FEDERACAO]][name[idx]]][name[_UF_CODIGO_IBGE]]] | keyword[def] identifier[codigo_ibge_uf] ( identifier[sigla] ):
literal[string]
identifier[idx] =[ identifier[s] keyword[for] identifier[s] , identifier[i] , identifier[n] , identifier[r] keyword[in] identifier[UNIDADES_FEDERACAO] ]. identifier[index] ( identifier[sigla] )
keyword[return] identifier[UNIDADES_FEDERACAO] [ identifier[idx] ][ identifier[_UF_CODIGO_IBGE] ] | def codigo_ibge_uf(sigla):
"""Retorna o código do IBGE para a UF informada."""
idx = [s for (s, i, n, r) in UNIDADES_FEDERACAO].index(sigla)
return UNIDADES_FEDERACAO[idx][_UF_CODIGO_IBGE] |
def load(*fps, missing=Missing.silent):
    """
    Read a `.Configuration` instance from file-like objects.

    Each object in *fps* is read fully and parsed as YAML; the resulting
    mappings are combined into a single `.Configuration`.

    :param fps: file-like objects (supporting ``.read()``)
    :param missing: policy to be used when a configured key is missing, either
        as a `.Missing` instance or a default value
    :return: a `.Configuration` instance providing values from *fps*
    :rtype: `.Configuration`
    """
    parsed_sources = (yaml.safe_load(fp.read()) for fp in fps)
    return Configuration(*parsed_sources, missing=missing)
constant[
Read a `.Configuration` instance from file-like objects.
:param fps: file-like objects (supporting ``.read()``)
:param missing: policy to be used when a configured key is missing, either
as a `.Missing` instance or a default value
:return: a `.Configuration` instance providing values from *fps*
:rtype: `.Configuration`
]
return[call[name[Configuration], parameter[<ast.Starred object at 0x7da1b1713940>]]] | keyword[def] identifier[load] (* identifier[fps] , identifier[missing] = identifier[Missing] . identifier[silent] ):
literal[string]
keyword[return] identifier[Configuration] (*( identifier[yaml] . identifier[safe_load] ( identifier[fp] . identifier[read] ()) keyword[for] identifier[fp] keyword[in] identifier[fps] ), identifier[missing] = identifier[missing] ) | def load(*fps, missing=Missing.silent):
"""
Read a `.Configuration` instance from file-like objects.
:param fps: file-like objects (supporting ``.read()``)
:param missing: policy to be used when a configured key is missing, either
as a `.Missing` instance or a default value
:return: a `.Configuration` instance providing values from *fps*
:rtype: `.Configuration`
"""
return Configuration(*(yaml.safe_load(fp.read()) for fp in fps), missing=missing) |
def rdf_generation(kg_object) -> str:
    """
    Convert input knowledge graph object into n-triples RDF
    :param kg_object: str, dict, or json object
    :return: n-triples RDF in str
    """
    import json
    # dicts are serialized to a JSON-LD string first; strings pass through.
    payload = json.dumps(kg_object) if isinstance(kg_object, dict) else kg_object
    graph = Graph()
    graph.parse(data=payload, format='json-ld')
    return graph.serialize(format='nt').decode('utf-8')
constant[
Convert input knowledge graph object into n-triples RDF
:param kg_object: str, dict, or json object
:return: n-triples RDF in str
]
import module[json]
if call[name[isinstance], parameter[name[kg_object], name[dict]]] begin[:]
variable[kg_object] assign[=] call[name[json].dumps, parameter[name[kg_object]]]
variable[g] assign[=] call[name[Graph], parameter[]]
call[name[g].parse, parameter[]]
return[call[call[name[g].serialize, parameter[]].decode, parameter[constant[utf-8]]]] | keyword[def] identifier[rdf_generation] ( identifier[kg_object] )-> identifier[str] :
literal[string]
keyword[import] identifier[json]
keyword[if] identifier[isinstance] ( identifier[kg_object] , identifier[dict] ):
identifier[kg_object] = identifier[json] . identifier[dumps] ( identifier[kg_object] )
identifier[g] = identifier[Graph] ()
identifier[g] . identifier[parse] ( identifier[data] = identifier[kg_object] , identifier[format] = literal[string] )
keyword[return] identifier[g] . identifier[serialize] ( identifier[format] = literal[string] ). identifier[decode] ( literal[string] ) | def rdf_generation(kg_object) -> str:
"""
Convert input knowledge graph object into n-triples RDF
:param kg_object: str, dict, or json object
:return: n-triples RDF in str
"""
import json
if isinstance(kg_object, dict):
kg_object = json.dumps(kg_object) # depends on [control=['if'], data=[]]
g = Graph()
g.parse(data=kg_object, format='json-ld')
return g.serialize(format='nt').decode('utf-8') |
    def process_refs_for_node(cls, manifest, current_project, node):
        """Given a manifest and a node in that manifest, process its refs

        Resolves each entry of ``node.refs`` to a target model, appends the
        target's unique id to ``node.depends_on['nodes']``, and writes the
        node back into ``manifest.nodes``. When a target is missing or
        disabled, this node is disabled instead of getting an edge.
        """
        target_model = None
        target_model_name = None
        target_model_package = None

        # A ref is either ('model_name',) or ('package', 'model_name').
        # NOTE(review): refs of any other length silently reuse the
        # name/package from the previous loop iteration — presumably
        # prevented by upstream validation; confirm.
        for ref in node.refs:
            if len(ref) == 1:
                target_model_name = ref[0]
            elif len(ref) == 2:
                target_model_package, target_model_name = ref

            target_model = cls.resolve_ref(
                manifest,
                target_model_name,
                target_model_package,
                current_project,
                node.get('package_name'))

            if target_model is None or target_model is cls.DISABLED:
                # This may raise. Even if it doesn't, we don't want to add
                # this node to the graph b/c there is no destination node
                node.config['enabled'] = False
                dbt.utils.invalid_ref_fail_unless_test(
                    node, target_model_name, target_model_package,
                    disabled=(target_model is cls.DISABLED)
                )

                continue

            target_model_id = target_model.get('unique_id')

            node.depends_on['nodes'].append(target_model_id)

        # Persist the (possibly disabled) node with its updated dependencies.
        manifest.nodes[node['unique_id']] = node
constant[Given a manifest and a node in that manifest, process its refs]
variable[target_model] assign[=] constant[None]
variable[target_model_name] assign[=] constant[None]
variable[target_model_package] assign[=] constant[None]
for taget[name[ref]] in starred[name[node].refs] begin[:]
if compare[call[name[len], parameter[name[ref]]] equal[==] constant[1]] begin[:]
variable[target_model_name] assign[=] call[name[ref]][constant[0]]
variable[target_model] assign[=] call[name[cls].resolve_ref, parameter[name[manifest], name[target_model_name], name[target_model_package], name[current_project], call[name[node].get, parameter[constant[package_name]]]]]
if <ast.BoolOp object at 0x7da1b1b28e20> begin[:]
call[name[node].config][constant[enabled]] assign[=] constant[False]
call[name[dbt].utils.invalid_ref_fail_unless_test, parameter[name[node], name[target_model_name], name[target_model_package]]]
continue
variable[target_model_id] assign[=] call[name[target_model].get, parameter[constant[unique_id]]]
call[call[name[node].depends_on][constant[nodes]].append, parameter[name[target_model_id]]]
call[name[manifest].nodes][call[name[node]][constant[unique_id]]] assign[=] name[node] | keyword[def] identifier[process_refs_for_node] ( identifier[cls] , identifier[manifest] , identifier[current_project] , identifier[node] ):
literal[string]
identifier[target_model] = keyword[None]
identifier[target_model_name] = keyword[None]
identifier[target_model_package] = keyword[None]
keyword[for] identifier[ref] keyword[in] identifier[node] . identifier[refs] :
keyword[if] identifier[len] ( identifier[ref] )== literal[int] :
identifier[target_model_name] = identifier[ref] [ literal[int] ]
keyword[elif] identifier[len] ( identifier[ref] )== literal[int] :
identifier[target_model_package] , identifier[target_model_name] = identifier[ref]
identifier[target_model] = identifier[cls] . identifier[resolve_ref] (
identifier[manifest] ,
identifier[target_model_name] ,
identifier[target_model_package] ,
identifier[current_project] ,
identifier[node] . identifier[get] ( literal[string] ))
keyword[if] identifier[target_model] keyword[is] keyword[None] keyword[or] identifier[target_model] keyword[is] identifier[cls] . identifier[DISABLED] :
identifier[node] . identifier[config] [ literal[string] ]= keyword[False]
identifier[dbt] . identifier[utils] . identifier[invalid_ref_fail_unless_test] (
identifier[node] , identifier[target_model_name] , identifier[target_model_package] ,
identifier[disabled] =( identifier[target_model] keyword[is] identifier[cls] . identifier[DISABLED] )
)
keyword[continue]
identifier[target_model_id] = identifier[target_model] . identifier[get] ( literal[string] )
identifier[node] . identifier[depends_on] [ literal[string] ]. identifier[append] ( identifier[target_model_id] )
identifier[manifest] . identifier[nodes] [ identifier[node] [ literal[string] ]]= identifier[node] | def process_refs_for_node(cls, manifest, current_project, node):
"""Given a manifest and a node in that manifest, process its refs"""
target_model = None
target_model_name = None
target_model_package = None
for ref in node.refs:
if len(ref) == 1:
target_model_name = ref[0] # depends on [control=['if'], data=[]]
elif len(ref) == 2:
(target_model_package, target_model_name) = ref # depends on [control=['if'], data=[]]
target_model = cls.resolve_ref(manifest, target_model_name, target_model_package, current_project, node.get('package_name'))
if target_model is None or target_model is cls.DISABLED:
# This may raise. Even if it doesn't, we don't want to add
# this node to the graph b/c there is no destination node
node.config['enabled'] = False
dbt.utils.invalid_ref_fail_unless_test(node, target_model_name, target_model_package, disabled=target_model is cls.DISABLED)
continue # depends on [control=['if'], data=[]]
target_model_id = target_model.get('unique_id')
node.depends_on['nodes'].append(target_model_id)
manifest.nodes[node['unique_id']] = node # depends on [control=['for'], data=['ref']] |
def fill_borders(self, *args):
"""Extrapolate tiepoint lons and lats to fill in the border of the
chunks.
"""
to_run = []
cases = {"y": self._fill_row_borders,
"x": self._fill_col_borders}
for dim in args:
try:
to_run.append(cases[dim])
except KeyError:
raise NameError("Unrecognized dimension: " + str(dim))
for fun in to_run:
fun() | def function[fill_borders, parameter[self]]:
constant[Extrapolate tiepoint lons and lats to fill in the border of the
chunks.
]
variable[to_run] assign[=] list[[]]
variable[cases] assign[=] dictionary[[<ast.Constant object at 0x7da1b1950a30>, <ast.Constant object at 0x7da1b1950b50>], [<ast.Attribute object at 0x7da1b1950a00>, <ast.Attribute object at 0x7da1b1950a60>]]
for taget[name[dim]] in starred[name[args]] begin[:]
<ast.Try object at 0x7da1b1950b80>
for taget[name[fun]] in starred[name[to_run]] begin[:]
call[name[fun], parameter[]] | keyword[def] identifier[fill_borders] ( identifier[self] ,* identifier[args] ):
literal[string]
identifier[to_run] =[]
identifier[cases] ={ literal[string] : identifier[self] . identifier[_fill_row_borders] ,
literal[string] : identifier[self] . identifier[_fill_col_borders] }
keyword[for] identifier[dim] keyword[in] identifier[args] :
keyword[try] :
identifier[to_run] . identifier[append] ( identifier[cases] [ identifier[dim] ])
keyword[except] identifier[KeyError] :
keyword[raise] identifier[NameError] ( literal[string] + identifier[str] ( identifier[dim] ))
keyword[for] identifier[fun] keyword[in] identifier[to_run] :
identifier[fun] () | def fill_borders(self, *args):
"""Extrapolate tiepoint lons and lats to fill in the border of the
chunks.
"""
to_run = []
cases = {'y': self._fill_row_borders, 'x': self._fill_col_borders}
for dim in args:
try:
to_run.append(cases[dim]) # depends on [control=['try'], data=[]]
except KeyError:
raise NameError('Unrecognized dimension: ' + str(dim)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['dim']]
for fun in to_run:
fun() # depends on [control=['for'], data=['fun']] |
def find_element_by_name(self, name):
    """Locate the first element whose ``name`` attribute matches.

    :Args:
     - name: Value of the ``name`` attribute to search for.

    :Returns:
     - WebElement - the matching element.

    :Raises:
     - NoSuchElementException - if no matching element exists.

    :Usage:
        ::

            element = driver.find_element_by_name('foo')
    """
    locator_strategy = By.NAME
    return self.find_element(by=locator_strategy, value=name)
constant[
Finds an element by name.
:Args:
- name: The name of the element to find.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_name('foo')
]
return[call[name[self].find_element, parameter[]]] | keyword[def] identifier[find_element_by_name] ( identifier[self] , identifier[name] ):
literal[string]
keyword[return] identifier[self] . identifier[find_element] ( identifier[by] = identifier[By] . identifier[NAME] , identifier[value] = identifier[name] ) | def find_element_by_name(self, name):
"""
Finds an element by name.
:Args:
- name: The name of the element to find.
:Returns:
- WebElement - the element if it was found
:Raises:
- NoSuchElementException - if the element wasn't found
:Usage:
::
element = driver.find_element_by_name('foo')
"""
return self.find_element(by=By.NAME, value=name) |
def phonetic_fingerprint(
    phrase, phonetic_algorithm=double_metaphone, joiner=' ', *args, **kwargs
):
    """Compute the phonetic fingerprint of a phrase.

    Thin convenience wrapper around :py:meth:`Phonetic.fingerprint`.

    Parameters
    ----------
    phrase : str
        The string from which to calculate the phonetic fingerprint
    phonetic_algorithm : function
        A phonetic algorithm that takes a string and returns a string
        (presumably a phonetic representation of the original string). By
        default, this function uses :py:func:`.double_metaphone`.
    joiner : str
        The string that will be placed between each word
    *args
        Variable length argument list
    **kwargs
        Arbitrary keyword arguments

    Returns
    -------
    str
        The phonetic fingerprint of the phrase

    Examples
    --------
    >>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.')
    '0 afr fks jmpt kk ls prn tk'

    >>> from abydos.phonetic import soundex
    >>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.',
    ... phonetic_algorithm=soundex)
    'b650 d200 f200 j513 l200 o160 q200 t000'
    """
    fingerprinter = Phonetic()
    return fingerprinter.fingerprint(
        phrase, phonetic_algorithm, joiner, *args, **kwargs
    )
constant[Return the phonetic fingerprint of a phrase.
This is a wrapper for :py:meth:`Phonetic.fingerprint`.
Parameters
----------
phrase : str
The string from which to calculate the phonetic fingerprint
phonetic_algorithm : function
A phonetic algorithm that takes a string and returns a string
(presumably a phonetic representation of the original string). By
default, this function uses :py:func:`.double_metaphone`.
joiner : str
The string that will be placed between each word
*args
Variable length argument list
**kwargs
Arbitrary keyword arguments
Returns
-------
str
The phonetic fingerprint of the phrase
Examples
--------
>>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.')
'0 afr fks jmpt kk ls prn tk'
>>> from abydos.phonetic import soundex
>>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.',
... phonetic_algorithm=soundex)
'b650 d200 f200 j513 l200 o160 q200 t000'
]
return[call[call[name[Phonetic], parameter[]].fingerprint, parameter[name[phrase], name[phonetic_algorithm], name[joiner], <ast.Starred object at 0x7da2054a7f70>]]] | keyword[def] identifier[phonetic_fingerprint] (
identifier[phrase] , identifier[phonetic_algorithm] = identifier[double_metaphone] , identifier[joiner] = literal[string] ,* identifier[args] ,** identifier[kwargs]
):
literal[string]
keyword[return] identifier[Phonetic] (). identifier[fingerprint] (
identifier[phrase] , identifier[phonetic_algorithm] , identifier[joiner] ,* identifier[args] ,** identifier[kwargs]
) | def phonetic_fingerprint(phrase, phonetic_algorithm=double_metaphone, joiner=' ', *args, **kwargs):
"""Return the phonetic fingerprint of a phrase.
This is a wrapper for :py:meth:`Phonetic.fingerprint`.
Parameters
----------
phrase : str
The string from which to calculate the phonetic fingerprint
phonetic_algorithm : function
A phonetic algorithm that takes a string and returns a string
(presumably a phonetic representation of the original string). By
default, this function uses :py:func:`.double_metaphone`.
joiner : str
The string that will be placed between each word
*args
Variable length argument list
**kwargs
Arbitrary keyword arguments
Returns
-------
str
The phonetic fingerprint of the phrase
Examples
--------
>>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.')
'0 afr fks jmpt kk ls prn tk'
>>> from abydos.phonetic import soundex
>>> phonetic_fingerprint('The quick brown fox jumped over the lazy dog.',
... phonetic_algorithm=soundex)
'b650 d200 f200 j513 l200 o160 q200 t000'
"""
return Phonetic().fingerprint(phrase, phonetic_algorithm, joiner, *args, **kwargs) |
def is_pp_seq_no_stable(self, msg: Checkpoint):
    """
    Report whether a checkpoint's end sequence number is already covered
    by the last stable checkpoint.

    :param msg: checkpoint whose seqNoEnd is tested
    :return: True if msg.seqNoEnd is less than or equal to the last
        stable checkpoint, False otherwise
    """
    checkpoint = self.firstCheckPoint
    if not checkpoint:
        # No checkpoint recorded at all -> nothing can be stable.
        return False
    _, state = checkpoint
    return state.isStable and state.seqNo >= msg.seqNoEnd
constant[
:param ppSeqNo:
:return: True if ppSeqNo is less than or equal to last stable
checkpoint, false otherwise
]
variable[pp_seq_no] assign[=] name[msg].seqNoEnd
variable[ck] assign[=] name[self].firstCheckPoint
if name[ck] begin[:]
<ast.Tuple object at 0x7da1b1737940> assign[=] name[ck]
return[<ast.BoolOp object at 0x7da1b1734820>] | keyword[def] identifier[is_pp_seq_no_stable] ( identifier[self] , identifier[msg] : identifier[Checkpoint] ):
literal[string]
identifier[pp_seq_no] = identifier[msg] . identifier[seqNoEnd]
identifier[ck] = identifier[self] . identifier[firstCheckPoint]
keyword[if] identifier[ck] :
identifier[_] , identifier[ckState] = identifier[ck]
keyword[return] identifier[ckState] . identifier[isStable] keyword[and] identifier[ckState] . identifier[seqNo] >= identifier[pp_seq_no]
keyword[else] :
keyword[return] keyword[False] | def is_pp_seq_no_stable(self, msg: Checkpoint):
"""
:param ppSeqNo:
:return: True if ppSeqNo is less than or equal to last stable
checkpoint, false otherwise
"""
pp_seq_no = msg.seqNoEnd
ck = self.firstCheckPoint
if ck:
(_, ckState) = ck
return ckState.isStable and ckState.seqNo >= pp_seq_no # depends on [control=['if'], data=[]]
else:
return False |
def clip_gradients(batch_result, model, max_grad_norm):
    """Clip gradients of the model's trainable parameters to a maximum norm.

    Stores the norm value returned by ``clip_grad_norm_`` under
    ``batch_result['grad_norm']``; when ``max_grad_norm`` is None no
    clipping happens and 0.0 is recorded instead.
    """
    if max_grad_norm is None:
        batch_result['grad_norm'] = 0.0
        return
    trainable_params = (p for p in model.parameters() if p.requires_grad)
    batch_result['grad_norm'] = torch.nn.utils.clip_grad_norm_(
        trainable_params,
        max_norm=max_grad_norm
    )
constant[ Clip gradients to a given maximum length ]
if compare[name[max_grad_norm] is_not constant[None]] begin[:]
variable[grad_norm] assign[=] call[name[torch].nn.utils.clip_grad_norm_, parameter[call[name[filter], parameter[<ast.Lambda object at 0x7da1b155fac0>, call[name[model].parameters, parameter[]]]]]]
call[name[batch_result]][constant[grad_norm]] assign[=] name[grad_norm] | keyword[def] identifier[clip_gradients] ( identifier[batch_result] , identifier[model] , identifier[max_grad_norm] ):
literal[string]
keyword[if] identifier[max_grad_norm] keyword[is] keyword[not] keyword[None] :
identifier[grad_norm] = identifier[torch] . identifier[nn] . identifier[utils] . identifier[clip_grad_norm_] (
identifier[filter] ( keyword[lambda] identifier[p] : identifier[p] . identifier[requires_grad] , identifier[model] . identifier[parameters] ()),
identifier[max_norm] = identifier[max_grad_norm]
)
keyword[else] :
identifier[grad_norm] = literal[int]
identifier[batch_result] [ literal[string] ]= identifier[grad_norm] | def clip_gradients(batch_result, model, max_grad_norm):
""" Clip gradients to a given maximum length """
if max_grad_norm is not None:
grad_norm = torch.nn.utils.clip_grad_norm_(filter(lambda p: p.requires_grad, model.parameters()), max_norm=max_grad_norm) # depends on [control=['if'], data=['max_grad_norm']]
else:
grad_norm = 0.0
batch_result['grad_norm'] = grad_norm |
def get_nfc_chars(self):
    """
    Return the set of single-codepoint IPA symbols that are precomposed,
    i.e. that decompose into two codepoints under NFD normalisation.
    These should not be decomposed during string normalisation, because
    they will not be recognised otherwise.

    In IPA 2015 there is only one precomposed character: ç, the
    voiceless palatal fricative.
    """
    return {
        symbol
        for symbol in self.ipa.keys()
        if len(symbol) == 1
        and len(unicodedata.normalize('NFD', symbol)) == 2
    }
constant[
Returns the set of IPA symbols that are precomposed (decomposable)
chars. These should not be decomposed during string normalisation,
because they will not be recognised otherwise.
In IPA 2015 there is only one precomposed character: ç, the voiceless
palatal fricative.
]
variable[ex] assign[=] list[[]]
for taget[name[char]] in starred[call[name[self].ipa.keys, parameter[]]] begin[:]
if compare[call[name[len], parameter[name[char]]] equal[==] constant[1]] begin[:]
variable[decomp] assign[=] call[name[unicodedata].normalize, parameter[constant[NFD], name[char]]]
if compare[call[name[len], parameter[name[decomp]]] equal[==] constant[2]] begin[:]
call[name[ex].append, parameter[name[char]]]
return[call[name[set], parameter[name[ex]]]] | keyword[def] identifier[get_nfc_chars] ( identifier[self] ):
literal[string]
identifier[ex] =[]
keyword[for] identifier[char] keyword[in] identifier[self] . identifier[ipa] . identifier[keys] ():
keyword[if] identifier[len] ( identifier[char] )== literal[int] :
identifier[decomp] = identifier[unicodedata] . identifier[normalize] ( literal[string] , identifier[char] )
keyword[if] identifier[len] ( identifier[decomp] )== literal[int] :
identifier[ex] . identifier[append] ( identifier[char] )
keyword[return] identifier[set] ( identifier[ex] ) | def get_nfc_chars(self):
"""
Returns the set of IPA symbols that are precomposed (decomposable)
chars. These should not be decomposed during string normalisation,
because they will not be recognised otherwise.
In IPA 2015 there is only one precomposed character: ç, the voiceless
palatal fricative.
"""
ex = []
for char in self.ipa.keys():
if len(char) == 1:
decomp = unicodedata.normalize('NFD', char)
if len(decomp) == 2:
ex.append(char) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['char']]
return set(ex) |
def clean_fail(func):
    '''
    A decorator to cleanly exit on a failed call to AWS.

    Catches a `botocore.exceptions.ClientError` raised from the wrapped
    action, prints the error message to stderr and exits with status 1
    instead of dumping a traceback.  This sort of error is raised if you
    are targeting a region that isn't set up (see `credstash setup`).
    '''
    # Imported locally so the decorator is self-contained.
    from functools import wraps

    @wraps(func)  # preserve the wrapped function's name/docstring for help/debugging
    def func_wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except botocore.exceptions.ClientError as e:
            print(str(e), file=sys.stderr)
            sys.exit(1)
    return func_wrapper
constant[
A decorator to cleanly exit on a failed call to AWS.
catch a `botocore.exceptions.ClientError` raised from an action.
This sort of error is raised if you are targeting a region that
isn't set up (see, `credstash setup`.
]
def function[func_wrapper, parameter[]]:
<ast.Try object at 0x7da18bc714b0>
return[name[func_wrapper]] | keyword[def] identifier[clean_fail] ( identifier[func] ):
literal[string]
keyword[def] identifier[func_wrapper] (* identifier[args] ,** identifier[kwargs] ):
keyword[try] :
keyword[return] identifier[func] (* identifier[args] ,** identifier[kwargs] )
keyword[except] identifier[botocore] . identifier[exceptions] . identifier[ClientError] keyword[as] identifier[e] :
identifier[print] ( identifier[str] ( identifier[e] ), identifier[file] = identifier[sys] . identifier[stderr] )
identifier[sys] . identifier[exit] ( literal[int] )
keyword[return] identifier[func_wrapper] | def clean_fail(func):
"""
A decorator to cleanly exit on a failed call to AWS.
catch a `botocore.exceptions.ClientError` raised from an action.
This sort of error is raised if you are targeting a region that
isn't set up (see, `credstash setup`.
"""
def func_wrapper(*args, **kwargs):
try:
return func(*args, **kwargs) # depends on [control=['try'], data=[]]
except botocore.exceptions.ClientError as e:
print(str(e), file=sys.stderr)
sys.exit(1) # depends on [control=['except'], data=['e']]
return func_wrapper |
def build_items(self):
    """
    Drain the stats queue and publish aggregated statistics items.

    Workflow:
      1. get every pending item from the STATS QUEUE,
      2. fold each one into ``self.stats`` via ``calculate()``,
      3. build a new ``BlackbirdStatisticsItem`` per stats entry,
      4. enqueue the new items on the ITEM QUEUE (``self.queue``).
    """
    while not self.stats_queue.empty():
        item = self.stats_queue.get()
        self.calculate(item)
    # NOTE: dict.iteritems() -> this module targets Python 2.
    for key, value in self.stats.iteritems():
        # The queue-length metric is sampled live rather than taken
        # from the aggregated stats.
        if 'blackbird.queue.length' == key:
            value = self.queue.qsize()
        item = BlackbirdStatisticsItem(
            key=key,
            value=value,
            host=self.options['hostname']
        )
        # enqueue() appears to return truthy on success; only then log.
        if self.enqueue(item=item, queue=self.queue):
            self.logger.debug(
                'Inserted {0} to the queue.'.format(item.data)
            )
constant[
get the items from STATS QUEUE
calculate self.stats
make new items from self.stats
put the new items for ITEM QUEUE
]
while <ast.UnaryOp object at 0x7da20c6a9810> begin[:]
variable[item] assign[=] call[name[self].stats_queue.get, parameter[]]
call[name[self].calculate, parameter[name[item]]]
for taget[tuple[[<ast.Name object at 0x7da20c6a98a0>, <ast.Name object at 0x7da20c6ab7f0>]]] in starred[call[name[self].stats.iteritems, parameter[]]] begin[:]
if compare[constant[blackbird.queue.length] equal[==] name[key]] begin[:]
variable[value] assign[=] call[name[self].queue.qsize, parameter[]]
variable[item] assign[=] call[name[BlackbirdStatisticsItem], parameter[]]
if call[name[self].enqueue, parameter[]] begin[:]
call[name[self].logger.debug, parameter[call[constant[Inserted {0} to the queue.].format, parameter[name[item].data]]]] | keyword[def] identifier[build_items] ( identifier[self] ):
literal[string]
keyword[while] keyword[not] identifier[self] . identifier[stats_queue] . identifier[empty] ():
identifier[item] = identifier[self] . identifier[stats_queue] . identifier[get] ()
identifier[self] . identifier[calculate] ( identifier[item] )
keyword[for] identifier[key] , identifier[value] keyword[in] identifier[self] . identifier[stats] . identifier[iteritems] ():
keyword[if] literal[string] == identifier[key] :
identifier[value] = identifier[self] . identifier[queue] . identifier[qsize] ()
identifier[item] = identifier[BlackbirdStatisticsItem] (
identifier[key] = identifier[key] ,
identifier[value] = identifier[value] ,
identifier[host] = identifier[self] . identifier[options] [ literal[string] ]
)
keyword[if] identifier[self] . identifier[enqueue] ( identifier[item] = identifier[item] , identifier[queue] = identifier[self] . identifier[queue] ):
identifier[self] . identifier[logger] . identifier[debug] (
literal[string] . identifier[format] ( identifier[item] . identifier[data] )
) | def build_items(self):
"""
get the items from STATS QUEUE
calculate self.stats
make new items from self.stats
put the new items for ITEM QUEUE
"""
while not self.stats_queue.empty():
item = self.stats_queue.get()
self.calculate(item) # depends on [control=['while'], data=[]]
for (key, value) in self.stats.iteritems():
if 'blackbird.queue.length' == key:
value = self.queue.qsize() # depends on [control=['if'], data=[]]
item = BlackbirdStatisticsItem(key=key, value=value, host=self.options['hostname'])
if self.enqueue(item=item, queue=self.queue):
self.logger.debug('Inserted {0} to the queue.'.format(item.data)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] |
def check_nova():
    '''
    Check version of novaclient
    '''
    if HAS_NOVA:
        installed = _LooseVersion(novaclient.__version__)
        required = _LooseVersion(NOVACLIENT_MINVER)
        if installed >= required:
            return HAS_NOVA
    # Reached when novaclient is absent or older than the minimum.
    log.debug('Newer novaclient version required. Minimum: %s', NOVACLIENT_MINVER)
    return False
constant[
Check version of novaclient
]
if name[HAS_NOVA] begin[:]
variable[novaclient_ver] assign[=] call[name[_LooseVersion], parameter[name[novaclient].__version__]]
variable[min_ver] assign[=] call[name[_LooseVersion], parameter[name[NOVACLIENT_MINVER]]]
if compare[name[min_ver] less_or_equal[<=] name[novaclient_ver]] begin[:]
return[name[HAS_NOVA]]
call[name[log].debug, parameter[constant[Newer novaclient version required. Minimum: %s], name[NOVACLIENT_MINVER]]]
return[constant[False]] | keyword[def] identifier[check_nova] ():
literal[string]
keyword[if] identifier[HAS_NOVA] :
identifier[novaclient_ver] = identifier[_LooseVersion] ( identifier[novaclient] . identifier[__version__] )
identifier[min_ver] = identifier[_LooseVersion] ( identifier[NOVACLIENT_MINVER] )
keyword[if] identifier[min_ver] <= identifier[novaclient_ver] :
keyword[return] identifier[HAS_NOVA]
identifier[log] . identifier[debug] ( literal[string] , identifier[NOVACLIENT_MINVER] )
keyword[return] keyword[False] | def check_nova():
"""
Check version of novaclient
"""
if HAS_NOVA:
novaclient_ver = _LooseVersion(novaclient.__version__)
min_ver = _LooseVersion(NOVACLIENT_MINVER)
if min_ver <= novaclient_ver:
return HAS_NOVA # depends on [control=['if'], data=[]]
log.debug('Newer novaclient version required. Minimum: %s', NOVACLIENT_MINVER) # depends on [control=['if'], data=[]]
return False |
def note_off(self, channel, note, velocity):
    """Build the byte sequence for a MIDI 'note off' event.

    :param channel: MIDI channel the event targets.
    :param note: note number being released.
    :param velocity: release velocity.
    :return: the bytes produced by ``midi_event`` for this event.
    """
    event_type = NOTE_OFF
    return self.midi_event(event_type, channel, note, velocity)
constant[Return bytes for a 'note off' event.]
return[call[name[self].midi_event, parameter[name[NOTE_OFF], name[channel], name[note], name[velocity]]]] | keyword[def] identifier[note_off] ( identifier[self] , identifier[channel] , identifier[note] , identifier[velocity] ):
literal[string]
keyword[return] identifier[self] . identifier[midi_event] ( identifier[NOTE_OFF] , identifier[channel] , identifier[note] , identifier[velocity] ) | def note_off(self, channel, note, velocity):
"""Return bytes for a 'note off' event."""
return self.midi_event(NOTE_OFF, channel, note, velocity) |
def getColumnsByName(elem, name):
    """
    Return a list of Column elements named name under elem.  The name
    comparison is done with CompareColumnNames().
    """
    target = StripColumnName(name)

    def _is_matching_column(e):
        # Must be a Column element whose stripped name matches.
        return (e.tagName == ligolw.Column.tagName) and (e.Name == target)

    return elem.getElements(_is_matching_column)
constant[
Return a list of Column elements named name under elem. The name
comparison is done with CompareColumnNames().
]
variable[name] assign[=] call[name[StripColumnName], parameter[name[name]]]
return[call[name[elem].getElements, parameter[<ast.Lambda object at 0x7da2041d9b40>]]] | keyword[def] identifier[getColumnsByName] ( identifier[elem] , identifier[name] ):
literal[string]
identifier[name] = identifier[StripColumnName] ( identifier[name] )
keyword[return] identifier[elem] . identifier[getElements] ( keyword[lambda] identifier[e] :( identifier[e] . identifier[tagName] == identifier[ligolw] . identifier[Column] . identifier[tagName] ) keyword[and] ( identifier[e] . identifier[Name] == identifier[name] )) | def getColumnsByName(elem, name):
"""
Return a list of Column elements named name under elem. The name
comparison is done with CompareColumnNames().
"""
name = StripColumnName(name)
return elem.getElements(lambda e: e.tagName == ligolw.Column.tagName and e.Name == name) |
def next_batch(self, n=1):
    """Return (and remove) up to ``n`` requests to dispatch next.

    Requests are taken from the tail of the queue and returned newest
    first; the queue is shortened accordingly.

    :param n: maximum number of requests to return.  Values <= 0 yield
        an empty batch and leave the queue untouched.
    :return: list of at most ``n`` requests, newest first.
    """
    # Guard n <= 0 explicitly: with n == 0 the slice arithmetic below
    # (queue[:-0] == []) would silently drop the whole queue while
    # returning all of it.
    if n <= 0 or not self.queue:
        return []
    batch = self.queue[-n:][::-1]
    self.queue = self.queue[:-n]
    return batch
constant[Return the next requests that should be dispatched.]
if compare[call[name[len], parameter[name[self].queue]] equal[==] constant[0]] begin[:]
return[list[[]]]
variable[batch] assign[=] call[name[list], parameter[call[name[reversed], parameter[call[name[self].queue][<ast.Slice object at 0x7da2041da860>]]]]]
name[self].queue assign[=] call[name[self].queue][<ast.Slice object at 0x7da2041d8a30>]
return[name[batch]] | keyword[def] identifier[next_batch] ( identifier[self] , identifier[n] = literal[int] ):
literal[string]
keyword[if] identifier[len] ( identifier[self] . identifier[queue] )== literal[int] :
keyword[return] []
identifier[batch] = identifier[list] ( identifier[reversed] (( identifier[self] . identifier[queue] [- identifier[n] :])))
identifier[self] . identifier[queue] = identifier[self] . identifier[queue] [:- identifier[n] ]
keyword[return] identifier[batch] | def next_batch(self, n=1):
"""Return the next requests that should be dispatched."""
if len(self.queue) == 0:
return [] # depends on [control=['if'], data=[]]
batch = list(reversed(self.queue[-n:]))
self.queue = self.queue[:-n]
return batch |
def remove_tag(self, key, value=None):
    """
    Remove a tag from this object. Removing a tag involves a round-trip
    to the EC2 service.

    :type key: str
    :param key: The key or name of the tag being stored.

    :type value: str
    :param value: An optional value that can be stored with the tag.
                  If a value is provided, it must match the value
                  currently stored in EC2. If not, the tag will not
                  be removed. If a value of None is provided, all
                  tags with the specified name will be deleted.
                  NOTE: There is an important distinction between
                  a value of '' and a value of None.
    """
    # Identity check against None: an empty string is a legitimate tag
    # value and must be sent as {key: ''}.  A plain truthiness test
    # would collapse '' into the delete-all-values branch, breaking the
    # ''-vs-None distinction documented above.
    if value is not None:
        tags = {key: value}
    else:
        tags = [key]
    self.connection.delete_tags([self.id], tags)
    # Keep the locally cached tag collection in sync with the service.
    if key in self.tags:
        del self.tags[key]
constant[
Remove a tag from this object. Removing a tag involves a round-trip
to the EC2 service.
:type key: str
:param key: The key or name of the tag being stored.
:type value: str
:param value: An optional value that can be stored with the tag.
If a value is provided, it must match the value
currently stored in EC2. If not, the tag will not
be removed. If a value of None is provided, all
tags with the specified name will be deleted.
NOTE: There is an important distinction between
a value of '' and a value of None.
]
if name[value] begin[:]
variable[tags] assign[=] dictionary[[<ast.Name object at 0x7da1b2677400>], [<ast.Name object at 0x7da1b2677100>]]
variable[status] assign[=] call[name[self].connection.delete_tags, parameter[list[[<ast.Attribute object at 0x7da1b2677820>]], name[tags]]]
if compare[name[key] in name[self].tags] begin[:]
<ast.Delete object at 0x7da1b2676e30> | keyword[def] identifier[remove_tag] ( identifier[self] , identifier[key] , identifier[value] = keyword[None] ):
literal[string]
keyword[if] identifier[value] :
identifier[tags] ={ identifier[key] : identifier[value] }
keyword[else] :
identifier[tags] =[ identifier[key] ]
identifier[status] = identifier[self] . identifier[connection] . identifier[delete_tags] ([ identifier[self] . identifier[id] ], identifier[tags] )
keyword[if] identifier[key] keyword[in] identifier[self] . identifier[tags] :
keyword[del] identifier[self] . identifier[tags] [ identifier[key] ] | def remove_tag(self, key, value=None):
"""
Remove a tag from this object. Removing a tag involves a round-trip
to the EC2 service.
:type key: str
:param key: The key or name of the tag being stored.
:type value: str
:param value: An optional value that can be stored with the tag.
If a value is provided, it must match the value
currently stored in EC2. If not, the tag will not
be removed. If a value of None is provided, all
tags with the specified name will be deleted.
NOTE: There is an important distinction between
a value of '' and a value of None.
"""
if value:
tags = {key: value} # depends on [control=['if'], data=[]]
else:
tags = [key]
status = self.connection.delete_tags([self.id], tags)
if key in self.tags:
del self.tags[key] # depends on [control=['if'], data=['key']] |
def parent(self):
    """Return parent resource.

    The parent is built from this resource's 'parent_type' and
    'parent_uuid' entries, with check=True (which, per the raises
    clause below, verifies the parent actually exists).

    :rtype: Resource
    :raises ResourceNotFound: parent resource doesn't exists
    :raises ResourceMissing: parent resource is not defined
    """
    try:
        return Resource(self['parent_type'], uuid=self['parent_uuid'], check=True)
    except KeyError:
        # A missing 'parent_type'/'parent_uuid' entry means no parent
        # is defined for this resource.
        raise ResourceMissing('%s has no parent resource' % self)
constant[Return parent resource
:rtype: Resource
:raises ResourceNotFound: parent resource doesn't exists
:raises ResourceMissing: parent resource is not defined
]
<ast.Try object at 0x7da1b247c970> | keyword[def] identifier[parent] ( identifier[self] ):
literal[string]
keyword[try] :
keyword[return] identifier[Resource] ( identifier[self] [ literal[string] ], identifier[uuid] = identifier[self] [ literal[string] ], identifier[check] = keyword[True] )
keyword[except] identifier[KeyError] :
keyword[raise] identifier[ResourceMissing] ( literal[string] % identifier[self] ) | def parent(self):
"""Return parent resource
:rtype: Resource
:raises ResourceNotFound: parent resource doesn't exists
:raises ResourceMissing: parent resource is not defined
"""
try:
return Resource(self['parent_type'], uuid=self['parent_uuid'], check=True) # depends on [control=['try'], data=[]]
except KeyError:
raise ResourceMissing('%s has no parent resource' % self) # depends on [control=['except'], data=[]] |
def description(self, value):
    """Fetches the translated description for the given datatype.

    The given value is converted to a `numpy.dtype` object, matched
    against the supported datatypes, and the description is returned in
    the preferred language.  If the conversion fails or no match can be
    found, `None` is returned.

    Args:
        value (type|numpy.dtype): Any object or type.

    Returns:
        str: The translated description of the datatype
        None: If no match could be found or an error occured during
            convertion.
    """
    # NOTE: lists, tuples, dicts map to numpy.object types and
    # therefore yield a 'text' description - working as intended or bug?
    try:
        dtype = np.dtype(value)
    except TypeError:
        # `value` cannot be interpreted as a numpy datatype.
        return None
    for known_dtype, translation in self._all:
        if known_dtype == dtype:
            return translation
    # No supported datatype matched.
    return None
constant[Fetches the translated description for the given datatype.
The given value will be converted to a `numpy.dtype` object, matched
against the supported datatypes and the description will be translated
into the preferred language. (Usually a settings dialog should be
available to change the language).
If the conversion fails or no match can be found, `None` will be returned.
Args:
value (type|numpy.dtype): Any object or type.
Returns:
str: The translated description of the datatype
None: If no match could be found or an error occured during convertion.
]
<ast.Try object at 0x7da1b07e3ca0>
for taget[tuple[[<ast.Name object at 0x7da1b07e3b80>, <ast.Name object at 0x7da1b07e1870>]]] in starred[name[self]._all] begin[:]
if compare[name[dtype] equal[==] name[value]] begin[:]
return[name[string]]
return[constant[None]] | keyword[def] identifier[description] ( identifier[self] , identifier[value] ):
literal[string]
keyword[try] :
identifier[value] = identifier[np] . identifier[dtype] ( identifier[value] )
keyword[except] identifier[TypeError] keyword[as] identifier[e] :
keyword[return] keyword[None]
keyword[for] ( identifier[dtype] , identifier[string] ) keyword[in] identifier[self] . identifier[_all] :
keyword[if] identifier[dtype] == identifier[value] :
keyword[return] identifier[string]
keyword[return] keyword[None] | def description(self, value):
"""Fetches the translated description for the given datatype.
The given value will be converted to a `numpy.dtype` object, matched
against the supported datatypes and the description will be translated
into the preferred language. (Usually a settings dialog should be
available to change the language).
If the conversion fails or no match can be found, `None` will be returned.
Args:
value (type|numpy.dtype): Any object or type.
Returns:
str: The translated description of the datatype
None: If no match could be found or an error occured during convertion.
"""
# lists, tuples, dicts refer to numpy.object types and
# return a 'text' description - working as intended or bug?
try:
value = np.dtype(value) # depends on [control=['try'], data=[]]
except TypeError as e:
return None # depends on [control=['except'], data=[]]
for (dtype, string) in self._all:
if dtype == value:
return string # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
# no match found return given value
return None |
def lacp_system_priority(self, **kwargs):
    """Auto Generated Code

    Builds the <config>/<lacp>/<system-priority> payload from the
    'system_priority' keyword and hands it to the callback
    (kwargs['callback'], defaulting to self._callback).
    """
    config = ET.Element("config")
    lacp_elem = ET.SubElement(
        config, "lacp", xmlns="urn:brocade.com:mgmt:brocade-lacp")
    priority_elem = ET.SubElement(lacp_elem, "system-priority")
    priority_elem.text = kwargs.pop('system_priority')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
constant[Auto Generated Code
]
variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]]
variable[lacp] assign[=] call[name[ET].SubElement, parameter[name[config], constant[lacp]]]
variable[system_priority] assign[=] call[name[ET].SubElement, parameter[name[lacp], constant[system-priority]]]
name[system_priority].text assign[=] call[name[kwargs].pop, parameter[constant[system_priority]]]
variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]]
return[call[name[callback], parameter[name[config]]]] | keyword[def] identifier[lacp_system_priority] ( identifier[self] ,** identifier[kwargs] ):
literal[string]
identifier[config] = identifier[ET] . identifier[Element] ( literal[string] )
identifier[lacp] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] )
identifier[system_priority] = identifier[ET] . identifier[SubElement] ( identifier[lacp] , literal[string] )
identifier[system_priority] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] )
identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] )
keyword[return] identifier[callback] ( identifier[config] ) | def lacp_system_priority(self, **kwargs):
"""Auto Generated Code
"""
config = ET.Element('config')
lacp = ET.SubElement(config, 'lacp', xmlns='urn:brocade.com:mgmt:brocade-lacp')
system_priority = ET.SubElement(lacp, 'system-priority')
system_priority.text = kwargs.pop('system_priority')
callback = kwargs.pop('callback', self._callback)
return callback(config) |
def rest_add_filters(self, data):
    """
    Register query filters described by a REST payload.

    :param data: list of dicts, each carrying 'opr', 'col' and 'value'
    :return:
    """
    for spec in data:
        # Unknown operators are silently skipped.
        filter_class = map_args_filter.get(spec["opr"], None)
        if not filter_class:
            continue
        self.add_filter(spec["col"], filter_class, spec["value"])
self.add_filter(_filter["col"], filter_class, _filter["value"]) | def function[rest_add_filters, parameter[self, data]]:
constant[
Adds list of dicts
:param data: list of dicts
:return:
]
for taget[name[_filter]] in starred[name[data]] begin[:]
variable[filter_class] assign[=] call[name[map_args_filter].get, parameter[call[name[_filter]][constant[opr]], constant[None]]]
if name[filter_class] begin[:]
call[name[self].add_filter, parameter[call[name[_filter]][constant[col]], name[filter_class], call[name[_filter]][constant[value]]]] | keyword[def] identifier[rest_add_filters] ( identifier[self] , identifier[data] ):
literal[string]
keyword[for] identifier[_filter] keyword[in] identifier[data] :
identifier[filter_class] = identifier[map_args_filter] . identifier[get] ( identifier[_filter] [ literal[string] ], keyword[None] )
keyword[if] identifier[filter_class] :
identifier[self] . identifier[add_filter] ( identifier[_filter] [ literal[string] ], identifier[filter_class] , identifier[_filter] [ literal[string] ]) | def rest_add_filters(self, data):
"""
Adds list of dicts
:param data: list of dicts
:return:
"""
for _filter in data:
filter_class = map_args_filter.get(_filter['opr'], None)
if filter_class:
self.add_filter(_filter['col'], filter_class, _filter['value']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_filter']] |
def wait_for_instance_deletion(credentials, project, zone, instance_name,
                               interval_seconds=5):
    """Wait until a Compute Engine instance is deleted.

    We require that initially, the specified instance exists; we then poll
    the GCE API every ``interval_seconds`` until a GET on the instance
    returns 404, and log how long the deletion took.

    :param credentials: OAuth2 credentials object exposing
        ``get_access_token()``.
    :param project: GCP project id.
    :param zone: zone the instance lives in.
    :param instance_name: name of the instance to watch.
    :param interval_seconds: seconds to sleep between polls.
    :raises AssertionError: if the instance does not exist initially.
    :raises requests.HTTPError: on any unexpected (non-404) API response.
    """
    url = ('https://www.googleapis.com/compute/v1/'
           'projects/%s/zones/%s/instances/%s'
           % (project, zone, instance_name))

    def poll_status_code():
        # Re-fetch the access token on every poll so long waits survive
        # token expiry; any status other than 404 must be a success code.
        access_token = credentials.get_access_token()
        headers = {
            'Authorization': 'Bearer %s' % access_token.access_token
        }
        r = requests.get(url, headers=headers)
        if r.status_code != 404:
            r.raise_for_status()
        return r.status_code

    t0 = time.time()
    if poll_status_code() == 404:
        raise AssertionError('Instance "%s" does not exist!' % instance_name)
    _LOGGER.debug('Instance "%s" exists.', instance_name)
    while True:
        time.sleep(interval_seconds)
        if poll_status_code() == 404:
            break
        _LOGGER.debug('Instance "%s" still exists.', instance_name)
    t = time.time() - t0
    _LOGGER.info('Instance was deleted after %.1f s (%.1f m).', t, t / 60.0)
constant[Wait until an instance is deleted.
We require that initially, the specified instance exists.
TODO: docstring
]
variable[t0] assign[=] call[name[time].time, parameter[]]
variable[access_token] assign[=] call[name[credentials].get_access_token, parameter[]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f721ba0>], [<ast.BinOp object at 0x7da18f721180>]]
variable[r] assign[=] call[name[requests].get, parameter[binary_operation[constant[https://www.googleapis.com/compute/v1/projects/%s/zones/%s/instances/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f722230>, <ast.Name object at 0x7da18f7230a0>, <ast.Name object at 0x7da18f720be0>]]]]]
if compare[name[r].status_code equal[==] constant[404]] begin[:]
<ast.Raise object at 0x7da18f722170>
call[name[r].raise_for_status, parameter[]]
call[name[_LOGGER].debug, parameter[constant[Instance "%s" exists.], name[instance_name]]]
while constant[True] begin[:]
call[name[time].sleep, parameter[name[interval_seconds]]]
variable[access_token] assign[=] call[name[credentials].get_access_token, parameter[]]
variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18f721de0>], [<ast.BinOp object at 0x7da18f720550>]]
variable[r] assign[=] call[name[requests].get, parameter[binary_operation[constant[https://www.googleapis.com/compute/v1/projects/%s/zones/%s/instances/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f720ca0>, <ast.Name object at 0x7da18f723be0>, <ast.Name object at 0x7da18f720d90>]]]]]
if compare[name[r].status_code equal[==] constant[404]] begin[:]
break
call[name[r].raise_for_status, parameter[]]
call[name[_LOGGER].debug, parameter[constant[Instance "%s" still exists.], name[instance_name]]]
variable[t1] assign[=] call[name[time].time, parameter[]]
variable[t] assign[=] binary_operation[name[t1] - name[t0]]
variable[t_min] assign[=] binary_operation[name[t] / constant[60.0]]
call[name[_LOGGER].info, parameter[constant[Instance was deleted after %.1f s (%.1f m).], name[t], name[t_min]]] | keyword[def] identifier[wait_for_instance_deletion] ( identifier[credentials] , identifier[project] , identifier[zone] , identifier[instance_name] ,
identifier[interval_seconds] = literal[int] ):
literal[string]
identifier[t0] = identifier[time] . identifier[time] ()
identifier[access_token] = identifier[credentials] . identifier[get_access_token] ()
identifier[headers] ={
literal[string] : literal[string] % identifier[access_token] . identifier[access_token]
}
identifier[r] = identifier[requests] . identifier[get] ( literal[string]
literal[string]
%( identifier[project] , identifier[zone] , identifier[instance_name] ),
identifier[headers] = identifier[headers] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[raise] identifier[AssertionError] ( literal[string] % identifier[instance_name] )
identifier[r] . identifier[raise_for_status] ()
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[instance_name] )
keyword[while] keyword[True] :
identifier[time] . identifier[sleep] ( identifier[interval_seconds] )
identifier[access_token] = identifier[credentials] . identifier[get_access_token] ()
identifier[headers] ={
literal[string] : literal[string] % identifier[access_token] . identifier[access_token]
}
identifier[r] = identifier[requests] . identifier[get] ( literal[string]
literal[string]
%( identifier[project] , identifier[zone] , identifier[instance_name] ),
identifier[headers] = identifier[headers] )
keyword[if] identifier[r] . identifier[status_code] == literal[int] :
keyword[break]
identifier[r] . identifier[raise_for_status] ()
identifier[_LOGGER] . identifier[debug] ( literal[string] , identifier[instance_name] )
identifier[t1] = identifier[time] . identifier[time] ()
identifier[t] = identifier[t1] - identifier[t0]
identifier[t_min] = identifier[t] / literal[int]
identifier[_LOGGER] . identifier[info] ( literal[string] , identifier[t] , identifier[t_min] ) | def wait_for_instance_deletion(credentials, project, zone, instance_name, interval_seconds=5):
"""Wait until an instance is deleted.
We require that initially, the specified instance exists.
TODO: docstring
"""
t0 = time.time()
access_token = credentials.get_access_token()
headers = {'Authorization': 'Bearer %s' % access_token.access_token}
r = requests.get('https://www.googleapis.com/compute/v1/projects/%s/zones/%s/instances/%s' % (project, zone, instance_name), headers=headers)
if r.status_code == 404:
raise AssertionError('Instance "%s" does not exist!' % instance_name) # depends on [control=['if'], data=[]]
r.raise_for_status()
_LOGGER.debug('Instance "%s" exists.', instance_name)
while True:
time.sleep(interval_seconds)
access_token = credentials.get_access_token()
headers = {'Authorization': 'Bearer %s' % access_token.access_token}
r = requests.get('https://www.googleapis.com/compute/v1/projects/%s/zones/%s/instances/%s' % (project, zone, instance_name), headers=headers)
if r.status_code == 404:
break # depends on [control=['if'], data=[]]
r.raise_for_status()
_LOGGER.debug('Instance "%s" still exists.', instance_name) # depends on [control=['while'], data=[]]
t1 = time.time()
t = t1 - t0
t_min = t / 60.0
_LOGGER.info('Instance was deleted after %.1f s (%.1f m).', t, t_min) |
def sample_forward_transitions(self, batch_size, batch_info, forward_steps: int,
                               discount_factor: float) -> Transitions:
    """
    Sample a batch of multi-step transitions from the replay buffer.

    Rather than a single-step transition s_t -> s_t+1 with reward r, each
    sampled item is s_t -> s_t+n (n = forward_steps) carrying the sum of
    the intermediate rewards, as used by n-step variants of Deep Q-Learning.
    """
    sampled_indexes = self.backend.sample_batch_transitions(
        batch_size, forward_steps=forward_steps
    )
    raw_tensors = self.backend.get_transitions_forward_steps(
        sampled_indexes, forward_steps=forward_steps, discount_factor=discount_factor
    )
    # Convert the backend's numpy arrays into torch tensors.
    as_torch = {}
    for name, array in raw_tensors.items():
        as_torch[name] = torch.from_numpy(array)
    trajectories = Trajectories(
        num_steps=batch_size,
        num_envs=self.backend.num_envs,
        environment_information=None,
        transition_tensors=as_torch,
        rollout_tensors={},
        extra_data={
            'forward_steps': forward_steps
        }
    )
    return trajectories.to_transitions()
constant[
Sample transitions from replay buffer with _forward steps_.
That is, instead of getting a transition s_t -> s_t+1 with reward r,
get a transition s_t -> s_t+n with sum of intermediate rewards.
Used in a variant of Deep Q-Learning
]
variable[indexes] assign[=] call[name[self].backend.sample_batch_transitions, parameter[name[batch_size]]]
variable[transition_tensors] assign[=] call[name[self].backend.get_transitions_forward_steps, parameter[name[indexes]]]
return[call[call[name[Trajectories], parameter[]].to_transitions, parameter[]]] | keyword[def] identifier[sample_forward_transitions] ( identifier[self] , identifier[batch_size] , identifier[batch_info] , identifier[forward_steps] : identifier[int] ,
identifier[discount_factor] : identifier[float] )-> identifier[Transitions] :
literal[string]
identifier[indexes] = identifier[self] . identifier[backend] . identifier[sample_batch_transitions] ( identifier[batch_size] , identifier[forward_steps] = identifier[forward_steps] )
identifier[transition_tensors] = identifier[self] . identifier[backend] . identifier[get_transitions_forward_steps] (
identifier[indexes] , identifier[forward_steps] = identifier[forward_steps] , identifier[discount_factor] = identifier[discount_factor]
)
keyword[return] identifier[Trajectories] (
identifier[num_steps] = identifier[batch_size] ,
identifier[num_envs] = identifier[self] . identifier[backend] . identifier[num_envs] ,
identifier[environment_information] = keyword[None] ,
identifier[transition_tensors] ={ identifier[k] : identifier[torch] . identifier[from_numpy] ( identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[transition_tensors] . identifier[items] ()},
identifier[rollout_tensors] ={},
identifier[extra_data] ={
literal[string] : identifier[forward_steps]
}
). identifier[to_transitions] () | def sample_forward_transitions(self, batch_size, batch_info, forward_steps: int, discount_factor: float) -> Transitions:
"""
Sample transitions from replay buffer with _forward steps_.
That is, instead of getting a transition s_t -> s_t+1 with reward r,
get a transition s_t -> s_t+n with sum of intermediate rewards.
Used in a variant of Deep Q-Learning
"""
indexes = self.backend.sample_batch_transitions(batch_size, forward_steps=forward_steps)
transition_tensors = self.backend.get_transitions_forward_steps(indexes, forward_steps=forward_steps, discount_factor=discount_factor)
return Trajectories(num_steps=batch_size, num_envs=self.backend.num_envs, environment_information=None, transition_tensors={k: torch.from_numpy(v) for (k, v) in transition_tensors.items()}, rollout_tensors={}, extra_data={'forward_steps': forward_steps}).to_transitions() |
def slice_sequences(sequences, start, end, apply_slice=None):
    """
    Take the slice [start:end) across the concatenation of several sequences.

    Useful when paginating across chained collections.

    :param sequences: iterable of (sequence, size) pairs
    :param start: index (into the virtual concatenation) the slice starts at
    :param end: index the slice ends at (exclusive)
    :param apply_slice: callable ``(sequence, start, end) -> items`` used to
        slice each individual sequence; defaults to ``_apply_slice``
    :return: list of the items sliced from the sequences
    :raises ValueError: if the range is negative, empty or inverted
    """
    if start < 0 or end < 0 or end <= start:
        raise ValueError('Start and/or End out of range. Start: %s. End: %s' % (start, end))
    slicer = _apply_slice if apply_slice is None else apply_slice
    remaining = end - start
    seen = 0
    result = []
    for sequence, count in sequences:
        rel_start = start - seen
        rel_end = end - seen
        if seen == start:
            # Slice begins exactly at this sequence's first item.
            chunk = list(slicer(sequence, 0, remaining))
        elif 0 < rel_start < count:
            # Slice begins somewhere inside this sequence.
            chunk = list(slicer(sequence, rel_start, rel_end))
        elif rel_start < 0:
            # Slice already started in an earlier sequence; continue from 0.
            chunk = list(slicer(sequence, 0, rel_end))
        else:
            # Slice starts beyond this sequence entirely.
            chunk = []
        result.extend(chunk)
        remaining -= len(chunk)
        seen += count
        if seen > end or remaining == 0:
            break
    return result
return collected_items | def function[slice_sequences, parameter[sequences, start, end, apply_slice]]:
constant[
Performs a slice across multiple sequences.
Useful when paginating across chained collections.
:param sequences: an iterable of iterables, each nested iterable should contain
a sequence and its size
:param start: starting index to apply the slice from
:param end: index that the slice should end at
:param apply_slice: function that takes the sequence and start/end offsets, and
returns the sliced sequence
:return: a list of the items sliced from the sequences
]
if <ast.BoolOp object at 0x7da20c6aa170> begin[:]
<ast.Raise object at 0x7da20c6a9120>
variable[items_to_take] assign[=] binary_operation[name[end] - name[start]]
variable[items_passed] assign[=] constant[0]
variable[collected_items] assign[=] list[[]]
if compare[name[apply_slice] is constant[None]] begin[:]
variable[apply_slice] assign[=] name[_apply_slice]
for taget[tuple[[<ast.Name object at 0x7da20c6a9ab0>, <ast.Name object at 0x7da20c6aa770>]]] in starred[name[sequences]] begin[:]
variable[offset_start] assign[=] binary_operation[name[start] - name[items_passed]]
variable[offset_end] assign[=] binary_operation[name[end] - name[items_passed]]
if compare[name[items_passed] equal[==] name[start]] begin[:]
variable[items] assign[=] call[name[apply_slice], parameter[name[sequence], constant[0], name[items_to_take]]]
variable[items] assign[=] call[name[list], parameter[name[items]]]
<ast.AugAssign object at 0x7da20c6ab070>
<ast.AugAssign object at 0x7da20c6a9630>
<ast.AugAssign object at 0x7da20c6ab5b0>
if <ast.BoolOp object at 0x7da20c6a9210> begin[:]
break
return[name[collected_items]] | keyword[def] identifier[slice_sequences] ( identifier[sequences] , identifier[start] , identifier[end] , identifier[apply_slice] = keyword[None] ):
literal[string]
keyword[if] identifier[start] < literal[int] keyword[or] identifier[end] < literal[int] keyword[or] identifier[end] <= identifier[start] :
keyword[raise] identifier[ValueError] ( literal[string] %( identifier[start] , identifier[end] ))
identifier[items_to_take] = identifier[end] - identifier[start]
identifier[items_passed] = literal[int]
identifier[collected_items] =[]
keyword[if] identifier[apply_slice] keyword[is] keyword[None] :
identifier[apply_slice] = identifier[_apply_slice]
keyword[for] identifier[sequence] , identifier[count] keyword[in] identifier[sequences] :
identifier[offset_start] = identifier[start] - identifier[items_passed]
identifier[offset_end] = identifier[end] - identifier[items_passed]
keyword[if] identifier[items_passed] == identifier[start] :
identifier[items] = identifier[apply_slice] ( identifier[sequence] , literal[int] , identifier[items_to_take] )
keyword[elif] literal[int] < identifier[offset_start] < identifier[count] :
identifier[items] = identifier[apply_slice] ( identifier[sequence] , identifier[offset_start] , identifier[offset_end] )
keyword[elif] identifier[offset_start] < literal[int] :
identifier[items] = identifier[apply_slice] ( identifier[sequence] , literal[int] , identifier[offset_end] )
keyword[else] :
identifier[items] =[]
identifier[items] = identifier[list] ( identifier[items] )
identifier[collected_items] += identifier[items]
identifier[items_to_take] -= identifier[len] ( identifier[items] )
identifier[items_passed] += identifier[count]
keyword[if] identifier[items_passed] > identifier[end] keyword[or] identifier[items_to_take] == literal[int] :
keyword[break]
keyword[return] identifier[collected_items] | def slice_sequences(sequences, start, end, apply_slice=None):
"""
Performs a slice across multiple sequences.
Useful when paginating across chained collections.
:param sequences: an iterable of iterables, each nested iterable should contain
a sequence and its size
:param start: starting index to apply the slice from
:param end: index that the slice should end at
:param apply_slice: function that takes the sequence and start/end offsets, and
returns the sliced sequence
:return: a list of the items sliced from the sequences
"""
if start < 0 or end < 0 or end <= start:
raise ValueError('Start and/or End out of range. Start: %s. End: %s' % (start, end)) # depends on [control=['if'], data=[]]
items_to_take = end - start
items_passed = 0
collected_items = []
if apply_slice is None:
apply_slice = _apply_slice # depends on [control=['if'], data=['apply_slice']]
for (sequence, count) in sequences:
offset_start = start - items_passed
offset_end = end - items_passed
if items_passed == start:
items = apply_slice(sequence, 0, items_to_take) # depends on [control=['if'], data=[]]
elif 0 < offset_start < count:
items = apply_slice(sequence, offset_start, offset_end) # depends on [control=['if'], data=['offset_start']]
elif offset_start < 0:
items = apply_slice(sequence, 0, offset_end) # depends on [control=['if'], data=[]]
else:
items = []
items = list(items)
collected_items += items
items_to_take -= len(items)
items_passed += count
if items_passed > end or items_to_take == 0:
break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]]
return collected_items |
def order_guest(self, guest_object, test=False):
    """Create a virtual guest through Product_Order::placeOrder.

    Unlike Virtual_Guest::createObject, placing a product order supports
    options such as ipv6 addresses, userdata, dedicated hosts and
    placement groups.

    :param dictionary guest_object: guest description, see
        SoftLayer.CLI.virt.create._parse_create_args. A 'tags' key, if
        present, is popped here and applied to the created guests once
        the order is placed.
    :param bool test: when True the order is only verified
        (Product_Order::verifyOrder) and nothing is provisioned.
    :returns: the order receipt; the created guests are listed under
        ``result['orderDetails']['virtualGuests']``.
    """
    tags = guest_object.pop('tags', None)
    template = self.verify_create_instance(**guest_object)

    if guest_object.get('ipv6'):
        ipv6_price = self.ordering_manager.get_price_id_list(
            'PUBLIC_CLOUD_SERVER', ['1_IPV6_ADDRESS'])
        template['prices'].append({'id': ipv6_price[0]})

    # The CLI flag is `userdata`, but the order template wants `userData`.
    # SL_Virtual_Guest::generateOrderTemplate() doesn't respect userData,
    # so it has to be injected here ourselves.
    userdata = guest_object.get('userdata')
    if userdata:
        template['virtualGuests'][0]['userData'] = [{"value": userdata}]

    host_id = guest_object.get('host_id')
    if host_id:
        template['hostId'] = host_id

    placement_id = guest_object.get('placement_id')
    if placement_id:
        template['virtualGuests'][0]['placementGroupId'] = placement_id

    method = 'verifyOrder' if test else 'placeOrder'
    result = self.client.call('Product_Order', method, template)

    if tags is not None:
        for guest in utils.lookup(result, 'orderDetails', 'virtualGuests'):
            self.set_tags(tags, guest_id=guest['id'])
    return result
constant[Uses Product_Order::placeOrder to create a virtual guest.
Useful when creating a virtual guest with options not supported by Virtual_Guest::createObject
specifically ipv6 support.
:param dictionary guest_object: See SoftLayer.CLI.virt.create._parse_create_args
Example::
new_vsi = {
'domain': u'test01.labs.sftlyr.ws',
'hostname': u'minion05',
'datacenter': u'hkg02',
'flavor': 'BL1_1X2X100'
'dedicated': False,
'private': False,
'os_code' : u'UBUNTU_LATEST',
'hourly': True,
'ssh_keys': [1234],
'disks': ('100','25'),
'local_disk': True,
'tags': 'test, pleaseCancel',
'public_security_groups': [12, 15],
'ipv6': True
}
vsi = mgr.order_guest(new_vsi)
# vsi will have the newly created vsi receipt.
# vsi['orderDetails']['virtualGuests'] will be an array of created Guests
print vsi
]
variable[tags] assign[=] call[name[guest_object].pop, parameter[constant[tags], constant[None]]]
variable[template] assign[=] call[name[self].verify_create_instance, parameter[]]
if call[name[guest_object].get, parameter[constant[ipv6]]] begin[:]
variable[ipv6_price] assign[=] call[name[self].ordering_manager.get_price_id_list, parameter[constant[PUBLIC_CLOUD_SERVER], list[[<ast.Constant object at 0x7da20c794a60>]]]]
call[call[name[template]][constant[prices]].append, parameter[dictionary[[<ast.Constant object at 0x7da20c794250>], [<ast.Subscript object at 0x7da20c794f70>]]]]
if call[name[guest_object].get, parameter[constant[userdata]]] begin[:]
call[call[call[name[template]][constant[virtualGuests]]][constant[0]]][constant[userData]] assign[=] list[[<ast.Dict object at 0x7da20c795780>]]
if call[name[guest_object].get, parameter[constant[host_id]]] begin[:]
call[name[template]][constant[hostId]] assign[=] call[name[guest_object].get, parameter[constant[host_id]]]
if call[name[guest_object].get, parameter[constant[placement_id]]] begin[:]
call[call[call[name[template]][constant[virtualGuests]]][constant[0]]][constant[placementGroupId]] assign[=] call[name[guest_object].get, parameter[constant[placement_id]]]
if name[test] begin[:]
variable[result] assign[=] call[name[self].client.call, parameter[constant[Product_Order], constant[verifyOrder], name[template]]]
return[name[result]] | keyword[def] identifier[order_guest] ( identifier[self] , identifier[guest_object] , identifier[test] = keyword[False] ):
literal[string]
identifier[tags] = identifier[guest_object] . identifier[pop] ( literal[string] , keyword[None] )
identifier[template] = identifier[self] . identifier[verify_create_instance] (** identifier[guest_object] )
keyword[if] identifier[guest_object] . identifier[get] ( literal[string] ):
identifier[ipv6_price] = identifier[self] . identifier[ordering_manager] . identifier[get_price_id_list] ( literal[string] ,[ literal[string] ])
identifier[template] [ literal[string] ]. identifier[append] ({ literal[string] : identifier[ipv6_price] [ literal[int] ]})
keyword[if] identifier[guest_object] . identifier[get] ( literal[string] ):
identifier[template] [ literal[string] ][ literal[int] ][ literal[string] ]=[{ literal[string] : identifier[guest_object] . identifier[get] ( literal[string] )}]
keyword[if] identifier[guest_object] . identifier[get] ( literal[string] ):
identifier[template] [ literal[string] ]= identifier[guest_object] . identifier[get] ( literal[string] )
keyword[if] identifier[guest_object] . identifier[get] ( literal[string] ):
identifier[template] [ literal[string] ][ literal[int] ][ literal[string] ]= identifier[guest_object] . identifier[get] ( literal[string] )
keyword[if] identifier[test] :
identifier[result] = identifier[self] . identifier[client] . identifier[call] ( literal[string] , literal[string] , identifier[template] )
keyword[else] :
identifier[result] = identifier[self] . identifier[client] . identifier[call] ( literal[string] , literal[string] , identifier[template] )
keyword[if] identifier[tags] keyword[is] keyword[not] keyword[None] :
identifier[virtual_guests] = identifier[utils] . identifier[lookup] ( identifier[result] , literal[string] , literal[string] )
keyword[for] identifier[guest] keyword[in] identifier[virtual_guests] :
identifier[self] . identifier[set_tags] ( identifier[tags] , identifier[guest_id] = identifier[guest] [ literal[string] ])
keyword[return] identifier[result] | def order_guest(self, guest_object, test=False):
"""Uses Product_Order::placeOrder to create a virtual guest.
Useful when creating a virtual guest with options not supported by Virtual_Guest::createObject
specifically ipv6 support.
:param dictionary guest_object: See SoftLayer.CLI.virt.create._parse_create_args
Example::
new_vsi = {
'domain': u'test01.labs.sftlyr.ws',
'hostname': u'minion05',
'datacenter': u'hkg02',
'flavor': 'BL1_1X2X100'
'dedicated': False,
'private': False,
'os_code' : u'UBUNTU_LATEST',
'hourly': True,
'ssh_keys': [1234],
'disks': ('100','25'),
'local_disk': True,
'tags': 'test, pleaseCancel',
'public_security_groups': [12, 15],
'ipv6': True
}
vsi = mgr.order_guest(new_vsi)
# vsi will have the newly created vsi receipt.
# vsi['orderDetails']['virtualGuests'] will be an array of created Guests
print vsi
"""
tags = guest_object.pop('tags', None)
template = self.verify_create_instance(**guest_object)
if guest_object.get('ipv6'):
ipv6_price = self.ordering_manager.get_price_id_list('PUBLIC_CLOUD_SERVER', ['1_IPV6_ADDRESS'])
template['prices'].append({'id': ipv6_price[0]}) # depends on [control=['if'], data=[]]
# Notice this is `userdata` from the cli, but we send it in as `userData`
if guest_object.get('userdata'):
# SL_Virtual_Guest::generateOrderTemplate() doesn't respect userData, so we need to add it ourself
template['virtualGuests'][0]['userData'] = [{'value': guest_object.get('userdata')}] # depends on [control=['if'], data=[]]
if guest_object.get('host_id'):
template['hostId'] = guest_object.get('host_id') # depends on [control=['if'], data=[]]
if guest_object.get('placement_id'):
template['virtualGuests'][0]['placementGroupId'] = guest_object.get('placement_id') # depends on [control=['if'], data=[]]
if test:
result = self.client.call('Product_Order', 'verifyOrder', template) # depends on [control=['if'], data=[]]
else:
result = self.client.call('Product_Order', 'placeOrder', template)
if tags is not None:
virtual_guests = utils.lookup(result, 'orderDetails', 'virtualGuests')
for guest in virtual_guests:
self.set_tags(tags, guest_id=guest['id']) # depends on [control=['for'], data=['guest']] # depends on [control=['if'], data=['tags']]
return result |
def message(self):
    """The message contents, decoded according to the message type."""
    message_type = self.type
    if message_type == 'cleartext':
        return self.bytes_to_text(self._message)
    elif message_type == 'literal':
        return self._message.contents
    elif message_type == 'encrypted':
        # Encrypted payloads are returned as-is.
        return self._message
constant[The message contents]
if compare[name[self].type equal[==] constant[cleartext]] begin[:]
return[call[name[self].bytes_to_text, parameter[name[self]._message]]]
if compare[name[self].type equal[==] constant[literal]] begin[:]
return[name[self]._message.contents]
if compare[name[self].type equal[==] constant[encrypted]] begin[:]
return[name[self]._message] | keyword[def] identifier[message] ( identifier[self] ):
literal[string]
keyword[if] identifier[self] . identifier[type] == literal[string] :
keyword[return] identifier[self] . identifier[bytes_to_text] ( identifier[self] . identifier[_message] )
keyword[if] identifier[self] . identifier[type] == literal[string] :
keyword[return] identifier[self] . identifier[_message] . identifier[contents]
keyword[if] identifier[self] . identifier[type] == literal[string] :
keyword[return] identifier[self] . identifier[_message] | def message(self):
"""The message contents"""
if self.type == 'cleartext':
return self.bytes_to_text(self._message) # depends on [control=['if'], data=[]]
if self.type == 'literal':
return self._message.contents # depends on [control=['if'], data=[]]
if self.type == 'encrypted':
return self._message # depends on [control=['if'], data=[]] |
def mimic_adam_with_adafactor(hparams):
  """Switch from Adam to Adafactor, approximating the behavior of Adam.

  Some minor things may be different, like epsilon and beta1 correction.

  Args:
    hparams: model hyperparameters where "adam" in hparams.optimizer
  """
  assert "adam" in hparams.optimizer
  hparams.optimizer = "adafactor"
  # Carry Adam's betas over and disable the Adafactor-only behaviors.
  adafactor_settings = {
      "optimizer_adafactor_beta1": hparams.optimizer_adam_beta1,
      "optimizer_adafactor_beta2": hparams.optimizer_adam_beta2,
      "optimizer_adafactor_multiply_by_parameter_scale": False,
      "optimizer_adafactor_factored": False,
      "optimizer_adafactor_clipping_threshold": None,
      "optimizer_adafactor_decay_type": "adam",
  }
  for name, value in adafactor_settings.items():
    setattr(hparams, name, value)
constant[Switch from Adam to Adafactor, approximating the behavior of Adam.
Some minor things may be different, like epsilon and beta1 correction.
Args:
hparams: model hyperparameters where "adam" in hparams.optimizer
]
assert[compare[constant[adam] in name[hparams].optimizer]]
name[hparams].optimizer assign[=] constant[adafactor]
name[hparams].optimizer_adafactor_beta1 assign[=] name[hparams].optimizer_adam_beta1
name[hparams].optimizer_adafactor_beta2 assign[=] name[hparams].optimizer_adam_beta2
name[hparams].optimizer_adafactor_multiply_by_parameter_scale assign[=] constant[False]
name[hparams].optimizer_adafactor_factored assign[=] constant[False]
name[hparams].optimizer_adafactor_clipping_threshold assign[=] constant[None]
name[hparams].optimizer_adafactor_decay_type assign[=] constant[adam] | keyword[def] identifier[mimic_adam_with_adafactor] ( identifier[hparams] ):
literal[string]
keyword[assert] literal[string] keyword[in] identifier[hparams] . identifier[optimizer]
identifier[hparams] . identifier[optimizer] = literal[string]
identifier[hparams] . identifier[optimizer_adafactor_beta1] = identifier[hparams] . identifier[optimizer_adam_beta1]
identifier[hparams] . identifier[optimizer_adafactor_beta2] = identifier[hparams] . identifier[optimizer_adam_beta2]
identifier[hparams] . identifier[optimizer_adafactor_multiply_by_parameter_scale] = keyword[False]
identifier[hparams] . identifier[optimizer_adafactor_factored] = keyword[False]
identifier[hparams] . identifier[optimizer_adafactor_clipping_threshold] = keyword[None]
identifier[hparams] . identifier[optimizer_adafactor_decay_type] = literal[string] | def mimic_adam_with_adafactor(hparams):
"""Switch from Adam to Adafactor, approximating the behavior of Adam.
Some minor things may be different, like epsilon and beta1 correction.
Args:
hparams: model hyperparameters where "adam" in hparams.optimizer
"""
assert 'adam' in hparams.optimizer
hparams.optimizer = 'adafactor'
hparams.optimizer_adafactor_beta1 = hparams.optimizer_adam_beta1
hparams.optimizer_adafactor_beta2 = hparams.optimizer_adam_beta2
hparams.optimizer_adafactor_multiply_by_parameter_scale = False
hparams.optimizer_adafactor_factored = False
hparams.optimizer_adafactor_clipping_threshold = None
hparams.optimizer_adafactor_decay_type = 'adam' |
def selectgt(table, field, value, complement=False):
    """Select rows where the given field is greater than the given value."""
    # Wrap the threshold so heterogeneous values compare consistently.
    return selectop(table, field, Comparable(value), operator.gt,
                    complement=complement)
constant[Select rows where the given field is greater than the given value.]
variable[value] assign[=] call[name[Comparable], parameter[name[value]]]
return[call[name[selectop], parameter[name[table], name[field], name[value], name[operator].gt]]] | keyword[def] identifier[selectgt] ( identifier[table] , identifier[field] , identifier[value] , identifier[complement] = keyword[False] ):
literal[string]
identifier[value] = identifier[Comparable] ( identifier[value] )
keyword[return] identifier[selectop] ( identifier[table] , identifier[field] , identifier[value] , identifier[operator] . identifier[gt] , identifier[complement] = identifier[complement] ) | def selectgt(table, field, value, complement=False):
"""Select rows where the given field is greater than the given value."""
value = Comparable(value)
return selectop(table, field, value, operator.gt, complement=complement) |
def execute(self):
    """
    Run the steps to execute the CLI: build and parse the arguments,
    then either plot the data or report the validation message.
    """
    self.add_arguments()
    self._parse_args()
    self.get_arguments()
    if not self._validate_arguments():
        print(self._message)
        return
    self._plot_data()
constant[
Run the steps to execute the CLI
]
call[name[self].add_arguments, parameter[]]
call[name[self]._parse_args, parameter[]]
call[name[self].get_arguments, parameter[]]
if call[name[self]._validate_arguments, parameter[]] begin[:]
call[name[self]._plot_data, parameter[]] | keyword[def] identifier[execute] ( identifier[self] ):
literal[string]
identifier[self] . identifier[add_arguments] ()
identifier[self] . identifier[_parse_args] ()
identifier[self] . identifier[get_arguments] ()
keyword[if] identifier[self] . identifier[_validate_arguments] ():
identifier[self] . identifier[_plot_data] ()
keyword[else] :
identifier[print] ( identifier[self] . identifier[_message] ) | def execute(self):
"""
Run the steps to execute the CLI
"""
# self._get_environment()
self.add_arguments()
self._parse_args()
self.get_arguments()
if self._validate_arguments():
self._plot_data() # depends on [control=['if'], data=[]]
else:
print(self._message) |
def validate_function(method, **kwargs):
"""
Validate the field matches the result of calling the given method. Example::
def myfunc(value, name):
return value == name
validator = validate_function(myfunc, name='tim')
Essentially creates a validator that only accepts the name 'tim'.
:param method: Method to call.
:param kwargs: Additional keyword arguments passed to the method.
:raises: ``ValidationError('function')``
"""
def function_validator(field, data):
if field.value is None:
return
if not method(field.value, **kwargs):
raise ValidationError('function', function=method.__name__)
return function_validator | def function[validate_function, parameter[method]]:
constant[
Validate the field matches the result of calling the given method. Example::
def myfunc(value, name):
return value == name
validator = validate_function(myfunc, name='tim')
Essentially creates a validator that only accepts the name 'tim'.
:param method: Method to call.
:param kwargs: Additional keyword arguments passed to the method.
:raises: ``ValidationError('function')``
]
def function[function_validator, parameter[field, data]]:
if compare[name[field].value is constant[None]] begin[:]
return[None]
if <ast.UnaryOp object at 0x7da20c7cb2b0> begin[:]
<ast.Raise object at 0x7da20c7c8580>
return[name[function_validator]] | keyword[def] identifier[validate_function] ( identifier[method] ,** identifier[kwargs] ):
literal[string]
keyword[def] identifier[function_validator] ( identifier[field] , identifier[data] ):
keyword[if] identifier[field] . identifier[value] keyword[is] keyword[None] :
keyword[return]
keyword[if] keyword[not] identifier[method] ( identifier[field] . identifier[value] ,** identifier[kwargs] ):
keyword[raise] identifier[ValidationError] ( literal[string] , identifier[function] = identifier[method] . identifier[__name__] )
keyword[return] identifier[function_validator] | def validate_function(method, **kwargs):
"""
Validate the field matches the result of calling the given method. Example::
def myfunc(value, name):
return value == name
validator = validate_function(myfunc, name='tim')
Essentially creates a validator that only accepts the name 'tim'.
:param method: Method to call.
:param kwargs: Additional keyword arguments passed to the method.
:raises: ``ValidationError('function')``
"""
def function_validator(field, data):
if field.value is None:
return # depends on [control=['if'], data=[]]
if not method(field.value, **kwargs):
raise ValidationError('function', function=method.__name__) # depends on [control=['if'], data=[]]
return function_validator |
def asn1_generaltime_to_seconds(timestr):
"""The given string has one of the following formats
YYYYMMDDhhmmssZ
YYYYMMDDhhmmss+hhmm
YYYYMMDDhhmmss-hhmm
@return: a datetime object or None on error
"""
res = None
timeformat = "%Y%m%d%H%M%S"
try:
res = datetime.strptime(timestr, timeformat + 'Z')
except ValueError:
try:
res = datetime.strptime(timestr, timeformat + '%z')
except ValueError:
pass
return res | def function[asn1_generaltime_to_seconds, parameter[timestr]]:
constant[The given string has one of the following formats
YYYYMMDDhhmmssZ
YYYYMMDDhhmmss+hhmm
YYYYMMDDhhmmss-hhmm
@return: a datetime object or None on error
]
variable[res] assign[=] constant[None]
variable[timeformat] assign[=] constant[%Y%m%d%H%M%S]
<ast.Try object at 0x7da1b2346bf0>
return[name[res]] | keyword[def] identifier[asn1_generaltime_to_seconds] ( identifier[timestr] ):
literal[string]
identifier[res] = keyword[None]
identifier[timeformat] = literal[string]
keyword[try] :
identifier[res] = identifier[datetime] . identifier[strptime] ( identifier[timestr] , identifier[timeformat] + literal[string] )
keyword[except] identifier[ValueError] :
keyword[try] :
identifier[res] = identifier[datetime] . identifier[strptime] ( identifier[timestr] , identifier[timeformat] + literal[string] )
keyword[except] identifier[ValueError] :
keyword[pass]
keyword[return] identifier[res] | def asn1_generaltime_to_seconds(timestr):
"""The given string has one of the following formats
YYYYMMDDhhmmssZ
YYYYMMDDhhmmss+hhmm
YYYYMMDDhhmmss-hhmm
@return: a datetime object or None on error
"""
res = None
timeformat = '%Y%m%d%H%M%S'
try:
res = datetime.strptime(timestr, timeformat + 'Z') # depends on [control=['try'], data=[]]
except ValueError:
try:
res = datetime.strptime(timestr, timeformat + '%z') # depends on [control=['try'], data=[]]
except ValueError:
pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]]
return res |
def get_config(config_file):
"""Get config file and parse YAML into dict."""
config_path = os.path.abspath(config_file)
try:
with open(config_path, 'r') as data:
config = yaml.safe_load(data)
except IOError as exc:
sys.exit(str(exc))
return config | def function[get_config, parameter[config_file]]:
constant[Get config file and parse YAML into dict.]
variable[config_path] assign[=] call[name[os].path.abspath, parameter[name[config_file]]]
<ast.Try object at 0x7da18f09d630>
return[name[config]] | keyword[def] identifier[get_config] ( identifier[config_file] ):
literal[string]
identifier[config_path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[config_file] )
keyword[try] :
keyword[with] identifier[open] ( identifier[config_path] , literal[string] ) keyword[as] identifier[data] :
identifier[config] = identifier[yaml] . identifier[safe_load] ( identifier[data] )
keyword[except] identifier[IOError] keyword[as] identifier[exc] :
identifier[sys] . identifier[exit] ( identifier[str] ( identifier[exc] ))
keyword[return] identifier[config] | def get_config(config_file):
"""Get config file and parse YAML into dict."""
config_path = os.path.abspath(config_file)
try:
with open(config_path, 'r') as data:
config = yaml.safe_load(data) # depends on [control=['with'], data=['data']] # depends on [control=['try'], data=[]]
except IOError as exc:
sys.exit(str(exc)) # depends on [control=['except'], data=['exc']]
return config |
def sample(self, probs, _covs, idxs, epsilons):
"""
Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities
"""
self.set_distribution(epsilons)
return self.distribution.sample(self.loss_ratios, probs) | def function[sample, parameter[self, probs, _covs, idxs, epsilons]]:
constant[
Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities
]
call[name[self].set_distribution, parameter[name[epsilons]]]
return[call[name[self].distribution.sample, parameter[name[self].loss_ratios, name[probs]]]] | keyword[def] identifier[sample] ( identifier[self] , identifier[probs] , identifier[_covs] , identifier[idxs] , identifier[epsilons] ):
literal[string]
identifier[self] . identifier[set_distribution] ( identifier[epsilons] )
keyword[return] identifier[self] . identifier[distribution] . identifier[sample] ( identifier[self] . identifier[loss_ratios] , identifier[probs] ) | def sample(self, probs, _covs, idxs, epsilons):
"""
Sample the .loss_ratios with the given probabilities.
:param probs:
array of E' floats
:param _covs:
ignored, it is there only for API consistency
:param idxs:
array of E booleans with E >= E'
:param epsilons:
array of E floats
:returns:
array of E' probabilities
"""
self.set_distribution(epsilons)
return self.distribution.sample(self.loss_ratios, probs) |
def get_nested_groups(self, username):
"""Retrieve a list of all group names that have <username> as a direct or indirect member.
Args:
username: The account username.
Returns:
list:
A list of strings of group names.
"""
response = self._get(self.rest_url + "/user/group/nested",
params={"username": username})
if not response.ok:
return None
return [g['name'] for g in response.json()['groups']] | def function[get_nested_groups, parameter[self, username]]:
constant[Retrieve a list of all group names that have <username> as a direct or indirect member.
Args:
username: The account username.
Returns:
list:
A list of strings of group names.
]
variable[response] assign[=] call[name[self]._get, parameter[binary_operation[name[self].rest_url + constant[/user/group/nested]]]]
if <ast.UnaryOp object at 0x7da1b0f9c400> begin[:]
return[constant[None]]
return[<ast.ListComp object at 0x7da1b0f5b310>] | keyword[def] identifier[get_nested_groups] ( identifier[self] , identifier[username] ):
literal[string]
identifier[response] = identifier[self] . identifier[_get] ( identifier[self] . identifier[rest_url] + literal[string] ,
identifier[params] ={ literal[string] : identifier[username] })
keyword[if] keyword[not] identifier[response] . identifier[ok] :
keyword[return] keyword[None]
keyword[return] [ identifier[g] [ literal[string] ] keyword[for] identifier[g] keyword[in] identifier[response] . identifier[json] ()[ literal[string] ]] | def get_nested_groups(self, username):
"""Retrieve a list of all group names that have <username> as a direct or indirect member.
Args:
username: The account username.
Returns:
list:
A list of strings of group names.
"""
response = self._get(self.rest_url + '/user/group/nested', params={'username': username})
if not response.ok:
return None # depends on [control=['if'], data=[]]
return [g['name'] for g in response.json()['groups']] |
def gen_from_yaml_config(self, config_path: str) -> Iterator:
"""
Convention is to uppercase first level keys.
:param config_path: Valid path to the yml config file.
:return: Config loaded from yml file
"""
if not config_path:
return {}
with open(config_path, 'r') as f:
yaml_config = yaml.load(f)
gen = map(lambda x: (x[0].upper(), x[1]), yaml_config.items())
return gen | def function[gen_from_yaml_config, parameter[self, config_path]]:
constant[
Convention is to uppercase first level keys.
:param config_path: Valid path to the yml config file.
:return: Config loaded from yml file
]
if <ast.UnaryOp object at 0x7da18bc73580> begin[:]
return[dictionary[[], []]]
with call[name[open], parameter[name[config_path], constant[r]]] begin[:]
variable[yaml_config] assign[=] call[name[yaml].load, parameter[name[f]]]
variable[gen] assign[=] call[name[map], parameter[<ast.Lambda object at 0x7da18bc736a0>, call[name[yaml_config].items, parameter[]]]]
return[name[gen]] | keyword[def] identifier[gen_from_yaml_config] ( identifier[self] , identifier[config_path] : identifier[str] )-> identifier[Iterator] :
literal[string]
keyword[if] keyword[not] identifier[config_path] :
keyword[return] {}
keyword[with] identifier[open] ( identifier[config_path] , literal[string] ) keyword[as] identifier[f] :
identifier[yaml_config] = identifier[yaml] . identifier[load] ( identifier[f] )
identifier[gen] = identifier[map] ( keyword[lambda] identifier[x] :( identifier[x] [ literal[int] ]. identifier[upper] (), identifier[x] [ literal[int] ]), identifier[yaml_config] . identifier[items] ())
keyword[return] identifier[gen] | def gen_from_yaml_config(self, config_path: str) -> Iterator:
"""
Convention is to uppercase first level keys.
:param config_path: Valid path to the yml config file.
:return: Config loaded from yml file
"""
if not config_path:
return {} # depends on [control=['if'], data=[]]
with open(config_path, 'r') as f:
yaml_config = yaml.load(f) # depends on [control=['with'], data=['f']]
gen = map(lambda x: (x[0].upper(), x[1]), yaml_config.items())
return gen |
def time_enqueued(self):
"""Retrieve the timestamp at which the task was enqueued.
See: https://cloud.google.com/appengine/docs/python/taskqueue/rest/tasks
:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from microsecond timestamp, or
``None`` if the property is not set locally.
"""
value = self._properties.get('enqueueTimestamp')
if value is not None:
return _datetime_from_microseconds(int(value)) | def function[time_enqueued, parameter[self]]:
constant[Retrieve the timestamp at which the task was enqueued.
See: https://cloud.google.com/appengine/docs/python/taskqueue/rest/tasks
:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from microsecond timestamp, or
``None`` if the property is not set locally.
]
variable[value] assign[=] call[name[self]._properties.get, parameter[constant[enqueueTimestamp]]]
if compare[name[value] is_not constant[None]] begin[:]
return[call[name[_datetime_from_microseconds], parameter[call[name[int], parameter[name[value]]]]]] | keyword[def] identifier[time_enqueued] ( identifier[self] ):
literal[string]
identifier[value] = identifier[self] . identifier[_properties] . identifier[get] ( literal[string] )
keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] :
keyword[return] identifier[_datetime_from_microseconds] ( identifier[int] ( identifier[value] )) | def time_enqueued(self):
"""Retrieve the timestamp at which the task was enqueued.
See: https://cloud.google.com/appengine/docs/python/taskqueue/rest/tasks
:rtype: :class:`datetime.datetime` or ``NoneType``
:returns: Datetime object parsed from microsecond timestamp, or
``None`` if the property is not set locally.
"""
value = self._properties.get('enqueueTimestamp')
if value is not None:
return _datetime_from_microseconds(int(value)) # depends on [control=['if'], data=['value']] |
def update(gandi, email, password, quota, fallback, alias_add, alias_del):
"""Update a mailbox."""
options = {}
if password:
password = click.prompt('password', hide_input=True,
confirmation_prompt=True)
options['password'] = password
if quota is not None:
options['quota'] = quota
if fallback is not None:
options['fallback_email'] = fallback
login, domain = email
result = gandi.mail.update(domain, login, options, alias_add, alias_del)
return result | def function[update, parameter[gandi, email, password, quota, fallback, alias_add, alias_del]]:
constant[Update a mailbox.]
variable[options] assign[=] dictionary[[], []]
if name[password] begin[:]
variable[password] assign[=] call[name[click].prompt, parameter[constant[password]]]
call[name[options]][constant[password]] assign[=] name[password]
if compare[name[quota] is_not constant[None]] begin[:]
call[name[options]][constant[quota]] assign[=] name[quota]
if compare[name[fallback] is_not constant[None]] begin[:]
call[name[options]][constant[fallback_email]] assign[=] name[fallback]
<ast.Tuple object at 0x7da20c7c9a80> assign[=] name[email]
variable[result] assign[=] call[name[gandi].mail.update, parameter[name[domain], name[login], name[options], name[alias_add], name[alias_del]]]
return[name[result]] | keyword[def] identifier[update] ( identifier[gandi] , identifier[email] , identifier[password] , identifier[quota] , identifier[fallback] , identifier[alias_add] , identifier[alias_del] ):
literal[string]
identifier[options] ={}
keyword[if] identifier[password] :
identifier[password] = identifier[click] . identifier[prompt] ( literal[string] , identifier[hide_input] = keyword[True] ,
identifier[confirmation_prompt] = keyword[True] )
identifier[options] [ literal[string] ]= identifier[password]
keyword[if] identifier[quota] keyword[is] keyword[not] keyword[None] :
identifier[options] [ literal[string] ]= identifier[quota]
keyword[if] identifier[fallback] keyword[is] keyword[not] keyword[None] :
identifier[options] [ literal[string] ]= identifier[fallback]
identifier[login] , identifier[domain] = identifier[email]
identifier[result] = identifier[gandi] . identifier[mail] . identifier[update] ( identifier[domain] , identifier[login] , identifier[options] , identifier[alias_add] , identifier[alias_del] )
keyword[return] identifier[result] | def update(gandi, email, password, quota, fallback, alias_add, alias_del):
"""Update a mailbox."""
options = {}
if password:
password = click.prompt('password', hide_input=True, confirmation_prompt=True)
options['password'] = password # depends on [control=['if'], data=[]]
if quota is not None:
options['quota'] = quota # depends on [control=['if'], data=['quota']]
if fallback is not None:
options['fallback_email'] = fallback # depends on [control=['if'], data=['fallback']]
(login, domain) = email
result = gandi.mail.update(domain, login, options, alias_add, alias_del)
return result |
async def create(self) -> 'Wallet':
"""
Create wallet as configured and store DID, or else re-use any existing configuration.
Operation sequence create/store-DID/close does not auto-remove the wallet on close,
even if so configured.
:return: current object
"""
LOGGER.debug('Wallet.create >>>')
try:
await wallet.create_wallet(
config=json.dumps(self.cfg),
credentials=json.dumps(self.access_creds))
self._created = True
LOGGER.info('Created wallet %s', self.name)
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletAlreadyExistsError:
LOGGER.info('Wallet already exists: %s', self.name)
else:
LOGGER.debug(
'Wallet.create: <!< indy error code %s on creation of wallet %s',
x_indy.error_code,
self.name)
raise
LOGGER.debug('Attempting to open wallet %s', self.name)
self._handle = await wallet.open_wallet(
json.dumps(self.cfg),
json.dumps(self.access_creds))
LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle)
if self._created:
(self._did, self._verkey) = await did.create_and_store_my_did(
self.handle,
json.dumps({'seed': self._seed}))
LOGGER.debug('Wallet %s stored new DID %s, verkey %s from seed', self.name, self.did, self.verkey)
await did.set_did_metadata(self.handle, self.did, json.dumps({'seed': self._seed}))
else:
self._created = True
LOGGER.debug('Attempting to derive seed to did for wallet %s', self.name)
self._did = await self._seed2did()
try:
self._verkey = await did.key_for_local_did(self.handle, self.did)
except IndyError:
LOGGER.debug(
'Wallet.create: <!< no verkey for DID %s on ledger, wallet %s may pertain to another',
self.did,
self.name)
raise CorruptWallet(
'No verkey for DID {} on ledger, wallet {} may pertain to another'.format(
self.did,
self.name))
LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name, self.verkey, self.did)
await wallet.close_wallet(self.handle)
LOGGER.debug('Wallet.create <<<')
return self | <ast.AsyncFunctionDef object at 0x7da2047eba30> | keyword[async] keyword[def] identifier[create] ( identifier[self] )-> literal[string] :
literal[string]
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[try] :
keyword[await] identifier[wallet] . identifier[create_wallet] (
identifier[config] = identifier[json] . identifier[dumps] ( identifier[self] . identifier[cfg] ),
identifier[credentials] = identifier[json] . identifier[dumps] ( identifier[self] . identifier[access_creds] ))
identifier[self] . identifier[_created] = keyword[True]
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[name] )
keyword[except] identifier[IndyError] keyword[as] identifier[x_indy] :
keyword[if] identifier[x_indy] . identifier[error_code] == identifier[ErrorCode] . identifier[WalletAlreadyExistsError] :
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[name] )
keyword[else] :
identifier[LOGGER] . identifier[debug] (
literal[string] ,
identifier[x_indy] . identifier[error_code] ,
identifier[self] . identifier[name] )
keyword[raise]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] )
identifier[self] . identifier[_handle] = keyword[await] identifier[wallet] . identifier[open_wallet] (
identifier[json] . identifier[dumps] ( identifier[self] . identifier[cfg] ),
identifier[json] . identifier[dumps] ( identifier[self] . identifier[access_creds] ))
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[handle] )
keyword[if] identifier[self] . identifier[_created] :
( identifier[self] . identifier[_did] , identifier[self] . identifier[_verkey] )= keyword[await] identifier[did] . identifier[create_and_store_my_did] (
identifier[self] . identifier[handle] ,
identifier[json] . identifier[dumps] ({ literal[string] : identifier[self] . identifier[_seed] }))
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[did] , identifier[self] . identifier[verkey] )
keyword[await] identifier[did] . identifier[set_did_metadata] ( identifier[self] . identifier[handle] , identifier[self] . identifier[did] , identifier[json] . identifier[dumps] ({ literal[string] : identifier[self] . identifier[_seed] }))
keyword[else] :
identifier[self] . identifier[_created] = keyword[True]
identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[name] )
identifier[self] . identifier[_did] = keyword[await] identifier[self] . identifier[_seed2did] ()
keyword[try] :
identifier[self] . identifier[_verkey] = keyword[await] identifier[did] . identifier[key_for_local_did] ( identifier[self] . identifier[handle] , identifier[self] . identifier[did] )
keyword[except] identifier[IndyError] :
identifier[LOGGER] . identifier[debug] (
literal[string] ,
identifier[self] . identifier[did] ,
identifier[self] . identifier[name] )
keyword[raise] identifier[CorruptWallet] (
literal[string] . identifier[format] (
identifier[self] . identifier[did] ,
identifier[self] . identifier[name] ))
identifier[LOGGER] . identifier[info] ( literal[string] , identifier[self] . identifier[name] , identifier[self] . identifier[verkey] , identifier[self] . identifier[did] )
keyword[await] identifier[wallet] . identifier[close_wallet] ( identifier[self] . identifier[handle] )
identifier[LOGGER] . identifier[debug] ( literal[string] )
keyword[return] identifier[self] | async def create(self) -> 'Wallet':
"""
Create wallet as configured and store DID, or else re-use any existing configuration.
Operation sequence create/store-DID/close does not auto-remove the wallet on close,
even if so configured.
:return: current object
"""
LOGGER.debug('Wallet.create >>>')
try:
await wallet.create_wallet(config=json.dumps(self.cfg), credentials=json.dumps(self.access_creds))
self._created = True
LOGGER.info('Created wallet %s', self.name) # depends on [control=['try'], data=[]]
except IndyError as x_indy:
if x_indy.error_code == ErrorCode.WalletAlreadyExistsError:
LOGGER.info('Wallet already exists: %s', self.name) # depends on [control=['if'], data=[]]
else:
LOGGER.debug('Wallet.create: <!< indy error code %s on creation of wallet %s', x_indy.error_code, self.name)
raise # depends on [control=['except'], data=['x_indy']]
LOGGER.debug('Attempting to open wallet %s', self.name)
self._handle = await wallet.open_wallet(json.dumps(self.cfg), json.dumps(self.access_creds))
LOGGER.info('Opened wallet %s on handle %s', self.name, self.handle)
if self._created:
(self._did, self._verkey) = await did.create_and_store_my_did(self.handle, json.dumps({'seed': self._seed}))
LOGGER.debug('Wallet %s stored new DID %s, verkey %s from seed', self.name, self.did, self.verkey)
await did.set_did_metadata(self.handle, self.did, json.dumps({'seed': self._seed})) # depends on [control=['if'], data=[]]
else:
self._created = True
LOGGER.debug('Attempting to derive seed to did for wallet %s', self.name)
self._did = await self._seed2did()
try:
self._verkey = await did.key_for_local_did(self.handle, self.did) # depends on [control=['try'], data=[]]
except IndyError:
LOGGER.debug('Wallet.create: <!< no verkey for DID %s on ledger, wallet %s may pertain to another', self.did, self.name)
raise CorruptWallet('No verkey for DID {} on ledger, wallet {} may pertain to another'.format(self.did, self.name)) # depends on [control=['except'], data=[]]
LOGGER.info('Wallet %s got verkey %s for existing DID %s', self.name, self.verkey, self.did)
await wallet.close_wallet(self.handle)
LOGGER.debug('Wallet.create <<<')
return self |
def mssql_get_pk_index_name(engine: Engine,
tablename: str,
schemaname: str = MSSQL_DEFAULT_SCHEMA) -> str:
"""
For Microsoft SQL Server specifically: fetch the name of the PK index
for the specified table (in the specified schema), or ``''`` if none is
found.
"""
# http://docs.sqlalchemy.org/en/latest/core/connections.html#sqlalchemy.engine.Connection.execute # noqa
# http://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.text # noqa
# http://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.TextClause.bindparams # noqa
# http://docs.sqlalchemy.org/en/latest/core/connections.html#sqlalchemy.engine.ResultProxy # noqa
query = text("""
SELECT
kc.name AS index_name
FROM
sys.key_constraints AS kc
INNER JOIN sys.tables AS ta ON ta.object_id = kc.parent_object_id
INNER JOIN sys.schemas AS s ON ta.schema_id = s.schema_id
WHERE
kc.[type] = 'PK'
AND ta.name = :tablename
AND s.name = :schemaname
""").bindparams(
tablename=tablename,
schemaname=schemaname,
)
with contextlib.closing(
engine.execute(query)) as result: # type: ResultProxy # noqa
row = result.fetchone()
return row[0] if row else '' | def function[mssql_get_pk_index_name, parameter[engine, tablename, schemaname]]:
constant[
For Microsoft SQL Server specifically: fetch the name of the PK index
for the specified table (in the specified schema), or ``''`` if none is
found.
]
variable[query] assign[=] call[call[name[text], parameter[constant[
SELECT
kc.name AS index_name
FROM
sys.key_constraints AS kc
INNER JOIN sys.tables AS ta ON ta.object_id = kc.parent_object_id
INNER JOIN sys.schemas AS s ON ta.schema_id = s.schema_id
WHERE
kc.[type] = 'PK'
AND ta.name = :tablename
AND s.name = :schemaname
]]].bindparams, parameter[]]
with call[name[contextlib].closing, parameter[call[name[engine].execute, parameter[name[query]]]]] begin[:]
variable[row] assign[=] call[name[result].fetchone, parameter[]]
return[<ast.IfExp object at 0x7da1b18e5ae0>] | keyword[def] identifier[mssql_get_pk_index_name] ( identifier[engine] : identifier[Engine] ,
identifier[tablename] : identifier[str] ,
identifier[schemaname] : identifier[str] = identifier[MSSQL_DEFAULT_SCHEMA] )-> identifier[str] :
literal[string]
identifier[query] = identifier[text] ( literal[string] ). identifier[bindparams] (
identifier[tablename] = identifier[tablename] ,
identifier[schemaname] = identifier[schemaname] ,
)
keyword[with] identifier[contextlib] . identifier[closing] (
identifier[engine] . identifier[execute] ( identifier[query] )) keyword[as] identifier[result] :
identifier[row] = identifier[result] . identifier[fetchone] ()
keyword[return] identifier[row] [ literal[int] ] keyword[if] identifier[row] keyword[else] literal[string] | def mssql_get_pk_index_name(engine: Engine, tablename: str, schemaname: str=MSSQL_DEFAULT_SCHEMA) -> str:
"""
For Microsoft SQL Server specifically: fetch the name of the PK index
for the specified table (in the specified schema), or ``''`` if none is
found.
"""
# http://docs.sqlalchemy.org/en/latest/core/connections.html#sqlalchemy.engine.Connection.execute # noqa
# http://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.text # noqa
# http://docs.sqlalchemy.org/en/latest/core/sqlelement.html#sqlalchemy.sql.expression.TextClause.bindparams # noqa
# http://docs.sqlalchemy.org/en/latest/core/connections.html#sqlalchemy.engine.ResultProxy # noqa
query = text("\nSELECT\n kc.name AS index_name\nFROM\n sys.key_constraints AS kc\n INNER JOIN sys.tables AS ta ON ta.object_id = kc.parent_object_id\n INNER JOIN sys.schemas AS s ON ta.schema_id = s.schema_id\nWHERE\n kc.[type] = 'PK'\n AND ta.name = :tablename\n AND s.name = :schemaname\n ").bindparams(tablename=tablename, schemaname=schemaname)
with contextlib.closing(engine.execute(query)) as result: # type: ResultProxy # noqa
row = result.fetchone()
return row[0] if row else '' # depends on [control=['with'], data=['result']] |
def get_weather_forecast_days(self, latitude, longitude,
days=1, frequency=1, reading_type=None):
"""
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
"""
params = {}
# Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
# intervals.
if frequency not in [1, 3]:
raise ValueError("Reading frequency must be 1 or 3")
params['days'] = days
params['source'] = 'NWS' + str(frequency)
params['latitude'] = latitude
params['longitude'] = longitude
if reading_type:
# url encoding will make spaces a + instead of %20, which service
# interprets as an "and" search which is undesirable
reading_type = reading_type.replace(' ', '%20')
params['reading_type'] = urllib.quote_plus(reading_type)
url = self.uri + '/v1/weather-forecast-days/'
return self.service._get(url, params=params) | def function[get_weather_forecast_days, parameter[self, latitude, longitude, days, frequency, reading_type]]:
constant[
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
]
variable[params] assign[=] dictionary[[], []]
if compare[name[frequency] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da18f00d0f0>, <ast.Constant object at 0x7da18f00f940>]]] begin[:]
<ast.Raise object at 0x7da18f00ee30>
call[name[params]][constant[days]] assign[=] name[days]
call[name[params]][constant[source]] assign[=] binary_operation[constant[NWS] + call[name[str], parameter[name[frequency]]]]
call[name[params]][constant[latitude]] assign[=] name[latitude]
call[name[params]][constant[longitude]] assign[=] name[longitude]
if name[reading_type] begin[:]
variable[reading_type] assign[=] call[name[reading_type].replace, parameter[constant[ ], constant[%20]]]
call[name[params]][constant[reading_type]] assign[=] call[name[urllib].quote_plus, parameter[name[reading_type]]]
variable[url] assign[=] binary_operation[name[self].uri + constant[/v1/weather-forecast-days/]]
return[call[name[self].service._get, parameter[name[url]]]] | keyword[def] identifier[get_weather_forecast_days] ( identifier[self] , identifier[latitude] , identifier[longitude] ,
identifier[days] = literal[int] , identifier[frequency] = literal[int] , identifier[reading_type] = keyword[None] ):
literal[string]
identifier[params] ={}
keyword[if] identifier[frequency] keyword[not] keyword[in] [ literal[int] , literal[int] ]:
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[params] [ literal[string] ]= identifier[days]
identifier[params] [ literal[string] ]= literal[string] + identifier[str] ( identifier[frequency] )
identifier[params] [ literal[string] ]= identifier[latitude]
identifier[params] [ literal[string] ]= identifier[longitude]
keyword[if] identifier[reading_type] :
identifier[reading_type] = identifier[reading_type] . identifier[replace] ( literal[string] , literal[string] )
identifier[params] [ literal[string] ]= identifier[urllib] . identifier[quote_plus] ( identifier[reading_type] )
identifier[url] = identifier[self] . identifier[uri] + literal[string]
keyword[return] identifier[self] . identifier[service] . identifier[_get] ( identifier[url] , identifier[params] = identifier[params] ) | def get_weather_forecast_days(self, latitude, longitude, days=1, frequency=1, reading_type=None):
"""
Return the weather forecast for a given location.
::
results = ws.get_weather_forecast_days(lat, long)
for w in results['hits']:
print w['start_datetime_local']
print w['reading_type'], w['reading_value']
For description of reading types:
https://graphical.weather.gov/xml/docs/elementInputNames.php
"""
params = {}
# Can get data from NWS1 or NWS3 representing 1-hr and 3-hr
# intervals.
if frequency not in [1, 3]:
raise ValueError('Reading frequency must be 1 or 3') # depends on [control=['if'], data=[]]
params['days'] = days
params['source'] = 'NWS' + str(frequency)
params['latitude'] = latitude
params['longitude'] = longitude
if reading_type:
# url encoding will make spaces a + instead of %20, which service
# interprets as an "and" search which is undesirable
reading_type = reading_type.replace(' ', '%20')
params['reading_type'] = urllib.quote_plus(reading_type) # depends on [control=['if'], data=[]]
url = self.uri + '/v1/weather-forecast-days/'
return self.service._get(url, params=params) |
def client_certificate(self, client_certificate):
"""
Sets the client_certificate of this GlobalSignCredentials.
The client certificate provided by GlobalSign to allow HTTPS connection over TLS/SSL. The certificate wraps a public key that matches a private key provided by the customer. The certificate must be in PEM format.
:param client_certificate: The client_certificate of this GlobalSignCredentials.
:type: str
"""
if client_certificate is None:
raise ValueError("Invalid value for `client_certificate`, must not be `None`")
if client_certificate is not None and len(client_certificate) > 3000:
raise ValueError("Invalid value for `client_certificate`, length must be less than or equal to `3000`")
self._client_certificate = client_certificate | def function[client_certificate, parameter[self, client_certificate]]:
constant[
Sets the client_certificate of this GlobalSignCredentials.
The client certificate provided by GlobalSign to allow HTTPS connection over TLS/SSL. The certificate wraps a public key that matches a private key provided by the customer. The certificate must be in PEM format.
:param client_certificate: The client_certificate of this GlobalSignCredentials.
:type: str
]
if compare[name[client_certificate] is constant[None]] begin[:]
<ast.Raise object at 0x7da18ede7ac0>
if <ast.BoolOp object at 0x7da18ede5ae0> begin[:]
<ast.Raise object at 0x7da18ede7e50>
name[self]._client_certificate assign[=] name[client_certificate] | keyword[def] identifier[client_certificate] ( identifier[self] , identifier[client_certificate] ):
literal[string]
keyword[if] identifier[client_certificate] keyword[is] keyword[None] :
keyword[raise] identifier[ValueError] ( literal[string] )
keyword[if] identifier[client_certificate] keyword[is] keyword[not] keyword[None] keyword[and] identifier[len] ( identifier[client_certificate] )> literal[int] :
keyword[raise] identifier[ValueError] ( literal[string] )
identifier[self] . identifier[_client_certificate] = identifier[client_certificate] | def client_certificate(self, client_certificate):
"""
Sets the client_certificate of this GlobalSignCredentials.
The client certificate provided by GlobalSign to allow HTTPS connection over TLS/SSL. The certificate wraps a public key that matches a private key provided by the customer. The certificate must be in PEM format.
:param client_certificate: The client_certificate of this GlobalSignCredentials.
:type: str
"""
if client_certificate is None:
raise ValueError('Invalid value for `client_certificate`, must not be `None`') # depends on [control=['if'], data=[]]
if client_certificate is not None and len(client_certificate) > 3000:
raise ValueError('Invalid value for `client_certificate`, length must be less than or equal to `3000`') # depends on [control=['if'], data=[]]
self._client_certificate = client_certificate |
def index_firstnot(ol,value):
'''
from elist.elist import *
ol = [1,'a',3,'a',4,'a',5]
index_firstnot(ol,'a')
####index_firstnot, array_indexnot, indexOfnot are the same
array_indexnot(ol,'a')
indexOfnot(ol,'a')
'''
length = ol.__len__()
for i in range(0,length):
if(value == ol[i]):
pass
else:
return(i)
return(None) | def function[index_firstnot, parameter[ol, value]]:
constant[
from elist.elist import *
ol = [1,'a',3,'a',4,'a',5]
index_firstnot(ol,'a')
####index_firstnot, array_indexnot, indexOfnot are the same
array_indexnot(ol,'a')
indexOfnot(ol,'a')
]
variable[length] assign[=] call[name[ol].__len__, parameter[]]
for taget[name[i]] in starred[call[name[range], parameter[constant[0], name[length]]]] begin[:]
if compare[name[value] equal[==] call[name[ol]][name[i]]] begin[:]
pass
return[constant[None]] | keyword[def] identifier[index_firstnot] ( identifier[ol] , identifier[value] ):
literal[string]
identifier[length] = identifier[ol] . identifier[__len__] ()
keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[length] ):
keyword[if] ( identifier[value] == identifier[ol] [ identifier[i] ]):
keyword[pass]
keyword[else] :
keyword[return] ( identifier[i] )
keyword[return] ( keyword[None] ) | def index_firstnot(ol, value):
"""
from elist.elist import *
ol = [1,'a',3,'a',4,'a',5]
index_firstnot(ol,'a')
####index_firstnot, array_indexnot, indexOfnot are the same
array_indexnot(ol,'a')
indexOfnot(ol,'a')
"""
length = ol.__len__()
for i in range(0, length):
if value == ol[i]:
pass # depends on [control=['if'], data=[]]
else:
return i # depends on [control=['for'], data=['i']]
return None |
def get_ar_data(self):
"""Returns a list of AR data
"""
for obj in self.get_objects():
info = self.get_base_info(obj)
info.update({
"analyses": self.get_analysis_data_for(obj),
"sampletype": self.get_base_info(obj.getSampleType()),
"number_of_partitions": self.get_number_of_partitions_for(obj),
"template": self.get_template_data_for(obj),
})
yield info | def function[get_ar_data, parameter[self]]:
constant[Returns a list of AR data
]
for taget[name[obj]] in starred[call[name[self].get_objects, parameter[]]] begin[:]
variable[info] assign[=] call[name[self].get_base_info, parameter[name[obj]]]
call[name[info].update, parameter[dictionary[[<ast.Constant object at 0x7da1b1d39690>, <ast.Constant object at 0x7da1b1d38be0>, <ast.Constant object at 0x7da1b1d3b760>, <ast.Constant object at 0x7da1b1d3a650>], [<ast.Call object at 0x7da1b1d3abf0>, <ast.Call object at 0x7da1b1d3b490>, <ast.Call object at 0x7da1b1d396f0>, <ast.Call object at 0x7da1b1d397b0>]]]]
<ast.Yield object at 0x7da1b1d3a3e0> | keyword[def] identifier[get_ar_data] ( identifier[self] ):
literal[string]
keyword[for] identifier[obj] keyword[in] identifier[self] . identifier[get_objects] ():
identifier[info] = identifier[self] . identifier[get_base_info] ( identifier[obj] )
identifier[info] . identifier[update] ({
literal[string] : identifier[self] . identifier[get_analysis_data_for] ( identifier[obj] ),
literal[string] : identifier[self] . identifier[get_base_info] ( identifier[obj] . identifier[getSampleType] ()),
literal[string] : identifier[self] . identifier[get_number_of_partitions_for] ( identifier[obj] ),
literal[string] : identifier[self] . identifier[get_template_data_for] ( identifier[obj] ),
})
keyword[yield] identifier[info] | def get_ar_data(self):
"""Returns a list of AR data
"""
for obj in self.get_objects():
info = self.get_base_info(obj)
info.update({'analyses': self.get_analysis_data_for(obj), 'sampletype': self.get_base_info(obj.getSampleType()), 'number_of_partitions': self.get_number_of_partitions_for(obj), 'template': self.get_template_data_for(obj)})
yield info # depends on [control=['for'], data=['obj']] |
def get_surface_state(self, data=None):
"""Return the Euclidean distance between codebook and data.
:param data: Optional parameter to specify data, otherwise the
data used previously to train the SOM is used.
:type data: 2D numpy.array of float32.
:returns: The the dot product of the codebook and the data.
:rtype: 2D numpy.array
"""
if data is None:
d = self._data
else:
d = data
codebookReshaped = self.codebook.reshape(self.codebook.shape[0] * self.codebook.shape[1], self.codebook.shape[2])
parts = np.array_split(d, 200, axis=0)
am = np.empty((0, (self._n_columns * self._n_rows)), dtype="float64")
for part in parts:
am = np.concatenate((am, (cdist((part), codebookReshaped, 'euclidean'))), axis=0)
if data is None:
self.activation_map = am
return am | def function[get_surface_state, parameter[self, data]]:
constant[Return the Euclidean distance between codebook and data.
:param data: Optional parameter to specify data, otherwise the
data used previously to train the SOM is used.
:type data: 2D numpy.array of float32.
:returns: The the dot product of the codebook and the data.
:rtype: 2D numpy.array
]
if compare[name[data] is constant[None]] begin[:]
variable[d] assign[=] name[self]._data
variable[codebookReshaped] assign[=] call[name[self].codebook.reshape, parameter[binary_operation[call[name[self].codebook.shape][constant[0]] * call[name[self].codebook.shape][constant[1]]], call[name[self].codebook.shape][constant[2]]]]
variable[parts] assign[=] call[name[np].array_split, parameter[name[d], constant[200]]]
variable[am] assign[=] call[name[np].empty, parameter[tuple[[<ast.Constant object at 0x7da1b0b59300>, <ast.BinOp object at 0x7da1b0b593f0>]]]]
for taget[name[part]] in starred[name[parts]] begin[:]
variable[am] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da1b0b58f10>, <ast.Call object at 0x7da1b0b59000>]]]]
if compare[name[data] is constant[None]] begin[:]
name[self].activation_map assign[=] name[am]
return[name[am]] | keyword[def] identifier[get_surface_state] ( identifier[self] , identifier[data] = keyword[None] ):
literal[string]
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[d] = identifier[self] . identifier[_data]
keyword[else] :
identifier[d] = identifier[data]
identifier[codebookReshaped] = identifier[self] . identifier[codebook] . identifier[reshape] ( identifier[self] . identifier[codebook] . identifier[shape] [ literal[int] ]* identifier[self] . identifier[codebook] . identifier[shape] [ literal[int] ], identifier[self] . identifier[codebook] . identifier[shape] [ literal[int] ])
identifier[parts] = identifier[np] . identifier[array_split] ( identifier[d] , literal[int] , identifier[axis] = literal[int] )
identifier[am] = identifier[np] . identifier[empty] (( literal[int] ,( identifier[self] . identifier[_n_columns] * identifier[self] . identifier[_n_rows] )), identifier[dtype] = literal[string] )
keyword[for] identifier[part] keyword[in] identifier[parts] :
identifier[am] = identifier[np] . identifier[concatenate] (( identifier[am] ,( identifier[cdist] (( identifier[part] ), identifier[codebookReshaped] , literal[string] ))), identifier[axis] = literal[int] )
keyword[if] identifier[data] keyword[is] keyword[None] :
identifier[self] . identifier[activation_map] = identifier[am]
keyword[return] identifier[am] | def get_surface_state(self, data=None):
"""Return the Euclidean distance between codebook and data.
:param data: Optional parameter to specify data, otherwise the
data used previously to train the SOM is used.
:type data: 2D numpy.array of float32.
:returns: The the dot product of the codebook and the data.
:rtype: 2D numpy.array
"""
if data is None:
d = self._data # depends on [control=['if'], data=[]]
else:
d = data
codebookReshaped = self.codebook.reshape(self.codebook.shape[0] * self.codebook.shape[1], self.codebook.shape[2])
parts = np.array_split(d, 200, axis=0)
am = np.empty((0, self._n_columns * self._n_rows), dtype='float64')
for part in parts:
am = np.concatenate((am, cdist(part, codebookReshaped, 'euclidean')), axis=0) # depends on [control=['for'], data=['part']]
if data is None:
self.activation_map = am # depends on [control=['if'], data=[]]
return am |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.